
Commit fdf55c4

UnsafeCell unfortunately lets the niches of the inner type "leak through" to the outer type, which can cause unsoundness. Fixes #29
1 parent 4902333 commit fdf55c4
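
For context, here is a minimal standalone sketch (not part of the commit) of the mechanism the message describes. OldAtomic and NewAtomic are hypothetical stand-ins for the struct before and after this change. On compilers where UnsafeCell still forwards the inner type's niches (the behaviour tracked in rust-lang/rust#87341), the zero niche of NonZeroU32 leaks through the old layout, so Option<OldAtomic<NonZeroU32>> can be packed into the same 4 bytes, with None encoded as the forbidden value 0 inside the cell; the compiler may then assume the cell never holds 0, even while another thread is mid-update. Wrapping the value in MaybeUninit<T>, which never exposes niches, blocks that assumption.

use std::cell::UnsafeCell;
use std::mem::{size_of, MaybeUninit};
use std::num::NonZeroU32;

// Hypothetical stand-in for the struct before this commit.
#[repr(transparent)]
struct OldAtomic<T>(UnsafeCell<T>);

// Hypothetical stand-in for the struct after this commit.
#[repr(transparent)]
struct NewAtomic<T>(UnsafeCell<MaybeUninit<T>>);

fn main() {
    // MaybeUninit<T> has no niches, so the Option needs its own discriminant
    // and cannot reuse the forbidden value 0 stored inside the cell.
    assert!(size_of::<Option<NewAtomic<NonZeroU32>>>() > size_of::<NewAtomic<NonZeroU32>>());

    // Whether the old layout exposes the niche depends on the compiler version:
    // before rust-lang/rust#87341 was resolved, this printed 4 (niche leaked through).
    println!("{}", size_of::<Option<OldAtomic<NonZeroU32>>>());
}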

2 files changed: +33 additions, -23 deletions


src/fallback.rs

Lines changed: 3 additions & 2 deletions
@@ -6,12 +6,13 @@
 // copied, modified, or distributed except according to those terms.
 
 use core::cmp;
+use core::hint;
 use core::mem;
 use core::num::Wrapping;
 use core::ops;
 use core::ptr;
 use core::slice;
-use core::sync::atomic::{self, AtomicUsize, Ordering};
+use core::sync::atomic::{AtomicUsize, Ordering};
 
 // We use an AtomicUsize instead of an AtomicBool because it performs better
 // on architectures that don't have byte-sized atomics.
@@ -28,7 +29,7 @@ impl SpinLock {
             .is_err()
         {
             while self.0.load(Ordering::Relaxed) != 0 {
-                atomic::spin_loop_hint();
+                hint::spin_loop();
             }
         }
     }
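
For reference, core::hint::spin_loop is the stable replacement for the deprecated core::sync::atomic::spin_loop_hint. A generic spin-wait using it (my own illustration, not the crate's code) looks like this:

use std::hint;
use std::sync::atomic::{AtomicUsize, Ordering};

// Busy-wait on a flag, emitting the CPU's spin-loop hint (e.g. PAUSE on x86)
// on every iteration while the flag is still set.
fn wait_until_unlocked(lock: &AtomicUsize) {
    while lock.load(Ordering::Relaxed) != 0 {
        hint::spin_loop();
    }
}

fn main() {
    let lock = AtomicUsize::new(0);
    wait_until_unlocked(&lock); // returns immediately: the flag is already 0
}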

src/lib.rs

Lines changed: 30 additions & 21 deletions
@@ -38,6 +38,7 @@
 #[macro_use]
 extern crate std;
 
+use core::mem::MaybeUninit;
 // Re-export some useful definitions from libcore
 pub use core::sync::atomic::{fence, Ordering};
 
@@ -55,7 +56,8 @@ mod ops;
 /// between threads.
 #[repr(transparent)]
 pub struct Atomic<T> {
-    v: UnsafeCell<T>,
+    // The MaybeUninit is here to work around rust-lang/rust#87341.
+    v: UnsafeCell<MaybeUninit<T>>,
 }
 
 // Atomic<T> is only Sync if T is Send
@@ -90,7 +92,7 @@ impl<T> Atomic<T> {
     #[inline]
     pub const fn new(v: T) -> Atomic<T> {
         Atomic {
-            v: UnsafeCell::new(v),
+            v: UnsafeCell::new(MaybeUninit::new(v)),
         }
     }
 
@@ -106,13 +108,18 @@ impl<T> Atomic<T> {
 }
 
 impl<T: Copy> Atomic<T> {
+    #[inline]
+    fn inner_ptr(&self) -> *mut T {
+        self.v.get() as *mut T
+    }
+
     /// Returns a mutable reference to the underlying type.
     ///
     /// This is safe because the mutable reference guarantees that no other threads are
     /// concurrently accessing the atomic data.
     #[inline]
     pub fn get_mut(&mut self) -> &mut T {
-        unsafe { &mut *self.v.get() }
+        unsafe { &mut *self.inner_ptr() }
     }
 
     /// Consumes the atomic and returns the contained value.
@@ -121,7 +128,7 @@ impl<T: Copy> Atomic<T> {
     /// concurrently accessing the atomic data.
     #[inline]
     pub fn into_inner(self) -> T {
-        self.v.into_inner()
+        unsafe { self.v.into_inner().assume_init() }
    }
 
     /// Loads a value from the `Atomic`.
@@ -134,7 +141,7 @@ impl<T: Copy> Atomic<T> {
     /// Panics if `order` is `Release` or `AcqRel`.
     #[inline]
     pub fn load(&self, order: Ordering) -> T {
-        unsafe { ops::atomic_load(self.v.get(), order) }
+        unsafe { ops::atomic_load(self.inner_ptr(), order) }
     }
 
     /// Stores a value into the `Atomic`.
@@ -148,7 +155,7 @@ impl<T: Copy> Atomic<T> {
     #[inline]
     pub fn store(&self, val: T, order: Ordering) {
         unsafe {
-            ops::atomic_store(self.v.get(), val, order);
+            ops::atomic_store(self.inner_ptr(), val, order);
         }
     }
 
@@ -158,7 +165,7 @@ impl<T: Copy> Atomic<T> {
     /// of this operation.
     #[inline]
     pub fn swap(&self, val: T, order: Ordering) -> T {
-        unsafe { ops::atomic_swap(self.v.get(), val, order) }
+        unsafe { ops::atomic_swap(self.inner_ptr(), val, order) }
     }
 
     /// Stores a value into the `Atomic` if the current value is the same as the
@@ -181,7 +188,7 @@ impl<T: Copy> Atomic<T> {
         success: Ordering,
         failure: Ordering,
     ) -> Result<T, T> {
-        unsafe { ops::atomic_compare_exchange(self.v.get(), current, new, success, failure) }
+        unsafe { ops::atomic_compare_exchange(self.inner_ptr(), current, new, success, failure) }
     }
 
     /// Stores a value into the `Atomic` if the current value is the same as the
@@ -206,7 +213,9 @@ impl<T: Copy> Atomic<T> {
         success: Ordering,
         failure: Ordering,
     ) -> Result<T, T> {
-        unsafe { ops::atomic_compare_exchange_weak(self.v.get(), current, new, success, failure) }
+        unsafe {
+            ops::atomic_compare_exchange_weak(self.inner_ptr(), current, new, success, failure)
+        }
     }
 
     /// Fetches the value, and applies a function to it that returns an optional
@@ -275,7 +284,7 @@ impl Atomic<bool> {
     /// Returns the previous value.
     #[inline]
     pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
-        unsafe { ops::atomic_and(self.v.get(), val, order) }
+        unsafe { ops::atomic_and(self.inner_ptr(), val, order) }
     }
 
     /// Logical "or" with a boolean value.
@@ -286,7 +295,7 @@ impl Atomic<bool> {
     /// Returns the previous value.
     #[inline]
     pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
-        unsafe { ops::atomic_or(self.v.get(), val, order) }
+        unsafe { ops::atomic_or(self.inner_ptr(), val, order) }
     }
 
     /// Logical "xor" with a boolean value.
@@ -297,7 +306,7 @@ impl Atomic<bool> {
     /// Returns the previous value.
     #[inline]
     pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
-        unsafe { ops::atomic_xor(self.v.get(), val, order) }
+        unsafe { ops::atomic_xor(self.inner_ptr(), val, order) }
     }
 }
 
@@ -307,31 +316,31 @@ macro_rules! atomic_ops_common {
            /// Add to the current value, returning the previous value.
            #[inline]
            pub fn fetch_add(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_add(self.v.get(), val, order) }
+                unsafe { ops::atomic_add(self.inner_ptr(), val, order) }
            }
 
            /// Subtract from the current value, returning the previous value.
            #[inline]
            pub fn fetch_sub(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_sub(self.v.get(), val, order) }
+                unsafe { ops::atomic_sub(self.inner_ptr(), val, order) }
            }
 
            /// Bitwise and with the current value, returning the previous value.
            #[inline]
            pub fn fetch_and(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_and(self.v.get(), val, order) }
+                unsafe { ops::atomic_and(self.inner_ptr(), val, order) }
            }
 
            /// Bitwise or with the current value, returning the previous value.
            #[inline]
            pub fn fetch_or(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_or(self.v.get(), val, order) }
+                unsafe { ops::atomic_or(self.inner_ptr(), val, order) }
            }
 
            /// Bitwise xor with the current value, returning the previous value.
            #[inline]
            pub fn fetch_xor(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_xor(self.v.get(), val, order) }
+                unsafe { ops::atomic_xor(self.inner_ptr(), val, order) }
            }
        }
    )*);
@@ -344,13 +353,13 @@ macro_rules! atomic_ops_signed {
            /// Minimum with the current value.
            #[inline]
            pub fn fetch_min(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_min(self.v.get(), val, order) }
+                unsafe { ops::atomic_min(self.inner_ptr(), val, order) }
            }
 
            /// Maximum with the current value.
            #[inline]
            pub fn fetch_max(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_max(self.v.get(), val, order) }
+                unsafe { ops::atomic_max(self.inner_ptr(), val, order) }
            }
        }
    )*
@@ -364,13 +373,13 @@ macro_rules! atomic_ops_unsigned {
            /// Minimum with the current value.
            #[inline]
            pub fn fetch_min(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_umin(self.v.get(), val, order) }
+                unsafe { ops::atomic_umin(self.inner_ptr(), val, order) }
            }
 
            /// Maximum with the current value.
            #[inline]
            pub fn fetch_max(&self, val: $t, order: Ordering) -> $t {
-                unsafe { ops::atomic_umax(self.v.get(), val, order) }
+                unsafe { ops::atomic_umax(self.inner_ptr(), val, order) }
            }
        }
    )*
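
A side note on the new accessors (my own sketch, not part of the commit): UnsafeCell::get now yields *mut MaybeUninit<T>, and inner_ptr casts that to *mut T. The cast relies on the documented guarantee that MaybeUninit<T> has the same size, alignment, and ABI as T, plus the fact that Atomic::new always stores an initialized value, which is also why into_inner().assume_init() is sound.

use std::mem::MaybeUninit;

fn main() {
    // Same size and alignment as the wrapped type, per the MaybeUninit docs.
    assert_eq!(std::mem::size_of::<MaybeUninit<u64>>(), std::mem::size_of::<u64>());
    assert_eq!(std::mem::align_of::<MaybeUninit<u64>>(), std::mem::align_of::<u64>());

    // The same pattern as inner_ptr(): take a *mut u64 into the storage,
    // write through it, then read the value back out as an initialized u64.
    let mut slot: MaybeUninit<u64> = MaybeUninit::new(7);
    let p: *mut u64 = slot.as_mut_ptr();
    unsafe {
        *p = 42;
        assert_eq!(slot.assume_init(), 42);
    }
}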
