@@ -13,7 +13,7 @@
 //! Concurrency-enabled mechanisms for sharing mutable and/or immutable state
 //! between tasks.
 
-use core::atomics;
+use core::atomic;
 use core::clone::Clone;
 use core::kinds::{Share, Send};
 use core::mem::{min_align_of, size_of, drop};
@@ -71,8 +71,8 @@ pub struct Weak<T> {
 }
 
 struct ArcInner<T> {
-    strong: atomics::AtomicUint,
-    weak: atomics::AtomicUint,
+    strong: atomic::AtomicUint,
+    weak: atomic::AtomicUint,
     data: T,
 }
 
@@ -84,8 +84,8 @@ impl<T: Share + Send> Arc<T> {
         // Start the weak pointer count as 1 which is the weak pointer that's
         // held by all the strong pointers (kinda), see std/rc.rs for more info
         let x = box ArcInner {
-            strong: atomics::AtomicUint::new(1),
-            weak: atomics::AtomicUint::new(1),
+            strong: atomic::AtomicUint::new(1),
+            weak: atomic::AtomicUint::new(1),
             data: data,
         };
         Arc { _ptr: unsafe { mem::transmute(x) } }
@@ -109,7 +109,7 @@ impl<T: Share + Send> Arc<T> {
     #[experimental = "Weak pointers may not belong in this module."]
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
-        self.inner().weak.fetch_add(1, atomics::Relaxed);
+        self.inner().weak.fetch_add(1, atomic::Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -134,7 +134,7 @@ impl<T: Share + Send> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, atomics::Relaxed);
+        self.inner().strong.fetch_add(1, atomic::Relaxed);
         Arc { _ptr: self._ptr }
     }
 }
@@ -159,8 +159,8 @@ impl<T: Send + Share + Clone> Arc<T> {
         // Note that we hold a strong reference, which also counts as
         // a weak reference, so we only clone if there is an
        // additional reference of either kind.
-        if self.inner().strong.load(atomics::SeqCst) != 1 ||
-           self.inner().weak.load(atomics::SeqCst) != 1 {
+        if self.inner().strong.load(atomic::SeqCst) != 1 ||
+           self.inner().weak.load(atomic::SeqCst) != 1 {
             *self = Arc::new(self.deref().clone())
         }
         // This unsafety is ok because we're guaranteed that the pointer
@@ -185,7 +185,7 @@ impl<T: Share + Send> Drop for Arc<T> {
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
-        if self.inner().strong.fetch_sub(1, atomics::Release) != 1 { return }
+        if self.inner().strong.fetch_sub(1, atomic::Release) != 1 { return }
 
         // This fence is needed to prevent reordering of use of the data and
         // deletion of the data. Because it is marked `Release`, the
@@ -204,14 +204,14 @@ impl<T: Share + Send> Drop for Arc<T> {
         // and an "acquire" operation before deleting the object.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        atomics::fence(atomics::Acquire);
+        atomic::fence(atomic::Acquire);
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         unsafe { drop(ptr::read(&self.inner().data)); }
 
-        if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
-            atomics::fence(atomics::Acquire);
+        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
+            atomic::fence(atomic::Acquire);
             unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
@@ -230,9 +230,9 @@ impl<T: Share + Send> Weak<T> {
         // fetch_add because once the count hits 0 is must never be above 0.
         let inner = self.inner();
         loop {
-            let n = inner.strong.load(atomics::SeqCst);
+            let n = inner.strong.load(atomic::SeqCst);
             if n == 0 { return None }
-            let old = inner.strong.compare_and_swap(n, n + 1, atomics::SeqCst);
+            let old = inner.strong.compare_and_swap(n, n + 1, atomic::SeqCst);
             if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
@@ -249,7 +249,7 @@ impl<T: Share + Send> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         // See comments in Arc::clone() for why this is relaxed
-        self.inner().weak.fetch_add(1, atomics::Relaxed);
+        self.inner().weak.fetch_add(1, atomic::Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -264,8 +264,8 @@ impl<T: Share + Send> Drop for Weak<T> {
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
-        if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
-            atomics::fence(atomics::Acquire);
+        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
+            atomic::fence(atomic::Acquire);
             unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
@@ -281,21 +281,21 @@ mod tests {
     use std::mem::drop;
     use std::ops::Drop;
     use std::option::{Option, Some, None};
-    use std::sync::atomics;
+    use std::sync::atomic;
     use std::task;
     use std::vec::Vec;
     use super::{Arc, Weak};
     use std::sync::Mutex;
 
-    struct Canary(*mut atomics::AtomicUint);
+    struct Canary(*mut atomic::AtomicUint);
 
     impl Drop for Canary
     {
         fn drop(&mut self) {
             unsafe {
                 match *self {
                     Canary(c) => {
-                        (*c).fetch_add(1, atomics::SeqCst);
+                        (*c).fetch_add(1, atomic::SeqCst);
                     }
                 }
             }
@@ -413,20 +413,20 @@ mod tests {
 
     #[test]
     fn drop_arc() {
-        let mut canary = atomics::AtomicUint::new(0);
-        let x = Arc::new(Canary(&mut canary as *mut atomics::AtomicUint));
+        let mut canary = atomic::AtomicUint::new(0);
+        let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         drop(x);
-        assert!(canary.load(atomics::Acquire) == 1);
+        assert!(canary.load(atomic::Acquire) == 1);
     }
 
     #[test]
     fn drop_arc_weak() {
-        let mut canary = atomics::AtomicUint::new(0);
-        let arc = Arc::new(Canary(&mut canary as *mut atomics::AtomicUint));
+        let mut canary = atomic::AtomicUint::new(0);
+        let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         let arc_weak = arc.downgrade();
-        assert!(canary.load(atomics::Acquire) == 0);
+        assert!(canary.load(atomic::Acquire) == 0);
         drop(arc);
-        assert!(canary.load(atomics::Acquire) == 1);
+        assert!(canary.load(atomic::Acquire) == 1);
         drop(arc_weak);
     }
 }
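
For reference, a minimal caller-side sketch (not part of this commit) of what the rename means for downstream code: imports switch from `std::sync::atomics` to `std::sync::atomic`, while the operations themselves are unchanged. It uses only names that appear in this diff (`AtomicUint::new`, `fetch_add`, `load`, `SeqCst`) and assumes a toolchain from the same pre-1.0 era as the patch.

    // Hypothetical snippet, assuming the era's `std::sync::atomic` path
    // introduced by this rename and the `AtomicUint` API shown above.
    use std::sync::atomic;

    fn main() {
        // Counter created through the renamed module path.
        let counter = atomic::AtomicUint::new(0);
        // Same atomic operations and orderings as before the rename.
        counter.fetch_add(1, atomic::SeqCst);
        assert!(counter.load(atomic::SeqCst) == 1);
    }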