@@ -23,7 +23,7 @@ use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
 use core::intrinsics::abort;
-use core::mem::{self, align_of_val, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, size_of_val};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
@@ -43,6 +43,9 @@ use vec::Vec;
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
+/// A sentinel value that is used for the pointer of `Weak::new()`.
+const WEAK_EMPTY: usize = 1;
+
 /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
 /// Reference Counted'.
 ///
@@ -235,6 +238,10 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 /// [`None`]: ../../std/option/enum.Option.html#variant.None
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is actually not truly "non-null". A `Weak::new()` will set this
+    // to a sentinel value, instead of needing to allocate some space in the
+    // heap.
     ptr: NonNull<ArcInner<T>>,
 }
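The `NonNull` niche that the new field comment refers to is what keeps `Option<Weak<T>>` pointer-sized. A quick standalone check of that layout property, runnable against today's std (the exact sizes assume a typical 64-bit target):

```rust
use std::mem::size_of;
use std::sync::Weak;

fn main() {
    // `Weak<T>` is a single `NonNull` pointer for sized `T`...
    assert_eq!(size_of::<Weak<u64>>(), size_of::<usize>());
    // ...and because `NonNull` promises a non-zero address, `Option` can
    // use the all-zero bit pattern as its `None` tag at no size cost.
    assert_eq!(size_of::<Option<Weak<u64>>>(), size_of::<Weak<u64>>());
    // A plain raw pointer has no such niche, so its `Option` must grow.
    assert!(size_of::<Option<*mut u64>>() > size_of::<*mut u64>());
}
```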
@@ -1011,8 +1018,8 @@ impl Arc<Any + Send + Sync> {
 }
 
 impl<T> Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: struct.Weak.html#method.upgrade
    /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1029,11 +1036,7 @@ impl<T> Weak<T> {
     pub fn new() -> Weak<T> {
         unsafe {
             Weak {
-                ptr: Box::into_raw_non_null(box ArcInner {
-                    strong: atomic::AtomicUsize::new(0),
-                    weak: atomic::AtomicUsize::new(1),
-                    data: uninitialized(),
-                }),
+                ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
             }
         }
     }
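The same trick in isolation: the hypothetical `Handle` type below (not part of this patch) shows why `WEAK_EMPTY as *mut _` is sound to wrap in `NonNull`. The address 1 is non-zero, so `new_unchecked` is satisfied, and the sentinel is only ever compared against, never dereferenced.

```rust
use std::ptr::NonNull;

const EMPTY: usize = 1; // non-zero, and never a valid heap address

struct Handle {
    ptr: NonNull<u64>,
}

impl Handle {
    // Mirrors the patched `Weak::new()`: store the sentinel, allocate nothing.
    fn empty() -> Handle {
        // The sentinel is non-zero, so `new_unchecked` upholds its contract.
        // The resulting pointer must never be dereferenced.
        Handle { ptr: unsafe { NonNull::new_unchecked(EMPTY as *mut u64) } }
    }

    // Every access first checks for the sentinel, as the patched methods do.
    fn is_empty(&self) -> bool {
        self.ptr.as_ptr() as usize == EMPTY
    }
}

fn main() {
    assert!(Handle::empty().is_empty());
}
```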
@@ -1070,7 +1073,11 @@ impl<T: ?Sized> Weak<T> {
     pub fn upgrade(&self) -> Option<Arc<T>> {
         // We use a CAS loop to increment the strong count instead of a
         // fetch_add because once the count hits 0 it must never be above 0.
-        let inner = self.inner();
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return None;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
 
         // Relaxed load because any write of 0 that we can observe
         // leaves the field in a permanently zero state (so a
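The CAS loop that `upgrade` falls through to (its body appears in the next hunk) implements an "increment only if still non-zero" rule. A minimal sketch of that pattern on a bare `AtomicUsize`, assuming nothing beyond the standard atomics API:

```rust
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

// A plain fetch_add could bump a strong count that already hit zero,
// resurrecting an object whose destructor may be running. The CAS loop
// re-checks for zero on every attempt instead.
fn try_increment(count: &AtomicUsize) -> bool {
    let mut n = count.load(Relaxed);
    loop {
        if n == 0 {
            return false; // already dropped; must never go back above zero
        }
        match count.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
            Ok(_) => return true,
            Err(old) => n = old, // raced with another thread; retry
        }
    }
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_increment(&strong));
    assert_eq!(strong.load(Relaxed), 2);

    let dead = AtomicUsize::new(0);
    assert!(!try_increment(&dead));
}
```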
@@ -1092,17 +1099,15 @@ impl<T: ?Sized> Weak<T> {
 
             // Relaxed is valid for the same reason it is on Arc's Clone impl
             match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
-                Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
+                Ok(_) => return Some(Arc {
+                    // null checked above
+                    ptr: self.ptr,
+                    phantom: PhantomData,
+                }),
                 Err(old) => n = old,
             }
         }
     }
-
-    #[inline]
-    fn inner(&self) -> &ArcInner<T> {
-        // See comments above for why this is "safe"
-        unsafe { self.ptr.as_ref() }
-    }
 }
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
@@ -1120,11 +1125,16 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return Weak { ptr: self.ptr };
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
         // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where there are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = inner.weak.fetch_add(1, Relaxed);
 
         // See comments in Arc::clone() for why we do this (for mem::forget).
         if old_size > MAX_REFCOUNT {
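Unlike `upgrade`, `clone` can use a plain `fetch_add`, guarded only by the `MAX_REFCOUNT` overflow check referenced in the trailing context above. A standalone sketch of that bump-then-abort pattern (the helper name is ours, not std's):

```rust
use std::process::abort;
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

const MAX_REFCOUNT: usize = isize::MAX as usize;

// Bump first with a cheap Relaxed fetch_add, then abort if the old value
// shows the count has run away (e.g. a mem::forget loop). Aborting rather
// than unwinding guarantees the count can never wrap around.
fn increment(count: &AtomicUsize) {
    let old = count.fetch_add(1, Relaxed);
    if old > MAX_REFCOUNT {
        abort();
    }
}

fn main() {
    let weak = AtomicUsize::new(1);
    increment(&weak);
    assert_eq!(weak.load(Relaxed), 2);
}
```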
@@ -1139,8 +1149,8 @@ impl<T: ?Sized> Clone for Weak<T> {
 
 #[stable(feature = "downgraded_weak", since = "1.10.0")]
 impl<T> Default for Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: struct.Weak.html#method.upgrade
    /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1193,7 +1203,13 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
+
+        if inner.weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
                 Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
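Taken together, the patched behavior is observable from safe code. A usage sketch against the current std API (values are arbitrary):

```rust
use std::sync::{Arc, Weak};

fn main() {
    // An empty Weak never upgrades and, after this patch, never allocates.
    let empty: Weak<u32> = Weak::new();
    assert!(empty.upgrade().is_none());

    // Cloning just copies the sentinel; Drop sees it and returns early.
    let also_empty = empty.clone();
    assert!(also_empty.upgrade().is_none());

    // A Weak produced by downgrading still round-trips as before.
    let strong = Arc::new(5u32);
    let weak = Arc::downgrade(&strong);
    assert_eq!(*weak.upgrade().unwrap(), 5);
    drop(strong);
    assert!(weak.upgrade().is_none());
}
```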