@@ -7,6 +7,7 @@ package kotlinx.coroutines.scheduling
 import kotlinx.atomicfu.*
 import kotlinx.coroutines.*
 import java.util.concurrent.atomic.*
+import kotlin.jvm.internal.Ref.ObjectRef
 
 internal const val BUFFER_CAPACITY_BASE = 7
 internal const val BUFFER_CAPACITY = 1 shl BUFFER_CAPACITY_BASE
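kotlin.jvm.internal.Ref.ObjectRef is the box class the Kotlin compiler itself emits for captured mutable variables, so it is already on every JVM classpath and exposes a single public field, element. The commit reuses it as a pre-allocated out-parameter for stealing. A minimal sketch of that pattern, where everything except ObjectRef is hypothetical:

    import kotlin.jvm.internal.Ref.ObjectRef

    // Writes its result into the caller-provided box instead of returning it,
    // so the box can be allocated once and reused across many calls.
    fun produceInto(ref: ObjectRef<String?>): Boolean {
        ref.element = "result"
        return true
    }

    fun main() {
        val box = ObjectRef<String?>() // reused on every call, no per-call allocation
        if (produceInto(box)) println(box.element)
    }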
@@ -31,7 +32,7 @@ internal const val NOTHING_TO_STEAL = -2L
  * (scheduler workers without a CPU permit steal blocking tasks via this mechanism). Such property enforces us to use CAS in
  * order to properly claim value from the buffer.
  * Moreover, [Task] objects are reusable, so it may seem that this queue is prone to ABA problem.
- * Indeed it formally has ABA-problem, but the whole processing logic is written in the way that such ABA is harmless.
+ * Indeed, it formally has ABA-problem, but the whole processing logic is written in the way that such ABA is harmless.
  * I have discovered a truly marvelous proof of this, which this KDoc is too narrow to contain.
  */
 internal class WorkQueue {
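One way to read the harmless-ABA claim, shown as a generic sketch of the claiming step rather than the file's full argument: a stealer's only write to the buffer is a CAS from a specific non-null reference to null, so even a recycled object reappearing in the same slot is again a genuinely published item.

    import java.util.concurrent.atomic.AtomicReferenceArray

    // Even if a recycled object reappears in the same slot (the "A-B-A"),
    // it is a real published item, so winning this CAS claims exactly one item.
    fun <T : Any> claimAt(buffer: AtomicReferenceArray<T?>, index: Int): T? {
        val item = buffer[index] ?: return null
        return if (buffer.compareAndSet(index, item, null)) item else null
    }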
@@ -46,10 +47,12 @@ internal class WorkQueue {
      * [T2] changeProducerIndex (3)
      * [T3] changeConsumerIndex (4)
      *
-     * Which can lead to resulting size bigger than actual size at any moment of time.
-     * This is in general harmless because steal will be blocked by timer
+     * Which can lead to resulting size being negative or bigger than actual size at any moment of time.
+     * This is in general harmless because steal will be blocked by timer.
+     * Negative sizes can be observed only when non-owner reads the size, which happens only
+     * for diagnostic toString().
      */
-    internal val bufferSize: Int get() = producerIndex.value - consumerIndex.value
+    private val bufferSize: Int get() = producerIndex.value - consumerIndex.value
     internal val size: Int get() = if (lastScheduledTask.value != null) bufferSize + 1 else bufferSize
     private val buffer: AtomicReferenceArray<Task?> = AtomicReferenceArray(BUFFER_CAPACITY)
     private val lastScheduledTask = atomic<Task?>(null)
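The negative-size remark is easiest to see with a concrete interleaving; the stand-in counters and index values below are made up for illustration:

    import java.util.concurrent.atomic.AtomicInteger

    // Stand-ins for the queue's two counters (illustration only).
    val producerIndex = AtomicInteger(5)
    val consumerIndex = AtomicInteger(5)

    fun main() {
        val p = producerIndex.get()   // non-owner reads producerIndex = 5
        // Owner adds two tasks and executes both between the two reads:
        producerIndex.set(7)
        consumerIndex.set(7)
        val c = consumerIndex.get()   // non-owner reads consumerIndex = 7
        println(p - c)                // prints -2: the transient negative size
    }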
@@ -100,41 +103,61 @@ internal class WorkQueue {
     }
 
     /**
-     * Tries stealing from [victim] queue into this queue.
+     * Tries stealing from this queue into the [stolenTaskRef] argument.
      *
      * Returns [NOTHING_TO_STEAL] if queue has nothing to steal, [TASK_STOLEN] if at least task was stolen
      * or positive value of how many nanoseconds should pass until the head of this queue will be available to steal.
      */
-    fun tryStealFrom(victim: WorkQueue): Long {
-        assert { bufferSize == 0 }
-        val task = victim.pollBuffer()
+    fun trySteal(stolenTaskRef: ObjectRef<Task?>): Long {
+        val task = pollBuffer()
         if (task != null) {
-            val notAdded = add(task)
-            assert { notAdded == null }
+            stolenTaskRef.element = task
             return TASK_STOLEN
         }
-        return tryStealLastScheduled(victim, blockingOnly = false)
+        return tryStealLastScheduled(stolenTaskRef, blockingOnly = false)
     }
 
-    fun tryStealBlockingFrom(victim: WorkQueue): Long {
-        assert { bufferSize == 0 }
-        var start = victim.consumerIndex.value
-        val end = victim.producerIndex.value
-        val buffer = victim.buffer
-
-        while (start != end) {
-            val index = start and MASK
-            if (victim.blockingTasksInBuffer.value == 0) break
-            val value = buffer[index]
-            if (value != null && value.isBlocking && buffer.compareAndSet(index, value, null)) {
-                victim.blockingTasksInBuffer.decrementAndGet()
-                add(value)
-                return TASK_STOLEN
-            } else {
-                ++start
+    fun tryStealBlocking(stolenTaskRef: ObjectRef<Task?>): Long {
+        var start = consumerIndex.value
+        val end = producerIndex.value
+
+        while (start != end && blockingTasksInBuffer.value > 0) {
+            stolenTaskRef.element = tryExtractBlockingTask(start++) ?: continue
+            return TASK_STOLEN
+        }
+        return tryStealLastScheduled(stolenTaskRef, blockingOnly = true)
+    }
+
+    // Polls for blocking task, invoked only by the owner
+    fun pollBlocking(): Task? {
+        while (true) { // Poll the slot
+            val lastScheduled = lastScheduledTask.value ?: break
+            if (!lastScheduled.isBlocking) break
+            if (lastScheduledTask.compareAndSet(lastScheduled, null)) {
+                return lastScheduled
+            } // Failed -> someone else stole it
+        }
+
+        val start = consumerIndex.value
+        var end = producerIndex.value
+
+        while (start != end && blockingTasksInBuffer.value > 0) {
+            val task = tryExtractBlockingTask(--end)
+            if (task != null) {
+                return task
             }
         }
-        return tryStealLastScheduled(victim, blockingOnly = true)
+        return null
+    }
+
+    private fun tryExtractBlockingTask(index: Int): Task? {
+        val arrayIndex = index and MASK
+        val value = buffer[arrayIndex]
+        if (value != null && value.isBlocking && buffer.compareAndSet(arrayIndex, value, null)) {
+            blockingTasksInBuffer.decrementAndGet()
+            return value
+        }
+        return null
     }
 
     fun offloadAllWorkTo(globalQueue: GlobalQueue) {
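Taken together, the new steal path is an out-parameter protocol: the stealer owns a reusable ObjectRef, the victim queue writes the stolen task into it, and the Long return value encodes empty, stolen, or nanoseconds until the head becomes stealable. A sketch of how a caller might drive it, assuming the declarations in this file; the helper itself is hypothetical:

    val stolenTask = ObjectRef<Task?>() // one box per worker thread, reused forever

    fun tryStealOnce(victim: WorkQueue): Task? {
        return when (val result = victim.trySteal(stolenTask)) {
            NOTHING_TO_STEAL -> null            // victim had nothing stealable
            TASK_STOLEN -> {
                val task = stolenTask.element   // take the task out of the box
                stolenTask.element = null       // clear it so the reference is not retained
                task
            }
            else -> null                        // result = nanoseconds until head is stealable
        }
    }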
@@ -145,11 +168,11 @@ internal class WorkQueue {
     }
 
     /**
-     * Contract on return value is the same as for [tryStealFrom]
+     * Contract on return value is the same as for [trySteal]
      */
-    private fun tryStealLastScheduled(victim: WorkQueue, blockingOnly: Boolean): Long {
+    private fun tryStealLastScheduled(stolenTaskRef: ObjectRef<Task?>, blockingOnly: Boolean): Long {
         while (true) {
-            val lastScheduled = victim.lastScheduledTask.value ?: return NOTHING_TO_STEAL
+            val lastScheduled = lastScheduledTask.value ?: return NOTHING_TO_STEAL
             if (blockingOnly && !lastScheduled.isBlocking) return NOTHING_TO_STEAL
 
             // TODO time wraparound ?
@@ -163,8 +186,8 @@ internal class WorkQueue {
              * If CAS has failed, either someone else had stolen this task or the owner executed this task
              * and dispatched another one. In the latter case we should retry to avoid missing task.
              */
-            if (victim.lastScheduledTask.compareAndSet(lastScheduled, null)) {
-                add(lastScheduled)
+            if (lastScheduledTask.compareAndSet(lastScheduled, null)) {
+                stolenTaskRef.element = lastScheduled
                 return TASK_STOLEN
             }
             continue
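The CAS-and-retry shape of tryStealLastScheduled is the standard exclusive-claim idiom: read the slot, try to swap it to null, and on failure re-read rather than give up, since the owner may have already replaced the task with a fresh one. The same idiom distilled into isolation, as a self-contained sketch rather than code from this change:

    import java.util.concurrent.atomic.AtomicReference

    // Exclusive-claim idiom: whoever nulls the slot out owns the value.
    val slot = AtomicReference<String?>("task")

    fun tryClaim(): String? {
        while (true) {
            val current = slot.get() ?: return null      // nothing published
            if (slot.compareAndSet(current, null)) {
                return current                           // claimed exclusively
            }
            // CAS lost: the value was stolen or replaced; re-read and retry
        }
    }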