 package benchmarks.actors
 
-import benchmarks.ParametrizedDispatcherBase
-import benchmarks.actors.StatefulActorBenchmark.Letter
-import kotlinx.coroutines.experimental.channels.Channel
-import kotlinx.coroutines.experimental.channels.SendChannel
-import kotlinx.coroutines.experimental.channels.actor
-import kotlinx.coroutines.experimental.runBlocking
+import benchmarks.*
+import benchmarks.actors.StatefulActorBenchmark.*
+import kotlinx.coroutines.experimental.*
+import kotlinx.coroutines.experimental.channels.*
 import org.openjdk.jmh.annotations.*
-import java.util.concurrent.ThreadLocalRandom
-import java.util.concurrent.TimeUnit
-import kotlin.coroutines.experimental.CoroutineContext
+import java.util.concurrent.*
 
 /*
  * Noisy benchmarks useful to measure scheduling fairness and migration of affinity-sensitive tasks.
@@ -70,67 +66,74 @@ open class ConcurrentStatefulActorBenchmark : ParametrizedDispatcherBase() {
     @Benchmark
     fun multipleComputationsUnfair() = runBlocking {
         val resultChannel: Channel<Unit> = Channel(1)
-        val computations = (0 until CORES_COUNT).map { computationActor(benchmarkContext, stateSize) }
-        val requestor = requestorActorUnfair(benchmarkContext, computations, resultChannel)
+        val computations = (0 until CORES_COUNT).map { computationActor(stateSize) }
+        val requestor = requestorActorUnfair(computations, resultChannel)
         requestor.send(Letter(Start(), Channel(0)))
         resultChannel.receive()
     }
 
     @Benchmark
     fun multipleComputationsFair() = runBlocking {
         val resultChannel: Channel<Unit> = Channel(1)
-        val computations = (0 until CORES_COUNT).map { computationActor(benchmarkContext, stateSize) }
-        val requestor = requestorActorFair(benchmarkContext, computations, resultChannel)
+        val computations = (0 until CORES_COUNT).map { computationActor(stateSize) }
+        val requestor = requestorActorFair(computations, resultChannel)
         requestor.send(Letter(Start(), Channel(0)))
         resultChannel.receive()
     }
 
-    fun requestorActorUnfair(context: CoroutineContext, computations: List<SendChannel<Letter>>,
-                             stopChannel: Channel<Unit>) = actor<Letter>(context, 1024) {
-        var received = 0
-        for (letter in channel) with(letter) {
-            when (message) {
-                is Start -> {
-                    computations.shuffled().forEach { it.send(Letter(ThreadLocalRandom.current().nextLong(), channel)) }
-                }
-                is Long -> {
-                    if (++received >= ROUNDS * 8) {
-                        stopChannel.send(Unit)
-                        return@actor
-                    } else {
-                        sender.send(Letter(ThreadLocalRandom.current().nextLong(), channel))
+    fun requestorActorUnfair(
+        computations: List<SendChannel<Letter>>,
+        stopChannel: Channel<Unit>
+    ) =
+        actor<Letter>(capacity = 1024) {
+            var received = 0
+            for (letter in channel) with(letter) {
+                when (message) {
+                    is Start -> {
+                        computations.shuffled()
+                            .forEach { it.send(Letter(ThreadLocalRandom.current().nextLong(), channel)) }
                     }
+                    is Long -> {
+                        if (++received >= ROUNDS * 8) {
+                            stopChannel.send(Unit)
+                            return@actor
+                        } else {
+                            sender.send(Letter(ThreadLocalRandom.current().nextLong(), channel))
+                        }
+                    }
+                    else -> error("Cannot happen: $letter")
                 }
-                else -> error("Cannot happen: $letter")
             }
         }
-    }
 
+    fun requestorActorFair(
+        computations: List<SendChannel<Letter>>,
+        stopChannel: Channel<Unit>
+    ) =
+        actor<Letter>(capacity = 1024) {
+            val received = hashMapOf(*computations.map { it to 0 }.toTypedArray())
+            var receivedTotal = 0
 
-    fun requestorActorFair(context: CoroutineContext, computations: List<SendChannel<Letter>>,
-                           stopChannel: Channel<Unit>) = actor<Letter>(context, 1024) {
-        val received = hashMapOf(*computations.map { it to 0 }.toTypedArray())
-        var receivedTotal = 0
-
-        for (letter in channel) with(letter) {
-            when (message) {
-                is Start -> {
-                    computations.shuffled().forEach { it.send(Letter(ThreadLocalRandom.current().nextLong(), channel)) }
-                }
-                is Long -> {
-                    if (++receivedTotal >= ROUNDS * computations.size) {
-                        stopChannel.send(Unit)
-                        return@actor
-                    } else {
-                        val receivedFromSender = received[sender]!!
-                        if (receivedFromSender <= ROUNDS) {
-                            received[sender] = receivedFromSender + 1
-                            sender.send(Letter(ThreadLocalRandom.current().nextLong(), channel))
+            for (letter in channel) with(letter) {
+                when (message) {
+                    is Start -> {
+                        computations.shuffled()
+                            .forEach { it.send(Letter(ThreadLocalRandom.current().nextLong(), channel)) }
+                    }
+                    is Long -> {
+                        if (++receivedTotal >= ROUNDS * computations.size) {
+                            stopChannel.send(Unit)
+                            return@actor
+                        } else {
+                            val receivedFromSender = received[sender]!!
+                            if (receivedFromSender <= ROUNDS) {
+                                received[sender] = receivedFromSender + 1
+                                sender.send(Letter(ThreadLocalRandom.current().nextLong(), channel))
+                            }
                         }
                     }
+                    else -> error("Cannot happen: $letter")
                 }
-                else -> error("Cannot happen: $letter")
             }
         }
-    }
 }
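
For context, the requestor and computation actors above exchange Letter messages in a request-reply loop: every answer immediately triggers the next request, so throughput depends on how fairly the dispatcher schedules the competing actors. Below is a minimal, self-contained sketch of that ping-pong pattern, written against the same kotlinx.coroutines.experimental API this file imports; the names (computation, replies) and the round count are illustrative and not taken from the benchmark:

import kotlinx.coroutines.experimental.*
import kotlinx.coroutines.experimental.channels.*

// Illustrative ping-pong: a computation actor answers each request on the reply
// channel carried with the message, and the requestor waits for the answer
// before issuing the next request.
fun main(args: Array<String>) = runBlocking {
    // Computation actor: receives (value, replyTo) pairs and replies with a derived value.
    val computation = actor<Pair<Long, SendChannel<Long>>>(capacity = 1024) {
        for ((value, replyTo) in channel) replyTo.send(value * 2)
    }

    val replies = Channel<Long>(1)
    var value = 1L
    repeat(5) {
        computation.send(value to replies) // request
        value = replies.receive()          // reply arrives before the next round starts
    }
    computation.close()
    println(value) // 32 after five doubling rounds
}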