Skip to content

Commit 03abdfb

Browse files
authored
Merge pull request #40 from xuwei-k/procedure-syntax
fix procedure syntax
2 parents e5eb994 + 2851493 commit 03abdfb

31 files changed

+118
-118
lines changed

core/src/main/scala/scala/collection/parallel/ParIterableLike.scala

+10-10
Original file line number | Diff line number | Diff line change
@@ -166,7 +166,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
166166
@volatile
167167
private var _tasksupport = defaultTaskSupport
168168

169-
protected def initTaskSupport() {
169+
protected def initTaskSupport(): Unit = {
170170
_tasksupport = defaultTaskSupport
171171
}
172172

@@ -331,7 +331,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
331331

332332
protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] {
333333
def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] {
334-
def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]) {
334+
def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]): Unit = {
335335
if (cb.getClass == t.runtimeClass) isbody(cb.asInstanceOf[Cmb]) else notbody
336336
}
337337
}
@@ -914,11 +914,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
914914
extends NonDivisibleTask[R, Composite[FR, SR, R, First, Second]] {
915915
def combineResults(fr: FR, sr: SR): R
916916
@volatile var result: R = null.asInstanceOf[R]
917-
private[parallel] override def signalAbort() {
917+
private[parallel] override def signalAbort(): Unit = {
918918
ft.signalAbort()
919919
st.signalAbort()
920920
}
921-
protected def mergeSubtasks() {
921+
protected def mergeSubtasks(): Unit = {
922922
ft mergeThrowables st
923923
if (throwable eq null) result = combineResults(ft.result, st.result)
924924
}
@@ -956,7 +956,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
956956
val initialResult = tasksupport.executeAndWaitResult(inner)
957957
result = map(initialResult)
958958
}
959-
private[parallel] override def signalAbort() {
959+
private[parallel] override def signalAbort(): Unit = {
960960
inner.signalAbort()
961961
}
962962
override def requiresStrictSplitters = inner.requiresStrictSplitters
@@ -1334,7 +1334,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
13341334
protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T])
13351335
extends Transformer[Combiner[U, That], ToParCollection[U, That]] {
13361336
@volatile var result: Result = null
1337-
def leaf(prev: Option[Combiner[U, That]]) {
1337+
def leaf(prev: Option[Combiner[U, That]]): Unit = {
13381338
result = cbf()
13391339
while (pit.hasNext) result += pit.next
13401340
}
@@ -1345,7 +1345,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
13451345
protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
13461346
extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] {
13471347
@volatile var result: Result = null
1348-
def leaf(prev: Option[Combiner[(K, V), That]]) {
1348+
def leaf(prev: Option[Combiner[(K, V), That]]): Unit = {
13491349
result = cbf()
13501350
while (pit.hasNext) result += pit.next
13511351
}
@@ -1394,7 +1394,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
13941394
(tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That])
13951395
extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] {
13961396
@volatile var result: Combiner[U, That] = null
1397-
def leaf(prev: Option[Combiner[U, That]]) {
1397+
def leaf(prev: Option[Combiner[U, That]]): Unit = {
13981398
val cb = reuse(prev, cbf())
13991399
iterate(tree, cb)
14001400
result = cb
@@ -1443,11 +1443,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
14431443
val rightmost = right.rightmost
14441444

14451445
def beginsAt = left.beginsAt
1446-
def pushdown(v: U) {
1446+
def pushdown(v: U): Unit = {
14471447
left.pushdown(v)
14481448
right.pushdown(v)
14491449
}
1450-
def print(depth: Int) {
1450+
def print(depth: Int): Unit = {
14511451
println((" " * depth) + "ScanNode, begins at " + beginsAt)
14521452
left.print(depth + 1)
14531453
right.print(depth + 1)

core/src/main/scala/scala/collection/parallel/RemainsIterator.scala

+2-2
Original file line number | Diff line number | Diff line change
@@ -85,7 +85,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
8585
r
8686
}
8787

88-
override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) {
88+
override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = {
8989
var i = from
9090
val until = from + len
9191
while (i < until && hasNext) {
@@ -229,7 +229,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
229229
(before, after)
230230
}
231231

232-
def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, array: Array[A], from: Int) {
232+
def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, array: Array[A], from: Int): Unit = {
233233
var last = z
234234
var i = from
235235
while (hasNext) {

core/src/main/scala/scala/collection/parallel/Tasks.scala

+6-6
Original file line number | Diff line number | Diff line change
@@ -35,14 +35,14 @@ trait Task[R, +Tp] {
3535
private[parallel] def split: Seq[Task[R, Tp]]
3636

3737
/** Read of results of `that` task and merge them into results of this one. */
38-
private[parallel] def merge(that: Tp @uncheckedVariance) {}
38+
private[parallel] def merge(that: Tp @uncheckedVariance): Unit = {}
3939

4040
// exception handling mechanism
4141
@volatile var throwable: Throwable = null
4242
def forwardThrowable() = if (throwable != null) throw throwable
4343

4444
// tries to do the leaf computation, storing the possible exception
45-
private[parallel] def tryLeaf(lastres: Option[R]) {
45+
private[parallel] def tryLeaf(lastres: Option[R]): Unit = {
4646
try {
4747
tryBreakable {
4848
leaf(lastres)
@@ -58,21 +58,21 @@ trait Task[R, +Tp] {
5858
}
5959
}
6060

61-
private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
61+
private[parallel] def tryMerge(t: Tp @uncheckedVariance): Unit = {
6262
val that = t.asInstanceOf[Task[R, Tp]]
6363
if (this.throwable == null && that.throwable == null) merge(t)
6464
mergeThrowables(that)
6565
}
6666

67-
private[parallel] def mergeThrowables(that: Task[_, _]) {
67+
private[parallel] def mergeThrowables(that: Task[_, _]): Unit = {
6868
if (this.throwable != null && that.throwable != null)
6969
this.throwable.addSuppressed(that.throwable)
7070
else if (this.throwable == null && that.throwable != null)
7171
this.throwable = that.throwable
7272
}
7373

7474
// override in concrete task implementations to signal abort to other tasks
75-
private[parallel] def signalAbort() {}
75+
private[parallel] def signalAbort(): Unit = {}
7676
}
7777

7878

@@ -109,7 +109,7 @@ trait Tasks {
109109
*
110110
* This method may be overridden.
111111
*/
112-
def release() {}
112+
def release(): Unit = {}
113113
}
114114

115115
/* task control */

core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -118,7 +118,7 @@ self =>
118118

119119
/* debug */
120120

121-
private[parallel] def printDebugInfo() {
121+
private[parallel] def printDebugInfo(): Unit = {
122122
println("Parallel hash trie")
123123
println("Top level inner trie type: " + trie.getClass)
124124
trie match {

core/src/main/scala/scala/collection/parallel/mutable/ParArray.scala

+13-13
Original file line number | Diff line number | Diff line change
@@ -302,7 +302,7 @@ self =>
302302
this
303303
}
304304

305-
override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) {
305+
override def copyToArray[U >: T](array: Array[U], from: Int, len: Int): Unit = {
306306
val totallen = (self.length - i) min len min (array.length - from)
307307
Array.copy(arr, i, array, from, totallen)
308308
i += totallen
@@ -384,7 +384,7 @@ self =>
384384
cb
385385
}
386386

387-
private def map2combiner_quick[S, That](f: T => S, a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) {
387+
private def map2combiner_quick[S, That](f: T => S, a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int): Unit = {
388388
var j = from
389389
while (j < ntil) {
390390
cb += f(a(j).asInstanceOf[T])
@@ -399,7 +399,7 @@ self =>
399399
cb
400400
}
401401

402-
private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) {
402+
private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int): Unit = {
403403
var j = from
404404
val runWith = pf.runWith(b => cb += b)
405405
while (j < ntil) {
@@ -426,7 +426,7 @@ self =>
426426
cb
427427
}
428428

429-
private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
429+
private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = {
430430
var j = i
431431
while(j < ntil) {
432432
val curr = a(j).asInstanceOf[T]
@@ -441,7 +441,7 @@ self =>
441441
cb
442442
}
443443

444-
private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
444+
private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = {
445445
var j = i
446446
while(j < ntil) {
447447
val curr = a(j).asInstanceOf[T]
@@ -474,7 +474,7 @@ self =>
474474
cb
475475
}
476476

477-
private def copy2builder_quick[U >: T, Coll](b: Builder[U, Coll], a: Array[Any], ntil: Int, from: Int) {
477+
private def copy2builder_quick[U >: T, Coll](b: Builder[U, Coll], a: Array[Any], ntil: Int, from: Int): Unit = {
478478
var j = from
479479
while (j < ntil) {
480480
b += a(j).asInstanceOf[T]
@@ -488,7 +488,7 @@ self =>
488488
(btrue, bfalse)
489489
}
490490

491-
private def partition2combiners_quick[U >: T, This](p: T => Boolean, btrue: Builder[U, This], bfalse: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
491+
private def partition2combiners_quick[U >: T, This](p: T => Boolean, btrue: Builder[U, This], bfalse: Builder[U, This], a: Array[Any], ntil: Int, from: Int): Unit = {
492492
var j = from
493493
while (j < ntil) {
494494
val curr = a(j).asInstanceOf[T]
@@ -542,7 +542,7 @@ self =>
542542
cb
543543
}
544544

545-
private def reverse2combiner_quick(targ: Array[Any], a: Array[Any], targfrom: Int, srcfrom: Int, srcuntil: Int) {
545+
private def reverse2combiner_quick(targ: Array[Any], a: Array[Any], targfrom: Int, srcfrom: Int, srcuntil: Int): Unit = {
546546
var j = srcfrom
547547
var k = targfrom + srcuntil - srcfrom - 1
548548
while (j < srcuntil) {
@@ -552,12 +552,12 @@ self =>
552552
}
553553
}
554554

555-
override def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, destarr: Array[A], from: Int) {
555+
override def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, destarr: Array[A], from: Int): Unit = {
556556
scanToArray_quick[U](array, destarr.asInstanceOf[Array[Any]], op, z, i, until, from)
557557
i = until
558558
}
559559

560-
protected def scanToArray_quick[U](srcarr: Array[Any], destarr: Array[Any], op: (U, U) => U, z: U, srcfrom: Int, srcntil: Int, destfrom: Int) {
560+
protected def scanToArray_quick[U](srcarr: Array[Any], destarr: Array[Any], op: (U, U) => U, z: U, srcfrom: Int, srcntil: Int, destfrom: Int): Unit = {
561561
var last = z
562562
var j = srcfrom
563563
var k = destfrom
@@ -619,7 +619,7 @@ self =>
619619
case ScanLeaf(_, _, from, len, None, _) =>
620620
scanLeaf(array, targetarr, from, len, z)
621621
}
622-
private def scanLeaf(srcarr: Array[Any], targetarr: Array[Any], from: Int, len: Int, startval: U) {
622+
private def scanLeaf(srcarr: Array[Any], targetarr: Array[Any], from: Int, len: Int, startval: U): Unit = {
623623
var i = from
624624
val until = from + len
625625
var curr = startval
@@ -665,11 +665,11 @@ self =>
665665

666666
/* serialization */
667667

668-
private def writeObject(out: java.io.ObjectOutputStream) {
668+
private def writeObject(out: java.io.ObjectOutputStream): Unit = {
669669
out.defaultWriteObject
670670
}
671671

672-
private def readObject(in: java.io.ObjectInputStream) {
672+
private def readObject(in: java.io.ObjectInputStream): Unit = {
673673
in.defaultReadObject
674674

675675
// get raw array from arrayseq

core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,7 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
3333

3434
if (hasNext) scan()
3535

36-
private[this] def scan() {
36+
private[this] def scan(): Unit = {
3737
while (itertable(idx) eq null) {
3838
idx += 1
3939
}

core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala

+4-4
Original file line number | Diff line number | Diff line change
@@ -103,14 +103,14 @@ self =>
103103
new Entry(key, value.asInstanceOf[V])
104104
}
105105

106-
private def writeObject(out: java.io.ObjectOutputStream) {
106+
private def writeObject(out: java.io.ObjectOutputStream): Unit = {
107107
serializeTo(out, { entry =>
108108
out.writeObject(entry.key)
109109
out.writeObject(entry.value)
110110
})
111111
}
112112

113-
private def readObject(in: java.io.ObjectInputStream) {
113+
private def readObject(in: java.io.ObjectInputStream): Unit = {
114114
init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
115115
}
116116

@@ -190,7 +190,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
190190
// TODO parallelize by keeping separate sizemaps and merging them
191191
object table extends HashTable[K, DefaultEntry[K, V]] {
192192
type Entry = DefaultEntry[K, V]
193-
def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
193+
def insertEntry(e: Entry): Unit = { super.findOrAddEntry(e.key, e) }
194194
def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
195195
sizeMapInit(table.length)
196196
}
@@ -285,7 +285,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
285285
val fp = howmany / 2
286286
List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp))
287287
}
288-
override def merge(that: FillBlocks) {
288+
override def merge(that: FillBlocks): Unit = {
289289
this.result += that.result
290290
}
291291
def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)

core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala

+4-4
Original file line number | Diff line number | Diff line change
@@ -81,11 +81,11 @@ extends ParSet[T]
8181
def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total)
8282
}
8383

84-
private def writeObject(s: java.io.ObjectOutputStream) {
84+
private def writeObject(s: java.io.ObjectOutputStream): Unit = {
8585
serializeTo(s)
8686
}
8787

88-
private def readObject(in: java.io.ObjectInputStream) {
88+
private def readObject(in: java.io.ObjectInputStream): Unit = {
8989
init(in, x => ())
9090
}
9191

@@ -238,7 +238,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
238238
extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] {
239239
var result = (Int.MinValue, new UnrolledBuffer[AnyRef])
240240

241-
def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) {
241+
def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]): Unit = {
242242
var i = offset
243243
var totalinserts = 0
244244
var leftover = new UnrolledBuffer[AnyRef]()
@@ -301,7 +301,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
301301
val fp = howmany / 2
302302
List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp))
303303
}
304-
override def merge(that: FillBlocks) {
304+
override def merge(that: FillBlocks): Unit = {
305305
// take the leftovers from the left task, store them into the block of the right task
306306
val atPos = blockStart(that.offset)
307307
val beforePos = blockStart(that.offset + that.howmany)

core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -45,7 +45,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
4545
entry2item(res)
4646
}
4747

48-
def scan() {
48+
def scan(): Unit = {
4949
while (es == null && idx < until) {
5050
es = itertable(idx).asInstanceOf[Entry]
5151
idx = idx + 1

core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -58,7 +58,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
5858
ind = 0
5959
}
6060
}
61-
private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) {
61+
private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int): Unit = {
6262
Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart)
6363
}
6464
private def findStart(pos: Int) = {

core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -36,7 +36,7 @@ extends Combiner[T, ParArray[T]] {
3636
new ParArray(arrayseq)
3737
}
3838

39-
def clear() {
39+
def clear(): Unit = {
4040
buff.clear()
4141
}
4242

core/src/main/scala/scala/collection/parallel/package.scala

+2-2
Original file line number | Diff line number | Diff line change
@@ -193,9 +193,9 @@ package parallel {
193193
sz = 0
194194
}
195195

196-
def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
196+
def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Unit = {}
197197

198-
def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
198+
def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Unit = {}
199199

200200
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
201201
if (this eq other) this

0 commit comments

Comments (0)