prepare for Dotty, part 3 #119

Merged · 2 commits · Oct 15, 2020
6 changes: 3 additions & 3 deletions core/src/main/scala/scala/collection/generic/ParFactory.scala
@@ -225,8 +225,8 @@ extends GenericParCompanion[CC] {
* all calls to `apply(from)` to the `genericParBuilder` method of the $coll
* `from`, and calls to `apply()` to this factory.
*/
class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] {
override def apply(from: CC[_]) = from.genericCombiner
override def apply() = newBuilder[A]
class GenericCanCombineFrom[From, To] extends CanCombineFrom[CC[From], To, CC[To]] {
override def apply(from: CC[From]) = from.genericCombiner
override def apply() = newBuilder[To]
}
}
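Most of the remaining hunks in this PR repeat the pattern visible above: the existential `CC[_]` describing the "from" collection of a `CanCombineFrom` becomes an explicit type parameter, presumably because Scala 3 drops the general existential types that applying the abstract type constructor `CC` to a wildcard relied on. A minimal, self-contained sketch of the reshaped pattern (toy types, not the library's actual definitions):

trait Combiner[-Elem, +To]

trait CanCombineFrom[-From, -Elem, +To] {
  def apply(from: From): Combiner[Elem, To]
  def apply(): Combiner[Elem, To]
}

trait Par[+A] { def genericCombiner[B]: Combiner[B, Par[B]] }

object Par {
  def newCombiner[T]: Combiner[T, Par[T]] = new Combiner[T, Par[T]] {}

  // After the change: one parameter (S) for the source elements, one (T) for the
  // results, so no wildcard is left in the implicit's type.
  class GenericCanCombineFrom[S, T] extends CanCombineFrom[Par[S], T, Par[T]] {
    def apply(from: Par[S]): Combiner[T, Par[T]] = from.genericCombiner[T]
    def apply(): Combiner[T, Par[T]] = newCombiner[T]
  }

  implicit def canCombineFrom[S, T]: CanCombineFrom[Par[S], T, Par[T]] =
    new GenericCanCombineFrom[S, T]
}

Summoning `implicitly[CanCombineFrom[Par[Int], String, Par[String]]]` then resolves through `canCombineFrom[Int, String]` with both element types fixed, which is the shape the companion objects below adopt.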
10 changes: 3 additions & 7 deletions core/src/main/scala/scala/collection/generic/ParMapFactory.scala
@@ -27,11 +27,9 @@ import scala.collection.parallel.Combiner
* @define factoryInfo
* This object provides a set of operations needed to create `$Coll` values.
*/
abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC, CC[X, Y], _]]
abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC, CC[X, Y], Sequential[X, Y]], Sequential[X, Y] <: collection.Map[X, Y] with collection.MapOps[X, Y, Sequential, Sequential[X, Y]]]
extends GenericParMapCompanion[CC] {

type Coll = MapColl

// `apply` and `empty` methods were previously inherited from `GenMapFactory`, which
// has been removed from the Scala library in 2.13

@@ -45,8 +43,6 @@ extends GenericParMapCompanion[CC] {

def empty[K, V]: CC[K, V]

type MapColl = CC[_, _]

/** The default builder for $Coll objects.
* @tparam K the type of the keys
* @tparam V the type of the associated values
@@ -59,8 +55,8 @@ extends GenericParMapCompanion[CC] {
*/
def newCombiner[K, V]: Combiner[(K, V), CC[K, V]]

class CanCombineFromMap[K, V] extends CanCombineFrom[CC[_, _], (K, V), CC[K, V]] {
def apply(from: MapColl) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]]
class CanCombineFromMap[FromK, FromV, K, V] extends CanCombineFrom[CC[FromK, FromV], (K, V), CC[K, V]] {
def apply(from: CC[FromK, FromV]) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]]
def apply() = newCombiner[K, V]
}
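The map factories need one extra step on top of that: the sequential companion type that used to hide behind the `_` in `ParMapLike[..., _]` (and behind the removed `Coll`/`MapColl` aliases, themselves existential `CC[_, _]` applications) becomes an explicit `Sequential[X, Y]` parameter, and `CanCombineFromMap` now names the source key/value types as well. The concrete companions changed later in this PR supply both arguments, e.g. `ParMapFactory[ParMap, collection.Map]`. A hedged sketch of how the four-parameter implicit is expected to resolve at a use site (import paths assumed, not re-checked here):

import scala.collection.generic.CanCombineFrom // assumed location of CanCombineFrom in this repo
import scala.collection.parallel.ParMap

object CanCombineFromMapDemo {
  // The source map's key/value types (String, Int) are spelled out instead of the
  // old existential ParMap[_, _]; the target entry type is (Int, Long).
  val cbf: CanCombineFrom[ParMap[String, Int], (Int, Long), ParMap[Int, Long]] =
    implicitly // should resolve via ParMap.canBuildFrom[String, Int, Int, Long]

  def main(args: Array[String]): Unit = println(cbf)
}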

@@ -28,9 +28,8 @@ abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC, CC[X], _]

def newCombiner[A]: Combiner[A, CC[A]]

class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] {
override def apply(from: CC[_]) = from.genericCombiner[A]
class GenericCanCombineFrom[B, A] extends CanCombineFrom[CC[B], A, CC[A]] {
override def apply(from: CC[B]) = from.genericCombiner[A]
override def apply() = newCombiner[A]
}
}

@@ -35,7 +35,7 @@ trait ParIterable[+T]
/** $factoryInfo
*/
object ParIterable extends ParFactory[ParIterable] {
implicit def canBuildFrom[T]: CanCombineFrom[ParIterable[_], T, ParIterable[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[T, S]: CanCombineFrom[ParIterable[S], T, ParIterable[T]] = new GenericCanCombineFrom[S, T]

def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]()

4 changes: 2 additions & 2 deletions core/src/main/scala/scala/collection/parallel/ParMap.scala
@@ -45,12 +45,12 @@ self =>



object ParMap extends ParMapFactory[ParMap] {
object ParMap extends ParMapFactory[ParMap, collection.Map] {
def empty[K, V]: ParMap[K, V] = new mutable.ParHashMap[K, V]

def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = mutable.ParHashMapCombiner[K, V]

implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParMap[FromK, FromV], (K, V), ParMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V]

/** An abstract shell used by { mutable, immutable }.Map but not by collection.Map
* because of variance issues.
2 changes: 1 addition & 1 deletion core/src/main/scala/scala/collection/parallel/ParSeq.scala
@@ -42,7 +42,7 @@ trait ParSeq[+T] extends ParIterable[T]
}

object ParSeq extends ParFactory[ParSeq] {
implicit def canBuildFrom[T]: CanCombineFrom[ParSeq[_], T, ParSeq[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParSeq[S], T, ParSeq[T]] = new GenericCanCombineFrom[S, T]

def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]()
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]()
2 changes: 1 addition & 1 deletion core/src/main/scala/scala/collection/parallel/ParSet.scala
@@ -40,5 +40,5 @@ trait ParSet[T]
object ParSet extends ParSetFactory[ParSet] {
def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T]

implicit def canBuildFrom[T]: CanCombineFrom[ParSet[_], T, ParSet[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParSet[S], T, ParSet[T]] = new GenericCanCombineFrom[S, T]
}
@@ -465,7 +465,7 @@ self =>
def split: Seq[IterableSplitter[S]] = self.split.map { _ map f }
}

override def map[S](f: T => S) = new Mapped(f)
override def map[S](f: T => S): IterableSplitter[S] = new Mapped[S](f)
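The explicit result types added in this file (and on `newWrappedTask` and `newIterator` further down) appear to share one motive: without the ascription the compiler infers the inner, path-dependent class type (`Mapped[S]`, `Zipped[S]`, …), while pinning the public supertype keeps the signatures identical under both Scala 2 and Dotty. A toy sketch of the pattern, with simplified stand-in types:

trait Splitter[+T] {
  def map[S](f: T => S): Splitter[S]
}

abstract class SplitterImpl[+T] extends Splitter[T] {
  class Mapped[S](f: T => S) extends SplitterImpl[S]

  // Ascribing `Splitter[S]` pins the public signature; left off, the inferred type
  // would be the inner `Mapped[S]`, which is path-dependent on this enclosing instance.
  override def map[S](f: T => S): Splitter[S] = new Mapped(f)
}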

class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] {
signalDelegate = self.signalDelegate
@@ -500,7 +500,7 @@ self =>
}
}

def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that)
def zipParSeq[S](that: SeqSplitter[S]): IterableSplitter[(T, S)] = new Zipped(that)

class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S)
extends IterableSplitter[(U, S)] {
@@ -578,7 +578,7 @@ self =>
def psplit(sizes: Int*): Seq[SeqSplitter[S]] = self.psplit(sizes: _*).map { _ map f }
}

override def map[S](f: T => S) = new RemainsIteratorMapped(f)
override def map[S](f: T => S): SeqSplitter[S] = new RemainsIteratorMapped(f)

class RemainsIteratorAppended[U >: T, PI <: SeqSplitter[U]](it: PI) extends Appended[U, PI](it) with SeqSplitter[U] {
override def dup = super.dup.asInstanceOf[SeqSplitter[U]]
@@ -614,10 +614,10 @@ self =>
class RemainsIteratorZipped[S](ti: SeqSplitter[S]) extends Zipped[S](ti) with SeqSplitter[(T, S)] {
override def dup = super.dup.asInstanceOf[SeqSplitter[(T, S)]]
override def split: Seq[SeqSplitter[(T, S)]] = super.split.asInstanceOf[Seq[SeqSplitter[(T, S)]]]
def psplit(szs: Int*) = (self.psplit(szs: _*) zip that.psplit(szs: _*)) map { p => p._1 zipParSeq p._2 }
def psplit(szs: Int*) = (self.psplit(szs: _*) zip that.psplit(szs: _*)) map { p => (p._1 zipParSeq p._2) }
}

override def zipParSeq[S](that: SeqSplitter[S]) = new RemainsIteratorZipped(that)
override def zipParSeq[S](that: SeqSplitter[S]): SeqSplitter[(T, S)] = new RemainsIteratorZipped(that)

class RemainsIteratorZippedAll[U >: T, S](ti: SeqSplitter[S], thise: U, thate: S) extends ZippedAll[U, S](ti, thise, thate) with SeqSplitter[(U, S)] {
override def dup = super.dup.asInstanceOf[SeqSplitter[(U, S)]]
@@ -635,7 +635,7 @@
}
def psplit(sizes: Int*): Seq[SeqSplitter[(U, S)]] = {
val (thisit, thatit) = patchem
val zipped = thisit zipParSeq thatit
val zipped = (thisit zipParSeq thatit)
zipped.psplit(sizes: _*)
}
}
2 changes: 1 addition & 1 deletion core/src/main/scala/scala/collection/parallel/Tasks.scala
@@ -306,7 +306,7 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS
def split = body.split.map(b => newWrappedTask(b))
}

def newWrappedTask[R, Tp](b: Task[R, Tp]) = new AWSFJTWrappedTask[R, Tp](b)
def newWrappedTask[R, Tp](b: Task[R, Tp]): AWSFJTWrappedTask[R, Tp] = new AWSFJTWrappedTask[R, Tp](b)
}

/** An implementation of the `Tasks` that uses Scala `Future`s to compute
@@ -92,9 +92,9 @@ self =>
phit
}
def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
case t: TrieIterator[(K, V)] =>
val previousRemaining = remaining
val ((fst: Iterator[(K, V) @unchecked], fstlength), snd: Iterator[(K, V) @unchecked]) = t.split
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
new ParHashMapIterator(fst, fstlength),
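Worth noting why the `@unchecked` ascriptions could be dropped from the destructuring above: once the scrutinee is matched as `TrieIterator[(K, V)]` (the type argument is erased at runtime either way), `t.split` is fully typed, so the tuple pattern needs no annotations. A toy illustration with a hypothetical `Splitter` stand-in, not the library's `TrieIterator`:

object SplitSketch {
  // Hypothetical stand-in: split returns ((firstHalf, firstLength), secondHalf).
  class Splitter[A](elems: List[A]) {
    def split: ((Iterator[A], Int), Iterator[A]) = {
      val (l, r) = elems.splitAt(elems.length / 2)
      ((l.iterator, l.length), r.iterator)
    }
  }

  def halves[K, V](x: AnyRef): Int = x match {
    case s: Splitter[(K, V) @unchecked] => // element type asserted, erased at runtime
      val ((fst, fstLength), snd) = s.split // fst: Iterator[(K, V)], no ascriptions needed
      fstLength + fst.size + snd.size
    case _ => 0
  }
}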
@@ -140,13 +140,13 @@ self =>
* @define Coll `immutable.ParHashMap`
* @define coll immutable parallel hash map
*/
object ParHashMap extends ParMapFactory[ParHashMap] {
object ParHashMap extends ParMapFactory[ParHashMap, OldHashMap] {
def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V]

def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = HashMapCombiner[K, V]

implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = {
new CanCombineFromMap[K, V]
implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParHashMap[FromK, FromV], (K, V), ParHashMap[K, V]] = {
new CanCombineFromMap[FromK, FromV, K, V]
}

def fromTrie[K, V](t: OldHashMap[K, V]) = new ParHashMap(t)
@@ -308,7 +308,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
case hm1: OldHashMap.OldHashMap1[_, _] =>
val evaledvalue = hm1.value.result()
new OldHashMap.OldHashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null)
case hmc: OldHashMap.OldHashMapCollision1[_, _] =>
case hmc: OldHashMap.OldHashMapCollision1[_, Combiner[_, Repr]] =>
val evaledkvs = hmc.kvs map { p => (p._1, p._2.result()) }
new OldHashMap.OldHashMapCollision1[K, Repr](hmc.hash, evaledkvs)
case htm: OldHashMap.HashTrieMap[k, v] =>
@@ -78,7 +78,7 @@ self =>
extends IterableSplitter[T] {
var i = 0
def dup = triter match {
case t: TrieIterator[_] =>
case t: TrieIterator[T] =>
dupFromIterator(t.dupIterator)
case _ =>
val buff = triter.toBuffer
@@ -91,7 +91,7 @@
phit
}
def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
case t: TrieIterator[T] =>
val previousRemaining = remaining
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
@@ -125,8 +125,8 @@ self =>
object ParHashSet extends ParSetFactory[ParHashSet] {
def newCombiner[T]: Combiner[T, ParHashSet[T]] = HashSetCombiner[T]

implicit def canBuildFrom[T]: CanCombineFrom[ParHashSet[_], T, ParHashSet[T]] =
new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParHashSet[S], T, ParHashSet[T]] =
new GenericCanCombineFrom[S, T]

def fromTrie[T](t: OldHashSet[T]) = new ParHashSet(t)
}
@@ -40,8 +40,8 @@ extends scala.collection.parallel.ParIterable[T]
/** $factoryInfo
*/
object ParIterable extends ParFactory[ParIterable] {
implicit def canBuildFrom[T]: CanCombineFrom[ParIterable[_], T, ParIterable[T]] =
new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParIterable[S], T, ParIterable[T]] =
new GenericCanCombineFrom[S, T]

def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T]
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T]
@@ -88,12 +88,12 @@ trait ParMapLike[



object ParMap extends ParMapFactory[ParMap] {
object ParMap extends ParMapFactory[ParMap, scala.collection.immutable.Map] {
def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V]

def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = HashMapCombiner[K, V]

implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParMap[FromK, FromV], (K, V), ParMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V]

class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V)
extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
@@ -38,7 +38,7 @@ extends scala.collection.parallel.ParSeq[T]
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
implicit def canBuildFrom[T]: CanCombineFrom[ParSeq[_], T, ParSeq[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParSeq[S], T, ParSeq[T]] = new GenericCanCombineFrom[S, T]

def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
@@ -47,5 +47,5 @@ self =>
object ParSet extends ParSetFactory[ParSet] {
def newCombiner[T]: Combiner[T, ParSet[T]] = HashSetCombiner[T]

implicit def canBuildFrom[T]: CanCombineFrom[ParSet[_], T, ParSet[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParSet[S], T, ParSet[T]] = new GenericCanCombineFrom[S, T]
}
@@ -92,8 +92,8 @@ extends ParSeq[T]
* @define coll immutable parallel vector
*/
object ParVector extends ParFactory[ParVector] {
implicit def canBuildFrom[T]: CanCombineFrom[ParVector[_], T, ParVector[T]] =
new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParVector[S], T, ParVector[T]] =
new GenericCanCombineFrom[S, T]

def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T]

@@ -689,7 +689,7 @@ self =>
* @define coll parallel array
*/
object ParArray extends ParFactory[ParArray] {
implicit def canBuildFrom[T]: CanCombineFrom[ParArray[_], T, ParArray[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParArray[S], T, ParArray[T]] = new GenericCanCombineFrom[S, T]
def newBuilder[T]: Combiner[T, ParArray[T]] = newCombiner
def newCombiner[T]: Combiner[T, ParArray[T]] = ParArrayCombiner[T]()

@@ -150,14 +150,14 @@ self =>
* @define Coll `mutable.ParHashMap`
* @define coll parallel hash map
*/
object ParHashMap extends ParMapFactory[ParHashMap] {
object ParHashMap extends ParMapFactory[ParHashMap, scala.collection.mutable.HashMap] {
var iters = 0

def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V]

def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = ParHashMapCombiner.apply[K, V]

implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V]
implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParHashMap[FromK, FromV], (K, V), ParHashMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V]

final class DefaultEntry[K, V](val key: K, var value: V) extends HashEntry[K, DefaultEntry[K, V]] with Serializable {
override def toString: String = s"DefaultEntry($key -> $value)"
@@ -196,7 +196,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
} else {
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
object table extends HashTable[K, DefaultEntry[K, V], DefaultEntry[K, V]] with WithContents[K, DefaultEntry[K, V], DefaultEntry[K, V]] {
object newTable extends HashTable[K, DefaultEntry[K, V], DefaultEntry[K, V]] with WithContents[K, DefaultEntry[K, V], DefaultEntry[K, V]] {
type Entry = DefaultEntry[K, V]
def insertEntry(e: Entry): Unit = { super.findOrAddEntry(e.key, e) }
def createNewEntry(key: K, entry: Entry): Entry = entry
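The rename from `object table` to `object newTable` looks cosmetic, but is presumably part of the Dotty preparation as well: the local object extends `HashTable`, which itself has a `table` member, so the one name referred both to the enclosing object and to the inherited field. A simplified sketch of the collision being avoided (stand-in for `HashTable`, not the real class):

object RenameSketch {
  // Simplified stand-in for scala.collection.mutable.HashTable, which keeps its
  // buckets in a field called `table`.
  abstract class HashTableLike {
    protected var table: Array[AnyRef] = new Array[AnyRef](16)
  }

  def build(): Int = {
    // With the old name, `table` meant both this local object and the array it
    // inherits; `newTable` keeps the two apart.
    object newTable extends HashTableLike {
      def capacity: Int = table.length // unambiguously the inherited array
    }
    newTable.capacity
  }
}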
@@ -205,11 +205,11 @@
var i = 0
while (i < ParHashMapCombiner.numblocks) {
if (buckets(i) ne null) {
for (elem <- buckets(i)) table.insertEntry(elem)
for (elem <- buckets(i)) newTable.insertEntry(elem)
}
i += 1
}
new ParHashMap(table.hashTableContents)
new ParHashMap(newTable.hashTableContents)
}

/* classes */
@@ -113,7 +113,7 @@ extends ParSet[T]
* @define coll parallel hash set
*/
object ParHashSet extends ParSetFactory[ParHashSet] {
implicit def canBuildFrom[T]: CanCombineFrom[ParHashSet[_], T, ParHashSet[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParHashSet[S], T, ParHashSet[T]] = new GenericCanCombineFrom[S, T]

override def newBuilder[T]: Combiner[T, ParHashSet[T]] = newCombiner

@@ -42,7 +42,7 @@ trait ParIterable[T] extends scala.collection.parallel.ParIterable[T]
/** $factoryInfo
*/
object ParIterable extends ParFactory[ParIterable] {
implicit def canBuildFrom[T]: CanCombineFrom[ParIterable[_], T, ParIterable[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParIterable[S], T, ParIterable[T]] = new GenericCanCombineFrom[S, T]

def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]()
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]()
@@ -60,12 +60,12 @@ extends parallel.ParMap[K, V]
def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
}

object ParMap extends ParMapFactory[ParMap] {
object ParMap extends ParMapFactory[ParMap, scala.collection.mutable.Map] {
def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V]

def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = ParHashMapCombiner.apply[K, V]

implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParMap[FromK, FromV], (K, V), ParMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V]

class WithDefault[K, V](underlying: ParMap[K, V], d: K => V)
extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] {
@@ -46,7 +46,7 @@ self =>
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
implicit def canBuildFrom[T]: CanCombineFrom[ParSeq[_], T, ParSeq[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParSeq[S], T, ParSeq[T]] = new GenericCanCombineFrom[S, T]

def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]()

@@ -37,7 +37,7 @@ self =>
* @define coll mutable parallel set
*/
object ParSet extends ParSetFactory[ParSet] {
implicit def canBuildFrom[T]: CanCombineFrom[ParSet[_], T, ParSet[T]] = new GenericCanCombineFrom[T]
implicit def canBuildFrom[S, T]: CanCombineFrom[ParSet[S], T, ParSet[T]] = new GenericCanCombineFrom[S, T]

override def newBuilder[T]: Combiner[T, ParSet[T]] = ParHashSet.newBuilder

@@ -127,7 +127,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
lazy val totalsize = new ParTrieMap(ct).size
var iterated = 0

protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit)
protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): ParTrieMapSplitter[K, V] = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit)

override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = {
val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
@@ -163,9 +163,9 @@ private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrie
override def canBeShared = true
}

object ParTrieMap extends ParMapFactory[ParTrieMap] {
object ParTrieMap extends ParMapFactory[ParTrieMap, TrieMap] {
def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V]
def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V]

implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V]
implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParTrieMap[FromK, FromV], (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V]
}