@@ -126,12 +126,12 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHa
 with scala.collection.mutable.FlatHashTable.HashUtils[T] {
   // self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
   private val nonmasklen = ParHashSetCombiner.nonmasklength
-  private val seedvalue = 27
+  private val seed = 27

   def addOne(elem: T) = {
     val entry = elemToEntry(elem)
     sz += 1
-    val hc = improve(entry.hashCode, seedvalue)
+    val hc = improve(entry.hashCode, seed)
     val pos = hc >>> nonmasklen
     if (buckets(pos) eq null) {
       // initialize bucket
@@ -149,7 +149,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {

   private def parPopulate: FlatHashTable.Contents[T] = {
     // construct it in parallel
-    val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue)
+    val table = new AddingFlatHashTable(size, tableLoadFactor, seed)
     val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
     var leftinserts = 0
     for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry)
@@ -162,7 +162,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
     // TODO parallelize by keeping separate size maps and merging them
     val tbl = new FlatHashTable[T] {
       sizeMapInit(table.length)
-      seedvalue = ParHashSetCombiner.this.seedvalue
+      seedvalue = seed
       for {
         buffer <- buckets
         if buffer ne null
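For context, here is a minimal sketch of the bucket-selection pattern that the `addOne` hunk relies on: the element's hash code is mixed with a fixed seed (27 in this diff), and the top bits of the improved hash choose one of a power-of-two number of coarse buckets. The `mix` helper and the constants below are illustrative assumptions for this sketch, not the actual `FlatHashTable.HashUtils.improve` or `ParHashSetCombiner.nonmasklength` from the library.

```scala
object BucketSketch {
  // Illustrative stand-ins for the combiner's constants: 2^discriminantbits buckets,
  // and a shift that keeps only the top bits of a 32-bit hash (analogous to nonmasklength).
  val discriminantbits = 5
  val numbuckets = 1 << discriminantbits   // 32 coarse buckets
  val nonmasklen = 32 - discriminantbits   // shift amount

  // Hypothetical hash "improver": scrambles the raw hashCode with a seed so that
  // elements whose hash codes differ only in low bits still spread across buckets.
  // The real improve() in FlatHashTable.HashUtils may differ.
  def mix(hcode: Int, seed: Int): Int =
    Integer.rotateRight(scala.util.hashing.byteswap32(hcode), seed)

  // Same computation shape as in addOne: improve the hash, then keep the top bits.
  def bucketOf(elem: Any, seed: Int = 27): Int = {
    val hc = mix(elem.hashCode, seed)
    hc >>> nonmasklen   // value in [0, numbuckets)
  }
}

// Usage: the same element always maps to the same of the 32 buckets.
// println(BucketSketch.bucketOf("scala"))
```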