From ddc849b9ecc71f0e55cc7e02b8da15bad4c1d3e0 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 11:36:52 +0200 Subject: [PATCH 01/33] Rename HashTable -> IdentityHashMap Also, give it a more standard map interface. Also, fix removals in small tables --- .../tools/dotc/core/SymDenotations.scala | 18 +++++----- ...{HashTable.scala => IdentityHashMap.scala} | 36 +++++++++---------- compiler/src/dotty/tools/dotc/util/Map.scala | 19 ++++++++++ 3 files changed, 46 insertions(+), 27 deletions(-) rename compiler/src/dotty/tools/dotc/util/{HashTable.scala => IdentityHashMap.scala} (86%) create mode 100644 compiler/src/dotty/tools/dotc/util/Map.scala diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index fc014ec6684f..1904546f06a1 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1564,14 +1564,14 @@ object SymDenotations { initPrivateWithin: Symbol) extends SymDenotation(symbol, maybeOwner, name, initFlags, initInfo, initPrivateWithin) { - import util.HashTable + import util.IdentityHashMap // ----- caches ------------------------------------------------------- private var myTypeParams: List[TypeSymbol] = null private var fullNameCache: SimpleIdentityMap[QualifiedNameKind, Name] = SimpleIdentityMap.Empty - private var myMemberCache: HashTable[Name, PreDenotation] = null + private var myMemberCache: IdentityHashMap[Name, PreDenotation] = null private var myMemberCachePeriod: Period = Nowhere /** A cache from types T to baseType(T, C) */ @@ -1582,9 +1582,9 @@ object SymDenotations { private var baseDataCache: BaseData = BaseData.None private var memberNamesCache: MemberNames = MemberNames.None - private def memberCache(using Context): HashTable[Name, PreDenotation] = { + private def memberCache(using Context): IdentityHashMap[Name, PreDenotation] = { if (myMemberCachePeriod != ctx.period) { - myMemberCache = HashTable() + myMemberCache = IdentityHashMap() myMemberCachePeriod = ctx.period } myMemberCache @@ -1613,12 +1613,12 @@ object SymDenotations { } def invalidateMemberCaches(sym: Symbol)(using Context): Unit = - if myMemberCache != null then myMemberCache.invalidate(sym.name) + if myMemberCache != null then myMemberCache.remove(sym.name) if !sym.flagsUNSAFE.is(Private) then invalidateMemberNamesCache() if sym.isWrappedToplevelDef then val outerCache = sym.owner.owner.asClass.classDenot.myMemberCache - if outerCache != null then outerCache.invalidate(sym.name) + if outerCache != null then outerCache.remove(sym.name) override def copyCaches(from: SymDenotation, phase: Phase)(using Context): this.type = { from match { @@ -1823,7 +1823,7 @@ object SymDenotations { */ def replace(prev: Symbol, replacement: Symbol)(using Context): Unit = { unforcedDecls.openForMutations.replace(prev, replacement) - if (myMemberCache != null) myMemberCache.invalidate(replacement.name) + if (myMemberCache != null) myMemberCache.remove(replacement.name) } /** Delete symbol from current scope. 
@@ -1832,7 +1832,7 @@ object SymDenotations { */ def delete(sym: Symbol)(using Context): Unit = { info.decls.openForMutations.unlink(sym) - if (myMemberCache != null) myMemberCache.invalidate(sym.name) + if (myMemberCache != null) myMemberCache.remove(sym.name) if (!sym.flagsUNSAFE.is(Private)) invalidateMemberNamesCache() } @@ -1861,7 +1861,7 @@ object SymDenotations { var denots: PreDenotation = memberCache.lookup(name) if denots == null then denots = computeMembersNamed(name) - memberCache.enter(name, denots) + memberCache(name) = denots else if Config.checkCacheMembersNamed then val denots1 = computeMembersNamed(name) assert(denots.exists == denots1.exists, s"cache inconsistency: cached: $denots, computed $denots1, name = $name, owner = $this") diff --git a/compiler/src/dotty/tools/dotc/util/HashTable.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala similarity index 86% rename from compiler/src/dotty/tools/dotc/util/HashTable.scala rename to compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index d91d6c5d1a9f..064f9a3509f0 100644 --- a/compiler/src/dotty/tools/dotc/util/HashTable.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.util -object HashTable: +object IdentityHashMap: /** The number of elements up to which dense packing is used. * If the number of elements reaches `DenseLimit` a hash table is used instead */ @@ -19,9 +19,9 @@ object HashTable: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. */ -class HashTable[Key >: Null <: AnyRef, Value >: Null <: AnyRef] - (initialCapacity: Int = 8, capacityMultiple: Int = 3): - import HashTable.DenseLimit +class IdentityHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] + (initialCapacity: Int = 8, capacityMultiple: Int = 3) extends Map[Key, Value]: + import IdentityHashMap.DenseLimit private var used: Int = _ private var limit: Int = _ @@ -33,13 +33,14 @@ class HashTable[Key >: Null <: AnyRef, Value >: Null <: AnyRef] limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple private def roundToPower(n: Int) = - if Integer.bitCount(n) == 1 then n + if n < 4 then 4 + else if Integer.bitCount(n) == 1 then n else 1 << (32 - Integer.numberOfLeadingZeros(n)) /** Remove all elements from this table and set back to initial configuration */ def clear(): Unit = used = 0 - allocate(roundToPower(initialCapacity max 4)) + allocate(roundToPower(initialCapacity)) /** The number of elements in the set */ def size: Int = used @@ -73,7 +74,7 @@ class HashTable[Key >: Null <: AnyRef, Value >: Null <: AnyRef] k = keyAt(idx) null - def enter(key: Key, value: Value): Unit = + def update(key: Key, value: Value): Unit = var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -87,21 +88,20 @@ class HashTable[Key >: Null <: AnyRef, Value >: Null <: AnyRef] used += 1 if used > limit then growTable() - def invalidate(key: Key): Unit = + def remove(key: Key): Unit = var idx = firstIndex(key) var k = keyAt(idx) while k != null do if isEqual(k, key) then var hole = idx - if !isDense then - while - idx = nextIndex(idx) - k = keyAt(idx) - k != null && index(hash(k)) != idx - do - table(hole) = k - table(hole + 1) = valueAt(idx) - hole = idx + while + idx = nextIndex(idx) + k = keyAt(idx) + k != null && (isDense || index(hash(k)) != idx) + do + table(hole) = k + table(hole + 1) = valueAt(idx) + hole = idx table(hole) = null used -= 1 return @@ -139,4 +139,4 @@ class 
HashTable[Key >: Null <: AnyRef, Value >: Null <: AnyRef] override def toString: String = iterator.map((k, v) => s"$k -> $v").mkString("HashTable(", ", ", ")") -end HashTable +end IdentityHashMap diff --git a/compiler/src/dotty/tools/dotc/util/Map.scala b/compiler/src/dotty/tools/dotc/util/Map.scala new file mode 100644 index 000000000000..b4dcd4db2ae0 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/Map.scala @@ -0,0 +1,19 @@ +package dotty.tools.dotc.util + +/** A common class for lightweight mutable maps. + */ +abstract class Map[Key >: Null <: AnyRef, Value >: Null <: AnyRef]: + + def lookup(x: Key): Value /* | Null */ + + def update(k: Key, v: Value): Unit + + def remove(k: Key): Unit + + def size: Int + + def clear(): Unit + + def iterator: Iterator[(Key, Value)] + + def get(x: Key): Option[Value] = Option(lookup(x)) From cf671f88eb36083aacede170325509defd137734 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 13:26:31 +0200 Subject: [PATCH 02/33] Generalize HashMap Breakout specialized version for `eq` and `equals`. The duplicate some methods for efficiency. --- .../tools/dotc/util/GenericHashMap.scala | 146 ++++++++++++++++++ .../src/dotty/tools/dotc/util/HashMap.scala | 54 +++++++ .../tools/dotc/util/IdentityHashMap.scala | 112 ++------------ 3 files changed, 212 insertions(+), 100 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/GenericHashMap.scala create mode 100644 compiler/src/dotty/tools/dotc/util/HashMap.scala diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala new file mode 100644 index 000000000000..6eada4cacb0a --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -0,0 +1,146 @@ +package dotty.tools.dotc.util + +object GenericHashMap: + + /** The number of elements up to which dense packing is used. + * If the number of elements reaches `DenseLimit` a hash table is used instead + */ + inline val DenseLimit = 8 + +/** A hash table using open hashing with linear scan which is also very space efficient + * at small sizes. The implementations of `hash` and `isEqual` are left open. They have + * to be provided by subclasses. + * + * @param initialCapacity Indicates the initial number of slots in the hash table. + * The actual number of slots is always a power of 2, so the + * initial size of the table will be the smallest power of two + * that is equal or greater than the given `initialCapacity`. + * Minimum value is 4. + * @param capacityMultiple The minimum multiple of capacity relative to used elements. + * The hash table will be re-sized once the number of elements + * multiplied by capacityMultiple exceeds the current size of the hash table. + * However, a table of size up to DenseLimit will be re-sized only + * once the number of elements reaches the table's size. 
+ */ +abstract class GenericHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] + (protected val initialCapacity: Int = 8, + protected val capacityMultiple: Int = 3) extends Map[Key, Value]: + import GenericHashMap.DenseLimit + + protected var used: Int = _ + protected var limit: Int = _ + protected var table: Array[AnyRef] = _ + clear() + + private def allocate(capacity: Int) = + table = new Array[AnyRef](capacity * 2) + limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple + + private def roundToPower(n: Int) = + if n < 4 then 4 + else if Integer.bitCount(n) == 1 then n + else 1 << (32 - Integer.numberOfLeadingZeros(n)) + + /** Remove all elements from this table and set back to initial configuration */ + def clear(): Unit = + used = 0 + allocate(roundToPower(initialCapacity)) + + /** The number of elements in the set */ + def size: Int = used + + protected def isDense = limit < DenseLimit + + /** Hashcode, to be implemented in subclass */ + protected def hash(x: Key): Int + + /** Equality, to be implemented in subclass */ + protected def isEqual(x: Key, y: Key): Boolean + + /** Turn hashcode `x` into a table index */ + private def index(x: Int): Int = x & (table.length - 2) + + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = index(idx + 2) + + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + + /** Find entry such that `isEqual(x, entry)`. If it exists, return it. + * If not, enter `x` in set and return `x`. + */ + def lookup(key: Key): Value = + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + null + + def update(key: Key, value: Value): Unit = + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then + table(idx + 1) = value + return + idx = nextIndex(idx) + k = keyAt(idx) + table(idx) = key + table(idx + 1) = value + used += 1 + if used > limit then growTable() + + def remove(key: Key): Unit = + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then + var hole = idx + while + idx = nextIndex(idx) + k = keyAt(idx) + k != null && (isDense || index(hash(k)) != idx) + do + table(hole) = k + table(hole + 1) = valueAt(idx) + hole = idx + table(hole) = null + used -= 1 + return + idx = nextIndex(idx) + k = keyAt(idx) + + private def addOld(key: Key, value: AnyRef): Unit = + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + idx = nextIndex(idx) + k = keyAt(idx) + table(idx) = key + table(idx + 1) = value + + protected def growTable(): Unit = + val oldTable = table + val newLength = + if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) + else table.length + allocate(newLength) + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val key = oldTable(idx).asInstanceOf[Key] + if key != null then addOld(key, oldTable(idx + 1)) + idx += 2 + + def iterator: Iterator[(Key, Value)] = + for idx <- (0 until table.length by 2).iterator + if keyAt(idx) != null + yield (keyAt(idx), valueAt(idx)) + + override def toString: String = + iterator.map((k, v) => s"$k -> $v").mkString("LinearTable(", ", ", ")") +end GenericHashMap diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala 
b/compiler/src/dotty/tools/dotc/util/HashMap.scala new file mode 100644 index 000000000000..0457ccd6c6e1 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -0,0 +1,54 @@ +package dotty.tools.dotc.util + +/** A specialized implementation of GenericHashMap with standard hashCode and equals + * as comparison + */ +class HashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] + (initialCapacity: Int = 8, capacityMultiple: Int = 3) +extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): + import GenericHashMap.DenseLimit + + /** Hashcode, by default `System.identityHashCode`, but can be overriden */ + final def hash(x: Key): Int = x.hashCode + + /** Equality, by default `eq`, but can be overridden */ + final def isEqual(x: Key, y: Key): Boolean = x.equals(y) + + // The following methdods are duplicated from GenericHashMap + // to avoid polymorphic dispatches + + /** Turn hashcode `x` into a table index */ + private def index(x: Int): Int = x & (table.length - 2) + + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = index(idx + 2) + + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + + /** Find entry such that `isEqual(x, entry)`. If it exists, return it. + * If not, enter `x` in set and return `x`. + */ + override def lookup(key: Key): Value = + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + null + + override def update(key: Key, value: Value): Unit = + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then + table(idx + 1) = value + return + idx = nextIndex(idx) + k = keyAt(idx) + table(idx) = key + table(idx + 1) = value + used += 1 + if used > limit then growTable() +end HashMap diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index 064f9a3509f0..bb4f3c886258 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -1,57 +1,21 @@ package dotty.tools.dotc.util -object IdentityHashMap: - /** The number of elements up to which dense packing is used. - * If the number of elements reaches `DenseLimit` a hash table is used instead - */ - inline val DenseLimit = 8 - -/** A hash table using open hashing with linear scan which is also very space efficient - * at small sizes. - * @param initialCapacity Indicates the initial number of slots in the hash table. - * The actual number of slots is always a power of 2, so the - * initial size of the table will be the smallest power of two - * that is equal or greater than the given `initialCapacity`. - * Minimum value is 4. - * @param capacityMultiple The minimum multiple of capacity relative to used elements. - * The hash table will be re-sized once the number of elements - * multiplied by capacityMultiple exceeds the current size of the hash table. - * However, a table of size up to DenseLimit will be re-sized only - * once the number of elements reaches the table's size. +/** A specialized implementation of GenericHashMap with identity hash and `eq` + * as comparison. 
*/ class IdentityHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] - (initialCapacity: Int = 8, capacityMultiple: Int = 3) extends Map[Key, Value]: - import IdentityHashMap.DenseLimit - - private var used: Int = _ - private var limit: Int = _ - private var table: Array[AnyRef] = _ - clear() - - private def allocate(capacity: Int) = - table = new Array[AnyRef](capacity * 2) - limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple - - private def roundToPower(n: Int) = - if n < 4 then 4 - else if Integer.bitCount(n) == 1 then n - else 1 << (32 - Integer.numberOfLeadingZeros(n)) - - /** Remove all elements from this table and set back to initial configuration */ - def clear(): Unit = - used = 0 - allocate(roundToPower(initialCapacity)) - - /** The number of elements in the set */ - def size: Int = used - - private def isDense = limit < DenseLimit + (initialCapacity: Int = 8, capacityMultiple: Int = 3) +extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): + import GenericHashMap.DenseLimit /** Hashcode, by default `System.identityHashCode`, but can be overriden */ - protected def hash(x: Key): Int = System.identityHashCode(x) + final def hash(x: Key): Int = System.identityHashCode(x) /** Equality, by default `eq`, but can be overridden */ - protected def isEqual(x: Key, y: Key): Boolean = x eq y + final def isEqual(x: Key, y: Key): Boolean = x eq y + + // The following methdods are duplicated from GenericHashMap + // to avoid polymorphic dispatches /** Turn hashcode `x` into a table index */ private def index(x: Int): Int = x & (table.length - 2) @@ -65,7 +29,7 @@ class IdentityHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] /** Find entry such that `isEqual(x, entry)`. If it exists, return it. * If not, enter `x` in set and return `x`. 
*/ - def lookup(key: Key): Value = + override def lookup(key: Key): Value = var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -74,7 +38,7 @@ class IdentityHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] k = keyAt(idx) null - def update(key: Key, value: Value): Unit = + override def update(key: Key, value: Value): Unit = var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -87,56 +51,4 @@ class IdentityHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] table(idx + 1) = value used += 1 if used > limit then growTable() - - def remove(key: Key): Unit = - var idx = firstIndex(key) - var k = keyAt(idx) - while k != null do - if isEqual(k, key) then - var hole = idx - while - idx = nextIndex(idx) - k = keyAt(idx) - k != null && (isDense || index(hash(k)) != idx) - do - table(hole) = k - table(hole + 1) = valueAt(idx) - hole = idx - table(hole) = null - used -= 1 - return - idx = nextIndex(idx) - k = keyAt(idx) - - private def addOld(key: Key, value: AnyRef): Unit = - var idx = firstIndex(key) - var k = keyAt(idx) - while k != null do - idx = nextIndex(idx) - k = keyAt(idx) - table(idx) = key - table(idx + 1) = value - - private def growTable(): Unit = - val oldTable = table - val newLength = - if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) - else table.length - allocate(newLength) - if isDense then - Array.copy(oldTable, 0, table, 0, oldTable.length) - else - var idx = 0 - while idx < oldTable.length do - val key = oldTable(idx).asInstanceOf[Key] - if key != null then addOld(key, oldTable(idx + 1)) - idx += 2 - - def iterator: Iterator[(Key, Value)] = - for idx <- (0 until table.length by 2).iterator - if keyAt(idx) != null - yield (keyAt(idx), valueAt(idx)) - - override def toString: String = - iterator.map((k, v) => s"$k -> $v").mkString("HashTable(", ", ", ")") end IdentityHashMap From c48898f9a6c3dad22baf4e2e77a4cbbb9457cacb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 13:30:59 +0200 Subject: [PATCH 03/33] Remove wrong comment --- compiler/src/dotty/tools/dotc/util/GenericHashMap.scala | 3 --- compiler/src/dotty/tools/dotc/util/HashMap.scala | 3 --- compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala | 3 --- 3 files changed, 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 6eada4cacb0a..81fb490b227a 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -66,9 +66,6 @@ abstract class GenericHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - /** Find entry such that `isEqual(x, entry)`. If it exists, return it. - * If not, enter `x` in set and return `x`. - */ def lookup(key: Key): Value = var idx = firstIndex(key) var k = keyAt(idx) diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index 0457ccd6c6e1..7cb1d539c69f 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -26,9 +26,6 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - /** Find entry such that `isEqual(x, entry)`. 
If it exists, return it. - * If not, enter `x` in set and return `x`. - */ override def lookup(key: Key): Value = var idx = firstIndex(key) var k = keyAt(idx) diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index bb4f3c886258..25a4c02fd1dc 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -26,9 +26,6 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - /** Find entry such that `isEqual(x, entry)`. If it exists, return it. - * If not, enter `x` in set and return `x`. - */ override def lookup(key: Key): Value = var idx = firstIndex(key) var k = keyAt(idx) From 0e534d8cfccf46efe377e52462cdb813d8f37d93 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 13:33:40 +0200 Subject: [PATCH 04/33] Break out dual maps in Names#derived into its own LinearMap data type. The reason for using a linear map instead of a mutable.Map here is that most derived instances are very small. --- .../src/dotty/tools/dotc/core/Names.scala | 40 ++++------------- .../src/dotty/tools/dotc/util/LinearMap.scala | 43 +++++++++++++++++++ 2 files changed, 52 insertions(+), 31 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/LinearMap.scala diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index 61196f245dc8..f9e21c3525da 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -10,7 +10,7 @@ import StdNames.str import scala.internal.Chars.isIdentifierStart import collection.immutable import config.Config -import java.util.HashMap +import util.LinearMap import scala.annotation.internal.sharable @@ -182,38 +182,16 @@ object Names { def underlying: TermName = unsupported("underlying") @sharable // because of synchronized block in `and` - private var derivedNames: immutable.Map[NameInfo, DerivedName] | HashMap[NameInfo, DerivedName] = - immutable.Map.empty[NameInfo, DerivedName] - - private def getDerived(info: NameInfo): DerivedName /* | Null */ = (derivedNames: @unchecked) match { - case derivedNames: immutable.AbstractMap[NameInfo, DerivedName] @unchecked => - if (derivedNames.contains(info)) derivedNames(info) else null - case derivedNames: HashMap[NameInfo, DerivedName] @unchecked => - derivedNames.get(info) - } - - private def putDerived(info: NameInfo, name: DerivedName): name.type = { - derivedNames match { - case derivedNames: immutable.Map[NameInfo, DerivedName] @unchecked => - if (derivedNames.size < 4) - this.derivedNames = derivedNames.updated(info, name) - else { - val newMap = new HashMap[NameInfo, DerivedName] - derivedNames.foreach { case (k, v) => newMap.put(k, v) } - newMap.put(info, name) - this.derivedNames = newMap - } - case derivedNames: HashMap[NameInfo, DerivedName] @unchecked => - derivedNames.put(info, name) - } - name - } + private var derivedNames: LinearMap[NameInfo, DerivedName] = LinearMap.Empty private def add(info: NameInfo): TermName = synchronized { - getDerived(info) match { - case null => putDerived(info, new DerivedName(this, info)) - case derivedName => derivedName - } + derivedNames(info) match + case null => + val derivedName = new DerivedName(this, info) + derivedNames = derivedNames.updated(info, derivedName) + 
derivedName + case derivedName => + derivedName } private def rewrap(underlying: TermName) = diff --git a/compiler/src/dotty/tools/dotc/util/LinearMap.scala b/compiler/src/dotty/tools/dotc/util/LinearMap.scala new file mode 100644 index 000000000000..77c6b3f940c1 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/LinearMap.scala @@ -0,0 +1,43 @@ +package dotty.tools.dotc.util + +import collection.immutable + +/** A linear map is a map where after an `updated` the previous map + * value cannot be used anymore. The map is implemented as an immutable + * map for sizes <= 4 (where immutable maps have specialized, compact + * representations) and as a HashMap for larger sizes. + */ +opaque type LinearMap[K <: AnyRef, V >: Null <: AnyRef] = + immutable.Map[K, V] | HashMap[K, V] + +object LinearMap: + + def Empty[K <: AnyRef, V >: Null <: AnyRef]: LinearMap[K, V] = + immutable.Map.empty[K, V] + + extension [K <: AnyRef, V >: Null <: AnyRef](m: LinearMap[K, V]): + + def apply(key: K): V /*| Null*/ = m match + case m: immutable.Map[K, V] @unchecked => + if m.contains(key) then m(key) else null + case m: HashMap[K, V] @unchecked => + m.get(key) + + def updated(key: K, value: V): LinearMap[K, V] = m match + case m: immutable.Map[K, V] @unchecked => + if m.size < 4 then + m.updated(key, value) + else + val m1 = HashMap[K, V]() + m.foreach(m1.put(_, _)) + m1.put(key, value) + m1 + case m: HashMap[K, V] @unchecked => + m.put(key, value) + m + + def size = m match + case m: immutable.Map[K, V] @unchecked => m.size + case m: HashMap[K, V] @unchecked => m.size + +end LinearMap \ No newline at end of file From 228aee7453029681a928057bdb3cf6afe40f7764 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 13:34:53 +0200 Subject: [PATCH 05/33] Rename Empty -> empty in SimpleIdentityMap It is already `empty` in SimpleIdentitySet. That way we use the same convention as in the stdlib. 
# Conflicts: # compiler/src/dotty/tools/dotc/util/LinearIdentityMap.scala --- compiler/src/dotty/tools/dotc/core/Contexts.scala | 2 +- compiler/src/dotty/tools/dotc/core/Definitions.scala | 2 +- compiler/src/dotty/tools/dotc/core/GadtConstraint.scala | 6 +++--- compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala | 2 +- compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 6 +++--- compiler/src/dotty/tools/dotc/typer/ImportInfo.scala | 6 +++--- compiler/src/dotty/tools/dotc/typer/Inferencing.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Inliner.scala | 2 +- compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala | 6 +++--- 11 files changed, 19 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 28cfe0d0a4f7..d94bdddef6d0 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -289,7 +289,7 @@ object Contexts { private def lookup(key: Phase | SourceFile): Context = util.Stats.record("Context.related.lookup") if related == null then - related = SimpleIdentityMap.Empty + related = SimpleIdentityMap.empty null else related(key) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 9e4608fda3e0..0bbd62123195 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1603,7 +1603,7 @@ class Definitions { valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = - SimpleIdentityMap.Empty[Symbol] + SimpleIdentityMap.empty[Symbol] .updated(AnyClass, ObjectClass) .updated(AnyValClass, ObjectClass) .updated(SingletonClass, ObjectClass) diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 1596b5fcdd7f..515163764c97 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -65,9 +65,9 @@ final class ProperGadtConstraint private( import dotty.tools.dotc.config.Printers.{gadts, gadtsConstr} def this() = this( - myConstraint = new OrderingConstraint(SimpleIdentityMap.Empty, SimpleIdentityMap.Empty, SimpleIdentityMap.Empty), - mapping = SimpleIdentityMap.Empty, - reverseMapping = SimpleIdentityMap.Empty + myConstraint = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty), + mapping = SimpleIdentityMap.empty, + reverseMapping = SimpleIdentityMap.empty ) /** Exposes ConstraintHandling.subsumes */ diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index af55456df539..c42344c1fce2 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -105,7 +105,7 @@ object OrderingConstraint { } @sharable - val empty = new OrderingConstraint(SimpleIdentityMap.Empty, SimpleIdentityMap.Empty, SimpleIdentityMap.Empty) + val empty = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty) } import OrderingConstraint._ diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala 
b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 1904546f06a1..9bc36cfc5d1e 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1569,7 +1569,7 @@ object SymDenotations { // ----- caches ------------------------------------------------------- private var myTypeParams: List[TypeSymbol] = null - private var fullNameCache: SimpleIdentityMap[QualifiedNameKind, Name] = SimpleIdentityMap.Empty + private var fullNameCache: SimpleIdentityMap[QualifiedNameKind, Name] = SimpleIdentityMap.empty private var myMemberCache: IdentityHashMap[Name, PreDenotation] = null private var myMemberCachePeriod: Period = Nowhere @@ -2609,7 +2609,7 @@ object SymDenotations { } private class MemberNamesImpl(createdAt: Period) extends InheritedCacheImpl(createdAt) with MemberNames { - private var cache: SimpleIdentityMap[NameFilter, Set[Name]] = SimpleIdentityMap.Empty + private var cache: SimpleIdentityMap[NameFilter, Set[Name]] = SimpleIdentityMap.empty final def isValid(using Context): Boolean = cache != null && isValidAt(ctx.phase) @@ -2622,7 +2622,7 @@ object SymDenotations { */ def invalidate(): Unit = if (cache != null) - if (locked) cache = SimpleIdentityMap.Empty + if (locked) cache = SimpleIdentityMap.empty else { cache = null invalidateDependents() diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index 6704ffd1157f..a88af260eed0 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -99,8 +99,8 @@ class ImportInfo(symf: Context ?=> Symbol, /** Compute info relating to the selector list */ private def ensureInitialized(): Unit = if myExcluded == null then myExcluded = Set() - myForwardMapping = SimpleIdentityMap.Empty - myReverseMapping = SimpleIdentityMap.Empty + myForwardMapping = SimpleIdentityMap.empty + myReverseMapping = SimpleIdentityMap.empty for sel <- selectors do if sel.isWildcard then myWildcardImport = true @@ -180,7 +180,7 @@ class ImportInfo(symf: Context ?=> Symbol, private var myUnimported: Symbol = _ private var myOwner: Symbol = null - private var myResults: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.Empty + private var myResults: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.empty /** Does this import clause or a preceding import clause import `owner.feature`? 
*/ def featureImported(feature: TermName, owner: Symbol)(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 39cc1ffa10ac..962c52fed35a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -438,7 +438,7 @@ object Inferencing { if (vmap1 eq vmap) vmap else propagate(vmap1) } - propagate(accu(SimpleIdentityMap.Empty, tp)) + propagate(accu(SimpleIdentityMap.empty, tp)) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index c97ea8c4b546..93c1b42b2ae6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -1041,7 +1041,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { } } - extractBindVariance(SimpleIdentityMap.Empty, tpt.tpe) + extractBindVariance(SimpleIdentityMap.empty, tpt.tpe) } def addTypeBindings(typeBinds: TypeBindsMap)(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 0f33a950d2c0..fa368a0b7213 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -246,7 +246,7 @@ object ProtoTypes { var typedArgs: List[Tree] = Nil /** A map in which typed arguments can be stored to be later integrated in `typedArgs`. */ - var typedArg: SimpleIdentityMap[untpd.Tree, Tree] = SimpleIdentityMap.Empty + var typedArg: SimpleIdentityMap[untpd.Tree, Tree] = SimpleIdentityMap.empty /** The tupled or untupled version of this prototype, if it has been computed */ var tupledDual: Type = NoType diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index bf88a38b4cb9..5d0b94a45186 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2594,7 +2594,7 @@ class Typer extends Namer def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = { val buf = new mutable.ListBuffer[Tree] - var enumContexts: SimpleIdentityMap[Symbol, Context] = SimpleIdentityMap.Empty + var enumContexts: SimpleIdentityMap[Symbol, Context] = SimpleIdentityMap.empty val initialNotNullInfos = ctx.notNullInfos // A map from `enum` symbols to the contexts enclosing their definitions @tailrec def traverse(stats: List[untpd.Tree])(using Context): (List[Tree], Context) = stats match { diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala index b5d2b840c5e7..437f7e140f68 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala @@ -41,7 +41,7 @@ object SimpleIdentityMap { def forallBinding(f: (AnyRef, Null) => Boolean) = true } - def Empty[K <: AnyRef]: SimpleIdentityMap[K, Null] = myEmpty.asInstanceOf[SimpleIdentityMap[K, Null]] + def empty[K <: AnyRef]: SimpleIdentityMap[K, Null] = myEmpty.asInstanceOf[SimpleIdentityMap[K, Null]] class Map1[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V) extends SimpleIdentityMap[K, V] { def size: Int = 1 @@ -49,7 +49,7 @@ object SimpleIdentityMap { if (k eq k1) v1 else null def remove(k: K): SimpleIdentityMap[K, V] = - if (k eq k1) Empty.asInstanceOf[SimpleIdentityMap[K, V]] + if (k eq k1) 
empty.asInstanceOf[SimpleIdentityMap[K, V]] else this def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleIdentityMap[K, V1] = if (k eq k1) new Map1(k, v) @@ -162,7 +162,7 @@ object SimpleIdentityMap { if (bindings(i) eq k) return { if (size == CompactifyThreshold) { - var m: SimpleIdentityMap[K, V] = Empty[K] + var m: SimpleIdentityMap[K, V] = empty[K] for (j <- 0 until bindings.length by 2) if (j != i) m = m.updated(key(j), value(j)) m From 97cd9890cbcb73af5adaa1e85839e6400a989b6c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 14:35:30 +0200 Subject: [PATCH 06/33] Polish map and set API in dotc.util - Rename Map to MutableMap, in order to avoid confusion with the default immutable map - Rename Set to MutableSet - Rename `apply` to `lookup` in LinearMap - Drop >: Null lower bound for keys --- compiler/src/dotty/tools/dotc/Run.scala | 2 +- compiler/src/dotty/tools/dotc/core/Names.scala | 2 +- .../src/dotty/tools/dotc/util/GenericHashMap.scala | 4 ++-- compiler/src/dotty/tools/dotc/util/HashMap.scala | 2 +- compiler/src/dotty/tools/dotc/util/HashSet.scala | 2 +- .../src/dotty/tools/dotc/util/IdentityHashMap.scala | 2 +- compiler/src/dotty/tools/dotc/util/LinearMap.scala | 10 +++++----- .../tools/dotc/util/{Map.scala => MutableMap.scala} | 3 ++- .../tools/dotc/util/{Set.scala => MutableSet.scala} | 4 ++-- .../tools/languageserver/DottyLanguageServer.scala | 2 +- 10 files changed, 17 insertions(+), 16 deletions(-) rename compiler/src/dotty/tools/dotc/util/{Map.scala => MutableMap.scala} (82%) rename compiler/src/dotty/tools/dotc/util/{Set.scala => MutableSet.scala} (79%) diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 96c9a647b960..aacdb10295ca 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -14,7 +14,7 @@ import io.{AbstractFile, PlainFile} import Phases.unfusedPhases import scala.io.Codec -import util.{Set => _, _} +import util._ import reporting.Reporter import rewrites.Rewrites import java.io.{BufferedWriter, OutputStreamWriter} diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index f9e21c3525da..b4a1b9e0584e 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -185,7 +185,7 @@ object Names { private var derivedNames: LinearMap[NameInfo, DerivedName] = LinearMap.Empty private def add(info: NameInfo): TermName = synchronized { - derivedNames(info) match + derivedNames.lookup(info) match case null => val derivedName = new DerivedName(this, info) derivedNames = derivedNames.updated(info, derivedName) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 81fb490b227a..20dd78512739 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -22,9 +22,9 @@ object GenericHashMap: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. 
*/ -abstract class GenericHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] +abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] (protected val initialCapacity: Int = 8, - protected val capacityMultiple: Int = 3) extends Map[Key, Value]: + protected val capacityMultiple: Int = 3) extends MutableMap[Key, Value]: import GenericHashMap.DenseLimit protected var used: Int = _ diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index 7cb1d539c69f..cac47c4a6e51 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** A specialized implementation of GenericHashMap with standard hashCode and equals * as comparison */ -class HashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] +class HashMap[Key <: AnyRef, Value >: Null <: AnyRef] (initialCapacity: Int = 8, capacityMultiple: Int = 3) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index 9ffc982ad920..c06caac769ba 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc.util /** A hash set that allows some privileged protected access to its internals */ -class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int, loadFactor: Float = 0.25f) extends Set[T] { +class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int = 16, loadFactor: Float = 0.25f) extends MutableSet[T] { private var used: Int = _ private var limit: Int = _ private var table: Array[AnyRef] = _ diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index 25a4c02fd1dc..0ec448054c8b 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** A specialized implementation of GenericHashMap with identity hash and `eq` * as comparison. 
*/ -class IdentityHashMap[Key >: Null <: AnyRef, Value >: Null <: AnyRef] +class IdentityHashMap[Key <: AnyRef, Value >: Null <: AnyRef] (initialCapacity: Int = 8, capacityMultiple: Int = 3) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit diff --git a/compiler/src/dotty/tools/dotc/util/LinearMap.scala b/compiler/src/dotty/tools/dotc/util/LinearMap.scala index 77c6b3f940c1..9e06ae0a813d 100644 --- a/compiler/src/dotty/tools/dotc/util/LinearMap.scala +++ b/compiler/src/dotty/tools/dotc/util/LinearMap.scala @@ -17,11 +17,11 @@ object LinearMap: extension [K <: AnyRef, V >: Null <: AnyRef](m: LinearMap[K, V]): - def apply(key: K): V /*| Null*/ = m match + def lookup(key: K): V /*| Null*/ = m match case m: immutable.Map[K, V] @unchecked => if m.contains(key) then m(key) else null case m: HashMap[K, V] @unchecked => - m.get(key) + m.lookup(key) def updated(key: K, value: V): LinearMap[K, V] = m match case m: immutable.Map[K, V] @unchecked => @@ -29,11 +29,11 @@ object LinearMap: m.updated(key, value) else val m1 = HashMap[K, V]() - m.foreach(m1.put(_, _)) - m1.put(key, value) + m.foreach(m1(_) = _) + m1(key) = value m1 case m: HashMap[K, V] @unchecked => - m.put(key, value) + m(key) = value m def size = m match diff --git a/compiler/src/dotty/tools/dotc/util/Map.scala b/compiler/src/dotty/tools/dotc/util/MutableMap.scala similarity index 82% rename from compiler/src/dotty/tools/dotc/util/Map.scala rename to compiler/src/dotty/tools/dotc/util/MutableMap.scala index b4dcd4db2ae0..6b4dad177554 100644 --- a/compiler/src/dotty/tools/dotc/util/Map.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableMap.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc.util /** A common class for lightweight mutable maps. */ -abstract class Map[Key >: Null <: AnyRef, Value >: Null <: AnyRef]: +abstract class MutableMap[Key <: AnyRef, Value >: Null <: AnyRef]: def lookup(x: Key): Value /* | Null */ @@ -17,3 +17,4 @@ abstract class Map[Key >: Null <: AnyRef, Value >: Null <: AnyRef]: def iterator: Iterator[(Key, Value)] def get(x: Key): Option[Value] = Option(lookup(x)) + diff --git a/compiler/src/dotty/tools/dotc/util/Set.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala similarity index 79% rename from compiler/src/dotty/tools/dotc/util/Set.scala rename to compiler/src/dotty/tools/dotc/util/MutableSet.scala index 3a8fe9e823aa..8587b56277b3 100644 --- a/compiler/src/dotty/tools/dotc/util/Set.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.util -/** A common class for lightweight sets. +/** A common class for lightweight mutable sets. 
*/ -abstract class Set[T >: Null] { +abstract class MutableSet[T >: Null] { def findEntry(x: T): T diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index 277f80428cc5..80018b324fa0 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -23,7 +23,7 @@ import Comments._, Constants._, Contexts._, Flags._, Names._, NameOps._, Symbols import classpath.ClassPathEntries import reporting._ import typer.Typer -import util.{Set => _, _} +import util._ import interactive._, interactive.InteractiveDriver._ import decompiler.IDEDecompilerDriver import Interactive.Include From 5cd4505837aea4451056692676ba0e68ccbee780 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 14:57:08 +0200 Subject: [PATCH 07/33] Drop TypeHashSet Use a normal util.HashSet instead --- compiler/src/dotty/tools/dotc/core/Types.scala | 6 +----- .../src/dotty/tools/dotc/typer/Implicits.scala | 16 ++++++++-------- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 64419a101578..7bf73f8d6c2f 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5527,14 +5527,10 @@ object Types { def apply(x: Unit, tp: Type): Unit = foldOver(p(tp), tp) } - class TypeHashSet extends util.HashSet[Type](64): - override def hash(x: Type): Int = System.identityHashCode(x) - override def isEqual(x: Type, y: Type) = x.eq(y) - class NamedPartsAccumulator(p: NamedType => Boolean)(using Context) extends TypeAccumulator[List[NamedType]]: def maybeAdd(xs: List[NamedType], tp: NamedType): List[NamedType] = if p(tp) then tp :: xs else xs - val seen = TypeHashSet() + val seen = util.HashSet[Type]() def apply(xs: List[NamedType], tp: Type): List[NamedType] = if seen contains tp then xs else diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5499e2048ec4..73e4078fd7a7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -28,7 +28,7 @@ import Trees._ import transform.SymUtils._ import transform.TypeUtils._ import Hashable._ -import util.{SourceFile, NoSource} +import util.{SourceFile, NoSource, IdentityHashMap} import config.{Config, Feature} import Feature.migrateTo3 import config.Printers.{implicits, implicitsDetailed} @@ -289,7 +289,7 @@ object Implicits: * @param outerCtx the next outer context that makes visible further implicits */ class ContextualImplicits(val refs: List[ImplicitRef], val outerImplicits: ContextualImplicits)(initctx: Context) extends ImplicitRefs(initctx) { - private val eligibleCache = new java.util.IdentityHashMap[Type, List[Candidate]] + private val eligibleCache = IdentityHashMap[Type, List[Candidate]]() /** The level increases if current context has a different owner or scope than * the context of the next-outer ImplicitRefs. 
This is however disabled under @@ -316,7 +316,7 @@ object Implicits: def eligible(tp: Type): List[Candidate] = if (tp.hash == NotCached) computeEligible(tp) else { - val eligibles = eligibleCache.get(tp) + val eligibles = eligibleCache.lookup(tp) if (eligibles != null) { def elided(ci: ContextualImplicits): Int = { val n = ci.refs.length @@ -329,7 +329,7 @@ object Implicits: else if (irefCtx eq NoContext) Nil else { val result = computeEligible(tp) - eligibleCache.put(tp, result) + eligibleCache(tp) = result result } } @@ -528,7 +528,7 @@ trait ImplicitRunInfo: private var provisional: Boolean = _ private var parts: mutable.LinkedHashSet[Type] = _ - private val partSeen = TypeHashSet() + private val partSeen = util.HashSet[Type]() def traverse(t: Type) = if partSeen.contains(t) then () @@ -566,8 +566,8 @@ trait ImplicitRunInfo: (parts, provisional) end collectParts - val seen = TypeHashSet() - val incomplete = TypeHashSet() + val seen = util.HashSet[Type]() + val incomplete = util.HashSet[Type]() def collectCompanions(tp: Type, parts: collection.Set[Type]): TermRefSet = val companions = new TermRefSet @@ -687,7 +687,7 @@ trait ImplicitRunInfo: record(i"implicitScope") val liftToAnchors = new TypeMap: override def stopAtStatic = true - private val seen = TypeHashSet() + private val seen = util.HashSet[Type]() def applyToUnderlying(t: TypeProxy) = if seen.contains(t) then From 3d469cc4514a560ef36594156c5c146fec6fa83a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 17:13:03 +0200 Subject: [PATCH 08/33] Optimize LinearMap Using AbstractMap instead of Map speeds up type tests --- compiler/src/dotty/tools/dotc/core/Names.scala | 2 +- compiler/src/dotty/tools/dotc/util/LinearMap.scala | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index b4a1b9e0584e..243153ad627a 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -182,7 +182,7 @@ object Names { def underlying: TermName = unsupported("underlying") @sharable // because of synchronized block in `and` - private var derivedNames: LinearMap[NameInfo, DerivedName] = LinearMap.Empty + private var derivedNames: LinearMap[NameInfo, DerivedName] = LinearMap.empty private def add(info: NameInfo): TermName = synchronized { derivedNames.lookup(info) match diff --git a/compiler/src/dotty/tools/dotc/util/LinearMap.scala b/compiler/src/dotty/tools/dotc/util/LinearMap.scala index 9e06ae0a813d..b4f8ef0d97a7 100644 --- a/compiler/src/dotty/tools/dotc/util/LinearMap.scala +++ b/compiler/src/dotty/tools/dotc/util/LinearMap.scala @@ -12,19 +12,19 @@ opaque type LinearMap[K <: AnyRef, V >: Null <: AnyRef] = object LinearMap: - def Empty[K <: AnyRef, V >: Null <: AnyRef]: LinearMap[K, V] = + def empty[K <: AnyRef, V >: Null <: AnyRef]: LinearMap[K, V] = immutable.Map.empty[K, V] extension [K <: AnyRef, V >: Null <: AnyRef](m: LinearMap[K, V]): - def lookup(key: K): V /*| Null*/ = m match - case m: immutable.Map[K, V] @unchecked => + def lookup(key: K): V /*| Null*/ = (m: @unchecked) match + case m: immutable.AbstractMap[K, V] @unchecked => if m.contains(key) then m(key) else null case m: HashMap[K, V] @unchecked => m.lookup(key) - def updated(key: K, value: V): LinearMap[K, V] = m match - case m: immutable.Map[K, V] @unchecked => + def updated(key: K, value: V): LinearMap[K, V] = (m: @unchecked) match + case m: immutable.AbstractMap[K, V] @unchecked => if 
m.size < 4 then m.updated(key, value) else @@ -36,8 +36,8 @@ object LinearMap: m(key) = value m - def size = m match - case m: immutable.Map[K, V] @unchecked => m.size + def size = (m: @unchecked) match + case m: immutable.AbstractMap[K, V] @unchecked => m.size case m: HashMap[K, V] @unchecked => m.size end LinearMap \ No newline at end of file From 660a0583f79ba8979c8344a7a7e7f67a4bbecc28 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 17:34:08 +0200 Subject: [PATCH 09/33] Make util.Set and util.HashSet more usable Add size and -= methods. Rename addEntry to +=. Rename findEntry to lookup. # Conflicts: # compiler/src/dotty/tools/dotc/util/HashSet.scala --- .../src/dotty/tools/dotc/core/TypeOps.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 2 +- .../dotty/tools/dotc/typer/Implicits.scala | 10 +- .../dotty/tools/dotc/typer/RefChecks.scala | 4 +- .../src/dotty/tools/dotc/util/HashSet.scala | 111 ++++++++++++------ .../dotty/tools/dotc/util/MutableSet.scala | 17 +-- 6 files changed, 92 insertions(+), 54 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 3d1cdaf759cf..e9eb3e011ded 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -187,7 +187,7 @@ object TypeOps: /** a faster version of cs1 intersect cs2 */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = { val cs2AsSet = new util.HashSet[ClassSymbol](128) - cs2.foreach(cs2AsSet.addEntry) + cs2.foreach(cs2AsSet +=) cs1.filter(cs2AsSet.contains) } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 7bf73f8d6c2f..7884b44a5f19 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5534,7 +5534,7 @@ object Types { def apply(xs: List[NamedType], tp: Type): List[NamedType] = if seen contains tp then xs else - seen.addEntry(tp) + seen += tp tp match case tp: TypeRef => foldOver(maybeAdd(xs, tp), tp) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 73e4078fd7a7..c1b6731abe92 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -534,7 +534,7 @@ trait ImplicitRunInfo: if partSeen.contains(t) then () else if implicitScopeCache.contains(t) then parts += t else - partSeen.addEntry(t) + partSeen += t t.dealias match case t: TypeRef => if isAnchor(t.symbol) then @@ -578,12 +578,12 @@ trait ImplicitRunInfo: is.companionRefs case None => if seen.contains(t) then - incomplete.addEntry(tp) // all references for `t` will be accounted for in `seen` so we return `EmptySet`. + incomplete += tp // all references for `t` will be accounted for in `seen` so we return `EmptySet`. TermRefSet.empty // on the other hand, the refs of `tp` are now inaccurate, so `tp` is marked incomplete. 
else - seen.addEntry(t) + seen += t val is = recur(t) - if !implicitScopeCache.contains(t) then incomplete.addEntry(tp) + if !implicitScopeCache.contains(t) then incomplete += tp is.companionRefs end iscopeRefs @@ -693,7 +693,7 @@ trait ImplicitRunInfo: if seen.contains(t) then WildcardType else - seen.addEntry(t) + seen += t t.underlying match case TypeBounds(lo, hi) => if defn.isBottomTypeAfterErasure(lo) then apply(hi) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 7f57adee6352..c85aee7aa65b 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -651,11 +651,11 @@ object RefChecks { val seenClasses = new util.HashSet[Symbol](256) def addDecls(cls: Symbol): Unit = if (!seenClasses.contains(cls)) { - seenClasses.addEntry(cls) + seenClasses += cls for (mbr <- cls.info.decls) if (mbr.isTerm && !mbr.isOneOf(Synthetic | Bridge) && mbr.memberCanMatchInheritedSymbols && !membersToCheck.contains(mbr.name)) - membersToCheck.addEntry(mbr.name) + membersToCheck += mbr.name cls.info.parents.map(_.classSymbol) .filter(_.isOneOf(AbstractOrTrait)) .dropWhile(_.isOneOf(JavaDefined | Scala2x)) diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index c06caac769ba..d172fbbbcfd0 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -1,13 +1,29 @@ package dotty.tools.dotc.util /** A hash set that allows some privileged protected access to its internals + * @param initialCapacity Indicates the initial number of slots in the hash table. + * The actual number of slots is always a power of 2, so the + * initial size of the table will be the smallest power of two + * that is equal or greater than the given `initialCapacity`. + * @param loadFactor The maximum fraction of used elements relative to capacity. + * The hash table will be re-sized once the number of elements exceeds + * the current size of the hash table multiplied by loadFactor. + * With the defaults given, the first resize of the table happens once the number of elements + * grows beyond 16. */ -class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int = 16, loadFactor: Float = 0.25f) extends MutableSet[T] { +class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 16, loadFactor: Float = 0.25f) extends MutableSet[T] { private var used: Int = _ private var limit: Int = _ private var table: Array[AnyRef] = _ - assert(Integer.bitCount(powerOfTwoInitialCapacity) == 1) + private def roundToPower(n: Int) = + if Integer.bitCount(n) == 1 then n + else + def recur(n: Int): Int = + if n == 1 then 2 + else recur(n >>> 1) << 1 + recur(n) + protected def isEqual(x: T, y: T): Boolean = x.equals(y) // Counters for Stats @@ -27,7 +43,7 @@ class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int = 16, loadFact /** Remove all elements from this set and set back to initial configuration */ def clear(): Unit = { used = 0 - allocate(powerOfTwoInitialCapacity) + allocate(roundToPower(initialCapacity)) } /** Turn hashcode `x` into a table index */ @@ -36,7 +52,7 @@ class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int = 16, loadFact /** Hashcode, can be overridden */ def hash(x: T): Int = x.hashCode - private def entryAt(idx: Int) = table.apply(idx).asInstanceOf[T] + private def entryAt(idx: Int) = table(idx).asInstanceOf[T] /** Find entry such that `isEqual(x, entry)`. 
If it exists, return it. * If not, enter `x` in set and return `x`. @@ -63,7 +79,7 @@ class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int = 16, loadFact } /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */ - def findEntry(x: T): T = { + def lookup(x: T): T = { if (Stats.enabled) accesses += 1 var h = index(hash(x)) var entry = entryAt(h) @@ -77,38 +93,6 @@ class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int = 16, loadFact private var rover: Int = -1 - /** Add entry `x` to set */ - def addEntry(x: T): Unit = { - if (Stats.enabled) accesses += 1 - var h = index(hash(x)) - var entry = entryAt(h) - while (entry ne null) { - if (isEqual(x, entry)) return - if (Stats.enabled) misses += 1 - h = index(h + 1) - entry = entryAt(h) - } - table(h) = x - used += 1 - if (used > (table.length >> 2)) growTable() - } - - /** Add all entries in `xs` to set */ - def addEntries(xs: TraversableOnce[T]): Unit = - xs.iterator foreach addEntry - - /** The iterator of all elements in the set */ - def iterator: Iterator[T] = new Iterator[T] { - private var i = 0 - def hasNext: Boolean = { - while (i < table.length && (table(i) eq null)) i += 1 - i < table.length - } - def next(): T = - if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] } - else null - } - /** Privileged access: Find first entry with given hashcode */ protected def findEntryByHash(hashCode: Int): T = { rover = index(hashCode) @@ -158,5 +142,58 @@ class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int = 16, loadFact } } + /** Add entry `x` to set */ + def += (x: T): Unit = { + if (Stats.enabled) accesses += 1 + var h = index(hash(x)) + var entry = entryAt(h) + while (entry ne null) { + if (isEqual(x, entry)) return + if (Stats.enabled) misses += 1 + h = index(h + 1) + entry = entryAt(h) + } + table(h) = x + used += 1 + if (used > (table.length >> 2)) growTable() + } + + def -= (x: T): Unit = + if (Stats.enabled) accesses += 1 + var h = index(hash(x)) + var entry = entryAt(h) + while entry != null do + if isEqual(x, entry) then + var hole = h + while + h = index(h + 1) + entry = entryAt(h) + entry != null && index(hash(entry)) != h + do + table(hole) = entry + hole = h + table(hole) = null + used -= 1 + return + h = index(h + 1) + entry = entryAt(h) + end -= + + /** Add all entries in `xs` to set */ + def ++= (xs: IterableOnce[T]): Unit = + xs.iterator.foreach(this += _) + + /** The iterator of all elements in the set */ + def iterator: Iterator[T] = new Iterator[T] { + private var i = 0 + def hasNext: Boolean = { + while (i < table.length && (table(i) eq null)) i += 1 + i < table.length + } + def next(): T = + if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] } + else null + } + override def toString(): String = "HashSet(%d / %d)".format(used, table.length) } diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index 8587b56277b3..12c762558f6e 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -4,20 +4,21 @@ package dotty.tools.dotc.util */ abstract class MutableSet[T >: Null] { - def findEntry(x: T): T + /** The entry in the set such that `isEqual(x, entry)`, or else `null`. 
*/ + def lookup(x: T): T /* | Null */ - def addEntry(x: T): Unit + def +=(x: T): Unit - def iterator: Iterator[T] + def clear(): Unit - def foreach[U](f: T => U): Unit = iterator foreach f + def size: Int - def apply(x: T): Boolean = contains(x) + def iterator: Iterator[T] - def contains(x: T): Boolean = - findEntry(x) != null + def contains(x: T): Boolean = lookup(x) != null + + def foreach[U](f: T => U): Unit = iterator foreach f def toList: List[T] = iterator.toList - def clear(): Unit } From 8107048fae3a69791c23b5a0aefb49198a2a849d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 17:41:47 +0200 Subject: [PATCH 10/33] Use LinearSet instead of mutable.HashSet in TailRec --- .../dotty/tools/dotc/transform/TailRec.scala | 5 +- .../src/dotty/tools/dotc/util/LinearSet.scala | 46 +++++++++++++++++++ 2 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/LinearSet.scala diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index f66944385893..d6b17f3c82e1 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -13,6 +13,7 @@ import core.StdNames.nme import core.Symbols._ import reporting._ import transform.MegaPhase.MiniPhase +import util.LinearSet import scala.collection.mutable @@ -258,7 +259,7 @@ class TailRec extends MiniPhase { } /** Symbols of Labeled blocks that are in tail position. */ - private val tailPositionLabeledSyms = new mutable.HashSet[Symbol]() + private var tailPositionLabeledSyms = LinearSet.empty[Symbol] private var inTailPosition = true @@ -283,7 +284,7 @@ class TailRec extends MiniPhase { * a recursive call of a @tailrec annotated method (i.e. `isMandatory`). */ private def isTraversalNeeded = - isMandatory || tailPositionLabeledSyms.nonEmpty + isMandatory || tailPositionLabeledSyms.size > 0 def noTailTransform(tree: Tree)(using Context): Tree = if (isTraversalNeeded) transform(tree, tailPosition = false) diff --git a/compiler/src/dotty/tools/dotc/util/LinearSet.scala b/compiler/src/dotty/tools/dotc/util/LinearSet.scala new file mode 100644 index 000000000000..ebd5903c9f05 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/LinearSet.scala @@ -0,0 +1,46 @@ +package dotty.tools.dotc.util +import collection.immutable + +/** A linear identity set is a set that uses `eq` as the underlying + * equality where after a `+` the previous set value cannot be used anymore. + * The set is implemented as an immutable set for + * sizes <= 4 and as a HashSet for larger sizes. 
+ */ +opaque type LinearSet[Elem >: Null <: AnyRef] = + immutable.Set[Elem] | HashSet[Elem] + +object LinearSet: + + def empty[Elem >: Null <: AnyRef]: LinearSet[Elem] = immutable.Set.empty[Elem] + + extension [Elem >: Null <: AnyRef](s: LinearSet[Elem]): + + def contains(elem: Elem): Boolean = (s: @unchecked) match + case s: immutable.AbstractSet[Elem] @unchecked => s.contains(elem) + case s: HashSet[Elem] @unchecked => s.contains(elem) + + def + (elem: Elem): LinearSet[Elem] = (s: @unchecked) match + case s: immutable.AbstractSet[Elem] @unchecked => + if s.size < 4 then + s + elem + else + val s1 = HashSet[Elem](initialCapacity = 8) + s.foreach(s1 += _) + s1 += elem + s1 + case s: HashSet[Elem] @unchecked => + s += elem + s + + def - (elem: Elem): LinearSet[Elem] = (s: @unchecked) match + case s: immutable.AbstractSet[Elem] @unchecked => + s - elem + case s: HashSet[Elem] @unchecked => + s -= elem + s + + def size = (s: @unchecked) match + case s: immutable.AbstractSet[Elem] @unchecked => s.size + case s: HashSet[Elem] @unchecked => s.size + +end LinearSet \ No newline at end of file From 9ffbc81816271167f00aa5e5ce186d2aa6ae1810 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 17:48:31 +0200 Subject: [PATCH 11/33] Use HashSet to record seen items Use HashSet to record seen items in type size and covering set computations. --- compiler/src/dotty/tools/dotc/core/Types.scala | 12 ++++++------ compiler/src/dotty/tools/dotc/util/LinearSet.scala | 7 +++---- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 7884b44a5f19..d70385094c41 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5568,11 +5568,11 @@ object Types { } class TypeSizeAccumulator(using Context) extends TypeAccumulator[Int] { - val seen = new java.util.IdentityHashMap[Type, Type] + var seen = util.HashSet[Type](initialCapacity = 8) def apply(n: Int, tp: Type): Int = - if (seen.get(tp) != null) n + if seen.contains(tp) then n else { - seen.put(tp, tp) + seen += tp tp match { case tp: AppliedType => foldOver(n + 1, tp) @@ -5589,11 +5589,11 @@ object Types { } class CoveringSetAccumulator(using Context) extends TypeAccumulator[Set[Symbol]] { - val seen = new java.util.IdentityHashMap[Type, Type] + var seen = util.HashSet[Type](initialCapacity = 8) def apply(cs: Set[Symbol], tp: Type): Set[Symbol] = - if (seen.get(tp) != null) cs + if seen.contains(tp) then cs else { - seen.put(tp, tp) + seen += tp tp match { case tp if tp.isTopType || tp.isBottomType => cs diff --git a/compiler/src/dotty/tools/dotc/util/LinearSet.scala b/compiler/src/dotty/tools/dotc/util/LinearSet.scala index ebd5903c9f05..13db1af8c613 100644 --- a/compiler/src/dotty/tools/dotc/util/LinearSet.scala +++ b/compiler/src/dotty/tools/dotc/util/LinearSet.scala @@ -1,10 +1,9 @@ package dotty.tools.dotc.util import collection.immutable -/** A linear identity set is a set that uses `eq` as the underlying - * equality where after a `+` the previous set value cannot be used anymore. - * The set is implemented as an immutable set for - * sizes <= 4 and as a HashSet for larger sizes. +/** A linear set is a set here after a `+` the previous set value cannot be + * used anymore. The set is implemented as an immutable set for sizes <= 4 + * and as a HashSet for larger sizes. 
*/ opaque type LinearSet[Elem >: Null <: AnyRef] = immutable.Set[Elem] | HashSet[Elem] From 5470757d69662196a8fd30a3cca60b08962fd8bd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 29 Aug 2020 17:49:49 +0200 Subject: [PATCH 12/33] Avoid postfixops --- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index e9eb3e011ded..8cfb7759b6d0 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -187,7 +187,7 @@ object TypeOps: /** a faster version of cs1 intersect cs2 */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = { val cs2AsSet = new util.HashSet[ClassSymbol](128) - cs2.foreach(cs2AsSet +=) + cs2.foreach(cs2AsSet += _) cs1.filter(cs2AsSet.contains) } From 64746d5e91cc3c51948af32e1cbeed0866ba3714 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 30 Aug 2020 11:49:06 +0200 Subject: [PATCH 13/33] Collection tweaks --- .../tools/dotc/config/ScalaSettings.scala | 3 +- .../src/dotty/tools/dotc/core/Contexts.scala | 25 +- .../src/dotty/tools/dotc/core/Uniques.scala | 118 +++----- .../dotc/transform/Instrumentation.scala | 5 +- .../tools/dotc/util/GenericHashMap.scala | 49 ++- .../src/dotty/tools/dotc/util/HashMap.scala | 27 +- .../src/dotty/tools/dotc/util/HashSet.scala | 283 ++++++++---------- .../tools/dotc/util/IdentityHashMap.scala | 28 +- .../dotty/tools/dotc/util/MutableSet.scala | 6 + .../src/dotty/tools/dotc/util/Stats.scala | 1 - 10 files changed, 272 insertions(+), 273 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 00b3d73cc6d1..e8c7bf8343b0 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -177,8 +177,7 @@ class ScalaSettings extends Settings.SettingGroup { val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") - val YinstrumentClosures: Setting[Boolean] = BooleanSetting("-Yinstrument-closures", "Add instrumentation code that counts closure creations.") - val YinstrumentAllocations: Setting[Boolean] = BooleanSetting("-Yinstrument-allocations", "Add instrumentation code that counts allocations.") + val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") /** Dottydoc specific settings */ val siteRoot: Setting[String] = StringSetting( diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index d94bdddef6d0..801972aab3f6 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -843,25 +843,13 @@ object Contexts { // Types state /** A table for hash consing unique types */ - private[core] val uniques: util.HashSet[Type] = new util.HashSet[Type](Config.initialUniquesCapacity) { - override def hash(x: Type): Int = x.hash - override def isEqual(x: Type, y: Type) = x.eql(y) - } + private[core] val uniques: Uniques = Uniques() /** A table for hash consing unique applied types */ - private[dotc] val uniqueAppliedTypes: AppliedUniques = new AppliedUniques + private[dotc] val uniqueAppliedTypes: AppliedUniques = 
AppliedUniques() /** A table for hash consing unique named types */ - private[core] val uniqueNamedTypes: NamedTypeUniques = new NamedTypeUniques - - private def uniqueSets = Map( - "uniques" -> uniques, - "uniqueAppliedTypes" -> uniqueAppliedTypes, - "uniqueNamedTypes" -> uniqueNamedTypes) - - /** A map that associates label and size of all uniques sets */ - def uniquesSizes: Map[String, (Int, Int, Int)] = - uniqueSets.transform((_, s) => (s.size, s.accesses, s.misses)) + private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques() var emptyTypeBounds: TypeBounds = null var emptyWildcardBounds: WildcardType = null @@ -925,15 +913,16 @@ object Contexts { charArray = new Array[Char](charArray.length * 2) charArray - def reset(): Unit = { - for ((_, set) <- uniqueSets) set.clear() + def reset(): Unit = + uniques.clear() + uniqueAppliedTypes.clear() + uniqueNamedTypes.clear() emptyTypeBounds = null emptyWildcardBounds = null errorTypeMsg.clear() sources.clear() sourceNamed.clear() comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer - } // Test that access is single threaded diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala index d02a728bb81c..5c94103454c6 100644 --- a/compiler/src/dotty/tools/dotc/core/Uniques.scala +++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala @@ -4,93 +4,71 @@ package core import Types._, Contexts._, util.Stats._, Hashable._, Names._ import config.Config import Decorators._ -import util.HashSet +import util.{HashSet, Stats} + +class Uniques extends HashSet[Type](Config.initialUniquesCapacity): + override def hash(x: Type): Int = x.hash + override def isEqual(x: Type, y: Type) = x.eql(y) /** Defines operation `unique` for hash-consing types. * Also defines specialized hash sets for hash consing uniques of a specific type. * All sets offer a `enterIfNew` method which checks whether a type * with the given parts exists already and creates a new one if not. */ -object Uniques { +object Uniques: - private def recordCaching(tp: Type): Unit = recordCaching(tp.hash, tp.getClass) - private def recordCaching(h: Int, clazz: Class[?]): Unit = - if (h == NotCached) { - record("uncached-types") - record(s"uncached: $clazz") - } - else { - record("cached-types") - record(s"cached: $clazz") - } + private inline def recordCaching(tp: Type): Unit = recordCaching(tp.hash, tp.getClass) + private inline def recordCaching(h: Int, clazz: Class[?]): Unit = + if monitored then + if h == NotCached then + record("uncached-types") + record(s"uncached: $clazz") + else + record("cached-types") + record(s"cached: $clazz") - def unique[T <: Type](tp: T)(using Context): T = { - if (monitored) recordCaching(tp) - if (tp.hash == NotCached) tp - else if (monitored) { - val size = ctx.uniques.size - val result = ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T] - if (ctx.uniques.size > size) record(s"fresh unique ${tp.getClass}") - result - } - else ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T] - } - /* !!! 
DEBUG - ensuring ( - result => tp.toString == result.toString || { - println(s"cache mismatch; tp = $tp, cached = $result") - false - } - ) - */ + def unique[T <: Type](tp: T)(using Context): T = + recordCaching(tp) + if tp.hash == NotCached then tp + else ctx.uniques.put(tp).asInstanceOf[T] - final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity) with Hashable { + final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity) with Hashable: override def hash(x: NamedType): Int = x.hash - private def findPrevious(h: Int, prefix: Type, designator: Designator): NamedType = { - var e = findEntryByHash(h) - while (e != null) { - if ((e.prefix eq prefix) && (e.designator eq designator)) return e - e = nextEntryByHash(h) - } - e - } - - def enterIfNew(prefix: Type, designator: Designator, isTerm: Boolean)(using Context): NamedType = { + def enterIfNew(prefix: Type, designator: Designator, isTerm: Boolean)(using Context): NamedType = val h = doHash(null, designator, prefix) - if (monitored) recordCaching(h, classOf[NamedType]) + if monitored then recordCaching(h, classOf[NamedType]) def newType = if (isTerm) new CachedTermRef(prefix, designator, h) else new CachedTypeRef(prefix, designator, h) - if (h == NotCached) newType - else { - val r = findPrevious(h, prefix, designator) - if ((r ne null) && (r.isTerm == isTerm)) r else addEntryAfterScan(newType) - } - } - } + if h == NotCached then newType + else + Stats.record(statsItem("put")) + var idx = index(h) + var e = entryAt(idx) + while e != null do + if (e.prefix eq prefix) && (e.designator eq designator) && (e.isTerm == isTerm) then return e + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, newType) + end NamedTypeUniques - final class AppliedUniques extends HashSet[AppliedType](Config.initialUniquesCapacity) with Hashable { + final class AppliedUniques extends HashSet[AppliedType](Config.initialUniquesCapacity) with Hashable: override def hash(x: AppliedType): Int = x.hash - private def findPrevious(h: Int, tycon: Type, args: List[Type]): AppliedType = { - var e = findEntryByHash(h) - while (e != null) { - if ((e.tycon eq tycon) && e.args.eqElements(args)) return e - e = nextEntryByHash(h) - } - e - } - - def enterIfNew(tycon: Type, args: List[Type]): AppliedType = { + def enterIfNew(tycon: Type, args: List[Type]): AppliedType = val h = doHash(null, tycon, args) def newType = new CachedAppliedType(tycon, args, h) - if (monitored) recordCaching(h, classOf[CachedAppliedType]) - if (h == NotCached) newType - else { - val r = findPrevious(h, tycon, args) - if (r ne null) r else addEntryAfterScan(newType) - } - } - } -} + if monitored then recordCaching(h, classOf[CachedAppliedType]) + if h == NotCached then newType + else + Stats.record(statsItem("put")) + var idx = index(h) + var e = entryAt(idx) + while e != null do + if (e.tycon eq tycon) && e.args.eqElements(args) then return e + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, newType) + end AppliedUniques +end Uniques diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala index 97126df3a6d6..5f8f9c91faf3 100644 --- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala +++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala @@ -15,7 +15,7 @@ import Names._ import Constants.Constant -/** The phase is enabled if a -Yinstrument-... option is set. +/** The phase is enabled if the -Yinstrument option is set. 
* If enabled, it counts the number of closures or allocations for each source position. * It does this by generating a call to dotty.tools.dotc.util.Stats.doRecord. */ @@ -25,8 +25,7 @@ class Instrumentation extends MiniPhase { thisPhase => override def phaseName: String = "instrumentation" override def isEnabled(using Context) = - ctx.settings.YinstrumentClosures.value || - ctx.settings.YinstrumentAllocations.value + ctx.settings.Yinstrument.value private val namesOfInterest = List( "::", "+=", "toString", "newArray", "box", "toCharArray", diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 20dd78512739..398a560ce93c 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -23,8 +23,7 @@ object GenericHashMap: * once the number of elements reaches the table's size. */ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] - (protected val initialCapacity: Int = 8, - protected val capacityMultiple: Int = 3) extends MutableMap[Key, Value]: + (initialCapacity: Int = 8, capacityMultiple: Int = 3) extends MutableMap[Key, Value]: import GenericHashMap.DenseLimit protected var used: Int = _ @@ -61,12 +60,15 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] private def index(x: Int): Int = x & (table.length - 2) private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - private def nextIndex(idx: Int) = index(idx + 2) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] def lookup(key: Key): Value = + Stats.record(statsItem("lookup")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -76,6 +78,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] null def update(key: Key, value: Value): Unit = + Stats.record(statsItem("update")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -90,6 +93,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] if used > limit then growTable() def remove(key: Key): Unit = + Stats.record(statsItem("remove")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -110,6 +114,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] k = keyAt(idx) private def addOld(key: Key, value: AnyRef): Unit = + Stats.record(statsItem("re-enter")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -118,12 +123,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] table(idx) = key table(idx + 1) = value - protected def growTable(): Unit = - val oldTable = table - val newLength = - if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) - else table.length - allocate(newLength) + def copyFrom(oldTable: Array[AnyRef]): Unit = if isDense then Array.copy(oldTable, 0, table, 0, oldTable.length) else @@ -133,11 +133,32 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] if key != null then addOld(key, oldTable(idx + 1)) idx += 2 - def iterator: Iterator[(Key, Value)] = - for idx <- (0 until table.length by 2).iterator - if keyAt(idx) != null - yield (keyAt(idx), valueAt(idx)) + protected def growTable(): Unit = + val oldTable = table + val newLength = + if oldTable.length == DenseLimit then DenseLimit * 2 * 
roundToPower(capacityMultiple) + else table.length + allocate(newLength) + copyFrom(oldTable) + + private abstract class EntryIterator[T] extends Iterator[T]: + def entry(idx: Int): T + private var idx = 0 + def hasNext = + while idx < table.length && table(idx) == null do idx += 2 + idx < table.length + def next() = + require(hasNext) + try entry(idx) finally idx += 2 + + def iterator: Iterator[(Key, Value)] = new EntryIterator: + def entry(idx: Int) = (keyAt(idx), valueAt(idx)) override def toString: String = - iterator.map((k, v) => s"$k -> $v").mkString("LinearTable(", ", ", ")") + iterator.map((k, v) => s"$k -> $v").mkString("HashMap(", ", ", ")") + + protected def statsItem(op: String) = + val prefix = if isDense then "HashMap(dense)." else "HashMap." + val suffix = getClass.getSimpleName + s"$prefix$op $suffix" end GenericHashMap diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index cac47c4a6e51..59f8f4846afe 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -8,13 +8,10 @@ class HashMap[Key <: AnyRef, Value >: Null <: AnyRef] extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit - /** Hashcode, by default `System.identityHashCode`, but can be overriden */ final def hash(x: Key): Int = x.hashCode - - /** Equality, by default `eq`, but can be overridden */ final def isEqual(x: Key, y: Key): Boolean = x.equals(y) - // The following methdods are duplicated from GenericHashMap + // The following methods are duplicated from GenericHashMap // to avoid polymorphic dispatches /** Turn hashcode `x` into a table index */ @@ -27,6 +24,7 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] override def lookup(key: Key): Value = + Stats.record(statsItem("lookup")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -36,6 +34,7 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): null override def update(key: Key, value: Value): Unit = + Stats.record(statsItem("update")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -48,4 +47,24 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): table(idx + 1) = value used += 1 if used > limit then growTable() + + private def addOld(key: Key, value: AnyRef): Unit = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + idx = nextIndex(idx) + k = keyAt(idx) + table(idx) = key + table(idx + 1) = value + + override def copyFrom(oldTable: Array[AnyRef]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val key = oldTable(idx).asInstanceOf[Key] + if key != null then addOld(key, oldTable(idx + 1)) + idx += 2 end HashMap diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index d172fbbbcfd0..79ba6a158daa 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -1,44 +1,41 @@ package dotty.tools.dotc.util +object HashSet: + + /** The number of elements up to which dense packing is used. 
+ * If the number of elements reaches `DenseLimit` a hash table is used instead + */ + inline val DenseLimit = 8 + /** A hash set that allows some privileged protected access to its internals * @param initialCapacity Indicates the initial number of slots in the hash table. * The actual number of slots is always a power of 2, so the * initial size of the table will be the smallest power of two * that is equal or greater than the given `initialCapacity`. - * @param loadFactor The maximum fraction of used elements relative to capacity. - * The hash table will be re-sized once the number of elements exceeds - * the current size of the hash table multiplied by loadFactor. - * With the defaults given, the first resize of the table happens once the number of elements - * grows beyond 16. + * Minimum value is 4. +* @param capacityMultiple The minimum multiple of capacity relative to used elements. + * The hash table will be re-sized once the number of elements + * multiplied by capacityMultiple exceeds the current size of the hash table. + * However, a table of size up to DenseLimit will be re-sized only + * once the number of elements reaches the table's size. */ -class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 16, loadFactor: Float = 0.25f) extends MutableSet[T] { +class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: Int = 4) extends MutableSet[T] { + import HashSet.DenseLimit + private var used: Int = _ private var limit: Int = _ private var table: Array[AnyRef] = _ - private def roundToPower(n: Int) = - if Integer.bitCount(n) == 1 then n - else - def recur(n: Int): Int = - if n == 1 then 2 - else recur(n >>> 1) << 1 - recur(n) - - protected def isEqual(x: T, y: T): Boolean = x.equals(y) - - // Counters for Stats - var accesses: Int = 0 - var misses: Int = 0 - clear() - /** The number of elements in the set */ - def size: Int = used + private def allocate(capacity: Int) = + table = new Array[AnyRef](capacity) + limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple - private def allocate(size: Int) = { - table = new Array[AnyRef](size) - limit = (size * loadFactor).toInt - } + private def roundToPower(n: Int) = + if n < 4 then 4 + else if Integer.bitCount(n) == 1 then n + else 1 << (32 - Integer.numberOfLeadingZeros(n)) /** Remove all elements from this set and set back to initial configuration */ def clear(): Unit = { @@ -46,154 +43,122 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 16, loadFactor: Float allocate(roundToPower(initialCapacity)) } - /** Turn hashcode `x` into a table index */ - private def index(x: Int): Int = x & (table.length - 1) + /** The number of elements in the set */ + def size: Int = used - /** Hashcode, can be overridden */ - def hash(x: T): Int = x.hashCode + protected def isDense = limit < DenseLimit - private def entryAt(idx: Int) = table(idx).asInstanceOf[T] + /** Hashcode, by defualt `x.hashCode`, can be overridden */ + protected def hash(x: T): Int = x.hashCode - /** Find entry such that `isEqual(x, entry)`. If it exists, return it. - * If not, enter `x` in set and return `x`. 
- */ - def findEntryOrUpdate(x: T): T = { - if (Stats.enabled) accesses += 1 - var h = index(hash(x)) - var entry = entryAt(h) - while (entry ne null) { - if (isEqual(x, entry)) return entry - if (Stats.enabled) misses += 1 - h = index(h + 1) - entry = entryAt(h) - } - addEntryAt(h, x) - } + /** Hashcode, by default `equals`, can be overridden */ + protected def isEqual(x: T, y: T): Boolean = x.equals(y) + + /** Turn hashcode `x` into a table index */ + protected def index(x: Int): Int = x & (table.length - 1) + + protected def firstIndex(x: T) = if isDense then 0 else index(hash(x)) + protected def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 1) + + protected def entryAt(idx: Int) = table(idx).asInstanceOf[T] + + def lookup(x: T): T = + Stats.record(statsItem("lookup")) + var idx = firstIndex(x) + var e = entryAt(idx) + while e != null do + if isEqual(e, x) then return e + idx = nextIndex(idx) + e = entryAt(idx) + null - /** Add entry at `x` at index `idx` */ - private def addEntryAt(idx: Int, x: T) = { +/** Add entry at `x` at index `idx` */ + protected def addEntryAt(idx: Int, x: T): T = + Stats.record(statsItem("addEntryAt")) table(idx) = x used += 1 - if (used > limit) growTable() + if used > limit then growTable() x - } - /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */ - def lookup(x: T): T = { - if (Stats.enabled) accesses += 1 - var h = index(hash(x)) - var entry = entryAt(h) - while ((entry ne null) && !isEqual(x, entry)) { - if (Stats.enabled) misses += 1 - h = index(h + 1) - entry = entryAt(h) - } - entry.asInstanceOf[T] - } - - private var rover: Int = -1 + def put(x: T): T = + Stats.record(statsItem("put")) + var idx = firstIndex(x) + var e = entryAt(idx) + while e != null do + if isEqual(e, x) then return e + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, x) - /** Privileged access: Find first entry with given hashcode */ - protected def findEntryByHash(hashCode: Int): T = { - rover = index(hashCode) - nextEntryByHash(hashCode) - } - - /** Privileged access: Find next entry with given hashcode. Needs to immediately - * follow a `findEntryByhash` or `nextEntryByHash` operation. - */ - protected def nextEntryByHash(hashCode: Int): T = { - if (Stats.enabled) accesses += 1 - var entry = table(rover) - while (entry ne null) { - rover = index(rover + 1) - if (hash(entry.asInstanceOf[T]) == hashCode) return entry.asInstanceOf[T] - if (Stats.enabled) misses += 1 - entry = table(rover) - } - null - } - - /** Privileged access: Add entry `x` at the last position where an unsuccsessful - * `findEntryByHash` or `nextEntryByhash` operation returned. Needs to immediately - * follow a `findEntryByhash` or `nextEntryByHash` operation that was unsuccessful, - * i.e. that returned `null`. 
- */ - protected def addEntryAfterScan(x: T): T = addEntryAt(rover, x) - - private def addOldEntry(x: T): Unit = { - var h = index(hash(x)) - var entry = entryAt(h) - while (entry ne null) { - h = index(h + 1) - entry = entryAt(h) - } - table(h) = x - } - - private def growTable(): Unit = { - val oldtable = table - allocate(table.length * 2) - var i = 0 - while (i < oldtable.length) { - val entry = oldtable(i) - if (entry ne null) addOldEntry(entry.asInstanceOf[T]) - i += 1 - } - } - - /** Add entry `x` to set */ - def += (x: T): Unit = { - if (Stats.enabled) accesses += 1 - var h = index(hash(x)) - var entry = entryAt(h) - while (entry ne null) { - if (isEqual(x, entry)) return - if (Stats.enabled) misses += 1 - h = index(h + 1) - entry = entryAt(h) - } - table(h) = x - used += 1 - if (used > (table.length >> 2)) growTable() - } + def +=(x: T): Unit = put(x) def -= (x: T): Unit = - if (Stats.enabled) accesses += 1 - var h = index(hash(x)) - var entry = entryAt(h) - while entry != null do - if isEqual(x, entry) then - var hole = h + Stats.record(statsItem("remove")) + var idx = firstIndex(x) + var e = entryAt(idx) + while e != null do + if isEqual(e, x) then + var hole = idx while - h = index(h + 1) - entry = entryAt(h) - entry != null && index(hash(entry)) != h + idx = nextIndex(idx) + e = entryAt(idx) + e != null && (isDense || index(hash(e)) != idx) do - table(hole) = entry - hole = h + table(hole) = e + hole = idx table(hole) = null used -= 1 return - h = index(h + 1) - entry = entryAt(h) - end -= - - /** Add all entries in `xs` to set */ - def ++= (xs: IterableOnce[T]): Unit = - xs.iterator.foreach(this += _) - - /** The iterator of all elements in the set */ - def iterator: Iterator[T] = new Iterator[T] { - private var i = 0 - def hasNext: Boolean = { - while (i < table.length && (table(i) eq null)) i += 1 - i < table.length - } - def next(): T = - if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] } - else null - } + idx = nextIndex(idx) + e = entryAt(idx) + + private def addOld(x: T) = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(x) + var e = entryAt(idx) + while e != null do + idx = nextIndex(idx) + e = entryAt(idx) + table(idx) = x - override def toString(): String = "HashSet(%d / %d)".format(used, table.length) + def copyFrom(oldTable: Array[AnyRef]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val e = oldTable(idx).asInstanceOf[T] + if e != null then addOld(e) + idx += 1 + + protected def growTable(): Unit = + val oldTable = table + val newLength = + if oldTable.length == DenseLimit then DenseLimit * roundToPower(capacityMultiple) + else table.length * 2 + allocate(newLength) + copyFrom(oldTable) + + abstract class EntryIterator extends Iterator[T]: + def entry(idx: Int): T + private var idx = 0 + def hasNext = + while idx < table.length && table(idx) == null do idx += 1 + idx < table.length + def next() = + require(hasNext) + try entry(idx) finally idx += 1 + + def iterator: Iterator[T] = new EntryIterator(): + def entry(idx: Int) = entryAt(idx) + + override def toString: String = + iterator.mkString("HashSet(", ", ", ")") + + protected def statsItem(op: String) = + val prefix = if isDense then "HashSet(dense)." else "HashSet." 
+ val suffix = getClass.getSimpleName + s"$prefix$op $suffix" } diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index 0ec448054c8b..5f8e9725f753 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -14,8 +14,10 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): /** Equality, by default `eq`, but can be overridden */ final def isEqual(x: Key, y: Key): Boolean = x eq y - // The following methdods are duplicated from GenericHashMap - // to avoid polymorphic dispatches + // The following methods are duplicated from GenericHashMap + // to avoid polymorphic dispatches. + // Aside: It would be nice to have a @specialized annotation that does + // this automatically /** Turn hashcode `x` into a table index */ private def index(x: Int): Int = x & (table.length - 2) @@ -27,6 +29,7 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] override def lookup(key: Key): Value = + Stats.record(statsItem("lookup")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -36,6 +39,7 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): null override def update(key: Key, value: Value): Unit = + Stats.record(statsItem("update")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do @@ -48,4 +52,24 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): table(idx + 1) = value used += 1 if used > limit then growTable() + + private def addOld(key: Key, value: AnyRef): Unit = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + idx = nextIndex(idx) + k = keyAt(idx) + table(idx) = key + table(idx + 1) = value + + override def copyFrom(oldTable: Array[AnyRef]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val key = oldTable(idx).asInstanceOf[Key] + if key != null then addOld(key, oldTable(idx + 1)) + idx += 2 end IdentityHashMap diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index 12c762558f6e..f62e63e89f1c 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -7,8 +7,14 @@ abstract class MutableSet[T >: Null] { /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */ def lookup(x: T): T /* | Null */ + /** Add element `x` to the set */ def +=(x: T): Unit + /** Like `+=` but return existing element equal to `x` of it exists, + * `x` itself otherwose. 
+ */ + def put(x: T): T + def clear(): Unit def size: Int diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index 7967b41ddb18..ecbf266866eb 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -58,7 +58,6 @@ import collection.mutable aggregate() println() println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n") - println(s"uniqueInfo (size, accesses, collisions): ${ctx.base.uniquesSizes}") } } else op From f09a3368f29e2d736f270c7d2f1f92eef9b24c72 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 30 Aug 2020 16:01:32 +0200 Subject: [PATCH 14/33] Increase loadFactor Reduce capacityMultiple for HashSets from 4 to 2 and for HashMaps from 3 to 2. We observed not a large increase in misses, for half the space used, and consequently half as many re-enter operations. --- compiler/src/dotty/tools/dotc/config/Config.scala | 6 +++--- compiler/src/dotty/tools/dotc/core/Uniques.scala | 4 ++-- compiler/src/dotty/tools/dotc/util/HashSet.scala | 6 ++++-- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 19724f28698f..26dbbbe145a4 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -181,10 +181,10 @@ object Config { /** If set, enables tracing */ inline val tracingEnabled = false - /** Initial capacity of uniques HashMap. - * Note: This MUST BE a power of two to work with util.HashSet + /** Initial capacity of the uniques HashMap. + * Note: This should be a power of two to work with util.HashSet */ - inline val initialUniquesCapacity = 65536 + inline val initialUniquesCapacity = 0x8000 /** How many recursive calls to NamedType#underlying are performed before logging starts. */ inline val LogPendingUnderlyingThreshold = 50 diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala index 5c94103454c6..5b1ae1a499e9 100644 --- a/compiler/src/dotty/tools/dotc/core/Uniques.scala +++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala @@ -32,7 +32,7 @@ object Uniques: if tp.hash == NotCached then tp else ctx.uniques.put(tp).asInstanceOf[T] - final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity) with Hashable: + final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity * 4) with Hashable: override def hash(x: NamedType): Int = x.hash def enterIfNew(prefix: Type, designator: Designator, isTerm: Boolean)(using Context): NamedType = @@ -53,7 +53,7 @@ object Uniques: addEntryAt(idx, newType) end NamedTypeUniques - final class AppliedUniques extends HashSet[AppliedType](Config.initialUniquesCapacity) with Hashable: + final class AppliedUniques extends HashSet[AppliedType](Config.initialUniquesCapacity * 2) with Hashable: override def hash(x: AppliedType): Int = x.hash def enterIfNew(tycon: Type, args: List[Type]): AppliedType = diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index 79ba6a158daa..bf194d959a74 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -19,7 +19,7 @@ object HashSet: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. 
*/ -class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: Int = 4) extends MutableSet[T] { +class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { import HashSet.DenseLimit private var used: Int = _ @@ -57,6 +57,8 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I /** Turn hashcode `x` into a table index */ protected def index(x: Int): Int = x & (table.length - 1) + protected def currentTable: Array[AnyRef] = table + protected def firstIndex(x: T) = if isDense then 0 else index(hash(x)) protected def nextIndex(idx: Int) = Stats.record(statsItem("miss")) @@ -136,7 +138,7 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I protected def growTable(): Unit = val oldTable = table val newLength = - if oldTable.length == DenseLimit then DenseLimit * roundToPower(capacityMultiple) + if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) else table.length * 2 allocate(newLength) copyFrom(oldTable) From c5de2b2fd7794b4d5465584b175a53b1b0da9a51 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 30 Aug 2020 18:40:05 +0200 Subject: [PATCH 15/33] Fix HashMap/Set remove operation --- .../tools/dotc/util/GenericHashMap.scala | 14 ++-- .../src/dotty/tools/dotc/util/HashSet.scala | 8 +- tests/run-with-compiler/maptest.scala | 74 +++++++++++++++++++ 3 files changed, 89 insertions(+), 7 deletions(-) create mode 100644 tests/run-with-compiler/maptest.scala diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 398a560ce93c..5e26e590cb9c 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -23,7 +23,7 @@ object GenericHashMap: * once the number of elements reaches the table's size. 
*/ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] - (initialCapacity: Int = 8, capacityMultiple: Int = 3) extends MutableMap[Key, Value]: + (initialCapacity: Int, capacityMultiple: Int) extends MutableMap[Key, Value]: import GenericHashMap.DenseLimit protected var used: Int = _ @@ -102,11 +102,15 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] while idx = nextIndex(idx) k = keyAt(idx) - k != null && (isDense || index(hash(k)) != idx) + k != null do - table(hole) = k - table(hole + 1) = valueAt(idx) - hole = idx + if isDense + || index(hole - index(hash(k))) < limit * 2 + // hash(k) is then logically at or after hole; can be moved forward to fill hole + then + table(hole) = k + table(hole + 1) = valueAt(idx) + hole = idx table(hole) = null used -= 1 return diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index bf194d959a74..151f983fcb7a 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -108,8 +108,12 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I e = entryAt(idx) e != null && (isDense || index(hash(e)) != idx) do - table(hole) = e - hole = idx + if isDense + || index(hole - index(hash(k))) < limit + // hash(k) is then logically at or after hole; can be moved forward to fill hole + then + table(hole) = e + hole = idx table(hole) = null used -= 1 return diff --git a/tests/run-with-compiler/maptest.scala b/tests/run-with-compiler/maptest.scala new file mode 100644 index 000000000000..41f2da34551b --- /dev/null +++ b/tests/run-with-compiler/maptest.scala @@ -0,0 +1,74 @@ +trait Generator[+T]: + self => + def generate: T + def map[S](f: T => S) = new Generator[S]: + def generate: S = f(self.generate) + def flatMap[S](f: T => Generator[S]) = new Generator[S]: + def generate: S = f(self.generate).generate + +object Generator: + val NumLimit = 300 + val Iterations = 10000 + + given integers as Generator[Int]: + val rand = new java.util.Random + def generate = rand.nextInt() + + given booleans as Generator[Boolean] = + integers.map(x => x > 0) + + def range(end: Int): Generator[Int] = + integers.map(x => (x % end).abs) + + enum Op: + case Lookup, Update, Remove + export Op._ + + given ops as Generator[Op] = + range(10).map { + case 0 | 1 | 2 | 3 => Lookup + case 4 | 5 | 6 | 7 => Update + case 8 | 9 => Remove + } + + val nums: Generator[Integer] = range(NumLimit).map(Integer(_)) + +@main def Test = + import Generator._ + + val map1 = dotty.tools.dotc.util.HashMap[Integer, Integer]() + val map2 = scala.collection.mutable.HashMap[Integer, Integer]() + + def checkSame() = + assert(map1.size == map2.size) + for (k, v) <- map1.iterator do + assert(map2.get(k) == Some(v)) + for (k, v) <- map2.iterator do + assert(Option(map1.lookup(k)) == Some(v)) + + def lookupTest(num: Integer) = + //println(s"test lookup $num") + val res1 = Option(map1.lookup(num)) + val res2 = map2.get(num) + assert(res1 == res2) + + def updateTest(num: Integer) = + //println(s"test update $num") + lookupTest(num) + map1(num) = num + map2(num) = num + checkSame() + + def removeTest(num: Integer) = + //println(s"test remove $num") + map1.remove(num) + map2.remove(num) + checkSame() + + for i <- 0 until Iterations do + //if i % 1000 == 0 then println(map1.size) + val num = nums.generate + Generator.ops.generate match + case Lookup => lookupTest(num) + case Update => updateTest(num) + case Remove => removeTest(num) From 
435820a6da3d20d134b43bed296bc986e8552e1a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 30 Aug 2020 18:47:32 +0200 Subject: [PATCH 16/33] Tuning of HashMap operations - Increase load factor to 0.5 -- the miss rate seems to be still OK (around one miss per hit or better) while re-sizing costs and go down. - Don't lose lowest hashCode bit. --- .../src/dotty/tools/dotc/util/GenericHashMap.scala | 2 +- compiler/src/dotty/tools/dotc/util/HashMap.scala | 14 ++++++++++---- .../dotty/tools/dotc/util/IdentityHashMap.scala | 14 +++++++++----- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 5e26e590cb9c..8f406bc06971 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -56,7 +56,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] /** Equality, to be implemented in subclass */ protected def isEqual(x: Key, y: Key): Boolean - /** Turn hashcode `x` into a table index */ + /** Turn successor index or hash code `x` into a table index */ private def index(x: Int): Int = x & (table.length - 2) private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index 59f8f4846afe..3747fe3a9427 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -4,21 +4,27 @@ package dotty.tools.dotc.util * as comparison */ class HashMap[Key <: AnyRef, Value >: Null <: AnyRef] - (initialCapacity: Int = 8, capacityMultiple: Int = 3) + (initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit - final def hash(x: Key): Int = x.hashCode + /** Hashcode is left-shifted by 1, so lowest bit is not lost + * when taking the index. + */ + final def hash(x: Key): Int = x.hashCode << 1 + final def isEqual(x: Key, y: Key): Boolean = x.equals(y) // The following methods are duplicated from GenericHashMap // to avoid polymorphic dispatches - /** Turn hashcode `x` into a table index */ + /** Turn successor index or hash code `x` into a table index */ private def index(x: Int): Int = x & (table.length - 2) private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - private def nextIndex(idx: Int) = index(idx + 2) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index 5f8e9725f753..86f9ebeb823f 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -4,12 +4,14 @@ package dotty.tools.dotc.util * as comparison. 
*/ class IdentityHashMap[Key <: AnyRef, Value >: Null <: AnyRef] - (initialCapacity: Int = 8, capacityMultiple: Int = 3) + (initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit - /** Hashcode, by default `System.identityHashCode`, but can be overriden */ - final def hash(x: Key): Int = System.identityHashCode(x) + /** Hashcode is identityHashCode left-shifted by 1, so lowest bit is not lost + * when taking the index. + */ + final def hash(x: Key): Int = System.identityHashCode(x) << 1 /** Equality, by default `eq`, but can be overridden */ final def isEqual(x: Key, y: Key): Boolean = x eq y @@ -19,11 +21,13 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): // Aside: It would be nice to have a @specialized annotation that does // this automatically - /** Turn hashcode `x` into a table index */ + /** Turn successor index or hash code `x` into a table index */ private def index(x: Int): Int = x & (table.length - 2) private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - private def nextIndex(idx: Int) = index(idx + 2) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] From 7cf99982fedb125de91af2ed9ddf17bfa529578d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 30 Aug 2020 18:49:23 +0200 Subject: [PATCH 17/33] Implement NameTable in terms of HashSet Reduce synchronized region to updates. --- .../src/dotty/tools/dotc/core/Names.scala | 135 ++++++++---------- 1 file changed, 59 insertions(+), 76 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index 243153ad627a..a62c9dd97960 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -10,7 +10,7 @@ import StdNames.str import scala.internal.Chars.isIdentifierStart import collection.immutable import config.Config -import util.LinearMap +import util.{LinearMap, HashSet} import scala.annotation.internal.sharable @@ -262,10 +262,9 @@ object Names { } /** A simple name is essentially an interned string */ - final class SimpleName(val start: Int, val length: Int, @sharable private[Names] var next: SimpleName) extends TermName { - // `next` is @sharable because it is only modified in the synchronized block of termName. + final class SimpleName(val start: Int, val length: Int) extends TermName { - /** The n'th character */ + /** The n'th character */ def apply(n: Int): Char = chrs(start + n) /** A character in this name satisfies predicate `p` */ @@ -506,27 +505,70 @@ object Names { override def debugString: String = s"${underlying.debugString}[$info]" } + /** The term name represented by the empty string */ + val EmptyTermName: SimpleName = SimpleName(-1, 0) + // Nametable - private final val InitialHashSize = 0x8000 - private final val InitialNameSize = 0x20000 - private final val fillFactor = 0.7 + inline val InitialNameSize = 0x20000 /** Memory to store all names sequentially. */ - @sharable // because it's only mutated in synchronized block of termName + @sharable // because it's only mutated in synchronized block of enterIfNew private[dotty] var chrs: Array[Char] = new Array[Char](InitialNameSize) /** The number of characters filled. 
*/ - @sharable // because it's only mutated in synchronized block of termName + @sharable // because it's only mutated in synchronized block of enterIfNew private var nc = 0 - /** Hashtable for finding term names quickly. */ - @sharable // because it's only mutated in synchronized block of termName - private var table = new Array[SimpleName](InitialHashSize) + /** Make sure the capacity of the character array is at least `n` */ + private def ensureCapacity(n: Int) = + if n > chrs.length then + val newchrs = new Array[Char](chrs.length * 2) + chrs.copyToArray(newchrs) + chrs = newchrs + + private class NameTable extends HashSet[SimpleName](initialCapacity = 0x10000, capacityMultiple = 2): + import util.Stats + + override def hash(x: SimpleName) = hashValue(chrs, x.start, x.length) // needed for resize + override def isEqual(x: SimpleName, y: SimpleName) = ??? // not needed + + def enterIfNew(cs: Array[Char], offset: Int, len: Int): SimpleName = + Stats.record(statsItem("put")) + val table = currentTable + var idx = hashValue(cs, offset, len) & (table.length - 1) + var name = table(idx).asInstanceOf[SimpleName] + while name != null do + if name.length == len && Names.equals(name.start, cs, offset, len) then + return name + Stats.record(statsItem("miss")) + idx = (idx + 1) & (table.length - 1) + name = table(idx).asInstanceOf[SimpleName] + Stats.record(statsItem("addEntryAt")) + synchronized { + if (table eq currentTable) && table(idx) == null then + // Our previous unsynchronized computation of the next free index is still correct. + // This relies on the fact that table entries go from null to non-null, and then + // stay the same. Note that we do not need the table or the entry in it to be + // volatile since SimpleNames are immutable, and hence safely published. + // The same holds for the chrs array. We might miss before the synchronized + // on published characters but that would make name comparison false, which + // means we end up in the synchronized block here, where we get the correct state + name = SimpleName(nc, len) + ensureCapacity(nc + len) + Array.copy(cs, offset, chrs, nc, len) + nc += len + addEntryAt(idx, name) + else + enterIfNew(cs, offset, len) + } + + addEntryAt(0, EmptyTermName) + end NameTable - /** The number of defined names. */ - @sharable // because it's only mutated in synchronized block of termName - private var size = 1 + /** Hashtable for finding term names quickly. */ + @sharable // because it's only mutated in synchronized block of enterIfNew + private val nameTable = NameTable() /** The hash of a name made of from characters cs[offset..offset+len-1]. */ private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = { @@ -552,62 +594,8 @@ object Names { /** Create a term name from the characters in cs[offset..offset+len-1]. * Assume they are already encoded. */ - def termName(cs: Array[Char], offset: Int, len: Int): SimpleName = synchronized { - util.Stats.record("termName") - val h = hashValue(cs, offset, len) & (table.length - 1) - - /** Make sure the capacity of the character array is at least `n` */ - def ensureCapacity(n: Int) = - if (n > chrs.length) { - val newchrs = new Array[Char](chrs.length * 2) - chrs.copyToArray(newchrs) - chrs = newchrs - } - - /** Enter characters into chrs array. 
*/ - def enterChars(): Unit = { - ensureCapacity(nc + len) - var i = 0 - while (i < len) { - chrs(nc + i) = cs(offset + i) - i += 1 - } - nc += len - } - - /** Rehash chain of names */ - def rehash(name: SimpleName): Unit = - if (name != null) { - val oldNext = name.next - val h = hashValue(chrs, name.start, name.length) & (table.size - 1) - name.next = table(h) - table(h) = name - rehash(oldNext) - } - - /** Make sure the hash table is large enough for the given load factor */ - def incTableSize() = { - size += 1 - if (size.toDouble / table.size > fillFactor) { - val oldTable = table - table = new Array[SimpleName](table.size * 2) - for (i <- 0 until oldTable.size) rehash(oldTable(i)) - } - } - - val next = table(h) - var name = next - while (name ne null) { - if (name.length == len && equals(name.start, cs, offset, len)) - return name - name = name.next - } - name = new SimpleName(nc, len, next) - enterChars() - table(h) = name - incTableSize() - name - } + def termName(cs: Array[Char], offset: Int, len: Int): SimpleName = + nameTable.enterIfNew(cs, offset, len) /** Create a type name from the characters in cs[offset..offset+len-1]. * Assume they are already encoded. @@ -638,11 +626,6 @@ object Names { /** Create a type name from a string */ def typeName(s: String): TypeName = typeName(s.toCharArray, 0, s.length) - table(0) = new SimpleName(-1, 0, null) - - /** The term name represented by the empty string */ - val EmptyTermName: TermName = table(0) - /** The type name represented by the empty string */ val EmptyTypeName: TypeName = EmptyTermName.toTypeName From 09187820d7a04c907493fa982bf7ae640c31c2a9 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 30 Aug 2020 18:52:01 +0200 Subject: [PATCH 18/33] Correct comment --- compiler/src/dotty/tools/dotc/util/GenericHashMap.scala | 2 +- compiler/src/dotty/tools/dotc/util/HashSet.scala | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 8f406bc06971..3df9a38560f2 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -106,7 +106,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] do if isDense || index(hole - index(hash(k))) < limit * 2 - // hash(k) is then logically at or after hole; can be moved forward to fill hole + // hash(k) is then logically at or before hole; can be moved forward to fill hole then table(hole) = k table(hole + 1) = valueAt(idx) diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index 151f983fcb7a..3d7513069507 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -109,8 +109,8 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I e != null && (isDense || index(hash(e)) != idx) do if isDense - || index(hole - index(hash(k))) < limit - // hash(k) is then logically at or after hole; can be moved forward to fill hole + || index(hole - index(hash(e))) < limit + // hash(k) is then logically at or before hole; can be moved forward to fill hole then table(hole) = e hole = idx From c8e13d212672da7f364222d3a9369ee8855b5ed4 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 31 Aug 2020 11:49:03 +0200 Subject: [PATCH 19/33] Avoid use @volatile in toTypeName --- .../src/dotty/tools/dotc/core/Names.scala | 27 +++++++++---------- 
1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index a62c9dd97960..d5e538abeac0 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -165,16 +165,15 @@ object Names { override def asTermName: TermName = this @sharable // because it is only modified in the synchronized block of toTypeName. - @volatile private var _typeName: TypeName = null + private var myTypeName: TypeName = null + // Note: no @volatile needed since type names are immutable and therefore safely published - override def toTypeName: TypeName = { - if (_typeName == null) + override def toTypeName: TypeName = + if myTypeName == null then synchronized { - if (_typeName == null) - _typeName = new TypeName(this) + if myTypeName == null then myTypeName = new TypeName(this) } - _typeName - } + myTypeName override def likeSpaced(name: Name): TermName = name.toTermName @@ -535,25 +534,25 @@ object Names { def enterIfNew(cs: Array[Char], offset: Int, len: Int): SimpleName = Stats.record(statsItem("put")) - val table = currentTable - var idx = hashValue(cs, offset, len) & (table.length - 1) - var name = table(idx).asInstanceOf[SimpleName] + val myTable = currentTable // could be outdated under parallel execution + var idx = hashValue(cs, offset, len) & (myTable.length - 1) + var name = myTable(idx).asInstanceOf[SimpleName] while name != null do if name.length == len && Names.equals(name.start, cs, offset, len) then return name Stats.record(statsItem("miss")) - idx = (idx + 1) & (table.length - 1) - name = table(idx).asInstanceOf[SimpleName] + idx = (idx + 1) & (myTable.length - 1) + name = myTable(idx).asInstanceOf[SimpleName] Stats.record(statsItem("addEntryAt")) synchronized { - if (table eq currentTable) && table(idx) == null then + if (myTable eq currentTable) && myTable(idx) == null then // Our previous unsynchronized computation of the next free index is still correct. // This relies on the fact that table entries go from null to non-null, and then // stay the same. Note that we do not need the table or the entry in it to be // volatile since SimpleNames are immutable, and hence safely published. // The same holds for the chrs array. We might miss before the synchronized // on published characters but that would make name comparison false, which - // means we end up in the synchronized block here, where we get the correct state + // means we end up in the synchronized block here, where we get the correct state. 
name = SimpleName(nc, len) ensureCapacity(nc + len) Array.copy(cs, offset, chrs, nc, len) From 799c0888bce611ade5f8054f635217074a8a2dd5 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 1 Sep 2020 15:01:15 +0200 Subject: [PATCH 20/33] Use IdentityHashMaps in pickler --- .../tools/dotc/core/tasty/TreePickler.scala | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 241e31b568a2..5f664a0f7c17 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -44,19 +44,19 @@ class TreePickler(pickler: TastyPickler) { private val symRefs = Symbols.newMutableSymbolMap[Addr] private val forwardSymRefs = Symbols.newMutableSymbolMap[List[Addr]] - private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's not compatible with null + private val pickledTypes = util.IdentityHashMap[Type, AnyRef]() // Value type is really Addr, but that's not compatible with null /** A list of annotation trees for every member definition, so that later * parallel position pickling does not need to access and force symbols. */ - private val annotTrees = util.HashTable[untpd.MemberDef, mutable.ListBuffer[Tree]]() + private val annotTrees = util.IdentityHashMap[untpd.MemberDef, mutable.ListBuffer[Tree]]() /** A map from member definitions to their doc comments, so that later * parallel comment pickling does not need to access symbols of trees (which * would involve accessing symbols of named types and possibly changing phases * in doing so). */ - private val docStrings = util.HashTable[untpd.MemberDef, Comment]() + private val docStrings = util.IdentityHashMap[untpd.MemberDef, Comment]() def treeAnnots(tree: untpd.MemberDef): List[Tree] = val ts = annotTrees.lookup(tree) @@ -169,9 +169,9 @@ class TreePickler(pickler: TastyPickler) { def pickleType(tpe0: Type, richTypes: Boolean = false)(using Context): Unit = { val tpe = tpe0.stripTypeVar try { - val prev = pickledTypes.get(tpe) + val prev = pickledTypes.lookup(tpe) if (prev == null) { - pickledTypes.put(tpe, currentAddr) + pickledTypes(tpe) = currentAddr.asInstanceOf[AnyRef] pickleNewType(tpe, richTypes) } else { @@ -244,7 +244,7 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe) } case tpe: RecThis => writeByte(RECthis) - val binderAddr = pickledTypes.get(tpe.binder) + val binderAddr = pickledTypes.lookup(tpe.binder) assert(binderAddr != null, tpe.binder) writeRef(binderAddr.asInstanceOf[Addr]) case tpe: SkolemType => @@ -314,7 +314,7 @@ class TreePickler(pickler: TastyPickler) { } def pickleParamRef(tpe: ParamRef)(using Context): Boolean = { - val binder = pickledTypes.get(tpe.binder) + val binder = pickledTypes.lookup(tpe.binder) val pickled = binder != null if (pickled) { writeByte(PARAMtype) @@ -349,7 +349,7 @@ class TreePickler(pickler: TastyPickler) { docCtx <- ctx.docCtx comment <- docCtx.docstring(sym) do - docStrings.enter(mdef, comment) + docStrings(mdef) = comment } def pickleParam(tree: Tree)(using Context): Unit = { @@ -605,7 +605,7 @@ class TreePickler(pickler: TastyPickler) { else { val refineCls = refinements.head.symbol.owner.asClass registerDef(refineCls) - pickledTypes.put(refineCls.typeRef, currentAddr) + pickledTypes(refineCls.typeRef) = currentAddr.asInstanceOf[AnyRef] writeByte(REFINEDtpt) 
refinements.foreach(preRegister) withLength { pickleTree(parent); refinements.foreach(pickleTree) } @@ -757,7 +757,7 @@ class TreePickler(pickler: TastyPickler) { var treeBuf = annotTrees.lookup(mdef) if treeBuf == null then treeBuf = new mutable.ListBuffer[Tree] - annotTrees.enter(mdef, treeBuf) + annotTrees(mdef) = treeBuf treeBuf += ann.tree // ---- main entry points --------------------------------------- From 712cb068137b17b2571520f27f38ae0871c5908c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 1 Sep 2020 21:35:36 +0200 Subject: [PATCH 21/33] Cache staticRef Per-run cache of what is returned for a staticRef --- compiler/src/dotty/tools/dotc/Run.scala | 5 +++++ compiler/src/dotty/tools/dotc/core/Denotations.scala | 3 ++- compiler/src/dotty/tools/dotc/util/GenericHashMap.scala | 5 +++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index aacdb10295ca..9071ab4b3ea2 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -7,6 +7,8 @@ import Periods._ import Symbols._ import Types._ import Scopes._ +import Names.Name +import Denotations.Denotation import typer.Typer import typer.ImportInfo._ import Decorators._ @@ -116,6 +118,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** The source files of all late entered symbols, as a set */ private var lateFiles = mutable.Set[AbstractFile]() + /** A cache for static references to packages and classes */ + val staticRefs = util.IdentityHashMap[Name, Denotation](initialCapacity = 1024) + /** Actions that need to be performed at the end of the current compilation run */ private var finalizeActions = mutable.ListBuffer[() => Unit]() diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 21afdcc85af6..73e98461046e 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -1259,7 +1259,8 @@ object Denotations { } recurSimple(path.length, wrap) } - recur(path) + if ctx.run == null then recur(path) + else ctx.run.staticRefs.getOrElseUpdate(path, recur(path)) } diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 3df9a38560f2..3889255dd6cf 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -117,6 +117,11 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] idx = nextIndex(idx) k = keyAt(idx) + def getOrElseUpdate(key: Key, value: => Value): Value = + var v = lookup(key) + if v == null then v = value + v + private def addOld(key: Key, value: AnyRef): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) From 8a5cb060d8bcaceedf1c988fe7bde644c16aeb7b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Sep 2020 09:13:04 +0200 Subject: [PATCH 22/33] Drop bounds on HashMap Value type Make lookup return a Value | Null result instead, which makes it a bit safer to use. To support this well, forward port a casting method from the explicit-nulls branch. 
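For illustration, a minimal self-contained sketch of the `Value | Null` lookup style described above, written against a toy map rather than the compiler's GenericHashMap (the names NullableLookupSketch, ToyMap and demo are invented for this sketch; uncheckedNN mirrors the forward-ported cast):

    // Sketch, not compiler code: lookup returns Value | Null, and Null is
    // stripped with an unchecked cast only after an explicit null test.
    object NullableLookupSketch:
      extension [T](x: T | Null)
        inline def uncheckedNN: T = x.asInstanceOf[T]

      class ToyMap[Key, Value]:
        private var entries: List[(Key, Value)] = Nil
        def update(key: Key, value: Value): Unit = entries = (key, value) :: entries
        def lookup(key: Key): Value | Null =
          entries.find(_._1 == key) match
            case Some((_, v)) => v
            case None         => null

      def demo(): Unit =
        val m = ToyMap[String, Int]()
        m("answer") = 42
        val found: Int | Null = m.lookup("answer")
        if found != null then println(found.uncheckedNN + 1) // cast only after the check
        assert(m.lookup("missing") == null)                  // absence is a null result, not a bound on Value

Dropping the `Value >: Null` bound is what lets values such as Addr, which are not compatible with Null, be stored directly; absence is expressed in the result type of lookup instead of by constraining what the map may contain.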
--- compiler/src/dotty/tools/dotc/Compiler.scala | 2 +- .../tools/dotc/core/tasty/TreePickler.scala | 18 +++++------ .../tools/dotc/util/GenericHashMap.scala | 30 +++++++++++-------- .../src/dotty/tools/dotc/util/HashMap.scala | 19 +++--------- .../tools/dotc/util/IdentityHashMap.scala | 19 +++--------- .../dotty/tools/dotc/util/MutableMap.scala | 12 ++++---- compiler/src/dotty/tools/package.scala | 8 +++++ 7 files changed, 50 insertions(+), 58 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 8b51cb1a3ce2..f3310db45892 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -109,7 +109,7 @@ class Compiler { List(new Constructors, // Collect initialization code in primary constructors // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it new FunctionalInterfaces, // Rewrites closures to implement @specialized types of Functions. - new Instrumentation) :: // Count closure allocations under -Yinstrument-closures + new Instrumentation) :: // Count calls and allocations under -Yinstrument List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments // Note: in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here new ElimStaticThis, // Replace `this` references to static objects by global identifiers diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 5f664a0f7c17..7cbf5122a802 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -44,7 +44,7 @@ class TreePickler(pickler: TastyPickler) { private val symRefs = Symbols.newMutableSymbolMap[Addr] private val forwardSymRefs = Symbols.newMutableSymbolMap[List[Addr]] - private val pickledTypes = util.IdentityHashMap[Type, AnyRef]() // Value type is really Addr, but that's not compatible with null + private val pickledTypes = util.IdentityHashMap[Type, Addr]() /** A list of annotation trees for every member definition, so that later * parallel position pickling does not need to access and force symbols. 
@@ -169,14 +169,14 @@ class TreePickler(pickler: TastyPickler) { def pickleType(tpe0: Type, richTypes: Boolean = false)(using Context): Unit = { val tpe = tpe0.stripTypeVar try { - val prev = pickledTypes.lookup(tpe) + val prev: Addr | Null = pickledTypes.lookup(tpe) if (prev == null) { - pickledTypes(tpe) = currentAddr.asInstanceOf[AnyRef] + pickledTypes(tpe) = currentAddr pickleNewType(tpe, richTypes) } else { writeByte(SHAREDtype) - writeRef(prev.asInstanceOf[Addr]) + writeRef(prev.uncheckedNN) } } catch { @@ -244,9 +244,9 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe) } case tpe: RecThis => writeByte(RECthis) - val binderAddr = pickledTypes.lookup(tpe.binder) + val binderAddr: Addr | Null = pickledTypes.lookup(tpe.binder) assert(binderAddr != null, tpe.binder) - writeRef(binderAddr.asInstanceOf[Addr]) + writeRef(binderAddr.uncheckedNN) case tpe: SkolemType => pickleType(tpe.info) case tpe: RefinedType => @@ -314,11 +314,11 @@ class TreePickler(pickler: TastyPickler) { } def pickleParamRef(tpe: ParamRef)(using Context): Boolean = { - val binder = pickledTypes.lookup(tpe.binder) + val binder: Addr | Null = pickledTypes.lookup(tpe.binder) val pickled = binder != null if (pickled) { writeByte(PARAMtype) - withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) } + withLength { writeRef(binder.uncheckedNN); writeNat(tpe.paramNum) } } pickled } @@ -605,7 +605,7 @@ class TreePickler(pickler: TastyPickler) { else { val refineCls = refinements.head.symbol.owner.asClass registerDef(refineCls) - pickledTypes(refineCls.typeRef) = currentAddr.asInstanceOf[AnyRef] + pickledTypes(refineCls.typeRef) = currentAddr writeByte(REFINEDtpt) refinements.foreach(preRegister) withLength { pickleTree(parent); refinements.foreach(pickleTree) } diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 3889255dd6cf..5653faad1835 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc.util +package dotty.tools +package dotc.util object GenericHashMap: @@ -22,7 +23,7 @@ object GenericHashMap: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. 
*/ -abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] +abstract class GenericHashMap[Key <: AnyRef, Value] (initialCapacity: Int, capacityMultiple: Int) extends MutableMap[Key, Value]: import GenericHashMap.DenseLimit @@ -57,17 +58,20 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] protected def isEqual(x: Key, y: Key): Boolean /** Turn successor index or hash code `x` into a table index */ - private def index(x: Int): Int = x & (table.length - 2) + inline protected def index(x: Int): Int = x & (table.length - 2) - private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - private def nextIndex(idx: Int) = + inline protected def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + inline protected def nextIndex(idx: Int) = Stats.record(statsItem("miss")) index(idx + 2) - private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] - private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + inline protected def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + inline protected def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - def lookup(key: Key): Value = + inline protected def setTable(idx: Int, value: Value) = + table(idx) = value.asInstanceOf[AnyRef] + + def lookup(key: Key): Value | Null = Stats.record(statsItem("lookup")) var idx = firstIndex(key) var k = keyAt(idx) @@ -83,12 +87,12 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] var k = keyAt(idx) while k != null do if isEqual(k, key) then - table(idx + 1) = value + setTable(idx + 1, value) return idx = nextIndex(idx) k = keyAt(idx) table(idx) = key - table(idx + 1) = value + setTable(idx + 1, value) used += 1 if used > limit then growTable() @@ -109,7 +113,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] // hash(k) is then logically at or before hole; can be moved forward to fill hole then table(hole) = k - table(hole + 1) = valueAt(idx) + setTable(hole + 1, valueAt(idx)) hole = idx table(hole) = null used -= 1 @@ -118,9 +122,9 @@ abstract class GenericHashMap[Key <: AnyRef, Value >: Null <: AnyRef] k = keyAt(idx) def getOrElseUpdate(key: Key, value: => Value): Value = - var v = lookup(key) + var v: Value | Null = lookup(key) if v == null then v = value - v + v.uncheckedNN private def addOld(key: Key, value: AnyRef): Unit = Stats.record(statsItem("re-enter")) diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index 3747fe3a9427..799c9a237660 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** A specialized implementation of GenericHashMap with standard hashCode and equals * as comparison */ -class HashMap[Key <: AnyRef, Value >: Null <: AnyRef] +class HashMap[Key <: AnyRef, Value] (initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit @@ -18,18 +18,7 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): // The following methods are duplicated from GenericHashMap // to avoid polymorphic dispatches - /** Turn successor index or hash code `x` into a table index */ - private def index(x: Int): Int = x & (table.length - 2) - - private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - private def nextIndex(idx: Int) = - Stats.record(statsItem("miss")) - 
index(idx + 2) - - private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] - private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - - override def lookup(key: Key): Value = + override def lookup(key: Key): Value | Null = Stats.record(statsItem("lookup")) var idx = firstIndex(key) var k = keyAt(idx) @@ -45,12 +34,12 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): var k = keyAt(idx) while k != null do if isEqual(k, key) then - table(idx + 1) = value + setTable(idx + 1, value) return idx = nextIndex(idx) k = keyAt(idx) table(idx) = key - table(idx + 1) = value + setTable(idx + 1, value) used += 1 if used > limit then growTable() diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index 86f9ebeb823f..9da313259d14 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** A specialized implementation of GenericHashMap with identity hash and `eq` * as comparison. */ -class IdentityHashMap[Key <: AnyRef, Value >: Null <: AnyRef] +class IdentityHashMap[Key <: AnyRef, Value] (initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit @@ -21,18 +21,7 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): // Aside: It would be nice to have a @specialized annotation that does // this automatically - /** Turn successor index or hash code `x` into a table index */ - private def index(x: Int): Int = x & (table.length - 2) - - private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - private def nextIndex(idx: Int) = - Stats.record(statsItem("miss")) - index(idx + 2) - - private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] - private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - - override def lookup(key: Key): Value = + override def lookup(key: Key): Value | Null = Stats.record(statsItem("lookup")) var idx = firstIndex(key) var k = keyAt(idx) @@ -48,12 +37,12 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): var k = keyAt(idx) while k != null do if isEqual(k, key) then - table(idx + 1) = value + setTable(idx + 1, value) return idx = nextIndex(idx) k = keyAt(idx) table(idx) = key - table(idx + 1) = value + setTable(idx + 1, value) used += 1 if used > limit then growTable() diff --git a/compiler/src/dotty/tools/dotc/util/MutableMap.scala b/compiler/src/dotty/tools/dotc/util/MutableMap.scala index 6b4dad177554..0ad30e2db4e0 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableMap.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableMap.scala @@ -1,10 +1,11 @@ -package dotty.tools.dotc.util +package dotty.tools +package dotc.util /** A common class for lightweight mutable maps. 
*/ -abstract class MutableMap[Key <: AnyRef, Value >: Null <: AnyRef]: +abstract class MutableMap[Key <: AnyRef, Value]: - def lookup(x: Key): Value /* | Null */ + def lookup(x: Key): Value | Null def update(k: Key, v: Value): Unit @@ -16,5 +17,6 @@ abstract class MutableMap[Key <: AnyRef, Value >: Null <: AnyRef]: def iterator: Iterator[(Key, Value)] - def get(x: Key): Option[Value] = Option(lookup(x)) - + def get(x: Key): Option[Value] = lookup(x) match + case null => None + case v => Some(v.uncheckedNN) diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala index 0bf617895b2c..a26b2dfd5594 100644 --- a/compiler/src/dotty/tools/package.scala +++ b/compiler/src/dotty/tools/package.scala @@ -24,6 +24,14 @@ package object tools { def unsupported(methodName: String): Nothing = throw new UnsupportedOperationException(methodName) + /** Forward-ported from the explicit-nulls branch. + * Should be used when we know from the context that `x` is not null. + * Flow-typing under explicit nulls will automatically insert many necessary + * occurrences of uncheckedNN. + */ + extension [T](x: T | Null) + inline def uncheckedNN: T = x.asInstanceOf[T] + object resultWrapper { opaque type WrappedResult[T] = T private[tools] def unwrap[T](x: WrappedResult[T]): T = x From abeb4e35f66bdcf610e171f6e077e4d5da2814a2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Sep 2020 09:29:39 +0200 Subject: [PATCH 23/33] Drop bounds on HashMap Key type --- .../tools/dotc/util/GenericHashMap.scala | 36 ++++++++++--------- .../src/dotty/tools/dotc/util/HashMap.scala | 30 +++++++++++----- .../tools/dotc/util/IdentityHashMap.scala | 31 +++++++++++----- .../dotty/tools/dotc/util/MutableMap.scala | 2 +- 4 files changed, 64 insertions(+), 35 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 5653faad1835..ba7ffd4357b4 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -23,7 +23,7 @@ object GenericHashMap: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. 
*/ -abstract class GenericHashMap[Key <: AnyRef, Value] +abstract class GenericHashMap[Key, Value] (initialCapacity: Int, capacityMultiple: Int) extends MutableMap[Key, Value]: import GenericHashMap.DenseLimit @@ -58,18 +58,20 @@ abstract class GenericHashMap[Key <: AnyRef, Value] protected def isEqual(x: Key, y: Key): Boolean /** Turn successor index or hash code `x` into a table index */ - inline protected def index(x: Int): Int = x & (table.length - 2) + private def index(x: Int): Int = x & (table.length - 2) - inline protected def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - inline protected def nextIndex(idx: Int) = + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = Stats.record(statsItem("miss")) index(idx + 2) - inline protected def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] - inline protected def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - inline protected def setTable(idx: Int, value: Value) = - table(idx) = value.asInstanceOf[AnyRef] + private def setKey(idx: Int, key: Key) = + table(idx) = key.asInstanceOf[AnyRef] + private def setValue(idx: Int, value: Value) = + table(idx + 1) = value.asInstanceOf[AnyRef] def lookup(key: Key): Value | Null = Stats.record(statsItem("lookup")) @@ -87,12 +89,12 @@ abstract class GenericHashMap[Key <: AnyRef, Value] var k = keyAt(idx) while k != null do if isEqual(k, key) then - setTable(idx + 1, value) + setValue(idx, value) return idx = nextIndex(idx) k = keyAt(idx) - table(idx) = key - setTable(idx + 1, value) + setKey(idx, key) + setValue(idx, value) used += 1 if used > limit then growTable() @@ -112,8 +114,8 @@ abstract class GenericHashMap[Key <: AnyRef, Value] || index(hole - index(hash(k))) < limit * 2 // hash(k) is then logically at or before hole; can be moved forward to fill hole then - table(hole) = k - setTable(hole + 1, valueAt(idx)) + setKey(hole, k) + setValue(hole, valueAt(idx)) hole = idx table(hole) = null used -= 1 @@ -126,15 +128,15 @@ abstract class GenericHashMap[Key <: AnyRef, Value] if v == null then v = value v.uncheckedNN - private def addOld(key: Key, value: AnyRef): Unit = + private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do idx = nextIndex(idx) k = keyAt(idx) - table(idx) = key - table(idx + 1) = value + setKey(idx, key) + setValue(idx, value) def copyFrom(oldTable: Array[AnyRef]): Unit = if isDense then @@ -143,7 +145,7 @@ abstract class GenericHashMap[Key <: AnyRef, Value] var idx = 0 while idx < oldTable.length do val key = oldTable(idx).asInstanceOf[Key] - if key != null then addOld(key, oldTable(idx + 1)) + if key != null then addOld(key, oldTable(idx + 1).asInstanceOf[Value]) idx += 2 protected def growTable(): Unit = diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index 799c9a237660..7cf92817763a 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** A specialized implementation of GenericHashMap with standard hashCode and equals * as comparison */ -class HashMap[Key <: AnyRef, Value] +class HashMap[Key, Value] (initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashMap[Key, 
Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit @@ -17,6 +17,20 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): // The following methods are duplicated from GenericHashMap // to avoid polymorphic dispatches + private def index(x: Int): Int = x & (table.length - 2) + + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) + + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + + private def setKey(idx: Int, key: Key) = + table(idx) = key.asInstanceOf[AnyRef] + private def setValue(idx: Int, value: Value) = + table(idx + 1) = value.asInstanceOf[AnyRef] override def lookup(key: Key): Value | Null = Stats.record(statsItem("lookup")) @@ -34,24 +48,24 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): var k = keyAt(idx) while k != null do if isEqual(k, key) then - setTable(idx + 1, value) + setValue(idx, value) return idx = nextIndex(idx) k = keyAt(idx) - table(idx) = key - setTable(idx + 1, value) + setKey(idx, key) + setValue(idx, value) used += 1 if used > limit then growTable() - private def addOld(key: Key, value: AnyRef): Unit = + private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do idx = nextIndex(idx) k = keyAt(idx) - table(idx) = key - table(idx + 1) = value + setKey(idx, key) + setValue(idx, value) override def copyFrom(oldTable: Array[AnyRef]): Unit = if isDense then @@ -60,6 +74,6 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): var idx = 0 while idx < oldTable.length do val key = oldTable(idx).asInstanceOf[Key] - if key != null then addOld(key, oldTable(idx + 1)) + if key != null then addOld(key, oldTable(idx + 1).asInstanceOf[Value]) idx += 2 end HashMap diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala index 9da313259d14..f28fd0df6a47 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** A specialized implementation of GenericHashMap with identity hash and `eq` * as comparison. */ -class IdentityHashMap[Key <: AnyRef, Value] +class IdentityHashMap[Key, Value] (initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit @@ -14,13 +14,26 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): final def hash(x: Key): Int = System.identityHashCode(x) << 1 /** Equality, by default `eq`, but can be overridden */ - final def isEqual(x: Key, y: Key): Boolean = x eq y + final def isEqual(x: Key, y: Key): Boolean = x.asInstanceOf[AnyRef] eq y.asInstanceOf[AnyRef] // The following methods are duplicated from GenericHashMap // to avoid polymorphic dispatches. 
// Aside: It would be nice to have a @specialized annotation that does // this automatically + private def index(x: Int): Int = x & (table.length - 2) + + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) + + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + + private def setKey(idx: Int, key: Key) = table(idx) = key.asInstanceOf[AnyRef] + private def setValue(idx: Int, value: Value) = table(idx + 1) = value.asInstanceOf[AnyRef] + override def lookup(key: Key): Value | Null = Stats.record(statsItem("lookup")) var idx = firstIndex(key) @@ -37,24 +50,24 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): var k = keyAt(idx) while k != null do if isEqual(k, key) then - setTable(idx + 1, value) + setValue(idx, value) return idx = nextIndex(idx) k = keyAt(idx) - table(idx) = key - setTable(idx + 1, value) + setKey(idx, key) + setValue(idx, value) used += 1 if used > limit then growTable() - private def addOld(key: Key, value: AnyRef): Unit = + private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do idx = nextIndex(idx) k = keyAt(idx) - table(idx) = key - table(idx + 1) = value + setKey(idx, key) + setValue(idx, value) override def copyFrom(oldTable: Array[AnyRef]): Unit = if isDense then @@ -63,6 +76,6 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): var idx = 0 while idx < oldTable.length do val key = oldTable(idx).asInstanceOf[Key] - if key != null then addOld(key, oldTable(idx + 1)) + if key != null then addOld(key, oldTable(idx + 1).asInstanceOf[Value]) idx += 2 end IdentityHashMap diff --git a/compiler/src/dotty/tools/dotc/util/MutableMap.scala b/compiler/src/dotty/tools/dotc/util/MutableMap.scala index 0ad30e2db4e0..a15722cb4da2 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableMap.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableMap.scala @@ -3,7 +3,7 @@ package dotc.util /** A common class for lightweight mutable maps. */ -abstract class MutableMap[Key <: AnyRef, Value]: +abstract class MutableMap[Key, Value]: def lookup(x: Key): Value | Null From c75a555b6fd65ae714f10633912bd2df59e11537 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Sep 2020 09:45:15 +0200 Subject: [PATCH 24/33] Drop bounds on HashSet entry type --- compiler/src/dotty/tools/dotc/util/HashSet.scala | 11 ++++++----- compiler/src/dotty/tools/dotc/util/MutableSet.scala | 4 ++-- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index 3d7513069507..2a9ed940e143 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -19,7 +19,7 @@ object HashSet: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. 
*/ -class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { +class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { import HashSet.DenseLimit private var used: Int = _ @@ -65,8 +65,9 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I index(idx + 1) protected def entryAt(idx: Int) = table(idx).asInstanceOf[T] + protected def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef] - def lookup(x: T): T = + def lookup(x: T): T | Null = Stats.record(statsItem("lookup")) var idx = firstIndex(x) var e = entryAt(idx) @@ -79,7 +80,7 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I /** Add entry at `x` at index `idx` */ protected def addEntryAt(idx: Int, x: T): T = Stats.record(statsItem("addEntryAt")) - table(idx) = x + setEntry(idx, x) used += 1 if used > limit then growTable() x @@ -112,7 +113,7 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I || index(hole - index(hash(e))) < limit // hash(k) is then logically at or before hole; can be moved forward to fill hole then - table(hole) = e + setEntry(hole, e) hole = idx table(hole) = null used -= 1 @@ -127,7 +128,7 @@ class HashSet[T >: Null <: AnyRef](initialCapacity: Int = 8, capacityMultiple: I while e != null do idx = nextIndex(idx) e = entryAt(idx) - table(idx) = x + setEntry(idx, x) def copyFrom(oldTable: Array[AnyRef]): Unit = if isDense then diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index f62e63e89f1c..9b60e2bbe61b 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -2,10 +2,10 @@ package dotty.tools.dotc.util /** A common class for lightweight mutable sets. */ -abstract class MutableSet[T >: Null] { +abstract class MutableSet[T] { /** The entry in the set such that `isEqual(x, entry)`, or else `null`. 
*/ - def lookup(x: T): T /* | Null */ + def lookup(x: T): T | Null /** Add element `x` to the set */ def +=(x: T): Unit From fa7698e15f3d0aaa00c3209ad3a2d587cc325ea7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Sep 2020 11:07:15 +0200 Subject: [PATCH 25/33] Drop MutableSymbolMap as a separate class It's now an alias of util.IdentityHashMap[Symbol, _] --- .../backend/jvm/DottyBackendInterface.scala | 4 +- .../dotty/tools/backend/jvm/GenBCode.scala | 8 +-- .../tools/backend/jvm/scalaPrimitives.scala | 9 +-- .../tools/backend/sjs/JSPrimitives.scala | 9 +-- .../src/dotty/tools/dotc/core/Comments.scala | 8 +-- .../src/dotty/tools/dotc/core/Symbols.scala | 58 +------------------ .../tools/dotc/core/tasty/TreePickler.scala | 4 +- .../tools/dotc/transform/AccessProxies.scala | 7 ++- .../dotty/tools/dotc/transform/Bridges.scala | 2 +- .../dotc/transform/CompleteJavaEnums.scala | 9 +-- .../dotc/transform/ExtensionMethods.scala | 15 ++--- .../dotc/transform/NonLocalReturns.scala | 9 ++- .../dotc/transform/OverridingPairs.scala | 5 +- .../tools/dotc/transform/PatternMatcher.scala | 2 +- .../tools/dotc/transform/SuperAccessors.scala | 7 ++- .../tools/dotc/transform/TreeChecker.scala | 2 +- .../src/dotty/tools/dotc/typer/Inliner.scala | 6 +- .../tools/dotc/util/GenericHashMap.scala | 14 ++++- .../dotty/tools/dotc/util/MutableMap.scala | 16 ++--- .../dotty/tools/dotc/util/ReadOnlyMap.scala | 29 ++++++++++ compiler/src/dotty/tools/package.scala | 22 +++++-- 21 files changed, 116 insertions(+), 129 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index f983743281cc..665cf0d8c92c 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -22,7 +22,7 @@ import Symbols._ import Phases._ import dotty.tools.dotc.util -import dotty.tools.dotc.util.Spans +import dotty.tools.dotc.util.{Spans, ReadOnlyMap} import dotty.tools.dotc.report import Decorators._ @@ -36,7 +36,7 @@ import Names.TermName import Annotations.Annotation import Names.Name -class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: Map[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index e7b2684bb4cd..181e4c18f452 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -36,7 +36,7 @@ import dotty.tools.io._ class GenBCode extends Phase { def phaseName: String = GenBCode.name - private val superCallsMap = newMutableSymbolMap[Set[ClassSymbol]] + private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { val old = superCallsMap.getOrElse(sym, Set.empty) superCallsMap.update(sym, old + calls) @@ -51,10 +51,8 @@ class GenBCode extends Phase { } def run(using Context): Unit = - new GenBCodePipeline( - new DottyBackendInterface( - outputDir, superCallsMap.toMap - ) + GenBCodePipeline( + DottyBackendInterface(outputDir, superCallsMap) ).run(ctx.compilationUnit.tpdTree) 
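A standalone sketch of the pattern applied in the hunk above: the super-call registry keeps a mutable map internally and hands it to the backend through a read-only view, so the `.toMap` copy is no longer needed (ReadOnly, MutableView and Registry are invented names standing in for ReadOnlyMap, MutableSymbolMap and GenBCode):

    // Sketch only: publish a mutable map through a read-only trait so the
    // consumer needs no defensive copy. Invented names throughout.
    import scala.collection.mutable

    trait ReadOnly[K, V]:
      def get(key: K): Option[V]

    final class MutableView[K, V] extends ReadOnly[K, V]:
      private val underlying = mutable.HashMap.empty[K, V]
      def update(key: K, value: V): Unit = underlying(key) = value
      def get(key: K): Option[V] = underlying.get(key)

    final class Registry:
      private val superCalls = MutableView[String, Set[String]]()
      def register(cls: String, call: String): Unit =
        superCalls(cls) = superCalls.get(cls).getOrElse(Set.empty) + call
      // Hand the map out typed as the read-only interface: no copy is made.
      def view: ReadOnly[String, Set[String]] = superCalls

In the patch itself, MutableSymbolMap becomes an alias of util.IdentityHashMap[Symbol, _], which already implements the read-only interface via MutableMap extending ReadOnlyMap.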
diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala index 8b21afd8377a..e1b6d8d922f7 100644 --- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala @@ -9,6 +9,7 @@ import Names.TermName, StdNames._ import Types.{JavaArrayType, UnspecifiedErrorType, Type} import Symbols.{Symbol, NoSymbol} import dotc.report +import dotc.util.ReadOnlyMap import scala.annotation.threadUnsafe import scala.collection.immutable @@ -34,7 +35,7 @@ import scala.collection.immutable class DottyPrimitives(ictx: Context) { import dotty.tools.backend.ScalaPrimitivesOps._ - @threadUnsafe private lazy val primitives: immutable.Map[Symbol, Int] = init + @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init /** Return the code for the given symbol. */ def getPrimitive(sym: Symbol): Int = { @@ -118,12 +119,12 @@ class DottyPrimitives(ictx: Context) { } /** Initialize the primitive map */ - private def init: immutable.Map[Symbol, Int] = { + private def init: ReadOnlyMap[Symbol, Int] = { given Context = ictx import Symbols.defn - val primitives = Symbols.newMutableSymbolMap[Int] + val primitives = Symbols.MutableSymbolMap[Int](512) /** Add a primitive operation to the map */ def addPrimitive(s: Symbol, code: Int): Unit = { @@ -394,7 +395,7 @@ class DottyPrimitives(ictx: Context) { addPrimitives(DoubleClass, nme.UNARY_-, NEG) - primitives.toMap + primitives } def isPrimitive(sym: Symbol): Boolean = diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala index 382b8b9d9a14..1beb9bdf30aa 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala @@ -10,6 +10,7 @@ import Symbols._ import dotty.tools.dotc.ast.tpd._ import dotty.tools.backend.jvm.DottyPrimitives import dotty.tools.dotc.report +import dotty.tools.dotc.util.ReadOnlyMap import scala.collection.mutable @@ -55,7 +56,7 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { import JSPrimitives._ import dotty.tools.backend.ScalaPrimitivesOps._ - private lazy val jsPrimitives: Map[Symbol, Int] = initJSPrimitives(using ictx) + private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx) override def getPrimitive(sym: Symbol): Int = jsPrimitives.getOrElse(sym, super.getPrimitive(sym)) @@ -70,9 +71,9 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { jsPrimitives.contains(fun.symbol(using ictx)) || super.isPrimitive(fun) /** Initialize the primitive map */ - private def initJSPrimitives(using Context): Map[Symbol, Int] = { + private def initJSPrimitives(using Context): ReadOnlyMap[Symbol, Int] = { - val primitives = newMutableSymbolMap[Int] + val primitives = MutableSymbolMap[Int]() // !!! 
Code duplicate with DottyPrimitives /** Add a primitive operation to the map */ @@ -120,7 +121,7 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { addPrimitive(jsdefn.ReflectSelectable_selectDynamic, REFLECT_SELECTABLE_SELECTDYN) addPrimitive(jsdefn.ReflectSelectable_applyDynamic, REFLECT_SELECTABLE_APPLYDYN) - primitives.toMap + primitives } } diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala index 71037e19bc6e..0199e2474354 100644 --- a/compiler/src/dotty/tools/dotc/core/Comments.scala +++ b/compiler/src/dotty/tools/dotc/core/Comments.scala @@ -4,7 +4,7 @@ package core import ast.{ untpd, tpd } import Decorators._, Symbols._, Contexts._ -import util.SourceFile +import util.{SourceFile, ReadOnlyMap} import util.Spans._ import util.CommentParsing._ import util.Property.Key @@ -23,11 +23,11 @@ object Comments { */ class ContextDocstrings { - private val _docstrings: MutableSymbolMap[Comment] = newMutableSymbolMap + private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" val templateExpander: CommentExpander = new CommentExpander - def docstrings: Map[Symbol, Comment] = _docstrings.toMap + def docstrings: ReadOnlyMap[Symbol, Comment] = _docstrings def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) @@ -180,7 +180,7 @@ object Comments { protected def superComment(sym: Symbol)(using Context): Option[String] = allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "") - private val cookedDocComments = newMutableSymbolMap[String] + private val cookedDocComments = MutableSymbolMap[String]() /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by * missing sections of an inherited doc comment. 
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 7eaa1f411e07..44df4d433637 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -30,7 +30,7 @@ import reporting.Message import collection.mutable import io.AbstractFile import language.implicitConversions -import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos} +import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos, IdentityHashMap} import scala.collection.JavaConverters._ import scala.annotation.internal.sharable import config.Printers.typr @@ -495,61 +495,7 @@ object Symbols { /** The current class */ def currentClass(using Context): ClassSymbol = ctx.owner.enclosingClass.asClass - /* Mutable map from symbols any T */ - class MutableSymbolMap[T](private[Symbols] val value: java.util.IdentityHashMap[Symbol, T]) extends AnyVal { - - def apply(sym: Symbol): T = value.get(sym) - - def get(sym: Symbol): Option[T] = Option(value.get(sym)) - - def getOrElse[U >: T](sym: Symbol, default: => U): U = { - val v = value.get(sym) - if (v != null) v else default - } - - def getOrElseUpdate(sym: Symbol, op: => T): T = { - val v = value.get(sym) - if (v != null) v - else { - val v = op - assert(v != null) - value.put(sym, v) - v - } - } - - def update(sym: Symbol, x: T): Unit = { - assert(x != null) - value.put(sym, x) - } - def put(sym: Symbol, x: T): T = { - assert(x != null) - value.put(sym, x) - } - - def -=(sym: Symbol): Unit = value.remove(sym) - def remove(sym: Symbol): Option[T] = Option(value.remove(sym)) - - def contains(sym: Symbol): Boolean = value.containsKey(sym) - - def isEmpty: Boolean = value.isEmpty - - def clear(): Unit = value.clear() - - def filter(p: ((Symbol, T)) => Boolean): Map[Symbol, T] = - value.asScala.toMap.filter(p) - - def iterator: Iterator[(Symbol, T)] = value.asScala.iterator - - def keysIterator: Iterator[Symbol] = value.keySet().asScala.iterator - - def toMap: Map[Symbol, T] = value.asScala.toMap - - override def toString: String = value.asScala.toString() - } - - inline def newMutableSymbolMap[T]: MutableSymbolMap[T] = - new MutableSymbolMap(new java.util.IdentityHashMap[Symbol, T]()) + type MutableSymbolMap[T] = IdentityHashMap[Symbol, T] // ---- Factory methods for symbol creation ---------------------- // diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 7cbf5122a802..323ced731d1b 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -42,8 +42,8 @@ class TreePickler(pickler: TastyPickler) { import pickler.nameBuffer.nameIndex import tpd._ - private val symRefs = Symbols.newMutableSymbolMap[Addr] - private val forwardSymRefs = Symbols.newMutableSymbolMap[List[Addr]] + private val symRefs = Symbols.MutableSymbolMap[Addr](256) + private val forwardSymRefs = Symbols.MutableSymbolMap[List[Addr]]() private val pickledTypes = util.IdentityHashMap[Type, Addr]() /** A list of annotation trees for every member definition, so that later diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 31bfb9772bcb..321aadac4130 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package 
dotty.tools +package dotc package transform import core._ @@ -23,7 +24,7 @@ abstract class AccessProxies { import AccessProxies._ /** accessor -> accessed */ - private val accessedBy = newMutableSymbolMap[Symbol] + private val accessedBy = MutableSymbolMap[Symbol]() /** Given the name of an accessor, is the receiver of the call to accessed obtained * as a parameterer? @@ -35,7 +36,7 @@ abstract class AccessProxies { * So a second call of the same method will yield the empty list. */ private def accessorDefs(cls: Symbol)(using Context): Iterator[DefDef] = - for (accessor <- cls.info.decls.iterator; accessed <- accessedBy.remove(accessor)) yield + for (accessor <- cls.info.decls.iterator; accessed <- accessedBy.remove(accessor).toOption) yield polyDefDef(accessor.asTerm, tps => argss => { def numTypeParams = accessed.info match { case info: PolyType => info.paramNames.length diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index 8ba39ea3ad8a..038201d0afe6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -36,7 +36,7 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { private var toBeRemoved = immutable.Set[Symbol]() private val bridges = mutable.ListBuffer[Tree]() private val bridgesScope = newScope - private val bridgeTarget = newMutableSymbolMap[Symbol] + private val bridgeTarget = MutableSymbolMap[Symbol]() def bridgePosFor(member: Symbol): SrcPos = (if (member.owner == root && member.span.exists) member else root).srcPos diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala index 23e5d12622b4..d308cea9f6d7 100644 --- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala +++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import core._ @@ -115,13 +116,13 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => && (((cls.owner.name eq nme.DOLLAR_NEW) && cls.owner.isAllOf(Private|Synthetic)) || cls.owner.isAllOf(EnumCase)) && cls.owner.owner.linkedClass.derivesFromJavaEnum - private val enumCaseOrdinals: MutableSymbolMap[Int] = newMutableSymbolMap + private val enumCaseOrdinals = MutableSymbolMap[Int]() private def registerEnumClass(cls: Symbol)(using Context): Unit = - cls.children.zipWithIndex.foreach(enumCaseOrdinals.put) + cls.children.zipWithIndex.foreach(enumCaseOrdinals.update) private def ordinalFor(enumCase: Symbol): Int = - enumCaseOrdinals.remove(enumCase).get + enumCaseOrdinals.remove(enumCase).nn /** 1. If this is an enum class, add $name and $ordinal parameters to its * parameter accessors and pass them on to the java.lang.Enum constructor. 
diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index d2c772e472a5..546c4e18c633 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -140,24 +140,21 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete extensionMeth } - private val extensionDefs = newMutableSymbolMap[mutable.ListBuffer[Tree]] - // TODO: this is state and should be per-run + private val extensionDefs = MutableSymbolMap[mutable.ListBuffer[Tree]]() // todo: check that when transformation finished map is empty override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = - if (isDerivedValueClass(ctx.owner)) + if isDerivedValueClass(ctx.owner) then /* This is currently redundant since value classes may not wrap over other value classes anyway. checkNonCyclic(ctx.owner.pos, Set(), ctx.owner) */ tree - else if (ctx.owner.isStaticOwner) - extensionDefs remove tree.symbol.owner match { - case Some(defns) if defns.nonEmpty => - cpy.Template(tree)(body = tree.body ++ - defns.map(transformFollowing(_))) + else if ctx.owner.isStaticOwner then + extensionDefs.remove(tree.symbol.owner) match + case defns: mutable.ListBuffer[Tree] if defns.nonEmpty => + cpy.Template(tree)(body = tree.body ++ defns.map(transformFollowing(_))) case _ => tree - } else tree override def transformDefDef(tree: tpd.DefDef)(using Context): tpd.Tree = diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index 1653ef4acd3d..34a26867037e 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -35,7 +35,7 @@ class NonLocalReturns extends MiniPhase { nonLocalReturnControl.appliedTo(argtype) /** A hashmap from method symbols to non-local return keys */ - private val nonLocalReturnKeys = newMutableSymbolMap[TermSymbol] + private val nonLocalReturnKeys = MutableSymbolMap[TermSymbol]() /** Return non-local return key for given method */ private def nonLocalReturnKey(meth: Symbol)(using Context) = @@ -83,10 +83,9 @@ class NonLocalReturns extends MiniPhase { } override def transformDefDef(tree: DefDef)(using Context): Tree = - nonLocalReturnKeys.remove(tree.symbol) match { - case Some(key) => cpy.DefDef(tree)(rhs = nonLocalReturnTry(tree.rhs, key, tree.symbol)) - case _ => tree - } + nonLocalReturnKeys.remove(tree.symbol) match + case key: TermSymbol => cpy.DefDef(tree)(rhs = nonLocalReturnTry(tree.rhs, key, tree.symbol)) + case null => tree override def transformReturn(tree: Return)(using Context): Tree = if isNonLocalReturn(tree) then diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index c2e7b20fe14e..d46258df94b9 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -64,12 +64,11 @@ object OverridingPairs { decls } - private val subParents = { - val subParents = newMutableSymbolMap[BitSet] + private val subParents = + val subParents = MutableSymbolMap[BitSet]() for (bc <- base.info.baseClasses) subParents(bc) = BitSet(parents.indices.filter(parents(_).derivesFrom(bc)): _*) subParents - } private def hasCommonParentAsSubclass(cls1: Symbol, cls2: Symbol): Boolean = (subParents(cls1) 
intersect subParents(cls2)).nonEmpty diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index dc1c26299c88..012f694f7ee0 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -92,7 +92,7 @@ object PatternMatcher { /** A map from variable symbols to their defining trees * and from labels to their defining plans */ - private val initializer = newMutableSymbolMap[Tree] + private val initializer = MutableSymbolMap[Tree]() private def newVar(rhs: Tree, flags: FlagSet): TermSymbol = newSymbol(ctx.owner, PatMatStdBinderName.fresh(), Synthetic | Case | flags, diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index a2dd74ac9907..5e84def109bb 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import dotty.tools.dotc.ast.{Trees, tpd} @@ -59,7 +60,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { ctx.owner.enclosingClass != invalidEnclClass /** List buffers for new accessor definitions, indexed by class */ - private val accDefs = newMutableSymbolMap[mutable.ListBuffer[Tree]] + private val accDefs = MutableSymbolMap[mutable.ListBuffer[Tree]]() /** A super accessor call corresponding to `sel` */ private def superAccessorCall(sel: Select, mixName: Name = nme.EMPTY)(using Context) = { @@ -205,7 +206,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { def wrapTemplate(tree: Template)(op: Template => Template)(using Context): Template = { accDefs(currentClass) = new mutable.ListBuffer[Tree] val impl = op(tree) - val accessors = accDefs.remove(currentClass).get + val accessors = accDefs.remove(currentClass).nn if (accessors.isEmpty) impl else { val (params, rest) = impl.body span { diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 334161f582ed..62a7896074df 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -162,7 +162,7 @@ class TreeChecker extends Phase with SymTransformer { private val nowDefinedSyms = new mutable.HashSet[Symbol] private val patBoundSyms = new mutable.HashSet[Symbol] - private val everDefinedSyms = newMutableSymbolMap[untpd.Tree] + private val everDefinedSyms = MutableSymbolMap[untpd.Tree]() // don't check value classes after typer, as the constraint about constructors doesn't hold after transform override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 93c1b42b2ae6..cc43f0ddc79e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -21,7 +21,7 @@ import SymDenotations.SymDenotation import Inferencing.isFullyDefined import config.Printers.inlining import ErrorReporting.errorTree -import dotty.tools.dotc.util.{SimpleIdentityMap, SimpleIdentitySet, SourceFile, SourcePosition, SrcPos} +import dotty.tools.dotc.util.{SimpleIdentityMap, SimpleIdentitySet, IdentityHashMap, SourceFile, SourcePosition, SrcPos} import 
dotty.tools.dotc.parsing.Parsers.Parser import Nullables.{given _} @@ -1320,8 +1320,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { dropUnusedDefs(termBindings1.asInstanceOf[List[ValOrDefDef]], tree1) } else { - val refCount = newMutableSymbolMap[Int] - val bindingOfSym = newMutableSymbolMap[MemberDef] + val refCount = MutableSymbolMap[Int]() + val bindingOfSym = MutableSymbolMap[MemberDef]() def isInlineable(binding: MemberDef) = binding match { case ddef @ DefDef(_, Nil, Nil, _, _) => isElideableExpr(ddef.rhs) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index ba7ffd4357b4..060e0dc9ec14 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -98,12 +98,13 @@ abstract class GenericHashMap[Key, Value] used += 1 if used > limit then growTable() - def remove(key: Key): Unit = + def remove(key: Key): Value | Null = Stats.record(statsItem("remove")) var idx = firstIndex(key) var k = keyAt(idx) while k != null do if isEqual(k, key) then + val result = valueAt(idx) var hole = idx while idx = nextIndex(idx) @@ -119,13 +120,17 @@ abstract class GenericHashMap[Key, Value] hole = idx table(hole) = null used -= 1 - return + return result idx = nextIndex(idx) k = keyAt(idx) + null def getOrElseUpdate(key: Key, value: => Value): Value = var v: Value | Null = lookup(key) - if v == null then v = value + if v == null then + val v1 = value + v = v1 + update(key, v1) v.uncheckedNN private def addOld(key: Key, value: Value): Unit = @@ -169,6 +174,9 @@ abstract class GenericHashMap[Key, Value] def iterator: Iterator[(Key, Value)] = new EntryIterator: def entry(idx: Int) = (keyAt(idx), valueAt(idx)) + def keysIterator: Iterator[Key] = new EntryIterator: + def entry(idx: Int) = keyAt(idx) + override def toString: String = iterator.map((k, v) => s"$k -> $v").mkString("HashMap(", ", ", ")") diff --git a/compiler/src/dotty/tools/dotc/util/MutableMap.scala b/compiler/src/dotty/tools/dotc/util/MutableMap.scala index a15722cb4da2..ba912a312aea 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableMap.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableMap.scala @@ -3,20 +3,16 @@ package dotc.util /** A common class for lightweight mutable maps. */ -abstract class MutableMap[Key, Value]: - - def lookup(x: Key): Value | Null +abstract class MutableMap[Key, Value] extends ReadOnlyMap[Key, Value]: def update(k: Key, v: Value): Unit - def remove(k: Key): Unit + def remove(k: Key): Value | Null - def size: Int + def -=(k: Key): this.type = + remove(k) + this def clear(): Unit - def iterator: Iterator[(Key, Value)] - - def get(x: Key): Option[Value] = lookup(x) match - case null => None - case v => Some(v.uncheckedNN) + def getOrElseUpdate(key: Key, value: => Value): Value diff --git a/compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala b/compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala new file mode 100644 index 000000000000..933bf4a7c57c --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala @@ -0,0 +1,29 @@ +package dotty.tools +package dotc.util + +/** A class for the reading part of mutable or immutable maps. 
+ */ +abstract class ReadOnlyMap[Key, Value]: + + def lookup(x: Key): Value | Null + + def size: Int + + def iterator: Iterator[(Key, Value)] + def keysIterator: Iterator[Key] + + def isEmpty: Boolean = size == 0 + + def get(key: Key): Option[Value] = lookup(key) match + case null => None + case v => Some(v.uncheckedNN) + + def getOrElse(key: Key, value: => Value) = lookup(key) match + case null => value + case v => v.uncheckedNN + + def contains(key: Key): Boolean = lookup(key) != null + + def apply(key: Key): Value = lookup(key) match + case null => throw new NoSuchElementException(s"$key") + case v => v.uncheckedNN \ No newline at end of file diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala index a26b2dfd5594..538ab83f73f2 100644 --- a/compiler/src/dotty/tools/package.scala +++ b/compiler/src/dotty/tools/package.scala @@ -24,14 +24,24 @@ package object tools { def unsupported(methodName: String): Nothing = throw new UnsupportedOperationException(methodName) - /** Forward-ported from the explicit-nulls branch. - * Should be used when we know from the context that `x` is not null. - * Flow-typing under explicit nulls will automatically insert many necessary - * occurrences of uncheckedNN. - */ - extension [T](x: T | Null) + /** Forward-ported from the explicit-nulls branch. */ + extension [T](x: T | Null): + + /** Assert `x` is non null and strip `Null` from type */ + inline def nn: T = + assert(x != null) + x.asInstanceOf[T] + + /** Should be used when we know from the context that `x` is not null. + * Flow-typing under explicit nulls will automatically insert many necessary + * occurrences of uncheckedNN. + */ inline def uncheckedNN: T = x.asInstanceOf[T] + inline def toOption: Option[T] = + if x == null then None else Some(x.asInstanceOf[T]) + end extension + object resultWrapper { opaque type WrappedResult[T] = T private[tools] def unwrap[T](x: WrappedResult[T]): T = x From b13f68ea0865ae02b0e7f5cd523ec690c251cb8b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Sep 2020 14:48:56 +0200 Subject: [PATCH 26/33] Rename IdentityHashMap -> EqHashMap --- compiler/src/dotty/tools/dotc/Run.scala | 2 +- compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 8 ++++---- compiler/src/dotty/tools/dotc/core/Symbols.scala | 4 ++-- .../src/dotty/tools/dotc/core/tasty/TreePickler.scala | 6 +++--- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 4 ++-- compiler/src/dotty/tools/dotc/typer/Inliner.scala | 2 +- .../dotc/util/{IdentityHashMap.scala => EqHashMap.scala} | 4 ++-- 7 files changed, 15 insertions(+), 15 deletions(-) rename compiler/src/dotty/tools/dotc/util/{IdentityHashMap.scala => EqHashMap.scala} (97%) diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 9071ab4b3ea2..d8083cee7309 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -119,7 +119,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint private var lateFiles = mutable.Set[AbstractFile]() /** A cache for static references to packages and classes */ - val staticRefs = util.IdentityHashMap[Name, Denotation](initialCapacity = 1024) + val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) /** Actions that need to be performed at the end of the current compilation run */ private var finalizeActions = mutable.ListBuffer[() => Unit]() diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala 
b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 9bc36cfc5d1e..e35a8214b63e 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1564,14 +1564,14 @@ object SymDenotations { initPrivateWithin: Symbol) extends SymDenotation(symbol, maybeOwner, name, initFlags, initInfo, initPrivateWithin) { - import util.IdentityHashMap + import util.EqHashMap // ----- caches ------------------------------------------------------- private var myTypeParams: List[TypeSymbol] = null private var fullNameCache: SimpleIdentityMap[QualifiedNameKind, Name] = SimpleIdentityMap.empty - private var myMemberCache: IdentityHashMap[Name, PreDenotation] = null + private var myMemberCache: EqHashMap[Name, PreDenotation] = null private var myMemberCachePeriod: Period = Nowhere /** A cache from types T to baseType(T, C) */ @@ -1582,9 +1582,9 @@ object SymDenotations { private var baseDataCache: BaseData = BaseData.None private var memberNamesCache: MemberNames = MemberNames.None - private def memberCache(using Context): IdentityHashMap[Name, PreDenotation] = { + private def memberCache(using Context): EqHashMap[Name, PreDenotation] = { if (myMemberCachePeriod != ctx.period) { - myMemberCache = IdentityHashMap() + myMemberCache = EqHashMap() myMemberCachePeriod = ctx.period } myMemberCache diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 44df4d433637..9b0b42734381 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -30,7 +30,7 @@ import reporting.Message import collection.mutable import io.AbstractFile import language.implicitConversions -import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos, IdentityHashMap} +import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos, EqHashMap} import scala.collection.JavaConverters._ import scala.annotation.internal.sharable import config.Printers.typr @@ -495,7 +495,7 @@ object Symbols { /** The current class */ def currentClass(using Context): ClassSymbol = ctx.owner.enclosingClass.asClass - type MutableSymbolMap[T] = IdentityHashMap[Symbol, T] + type MutableSymbolMap[T] = EqHashMap[Symbol, T] // ---- Factory methods for symbol creation ---------------------- // diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 323ced731d1b..575f58e273bf 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -44,19 +44,19 @@ class TreePickler(pickler: TastyPickler) { private val symRefs = Symbols.MutableSymbolMap[Addr](256) private val forwardSymRefs = Symbols.MutableSymbolMap[List[Addr]]() - private val pickledTypes = util.IdentityHashMap[Type, Addr]() + private val pickledTypes = util.EqHashMap[Type, Addr]() /** A list of annotation trees for every member definition, so that later * parallel position pickling does not need to access and force symbols. */ - private val annotTrees = util.IdentityHashMap[untpd.MemberDef, mutable.ListBuffer[Tree]]() + private val annotTrees = util.EqHashMap[untpd.MemberDef, mutable.ListBuffer[Tree]]() /** A map from member definitions to their doc comments, so that later * parallel comment pickling does not need to access symbols of trees (which * would involve accessing symbols of named types and possibly changing phases * in doing so). 
*/ - private val docStrings = util.IdentityHashMap[untpd.MemberDef, Comment]() + private val docStrings = util.EqHashMap[untpd.MemberDef, Comment]() def treeAnnots(tree: untpd.MemberDef): List[Tree] = val ts = annotTrees.lookup(tree) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index c1b6731abe92..a52654cd3979 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -28,7 +28,7 @@ import Trees._ import transform.SymUtils._ import transform.TypeUtils._ import Hashable._ -import util.{SourceFile, NoSource, IdentityHashMap} +import util.{SourceFile, NoSource, EqHashMap} import config.{Config, Feature} import Feature.migrateTo3 import config.Printers.{implicits, implicitsDetailed} @@ -289,7 +289,7 @@ object Implicits: * @param outerCtx the next outer context that makes visible further implicits */ class ContextualImplicits(val refs: List[ImplicitRef], val outerImplicits: ContextualImplicits)(initctx: Context) extends ImplicitRefs(initctx) { - private val eligibleCache = IdentityHashMap[Type, List[Candidate]]() + private val eligibleCache = EqHashMap[Type, List[Candidate]]() /** The level increases if current context has a different owner or scope than * the context of the next-outer ImplicitRefs. This is however disabled under diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index cc43f0ddc79e..34e75ef62c29 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -21,7 +21,7 @@ import SymDenotations.SymDenotation import Inferencing.isFullyDefined import config.Printers.inlining import ErrorReporting.errorTree -import dotty.tools.dotc.util.{SimpleIdentityMap, SimpleIdentitySet, IdentityHashMap, SourceFile, SourcePosition, SrcPos} +import dotty.tools.dotc.util.{SimpleIdentityMap, SimpleIdentitySet, EqHashMap, SourceFile, SourcePosition, SrcPos} import dotty.tools.dotc.parsing.Parsers.Parser import Nullables.{given _} diff --git a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala similarity index 97% rename from compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala rename to compiler/src/dotty/tools/dotc/util/EqHashMap.scala index f28fd0df6a47..df7b16d48927 100644 --- a/compiler/src/dotty/tools/dotc/util/IdentityHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** A specialized implementation of GenericHashMap with identity hash and `eq` * as comparison. 
*/ -class IdentityHashMap[Key, Value] +class EqHashMap[Key, Value] (initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): import GenericHashMap.DenseLimit @@ -78,4 +78,4 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): val key = oldTable(idx).asInstanceOf[Key] if key != null then addOld(key, oldTable(idx + 1).asInstanceOf[Value]) idx += 2 -end IdentityHashMap +end EqHashMap From 8a5d9a24d9ab11c7b097e35cf89a899a4576d419 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Sep 2020 14:54:45 +0200 Subject: [PATCH 27/33] Use a util.EqHashMap for BaseTypeCache --- .../tools/dotc/core/SymDenotations.scala | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index e35a8214b63e..a8c1a1da3078 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1575,7 +1575,7 @@ object SymDenotations { private var myMemberCachePeriod: Period = Nowhere /** A cache from types T to baseType(T, C) */ - type BaseTypeMap = java.util.IdentityHashMap[CachedType, Type] + type BaseTypeMap = EqHashMap[CachedType, Type] private var myBaseTypeCache: BaseTypeMap = null private var myBaseTypeCachePeriod: Period = Nowhere @@ -1592,7 +1592,7 @@ object SymDenotations { private def baseTypeCache(using Context): BaseTypeMap = { if !currentHasSameBaseTypesAs(myBaseTypeCachePeriod) then - myBaseTypeCache = new BaseTypeMap + myBaseTypeCache = BaseTypeMap() myBaseTypeCachePeriod = ctx.period myBaseTypeCache } @@ -1906,14 +1906,16 @@ object SymDenotations { /** Compute tp.baseType(this) */ final def baseTypeOf(tp: Type)(using Context): Type = { val btrCache = baseTypeCache - def inCache(tp: Type) = btrCache.get(tp) != null + def inCache(tp: Type) = tp match + case tp: CachedType => btrCache.contains(tp) + case _ => false def record(tp: CachedType, baseTp: Type) = { if (Stats.monitored) { Stats.record("basetype cache entries") if (!baseTp.exists) Stats.record("basetype cache NoTypes") } if (!tp.isProvisional) - btrCache.put(tp, baseTp) + btrCache(tp) = baseTp else btrCache.remove(tp) // Remove any potential sentinel value } @@ -1926,7 +1928,7 @@ object SymDenotations { def recur(tp: Type): Type = try { tp match { case tp: CachedType => - val baseTp = btrCache.get(tp) + val baseTp = btrCache.lookup(tp) if (baseTp != null) return ensureAcyclic(baseTp) case _ => } @@ -1945,7 +1947,7 @@ object SymDenotations { } def computeTypeRef = { - btrCache.put(tp, NoPrefix) + btrCache(tp) = NoPrefix val tpSym = tp.symbol tpSym.denot match { case clsd: ClassDenotation => @@ -1980,7 +1982,7 @@ object SymDenotations { case tp @ AppliedType(tycon, args) => def computeApplied = { - btrCache.put(tp, NoPrefix) + btrCache(tp) = NoPrefix val baseTp = if (tycon.typeSymbol eq symbol) tp else (tycon.typeParams: @unchecked) match { @@ -2041,7 +2043,9 @@ object SymDenotations { } catch { case ex: Throwable => - btrCache.remove(tp) + tp match + case tp: CachedType => btrCache.remove(tp) + case _ => throw ex } From 30f03e8a1daac637131504bc08a16797a85b5620 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Sep 2020 09:36:04 +0200 Subject: [PATCH 28/33] Add ReadOnlySet --- .../dotty/tools/dotc/util/MutableSet.scala | 16 +------------ .../dotty/tools/dotc/util/ReadOnlySet.scala | 24 +++++++++++++++++++ 2 files changed, 25 insertions(+), 15 
deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/ReadOnlySet.scala diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index 9b60e2bbe61b..1eac1929413b 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -2,10 +2,7 @@ package dotty.tools.dotc.util /** A common class for lightweight mutable sets. */ -abstract class MutableSet[T] { - - /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */ - def lookup(x: T): T | Null +abstract class MutableSet[T] extends ReadOnlySet[T]: /** Add element `x` to the set */ def +=(x: T): Unit @@ -17,14 +14,3 @@ abstract class MutableSet[T] { def clear(): Unit - def size: Int - - def iterator: Iterator[T] - - def contains(x: T): Boolean = lookup(x) != null - - def foreach[U](f: T => U): Unit = iterator foreach f - - def toList: List[T] = iterator.toList - -} diff --git a/compiler/src/dotty/tools/dotc/util/ReadOnlySet.scala b/compiler/src/dotty/tools/dotc/util/ReadOnlySet.scala new file mode 100644 index 000000000000..4826d02743a9 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/ReadOnlySet.scala @@ -0,0 +1,24 @@ +package dotty.tools.dotc.util + +/** A class for the readonly part of mutable sets. + */ +abstract class ReadOnlySet[T]: + + /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */ + def lookup(x: T): T | Null + + def size: Int + + def iterator: Iterator[T] + + def contains(x: T): Boolean = lookup(x) != null + + def foreach[U](f: T => U): Unit = iterator.foreach(f) + + def toList: List[T] = iterator.toList + + def isEmpty = size == 0 + +object ReadOnlySet: + def empty[T]: ReadOnlySet[T] = HashSet[T](4) + From 785a31b3938030bbddfa070138e9f6809f7714e4 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Sep 2020 10:42:38 +0200 Subject: [PATCH 29/33] Convert more sets to util.HashSet --- .../dotc/classpath/AggregateClassPath.scala | 3 ++- .../src/dotty/tools/dotc/core/Contexts.scala | 10 +++++----- .../src/dotty/tools/dotc/core/NameKinds.scala | 17 ++++++++--------- .../dotty/tools/dotc/core/TypeComparer.scala | 6 +++--- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- .../src/dotty/tools/dotc/plugins/Plugin.scala | 4 ++-- .../tools/dotc/transform/CapturedVars.scala | 8 ++++---- .../dotty/tools/dotc/transform/Getters.scala | 2 +- .../tools/dotc/transform/OverridingPairs.scala | 2 +- .../src/dotty/tools/dotc/util/MutableSet.scala | 9 +++++++++ tests/neg-custom-args/i1650.scala | 2 +- 11 files changed, 37 insertions(+), 28 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 13d7e7a8b692..9ff0e71d3d9f 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -7,6 +7,7 @@ package dotc.classpath import java.net.URL import scala.collection.mutable.ArrayBuffer import scala.collection.immutable.ArraySeq +import dotc.util import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientClassPath } @@ -132,7 +133,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { } private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { - val seenNames = collection.mutable.HashSet[String]() + val seenNames = util.HashSet[String]() val entriesBuffer = new 
ArrayBuffer[EntryType](1024) for { cp <- aggregates diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 801972aab3f6..b2258dee613d 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -14,7 +14,7 @@ import Uniques._ import ast.Trees._ import ast.untpd import Flags.GivenOrImplicit -import util.{NoSource, SimpleIdentityMap, SourceFile} +import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet} import typer.{Implicits, ImportInfo, Inliner, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} import Nullables.{NotNullInfo, given _} import Implicits.ContextualImplicits @@ -534,7 +534,7 @@ object Contexts { def settings: ScalaSettings = base.settings def definitions: Definitions = base.definitions def platform: Platform = base.platform - def pendingUnderlying: mutable.HashSet[Type] = base.pendingUnderlying + def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes def uniques: util.HashSet[Type] = base.uniques @@ -838,8 +838,8 @@ object Contexts { def nextSymId: Int = { _nextSymId += 1; _nextSymId } /** Sources that were loaded */ - val sources: mutable.HashMap[AbstractFile, SourceFile] = new mutable.HashMap[AbstractFile, SourceFile] - val sourceNamed: mutable.HashMap[TermName, SourceFile] = new mutable.HashMap[TermName, SourceFile] + val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() + val sourceNamed: util.HashMap[TermName, SourceFile] = util.HashMap[TermName, SourceFile]() // Types state /** A table for hash consing unique types */ @@ -869,7 +869,7 @@ object Contexts { /** The set of named types on which a currently active invocation * of underlying during a controlled operation exists. */ - private[core] val pendingUnderlying: mutable.HashSet[Type] = new mutable.HashSet[Type] + private[core] val pendingUnderlying: util.HashSet[Type] = util.HashSet[Type]() /** A map from ErrorType to associated message. We use this map * instead of storing messages directly in ErrorTypes in order diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index 4185c5171089..6d3bd93f4a6d 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -8,7 +8,6 @@ import StdNames._ import NameTags._ import Contexts._ import Decorators._ -import collection.mutable import scala.annotation.internal.sharable @@ -18,10 +17,10 @@ object NameKinds { // These are sharable since all NameKinds are created eagerly at the start of the program // before any concurrent threads are forked. for this to work, NameKinds should never // be created lazily or in modules that start running after compilers are forked. 
- @sharable private val simpleNameKinds = new mutable.HashMap[Int, ClassifiedNameKind] - @sharable private val qualifiedNameKinds = new mutable.HashMap[Int, QualifiedNameKind] - @sharable private val numberedNameKinds = new mutable.HashMap[Int, NumberedNameKind] - @sharable private val uniqueNameKinds = new mutable.HashMap[String, UniqueNameKind] + @sharable private val simpleNameKinds = util.HashMap[Int, ClassifiedNameKind]() + @sharable private val qualifiedNameKinds = util.HashMap[Int, QualifiedNameKind]() + @sharable private val numberedNameKinds = util.HashMap[Int, NumberedNameKind]() + @sharable private val uniqueNameKinds = util.HashMap[String, UniqueNameKind]() /** A class for the info stored in a derived name */ abstract class NameInfo { @@ -393,8 +392,8 @@ object NameKinds { val Scala2MethodNameKinds: List[NameKind] = List(DefaultGetterName, ExtMethName, UniqueExtMethName) - def simpleNameKindOfTag : collection.Map[Int, ClassifiedNameKind] = simpleNameKinds - def qualifiedNameKindOfTag : collection.Map[Int, QualifiedNameKind] = qualifiedNameKinds - def numberedNameKindOfTag : collection.Map[Int, NumberedNameKind] = numberedNameKinds - def uniqueNameKindOfSeparator: collection.Map[String, UniqueNameKind] = uniqueNameKinds + def simpleNameKindOfTag : util.ReadOnlyMap[Int, ClassifiedNameKind] = simpleNameKinds + def qualifiedNameKindOfTag : util.ReadOnlyMap[Int, QualifiedNameKind] = qualifiedNameKinds + def numberedNameKindOfTag : util.ReadOnlyMap[Int, NumberedNameKind] = numberedNameKinds + def uniqueNameKindOfSeparator: util.ReadOnlyMap[String, UniqueNameKind] = uniqueNameKinds } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index f0721fd648bd..330124c39da3 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -49,7 +49,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling needsGc = false if Config.checkTypeComparerReset then checkReset() - private var pendingSubTypes: mutable.Set[(Type, Type)] = null + private var pendingSubTypes: util.MutableSet[(Type, Type)] = null private var recCount = 0 private var monitored = false @@ -202,7 +202,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def monitoredIsSubType = { if (pendingSubTypes == null) { - pendingSubTypes = new mutable.HashSet[(Type, Type)] + pendingSubTypes = util.HashSet[(Type, Type)]() report.log(s"!!! deep subtype recursion involving ${tp1.show} <:< ${tp2.show}, constraint = ${state.constraint.show}") report.log(s"!!! 
constraint = ${constraint.show}") //if (ctx.settings.YnoDeepSubtypes.value) { @@ -231,7 +231,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } } val p = (normalize(tp1), normalize(tp2)) - !pendingSubTypes(p) && { + !pendingSubTypes.contains(p) && { try { pendingSubTypes += p firstTry diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index d70385094c41..774ffb8b0a77 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3405,7 +3405,7 @@ object Types { abstract class LambdaTypeCompanion[N <: Name, PInfo <: Type, LT <: LambdaType] { def syntheticParamName(n: Int): N - @sharable private val memoizedNames = new mutable.HashMap[Int, List[N]] + @sharable private val memoizedNames = util.HashMap[Int, List[N]]() def syntheticParamNames(n: Int): List[N] = synchronized { memoizedNames.getOrElseUpdate(n, (0 until n).map(syntheticParamName).toList) } diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index cb4a6d9e99af..76cf60d5e3be 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -162,12 +162,12 @@ object Plugin { case Failure(e) => Failure(e) }) - val seen = mutable.HashSet[String]() + val seen = util.HashSet[String]() val enabled = (fromPaths ::: fromDirs) map(_.flatMap { case (classname, loader) => Plugin.load(classname, loader).flatMap { clazz => val plugin = instantiate(clazz) - if (seen(classname)) // a nod to scala/bug#7494, take the plugin classes distinctly + if (seen.contains(classname)) // a nod to scala/bug#7494, take the plugin classes distinctly Failure(new PluginLoadException(plugin.name, s"Ignoring duplicate plugin ${plugin.name} (${classname})")) else if (ignoring contains plugin.name) Failure(new PluginLoadException(plugin.name, s"Disabling plugin ${plugin.name}")) diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 2517b58ea617..7b6d7ee64cd2 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -26,11 +26,11 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase = override def runsAfterGroupsOf: Set[String] = Set(LiftTry.name) // lifting tries changes what variables are considered to be captured - private[this] var Captured: Store.Location[collection.Set[Symbol]] = _ + private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = _ private def captured(using Context) = ctx.store(Captured) override def initContext(ctx: FreshContext): Unit = - Captured = ctx.addLocation(Set.empty) + Captured = ctx.addLocation(util.ReadOnlySet.empty) private class RefInfo(using Context) { /** The classes for which a Ref type exists. 
*/ @@ -54,7 +54,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase = } private class CollectCaptured extends TreeTraverser { - private val captured = mutable.HashSet[Symbol]() + private val captured = util.HashSet[Symbol]() def traverse(tree: Tree)(using Context) = tree match { case id: Ident => val sym = id.symbol @@ -68,7 +68,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase = case _ => traverseChildren(tree) } - def runOver(tree: Tree)(using Context): collection.Set[Symbol] = { + def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = { traverse(tree) captured } diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index 27150e727532..7e5ea9c81b2a 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -91,7 +91,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => } private val NoGetterNeededFlags = Method | Param | JavaDefined | JavaStatic - val newSetters = mutable.HashSet[Symbol]() + val newSetters = util.HashSet[Symbol]() def ensureSetter(sym: TermSymbol)(using Context) = if !sym.setter.exists then diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index d46258df94b9..b3b82856fd80 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -77,7 +77,7 @@ object OverridingPairs { * (maybe excluded because of hasCommonParentAsSubclass). * These will not appear as overriding */ - private val visited = new mutable.HashSet[Symbol] + private val visited = util.HashSet[Symbol]() /** The current entry candidate for overriding */ diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index 1eac1929413b..bedb079f18ca 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -12,5 +12,14 @@ abstract class MutableSet[T] extends ReadOnlySet[T]: */ def put(x: T): T + /** Remove element `x` from the set */ + def -=(x: T): Unit + def clear(): Unit + def ++= (xs: IterableOnce[T]): Unit = + xs.iterator.foreach(this += _) + + def --= (xs: IterableOnce[T]): Unit = + xs.iterator.foreach(this -= _) + diff --git a/tests/neg-custom-args/i1650.scala b/tests/neg-custom-args/i1650.scala index 6f709cd843eb..3ce5d19bf7dd 100644 --- a/tests/neg-custom-args/i1650.scala +++ b/tests/neg-custom-args/i1650.scala @@ -1,5 +1,5 @@ object Test { - test4(test4$default$1) // error: recursion limit exceeded + test4(test4$default$1) def test4[T[P]](x: T[T[List[T[X forSome { type X }]]]]) = ??? // error // error def test4$default$1[T[P]]: T[Int] = ??? } From eebdcba5ca5a806c06995a67b14ae618778245cd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Sep 2020 10:45:03 +0200 Subject: [PATCH 30/33] Fix HashSet.-= I overlooked a backport of a fix from HashMap to HashSet. 
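The changed loop condition decides how far removal scans the probe chain. In an open-addressed table without tombstones, a deletion has to keep walking until the next empty slot and shift back every later entry whose probe path runs through the freed slot; stopping at the first entry that happens to sit at its home index can leave entries behind it unreachable. Below is a minimal sketch of that backward-shift deletion, assuming a bare Array[AnyRef] table and using made-up names; it is the general technique, not the compiler's HashSet code:

// Backward-shift deletion for a linear-probing table (illustrative sketch only).
// Assumes table.length is a power of two and the table always keeps an empty slot.
object ProbeDeletionSketch:
  def remove(table: Array[AnyRef], key: AnyRef): Unit =
    val mask = table.length - 1
    def home(k: AnyRef): Int = k.hashCode & mask      // slot the key hashes to
    var idx = home(key)
    while table(idx) != null && table(idx) != key do
      idx = (idx + 1) & mask                          // linear probing
    if table(idx) == null then return                 // key not present
    var hole = idx
    idx = (idx + 1) & mask
    while table(idx) != null do                       // scan the whole chain, not only up
      val h = home(table(idx))                        // to the first home-positioned entry
      val passesThroughHole =                         // h lies outside the cyclic range (hole, idx],
        if hole <= idx then h <= hole || h > idx      // so this entry's probe path crosses the hole
        else h <= hole && h > idx
      if passesThroughHole then                       // a lookup from h would stop at the hole,
        table(hole) = table(idx)                      // so shift the entry back into it
        hole = idx
      idx = (idx + 1) & mask
    table(hole) = null                                // clear the final hole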
--- compiler/src/dotty/tools/dotc/util/HashSet.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index 2a9ed940e143..8d73eada970f 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -107,7 +107,7 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu while idx = nextIndex(idx) e = entryAt(idx) - e != null && (isDense || index(hash(e)) != idx) + e != null do if isDense || index(hole - index(hash(e))) < limit From 29ddc17b94317c3c81cf3b5cf9fed786ed0ce0a7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Sep 2020 10:45:57 +0200 Subject: [PATCH 31/33] Convert sets in TreeChecker to util.HashSet --- compiler/src/dotty/tools/dotc/transform/TreeChecker.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 62a7896074df..32be8fc1dd18 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -160,8 +160,8 @@ class TreeChecker extends Phase with SymTransformer { class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { - private val nowDefinedSyms = new mutable.HashSet[Symbol] - private val patBoundSyms = new mutable.HashSet[Symbol] + private val nowDefinedSyms = util.HashSet[Symbol]() + private val patBoundSyms = util.HashSet[Symbol]() private val everDefinedSyms = MutableSymbolMap[untpd.Tree]() // don't check value classes after typer, as the constraint about constructors doesn't hold after transform From 57c0a43ce6dc46a529f2f3ff0cb18b9ac5c61396 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Sep 2020 22:28:22 +0200 Subject: [PATCH 32/33] Convert some mutable.HashMaps to util.HashMaps --- .../tools/dotc/classpath/AggregateClassPath.scala | 2 +- .../dotc/classpath/ZipAndJarFileLookupFactory.scala | 12 +++++++----- compiler/src/dotty/tools/dotc/core/Types.scala | 4 ++-- .../tools/dotc/core/classfile/ClassfileParser.scala | 12 +++++------- .../src/dotty/tools/dotc/util/GenericHashMap.scala | 3 +++ 5 files changed, 18 insertions(+), 15 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 9ff0e71d3d9f..163667a18f44 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -108,7 +108,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { // based on the implementation from MergedClassPath var count = 0 - val indices = new collection.mutable.HashMap[String, Int]() + val indices = util.HashMap[String, Int]() val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) for { entry <- entries diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 9ddec8ebbdf7..eb872abb80f0 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -1,7 +1,8 @@ /* * Copyright (c) 2014 
Contributor. All rights reserved. */ -package dotty.tools.dotc.classpath +package dotty.tools.dotc +package classpath import java.io.File import java.net.URL @@ -12,6 +13,7 @@ import scala.annotation.tailrec import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} import dotty.tools.dotc.core.Contexts._ import FileUtils._ +import util._ /** * A trait providing an optional cache for classpath entries obtained from zip and jar files. @@ -89,8 +91,8 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * when we need subpackages of a given package or its classes, we traverse once and cache only packages. * Classes for given package can be then easily loaded when they are needed. */ - private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = { - val packages = collection.mutable.HashMap[String, PackageFileInfo]() + private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { + val packages = util.HashMap[String, PackageFileInfo]() def getSubpackages(dir: AbstractFile): List[AbstractFile] = (for (file <- dir if file.isPackage) yield file).toList @@ -102,7 +104,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { case pkgFile :: remainingFiles => val subpackages = getSubpackages(pkgFile) val fullPkgName = packagePrefix + pkgFile.name - packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages)) + packages(fullPkgName) = PackageFileInfo(pkgFile, subpackages) val newPackagePrefix = fullPkgName + "." subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) traverse(packagePrefix, remainingFiles, subpackagesQueue) @@ -113,7 +115,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { } val subpackages = getSubpackages(file) - packages.put(ClassPath.RootPackage, PackageFileInfo(file, subpackages)) + packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) packages } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 774ffb8b0a77..8e328c0c882a 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4321,7 +4321,7 @@ object Types { def underlying(using Context): Type = bound private var myReduced: Type = null - private var reductionContext: mutable.Map[Type, Type] = null + private var reductionContext: util.MutableMap[Type, Type] = null override def tryNormalize(using Context): Type = reduced.normalized @@ -4340,7 +4340,7 @@ object Types { } def updateReductionContext(footprint: collection.Set[Type]): Unit = - reductionContext = new mutable.HashMap + reductionContext = util.HashMap() for (tp <- footprint) reductionContext(tp) = contextInfo(tp) typr.println(i"footprint for $this $hashCode: ${footprint.toList.map(x => (x, contextInfo(x)))}%, %") diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index f558defee1d5..f4c95001ceaa 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -732,15 +732,13 @@ class ClassfileParser( classTranslation.flags(jflags), getScope(jflags)) - for (entry <- innerClasses.values) { + for entry <- innerClasses.valuesIterator do // create a new class member for immediate inner classes - if (entry.outerName == 
currentClassName) { + if entry.outerName == currentClassName then val file = ctx.platform.classPath.findClassFile(entry.externalName.toString) getOrElse { throw new AssertionError(entry.externalName) } enterClassAndModule(entry, file, entry.jflags) - } - } } // Nothing$ and Null$ were incorrectly emitted with a Scala attribute @@ -937,14 +935,14 @@ class ClassfileParser( s"$originalName in $outerName($externalName)" } - object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] { + object innerClasses extends util.HashMap[Name, InnerClassEntry] { /** Return the Symbol of the top level class enclosing `name`, * or 'name's symbol if no entry found for `name`. */ def topLevelClass(name: Name)(using Context): Symbol = { - val tlName = if (isDefinedAt(name)) { + val tlName = if (contains(name)) { var entry = this(name) - while (isDefinedAt(entry.outerName)) + while (contains(entry.outerName)) entry = this(entry.outerName) entry.outerName } diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 060e0dc9ec14..134a94e8b888 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -177,6 +177,9 @@ abstract class GenericHashMap[Key, Value] def keysIterator: Iterator[Key] = new EntryIterator: def entry(idx: Int) = keyAt(idx) + def valuesIterator: Iterator[Value] = new EntryIterator: + def entry(idx: Int) = valueAt(idx) + override def toString: String = iterator.map((k, v) => s"$k -> $v").mkString("HashMap(", ", ", ")") From 5e2d8014cef036381ef777210ab99eebaedbf41c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Sep 2020 22:40:28 +0200 Subject: [PATCH 33/33] Convert more mutable.HashMaps to util.HashMaps (1) --- .../tools/dotc/core/tasty/CommentUnpickler.scala | 14 +++++++------- .../tools/dotc/core/tasty/PositionUnpickler.scala | 12 ++++++------ .../src/dotty/tools/dotc/util/ReadOnlyMap.scala | 14 +++++++++++++- 3 files changed, 26 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala index 3b12f6d773c3..5c18594b7ebe 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala @@ -1,19 +1,19 @@ -package dotty.tools.dotc.core.tasty +package dotty.tools.dotc +package core.tasty -import dotty.tools.dotc.core.Comments.Comment -import dotty.tools.dotc.util.Spans.Span +import core.Comments.Comment +import util.Spans.Span +import util.HashMap import dotty.tools.tasty.{TastyReader, TastyBuffer} import TastyBuffer.Addr -import scala.collection.mutable.HashMap - import java.nio.charset.Charset class CommentUnpickler(reader: TastyReader) { import reader._ - private[tasty] lazy val comments: Map[Addr, Comment] = { + private[tasty] lazy val comments: HashMap[Addr, Comment] = { val comments = new HashMap[Addr, Comment] while (!isAtEnd) { val addr = readAddr() @@ -25,7 +25,7 @@ class CommentUnpickler(reader: TastyReader) { comments(addr) = Comment(position, rawComment) } } - comments.toMap + comments } def commentAt(addr: Addr): Option[Comment] = diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala index 4d15b39999f6..872f60837515 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala +++ 
b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala @@ -15,14 +15,14 @@ import Names.TermName class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { import reader._ - private var mySpans: mutable.HashMap[Addr, Span] = _ - private var mySourcePaths: mutable.HashMap[Addr, String] = _ + private var mySpans: util.HashMap[Addr, Span] = _ + private var mySourcePaths: util.HashMap[Addr, String] = _ private var isDefined = false def ensureDefined(): Unit = { if (!isDefined) { - mySpans = new mutable.HashMap[Addr, Span] - mySourcePaths = new mutable.HashMap[Addr, String] + mySpans = util.HashMap[Addr, Span]() + mySourcePaths = util.HashMap[Addr, String]() var curIndex = 0 var curStart = 0 var curEnd = 0 @@ -50,12 +50,12 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { } } - private[tasty] def spans: Map[Addr, Span] = { + private[tasty] def spans: util.ReadOnlyMap[Addr, Span] = { ensureDefined() mySpans } - private[tasty] def sourcePaths: Map[Addr, String] = { + private[tasty] def sourcePaths: util.ReadOnlyMap[Addr, String] = { ensureDefined() mySourcePaths } diff --git a/compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala b/compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala index 933bf4a7c57c..020303c18bc2 100644 --- a/compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala +++ b/compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala @@ -11,6 +11,7 @@ abstract class ReadOnlyMap[Key, Value]: def iterator: Iterator[(Key, Value)] def keysIterator: Iterator[Key] + def valuesIterator: Iterator[Value] def isEmpty: Boolean = size == 0 @@ -26,4 +27,15 @@ abstract class ReadOnlyMap[Key, Value]: def apply(key: Key): Value = lookup(key) match case null => throw new NoSuchElementException(s"$key") - case v => v.uncheckedNN \ No newline at end of file + case v => v.uncheckedNN + + def toArray: Array[(Key, Value)] = + val result = new Array[(Key, Value)](size) + var idx = 0 + for pair <- iterator do + result(idx) = pair + idx += 1 + result + + def toSeq: Seq[(Key, Value)] = toArray.toSeq +
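Taken together, the series moves these compiler components from scala.collection.mutable maps and sets onto the util.HashMap / util.EqHashMap / util.HashSet family behind the ReadOnlyMap / MutableMap and ReadOnlySet / MutableSet interfaces, with lookup and removal returning `| Null` instead of allocating an Option. A rough usage sketch of the resulting API surface, illustrative only (names, values and assertions are made up) and relying just on the signatures shown in the diffs above:

import dotty.tools.dotc.util.{EqHashMap, HashMap, HashSet}

// Sketch: exercises the signatures introduced in this series, assuming the
// dotty util package is on the classpath.
@main def utilCollectionsDemo(): Unit =

  // HashMap hashes with hashCode/equals; update, lookup and remove are the
  // primitive operations, and absent keys yield null rather than None.
  val byEquals = HashMap[String, String]()
  byEquals("a") = "x"                                 // update
  assert(byEquals.contains("a"))
  assert(byEquals.lookup("b") == null)                // absent key: null, no Option allocation
  assert(byEquals.getOrElseUpdate("b", "y") == "y")
  assert(byEquals.remove("a") == "x")                 // remove now returns the previous value

  // EqHashMap hashes by identity and compares keys with `eq`, so an
  // equal-but-distinct key instance is not found.
  val byIdentity = EqHashMap[String, String]()
  val k1 = new String("key")
  val k2 = new String("key")
  byIdentity(k1) = "v"
  assert(byIdentity.lookup(k1) == "v")
  assert(byIdentity.lookup(k2) == null)

  // HashSet implements MutableSet, including the bulk += / ++= / -= operations.
  val seen = HashSet[String]()
  seen += "x"
  seen ++= List("y", "z")
  assert(seen.contains("y"))
  seen -= "x"
  assert(!seen.contains("x"))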