diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index f983743281cc..665cf0d8c92c 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -22,7 +22,7 @@ import Symbols._ import Phases._ import dotty.tools.dotc.util -import dotty.tools.dotc.util.Spans +import dotty.tools.dotc.util.{Spans, ReadOnlyMap} import dotty.tools.dotc.report import Decorators._ @@ -36,7 +36,7 @@ import Names.TermName import Annotations.Annotation import Names.Name -class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: Map[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index e7b2684bb4cd..181e4c18f452 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -36,7 +36,7 @@ import dotty.tools.io._ class GenBCode extends Phase { def phaseName: String = GenBCode.name - private val superCallsMap = newMutableSymbolMap[Set[ClassSymbol]] + private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { val old = superCallsMap.getOrElse(sym, Set.empty) superCallsMap.update(sym, old + calls) @@ -51,10 +51,8 @@ class GenBCode extends Phase { } def run(using Context): Unit = - new GenBCodePipeline( - new DottyBackendInterface( - outputDir, superCallsMap.toMap - ) + GenBCodePipeline( + DottyBackendInterface(outputDir, superCallsMap) ).run(ctx.compilationUnit.tpdTree) diff --git 
a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala index 8b21afd8377a..e1b6d8d922f7 100644 --- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala @@ -9,6 +9,7 @@ import Names.TermName, StdNames._ import Types.{JavaArrayType, UnspecifiedErrorType, Type} import Symbols.{Symbol, NoSymbol} import dotc.report +import dotc.util.ReadOnlyMap import scala.annotation.threadUnsafe import scala.collection.immutable @@ -34,7 +35,7 @@ import scala.collection.immutable class DottyPrimitives(ictx: Context) { import dotty.tools.backend.ScalaPrimitivesOps._ - @threadUnsafe private lazy val primitives: immutable.Map[Symbol, Int] = init + @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init /** Return the code for the given symbol. */ def getPrimitive(sym: Symbol): Int = { @@ -118,12 +119,12 @@ class DottyPrimitives(ictx: Context) { } /** Initialize the primitive map */ - private def init: immutable.Map[Symbol, Int] = { + private def init: ReadOnlyMap[Symbol, Int] = { given Context = ictx import Symbols.defn - val primitives = Symbols.newMutableSymbolMap[Int] + val primitives = Symbols.MutableSymbolMap[Int](512) /** Add a primitive operation to the map */ def addPrimitive(s: Symbol, code: Int): Unit = { @@ -394,7 +395,7 @@ class DottyPrimitives(ictx: Context) { addPrimitives(DoubleClass, nme.UNARY_-, NEG) - primitives.toMap + primitives } def isPrimitive(sym: Symbol): Boolean = diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala index 382b8b9d9a14..1beb9bdf30aa 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala @@ -10,6 +10,7 @@ import Symbols._ import dotty.tools.dotc.ast.tpd._ import dotty.tools.backend.jvm.DottyPrimitives import dotty.tools.dotc.report +import 
dotty.tools.dotc.util.ReadOnlyMap import scala.collection.mutable @@ -55,7 +56,7 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { import JSPrimitives._ import dotty.tools.backend.ScalaPrimitivesOps._ - private lazy val jsPrimitives: Map[Symbol, Int] = initJSPrimitives(using ictx) + private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx) override def getPrimitive(sym: Symbol): Int = jsPrimitives.getOrElse(sym, super.getPrimitive(sym)) @@ -70,9 +71,9 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { jsPrimitives.contains(fun.symbol(using ictx)) || super.isPrimitive(fun) /** Initialize the primitive map */ - private def initJSPrimitives(using Context): Map[Symbol, Int] = { + private def initJSPrimitives(using Context): ReadOnlyMap[Symbol, Int] = { - val primitives = newMutableSymbolMap[Int] + val primitives = MutableSymbolMap[Int]() // !!! Code duplicate with DottyPrimitives /** Add a primitive operation to the map */ @@ -120,7 +121,7 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { addPrimitive(jsdefn.ReflectSelectable_selectDynamic, REFLECT_SELECTABLE_SELECTDYN) addPrimitive(jsdefn.ReflectSelectable_applyDynamic, REFLECT_SELECTABLE_APPLYDYN) - primitives.toMap + primitives } } diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 8b51cb1a3ce2..f3310db45892 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -109,7 +109,7 @@ class Compiler { List(new Constructors, // Collect initialization code in primary constructors // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it new FunctionalInterfaces, // Rewrites closures to implement @specialized types of Functions. 
- new Instrumentation) :: // Count closure allocations under -Yinstrument-closures + new Instrumentation) :: // Count calls and allocations under -Yinstrument List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments // Note: in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here new ElimStaticThis, // Replace `this` references to static objects by global identifiers diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 96c9a647b960..d8083cee7309 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -7,6 +7,8 @@ import Periods._ import Symbols._ import Types._ import Scopes._ +import Names.Name +import Denotations.Denotation import typer.Typer import typer.ImportInfo._ import Decorators._ @@ -14,7 +16,7 @@ import io.{AbstractFile, PlainFile} import Phases.unfusedPhases import scala.io.Codec -import util.{Set => _, _} +import util._ import reporting.Reporter import rewrites.Rewrites import java.io.{BufferedWriter, OutputStreamWriter} @@ -116,6 +118,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** The source files of all late entered symbols, as a set */ private var lateFiles = mutable.Set[AbstractFile]() + /** A cache for static references to packages and classes */ + val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) + /** Actions that need to be performed at the end of the current compilation run */ private var finalizeActions = mutable.ListBuffer[() => Unit]() diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 13d7e7a8b692..163667a18f44 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -7,6 +7,7 @@ package dotc.classpath 
import java.net.URL import scala.collection.mutable.ArrayBuffer import scala.collection.immutable.ArraySeq +import dotc.util import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientClassPath } @@ -107,7 +108,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { // based on the implementation from MergedClassPath var count = 0 - val indices = new collection.mutable.HashMap[String, Int]() + val indices = util.HashMap[String, Int]() val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) for { entry <- entries @@ -132,7 +133,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { } private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { - val seenNames = collection.mutable.HashSet[String]() + val seenNames = util.HashSet[String]() val entriesBuffer = new ArrayBuffer[EntryType](1024) for { cp <- aggregates diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 9ddec8ebbdf7..eb872abb80f0 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -1,7 +1,8 @@ /* * Copyright (c) 2014 Contributor. All rights reserved. */ -package dotty.tools.dotc.classpath +package dotty.tools.dotc +package classpath import java.io.File import java.net.URL @@ -12,6 +13,7 @@ import scala.annotation.tailrec import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} import dotty.tools.dotc.core.Contexts._ import FileUtils._ +import util._ /** * A trait providing an optional cache for classpath entries obtained from zip and jar files. 
@@ -89,8 +91,8 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * when we need subpackages of a given package or its classes, we traverse once and cache only packages. * Classes for given package can be then easily loaded when they are needed. */ - private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = { - val packages = collection.mutable.HashMap[String, PackageFileInfo]() + private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { + val packages = util.HashMap[String, PackageFileInfo]() def getSubpackages(dir: AbstractFile): List[AbstractFile] = (for (file <- dir if file.isPackage) yield file).toList @@ -102,7 +104,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { case pkgFile :: remainingFiles => val subpackages = getSubpackages(pkgFile) val fullPkgName = packagePrefix + pkgFile.name - packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages)) + packages(fullPkgName) = PackageFileInfo(pkgFile, subpackages) val newPackagePrefix = fullPkgName + "." subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) traverse(packagePrefix, remainingFiles, subpackagesQueue) @@ -113,7 +115,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { } val subpackages = getSubpackages(file) - packages.put(ClassPath.RootPackage, PackageFileInfo(file, subpackages)) + packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) packages } diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 19724f28698f..26dbbbe145a4 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -181,10 +181,10 @@ object Config { /** If set, enables tracing */ inline val tracingEnabled = false - /** Initial capacity of uniques HashMap. 
- * Note: This MUST BE a power of two to work with util.HashSet + /** Initial capacity of the uniques HashMap. + * Note: This should be a power of two to work with util.HashSet */ - inline val initialUniquesCapacity = 65536 + inline val initialUniquesCapacity = 0x8000 /** How many recursive calls to NamedType#underlying are performed before logging starts. */ inline val LogPendingUnderlyingThreshold = 50 diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 00b3d73cc6d1..e8c7bf8343b0 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -177,8 +177,7 @@ class ScalaSettings extends Settings.SettingGroup { val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") - val YinstrumentClosures: Setting[Boolean] = BooleanSetting("-Yinstrument-closures", "Add instrumentation code that counts closure creations.") - val YinstrumentAllocations: Setting[Boolean] = BooleanSetting("-Yinstrument-allocations", "Add instrumentation code that counts allocations.") + val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") /** Dottydoc specific settings */ val siteRoot: Setting[String] = StringSetting( diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala index 71037e19bc6e..0199e2474354 100644 --- a/compiler/src/dotty/tools/dotc/core/Comments.scala +++ b/compiler/src/dotty/tools/dotc/core/Comments.scala @@ -4,7 +4,7 @@ package core import ast.{ untpd, tpd } import Decorators._, Symbols._, Contexts._ -import util.SourceFile +import util.{SourceFile, ReadOnlyMap} import util.Spans._ import util.CommentParsing._ import util.Property.Key @@ -23,11 +23,11 @@ object 
Comments { */ class ContextDocstrings { - private val _docstrings: MutableSymbolMap[Comment] = newMutableSymbolMap + private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" val templateExpander: CommentExpander = new CommentExpander - def docstrings: Map[Symbol, Comment] = _docstrings.toMap + def docstrings: ReadOnlyMap[Symbol, Comment] = _docstrings def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) @@ -180,7 +180,7 @@ object Comments { protected def superComment(sym: Symbol)(using Context): Option[String] = allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "") - private val cookedDocComments = newMutableSymbolMap[String] + private val cookedDocComments = MutableSymbolMap[String]() /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by * missing sections of an inherited doc comment. diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 28cfe0d0a4f7..b2258dee613d 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -14,7 +14,7 @@ import Uniques._ import ast.Trees._ import ast.untpd import Flags.GivenOrImplicit -import util.{NoSource, SimpleIdentityMap, SourceFile} +import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet} import typer.{Implicits, ImportInfo, Inliner, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} import Nullables.{NotNullInfo, given _} import Implicits.ContextualImplicits @@ -289,7 +289,7 @@ object Contexts { private def lookup(key: Phase | SourceFile): Context = util.Stats.record("Context.related.lookup") if related == null then - related = SimpleIdentityMap.Empty + related = SimpleIdentityMap.empty null else related(key) @@ -534,7 +534,7 @@ object Contexts { def settings: ScalaSettings = base.settings def definitions: Definitions = 
base.definitions def platform: Platform = base.platform - def pendingUnderlying: mutable.HashSet[Type] = base.pendingUnderlying + def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes def uniques: util.HashSet[Type] = base.uniques @@ -838,30 +838,18 @@ object Contexts { def nextSymId: Int = { _nextSymId += 1; _nextSymId } /** Sources that were loaded */ - val sources: mutable.HashMap[AbstractFile, SourceFile] = new mutable.HashMap[AbstractFile, SourceFile] - val sourceNamed: mutable.HashMap[TermName, SourceFile] = new mutable.HashMap[TermName, SourceFile] + val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() + val sourceNamed: util.HashMap[TermName, SourceFile] = util.HashMap[TermName, SourceFile]() // Types state /** A table for hash consing unique types */ - private[core] val uniques: util.HashSet[Type] = new util.HashSet[Type](Config.initialUniquesCapacity) { - override def hash(x: Type): Int = x.hash - override def isEqual(x: Type, y: Type) = x.eql(y) - } + private[core] val uniques: Uniques = Uniques() /** A table for hash consing unique applied types */ - private[dotc] val uniqueAppliedTypes: AppliedUniques = new AppliedUniques + private[dotc] val uniqueAppliedTypes: AppliedUniques = AppliedUniques() /** A table for hash consing unique named types */ - private[core] val uniqueNamedTypes: NamedTypeUniques = new NamedTypeUniques - - private def uniqueSets = Map( - "uniques" -> uniques, - "uniqueAppliedTypes" -> uniqueAppliedTypes, - "uniqueNamedTypes" -> uniqueNamedTypes) - - /** A map that associates label and size of all uniques sets */ - def uniquesSizes: Map[String, (Int, Int, Int)] = - uniqueSets.transform((_, s) => (s.size, s.accesses, s.misses)) + private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques() var emptyTypeBounds: TypeBounds = null var emptyWildcardBounds: WildcardType = null @@ -881,7 +869,7 @@ object 
Contexts { /** The set of named types on which a currently active invocation * of underlying during a controlled operation exists. */ - private[core] val pendingUnderlying: mutable.HashSet[Type] = new mutable.HashSet[Type] + private[core] val pendingUnderlying: util.HashSet[Type] = util.HashSet[Type]() /** A map from ErrorType to associated message. We use this map * instead of storing messages directly in ErrorTypes in order @@ -925,15 +913,16 @@ object Contexts { charArray = new Array[Char](charArray.length * 2) charArray - def reset(): Unit = { - for ((_, set) <- uniqueSets) set.clear() + def reset(): Unit = + uniques.clear() + uniqueAppliedTypes.clear() + uniqueNamedTypes.clear() emptyTypeBounds = null emptyWildcardBounds = null errorTypeMsg.clear() sources.clear() sourceNamed.clear() comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer - } // Test that access is single threaded diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 9e4608fda3e0..0bbd62123195 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1603,7 +1603,7 @@ class Definitions { valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = - SimpleIdentityMap.Empty[Symbol] + SimpleIdentityMap.empty[Symbol] .updated(AnyClass, ObjectClass) .updated(AnyValClass, ObjectClass) .updated(SingletonClass, ObjectClass) diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 21afdcc85af6..73e98461046e 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -1259,7 +1259,8 @@ object Denotations { } recurSimple(path.length, wrap) } - recur(path) + if ctx.run == null then recur(path) + else 
ctx.run.staticRefs.getOrElseUpdate(path, recur(path)) } diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 1596b5fcdd7f..515163764c97 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -65,9 +65,9 @@ final class ProperGadtConstraint private( import dotty.tools.dotc.config.Printers.{gadts, gadtsConstr} def this() = this( - myConstraint = new OrderingConstraint(SimpleIdentityMap.Empty, SimpleIdentityMap.Empty, SimpleIdentityMap.Empty), - mapping = SimpleIdentityMap.Empty, - reverseMapping = SimpleIdentityMap.Empty + myConstraint = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty), + mapping = SimpleIdentityMap.empty, + reverseMapping = SimpleIdentityMap.empty ) /** Exposes ConstraintHandling.subsumes */ diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index 4185c5171089..6d3bd93f4a6d 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -8,7 +8,6 @@ import StdNames._ import NameTags._ import Contexts._ import Decorators._ -import collection.mutable import scala.annotation.internal.sharable @@ -18,10 +17,10 @@ object NameKinds { // These are sharable since all NameKinds are created eagerly at the start of the program // before any concurrent threads are forked. for this to work, NameKinds should never // be created lazily or in modules that start running after compilers are forked. 
- @sharable private val simpleNameKinds = new mutable.HashMap[Int, ClassifiedNameKind] - @sharable private val qualifiedNameKinds = new mutable.HashMap[Int, QualifiedNameKind] - @sharable private val numberedNameKinds = new mutable.HashMap[Int, NumberedNameKind] - @sharable private val uniqueNameKinds = new mutable.HashMap[String, UniqueNameKind] + @sharable private val simpleNameKinds = util.HashMap[Int, ClassifiedNameKind]() + @sharable private val qualifiedNameKinds = util.HashMap[Int, QualifiedNameKind]() + @sharable private val numberedNameKinds = util.HashMap[Int, NumberedNameKind]() + @sharable private val uniqueNameKinds = util.HashMap[String, UniqueNameKind]() /** A class for the info stored in a derived name */ abstract class NameInfo { @@ -393,8 +392,8 @@ object NameKinds { val Scala2MethodNameKinds: List[NameKind] = List(DefaultGetterName, ExtMethName, UniqueExtMethName) - def simpleNameKindOfTag : collection.Map[Int, ClassifiedNameKind] = simpleNameKinds - def qualifiedNameKindOfTag : collection.Map[Int, QualifiedNameKind] = qualifiedNameKinds - def numberedNameKindOfTag : collection.Map[Int, NumberedNameKind] = numberedNameKinds - def uniqueNameKindOfSeparator: collection.Map[String, UniqueNameKind] = uniqueNameKinds + def simpleNameKindOfTag : util.ReadOnlyMap[Int, ClassifiedNameKind] = simpleNameKinds + def qualifiedNameKindOfTag : util.ReadOnlyMap[Int, QualifiedNameKind] = qualifiedNameKinds + def numberedNameKindOfTag : util.ReadOnlyMap[Int, NumberedNameKind] = numberedNameKinds + def uniqueNameKindOfSeparator: util.ReadOnlyMap[String, UniqueNameKind] = uniqueNameKinds } diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index 61196f245dc8..d5e538abeac0 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -10,7 +10,7 @@ import StdNames.str import scala.internal.Chars.isIdentifierStart import collection.immutable import 
config.Config -import java.util.HashMap +import util.{LinearMap, HashSet} import scala.annotation.internal.sharable @@ -165,16 +165,15 @@ object Names { override def asTermName: TermName = this @sharable // because it is only modified in the synchronized block of toTypeName. - @volatile private var _typeName: TypeName = null + private var myTypeName: TypeName = null + // Note: no @volatile needed since type names are immutable and therefore safely published - override def toTypeName: TypeName = { - if (_typeName == null) + override def toTypeName: TypeName = + if myTypeName == null then synchronized { - if (_typeName == null) - _typeName = new TypeName(this) + if myTypeName == null then myTypeName = new TypeName(this) } - _typeName - } + myTypeName override def likeSpaced(name: Name): TermName = name.toTermName @@ -182,38 +181,16 @@ object Names { def underlying: TermName = unsupported("underlying") @sharable // because of synchronized block in `and` - private var derivedNames: immutable.Map[NameInfo, DerivedName] | HashMap[NameInfo, DerivedName] = - immutable.Map.empty[NameInfo, DerivedName] - - private def getDerived(info: NameInfo): DerivedName /* | Null */ = (derivedNames: @unchecked) match { - case derivedNames: immutable.AbstractMap[NameInfo, DerivedName] @unchecked => - if (derivedNames.contains(info)) derivedNames(info) else null - case derivedNames: HashMap[NameInfo, DerivedName] @unchecked => - derivedNames.get(info) - } - - private def putDerived(info: NameInfo, name: DerivedName): name.type = { - derivedNames match { - case derivedNames: immutable.Map[NameInfo, DerivedName] @unchecked => - if (derivedNames.size < 4) - this.derivedNames = derivedNames.updated(info, name) - else { - val newMap = new HashMap[NameInfo, DerivedName] - derivedNames.foreach { case (k, v) => newMap.put(k, v) } - newMap.put(info, name) - this.derivedNames = newMap - } - case derivedNames: HashMap[NameInfo, DerivedName] @unchecked => - derivedNames.put(info, name) - } - name - } 
+ private var derivedNames: LinearMap[NameInfo, DerivedName] = LinearMap.empty private def add(info: NameInfo): TermName = synchronized { - getDerived(info) match { - case null => putDerived(info, new DerivedName(this, info)) - case derivedName => derivedName - } + derivedNames.lookup(info) match + case null => + val derivedName = new DerivedName(this, info) + derivedNames = derivedNames.updated(info, derivedName) + derivedName + case derivedName => + derivedName } private def rewrap(underlying: TermName) = @@ -284,10 +261,9 @@ object Names { } /** A simple name is essentially an interned string */ - final class SimpleName(val start: Int, val length: Int, @sharable private[Names] var next: SimpleName) extends TermName { - // `next` is @sharable because it is only modified in the synchronized block of termName. + final class SimpleName(val start: Int, val length: Int) extends TermName { - /** The n'th character */ + /** The n'th character */ def apply(n: Int): Char = chrs(start + n) /** A character in this name satisfies predicate `p` */ @@ -528,27 +504,70 @@ object Names { override def debugString: String = s"${underlying.debugString}[$info]" } + /** The term name represented by the empty string */ + val EmptyTermName: SimpleName = SimpleName(-1, 0) + // Nametable - private final val InitialHashSize = 0x8000 - private final val InitialNameSize = 0x20000 - private final val fillFactor = 0.7 + inline val InitialNameSize = 0x20000 /** Memory to store all names sequentially. */ - @sharable // because it's only mutated in synchronized block of termName + @sharable // because it's only mutated in synchronized block of enterIfNew private[dotty] var chrs: Array[Char] = new Array[Char](InitialNameSize) /** The number of characters filled. */ - @sharable // because it's only mutated in synchronized block of termName + @sharable // because it's only mutated in synchronized block of enterIfNew private var nc = 0 - /** Hashtable for finding term names quickly. 
*/ - @sharable // because it's only mutated in synchronized block of termName - private var table = new Array[SimpleName](InitialHashSize) + /** Make sure the capacity of the character array is at least `n` */ + private def ensureCapacity(n: Int) = + if n > chrs.length then + val newchrs = new Array[Char](chrs.length * 2) + chrs.copyToArray(newchrs) + chrs = newchrs + + private class NameTable extends HashSet[SimpleName](initialCapacity = 0x10000, capacityMultiple = 2): + import util.Stats + + override def hash(x: SimpleName) = hashValue(chrs, x.start, x.length) // needed for resize + override def isEqual(x: SimpleName, y: SimpleName) = ??? // not needed + + def enterIfNew(cs: Array[Char], offset: Int, len: Int): SimpleName = + Stats.record(statsItem("put")) + val myTable = currentTable // could be outdated under parallel execution + var idx = hashValue(cs, offset, len) & (myTable.length - 1) + var name = myTable(idx).asInstanceOf[SimpleName] + while name != null do + if name.length == len && Names.equals(name.start, cs, offset, len) then + return name + Stats.record(statsItem("miss")) + idx = (idx + 1) & (myTable.length - 1) + name = myTable(idx).asInstanceOf[SimpleName] + Stats.record(statsItem("addEntryAt")) + synchronized { + if (myTable eq currentTable) && myTable(idx) == null then + // Our previous unsynchronized computation of the next free index is still correct. + // This relies on the fact that table entries go from null to non-null, and then + // stay the same. Note that we do not need the table or the entry in it to be + // volatile since SimpleNames are immutable, and hence safely published. + // The same holds for the chrs array. We might miss before the synchronized + // on published characters but that would make name comparison false, which + // means we end up in the synchronized block here, where we get the correct state. 
+ name = SimpleName(nc, len) + ensureCapacity(nc + len) + Array.copy(cs, offset, chrs, nc, len) + nc += len + addEntryAt(idx, name) + else + enterIfNew(cs, offset, len) + } - /** The number of defined names. */ - @sharable // because it's only mutated in synchronized block of termName - private var size = 1 + addEntryAt(0, EmptyTermName) + end NameTable + + /** Hashtable for finding term names quickly. */ + @sharable // because it's only mutated in synchronized block of enterIfNew + private val nameTable = NameTable() /** The hash of a name made of from characters cs[offset..offset+len-1]. */ private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = { @@ -574,62 +593,8 @@ object Names { /** Create a term name from the characters in cs[offset..offset+len-1]. * Assume they are already encoded. */ - def termName(cs: Array[Char], offset: Int, len: Int): SimpleName = synchronized { - util.Stats.record("termName") - val h = hashValue(cs, offset, len) & (table.length - 1) - - /** Make sure the capacity of the character array is at least `n` */ - def ensureCapacity(n: Int) = - if (n > chrs.length) { - val newchrs = new Array[Char](chrs.length * 2) - chrs.copyToArray(newchrs) - chrs = newchrs - } - - /** Enter characters into chrs array. 
*/ - def enterChars(): Unit = { - ensureCapacity(nc + len) - var i = 0 - while (i < len) { - chrs(nc + i) = cs(offset + i) - i += 1 - } - nc += len - } - - /** Rehash chain of names */ - def rehash(name: SimpleName): Unit = - if (name != null) { - val oldNext = name.next - val h = hashValue(chrs, name.start, name.length) & (table.size - 1) - name.next = table(h) - table(h) = name - rehash(oldNext) - } - - /** Make sure the hash table is large enough for the given load factor */ - def incTableSize() = { - size += 1 - if (size.toDouble / table.size > fillFactor) { - val oldTable = table - table = new Array[SimpleName](table.size * 2) - for (i <- 0 until oldTable.size) rehash(oldTable(i)) - } - } - - val next = table(h) - var name = next - while (name ne null) { - if (name.length == len && equals(name.start, cs, offset, len)) - return name - name = name.next - } - name = new SimpleName(nc, len, next) - enterChars() - table(h) = name - incTableSize() - name - } + def termName(cs: Array[Char], offset: Int, len: Int): SimpleName = + nameTable.enterIfNew(cs, offset, len) /** Create a type name from the characters in cs[offset..offset+len-1]. * Assume they are already encoded. 
@@ -660,11 +625,6 @@ object Names { /** Create a type name from a string */ def typeName(s: String): TypeName = typeName(s.toCharArray, 0, s.length) - table(0) = new SimpleName(-1, 0, null) - - /** The term name represented by the empty string */ - val EmptyTermName: TermName = table(0) - /** The type name represented by the empty string */ val EmptyTypeName: TypeName = EmptyTermName.toTypeName diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index af55456df539..c42344c1fce2 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -105,7 +105,7 @@ object OrderingConstraint { } @sharable - val empty = new OrderingConstraint(SimpleIdentityMap.Empty, SimpleIdentityMap.Empty, SimpleIdentityMap.Empty) + val empty = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty) } import OrderingConstraint._ diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index fc014ec6684f..a8c1a1da3078 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1564,27 +1564,27 @@ object SymDenotations { initPrivateWithin: Symbol) extends SymDenotation(symbol, maybeOwner, name, initFlags, initInfo, initPrivateWithin) { - import util.HashTable + import util.EqHashMap // ----- caches ------------------------------------------------------- private var myTypeParams: List[TypeSymbol] = null - private var fullNameCache: SimpleIdentityMap[QualifiedNameKind, Name] = SimpleIdentityMap.Empty + private var fullNameCache: SimpleIdentityMap[QualifiedNameKind, Name] = SimpleIdentityMap.empty - private var myMemberCache: HashTable[Name, PreDenotation] = null + private var myMemberCache: EqHashMap[Name, PreDenotation] = null private var 
myMemberCachePeriod: Period = Nowhere /** A cache from types T to baseType(T, C) */ - type BaseTypeMap = java.util.IdentityHashMap[CachedType, Type] + type BaseTypeMap = EqHashMap[CachedType, Type] private var myBaseTypeCache: BaseTypeMap = null private var myBaseTypeCachePeriod: Period = Nowhere private var baseDataCache: BaseData = BaseData.None private var memberNamesCache: MemberNames = MemberNames.None - private def memberCache(using Context): HashTable[Name, PreDenotation] = { + private def memberCache(using Context): EqHashMap[Name, PreDenotation] = { if (myMemberCachePeriod != ctx.period) { - myMemberCache = HashTable() + myMemberCache = EqHashMap() myMemberCachePeriod = ctx.period } myMemberCache @@ -1592,7 +1592,7 @@ object SymDenotations { private def baseTypeCache(using Context): BaseTypeMap = { if !currentHasSameBaseTypesAs(myBaseTypeCachePeriod) then - myBaseTypeCache = new BaseTypeMap + myBaseTypeCache = BaseTypeMap() myBaseTypeCachePeriod = ctx.period myBaseTypeCache } @@ -1613,12 +1613,12 @@ object SymDenotations { } def invalidateMemberCaches(sym: Symbol)(using Context): Unit = - if myMemberCache != null then myMemberCache.invalidate(sym.name) + if myMemberCache != null then myMemberCache.remove(sym.name) if !sym.flagsUNSAFE.is(Private) then invalidateMemberNamesCache() if sym.isWrappedToplevelDef then val outerCache = sym.owner.owner.asClass.classDenot.myMemberCache - if outerCache != null then outerCache.invalidate(sym.name) + if outerCache != null then outerCache.remove(sym.name) override def copyCaches(from: SymDenotation, phase: Phase)(using Context): this.type = { from match { @@ -1823,7 +1823,7 @@ object SymDenotations { */ def replace(prev: Symbol, replacement: Symbol)(using Context): Unit = { unforcedDecls.openForMutations.replace(prev, replacement) - if (myMemberCache != null) myMemberCache.invalidate(replacement.name) + if (myMemberCache != null) myMemberCache.remove(replacement.name) } /** Delete symbol from current scope. 
@@ -1832,7 +1832,7 @@ object SymDenotations { */ def delete(sym: Symbol)(using Context): Unit = { info.decls.openForMutations.unlink(sym) - if (myMemberCache != null) myMemberCache.invalidate(sym.name) + if (myMemberCache != null) myMemberCache.remove(sym.name) if (!sym.flagsUNSAFE.is(Private)) invalidateMemberNamesCache() } @@ -1861,7 +1861,7 @@ object SymDenotations { var denots: PreDenotation = memberCache.lookup(name) if denots == null then denots = computeMembersNamed(name) - memberCache.enter(name, denots) + memberCache(name) = denots else if Config.checkCacheMembersNamed then val denots1 = computeMembersNamed(name) assert(denots.exists == denots1.exists, s"cache inconsistency: cached: $denots, computed $denots1, name = $name, owner = $this") @@ -1906,14 +1906,16 @@ object SymDenotations { /** Compute tp.baseType(this) */ final def baseTypeOf(tp: Type)(using Context): Type = { val btrCache = baseTypeCache - def inCache(tp: Type) = btrCache.get(tp) != null + def inCache(tp: Type) = tp match + case tp: CachedType => btrCache.contains(tp) + case _ => false def record(tp: CachedType, baseTp: Type) = { if (Stats.monitored) { Stats.record("basetype cache entries") if (!baseTp.exists) Stats.record("basetype cache NoTypes") } if (!tp.isProvisional) - btrCache.put(tp, baseTp) + btrCache(tp) = baseTp else btrCache.remove(tp) // Remove any potential sentinel value } @@ -1926,7 +1928,7 @@ object SymDenotations { def recur(tp: Type): Type = try { tp match { case tp: CachedType => - val baseTp = btrCache.get(tp) + val baseTp = btrCache.lookup(tp) if (baseTp != null) return ensureAcyclic(baseTp) case _ => } @@ -1945,7 +1947,7 @@ object SymDenotations { } def computeTypeRef = { - btrCache.put(tp, NoPrefix) + btrCache(tp) = NoPrefix val tpSym = tp.symbol tpSym.denot match { case clsd: ClassDenotation => @@ -1980,7 +1982,7 @@ object SymDenotations { case tp @ AppliedType(tycon, args) => def computeApplied = { - btrCache.put(tp, NoPrefix) + btrCache(tp) = NoPrefix val baseTp = 
if (tycon.typeSymbol eq symbol) tp else (tycon.typeParams: @unchecked) match { @@ -2041,7 +2043,9 @@ object SymDenotations { } catch { case ex: Throwable => - btrCache.remove(tp) + tp match + case tp: CachedType => btrCache.remove(tp) + case _ => throw ex } @@ -2609,7 +2613,7 @@ object SymDenotations { } private class MemberNamesImpl(createdAt: Period) extends InheritedCacheImpl(createdAt) with MemberNames { - private var cache: SimpleIdentityMap[NameFilter, Set[Name]] = SimpleIdentityMap.Empty + private var cache: SimpleIdentityMap[NameFilter, Set[Name]] = SimpleIdentityMap.empty final def isValid(using Context): Boolean = cache != null && isValidAt(ctx.phase) @@ -2622,7 +2626,7 @@ object SymDenotations { */ def invalidate(): Unit = if (cache != null) - if (locked) cache = SimpleIdentityMap.Empty + if (locked) cache = SimpleIdentityMap.empty else { cache = null invalidateDependents() diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 7eaa1f411e07..9b0b42734381 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -30,7 +30,7 @@ import reporting.Message import collection.mutable import io.AbstractFile import language.implicitConversions -import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos} +import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos, EqHashMap} import scala.collection.JavaConverters._ import scala.annotation.internal.sharable import config.Printers.typr @@ -495,61 +495,7 @@ object Symbols { /** The current class */ def currentClass(using Context): ClassSymbol = ctx.owner.enclosingClass.asClass - /* Mutable map from symbols any T */ - class MutableSymbolMap[T](private[Symbols] val value: java.util.IdentityHashMap[Symbol, T]) extends AnyVal { - - def apply(sym: Symbol): T = value.get(sym) - - def get(sym: Symbol): Option[T] = Option(value.get(sym)) - - def getOrElse[U >: T](sym: Symbol, default: 
=> U): U = { - val v = value.get(sym) - if (v != null) v else default - } - - def getOrElseUpdate(sym: Symbol, op: => T): T = { - val v = value.get(sym) - if (v != null) v - else { - val v = op - assert(v != null) - value.put(sym, v) - v - } - } - - def update(sym: Symbol, x: T): Unit = { - assert(x != null) - value.put(sym, x) - } - def put(sym: Symbol, x: T): T = { - assert(x != null) - value.put(sym, x) - } - - def -=(sym: Symbol): Unit = value.remove(sym) - def remove(sym: Symbol): Option[T] = Option(value.remove(sym)) - - def contains(sym: Symbol): Boolean = value.containsKey(sym) - - def isEmpty: Boolean = value.isEmpty - - def clear(): Unit = value.clear() - - def filter(p: ((Symbol, T)) => Boolean): Map[Symbol, T] = - value.asScala.toMap.filter(p) - - def iterator: Iterator[(Symbol, T)] = value.asScala.iterator - - def keysIterator: Iterator[Symbol] = value.keySet().asScala.iterator - - def toMap: Map[Symbol, T] = value.asScala.toMap - - override def toString: String = value.asScala.toString() - } - - inline def newMutableSymbolMap[T]: MutableSymbolMap[T] = - new MutableSymbolMap(new java.util.IdentityHashMap[Symbol, T]()) + type MutableSymbolMap[T] = EqHashMap[Symbol, T] // ---- Factory methods for symbol creation ---------------------- // diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index f0721fd648bd..330124c39da3 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -49,7 +49,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling needsGc = false if Config.checkTypeComparerReset then checkReset() - private var pendingSubTypes: mutable.Set[(Type, Type)] = null + private var pendingSubTypes: util.MutableSet[(Type, Type)] = null private var recCount = 0 private var monitored = false @@ -202,7 +202,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends 
ConstraintHandling def monitoredIsSubType = { if (pendingSubTypes == null) { - pendingSubTypes = new mutable.HashSet[(Type, Type)] + pendingSubTypes = util.HashSet[(Type, Type)]() report.log(s"!!! deep subtype recursion involving ${tp1.show} <:< ${tp2.show}, constraint = ${state.constraint.show}") report.log(s"!!! constraint = ${constraint.show}") //if (ctx.settings.YnoDeepSubtypes.value) { @@ -231,7 +231,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } } val p = (normalize(tp1), normalize(tp2)) - !pendingSubTypes(p) && { + !pendingSubTypes.contains(p) && { try { pendingSubTypes += p firstTry diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 3d1cdaf759cf..8cfb7759b6d0 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -187,7 +187,7 @@ object TypeOps: /** a faster version of cs1 intersect cs2 */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = { val cs2AsSet = new util.HashSet[ClassSymbol](128) - cs2.foreach(cs2AsSet.addEntry) + cs2.foreach(cs2AsSet += _) cs1.filter(cs2AsSet.contains) } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 64419a101578..8e328c0c882a 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3405,7 +3405,7 @@ object Types { abstract class LambdaTypeCompanion[N <: Name, PInfo <: Type, LT <: LambdaType] { def syntheticParamName(n: Int): N - @sharable private val memoizedNames = new mutable.HashMap[Int, List[N]] + @sharable private val memoizedNames = util.HashMap[Int, List[N]]() def syntheticParamNames(n: Int): List[N] = synchronized { memoizedNames.getOrElseUpdate(n, (0 until n).map(syntheticParamName).toList) } @@ -4321,7 +4321,7 @@ object Types { def underlying(using Context): Type = bound private var 
myReduced: Type = null - private var reductionContext: mutable.Map[Type, Type] = null + private var reductionContext: util.MutableMap[Type, Type] = null override def tryNormalize(using Context): Type = reduced.normalized @@ -4340,7 +4340,7 @@ object Types { } def updateReductionContext(footprint: collection.Set[Type]): Unit = - reductionContext = new mutable.HashMap + reductionContext = util.HashMap() for (tp <- footprint) reductionContext(tp) = contextInfo(tp) typr.println(i"footprint for $this $hashCode: ${footprint.toList.map(x => (x, contextInfo(x)))}%, %") @@ -5527,18 +5527,14 @@ object Types { def apply(x: Unit, tp: Type): Unit = foldOver(p(tp), tp) } - class TypeHashSet extends util.HashSet[Type](64): - override def hash(x: Type): Int = System.identityHashCode(x) - override def isEqual(x: Type, y: Type) = x.eq(y) - class NamedPartsAccumulator(p: NamedType => Boolean)(using Context) extends TypeAccumulator[List[NamedType]]: def maybeAdd(xs: List[NamedType], tp: NamedType): List[NamedType] = if p(tp) then tp :: xs else xs - val seen = TypeHashSet() + val seen = util.HashSet[Type]() def apply(xs: List[NamedType], tp: Type): List[NamedType] = if seen contains tp then xs else - seen.addEntry(tp) + seen += tp tp match case tp: TypeRef => foldOver(maybeAdd(xs, tp), tp) @@ -5572,11 +5568,11 @@ object Types { } class TypeSizeAccumulator(using Context) extends TypeAccumulator[Int] { - val seen = new java.util.IdentityHashMap[Type, Type] + var seen = util.HashSet[Type](initialCapacity = 8) def apply(n: Int, tp: Type): Int = - if (seen.get(tp) != null) n + if seen.contains(tp) then n else { - seen.put(tp, tp) + seen += tp tp match { case tp: AppliedType => foldOver(n + 1, tp) @@ -5593,11 +5589,11 @@ object Types { } class CoveringSetAccumulator(using Context) extends TypeAccumulator[Set[Symbol]] { - val seen = new java.util.IdentityHashMap[Type, Type] + var seen = util.HashSet[Type](initialCapacity = 8) def apply(cs: Set[Symbol], tp: Type): Set[Symbol] = - if 
(seen.get(tp) != null) cs + if seen.contains(tp) then cs else { - seen.put(tp, tp) + seen += tp tp match { case tp if tp.isTopType || tp.isBottomType => cs diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala index d02a728bb81c..5b1ae1a499e9 100644 --- a/compiler/src/dotty/tools/dotc/core/Uniques.scala +++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala @@ -4,93 +4,71 @@ package core import Types._, Contexts._, util.Stats._, Hashable._, Names._ import config.Config import Decorators._ -import util.HashSet +import util.{HashSet, Stats} + +class Uniques extends HashSet[Type](Config.initialUniquesCapacity): + override def hash(x: Type): Int = x.hash + override def isEqual(x: Type, y: Type) = x.eql(y) /** Defines operation `unique` for hash-consing types. * Also defines specialized hash sets for hash consing uniques of a specific type. * All sets offer a `enterIfNew` method which checks whether a type * with the given parts exists already and creates a new one if not. 
*/ -object Uniques { +object Uniques: - private def recordCaching(tp: Type): Unit = recordCaching(tp.hash, tp.getClass) - private def recordCaching(h: Int, clazz: Class[?]): Unit = - if (h == NotCached) { - record("uncached-types") - record(s"uncached: $clazz") - } - else { - record("cached-types") - record(s"cached: $clazz") - } + private inline def recordCaching(tp: Type): Unit = recordCaching(tp.hash, tp.getClass) + private inline def recordCaching(h: Int, clazz: Class[?]): Unit = + if monitored then + if h == NotCached then + record("uncached-types") + record(s"uncached: $clazz") + else + record("cached-types") + record(s"cached: $clazz") - def unique[T <: Type](tp: T)(using Context): T = { - if (monitored) recordCaching(tp) - if (tp.hash == NotCached) tp - else if (monitored) { - val size = ctx.uniques.size - val result = ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T] - if (ctx.uniques.size > size) record(s"fresh unique ${tp.getClass}") - result - } - else ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T] - } - /* !!! 
DEBUG - ensuring ( - result => tp.toString == result.toString || { - println(s"cache mismatch; tp = $tp, cached = $result") - false - } - ) - */ + def unique[T <: Type](tp: T)(using Context): T = + recordCaching(tp) + if tp.hash == NotCached then tp + else ctx.uniques.put(tp).asInstanceOf[T] - final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity) with Hashable { + final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity * 4) with Hashable: override def hash(x: NamedType): Int = x.hash - private def findPrevious(h: Int, prefix: Type, designator: Designator): NamedType = { - var e = findEntryByHash(h) - while (e != null) { - if ((e.prefix eq prefix) && (e.designator eq designator)) return e - e = nextEntryByHash(h) - } - e - } - - def enterIfNew(prefix: Type, designator: Designator, isTerm: Boolean)(using Context): NamedType = { + def enterIfNew(prefix: Type, designator: Designator, isTerm: Boolean)(using Context): NamedType = val h = doHash(null, designator, prefix) - if (monitored) recordCaching(h, classOf[NamedType]) + if monitored then recordCaching(h, classOf[NamedType]) def newType = if (isTerm) new CachedTermRef(prefix, designator, h) else new CachedTypeRef(prefix, designator, h) - if (h == NotCached) newType - else { - val r = findPrevious(h, prefix, designator) - if ((r ne null) && (r.isTerm == isTerm)) r else addEntryAfterScan(newType) - } - } - } + if h == NotCached then newType + else + Stats.record(statsItem("put")) + var idx = index(h) + var e = entryAt(idx) + while e != null do + if (e.prefix eq prefix) && (e.designator eq designator) && (e.isTerm == isTerm) then return e + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, newType) + end NamedTypeUniques - final class AppliedUniques extends HashSet[AppliedType](Config.initialUniquesCapacity) with Hashable { + final class AppliedUniques extends HashSet[AppliedType](Config.initialUniquesCapacity * 2) with Hashable: override def hash(x: 
AppliedType): Int = x.hash - private def findPrevious(h: Int, tycon: Type, args: List[Type]): AppliedType = { - var e = findEntryByHash(h) - while (e != null) { - if ((e.tycon eq tycon) && e.args.eqElements(args)) return e - e = nextEntryByHash(h) - } - e - } - - def enterIfNew(tycon: Type, args: List[Type]): AppliedType = { + def enterIfNew(tycon: Type, args: List[Type]): AppliedType = val h = doHash(null, tycon, args) def newType = new CachedAppliedType(tycon, args, h) - if (monitored) recordCaching(h, classOf[CachedAppliedType]) - if (h == NotCached) newType - else { - val r = findPrevious(h, tycon, args) - if (r ne null) r else addEntryAfterScan(newType) - } - } - } -} + if monitored then recordCaching(h, classOf[CachedAppliedType]) + if h == NotCached then newType + else + Stats.record(statsItem("put")) + var idx = index(h) + var e = entryAt(idx) + while e != null do + if (e.tycon eq tycon) && e.args.eqElements(args) then return e + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, newType) + end AppliedUniques +end Uniques diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index f558defee1d5..f4c95001ceaa 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -732,15 +732,13 @@ class ClassfileParser( classTranslation.flags(jflags), getScope(jflags)) - for (entry <- innerClasses.values) { + for entry <- innerClasses.valuesIterator do // create a new class member for immediate inner classes - if (entry.outerName == currentClassName) { + if entry.outerName == currentClassName then val file = ctx.platform.classPath.findClassFile(entry.externalName.toString) getOrElse { throw new AssertionError(entry.externalName) } enterClassAndModule(entry, file, entry.jflags) - } - } } // Nothing$ and Null$ were incorrectly emitted with a Scala attribute @@ -937,14 +935,14 
@@ class ClassfileParser( s"$originalName in $outerName($externalName)" } - object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] { + object innerClasses extends util.HashMap[Name, InnerClassEntry] { /** Return the Symbol of the top level class enclosing `name`, * or 'name's symbol if no entry found for `name`. */ def topLevelClass(name: Name)(using Context): Symbol = { - val tlName = if (isDefinedAt(name)) { + val tlName = if (contains(name)) { var entry = this(name) - while (isDefinedAt(entry.outerName)) + while (contains(entry.outerName)) entry = this(entry.outerName) entry.outerName } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala index 3b12f6d773c3..5c18594b7ebe 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala @@ -1,19 +1,19 @@ -package dotty.tools.dotc.core.tasty +package dotty.tools.dotc +package core.tasty -import dotty.tools.dotc.core.Comments.Comment -import dotty.tools.dotc.util.Spans.Span +import core.Comments.Comment +import util.Spans.Span +import util.HashMap import dotty.tools.tasty.{TastyReader, TastyBuffer} import TastyBuffer.Addr -import scala.collection.mutable.HashMap - import java.nio.charset.Charset class CommentUnpickler(reader: TastyReader) { import reader._ - private[tasty] lazy val comments: Map[Addr, Comment] = { + private[tasty] lazy val comments: HashMap[Addr, Comment] = { val comments = new HashMap[Addr, Comment] while (!isAtEnd) { val addr = readAddr() @@ -25,7 +25,7 @@ class CommentUnpickler(reader: TastyReader) { comments(addr) = Comment(position, rawComment) } } - comments.toMap + comments } def commentAt(addr: Addr): Option[Comment] = diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala index 4d15b39999f6..872f60837515 
100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala @@ -15,14 +15,14 @@ import Names.TermName class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { import reader._ - private var mySpans: mutable.HashMap[Addr, Span] = _ - private var mySourcePaths: mutable.HashMap[Addr, String] = _ + private var mySpans: util.HashMap[Addr, Span] = _ + private var mySourcePaths: util.HashMap[Addr, String] = _ private var isDefined = false def ensureDefined(): Unit = { if (!isDefined) { - mySpans = new mutable.HashMap[Addr, Span] - mySourcePaths = new mutable.HashMap[Addr, String] + mySpans = util.HashMap[Addr, Span]() + mySourcePaths = util.HashMap[Addr, String]() var curIndex = 0 var curStart = 0 var curEnd = 0 @@ -50,12 +50,12 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { } } - private[tasty] def spans: Map[Addr, Span] = { + private[tasty] def spans: util.ReadOnlyMap[Addr, Span] = { ensureDefined() mySpans } - private[tasty] def sourcePaths: Map[Addr, String] = { + private[tasty] def sourcePaths: util.ReadOnlyMap[Addr, String] = { ensureDefined() mySourcePaths } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 241e31b568a2..575f58e273bf 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -42,21 +42,21 @@ class TreePickler(pickler: TastyPickler) { import pickler.nameBuffer.nameIndex import tpd._ - private val symRefs = Symbols.newMutableSymbolMap[Addr] - private val forwardSymRefs = Symbols.newMutableSymbolMap[List[Addr]] - private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's not compatible with null + private val symRefs = Symbols.MutableSymbolMap[Addr](256) + private val forwardSymRefs = 
Symbols.MutableSymbolMap[List[Addr]]() + private val pickledTypes = util.EqHashMap[Type, Addr]() /** A list of annotation trees for every member definition, so that later * parallel position pickling does not need to access and force symbols. */ - private val annotTrees = util.HashTable[untpd.MemberDef, mutable.ListBuffer[Tree]]() + private val annotTrees = util.EqHashMap[untpd.MemberDef, mutable.ListBuffer[Tree]]() /** A map from member definitions to their doc comments, so that later * parallel comment pickling does not need to access symbols of trees (which * would involve accessing symbols of named types and possibly changing phases * in doing so). */ - private val docStrings = util.HashTable[untpd.MemberDef, Comment]() + private val docStrings = util.EqHashMap[untpd.MemberDef, Comment]() def treeAnnots(tree: untpd.MemberDef): List[Tree] = val ts = annotTrees.lookup(tree) @@ -169,14 +169,14 @@ class TreePickler(pickler: TastyPickler) { def pickleType(tpe0: Type, richTypes: Boolean = false)(using Context): Unit = { val tpe = tpe0.stripTypeVar try { - val prev = pickledTypes.get(tpe) + val prev: Addr | Null = pickledTypes.lookup(tpe) if (prev == null) { - pickledTypes.put(tpe, currentAddr) + pickledTypes(tpe) = currentAddr pickleNewType(tpe, richTypes) } else { writeByte(SHAREDtype) - writeRef(prev.asInstanceOf[Addr]) + writeRef(prev.uncheckedNN) } } catch { @@ -244,9 +244,9 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe) } case tpe: RecThis => writeByte(RECthis) - val binderAddr = pickledTypes.get(tpe.binder) + val binderAddr: Addr | Null = pickledTypes.lookup(tpe.binder) assert(binderAddr != null, tpe.binder) - writeRef(binderAddr.asInstanceOf[Addr]) + writeRef(binderAddr.uncheckedNN) case tpe: SkolemType => pickleType(tpe.info) case tpe: RefinedType => @@ -314,11 +314,11 @@ class TreePickler(pickler: TastyPickler) { } def pickleParamRef(tpe: ParamRef)(using Context): Boolean = { - val binder = 
pickledTypes.get(tpe.binder) + val binder: Addr | Null = pickledTypes.lookup(tpe.binder) val pickled = binder != null if (pickled) { writeByte(PARAMtype) - withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) } + withLength { writeRef(binder.uncheckedNN); writeNat(tpe.paramNum) } } pickled } @@ -349,7 +349,7 @@ class TreePickler(pickler: TastyPickler) { docCtx <- ctx.docCtx comment <- docCtx.docstring(sym) do - docStrings.enter(mdef, comment) + docStrings(mdef) = comment } def pickleParam(tree: Tree)(using Context): Unit = { @@ -605,7 +605,7 @@ class TreePickler(pickler: TastyPickler) { else { val refineCls = refinements.head.symbol.owner.asClass registerDef(refineCls) - pickledTypes.put(refineCls.typeRef, currentAddr) + pickledTypes(refineCls.typeRef) = currentAddr writeByte(REFINEDtpt) refinements.foreach(preRegister) withLength { pickleTree(parent); refinements.foreach(pickleTree) } @@ -757,7 +757,7 @@ class TreePickler(pickler: TastyPickler) { var treeBuf = annotTrees.lookup(mdef) if treeBuf == null then treeBuf = new mutable.ListBuffer[Tree] - annotTrees.enter(mdef, treeBuf) + annotTrees(mdef) = treeBuf treeBuf += ann.tree // ---- main entry points --------------------------------------- diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index cb4a6d9e99af..76cf60d5e3be 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -162,12 +162,12 @@ object Plugin { case Failure(e) => Failure(e) }) - val seen = mutable.HashSet[String]() + val seen = util.HashSet[String]() val enabled = (fromPaths ::: fromDirs) map(_.flatMap { case (classname, loader) => Plugin.load(classname, loader).flatMap { clazz => val plugin = instantiate(clazz) - if (seen(classname)) // a nod to scala/bug#7494, take the plugin classes distinctly + if (seen.contains(classname)) // a nod to scala/bug#7494, take the plugin classes distinctly 
Failure(new PluginLoadException(plugin.name, s"Ignoring duplicate plugin ${plugin.name} (${classname})")) else if (ignoring contains plugin.name) Failure(new PluginLoadException(plugin.name, s"Disabling plugin ${plugin.name}")) diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 31bfb9772bcb..321aadac4130 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import core._ @@ -23,7 +24,7 @@ abstract class AccessProxies { import AccessProxies._ /** accessor -> accessed */ - private val accessedBy = newMutableSymbolMap[Symbol] + private val accessedBy = MutableSymbolMap[Symbol]() /** Given the name of an accessor, is the receiver of the call to accessed obtained * as a parameterer? @@ -35,7 +36,7 @@ abstract class AccessProxies { * So a second call of the same method will yield the empty list. 
*/ private def accessorDefs(cls: Symbol)(using Context): Iterator[DefDef] = - for (accessor <- cls.info.decls.iterator; accessed <- accessedBy.remove(accessor)) yield + for (accessor <- cls.info.decls.iterator; accessed <- accessedBy.remove(accessor).toOption) yield polyDefDef(accessor.asTerm, tps => argss => { def numTypeParams = accessed.info match { case info: PolyType => info.paramNames.length diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index 8ba39ea3ad8a..038201d0afe6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -36,7 +36,7 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { private var toBeRemoved = immutable.Set[Symbol]() private val bridges = mutable.ListBuffer[Tree]() private val bridgesScope = newScope - private val bridgeTarget = newMutableSymbolMap[Symbol] + private val bridgeTarget = MutableSymbolMap[Symbol]() def bridgePosFor(member: Symbol): SrcPos = (if (member.owner == root && member.span.exists) member else root).srcPos diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 2517b58ea617..7b6d7ee64cd2 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -26,11 +26,11 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase = override def runsAfterGroupsOf: Set[String] = Set(LiftTry.name) // lifting tries changes what variables are considered to be captured - private[this] var Captured: Store.Location[collection.Set[Symbol]] = _ + private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = _ private def captured(using Context) = ctx.store(Captured) override def initContext(ctx: FreshContext): Unit = - Captured = ctx.addLocation(Set.empty) + Captured = 
ctx.addLocation(util.ReadOnlySet.empty) private class RefInfo(using Context) { /** The classes for which a Ref type exists. */ @@ -54,7 +54,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase = } private class CollectCaptured extends TreeTraverser { - private val captured = mutable.HashSet[Symbol]() + private val captured = util.HashSet[Symbol]() def traverse(tree: Tree)(using Context) = tree match { case id: Ident => val sym = id.symbol @@ -68,7 +68,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase = case _ => traverseChildren(tree) } - def runOver(tree: Tree)(using Context): collection.Set[Symbol] = { + def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = { traverse(tree) captured } diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala index 23e5d12622b4..d308cea9f6d7 100644 --- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala +++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import core._ @@ -115,13 +116,13 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => && (((cls.owner.name eq nme.DOLLAR_NEW) && cls.owner.isAllOf(Private|Synthetic)) || cls.owner.isAllOf(EnumCase)) && cls.owner.owner.linkedClass.derivesFromJavaEnum - private val enumCaseOrdinals: MutableSymbolMap[Int] = newMutableSymbolMap + private val enumCaseOrdinals = MutableSymbolMap[Int]() private def registerEnumClass(cls: Symbol)(using Context): Unit = - cls.children.zipWithIndex.foreach(enumCaseOrdinals.put) + cls.children.zipWithIndex.foreach(enumCaseOrdinals.update) private def ordinalFor(enumCase: Symbol): Int = - enumCaseOrdinals.remove(enumCase).get + enumCaseOrdinals.remove(enumCase).nn /** 1. 
If this is an enum class, add $name and $ordinal parameters to its * parameter accessors and pass them on to the java.lang.Enum constructor. diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index d2c772e472a5..546c4e18c633 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -140,24 +140,21 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete extensionMeth } - private val extensionDefs = newMutableSymbolMap[mutable.ListBuffer[Tree]] - // TODO: this is state and should be per-run + private val extensionDefs = MutableSymbolMap[mutable.ListBuffer[Tree]]() // todo: check that when transformation finished map is empty override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = - if (isDerivedValueClass(ctx.owner)) + if isDerivedValueClass(ctx.owner) then /* This is currently redundant since value classes may not wrap over other value classes anyway. 
checkNonCyclic(ctx.owner.pos, Set(), ctx.owner) */ tree - else if (ctx.owner.isStaticOwner) - extensionDefs remove tree.symbol.owner match { - case Some(defns) if defns.nonEmpty => - cpy.Template(tree)(body = tree.body ++ - defns.map(transformFollowing(_))) + else if ctx.owner.isStaticOwner then + extensionDefs.remove(tree.symbol.owner) match + case defns: mutable.ListBuffer[Tree] if defns.nonEmpty => + cpy.Template(tree)(body = tree.body ++ defns.map(transformFollowing(_))) case _ => tree - } else tree override def transformDefDef(tree: tpd.DefDef)(using Context): tpd.Tree = diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index 27150e727532..7e5ea9c81b2a 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -91,7 +91,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => } private val NoGetterNeededFlags = Method | Param | JavaDefined | JavaStatic - val newSetters = mutable.HashSet[Symbol]() + val newSetters = util.HashSet[Symbol]() def ensureSetter(sym: TermSymbol)(using Context) = if !sym.setter.exists then diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala index 97126df3a6d6..5f8f9c91faf3 100644 --- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala +++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala @@ -15,7 +15,7 @@ import Names._ import Constants.Constant -/** The phase is enabled if a -Yinstrument-... option is set. +/** The phase is enabled if the -Yinstrument option is set. * If enabled, it counts the number of closures or allocations for each source position. * It does this by generating a call to dotty.tools.dotc.util.Stats.doRecord. 
*/ @@ -25,8 +25,7 @@ class Instrumentation extends MiniPhase { thisPhase => override def phaseName: String = "instrumentation" override def isEnabled(using Context) = - ctx.settings.YinstrumentClosures.value || - ctx.settings.YinstrumentAllocations.value + ctx.settings.Yinstrument.value private val namesOfInterest = List( "::", "+=", "toString", "newArray", "box", "toCharArray", diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index 1653ef4acd3d..34a26867037e 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -35,7 +35,7 @@ class NonLocalReturns extends MiniPhase { nonLocalReturnControl.appliedTo(argtype) /** A hashmap from method symbols to non-local return keys */ - private val nonLocalReturnKeys = newMutableSymbolMap[TermSymbol] + private val nonLocalReturnKeys = MutableSymbolMap[TermSymbol]() /** Return non-local return key for given method */ private def nonLocalReturnKey(meth: Symbol)(using Context) = @@ -83,10 +83,9 @@ class NonLocalReturns extends MiniPhase { } override def transformDefDef(tree: DefDef)(using Context): Tree = - nonLocalReturnKeys.remove(tree.symbol) match { - case Some(key) => cpy.DefDef(tree)(rhs = nonLocalReturnTry(tree.rhs, key, tree.symbol)) - case _ => tree - } + nonLocalReturnKeys.remove(tree.symbol) match + case key: TermSymbol => cpy.DefDef(tree)(rhs = nonLocalReturnTry(tree.rhs, key, tree.symbol)) + case null => tree override def transformReturn(tree: Return)(using Context): Tree = if isNonLocalReturn(tree) then diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index c2e7b20fe14e..b3b82856fd80 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -64,12 +64,11 @@ object 
OverridingPairs { decls } - private val subParents = { - val subParents = newMutableSymbolMap[BitSet] + private val subParents = + val subParents = MutableSymbolMap[BitSet]() for (bc <- base.info.baseClasses) subParents(bc) = BitSet(parents.indices.filter(parents(_).derivesFrom(bc)): _*) subParents - } private def hasCommonParentAsSubclass(cls1: Symbol, cls2: Symbol): Boolean = (subParents(cls1) intersect subParents(cls2)).nonEmpty @@ -78,7 +77,7 @@ object OverridingPairs { * (maybe excluded because of hasCommonParentAsSubclass). * These will not appear as overriding */ - private val visited = new mutable.HashSet[Symbol] + private val visited = util.HashSet[Symbol]() /** The current entry candidate for overriding */ diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index dc1c26299c88..012f694f7ee0 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -92,7 +92,7 @@ object PatternMatcher { /** A map from variable symbols to their defining trees * and from labels to their defining plans */ - private val initializer = newMutableSymbolMap[Tree] + private val initializer = MutableSymbolMap[Tree]() private def newVar(rhs: Tree, flags: FlagSet): TermSymbol = newSymbol(ctx.owner, PatMatStdBinderName.fresh(), Synthetic | Case | flags, diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index a2dd74ac9907..5e84def109bb 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import dotty.tools.dotc.ast.{Trees, tpd} @@ -59,7 +60,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { ctx.owner.enclosingClass != invalidEnclClass /** List 
buffers for new accessor definitions, indexed by class */ - private val accDefs = newMutableSymbolMap[mutable.ListBuffer[Tree]] + private val accDefs = MutableSymbolMap[mutable.ListBuffer[Tree]]() /** A super accessor call corresponding to `sel` */ private def superAccessorCall(sel: Select, mixName: Name = nme.EMPTY)(using Context) = { @@ -205,7 +206,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { def wrapTemplate(tree: Template)(op: Template => Template)(using Context): Template = { accDefs(currentClass) = new mutable.ListBuffer[Tree] val impl = op(tree) - val accessors = accDefs.remove(currentClass).get + val accessors = accDefs.remove(currentClass).nn if (accessors.isEmpty) impl else { val (params, rest) = impl.body span { diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index f66944385893..d6b17f3c82e1 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -13,6 +13,7 @@ import core.StdNames.nme import core.Symbols._ import reporting._ import transform.MegaPhase.MiniPhase +import util.LinearSet import scala.collection.mutable @@ -258,7 +259,7 @@ class TailRec extends MiniPhase { } /** Symbols of Labeled blocks that are in tail position. */ - private val tailPositionLabeledSyms = new mutable.HashSet[Symbol]() + private var tailPositionLabeledSyms = LinearSet.empty[Symbol] private var inTailPosition = true @@ -283,7 +284,7 @@ class TailRec extends MiniPhase { * a recursive call of a @tailrec annotated method (i.e. `isMandatory`). 
*/ private def isTraversalNeeded = - isMandatory || tailPositionLabeledSyms.nonEmpty + isMandatory || tailPositionLabeledSyms.size > 0 def noTailTransform(tree: Tree)(using Context): Tree = if (isTraversalNeeded) transform(tree, tailPosition = false) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 334161f582ed..32be8fc1dd18 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -160,9 +160,9 @@ class TreeChecker extends Phase with SymTransformer { class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { - private val nowDefinedSyms = new mutable.HashSet[Symbol] - private val patBoundSyms = new mutable.HashSet[Symbol] - private val everDefinedSyms = newMutableSymbolMap[untpd.Tree] + private val nowDefinedSyms = util.HashSet[Symbol]() + private val patBoundSyms = util.HashSet[Symbol]() + private val everDefinedSyms = MutableSymbolMap[untpd.Tree]() // don't check value classes after typer, as the constraint about constructors doesn't hold after transform override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5499e2048ec4..a52654cd3979 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -28,7 +28,7 @@ import Trees._ import transform.SymUtils._ import transform.TypeUtils._ import Hashable._ -import util.{SourceFile, NoSource} +import util.{SourceFile, NoSource, EqHashMap} import config.{Config, Feature} import Feature.migrateTo3 import config.Printers.{implicits, implicitsDetailed} @@ -289,7 +289,7 @@ object Implicits: * @param outerCtx the next outer context that makes visible further implicits */ class ContextualImplicits(val refs: 
List[ImplicitRef], val outerImplicits: ContextualImplicits)(initctx: Context) extends ImplicitRefs(initctx) { - private val eligibleCache = new java.util.IdentityHashMap[Type, List[Candidate]] + private val eligibleCache = EqHashMap[Type, List[Candidate]]() /** The level increases if current context has a different owner or scope than * the context of the next-outer ImplicitRefs. This is however disabled under @@ -316,7 +316,7 @@ object Implicits: def eligible(tp: Type): List[Candidate] = if (tp.hash == NotCached) computeEligible(tp) else { - val eligibles = eligibleCache.get(tp) + val eligibles = eligibleCache.lookup(tp) if (eligibles != null) { def elided(ci: ContextualImplicits): Int = { val n = ci.refs.length @@ -329,7 +329,7 @@ object Implicits: else if (irefCtx eq NoContext) Nil else { val result = computeEligible(tp) - eligibleCache.put(tp, result) + eligibleCache(tp) = result result } } @@ -528,13 +528,13 @@ trait ImplicitRunInfo: private var provisional: Boolean = _ private var parts: mutable.LinkedHashSet[Type] = _ - private val partSeen = TypeHashSet() + private val partSeen = util.HashSet[Type]() def traverse(t: Type) = if partSeen.contains(t) then () else if implicitScopeCache.contains(t) then parts += t else - partSeen.addEntry(t) + partSeen += t t.dealias match case t: TypeRef => if isAnchor(t.symbol) then @@ -566,8 +566,8 @@ trait ImplicitRunInfo: (parts, provisional) end collectParts - val seen = TypeHashSet() - val incomplete = TypeHashSet() + val seen = util.HashSet[Type]() + val incomplete = util.HashSet[Type]() def collectCompanions(tp: Type, parts: collection.Set[Type]): TermRefSet = val companions = new TermRefSet @@ -578,12 +578,12 @@ trait ImplicitRunInfo: is.companionRefs case None => if seen.contains(t) then - incomplete.addEntry(tp) // all references for `t` will be accounted for in `seen` so we return `EmptySet`. + incomplete += tp // all references for `t` will be accounted for in `seen` so we return `EmptySet`. 
TermRefSet.empty // on the other hand, the refs of `tp` are now inaccurate, so `tp` is marked incomplete. else - seen.addEntry(t) + seen += t val is = recur(t) - if !implicitScopeCache.contains(t) then incomplete.addEntry(tp) + if !implicitScopeCache.contains(t) then incomplete += tp is.companionRefs end iscopeRefs @@ -687,13 +687,13 @@ trait ImplicitRunInfo: record(i"implicitScope") val liftToAnchors = new TypeMap: override def stopAtStatic = true - private val seen = TypeHashSet() + private val seen = util.HashSet[Type]() def applyToUnderlying(t: TypeProxy) = if seen.contains(t) then WildcardType else - seen.addEntry(t) + seen += t t.underlying match case TypeBounds(lo, hi) => if defn.isBottomTypeAfterErasure(lo) then apply(hi) diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index 6704ffd1157f..a88af260eed0 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -99,8 +99,8 @@ class ImportInfo(symf: Context ?=> Symbol, /** Compute info relating to the selector list */ private def ensureInitialized(): Unit = if myExcluded == null then myExcluded = Set() - myForwardMapping = SimpleIdentityMap.Empty - myReverseMapping = SimpleIdentityMap.Empty + myForwardMapping = SimpleIdentityMap.empty + myReverseMapping = SimpleIdentityMap.empty for sel <- selectors do if sel.isWildcard then myWildcardImport = true @@ -180,7 +180,7 @@ class ImportInfo(symf: Context ?=> Symbol, private var myUnimported: Symbol = _ private var myOwner: Symbol = null - private var myResults: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.Empty + private var myResults: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.empty /** Does this import clause or a preceding import clause import `owner.feature`? 
*/ def featureImported(feature: TermName, owner: Symbol)(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 39cc1ffa10ac..962c52fed35a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -438,7 +438,7 @@ object Inferencing { if (vmap1 eq vmap) vmap else propagate(vmap1) } - propagate(accu(SimpleIdentityMap.Empty, tp)) + propagate(accu(SimpleIdentityMap.empty, tp)) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index c97ea8c4b546..34e75ef62c29 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -21,7 +21,7 @@ import SymDenotations.SymDenotation import Inferencing.isFullyDefined import config.Printers.inlining import ErrorReporting.errorTree -import dotty.tools.dotc.util.{SimpleIdentityMap, SimpleIdentitySet, SourceFile, SourcePosition, SrcPos} +import dotty.tools.dotc.util.{SimpleIdentityMap, SimpleIdentitySet, EqHashMap, SourceFile, SourcePosition, SrcPos} import dotty.tools.dotc.parsing.Parsers.Parser import Nullables.{given _} @@ -1041,7 +1041,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { } } - extractBindVariance(SimpleIdentityMap.Empty, tpt.tpe) + extractBindVariance(SimpleIdentityMap.empty, tpt.tpe) } def addTypeBindings(typeBinds: TypeBindsMap)(using Context): Unit = @@ -1320,8 +1320,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) { dropUnusedDefs(termBindings1.asInstanceOf[List[ValOrDefDef]], tree1) } else { - val refCount = newMutableSymbolMap[Int] - val bindingOfSym = newMutableSymbolMap[MemberDef] + val refCount = MutableSymbolMap[Int]() + val bindingOfSym = MutableSymbolMap[MemberDef]() def isInlineable(binding: MemberDef) = binding match { case ddef @ DefDef(_, Nil, Nil, _, _) => 
isElideableExpr(ddef.rhs) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 0f33a950d2c0..fa368a0b7213 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -246,7 +246,7 @@ object ProtoTypes { var typedArgs: List[Tree] = Nil /** A map in which typed arguments can be stored to be later integrated in `typedArgs`. */ - var typedArg: SimpleIdentityMap[untpd.Tree, Tree] = SimpleIdentityMap.Empty + var typedArg: SimpleIdentityMap[untpd.Tree, Tree] = SimpleIdentityMap.empty /** The tupled or untupled version of this prototype, if it has been computed */ var tupledDual: Type = NoType diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 7f57adee6352..c85aee7aa65b 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -651,11 +651,11 @@ object RefChecks { val seenClasses = new util.HashSet[Symbol](256) def addDecls(cls: Symbol): Unit = if (!seenClasses.contains(cls)) { - seenClasses.addEntry(cls) + seenClasses += cls for (mbr <- cls.info.decls) if (mbr.isTerm && !mbr.isOneOf(Synthetic | Bridge) && mbr.memberCanMatchInheritedSymbols && !membersToCheck.contains(mbr.name)) - membersToCheck.addEntry(mbr.name) + membersToCheck += mbr.name cls.info.parents.map(_.classSymbol) .filter(_.isOneOf(AbstractOrTrait)) .dropWhile(_.isOneOf(JavaDefined | Scala2x)) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index bf88a38b4cb9..5d0b94a45186 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2594,7 +2594,7 @@ class Typer extends Namer def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = { val buf = new 
mutable.ListBuffer[Tree] - var enumContexts: SimpleIdentityMap[Symbol, Context] = SimpleIdentityMap.Empty + var enumContexts: SimpleIdentityMap[Symbol, Context] = SimpleIdentityMap.empty val initialNotNullInfos = ctx.notNullInfos // A map from `enum` symbols to the contexts enclosing their definitions @tailrec def traverse(stats: List[untpd.Tree])(using Context): (List[Tree], Context) = stats match { diff --git a/compiler/src/dotty/tools/dotc/util/EqHashMap.scala b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala new file mode 100644 index 000000000000..df7b16d48927 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala @@ -0,0 +1,81 @@ +package dotty.tools.dotc.util + +/** A specialized implementation of GenericHashMap with identity hash and `eq` + * as comparison. + */ +class EqHashMap[Key, Value] + (initialCapacity: Int = 8, capacityMultiple: Int = 2) +extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): + import GenericHashMap.DenseLimit + + /** Hashcode is identityHashCode left-shifted by 1, so lowest bit is not lost + * when taking the index. + */ + final def hash(x: Key): Int = System.identityHashCode(x) << 1 + + /** Equality, by default `eq`, but can be overridden */ + final def isEqual(x: Key, y: Key): Boolean = x.asInstanceOf[AnyRef] eq y.asInstanceOf[AnyRef] + + // The following methods are duplicated from GenericHashMap + // to avoid polymorphic dispatches. 
+ // Aside: It would be nice to have a @specialized annotation that does + // this automatically + + private def index(x: Int): Int = x & (table.length - 2) + + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) + + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + + private def setKey(idx: Int, key: Key) = table(idx) = key.asInstanceOf[AnyRef] + private def setValue(idx: Int, value: Value) = table(idx + 1) = value.asInstanceOf[AnyRef] + + override def lookup(key: Key): Value | Null = + Stats.record(statsItem("lookup")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + null + + override def update(key: Key, value: Value): Unit = + Stats.record(statsItem("update")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then + setValue(idx, value) + return + idx = nextIndex(idx) + k = keyAt(idx) + setKey(idx, key) + setValue(idx, value) + used += 1 + if used > limit then growTable() + + private def addOld(key: Key, value: Value): Unit = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + idx = nextIndex(idx) + k = keyAt(idx) + setKey(idx, key) + setValue(idx, value) + + override def copyFrom(oldTable: Array[AnyRef]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val key = oldTable(idx).asInstanceOf[Key] + if key != null then addOld(key, oldTable(idx + 1).asInstanceOf[Value]) + idx += 2 +end EqHashMap diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala new file mode 100644 index 000000000000..134a94e8b888 --- 
/dev/null +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -0,0 +1,190 @@ +package dotty.tools +package dotc.util + +object GenericHashMap: + + /** The number of elements up to which dense packing is used. + * If the number of elements reaches `DenseLimit` a hash table is used instead + */ + inline val DenseLimit = 8 + +/** A hash table using open hashing with linear scan which is also very space efficient + * at small sizes. The implementations of `hash` and `isEqual` are left open. They have + * to be provided by subclasses. + * + * @param initialCapacity Indicates the initial number of slots in the hash table. + * The actual number of slots is always a power of 2, so the + * initial size of the table will be the smallest power of two + * that is equal or greater than the given `initialCapacity`. + * Minimum value is 4. + * @param capacityMultiple The minimum multiple of capacity relative to used elements. + * The hash table will be re-sized once the number of elements + * multiplied by capacityMultiple exceeds the current size of the hash table. + * However, a table of size up to DenseLimit will be re-sized only + * once the number of elements reaches the table's size. 
+ */ +abstract class GenericHashMap[Key, Value] + (initialCapacity: Int, capacityMultiple: Int) extends MutableMap[Key, Value]: + import GenericHashMap.DenseLimit + + protected var used: Int = _ + protected var limit: Int = _ + protected var table: Array[AnyRef] = _ + clear() + + private def allocate(capacity: Int) = + table = new Array[AnyRef](capacity * 2) + limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple + + private def roundToPower(n: Int) = + if n < 4 then 4 + else if Integer.bitCount(n) == 1 then n + else 1 << (32 - Integer.numberOfLeadingZeros(n)) + + /** Remove all elements from this table and set back to initial configuration */ + def clear(): Unit = + used = 0 + allocate(roundToPower(initialCapacity)) + + /** The number of elements in the set */ + def size: Int = used + + protected def isDense = limit < DenseLimit + + /** Hashcode, to be implemented in subclass */ + protected def hash(x: Key): Int + + /** Equality, to be implemented in subclass */ + protected def isEqual(x: Key, y: Key): Boolean + + /** Turn successor index or hash code `x` into a table index */ + private def index(x: Int): Int = x & (table.length - 2) + + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) + + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + + private def setKey(idx: Int, key: Key) = + table(idx) = key.asInstanceOf[AnyRef] + private def setValue(idx: Int, value: Value) = + table(idx + 1) = value.asInstanceOf[AnyRef] + + def lookup(key: Key): Value | Null = + Stats.record(statsItem("lookup")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + null + + def update(key: Key, value: Value): Unit = + Stats.record(statsItem("update")) + 
var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then + setValue(idx, value) + return + idx = nextIndex(idx) + k = keyAt(idx) + setKey(idx, key) + setValue(idx, value) + used += 1 + if used > limit then growTable() + + def remove(key: Key): Value | Null = + Stats.record(statsItem("remove")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then + val result = valueAt(idx) + var hole = idx + while + idx = nextIndex(idx) + k = keyAt(idx) + k != null + do + if isDense + || index(hole - index(hash(k))) < limit * 2 + // hash(k) is then logically at or before hole; can be moved forward to fill hole + then + setKey(hole, k) + setValue(hole, valueAt(idx)) + hole = idx + table(hole) = null + used -= 1 + return result + idx = nextIndex(idx) + k = keyAt(idx) + null + + def getOrElseUpdate(key: Key, value: => Value): Value = + var v: Value | Null = lookup(key) + if v == null then + val v1 = value + v = v1 + update(key, v1) + v.uncheckedNN + + private def addOld(key: Key, value: Value): Unit = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + idx = nextIndex(idx) + k = keyAt(idx) + setKey(idx, key) + setValue(idx, value) + + def copyFrom(oldTable: Array[AnyRef]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val key = oldTable(idx).asInstanceOf[Key] + if key != null then addOld(key, oldTable(idx + 1).asInstanceOf[Value]) + idx += 2 + + protected def growTable(): Unit = + val oldTable = table + val newLength = + if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) + else table.length + allocate(newLength) + copyFrom(oldTable) + + private abstract class EntryIterator[T] extends Iterator[T]: + def entry(idx: Int): T + private var idx = 0 + def hasNext = + while idx < table.length && table(idx) == null do idx += 2 + idx < 
table.length + def next() = + require(hasNext) + try entry(idx) finally idx += 2 + + def iterator: Iterator[(Key, Value)] = new EntryIterator: + def entry(idx: Int) = (keyAt(idx), valueAt(idx)) + + def keysIterator: Iterator[Key] = new EntryIterator: + def entry(idx: Int) = keyAt(idx) + + def valuesIterator: Iterator[Value] = new EntryIterator: + def entry(idx: Int) = valueAt(idx) + + override def toString: String = + iterator.map((k, v) => s"$k -> $v").mkString("HashMap(", ", ", ")") + + protected def statsItem(op: String) = + val prefix = if isDense then "HashMap(dense)." else "HashMap." + val suffix = getClass.getSimpleName + s"$prefix$op $suffix" +end GenericHashMap diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala new file mode 100644 index 000000000000..7cf92817763a --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -0,0 +1,79 @@ +package dotty.tools.dotc.util + +/** A specialized implementation of GenericHashMap with standard hashCode and equals + * as comparison + */ +class HashMap[Key, Value] + (initialCapacity: Int = 8, capacityMultiple: Int = 2) +extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): + import GenericHashMap.DenseLimit + + /** Hashcode is left-shifted by 1, so lowest bit is not lost + * when taking the index. 
+ */ + final def hash(x: Key): Int = x.hashCode << 1 + + final def isEqual(x: Key, y: Key): Boolean = x.equals(y) + + // The following methods are duplicated from GenericHashMap + // to avoid polymorphic dispatches + private def index(x: Int): Int = x & (table.length - 2) + + private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 2) + + private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] + private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] + + private def setKey(idx: Int, key: Key) = + table(idx) = key.asInstanceOf[AnyRef] + private def setValue(idx: Int, value: Value) = + table(idx + 1) = value.asInstanceOf[AnyRef] + + override def lookup(key: Key): Value | Null = + Stats.record(statsItem("lookup")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + null + + override def update(key: Key, value: Value): Unit = + Stats.record(statsItem("update")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then + setValue(idx, value) + return + idx = nextIndex(idx) + k = keyAt(idx) + setKey(idx, key) + setValue(idx, value) + used += 1 + if used > limit then growTable() + + private def addOld(key: Key, value: Value): Unit = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + idx = nextIndex(idx) + k = keyAt(idx) + setKey(idx, key) + setValue(idx, value) + + override def copyFrom(oldTable: Array[AnyRef]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val key = oldTable(idx).asInstanceOf[Key] + if key != null then addOld(key, oldTable(idx + 1).asInstanceOf[Value]) + idx += 2 +end HashMap diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala 
package dotty.tools.dotc.util

object HashSet:

  /** The number of elements up to which dense packing is used.
   *  If the number of elements reaches `DenseLimit` a hash table is used instead
   */
  inline val DenseLimit = 8

/** A hash set that allows some privileged protected access to its internals
 *  @param initialCapacity  Indicates the initial number of slots in the hash table.
 *                          The actual number of slots is always a power of 2, so the
 *                          initial size of the table will be the smallest power of two
 *                          that is equal or greater than the given `initialCapacity`.
 *                          Minimum value is 4.
 *  @param capacityMultiple The minimum multiple of capacity relative to used elements.
 *                          The hash table will be re-sized once the number of elements
 *                          multiplied by capacityMultiple exceeds the current size of the hash table.
 *                          However, a table of size up to DenseLimit will be re-sized only
 *                          once the number of elements reaches the table's size.
 */
class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] {
  import HashSet.DenseLimit

  private var used: Int = _   // number of elements currently stored
  private var limit: Int = _  // fill threshold; exceeding it triggers growTable()
  private var table: Array[AnyRef] = _

  clear()

  /** Allocate a fresh table of `capacity` slots and compute the fill limit.
   *  Small (dense) tables are allowed to fill up to capacity - 1; larger
   *  tables are resized once `capacity / capacityMultiple` slots are used.
   */
  private def allocate(capacity: Int) =
    table = new Array[AnyRef](capacity)
    limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple

  /** Smallest power of two that is >= `n`, with a minimum of 4 */
  private def roundToPower(n: Int) =
    if n < 4 then 4
    else if Integer.bitCount(n) == 1 then n
    else 1 << (32 - Integer.numberOfLeadingZeros(n))

  /** Remove all elements from this set and set back to initial configuration */
  def clear(): Unit = {
    used = 0
    allocate(roundToPower(initialCapacity))
  }

  /** The number of elements in the set */
  def size: Int = used

  /** Is the table still in its small, densely packed state?
   *  Dense tables are scanned linearly from slot 0 instead of being hashed.
   */
  protected def isDense = limit < DenseLimit

  /** Hashcode, by default `x.hashCode`, can be overridden */
  protected def hash(x: T): Int = x.hashCode

  /** Equality, by default `equals`, can be overridden */
  protected def isEqual(x: T, y: T): Boolean = x.equals(y)

  /** Turn hashcode `x` into a table index */
  protected def index(x: Int): Int = x & (table.length - 1)

  protected def currentTable: Array[AnyRef] = table

  /** First slot to probe for `x`; dense tables always scan from slot 0 */
  protected def firstIndex(x: T) = if isDense then 0 else index(hash(x))

  /** Next slot in the linear probe sequence (wraps around) */
  protected def nextIndex(idx: Int) =
    Stats.record(statsItem("miss"))
    index(idx + 1)

  protected def entryAt(idx: Int) = table(idx).asInstanceOf[T]
  protected def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef]

  /** The entry in the set equal to `x` according to `isEqual`, or `null` if none */
  def lookup(x: T): T | Null =
    Stats.record(statsItem("lookup"))
    var idx = firstIndex(x)
    var e = entryAt(idx)
    while e != null do
      if isEqual(e, x) then return e
      idx = nextIndex(idx)
      e = entryAt(idx)
    null

  /** Add entry `x` at index `idx`, growing the table if the limit is exceeded */
  protected def addEntryAt(idx: Int, x: T): T =
    Stats.record(statsItem("addEntryAt"))
    setEntry(idx, x)
    used += 1
    if used > limit then growTable()
    x

  /** Add `x` to the set; return the existing element equal to `x` if there
   *  is one, `x` itself otherwise.
   */
  def put(x: T): T =
    Stats.record(statsItem("put"))
    var idx = firstIndex(x)
    var e = entryAt(idx)
    while e != null do
      if isEqual(e, x) then return e
      idx = nextIndex(idx)
      e = entryAt(idx)
    addEntryAt(idx, x)

  def +=(x: T): Unit = put(x)

  /** Remove `x` from the set, back-shifting subsequent probe-chain entries
   *  so that linear probing remains valid.
   */
  def -= (x: T): Unit =
    Stats.record(statsItem("remove"))
    var idx = firstIndex(x)
    var e = entryAt(idx)
    while e != null do
      if isEqual(e, x) then
        var hole = idx
        while
          idx = nextIndex(idx)
          e = entryAt(idx)
          e != null
        do
          if isDense
            || index(hole - index(hash(e))) < limit
            // hash(e) is then logically at or before hole; can be moved forward to fill hole
          then
            setEntry(hole, e)
            hole = idx
        table(hole) = null
        used -= 1
        return
      idx = nextIndex(idx)
      e = entryAt(idx)

  /** Re-insert `x` after a resize; no equality check needed since `x` is known
   *  to be absent from the new table.
   */
  private def addOld(x: T) =
    Stats.record(statsItem("re-enter"))
    var idx = firstIndex(x)
    var e = entryAt(idx)
    while e != null do
      idx = nextIndex(idx)
      e = entryAt(idx)
    setEntry(idx, x)

  /** Copy all entries of `oldTable` into the current (larger) table.
   *  A still-dense table can be block-copied since dense slots are contiguous from 0.
   */
  def copyFrom(oldTable: Array[AnyRef]): Unit =
    if isDense then
      Array.copy(oldTable, 0, table, 0, oldTable.length)
    else
      var idx = 0
      while idx < oldTable.length do
        val e = oldTable(idx).asInstanceOf[T]
        if e != null then addOld(e)
        idx += 1

  /** Double the table size; when leaving the dense regime, jump straight to a
   *  capacity that honors `capacityMultiple`.
   */
  protected def growTable(): Unit =
    val oldTable = table
    val newLength =
      if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple)
      else table.length * 2
    allocate(newLength)
    copyFrom(oldTable)

  /** Iterator skeleton that skips empty slots.
   *  NOTE(review): `next()` on an exhausted iterator throws IllegalArgumentException
   *  (via `require`) rather than the conventional NoSuchElementException.
   */
  abstract class EntryIterator extends Iterator[T]:
    def entry(idx: Int): T
    private var idx = 0
    def hasNext =
      while idx < table.length && table(idx) == null do idx += 1
      idx < table.length
    def next() =
      require(hasNext)
      try entry(idx) finally idx += 1

  def iterator: Iterator[T] = new EntryIterator():
    def entry(idx: Int) = entryAt(idx)

  override def toString: String =
    iterator.mkString("HashSet(", ", ", ")")

  /** Label used for Stats instrumentation counters */
  protected def statsItem(op: String) =
    val prefix = if isDense then "HashSet(dense)." else "HashSet."
    val suffix = getClass.getSimpleName
    s"$prefix$op $suffix"
}
- * However, a table of size up to DenseLimit will be re-sized only - * once the number of elements reaches the table's size. - */ -class HashTable[Key >: Null <: AnyRef, Value >: Null <: AnyRef] - (initialCapacity: Int = 8, capacityMultiple: Int = 3): - import HashTable.DenseLimit - - private var used: Int = _ - private var limit: Int = _ - private var table: Array[AnyRef] = _ - clear() - - private def allocate(capacity: Int) = - table = new Array[AnyRef](capacity * 2) - limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple - - private def roundToPower(n: Int) = - if Integer.bitCount(n) == 1 then n - else 1 << (32 - Integer.numberOfLeadingZeros(n)) - - /** Remove all elements from this table and set back to initial configuration */ - def clear(): Unit = - used = 0 - allocate(roundToPower(initialCapacity max 4)) - - /** The number of elements in the set */ - def size: Int = used - - private def isDense = limit < DenseLimit - - /** Hashcode, by default `System.identityHashCode`, but can be overriden */ - protected def hash(x: Key): Int = System.identityHashCode(x) - - /** Equality, by default `eq`, but can be overridden */ - protected def isEqual(x: Key, y: Key): Boolean = x eq y - - /** Turn hashcode `x` into a table index */ - private def index(x: Int): Int = x & (table.length - 2) - - private def firstIndex(key: Key) = if isDense then 0 else index(hash(key)) - private def nextIndex(idx: Int) = index(idx + 2) - - private def keyAt(idx: Int): Key = table(idx).asInstanceOf[Key] - private def valueAt(idx: Int): Value = table(idx + 1).asInstanceOf[Value] - - /** Find entry such that `isEqual(x, entry)`. If it exists, return it. - * If not, enter `x` in set and return `x`. 
- */ - def lookup(key: Key): Value = - var idx = firstIndex(key) - var k = keyAt(idx) - while k != null do - if isEqual(k, key) then return valueAt(idx) - idx = nextIndex(idx) - k = keyAt(idx) - null - - def enter(key: Key, value: Value): Unit = - var idx = firstIndex(key) - var k = keyAt(idx) - while k != null do - if isEqual(k, key) then - table(idx + 1) = value - return - idx = nextIndex(idx) - k = keyAt(idx) - table(idx) = key - table(idx + 1) = value - used += 1 - if used > limit then growTable() - - def invalidate(key: Key): Unit = - var idx = firstIndex(key) - var k = keyAt(idx) - while k != null do - if isEqual(k, key) then - var hole = idx - if !isDense then - while - idx = nextIndex(idx) - k = keyAt(idx) - k != null && index(hash(k)) != idx - do - table(hole) = k - table(hole + 1) = valueAt(idx) - hole = idx - table(hole) = null - used -= 1 - return - idx = nextIndex(idx) - k = keyAt(idx) - - private def addOld(key: Key, value: AnyRef): Unit = - var idx = firstIndex(key) - var k = keyAt(idx) - while k != null do - idx = nextIndex(idx) - k = keyAt(idx) - table(idx) = key - table(idx + 1) = value - - private def growTable(): Unit = - val oldTable = table - val newLength = - if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) - else table.length - allocate(newLength) - if isDense then - Array.copy(oldTable, 0, table, 0, oldTable.length) - else - var idx = 0 - while idx < oldTable.length do - val key = oldTable(idx).asInstanceOf[Key] - if key != null then addOld(key, oldTable(idx + 1)) - idx += 2 - - def iterator: Iterator[(Key, Value)] = - for idx <- (0 until table.length by 2).iterator - if keyAt(idx) != null - yield (keyAt(idx), valueAt(idx)) - - override def toString: String = - iterator.map((k, v) => s"$k -> $v").mkString("HashTable(", ", ", ")") -end HashTable diff --git a/compiler/src/dotty/tools/dotc/util/LinearMap.scala b/compiler/src/dotty/tools/dotc/util/LinearMap.scala new file mode 100644 index 
// --- new file: compiler/src/dotty/tools/dotc/util/LinearMap.scala ---
package dotty.tools.dotc.util

import collection.immutable

/** A linear map is a map where after an `updated` the previous map
 *  value cannot be used anymore. The map is implemented as an immutable
 *  map for sizes <= 4 (where immutable maps have specialized, compact
 *  representations) and as a HashMap for larger sizes.
 */
opaque type LinearMap[K <: AnyRef, V >: Null <: AnyRef] =
  immutable.Map[K, V] | HashMap[K, V]

object LinearMap:

  def empty[K <: AnyRef, V >: Null <: AnyRef]: LinearMap[K, V] =
    immutable.Map.empty[K, V]

  extension [K <: AnyRef, V >: Null <: AnyRef](m: LinearMap[K, V]):

    /** The value bound to `key`, or `null` if there is none */
    def lookup(key: K): V /*| Null*/ = (m: @unchecked) match
      case m: immutable.AbstractMap[K, V] @unchecked =>
        if m.contains(key) then m(key) else null
      case m: HashMap[K, V] @unchecked =>
        m.lookup(key)

    /** This map with `key` bound to `value`; once the small immutable
     *  representation exceeds 4 entries it is migrated to a mutable HashMap,
     *  which is updated in place from then on (hence "linear" usage only).
     */
    def updated(key: K, value: V): LinearMap[K, V] = (m: @unchecked) match
      case m: immutable.AbstractMap[K, V] @unchecked =>
        if m.size < 4 then
          m.updated(key, value)
        else
          val m1 = HashMap[K, V]()
          m.foreach(m1(_) = _)
          m1(key) = value
          m1
      case m: HashMap[K, V] @unchecked =>
        m(key) = value
        m

    def size = (m: @unchecked) match
      case m: immutable.AbstractMap[K, V] @unchecked => m.size
      case m: HashMap[K, V] @unchecked => m.size

end LinearMap
// --- new file: compiler/src/dotty/tools/dotc/util/LinearSet.scala ---
package dotty.tools.dotc.util
import collection.immutable

/** A linear set is a set where, after a `+`, the previous set value cannot be
 *  used anymore. The set is implemented as an immutable set for sizes <= 4
 *  and as a HashSet for larger sizes.
 */
opaque type LinearSet[Elem >: Null <: AnyRef] =
  immutable.Set[Elem] | HashSet[Elem]

object LinearSet:

  def empty[Elem >: Null <: AnyRef]: LinearSet[Elem] = immutable.Set.empty[Elem]

  extension [Elem >: Null <: AnyRef](s: LinearSet[Elem]):

    def contains(elem: Elem): Boolean = (s: @unchecked) match
      case s: immutable.AbstractSet[Elem] @unchecked => s.contains(elem)
      case s: HashSet[Elem] @unchecked => s.contains(elem)

    /** This set extended with `elem`; migrates from the compact immutable
     *  representation to a mutable HashSet once the size exceeds 4, after
     *  which updates happen in place (hence "linear" usage only).
     */
    def + (elem: Elem): LinearSet[Elem] = (s: @unchecked) match
      case s: immutable.AbstractSet[Elem] @unchecked =>
        if s.size < 4 then
          s + elem
        else
          val s1 = HashSet[Elem](initialCapacity = 8)
          s.foreach(s1 += _)
          s1 += elem
          s1
      case s: HashSet[Elem] @unchecked =>
        s += elem
        s

    def - (elem: Elem): LinearSet[Elem] = (s: @unchecked) match
      case s: immutable.AbstractSet[Elem] @unchecked =>
        s - elem
      case s: HashSet[Elem] @unchecked =>
        s -= elem
        s

    def size = (s: @unchecked) match
      case s: immutable.AbstractSet[Elem] @unchecked => s.size
      case s: HashSet[Elem] @unchecked => s.size

end LinearSet
// --- new file: compiler/src/dotty/tools/dotc/util/MutableMap.scala ---
package dotty.tools
package dotc.util

/** A common class for lightweight mutable maps.
 */
abstract class MutableMap[Key, Value] extends ReadOnlyMap[Key, Value]:

  /** Bind `k` to `v`, replacing any previous binding */
  def update(k: Key, v: Value): Unit

  /** Remove the binding for `k` and return its previous value, or `null` if absent */
  def remove(k: Key): Value | Null

  /** Remove the binding for `k`, returning this map for chaining */
  def -=(k: Key): this.type =
    remove(k)
    this

  def clear(): Unit

  /** The value bound to `key`, binding `key` to `value` first if it was absent */
  def getOrElseUpdate(key: Key, value: => Value): Value

// --- new file: compiler/src/dotty/tools/dotc/util/MutableSet.scala ---
package dotty.tools.dotc.util

/** A common class for lightweight mutable sets.
 */
abstract class MutableSet[T] extends ReadOnlySet[T]:

  /** Add element `x` to the set */
  def +=(x: T): Unit

  /** Like `+=` but return existing element equal to `x` if it exists,
   *  `x` itself otherwise.
   */
  def put(x: T): T

  /** Remove element `x` from the set */
  def -=(x: T): Unit

  def clear(): Unit

  def ++= (xs: IterableOnce[T]): Unit =
    xs.iterator.foreach(this += _)

  def --= (xs: IterableOnce[T]): Unit =
    xs.iterator.foreach(this -= _)
// --- new file: compiler/src/dotty/tools/dotc/util/ReadOnlyMap.scala ---
abstract class ReadOnlyMap[Key, Value]:

  /** The value bound to `x`, or `null` if there is none */
  def lookup(x: Key): Value | Null

  def size: Int

  def iterator: Iterator[(Key, Value)]
  def keysIterator: Iterator[Key]
  def valuesIterator: Iterator[Value]

  def isEmpty: Boolean = size == 0

  /** Option-returning variant of `lookup` */
  def get(key: Key): Option[Value] = lookup(key) match
    case null => None
    case v => Some(v.uncheckedNN)

  def getOrElse(key: Key, value: => Value) = lookup(key) match
    case null => value
    case v => v.uncheckedNN

  def contains(key: Key): Boolean = lookup(key) != null

  /** The value bound to `key`; throws NoSuchElementException if absent */
  def apply(key: Key): Value = lookup(key) match
    case null => throw new NoSuchElementException(s"$key")
    case v => v.uncheckedNN

  def toArray: Array[(Key, Value)] =
    val result = new Array[(Key, Value)](size)
    var idx = 0
    for pair <- iterator do
      result(idx) = pair
      idx += 1
    result

  def toSeq: Seq[(Key, Value)] = toArray.toSeq

// --- new file: compiler/src/dotty/tools/dotc/util/ReadOnlySet.scala ---
package dotty.tools.dotc.util

/** A class for the readonly part of mutable sets.
 */
abstract class ReadOnlySet[T]:

  /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */
  def lookup(x: T): T | Null

  def size: Int

  def iterator: Iterator[T]

  def contains(x: T): Boolean = lookup(x) != null

  def foreach[U](f: T => U): Unit = iterator.foreach(f)

  def toList: List[T] = iterator.toList

  def isEmpty = size == 0

object ReadOnlySet:
  /** An empty read-only set, backed by a minimally-sized HashSet */
  def empty[T]: ReadOnlySet[T] = HashSet[T](4)
- */ -abstract class Set[T >: Null] { - - def findEntry(x: T): T - - def addEntry(x: T): Unit - - def iterator: Iterator[T] - - def foreach[U](f: T => U): Unit = iterator foreach f - - def apply(x: T): Boolean = contains(x) - - def contains(x: T): Boolean = - findEntry(x) != null - - def toList: List[T] = iterator.toList - - def clear(): Unit -} diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala index b5d2b840c5e7..437f7e140f68 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala @@ -41,7 +41,7 @@ object SimpleIdentityMap { def forallBinding(f: (AnyRef, Null) => Boolean) = true } - def Empty[K <: AnyRef]: SimpleIdentityMap[K, Null] = myEmpty.asInstanceOf[SimpleIdentityMap[K, Null]] + def empty[K <: AnyRef]: SimpleIdentityMap[K, Null] = myEmpty.asInstanceOf[SimpleIdentityMap[K, Null]] class Map1[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V) extends SimpleIdentityMap[K, V] { def size: Int = 1 @@ -49,7 +49,7 @@ object SimpleIdentityMap { if (k eq k1) v1 else null def remove(k: K): SimpleIdentityMap[K, V] = - if (k eq k1) Empty.asInstanceOf[SimpleIdentityMap[K, V]] + if (k eq k1) empty.asInstanceOf[SimpleIdentityMap[K, V]] else this def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleIdentityMap[K, V1] = if (k eq k1) new Map1(k, v) @@ -162,7 +162,7 @@ object SimpleIdentityMap { if (bindings(i) eq k) return { if (size == CompactifyThreshold) { - var m: SimpleIdentityMap[K, V] = Empty[K] + var m: SimpleIdentityMap[K, V] = empty[K] for (j <- 0 until bindings.length by 2) if (j != i) m = m.updated(key(j), value(j)) m diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index 7967b41ddb18..ecbf266866eb 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -58,7 +58,6 @@ import 
// --- addition to compiler/src/dotty/tools/package.scala ---
/** Forward-ported from the explicit-nulls branch. */
extension [T](x: T | Null):

  /** Assert `x` is non null and strip `Null` from type */
  inline def nn: T =
    assert(x != null)
    x.asInstanceOf[T]

  /** Should be used when we know from the context that `x` is not null.
   *  Flow-typing under explicit nulls will automatically insert many necessary
   *  occurrences of uncheckedNN.
   */
  inline def uncheckedNN: T = x.asInstanceOf[T]

  /** `Some(x)` if `x` is non-null, `None` otherwise */
  inline def toOption: Option[T] =
    if x == null then None else Some(x.asInstanceOf[T])
end extension
// --- new file: tests/run-with-compiler/maptest.scala ---
// Randomized differential test: drives dotty's util.HashMap and the standard
// mutable.HashMap with the same operation stream and checks they stay in sync.
trait Generator[+T]:
  self =>
  def generate: T
  def map[S](f: T => S) = new Generator[S]:
    def generate: S = f(self.generate)
  def flatMap[S](f: T => Generator[S]) = new Generator[S]:
    def generate: S = f(self.generate).generate

object Generator:
  val NumLimit = 300      // keys drawn from [0, NumLimit) to force collisions/reuse
  val Iterations = 10000

  given integers as Generator[Int]:
    val rand = new java.util.Random
    def generate = rand.nextInt()

  given booleans as Generator[Boolean] =
    integers.map(x => x > 0)

  /** Uniform-ish generator over [0, end) */
  def range(end: Int): Generator[Int] =
    integers.map(x => (x % end).abs)

  enum Op:
    case Lookup, Update, Remove
  export Op._

  // op mix: 40% lookups, 40% updates, 20% removes
  given ops as Generator[Op] =
    range(10).map {
      case 0 | 1 | 2 | 3 => Lookup
      case 4 | 5 | 6 | 7 => Update
      case 8 | 9 => Remove
    }

  val nums: Generator[Integer] = range(NumLimit).map(Integer(_))

@main def Test =
  import Generator._

  val map1 = dotty.tools.dotc.util.HashMap[Integer, Integer]()
  val map2 = scala.collection.mutable.HashMap[Integer, Integer]()

  /** Both maps must contain exactly the same bindings */
  def checkSame() =
    assert(map1.size == map2.size)
    for (k, v) <- map1.iterator do
      assert(map2.get(k) == Some(v))
    for (k, v) <- map2.iterator do
      assert(Option(map1.lookup(k)) == Some(v))

  def lookupTest(num: Integer) =
    //println(s"test lookup $num")
    val res1 = Option(map1.lookup(num))
    val res2 = map2.get(num)
    assert(res1 == res2)

  def updateTest(num: Integer) =
    //println(s"test update $num")
    lookupTest(num)
    map1(num) = num
    map2(num) = num
    checkSame()

  def removeTest(num: Integer) =
    //println(s"test remove $num")
    map1.remove(num)
    map2.remove(num)
    checkSame()

  for i <- 0 until Iterations do
    //if i % 1000 == 0 then println(map1.size)
    val num = nums.generate
    Generator.ops.generate match
      case Lookup => lookupTest(num)
      case Update => updateTest(num)
      case Remove => removeTest(num)