Skip to content

Some performance related changes #16566

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 28 commits into from
Jan 20, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
de04a7d
Better benchmark and Stats infrastructure
odersky Jan 5, 2023
7e0c815
Reduce allocations for pickling
odersky Jan 5, 2023
cb40386
Reduce context creations for value class related ops
odersky Jan 5, 2023
2fbb891
Refactor context pools
odersky Jan 5, 2023
3d21427
Avoid creation of Type lists when assigning types to Apply nodes
odersky Dec 20, 2022
47f5b02
Pickling reorganizations
odersky Jan 5, 2023
8dee679
Reduce string computations
odersky Dec 22, 2022
ac95795
Avoid recomputing hot requiredMethods
odersky Dec 22, 2022
7036ed8
Don't have Diagnostic inherit from Exception
odersky Jan 5, 2023
f8e7f78
Reuse regex matcher in replaceAll calls
odersky Jan 5, 2023
e0e703a
Add specialized versions of tasty.Util.dble for common array element …
odersky Dec 22, 2022
c9a4670
Allow to reuse table of a util.{MutableHashSet,MutableHashMap}
odersky Jan 5, 2023
e459eda
Avoid some Some wrappers when accessing maps
odersky Dec 22, 2022
10359a5
Avoid unnecessary uses of `parentSyms`
odersky Dec 22, 2022
03555e8
Avoid some boxings of vars
odersky Jan 5, 2023
4e7ab60
Avoid creating large CharBuffers in LookaheadScanners
odersky Dec 22, 2022
50c1595
Avoid expensive settings lookup in `setDenot`
odersky Dec 23, 2022
9f13e56
Make `validFor` monomorphic
odersky Dec 23, 2022
84c4fe0
Cache `isType` in SymDenotations
odersky Jan 5, 2023
b28faf9
Streamline some hot computations
odersky Jan 5, 2023
f992f7a
Inline rollbackGadtUnless in GadtConstraint
odersky Jan 5, 2023
c920896
Replace tpd.mapInline by flattenMapConserve
odersky Dec 25, 2022
bc8df3e
Optimize period equality tests
odersky Dec 25, 2022
b64b73c
Drop expensive escapeToNext
odersky Jan 5, 2023
88c84ea
Use home-brewed futures for parallel pickling
odersky Dec 26, 2022
78f1be0
Drop redundant catch and re-throw
odersky Jan 5, 2023
586c459
Revert some changes in NameBuffer
odersky Jan 5, 2023
f2caf05
Make lazy vals thread-unsafe.
odersky Jan 20, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 18 additions & 14 deletions compiler/src/dotty/tools/dotc/Bench.scala
Original file line number Diff line number Diff line change
Expand Up @@ -14,24 +14,22 @@ import scala.annotation.internal.sharable
object Bench extends Driver:

@sharable private var numRuns = 1

private def ntimes(n: Int)(op: => Reporter): Reporter =
(0 until n).foldLeft(emptyReporter)((_, _) => op)

@sharable private var numCompilers = 1
@sharable private var waitAfter = -1
@sharable private var curCompiler = 0
@sharable private var times: Array[Int] = _

override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter =
times = new Array[Int](numRuns)
var reporter: Reporter = emptyReporter
for i <- 0 until numRuns do
val curRun = curCompiler * numRuns + i
val start = System.nanoTime()
reporter = super.doCompile(compiler, files)
times(i) = ((System.nanoTime - start) / 1000000).toInt
println(s"time elapsed: ${times(i)}ms")
if ctx.settings.Xprompt.value then
times(curRun) = ((System.nanoTime - start) / 1000000).toInt
println(s"time elapsed: ${times(curRun)}ms")
if ctx.settings.Xprompt.value || waitAfter == curRun + 1 then
print("hit <return> to continue >")
System.in.nn.read()
println()
reporter

def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = {
Expand All @@ -42,20 +40,26 @@ object Bench extends Driver:

def reportTimes() =
val best = times.sorted
val measured = numRuns / 3
val measured = numCompilers * numRuns / 3
val avgBest = best.take(measured).sum / measured
val avgLast = times.reverse.take(measured).sum / measured
println(s"best out of $numRuns runs: ${best(0)}")
println(s"best out of ${numCompilers * numRuns} runs: ${best(0)}")
println(s"average out of best $measured: $avgBest")
println(s"average out of last $measured: $avgLast")

override def process(args: Array[String], rootCtx: Context): Reporter =
override def process(args: Array[String]): Reporter =
val (numCompilers, args1) = extractNumArg(args, "#compilers")
val (numRuns, args2) = extractNumArg(args1, "#runs")
val (waitAfter, args3) = extractNumArg(args2, "#wait-after", -1)
this.numCompilers = numCompilers
this.numRuns = numRuns
this.waitAfter = waitAfter
this.times = new Array[Int](numCompilers * numRuns)
var reporter: Reporter = emptyReporter
for i <- 0 until numCompilers do
reporter = super.process(args2, rootCtx)
curCompiler = 0
while curCompiler < numCompilers do
reporter = super.process(args3)
curCompiler += 1
reportTimes()
reporter

Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/Driver.scala
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ class Driver {
* the other overloads without worrying about breaking compatibility
* with sbt.
*/
final def process(args: Array[String]): Reporter =
def process(args: Array[String]): Reporter =
process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null)

/** Entry point to the compiler using a custom `Context`.
Expand Down
3 changes: 3 additions & 0 deletions compiler/src/dotty/tools/dotc/Run.scala
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value)
ctx.base.usePhases(phases)

if ctx.settings.YnoDoubleBindings.value then
ctx.base.checkNoDoubleBindings = true

def runPhases(using Context) = {
var lastPrintedTree: PrintedTree = NoPrintedTree
val profiler = ctx.profiler
Expand Down
19 changes: 11 additions & 8 deletions compiler/src/dotty/tools/dotc/ast/Positioned.scala
Original file line number Diff line number Diff line change
Expand Up @@ -154,14 +154,17 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src
}
}

private class LastPosRef:
var positioned: Positioned | Null = null
var span = NoSpan

/** Check that all positioned items in this tree satisfy the following conditions:
* - Parent spans contain child spans
* - If item is a non-empty tree, it has a position
*/
def checkPos(nonOverlapping: Boolean)(using Context): Unit = try {
import untpd._
var lastPositioned: Positioned | Null = null
var lastSpan = NoSpan
val last = LastPosRef()
def check(p: Any): Unit = p match {
case p: Positioned =>
assert(span contains p.span,
Expand All @@ -181,19 +184,19 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src
case _: XMLBlock =>
// FIXME: Trees generated by the XML parser do not satisfy `checkPos`
case _: WildcardFunction
if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] =>
if last.positioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] =>
// ignore transition from last wildcard parameter to body
case _ =>
assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start,
assert(!last.span.exists || !p.span.exists || last.span.end <= p.span.start,
i"""position error, child positions overlap or in wrong order
|parent = $this
|1st child = $lastPositioned
|1st child span = $lastSpan
|1st child = ${last.positioned}
|1st child span = ${last.span}
|2nd child = $p
|2nd child span = ${p.span}""".stripMargin)
}
lastPositioned = p
lastSpan = p.span
last.positioned = p
last.span = p.span
p.checkPos(nonOverlapping)
case m: untpd.Modifiers =>
m.annotations.foreach(check)
Expand Down
10 changes: 6 additions & 4 deletions compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
Original file line number Diff line number Diff line change
Expand Up @@ -830,10 +830,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>

/** The symbols defined locally in a statement list */
def localSyms(stats: List[Tree])(using Context): List[Symbol] =
val locals = new mutable.ListBuffer[Symbol]
for stat <- stats do
if stat.isDef && stat.symbol.exists then locals += stat.symbol
locals.toList
if stats.isEmpty then Nil
else
val locals = new mutable.ListBuffer[Symbol]
for stat <- stats do
if stat.isDef && stat.symbol.exists then locals += stat.symbol
locals.toList

/** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */
def definedSym(tree: Tree)(using Context): Symbol =
Expand Down
51 changes: 27 additions & 24 deletions compiler/src/dotty/tools/dotc/ast/tpd.scala
Original file line number Diff line number Diff line change
Expand Up @@ -857,7 +857,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
}

/** After phase `trans`, set the owner of every definition in this tree that was formerly
* owner by `from` to `to`.
* owned by `from` to `to`.
*/
def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree =
if (ctx.phase == trans.next) {
Expand Down Expand Up @@ -1144,35 +1144,38 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
expand(tree, tree.tpe.widen)
}

inline val MapRecursionLimit = 10

extension (trees: List[Tree])

/** A map that expands to a recursive function. It's equivalent to
/** Equivalent (but faster) to
*
* flatten(trees.mapConserve(op))
*
* and falls back to it after `MaxRecursionLimit` recursions.
* Before that it uses a simpler method that uses stackspace
* instead of heap.
* Note `op` is duplicated in the generated code, so it should be
* kept small.
* assuming that `trees` does not contain `Thicket`s to start with.
*/
inline def mapInline(inline op: Tree => Tree): List[Tree] =
def recur(trees: List[Tree], count: Int): List[Tree] =
if count > MapRecursionLimit then
// use a slower implementation that avoids stack overflows
flatten(trees.mapConserve(op))
else trees match
case tree :: rest =>
val tree1 = op(tree)
val rest1 = recur(rest, count + 1)
if (tree1 eq tree) && (rest1 eq rest) then trees
else tree1 match
case Thicket(elems1) => elems1 ::: rest1
case _ => tree1 :: rest1
case nil => nil
recur(trees, 0)
inline def flattenedMapConserve(inline f: Tree => Tree): List[Tree] =
@tailrec
def loop(mapped: ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree]): List[Tree] =
if pending.isEmpty then
if mapped == null then unchanged
else mapped.prependToList(unchanged)
else
val head0 = pending.head
val head1 = f(head0)

if head1 eq head0 then
loop(mapped, unchanged, pending.tail)
else
val buf = if mapped == null then new ListBuffer[Tree] else mapped
var xc = unchanged
while xc ne pending do
buf += xc.head
xc = xc.tail
head1 match
case Thicket(elems1) => buf ++= elems1
case _ => buf += head1
val tail0 = pending.tail
loop(buf, tail0, tail0)
loop(null, trees, trees)

/** Transform statements while maintaining import contexts and expression contexts
* in the same way as Typer does. The code addresses additional concerns:
Expand Down
7 changes: 0 additions & 7 deletions compiler/src/dotty/tools/dotc/config/Config.scala
Original file line number Diff line number Diff line change
Expand Up @@ -83,13 +83,6 @@ object Config {
*/
inline val failOnInstantiationToNothing = false

/** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set.
* The reason to have an option as well as the present global switch is
* that the noDoubleDef checking is done in a hotspot, and we do not
* want to incur the overhead of checking an option each time.
*/
inline val checkNoDoubleBindings = true

/** Check positions for consistency after parsing */
inline val checkPositions = true

Expand Down
Loading