Write pipelined TASTy in parallel #20153

Merged · 3 commits · Apr 16, 2024
Changes from 2 commits
6 changes: 5 additions & 1 deletion compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
@@ -20,7 +20,11 @@ import dotty.tools.io.JarArchive

import scala.language.unsafeNulls


/** !!! This file is now copied in `dotty.tools.io.FileWriters` in a more general way that does not rely upon
* `PostProcessorFrontendAccess`; this should probably be changed to wrap that class instead.
*
* Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well.
*/
class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
type NullableFile = AbstractFile | Null
import frontendAccess.{compilerSettings, backendReporting}
13 changes: 13 additions & 0 deletions compiler/src/dotty/tools/backend/jvm/GenBCode.scala
@@ -10,6 +10,10 @@ import Symbols.*
import dotty.tools.io.*
import scala.collection.mutable
import scala.compiletime.uninitialized
import java.util.concurrent.TimeoutException

import scala.concurrent.duration.Duration
import scala.concurrent.Await

class GenBCode extends Phase { self =>

@@ -90,6 +94,15 @@ class GenBCode extends Phase { self =>
try
val result = super.runOn(units)
generatedClassHandler.complete()
try
for
async <- ctx.run.nn.asyncTasty
bufferedReporter <- async.sync()
do
bufferedReporter.relayReports(frontendAccess.backendReporting)
catch
case ex: Exception =>
report.error(s"exception from future: $ex, (${Option(ex.getCause())})")
result
finally
// frontendAccess and postProcessor are created lazily; clean them up only if they were initialized
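The GenBCode hunk above is the consumer side of the change: after code generation completes, the run blocks on the asynchronous TASTy writer via `async.sync()` and replays any diagnostics it buffered through `frontendAccess.backendReporting`, with the wait wrapped in a try/catch so a failed future surfaces as a compiler error instead of crashing the run. The following is a minimal, self-contained sketch of that buffer-then-relay shape; `BufferedReports` and `AsyncWriter` are hypothetical stand-ins, not the PR's actual `AsyncTastyHolder`/`BufferingReporter` types.

```scala
// Hedged sketch only: hypothetical names, not the compiler's real API.
import scala.collection.mutable
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext.Implicits.global

/** Collects diagnostics produced on the worker thread so they can be replayed later. */
final class BufferedReports:
  private val messages = mutable.ListBuffer.empty[String]
  def error(msg: String): Unit = synchronized { messages += msg }
  def hasReports: Boolean = synchronized { messages.nonEmpty }
  def relayReports(report: String => Unit): Unit = synchronized { messages.foreach(report) }

/** Runs `write` off the caller's thread; `sync()` blocks and surfaces buffered errors, if any. */
final class AsyncWriter(write: BufferedReports => Unit):
  private val buffer = BufferedReports()
  private val task: Future[Option[BufferedReports]] = Future {
    write(buffer)
    if buffer.hasReports then Some(buffer) else None
  }
  def sync(): Option[BufferedReports] = Await.result(task, Duration.Inf)

@main def relayDemo(): Unit =
  val async = AsyncWriter(buf => buf.error("example: could not write early TASTy"))
  for buffered <- async.sync() do                 // mirrors `async.sync()` in the hunk above
    buffered.relayReports(msg => println(s"error: $msg"))
```

In the real change the relay targets `frontendAccess.backendReporting`, so messages buffered on the background thread flow through the normal backend reporting path.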
10 changes: 7 additions & 3 deletions compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -98,11 +98,15 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn
depRecorder.clear()
if !suspended then
suspended = true
ctx.run.nn.suspendedUnits += this
val currRun = ctx.run.nn
currRun.suspendedUnits += this
val isInliningPhase = ctx.phase == Phases.inliningPhase
if ctx.settings.XprintSuspension.value then
ctx.run.nn.suspendedHints += (this -> hint)
if ctx.phase == Phases.inliningPhase then
currRun.suspendedHints += (this -> (hint, isInliningPhase))
if isInliningPhase then
suspendedAtInliningPhase = true
else
currRun.suspendedAtTyperPhase = true
throw CompilationUnit.SuspendException()

private var myAssignmentSpans: Map[Int, List[Span]] | Null = null
6 changes: 3 additions & 3 deletions compiler/src/dotty/tools/dotc/Driver.scala
@@ -54,10 +54,10 @@ class Driver {
if (ctx.settings.XprintSuspension.value)
val suspendedHints = run.suspendedHints.toList
report.echo(i"compiling suspended $suspendedUnits%, %")
for (unit, hint) <- suspendedHints do
report.echo(s" $unit: $hint")
for (unit, (hint, atInlining)) <- suspendedHints do
report.echo(s" $unit at ${if atInlining then "inlining" else "typer"}: $hint")
val run1 = compiler.newRun
run1.compileSuspendedUnits(suspendedUnits)
run1.compileSuspendedUnits(suspendedUnits, !run.suspendedAtTyperPhase)
finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh))

protected def initCtx: Context = (new ContextBase).initialCtx
36 changes: 33 additions & 3 deletions compiler/src/dotty/tools/dotc/Run.scala
@@ -37,6 +37,7 @@ import scala.io.Codec
import Run.Progress
import scala.compiletime.uninitialized
import dotty.tools.dotc.transform.MegaPhase
import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder

/** A compiler run. Exports various methods to compile source files */
class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo {
@@ -130,7 +131,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
myUnits = us

var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer()
var suspendedHints: mutable.Map[CompilationUnit, String] = mutable.HashMap()
var suspendedHints: mutable.Map[CompilationUnit, (String, Boolean)] = mutable.HashMap()

/** Were any units suspended in the typer phase? If so, then pipelined TASTy cannot complete. */
var suspendedAtTyperPhase: Boolean = false

def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit =
if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then
@@ -231,6 +235,22 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
if !progress.isCancelled() then
progress.tickSubphase()

/** If true, then we are done writing pipelined TASTy files (i.e. they were finished in a previous run). */
private var myAsyncTastyWritten = false

private var _asyncTasty: Option[AsyncTastyHolder] = None

/** Populated when this run needs to write pipelined TASTy files. */
def asyncTasty: Option[AsyncTastyHolder] = _asyncTasty

private def initializeAsyncTasty()(using Context): () => Unit =
// should we provide a custom ExecutionContext?
// currently it is just used to call the `apiPhaseCompleted` and `dependencyPhaseCompleted` callbacks in Zinc
import scala.concurrent.ExecutionContext.Implicits.global
val async = AsyncTastyHolder.init
_asyncTasty = Some(async)
() => async.cancel()

/** Will be set to true if any of the compiled compilation units contains
* a pureFunctions language import.
*/
@@ -348,7 +368,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
runCtx.setProperty(CyclicReference.Trace, new CyclicReference.Trace())
runCtx.withProgressCallback: cb =>
_progress = Progress(cb, this, fusedPhases.map(_.traversals).sum)
val cancelAsyncTasty: () => Unit =
if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.YearlyTastyOutput.isDefault then
initializeAsyncTasty()
else () => {}

runPhases(allPhases = fusedPhases)(using runCtx)
cancelAsyncTasty()

ctx.reporter.finalizeReporting()
if (!ctx.reporter.hasErrors)
Rewrites.writeBack()
@@ -365,9 +392,12 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
/** Is this run started via a compilingSuspended? */
def isCompilingSuspended: Boolean = myCompilingSuspended

/** Compile units `us` which were suspended in a previous run */
def compileSuspendedUnits(us: List[CompilationUnit]): Unit =
/** Compile units `us` which were suspended in a previous run,
* and signal whether all necessary async TASTy files were already written in a previous run.
*/
def compileSuspendedUnits(us: List[CompilationUnit], asyncTastyWritten: Boolean): Unit =
myCompilingSuspended = true
myAsyncTastyWritten = asyncTastyWritten
for unit <- us do unit.suspended = false
compileUnits(us)

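In the Run.scala hunks, async-TASTy support is initialized only when it was not already completed in a previous run, a pickler phase exists, and `-Yearly-tasty-output` is set; the initializer hands back a cancel hook that is invoked unconditionally after `runPhases`. Below is a small sketch of that "conditionally initialize, return a cleanup thunk" shape, using assumed names (`AsyncHolder`, `setupAsyncTasty`) rather than the real `Pickler.AsyncTastyHolder`.

```scala
// Sketch with assumed names; the real holder is dotty.tools.dotc.transform.Pickler.AsyncTastyHolder.
import java.util.concurrent.atomic.AtomicBoolean

final class AsyncHolder:
  private val cancelled = new AtomicBoolean(false)
  def cancel(): Unit = cancelled.set(true)       // tells pending background work to stop
  def isCancelled: Boolean = cancelled.get

/** Returns a cleanup thunk: a real cancel hook when pipelining is active, a no-op otherwise. */
def setupAsyncTasty(alreadyWritten: Boolean, earlyOutputConfigured: Boolean): () => Unit =
  if !alreadyWritten && earlyOutputConfigured then
    val holder = AsyncHolder()
    // ...the holder would be published to the run here so Pickler/ExtractAPI can use it...
    () => holder.cancel()
  else
    () => ()                                     // nothing to clean up when pipelining is off

@main def cancelDemo(): Unit =
  val cancelAsyncTasty = setupAsyncTasty(alreadyWritten = false, earlyOutputConfigured = true)
  // runPhases(...) would go here
  cancelAsyncTasty()                             // mirrors the call right after `runPhases` above
```

Returning a no-op thunk in the disabled case keeps the call site after `runPhases` unconditional.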
1 change: 1 addition & 0 deletions compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -685,6 +685,7 @@ object Contexts {
updateStore(compilationUnitLoc, compilationUnit)
}


def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback)
def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback)
def setProgressCallback(callback: ProgressCallback): this.type = updateStore(progressCallbackLoc, callback)
53 changes: 1 addition & 52 deletions compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -70,19 +70,13 @@ class ExtractAPI extends Phase {

override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] =
val doZincCallback = ctx.runZincPhases
val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YearlyTastyOutput.value match
case earlyOut if earlyOut.isDirectory && earlyOut.exists =>
Some(Pickler.EarlyFileWriter(earlyOut))
case _ =>
None
val nonLocalClassSymbols = new mutable.HashSet[Symbol]
val units0 =
if doZincCallback then
val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols))
super.runOn(units)(using ctx0)
else
units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output)
sigWriter.foreach(writeSigFiles(units0, _))
if doZincCallback then
ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _))
if ctx.settings.YjavaTasty.value then
@@ -91,57 +85,12 @@
units0
end runOn

// Why we only write to early output in the first run?
// ===================================================
// TL;DR the point of pipeline compilation is to start downstream projects early,
// so we don't want to wait for suspended units to be compiled.
//
// But why is it safe to ignore suspended units?
// If this project contains a transparent macro that is called in the same project,
// the compilation unit of that call will be suspended (if the macro implementation
// is also in this project), causing a second run.
// However before we do that run, we will have already requested sbt to begin
// early downstream compilation. This means that the suspended definitions will not
// be visible in *early* downstream compilation.
//
// However, sbt will by default prevent downstream compilation happening in this scenario,
// due to the existence of macro definitions. So we are protected from failure if user tries
// to use the suspended definitions.
//
// Additionally, it is recommended for the user to move macro implementations to another project
// if they want to force early output. In this scenario the suspensions will no longer occur, so now
// they will become visible in the early-output.
//
// See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force`
// for examples of this in action.
//
// Therefore we only need to write to early output in the first run. We also provide the option
// to diagnose suspensions with the `-Yno-suspended-units` flag.
private def writeSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = {
try
for
unit <- units
(cls, pickled) <- unit.pickled
if cls.isDefinedInCurrentRun
do
val internalName =
if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn
else cls.binaryClassName
val _ = writer.writeTasty(internalName, pickled())
finally
writer.close()
if ctx.settings.verbose.value then
report.echo("[sig files written]")
end try
}

private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit =
for cls <- nonLocalClassSymbols do
val sourceFile = cls.source
if sourceFile.exists && cls.isDefinedInCurrentRun then
recordNonLocalClass(cls, sourceFile, cb)
cb.apiPhaseCompleted()
cb.dependencyPhaseCompleted()
ctx.run.nn.asyncTasty.foreach(_.signalAPIComplete())

private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit =
def registerProductNames(fullClassName: String, binaryClassName: String) =
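ExtractAPI no longer writes the early TASTy files or invokes the Zinc completion callbacks synchronously; it now only signals the run's async holder (`asyncTasty.foreach(_.signalAPIComplete())`) so the background writer can finish that work once the API data is ready. The sketch below illustrates that signalling shape with a `Promise`; the names are hypothetical, and the PR's holder may coordinate this differently internally.

```scala
// Hedged sketch of the signalling shape only (hypothetical names).
import scala.concurrent.{Await, Future, Promise}
import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext.Implicits.global

final class ApiSignal:
  private val apiDone = Promise[Unit]()
  /** Called by the API-extraction step once its data is ready. */
  def signalAPIComplete(): Unit = apiDone.trySuccess(())
  /** The background writer sequences follow-up work after the signal instead of blocking the phase. */
  def afterApiComplete[T](body: => T): Future[T] = apiDone.future.map(_ => body)

@main def signalDemo(): Unit =
  val signal = ApiSignal()
  val followUp = signal.afterApiComplete { println("early TASTy written; time to notify Zinc") }
  signal.signalAPIComplete()
  Await.result(followUp, Duration.Inf)
```

In the PR the corresponding follow-up work is notifying Zinc, which is what the new `asyncZincPhasesCompleted` helper in the next hunk supports.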
19 changes: 19 additions & 0 deletions compiler/src/dotty/tools/dotc/sbt/package.scala
@@ -6,10 +6,29 @@ import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix
import dotty.tools.dotc.core.Names.Name
import dotty.tools.dotc.core.Names.termName

import interfaces.IncrementalCallback
import dotty.tools.io.FileWriters.BufferingReporter
import dotty.tools.dotc.core.Decorators.em

import scala.util.chaining.given
import scala.util.control.NonFatal

inline val TermNameHash = 1987 // 300th prime
inline val TypeNameHash = 1993 // 301st prime
inline val InlineParamHash = 1997 // 302nd prime

def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): Option[BufferingReporter] =
val zincReporter = pending match
case Some(buffered) => buffered
case None => BufferingReporter()
try
cb.apiPhaseCompleted()
cb.dependencyPhaseCompleted()
catch
case NonFatal(t) =>
zincReporter.exception(em"signaling API and Dependencies phases completion", t)
if zincReporter.hasErrors then Some(zincReporter) else None

extension (sym: Symbol)

/** Mangle a JVM symbol name in a format better suited for internal uses by sbt.
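The new `asyncZincPhasesCompleted` helper lets the Zinc `apiPhaseCompleted`/`dependencyPhaseCompleted` callbacks run off the main thread: any non-fatal failure is captured in a `BufferingReporter`, and a reporter is returned only when something actually went wrong. Below is a self-contained usage sketch of that contract; `ErrorBuffer` and `PhaseCallback` are stand-in types for illustration, not the compiler's or Zinc's API.

```scala
// Usage sketch with stand-in types (assumptions, not the compiler's API).
import scala.collection.mutable
import scala.util.control.NonFatal

final class ErrorBuffer:
  private val errors = mutable.ListBuffer.empty[String]
  def exception(context: String, t: Throwable): Unit = errors += s"$context: ${t.getMessage}"
  def hasErrors: Boolean = errors.nonEmpty
  def relay(report: String => Unit): Unit = errors.foreach(report)

trait PhaseCallback:
  def apiPhaseCompleted(): Unit
  def dependencyPhaseCompleted(): Unit

/** Same contract as `asyncZincPhasesCompleted` above: Some(buffer) only if errors were recorded. */
def phasesCompleted(cb: PhaseCallback, pending: Option[ErrorBuffer]): Option[ErrorBuffer] =
  val buffer = pending.getOrElse(ErrorBuffer())
  try
    cb.apiPhaseCompleted()
    cb.dependencyPhaseCompleted()
  catch case NonFatal(t) => buffer.exception("signaling API and Dependencies phases completion", t)
  if buffer.hasErrors then Some(buffer) else None

@main def zincDemo(): Unit =
  val failing = new PhaseCallback {
    def apiPhaseCompleted(): Unit = throw new RuntimeException("zinc unavailable")
    def dependencyPhaseCompleted(): Unit = ()
  }
  for buffer <- phasesCompleted(failing, pending = None) do
    buffer.relay(msg => println(s"error: $msg"))
```

Returning an `Option` instead of throwing lets the caller carry the buffer back to the main thread and relay it there, which is exactly what the GenBCode hunk above does with the real `BufferingReporter`.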