diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
index ec251b4aa3f0..44498082c697 100644
--- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
+++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
@@ -20,7 +20,11 @@ import dotty.tools.io.JarArchive
 
 import scala.language.unsafeNulls
 
-
+/** !!! This file is now copied in `dotty.tools.io.FileWriters` in a more general way that does not rely upon
+ *  `PostProcessorFrontendAccess`; this should probably be changed to wrap that class instead.
+ *
+ *  Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well.
+ */
 class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
   type NullableFile = AbstractFile | Null
   import frontendAccess.{compilerSettings, backendReporting}
diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
index 8d467529d60e..d9f413a5d5ab 100644
--- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
@@ -10,6 +10,10 @@ import Symbols.*
 import dotty.tools.io.*
 import scala.collection.mutable
 import scala.compiletime.uninitialized
+import java.util.concurrent.TimeoutException
+
+import scala.concurrent.duration.Duration
+import scala.concurrent.Await
 
 class GenBCode extends Phase { self =>
 
@@ -90,6 +94,15 @@ class GenBCode extends Phase { self =>
     try
       val result = super.runOn(units)
       generatedClassHandler.complete()
+      try
+        for
+          async <- ctx.run.nn.asyncTasty
+          bufferedReporter <- async.sync()
+        do
+          bufferedReporter.relayReports(frontendAccess.backendReporting)
+      catch
+        case ex: Exception =>
+          report.error(s"exception from future: $ex, (${Option(ex.getCause())})")
       result
     finally
       // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized
diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
index adced57d5801..193baaf7a55b 100644
--- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala
+++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -98,11 +98,15 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn
     depRecorder.clear()
     if !suspended then
       suspended = true
-      ctx.run.nn.suspendedUnits += this
+      val currRun = ctx.run.nn
+      currRun.suspendedUnits += this
+      val isInliningPhase = ctx.phase == Phases.inliningPhase
       if ctx.settings.XprintSuspension.value then
-        ctx.run.nn.suspendedHints += (this -> hint)
-      if ctx.phase == Phases.inliningPhase then
+        currRun.suspendedHints += (this -> (hint, isInliningPhase))
+      if isInliningPhase then
         suspendedAtInliningPhase = true
+      else
+        currRun.suspendedAtTyperPhase = true
     throw CompilationUnit.SuspendException()
 
   private var myAssignmentSpans: Map[Int, List[Span]] | Null = null
diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala
index dcc6cf8d71c0..f2f104d1c387 100644
--- a/compiler/src/dotty/tools/dotc/Driver.scala
+++ b/compiler/src/dotty/tools/dotc/Driver.scala
@@ -54,10 +54,10 @@ class Driver {
       if (ctx.settings.XprintSuspension.value)
         val suspendedHints = run.suspendedHints.toList
         report.echo(i"compiling suspended $suspendedUnits%, %")
-        for (unit, hint) <- suspendedHints do
-          report.echo(s"  $unit: $hint")
+        for (unit, (hint, atInlining)) <- suspendedHints do
+          report.echo(s"  $unit at ${if atInlining then "inlining" else "typer"}: $hint")
       val run1 = compiler.newRun
-      run1.compileSuspendedUnits(suspendedUnits)
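+      // pipelined TASTy was fully written only if no unit suspended at typer (see Run.suspendedAtTyperPhase)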
$hint") val run1 = compiler.newRun - run1.compileSuspendedUnits(suspendedUnits) + run1.compileSuspendedUnits(suspendedUnits, !run.suspendedAtTyperPhase) finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) protected def initCtx: Context = (new ContextBase).initialCtx diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index ffc54e969b1f..02a0618bb6e9 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -37,6 +37,7 @@ import scala.io.Codec import Run.Progress import scala.compiletime.uninitialized import dotty.tools.dotc.transform.MegaPhase +import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder /** A compiler run. Exports various methods to compile source files */ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -130,7 +131,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint myUnits = us var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() - var suspendedHints: mutable.Map[CompilationUnit, String] = mutable.HashMap() + var suspendedHints: mutable.Map[CompilationUnit, (String, Boolean)] = mutable.HashMap() + + /** Were any units suspended in the typer phase? if so then pipeline tasty can not complete. */ + var suspendedAtTyperPhase: Boolean = false def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then @@ -231,6 +235,22 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if !progress.isCancelled() then progress.tickSubphase() + /** if true, then we are done writing pipelined TASTy files (i.e. finished in a previous run.) */ + private var myAsyncTastyWritten = false + + private var _asyncTasty: Option[AsyncTastyHolder] = None + + /** populated when this run needs to write pipeline TASTy files. */ + def asyncTasty: Option[AsyncTastyHolder] = _asyncTasty + + private def initializeAsyncTasty()(using Context): () => Unit = + // should we provide a custom ExecutionContext? + // currently it is just used to call the `apiPhaseCompleted` and `dependencyPhaseCompleted` callbacks in Zinc + import scala.concurrent.ExecutionContext.Implicits.global + val async = AsyncTastyHolder.init + _asyncTasty = Some(async) + () => async.cancel() + /** Will be set to true if any of the compiled compilation units contains * a pureFunctions language import. */ @@ -348,7 +368,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint runCtx.setProperty(CyclicReference.Trace, new CyclicReference.Trace()) runCtx.withProgressCallback: cb => _progress = Progress(cb, this, fusedPhases.map(_.traversals).sum) + val cancelAsyncTasty: () => Unit = + if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.YearlyTastyOutput.isDefault then + initializeAsyncTasty() + else () => {} + runPhases(allPhases = fusedPhases)(using runCtx) + cancelAsyncTasty() + ctx.reporter.finalizeReporting() if (!ctx.reporter.hasErrors) Rewrites.writeBack() @@ -365,9 +392,12 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Is this run started via a compilingSuspended? 
+    myAsyncTastyWritten = asyncTastyWritten
     for unit <- us do unit.suspended = false
     compileUnits(us)
diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala
index 47006bdbe561..ab6fda68a09e 100644
--- a/compiler/src/dotty/tools/dotc/core/Contexts.scala
+++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -685,6 +685,7 @@ object Contexts {
       updateStore(compilationUnitLoc, compilationUnit)
     }
 
+    def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback)
     def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback)
     def setProgressCallback(callback: ProgressCallback): this.type = updateStore(progressCallbackLoc, callback)
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
index 138cda099040..7da119fedf52 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -70,11 +70,6 @@ class ExtractAPI extends Phase {
 
   override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] =
     val doZincCallback = ctx.runZincPhases
-    val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YearlyTastyOutput.value match
-      case earlyOut if earlyOut.isDirectory && earlyOut.exists =>
-        Some(Pickler.EarlyFileWriter(earlyOut))
-      case _ =>
-        None
     val nonLocalClassSymbols = new mutable.HashSet[Symbol]
     val units0 =
       if doZincCallback then
@@ -82,7 +77,6 @@ class ExtractAPI extends Phase {
         super.runOn(units)(using ctx0)
       else
         units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output)
-    sigWriter.foreach(writeSigFiles(units0, _))
     if doZincCallback then
       ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _))
     if ctx.settings.YjavaTasty.value then
@@ -91,57 +85,12 @@ class ExtractAPI extends Phase {
     units0
   end runOn
 
-  // Why we only write to early output in the first run?
-  // ===================================================
-  // TL;DR the point of pipeline compilation is to start downstream projects early,
-  // so we don't want to wait for suspended units to be compiled.
-  //
-  // But why is it safe to ignore suspended units?
-  // If this project contains a transparent macro that is called in the same project,
-  // the compilation unit of that call will be suspended (if the macro implementation
-  // is also in this project), causing a second run.
-  // However before we do that run, we will have already requested sbt to begin
-  // early downstream compilation. This means that the suspended definitions will not
-  // be visible in *early* downstream compilation.
-  //
-  // However, sbt will by default prevent downstream compilation happening in this scenario,
-  // due to the existence of macro definitions. So we are protected from failure if user tries
-  // to use the suspended definitions.
-  //
-  // Additionally, it is recommended for the user to move macro implementations to another project
-  // if they want to force early output. In this scenario the suspensions will no longer occur, so now
-  // they will become visible in the early-output.
-  //
-  // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force`
-  // for examples of this in action.
-  //
-  // Therefore we only need to write to early output in the first run. We also provide the option
-  // to diagnose suspensions with the `-Yno-suspended-units` flag.
-  private def writeSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = {
-    try
-      for
-        unit <- units
-        (cls, pickled) <- unit.pickled
-        if cls.isDefinedInCurrentRun
-      do
-        val internalName =
-          if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn
-          else cls.binaryClassName
-        val _ = writer.writeTasty(internalName, pickled())
-    finally
-      writer.close()
-      if ctx.settings.verbose.value then
-        report.echo("[sig files written]")
-    end try
-  }
-
   private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit =
     for cls <- nonLocalClassSymbols do
       val sourceFile = cls.source
       if sourceFile.exists && cls.isDefinedInCurrentRun then
         recordNonLocalClass(cls, sourceFile, cb)
-    cb.apiPhaseCompleted()
-    cb.dependencyPhaseCompleted()
+    ctx.run.nn.asyncTasty.foreach(_.signalAPIComplete())
 
   private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit =
     def registerProductNames(fullClassName: String, binaryClassName: String) =
diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala
index dc0df381f08f..1c6b38b07a84 100644
--- a/compiler/src/dotty/tools/dotc/sbt/package.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/package.scala
@@ -6,10 +6,29 @@ import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix
 import dotty.tools.dotc.core.Names.Name
 import dotty.tools.dotc.core.Names.termName
 
+import interfaces.IncrementalCallback
+import dotty.tools.io.FileWriters.BufferingReporter
+import dotty.tools.dotc.core.Decorators.em
+
+import scala.util.chaining.given
+import scala.util.control.NonFatal
+
 inline val TermNameHash = 1987 // 300th prime
 inline val TypeNameHash = 1993 // 301st prime
 inline val InlineParamHash = 1997 // 302nd prime
 
+def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): BufferingReporter =
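+  // reuse a reporter that already buffered reports from the TASTy write, so earlier reports are not lost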
+  val zincReporter = pending match
+    case Some(buffered) => buffered
+    case None => BufferingReporter()
+  try
+    cb.apiPhaseCompleted()
+    cb.dependencyPhaseCompleted()
+  catch
+    case NonFatal(t) =>
+      zincReporter.exception(em"signaling API and Dependencies phases completion", t)
+  zincReporter
+
 extension (sym: Symbol)
   /** Mangle a JVM symbol name in a format better suited for internal uses by sbt.
diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
index 3a4212547d16..6fe687072828 100644
--- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
@@ -9,7 +9,7 @@ import tasty.*
 import config.Printers.{noPrinter, pickling}
 import config.Feature
 import java.io.PrintStream
-import io.FileWriters.TastyWriter
+import io.FileWriters.{TastyWriter, ReadOnlyContext}
 import StdNames.{str, nme}
 import Periods.*
 import Phases.*
@@ -22,6 +22,16 @@ import compiletime.uninitialized
 import dotty.tools.io.{JarArchive, AbstractFile}
 import dotty.tools.dotc.printing.OutlinePrinter
 import scala.annotation.constructorOnly
+import scala.concurrent.Promise
+import dotty.tools.dotc.transform.Pickler.writeSigFilesAsync
+
+import scala.util.chaining.given
+import dotty.tools.io.FileWriters.{EagerReporter, BufferingReporter}
+import dotty.tools.dotc.sbt.interfaces.IncrementalCallback
+import dotty.tools.dotc.sbt.asyncZincPhasesCompleted
+import scala.concurrent.ExecutionContext
+import scala.util.control.NonFatal
+import java.util.concurrent.atomic.AtomicBoolean
 
 object Pickler {
   val name: String = "pickler"
@@ -33,30 +43,155 @@ object Pickler {
    */
   inline val ParallelPickling = true
 
-  class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile):
-    def this(dest: AbstractFile)(using @constructorOnly ctx: Context) = this(TastyWriter(dest), dest)
+  /** A holder for synchronization points and reports when writing TASTy asynchronously.
+   *  The callbacks should only be called once.
+   */
+  class AsyncTastyHolder private (
+      val earlyOut: AbstractFile, incCallback: IncrementalCallback | Null)(using @constructorOnly ex: ExecutionContext):
+    import scala.concurrent.Future as StdFuture
+    import scala.concurrent.Await
+    import scala.concurrent.duration.Duration
+    import AsyncTastyHolder.Signal
+
+    private val _cancelled = AtomicBoolean(false)
+
+    /** Cancel any outstanding work.
+     *  This should be done at the end of a run, e.g. background work may still be running even though
+     *  errors in the main thread will prevent reaching the backend.
+     */
+    def cancel(): Unit =
+      if _cancelled.compareAndSet(false, true) then
+        asyncTastyWritten.trySuccess(None) // cancel the wait for TASTy writing
+        if incCallback != null then
+          asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion
+        else
+          () // nothing else to do
+
+    /** Check whether the work has been cancelled. */
+    def cancelled: Boolean = _cancelled.get()
+
+    private val asyncTastyWritten = Promise[Option[AsyncTastyHolder.State]]()
+    private val asyncAPIComplete =
+      if incCallback == null then Promise.successful(Signal.Done) // no need to wait for API completion
+      else Promise[Signal]()
+
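+    // resolves once both the TASTy write and the API completion signal have arrived;
+    // notifies Zinc only if the write completed ("done") without errors and nothing was cancelled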
+    private val backendFuture: StdFuture[Option[BufferingReporter]] =
+      val asyncState = asyncTastyWritten.future
+        .zipWith(asyncAPIComplete.future)((state, api) => state.filterNot(_ => api == Signal.Cancelled))
+      asyncState.map: optState =>
+        optState.flatMap: state =>
+          if incCallback != null && state.done && !state.hasErrors then
+            asyncZincPhasesCompleted(incCallback, state.pending).toBuffered
+          else state.pending
+
+    /** Awaits the state of the async TASTy operations indefinitely; optionally returns any buffered reports.
+     */
+    def sync(): Option[BufferingReporter] =
+      Await.result(backendFuture, Duration.Inf)
+
+    def signalAPIComplete(): Unit =
+      if incCallback != null then
+        asyncAPIComplete.trySuccess(Signal.Done)
+
+    /** Should only be called once. */
+    def signalAsyncTastyWritten()(using ctx: ReadOnlyContext): Unit =
+      val done = !ctx.run.suspendedAtTyperPhase
+      if done then
+        try
+          // when we are done, i.e. there are no suspended units,
+          // we should close the file system so it can be read in the same JVM process.
+          // Note: we close even if we have been cancelled.
+          earlyOut match
+            case jar: JarArchive => jar.close()
+            case _ =>
+        catch
+          case NonFatal(t) =>
+            ctx.reporter.error(em"Error closing early output: ${t}")
+
+      asyncTastyWritten.trySuccess:
+        Some(
+          AsyncTastyHolder.State(
+            hasErrors = ctx.reporter.hasErrors,
+            done = done,
+            pending = ctx.reporter.toBuffered
+          )
+        )
+    end signalAsyncTastyWritten
+  end AsyncTastyHolder
+
+  object AsyncTastyHolder:
+    /** The state after writing async TASTy. Any errors should have been reported, or are pending.
+     *  If any units were suspended, then we can't signal Zinc yet.
+     */
+    private class State(val hasErrors: Boolean, val done: Boolean, val pending: Option[BufferingReporter])
+    private enum Signal:
+      case Done, Cancelled
+
+    /** Create a holder for the asynchronous state of early-TASTy operations.
+     *  The `ExecutionContext` parameter is used to call into Zinc to signal
+     *  that the API and Dependency phases are complete.
+     */
+    def init(using Context, ExecutionContext): AsyncTastyHolder =
+      AsyncTastyHolder(ctx.settings.YearlyTastyOutput.value, ctx.incCallback)
+
+  /** Asynchronously writes TASTy files to the destination -Yearly-tasty-output.
+   *  If no units have been suspended, then we are "done", which enables Zinc to be signalled.
+   *
+   *  If there are suspended units (due to calling a macro defined in the same run), then the API is incomplete,
+   *  so it would be a mistake to signal Zinc. This is a sensible default, because Zinc by default will ignore the
+   *  signal if there are macros in the API.
+   *  - See `sbt-test/pipelining/pipelining-scala-macro` for an example.
+   *
+   *  TODO: The user can override this default behaviour in Zinc to always listen to the signal
+   *  (e.g. if they define the macro implementation in an upstream, non-pipelined project).
+   *  - See `sbt-test/pipelining/pipelining-scala-macro-force` where we force Zinc to listen to the signal.
+   *  If the user wants to force early output to be written, then they probably also want to benefit from pipelining,
+   *  which then makes suspension problematic as it increases compilation times.
+   *  Proposal: perhaps we should provide a flag `-Ystrict-pipelining` (as an alternative to `-Yno-suspended-units`),
+   *  which fails when a macro is defined whose implementation is in the same project (regardless of whether it is
+   *  used); this is also stricter than preventing suspension at typer.
+   *  The user is then certain that they are always benefiting as much as possible from pipelining.
+   */
+  def writeSigFilesAsync(
+      tasks: List[(String, Array[Byte])],
+      writer: EarlyFileWriter,
+      async: AsyncTastyHolder)(using ctx: ReadOnlyContext): Unit = {
+    try
+      try
+        for (internalName, pickled) <- tasks do
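+          // stop writing remaining TASTy once cancelled; the writer is still closed and completion signalled below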
+          if !async.cancelled then
+            val _ = writer.writeTasty(internalName, pickled)
+      catch
+        case NonFatal(t) => ctx.reporter.exception(em"writing TASTy to early output", t)
+      finally
+        writer.close()
+    catch
+      case NonFatal(t) => ctx.reporter.exception(em"closing early output writer", t)
+    finally
+      async.signalAsyncTastyWritten()
+  }
 
-    export writer.writeTasty
+  class EarlyFileWriter private (writer: TastyWriter):
+    def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest))
 
-    def close(): Unit =
-      writer.close()
-      origin match {
-        case jar: JarArchive => jar.close() // also close the file system
-        case _ =>
-      }
+    export writer.{writeTasty, close}
 }
 
 /** This phase pickles trees */
 class Pickler extends Phase {
   import ast.tpd.*
 
+  private def doAsyncTasty(using Context): Boolean = ctx.run.nn.asyncTasty.isDefined
+
+  private var fastDoAsyncTasty: Boolean = false
+
   override def phaseName: String = Pickler.name
 
   override def description: String = Pickler.description
 
-  // No need to repickle trees coming from TASTY
+  // No need to repickle trees coming from TASTy; however, when we need to write TASTy to the early output,
+  // we still need to run this phase to send the TASTy from the compilation units to the early output.
   override def isRunnable(using Context): Boolean =
-    super.isRunnable && !ctx.settings.fromTasty.value
+    super.isRunnable && (!ctx.settings.fromTasty.value || doAsyncTasty)
 
   // when `-Yjava-tasty` is set we actually want to run this phase on Java sources
   override def skipIfJava(using Context): Boolean = false
@@ -86,11 +221,20 @@ class Pickler extends Phase {
    */
   object serialized:
     val scratch = new ScratchData
+    private val buf = mutable.ListBuffer.empty[(String, Array[Byte])]
    def run(body: ScratchData => Array[Byte]): Array[Byte] =
       synchronized {
         scratch.reset()
         body(scratch)
       }
+    def commit(internalName: String, tasty: Array[Byte]): Unit = synchronized {
+      buf += ((internalName, tasty))
+    }
+    def result(): List[(String, Array[Byte])] = synchronized {
+      val res = buf.toList
+      buf.clear()
+      res
+    }
 
   private val executor = Executor[Array[Byte]]()
 
@@ -100,10 +244,29 @@ class Pickler extends Phase {
     if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_))
     else ctx
 
+  /** Only run under `-Ypickle-write` and `-from-tasty`. */
+  private def runFromTasty(unit: CompilationUnit)(using Context): Unit = {
+    val pickled = unit.pickled
+    for (cls, bytes) <- pickled do
+      serialized.commit(computeInternalName(cls), bytes())
+  }
+
+  private def computeInternalName(cls: ClassSymbol)(using Context): String =
+    if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn
+    else cls.binaryClassName
+
   override def run(using Context): Unit = {
     val unit = ctx.compilationUnit
     pickling.println(i"unpickling in run ${ctx.runId}")
 
+    if ctx.settings.fromTasty.value then
+      // skip the rest of the phase, as the TASTy is already "pickled";
+      // however we still need to set up tasks to write the TASTy to the
+      // early output when pipelining is enabled.
+      if fastDoAsyncTasty then
+        runFromTasty(unit)
+      return ()
+
     for
       cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree))
       tree <- sliceTopLevel(unit.tpdTree, cls)
@@ -137,6 +300,8 @@ class Pickler extends Phase {
     val positionWarnings = new mutable.ListBuffer[Message]()
     def reportPositionWarnings() = positionWarnings.foreach(report.warning(_))
 
+    val internalName = if fastDoAsyncTasty then computeInternalName(cls) else ""
+
     def computePickled(): Array[Byte] = inContext(ctx.fresh) {
       serialized.run { scratch =>
         treePkl.compactify(scratch)
@@ -166,6 +331,10 @@ class Pickler extends Phase {
           println(i"**** pickled info of $cls")
           println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never"))
           println(i"**** end of pickled info of $cls")
+
+        if fastDoAsyncTasty then
+          serialized.commit(internalName, pickled)
+
         pickled
       }
     }
@@ -194,13 +363,29 @@ class Pickler extends Phase {
   }
 
   override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = {
+    val useExecutor = this.useExecutor
+
+    val writeTask: Option[() => Unit] =
+      ctx.run.nn.asyncTasty.map: async =>
+        fastDoAsyncTasty = true
+        () =>
+          given ReadOnlyContext = if useExecutor then ReadOnlyContext.buffered else ReadOnlyContext.eager
+          val writer = Pickler.EarlyFileWriter(async.earlyOut)
+          writeSigFilesAsync(serialized.result(), writer, async)
+
+    def runPhase(writeCB: (doWrite: () => Unit) => Unit) =
+      super.runOn(units).tap(_ => writeTask.foreach(writeCB))
+
     val result =
       if useExecutor then
         executor.start()
-        try super.runOn(units)
+        try
+          runPhase: doWrite =>
+            // unless we redesign the executor to have a `Unit` schedule overload, we need some sentinel value.
+            executor.schedule(() => { doWrite(); Array.emptyByteArray })
         finally executor.close()
       else
-        super.runOn(units)
+        runPhase(_())
     if ctx.settings.YtestPickler.value then
       val ctx2 = ctx.fresh
         .setSetting(ctx.settings.YreadComments, true)
diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala
index 4f03194fa4ce..87825b025734 100644
--- a/compiler/src/dotty/tools/io/FileWriters.scala
+++ b/compiler/src/dotty/tools/io/FileWriters.scala
@@ -1,8 +1,7 @@
 package dotty.tools.io
 
-import dotty.tools.dotc.core.Contexts.*
-import dotty.tools.dotc.core.Decorators.em
-import dotty.tools.dotc.report
+import scala.language.unsafeNulls
+
 import dotty.tools.io.AbstractFile
 import dotty.tools.io.JarArchive
 import dotty.tools.io.PlainFile
@@ -25,13 +24,148 @@ import java.util.zip.CRC32
 import java.util.zip.Deflater
 import java.util.zip.ZipEntry
 import java.util.zip.ZipOutputStream
-import scala.language.unsafeNulls
+import scala.collection.mutable
+
+import dotty.tools.dotc.core.Contexts, Contexts.Context
+import dotty.tools.dotc.core.Decorators.em
+
+import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition}
+
+import dotty.tools.dotc.reporting.Message
+import dotty.tools.dotc.report
 
-/** Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed */
+import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting
+import scala.annotation.constructorOnly
+import java.util.concurrent.atomic.AtomicReference
+import java.util.concurrent.atomic.AtomicBoolean
+import java.util.ConcurrentModificationException
+
+/** !!! Copied from `dotty.tools.backend.jvm.ClassfileWriters` but with no `PostProcessorFrontendAccess` needed;
+ *  this should probably be changed to wrap that class instead.
+ *
+ *  Until then, any changes to this file should be copied to `dotty.tools.backend.jvm.ClassfileWriters` as well.
+ */
 object FileWriters {
   type InternalName = String
   type NullableFile = AbstractFile | Null
 
+  inline def ctx(using ReadOnlyContext): ReadOnlyContext = summon[ReadOnlyContext]
+
+  sealed trait DelayedReporter {
+    def hasErrors: Boolean
+    def error(message: Context ?=> Message, position: SourcePosition): Unit
+    def warning(message: Context ?=> Message, position: SourcePosition): Unit
+    def log(message: String): Unit
+
+    final def toBuffered: Option[BufferingReporter] = this match
+      case buffered: BufferingReporter =>
+        if buffered.hasReports then Some(buffered) else None
+      case _: EagerReporter => None
+
+    def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition)
+    def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition)
+    final def exception(reason: Context ?=> Message, throwable: Throwable): Unit =
+      error({
+        val trace = throwable.getStackTrace().nn.mkString("\n  ")
+        em"An unhandled exception was thrown in the compiler while\n  ${reason.message}.\n${throwable}\n  $trace"
+      }, NoSourcePosition)
+  }
+
+  final class EagerReporter(using captured: Context) extends DelayedReporter:
+    private var _hasErrors = false
+
+    def hasErrors: Boolean = _hasErrors
+
+    def error(message: Context ?=> Message, position: SourcePosition): Unit =
+      report.error(message, position)
+      _hasErrors = true
+
+    def warning(message: Context ?=> Message, position: SourcePosition): Unit =
+      report.warning(message, position)
+
+    def log(message: String): Unit = report.echo(message)
+
+  final class BufferingReporter extends DelayedReporter {
+    // We optimise access to the buffered reports for the common case - that there are no warnings/errors to report.
+    // We could use a ListBuffer etc. - but that would be extra allocation in the common case.
+    // Buffered logs are updated atomically.
+
+    private val _bufferedReports = AtomicReference(List.empty[Report])
+    private val _hasErrors = AtomicBoolean(false)
+
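+    // each buffered report captures how to replay itself against a BackendReporting once a Context is available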
+    enum Report(val relay: Context ?=> BackendReporting => Unit):
+      case Error(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.error(message(ctx), position))
+      case Warning(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.warning(message(ctx), position))
+      case Log(message: String) extends Report(_.log(message))
+
+    /** Atomically record that an error occurred */
+    private def recordError(): Unit =
+      _hasErrors.set(true)
+
+    /** Atomically add a report to the log */
+    private def recordReport(report: Report): Unit =
+      _bufferedReports.getAndUpdate(report :: _)
+
+    /** Atomically extract and clear the buffered reports; must only be called at a synchronization point.
+     */
+    private def resetReports(): List[Report] =
+      val curr = _bufferedReports.get()
+      if curr.nonEmpty && !_bufferedReports.compareAndSet(curr, Nil) then
+        throw ConcurrentModificationException("concurrent modification of buffered reports")
+      else curr
+
+    def hasErrors: Boolean = _hasErrors.get()
+    def hasReports: Boolean = _bufferedReports.get().nonEmpty
+
+    def error(message: Context ?=> Message, position: SourcePosition): Unit =
+      recordReport(Report.Error({case given Context => message}, position))
+      recordError()
+
+    def warning(message: Context ?=> Message, position: SourcePosition): Unit =
+      recordReport(Report.Warning({case given Context => message}, position))
+
+    def log(message: String): Unit =
+      recordReport(Report.Log(message))
+
+    /** Should only be called from the main compiler thread. */
+    def relayReports(toReporting: BackendReporting)(using Context): Unit =
+      val reports = resetReports()
+      if reports.nonEmpty then
+        reports.reverse.foreach(_.relay(toReporting))
+  }
+
+  trait ReadOnlySettings:
+    def jarCompressionLevel: Int
+    def debug: Boolean
+
+  trait ReadOnlyRun:
+    def suspendedAtTyperPhase: Boolean
+
+  trait ReadOnlyContext:
+    val run: ReadOnlyRun
+    val settings: ReadOnlySettings
+    val reporter: DelayedReporter
+
+  trait BufferedReadOnlyContext extends ReadOnlyContext:
+    val reporter: BufferingReporter
+
+  object ReadOnlyContext:
+    def readSettings(using ctx: Context): ReadOnlySettings = new:
+      val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value
+      val debug = ctx.settings.Ydebug.value
+
+    def readRun(using ctx: Context): ReadOnlyRun = new:
+      val suspendedAtTyperPhase = ctx.run.suspendedAtTyperPhase
+
+    def buffered(using Context): BufferedReadOnlyContext = new:
+      val settings = readSettings
+      val reporter = BufferingReporter()
+      val run = readRun
+
+    def eager(using Context): ReadOnlyContext = new:
+      val settings = readSettings
+      val reporter = EagerReporter()
+      val run = readRun
+
   /**
    * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the
    * directory and files that are created, and eventually calls `close` when the writing is complete.
@@ -47,7 +181,7 @@ object FileWriters {
    *
    * @param name the internal name of the class, e.g. "scala.Option"
    */
-  def writeTasty(name: InternalName, bytes: Array[Byte])(using Context): NullableFile
+  def writeTasty(name: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile
 
   /**
    * Close the writer. Behavior is undefined after a call to `close`.
@@ -60,7 +194,7 @@ object FileWriters {
 
   object TastyWriter {
 
-    def apply(output: AbstractFile)(using Context): TastyWriter = {
+    def apply(output: AbstractFile)(using ReadOnlyContext): TastyWriter = {
 
       // In Scala 2 depenening on cardinality of distinct output dirs MultiClassWriter could have been used
       // In Dotty we always use single output directory
@@ -73,7 +207,7 @@ object FileWriters {
 
   private final class SingleTastyWriter(underlying: FileWriter) extends TastyWriter {
 
-    override def writeTasty(className: InternalName, bytes: Array[Byte])(using Context): NullableFile = {
+    override def writeTasty(className: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = {
       underlying.writeFile(classToRelativePath(className), bytes)
     }
 
@@ -83,14 +217,14 @@ object FileWriters {
   }
 
   sealed trait FileWriter {
-    def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile
+    def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile
     def close(): Unit
   }
 
   object FileWriter {
-    def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using Context): FileWriter =
+    def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using ReadOnlyContext): FileWriter =
       if (file.isInstanceOf[JarArchive]) {
-        val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value
+        val jarCompressionLevel = ctx.settings.jarCompressionLevel
         // Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where
         // created using `AbstractFile.bufferedOutputStream`instead of JarWritter
         val jarFile = file.underlyingSource.getOrElse{
@@ -127,7 +261,7 @@ object FileWriters {
 
     lazy val crc = new CRC32
 
-    override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = this.synchronized {
+    override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = this.synchronized {
       val entry = new ZipEntry(relativePath)
       if (storeOnly) {
         // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/
@@ -155,14 +289,14 @@ object FileWriters {
     val noAttributes = Array.empty[FileAttribute[?]]
     private val isWindows = scala.util.Properties.isWin
 
-    private def checkName(component: Path)(using Context): Unit = if (isWindows) {
+    private def checkName(component: Path)(using ReadOnlyContext): Unit = if (isWindows) {
       val specials = raw"(?i)CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]".r
       val name = component.toString
-      def warnSpecial(): Unit = report.warning(em"path component is special Windows device: ${name}")
+      def warnSpecial(): Unit = ctx.reporter.warning(em"path component is special Windows device: ${name}")
       specials.findPrefixOf(name).foreach(prefix => if (prefix.length == name.length || name(prefix.length) == '.') warnSpecial())
     }
 
-    def ensureDirForPath(baseDir: Path, filePath: Path)(using Context): Unit = {
+    def ensureDirForPath(baseDir: Path, filePath: Path)(using ReadOnlyContext): Unit = {
       import java.lang.Boolean.TRUE
       val parent = filePath.getParent
       if (!builtPaths.containsKey(parent)) {
@@ -192,7 +326,7 @@ object FileWriters {
     private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)
     private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)
 
-    override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = {
+    override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = {
       val path = base.resolve(relativePath)
       try {
         ensureDirForPath(base, path)
@@ -213,10 +347,10 @@ object FileWriters {
         os.close()
       } catch {
         case e: FileConflictException =>
-          report.error(em"error writing ${path.toString}: ${e.getMessage}")
+          ctx.reporter.error(em"error writing ${path.toString}: ${e.getMessage}")
         case e: java.nio.file.FileSystemException =>
-          if (ctx.settings.Ydebug.value) e.printStackTrace()
-          report.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}")
+          if (ctx.settings.debug) e.printStackTrace()
+          ctx.reporter.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}")
       }
       AbstractFile.getFile(path)
     }
@@ -241,7 +375,7 @@ object FileWriters {
       finally out.close()
     }
 
-    override def writeFile(relativePath: String, bytes: Array[Byte])(using Context):NullableFile = {
+    override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext):NullableFile = {
       val outFile = getFile(base, relativePath)
       writeBytes(outFile, bytes)
       outFile
diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala
index e95dbe97bb19..728f89966af0 100644
--- a/compiler/src/dotty/tools/io/JarArchive.scala
+++ b/compiler/src/dotty/tools/io/JarArchive.scala
@@ -11,7 +11,7 @@ import scala.jdk.CollectionConverters.*
  * that be can used as the compiler's output directory.
  */
 class JarArchive private (root: Directory) extends PlainDirectory(root) {
-  def close(): Unit = jpath.getFileSystem().close()
+  def close(): Unit = this.synchronized(jpath.getFileSystem().close())
   override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists
   def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString)
diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala
new file mode 100644
index 000000000000..35b27f3d4662
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala
@@ -0,0 +1,7 @@
+package a
+
+import scala.util.Success
+
+object A {
+  val foo: (1,2,3) = (1,2,3)
+}
diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala
new file mode 100644
index 000000000000..629f1c0e6cfe
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala
@@ -0,0 +1,10 @@
+package a
+
+import org.junit.Test
+
+class Hello {
+
+  @Test def test(): Unit = {
+    assert(A.foo == (1,2,3))
+  }
+}
diff --git a/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala
new file mode 100644
index 000000000000..bbb4eb5ba7f7
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala
@@ -0,0 +1,9 @@
+package b
+
+import a.A
+
+object Hello {
+  @main def test(): Unit = {
+    assert(A.foo == (1,2,3))
+  }
+}
diff --git a/sbt-test/pipelining/pipelining-cancel/build.sbt b/sbt-test/pipelining/pipelining-cancel/build.sbt
new file mode 100644
index 000000000000..f23e65895c78
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/build.sbt
@@ -0,0 +1,12 @@
+ThisBuild / usePipelining := true
+
+lazy val a = project.in(file("a"))
+  .settings(
+    scalacOptions += "-Ystop-after:pickler", // before ExtractAPI is reached, will cancel the pipeline output
+  )
+
+lazy val b = project.in(file("b"))
+  .dependsOn(a)
+  .settings(
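+    // expected to fail: `a` cancels pipelined TASTy writing, so `a.A` never reaches the early output jar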
scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-cancel/test b/sbt-test/pipelining/pipelining-cancel/test new file mode 100644 index 000000000000..d84f55ca3c31 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/test @@ -0,0 +1,6 @@ +# - Test depending on a project where upstream runs short of reaching backend, +# and cancels pipelined tasty writing. +# - Because `a` finishes compile run before the sending the signal to Zinc +# that pipeline jar is written, sbt will continue to the downstream project anyway. +# - Downstream project `b` will fail as it can't find a.A from upstream. +-> b/compile diff --git a/sbt-test/pipelining/pipelining-scala-macro/build.sbt b/sbt-test/pipelining/pipelining-scala-macro/build.sbt index f8576cdae796..5f703bb0d815 100644 --- a/sbt-test/pipelining/pipelining-scala-macro/build.sbt +++ b/sbt-test/pipelining/pipelining-scala-macro/build.sbt @@ -6,14 +6,14 @@ ThisBuild / usePipelining := true // force a failure by always forcing early output. lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Ycheck:all", + // scalacOptions += "-Ycheck:all", scalacOptions += "-Xprint-suspension", Compile / incOptions := { val old = (Compile / incOptions).value val hooks = old.externalHooks val newHooks = hooks.withExternalLookup( new sbt.internal.inc.NoopExternalLookup { - @volatile var knownSuspension = false + @volatile var earlyOutputChecks = 0 def didFindMacros(analysis: xsbti.compile.CompileAnalysis) = { val foundMacros = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal.values.exists(_.hasMacro) @@ -23,20 +23,26 @@ lazy val a = project.in(file("a")) // force early output, this is safe because the macro class from `macros` will be available. override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + earlyOutputChecks += 1 + assert(earlyOutputChecks <= 2, "should only be called twice (apiPhaseCompleted, dependencyPhaseCompleted).") val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal val a_A = internalClasses.get("a.A") val a_ASuspendTyper = internalClasses.get("a.ASuspendTyper") val a_ASuspendInlining = internalClasses.get("a.ASuspendInlining") + + // both `a.A` and `a.ASuspendInlining` should be found in the analysis. + // even though `a.ASuspendInlining` suspends, it happens at inlining, so we should still + // record API for it in the first run. assert(a_A.isDefined, s"`a.A` wasn't found.") + assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.") - if (!knownSuspension) { - // this callback is called multiple times, so we only want to assert the first time, - // in subsequent runs the suspended definition will be "resumed", so a.ASuspendTyper be found. 
-              knownSuspension = true
-              assert(a_ASuspendTyper.isEmpty, s"`a.ASuspendTyper` should have been suspended initially.")
-            }
+            // in run 1, `a.ASuspendTyper` would have suspended at typer and not be present in the Analysis,
+            // so we wouldn't close the early output jar.
+            // Because it is present here, we waited until the second run to close the early output jar,
+            // at which point we recorded API for `a.ASuspendTyper`; and because we closed the early output
+            // jar, we send the signal to Zinc that the early output was written.
+            assert(a_ASuspendTyper.isDefined, s"`a.ASuspendTyper` wasn't found.")
 
-            assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.")
             // do what sbt does typically,
             // it will not force early output because macros are found