From 0caf5035ddd5b81969193bad10cef62eaa5db264 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 28 Nov 2023 12:05:34 +0100 Subject: [PATCH 1/3] write pipelined tasty in parallel. --- .../dotty/tools/backend/jvm/GenBCode.scala | 18 +++ compiler/src/dotty/tools/dotc/Driver.scala | 8 ++ .../src/dotty/tools/dotc/core/Contexts.scala | 11 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 60 ++------ .../dotty/tools/dotc/transform/Pickler.scala | 121 +++++++++++++++- compiler/src/dotty/tools/io/FileWriters.scala | 131 +++++++++++++++--- compiler/src/dotty/tools/io/JarArchive.scala | 2 +- 7 files changed, 272 insertions(+), 79 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 8d467529d60e..ade7c4705c52 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -10,6 +10,10 @@ import Symbols.* import dotty.tools.io.* import scala.collection.mutable import scala.compiletime.uninitialized +import java.util.concurrent.TimeoutException + +import scala.concurrent.duration.given +import scala.concurrent.Await class GenBCode extends Phase { self => @@ -90,6 +94,20 @@ class GenBCode extends Phase { self => try val result = super.runOn(units) generatedClassHandler.complete() + for holder <- ctx.asyncTastyPromise do + try + val asyncState = Await.result(holder.promise.future, 5.seconds) + for reporter <- asyncState.pending do + reporter.relayReports(frontendAccess.backendReporting) + catch + case _: TimeoutException => + report.error( + """Timeout (5s) in backend while waiting for async writing of TASTy files to -Yearly-tasty-output, + | this may be a bug in the compiler. + | + |Alternatively consider turning off pipelining for this project.""".stripMargin + ) + end for result finally // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index dcc6cf8d71c0..1e3d002d4391 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -66,6 +66,13 @@ class Driver { protected def command: CompilerCommand = ScalacCommand + private def setupAsyncTasty(ictx: FreshContext): Unit = inContext(ictx): + ictx.settings.YearlyTastyOutput.value match + case earlyOut if earlyOut.isDirectory && earlyOut.exists => + ictx.setInitialAsyncTasty() + case _ => + () // do nothing + /** Setup context with initialized settings from CLI arguments, then check if there are any settings that * would change the default behaviour of the compiler. 
* @@ -82,6 +89,7 @@ class Driver { Positioned.init(using ictx) inContext(ictx) { + setupAsyncTasty(ictx) if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 47006bdbe561..add8859948a6 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -31,9 +31,11 @@ import StdNames.nme import compiletime.uninitialized import scala.annotation.internal.sharable +import scala.concurrent.Promise import DenotTransformers.DenotTransformer import dotty.tools.dotc.profile.Profiler +import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder import dotty.tools.dotc.sbt.interfaces.{IncrementalCallback, ProgressCallback} import util.Property.Key import util.Store @@ -54,8 +56,9 @@ object Contexts { private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) private val (progressCallbackLoc, store11) = store10.newLocation[ProgressCallback | Null]() + private val (tastyPromiseLoc, store12) = store11.newLocation[Option[AsyncTastyHolder]](None) - private val initialStore = store11 + private val initialStore = store12 /** The current context */ inline def ctx(using ctx: Context): Context = ctx @@ -197,6 +200,8 @@ object Contexts { /** The current settings values */ def settingsState: SettingsState = store(settingsStateLoc) + def asyncTastyPromise: Option[AsyncTastyHolder] = store(tastyPromiseLoc) + /** The current compilation unit */ def compilationUnit: CompilationUnit = store(compilationUnitLoc) @@ -685,6 +690,10 @@ object Contexts { updateStore(compilationUnitLoc, compilationUnit) } + def setInitialAsyncTasty(): this.type = + assert(store(tastyPromiseLoc) == None, "trying to set async tasty promise twice!") + updateStore(tastyPromiseLoc, Some(AsyncTastyHolder(settings.YearlyTastyOutput.value, Promise()))) + def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback) def setProgressCallback(callback: ProgressCallback): this.type = updateStore(progressCallbackLoc, callback) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 138cda099040..962529e9786a 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -70,11 +70,6 @@ class ExtractAPI extends Phase { override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val doZincCallback = ctx.runZincPhases - val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YearlyTastyOutput.value match - case earlyOut if earlyOut.isDirectory && earlyOut.exists => - Some(Pickler.EarlyFileWriter(earlyOut)) - case _ => - None val nonLocalClassSymbols = new mutable.HashSet[Symbol] val units0 = if doZincCallback then @@ -82,7 +77,6 @@ class ExtractAPI extends Phase { super.runOn(units)(using ctx0) else units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output) - sigWriter.foreach(writeSigFiles(units0, _)) if doZincCallback then 
ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) if ctx.settings.YjavaTasty.value then @@ -91,57 +85,19 @@ class ExtractAPI extends Phase { units0 end runOn - // Why we only write to early output in the first run? - // =================================================== - // TL;DR the point of pipeline compilation is to start downstream projects early, - // so we don't want to wait for suspended units to be compiled. - // - // But why is it safe to ignore suspended units? - // If this project contains a transparent macro that is called in the same project, - // the compilation unit of that call will be suspended (if the macro implementation - // is also in this project), causing a second run. - // However before we do that run, we will have already requested sbt to begin - // early downstream compilation. This means that the suspended definitions will not - // be visible in *early* downstream compilation. - // - // However, sbt will by default prevent downstream compilation happening in this scenario, - // due to the existence of macro definitions. So we are protected from failure if user tries - // to use the suspended definitions. - // - // Additionally, it is recommended for the user to move macro implementations to another project - // if they want to force early output. In this scenario the suspensions will no longer occur, so now - // they will become visible in the early-output. - // - // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` - // for examples of this in action. - // - // Therefore we only need to write to early output in the first run. We also provide the option - // to diagnose suspensions with the `-Yno-suspended-units` flag. - private def writeSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { - try - for - unit <- units - (cls, pickled) <- unit.pickled - if cls.isDefinedInCurrentRun - do - val internalName = - if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn - else cls.binaryClassName - val _ = writer.writeTasty(internalName, pickled()) - finally - writer.close() - if ctx.settings.verbose.value then - report.echo("[sig files written]") - end try - } - private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit = for cls <- nonLocalClassSymbols do val sourceFile = cls.source if sourceFile.exists && cls.isDefinedInCurrentRun then recordNonLocalClass(cls, sourceFile, cb) - cb.apiPhaseCompleted() - cb.dependencyPhaseCompleted() + for holder <- ctx.asyncTastyPromise do + import scala.concurrent.ExecutionContext.Implicits.global + // do not expect to be completed with failure + holder.promise.future.foreach: state => + if !state.hasErrors then + // We also await the promise at GenBCode to emit warnings/errors + cb.apiPhaseCompleted() + cb.dependencyPhaseCompleted() private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit = def registerProductNames(fullClassName: String, binaryClassName: String) = diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 3a4212547d16..30e6b0c8332f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -9,7 +9,7 @@ import tasty.* import config.Printers.{noPrinter, pickling} import config.Feature import 
java.io.PrintStream -import io.FileWriters.TastyWriter +import io.FileWriters.{TastyWriter, ReadOnlyContext} import StdNames.{str, nme} import Periods.* import Phases.* @@ -22,6 +22,11 @@ import compiletime.uninitialized import dotty.tools.io.{JarArchive, AbstractFile} import dotty.tools.dotc.printing.OutlinePrinter import scala.annotation.constructorOnly +import scala.concurrent.Promise +import dotty.tools.dotc.transform.Pickler.writeSigFilesAsync + +import scala.util.chaining.given +import dotty.tools.io.FileWriters.BufferingDelayedReporting object Pickler { val name: String = "pickler" @@ -33,8 +38,62 @@ object Pickler { */ inline val ParallelPickling = true + class AsyncTastyHolder(val earlyOut: AbstractFile, val promise: Promise[AsyncTastyState]) + class AsyncTastyState(val hasErrors: Boolean, val pending: Option[BufferingDelayedReporting]) + + // Why we only write to early output in the first run? + // =================================================== + // TL;DR the point of pipeline compilation is to start downstream projects early, + // so we don't want to wait for suspended units to be compiled. + // + // But why is it safe to ignore suspended units? + // If this project contains a transparent macro that is called in the same project, + // the compilation unit of that call will be suspended (if the macro implementation + // is also in this project), causing a second run. + // However before we do that run, we will have already requested sbt to begin + // early downstream compilation. This means that the suspended definitions will not + // be visible in *early* downstream compilation. + // + // However, sbt will by default prevent downstream compilation happening in this scenario, + // due to the existence of macro definitions. So we are protected from failure if user tries + // to use the suspended definitions. + // + // Additionally, it is recommended for the user to move macro implementations to another project + // if they want to force early output. In this scenario the suspensions will no longer occur, so now + // they will become visible in the early-output. + // + // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` + // for examples of this in action. + // + // Therefore we only need to write to early output in the first run. We also provide the option + // to diagnose suspensions with the `-Yno-suspended-units` flag. 
+ def writeSigFilesAsync( + tasks: List[(String, Array[Byte])], + writer: EarlyFileWriter, + promise: Promise[AsyncTastyState])(using ctx: ReadOnlyContext): Unit = { + try + for (internalName, pickled) <- tasks do + val _ = writer.writeTasty(internalName, pickled) + finally + try + writer.close() + finally + promise.success( + AsyncTastyState( + hasErrors = ctx.reporter.hasErrors, + pending = ( + ctx.reporter match + case buffered: BufferingDelayedReporting => Some(buffered) + case _ => None + ) + ) + ) + end try + end try + } + class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile): - def this(dest: AbstractFile)(using @constructorOnly ctx: Context) = this(TastyWriter(dest), dest) + def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest), dest) export writer.writeTasty @@ -50,13 +109,15 @@ object Pickler { class Pickler extends Phase { import ast.tpd.* + def doAsyncTasty(using Context): Boolean = ctx.asyncTastyPromise.isDefined + override def phaseName: String = Pickler.name override def description: String = Pickler.description // No need to repickle trees coming from TASTY override def isRunnable(using Context): Boolean = - super.isRunnable && !ctx.settings.fromTasty.value + super.isRunnable && (!ctx.settings.fromTasty.value || doAsyncTasty) // when `-Yjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false @@ -86,11 +147,20 @@ class Pickler extends Phase { */ object serialized: val scratch = new ScratchData + private val buf = mutable.ListBuffer.empty[(String, Array[Byte])] def run(body: ScratchData => Array[Byte]): Array[Byte] = synchronized { scratch.reset() body(scratch) } + def commit(internalName: String, tasty: Array[Byte]): Unit = synchronized { + buf += ((internalName, tasty)) + } + def result(): List[(String, Array[Byte])] = synchronized { + val res = buf.toList + buf.clear() + res + } private val executor = Executor[Array[Byte]]() @@ -100,10 +170,29 @@ class Pickler extends Phase { if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_)) else ctx + /** Only run under -Ypickle-write and -from-tasty */ + private def runFromTasty(unit: CompilationUnit)(using Context): Unit = { + val pickled = unit.pickled + for (cls, bytes) <- pickled do + serialized.commit(computeInternalName(cls), bytes()) + } + + private def computeInternalName(cls: ClassSymbol)(using Context): String = + if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn + else cls.binaryClassName + override def run(using Context): Unit = { val unit = ctx.compilationUnit pickling.println(i"unpickling in run ${ctx.runId}") + if ctx.settings.fromTasty.value then + // skip the rest of the phase, as tasty is already "pickled", + // however we still need to set up tasks to write TASTy to + // early output when pipelining is enabled. 
+ if doAsyncTasty then + runFromTasty(unit) + return () + for cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) tree <- sliceTopLevel(unit.tpdTree, cls) @@ -137,6 +226,8 @@ class Pickler extends Phase { val positionWarnings = new mutable.ListBuffer[Message]() def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) + val internalName = if doAsyncTasty then computeInternalName(cls) else "" + def computePickled(): Array[Byte] = inContext(ctx.fresh) { serialized.run { scratch => treePkl.compactify(scratch) @@ -166,6 +257,10 @@ class Pickler extends Phase { println(i"**** pickled info of $cls") println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) println(i"**** end of pickled info of $cls") + + if doAsyncTasty then + serialized.commit(internalName, pickled) + pickled } } @@ -194,13 +289,27 @@ class Pickler extends Phase { } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { + val isConcurrent = useExecutor + + val writeTask: Option[() => Unit] = ctx.asyncTastyPromise.map: holder => + () => + given ReadOnlyContext = if isConcurrent then ReadOnlyContext.buffered else ReadOnlyContext.eager + val writer = Pickler.EarlyFileWriter(holder.earlyOut) + writeSigFilesAsync(serialized.result(), writer, holder.promise) + + def runPhase(writeCB: (doWrite: () => Unit) => Unit) = + super.runOn(units).tap(_ => writeTask.foreach(writeCB)) + val result = - if useExecutor then + if isConcurrent then executor.start() - try super.runOn(units) + try + runPhase: doWrite => + // unless we redesign executor to have "Unit" schedule overload, we need some sentinel value. + executor.schedule(() => { doWrite(); Array.emptyByteArray }) finally executor.close() else - super.runOn(units) + runPhase(_()) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh .setSetting(ctx.settings.YreadComments, true) diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index 4f03194fa4ce..d8c397a6166f 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -1,8 +1,7 @@ package dotty.tools.io -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.core.Decorators.em -import dotty.tools.dotc.report +import scala.language.unsafeNulls + import dotty.tools.io.AbstractFile import dotty.tools.io.JarArchive import dotty.tools.io.PlainFile @@ -25,13 +24,107 @@ import java.util.zip.CRC32 import java.util.zip.Deflater import java.util.zip.ZipEntry import java.util.zip.ZipOutputStream -import scala.language.unsafeNulls +import scala.collection.mutable + +import dotty.tools.dotc.core.Contexts, Contexts.Context +import dotty.tools.dotc.core.Decorators.em + +import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition} + +import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.report + +import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting +import scala.annotation.constructorOnly /** Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed */ object FileWriters { type InternalName = String type NullableFile = AbstractFile | Null + inline def ctx(using ReadOnlyContext): ReadOnlyContext = summon[ReadOnlyContext] + + sealed trait DelayedReporting { + def hasErrors: Boolean + def error(message: Context ?=> Message, position: SourcePosition): Unit + def warning(message: Context ?=> Message, position: SourcePosition): Unit + def log(message: String): Unit + + 
def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition) + def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition) + } + + final class EagerDelayedReporting(using captured: Context) extends DelayedReporting: + private var _hasErrors = false + + def hasErrors: Boolean = _hasErrors + + def error(message: Context ?=> Message, position: SourcePosition): Unit = + report.error(message, position) + _hasErrors = true + + def warning(message: Context ?=> Message, position: SourcePosition): Unit = + report.warning(message, position) + + def log(message: String): Unit = report.echo(message) + + final class BufferingDelayedReporting extends DelayedReporting { + // We optimise access to the buffered reports for the common case - that there are no warning/errors to report + // We could use a listBuffer etc - but that would be extra allocation in the common case + // Note - all access is externally synchronized, as this allow the reports to be generated in on thread and + // consumed in another + private var bufferedReports = List.empty[Report] + private var _hasErrors = false + enum Report(val relay: Context ?=> BackendReporting => Unit): + case Error(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.error(message(ctx), position)) + case Warning(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.warning(message(ctx), position)) + case Log(message: String) extends Report(_.log(message)) + + def hasErrors: Boolean = synchronized: + _hasErrors + + def error(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: + bufferedReports ::= Report.Error({case given Context => message}, position) + _hasErrors = true + + def warning(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: + bufferedReports ::= Report.Warning({case given Context => message}, position) + + def log(message: String): Unit = synchronized: + bufferedReports ::= Report.Log(message) + + /** Should only be called from main compiler thread. */ + def relayReports(toReporting: BackendReporting)(using Context): Unit = synchronized: + if bufferedReports.nonEmpty then + bufferedReports.reverse.foreach(_.relay(toReporting)) + bufferedReports = Nil + } + + trait ReadSettings: + def jarCompressionLevel: Int + def debug: Boolean + + trait ReadOnlyContext: + + val settings: ReadSettings + val reporter: DelayedReporting + + trait BufferedReadOnlyContext extends ReadOnlyContext: + val reporter: BufferingDelayedReporting + + object ReadOnlyContext: + def readSettings(using ctx: Context): ReadSettings = new: + val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value + val debug = ctx.settings.Ydebug.value + + def buffered(using Context): BufferedReadOnlyContext = new: + val settings = readSettings + val reporter = BufferingDelayedReporting() + + def eager(using Context): ReadOnlyContext = new: + val settings = readSettings + val reporter = EagerDelayedReporting() + /** * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the * directory and files that are created, and eventually calls `close` when the writing is complete. @@ -47,7 +140,7 @@ object FileWriters { * * @param name the internal name of the class, e.g. "scala.Option" */ - def writeTasty(name: InternalName, bytes: Array[Byte])(using Context): NullableFile + def writeTasty(name: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile /** * Close the writer. 
Behavior is undefined after a call to `close`. @@ -60,7 +153,7 @@ object FileWriters { object TastyWriter { - def apply(output: AbstractFile)(using Context): TastyWriter = { + def apply(output: AbstractFile)(using ReadOnlyContext): TastyWriter = { // In Scala 2 depenening on cardinality of distinct output dirs MultiClassWriter could have been used // In Dotty we always use single output directory @@ -73,7 +166,7 @@ object FileWriters { private final class SingleTastyWriter(underlying: FileWriter) extends TastyWriter { - override def writeTasty(className: InternalName, bytes: Array[Byte])(using Context): NullableFile = { + override def writeTasty(className: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = { underlying.writeFile(classToRelativePath(className), bytes) } @@ -83,14 +176,14 @@ object FileWriters { } sealed trait FileWriter { - def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile + def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile def close(): Unit } object FileWriter { - def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using Context): FileWriter = + def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using ReadOnlyContext): FileWriter = if (file.isInstanceOf[JarArchive]) { - val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value + val jarCompressionLevel = ctx.settings.jarCompressionLevel // Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where // created using `AbstractFile.bufferedOutputStream`instead of JarWritter val jarFile = file.underlyingSource.getOrElse{ @@ -127,7 +220,7 @@ object FileWriters { lazy val crc = new CRC32 - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = this.synchronized { + override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = this.synchronized { val entry = new ZipEntry(relativePath) if (storeOnly) { // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ @@ -155,14 +248,14 @@ object FileWriters { val noAttributes = Array.empty[FileAttribute[?]] private val isWindows = scala.util.Properties.isWin - private def checkName(component: Path)(using Context): Unit = if (isWindows) { + private def checkName(component: Path)(using ReadOnlyContext): Unit = if (isWindows) { val specials = raw"(?i)CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]".r val name = component.toString - def warnSpecial(): Unit = report.warning(em"path component is special Windows device: ${name}") + def warnSpecial(): Unit = ctx.reporter.warning(em"path component is special Windows device: ${name}") specials.findPrefixOf(name).foreach(prefix => if (prefix.length == name.length || name(prefix.length) == '.') warnSpecial()) } - def ensureDirForPath(baseDir: Path, filePath: Path)(using Context): Unit = { + def ensureDirForPath(baseDir: Path, filePath: Path)(using ReadOnlyContext): Unit = { import java.lang.Boolean.TRUE val parent = filePath.getParent if (!builtPaths.containsKey(parent)) { @@ -192,7 +285,7 @@ object FileWriters { private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = { + override def 
writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = { val path = base.resolve(relativePath) try { ensureDirForPath(base, path) @@ -213,10 +306,10 @@ object FileWriters { os.close() } catch { case e: FileConflictException => - report.error(em"error writing ${path.toString}: ${e.getMessage}") + ctx.reporter.error(em"error writing ${path.toString}: ${e.getMessage}") case e: java.nio.file.FileSystemException => - if (ctx.settings.Ydebug.value) e.printStackTrace() - report.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") + if (ctx.settings.debug) e.printStackTrace() + ctx.reporter.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") } AbstractFile.getFile(path) } @@ -241,7 +334,7 @@ object FileWriters { finally out.close() } - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context):NullableFile = { + override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext):NullableFile = { val outFile = getFile(base, relativePath) writeBytes(outFile, bytes) outFile diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index e95dbe97bb19..728f89966af0 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -11,7 +11,7 @@ import scala.jdk.CollectionConverters.* * that be can used as the compiler's output directory. */ class JarArchive private (root: Directory) extends PlainDirectory(root) { - def close(): Unit = jpath.getFileSystem().close() + def close(): Unit = this.synchronized(jpath.getFileSystem().close()) override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString) From a25d2523bd6dcaf0d130ad164d77ef34be0644a5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 11 Apr 2024 18:04:51 +0200 Subject: [PATCH 2/3] Address feedback. Adjust semantics. Now in the backend we block on the composition of the Zinc API callbacks and TASTy being written. We also now conditionally signal that we are "done" writing TASTy, based on whether any units were suspended. This works in line with the Zinc default, which will ignore the early output anyway in the presence of macros (the user can override this). Also move the async TASTy holder to the Run, as it now depends on whether the run suspended any units. TODO: give the user an option to optimise performance by preventing definition of such problematic macros (which would also avoid suspensions). 
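The new composition is roughly the following (an illustrative sketch with made-up names, not the exact compiler API): the backend awaits a future that zips "TASTy written" with "API extraction complete", and only signals Zinc once both have happened:

    import scala.concurrent.{Await, Future, Promise}
    import scala.concurrent.duration.Duration
    import scala.concurrent.ExecutionContext.Implicits.global

    val tastyWritten = Promise[Boolean]()  // completed by the Pickler write task
    val apiComplete  = Promise[Boolean]()  // completed by ExtractAPI
    val readyForZinc: Future[Boolean] =
      tastyWritten.future.zipWith(apiComplete.future)(_ && _)
    // block indefinitely rather than erroring after a fixed 5s timeout:
    Await.result(readyForZinc, Duration.Inf)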
--- .../tools/backend/jvm/ClassfileWriters.scala | 6 +- .../dotty/tools/backend/jvm/GenBCode.scala | 25 +- .../dotty/tools/dotc/CompilationUnit.scala | 10 +- compiler/src/dotty/tools/dotc/Driver.scala | 14 +- compiler/src/dotty/tools/dotc/Run.scala | 36 ++- .../src/dotty/tools/dotc/core/Contexts.scala | 10 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 9 +- .../src/dotty/tools/dotc/sbt/package.scala | 19 ++ .../dotty/tools/dotc/transform/Pickler.scala | 219 ++++++++++++------ compiler/src/dotty/tools/io/FileWriters.scala | 105 ++++++--- .../a/src/main/scala/a/A.scala | 7 + .../a/src/test/scala/a/Hello.scala | 10 + .../b/src/main/scala/b/Hello.scala | 9 + .../pipelining/pipelining-cancel/build.sbt | 12 + .../project/DottyInjectedPlugin.scala | 12 + sbt-test/pipelining/pipelining-cancel/test | 6 + .../pipelining-scala-macro/build.sbt | 24 +- 17 files changed, 375 insertions(+), 158 deletions(-) create mode 100644 sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/build.sbt create mode 100644 sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-cancel/test diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index ec251b4aa3f0..44498082c697 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -20,7 +20,11 @@ import dotty.tools.io.JarArchive import scala.language.unsafeNulls - +/** !!! This file is now copied in `dotty.tools.io.FileWriters` in a more general way that does not rely upon + * `PostProcessorFrontendAccess`, this should probably be changed to wrap that class instead. + * + * Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well. + */ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { type NullableFile = AbstractFile | Null import frontendAccess.{compilerSettings, backendReporting} diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index ade7c4705c52..d9f413a5d5ab 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -12,7 +12,7 @@ import scala.collection.mutable import scala.compiletime.uninitialized import java.util.concurrent.TimeoutException -import scala.concurrent.duration.given +import scala.concurrent.duration.Duration import scala.concurrent.Await class GenBCode extends Phase { self => @@ -94,20 +94,15 @@ class GenBCode extends Phase { self => try val result = super.runOn(units) generatedClassHandler.complete() - for holder <- ctx.asyncTastyPromise do - try - val asyncState = Await.result(holder.promise.future, 5.seconds) - for reporter <- asyncState.pending do - reporter.relayReports(frontendAccess.backendReporting) - catch - case _: TimeoutException => - report.error( - """Timeout (5s) in backend while waiting for async writing of TASTy files to -Yearly-tasty-output, - | this may be a bug in the compiler. 
- | - |Alternatively consider turning off pipelining for this project.""".stripMargin - ) - end for + try + for + async <- ctx.run.nn.asyncTasty + bufferedReporter <- async.sync() + do + bufferedReporter.relayReports(frontendAccess.backendReporting) + catch + case ex: Exception => + report.error(s"exception from future: $ex, (${Option(ex.getCause())})") result finally // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index adced57d5801..193baaf7a55b 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -98,11 +98,15 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn depRecorder.clear() if !suspended then suspended = true - ctx.run.nn.suspendedUnits += this + val currRun = ctx.run.nn + currRun.suspendedUnits += this + val isInliningPhase = ctx.phase == Phases.inliningPhase if ctx.settings.XprintSuspension.value then - ctx.run.nn.suspendedHints += (this -> hint) - if ctx.phase == Phases.inliningPhase then + currRun.suspendedHints += (this -> (hint, isInliningPhase)) + if isInliningPhase then suspendedAtInliningPhase = true + else + currRun.suspendedAtTyperPhase = true throw CompilationUnit.SuspendException() private var myAssignmentSpans: Map[Int, List[Span]] | Null = null diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 1e3d002d4391..f2f104d1c387 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -54,10 +54,10 @@ class Driver { if (ctx.settings.XprintSuspension.value) val suspendedHints = run.suspendedHints.toList report.echo(i"compiling suspended $suspendedUnits%, %") - for (unit, hint) <- suspendedHints do - report.echo(s" $unit: $hint") + for (unit, (hint, atInlining)) <- suspendedHints do + report.echo(s" $unit at ${if atInlining then "inlining" else "typer"}: $hint") val run1 = compiler.newRun - run1.compileSuspendedUnits(suspendedUnits) + run1.compileSuspendedUnits(suspendedUnits, !run.suspendedAtTyperPhase) finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) protected def initCtx: Context = (new ContextBase).initialCtx @@ -66,13 +66,6 @@ class Driver { protected def command: CompilerCommand = ScalacCommand - private def setupAsyncTasty(ictx: FreshContext): Unit = inContext(ictx): - ictx.settings.YearlyTastyOutput.value match - case earlyOut if earlyOut.isDirectory && earlyOut.exists => - ictx.setInitialAsyncTasty() - case _ => - () // do nothing - /** Setup context with initialized settings from CLI arguments, then check if there are any settings that * would change the default behaviour of the compiler. 
* @@ -89,7 +82,6 @@ class Driver { Positioned.init(using ictx) inContext(ictx) { - setupAsyncTasty(ictx) if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index ffc54e969b1f..02a0618bb6e9 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -37,6 +37,7 @@ import scala.io.Codec import Run.Progress import scala.compiletime.uninitialized import dotty.tools.dotc.transform.MegaPhase +import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder /** A compiler run. Exports various methods to compile source files */ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -130,7 +131,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint myUnits = us var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() - var suspendedHints: mutable.Map[CompilationUnit, String] = mutable.HashMap() + var suspendedHints: mutable.Map[CompilationUnit, (String, Boolean)] = mutable.HashMap() + + /** Were any units suspended in the typer phase? If so, then pipelined TASTy cannot complete. */ + var suspendedAtTyperPhase: Boolean = false def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then @@ -231,6 +235,22 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if !progress.isCancelled() then progress.tickSubphase() + /** If true, then we are done writing pipelined TASTy files (i.e., finished in a previous run). */ + private var myAsyncTastyWritten = false + + private var _asyncTasty: Option[AsyncTastyHolder] = None + + /** Populated when this run needs to write pipelined TASTy files. */ + def asyncTasty: Option[AsyncTastyHolder] = _asyncTasty + + private def initializeAsyncTasty()(using Context): () => Unit = + // should we provide a custom ExecutionContext? + // currently it is just used to call the `apiPhaseCompleted` and `dependencyPhaseCompleted` callbacks in Zinc + import scala.concurrent.ExecutionContext.Implicits.global + val async = AsyncTastyHolder.init + _asyncTasty = Some(async) + () => async.cancel() + /** Will be set to true if any of the compiled compilation units contains * a pureFunctions language import. */ @@ -348,7 +368,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint runCtx.setProperty(CyclicReference.Trace, new CyclicReference.Trace()) runCtx.withProgressCallback: cb => _progress = Progress(cb, this, fusedPhases.map(_.traversals).sum) + val cancelAsyncTasty: () => Unit = + if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.YearlyTastyOutput.isDefault then + initializeAsyncTasty() + else () => {} + runPhases(allPhases = fusedPhases)(using runCtx) + cancelAsyncTasty() + ctx.reporter.finalizeReporting() if (!ctx.reporter.hasErrors) Rewrites.writeBack() @@ -365,9 +392,12 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Is this run started via a compilingSuspended? 
*/ def isCompilingSuspended: Boolean = myCompilingSuspended - /** Compile units `us` which were suspended in a previous run */ - def compileSuspendedUnits(us: List[CompilationUnit]): Unit = + /** Compile units `us` which were suspended in a previous run, + * also signal if all necessary async tasty files were written in a previous run. + */ + def compileSuspendedUnits(us: List[CompilationUnit], asyncTastyWritten: Boolean): Unit = myCompilingSuspended = true + myAsyncTastyWritten = asyncTastyWritten for unit <- us do unit.suspended = false compileUnits(us) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index add8859948a6..ab6fda68a09e 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -31,11 +31,9 @@ import StdNames.nme import compiletime.uninitialized import scala.annotation.internal.sharable -import scala.concurrent.Promise import DenotTransformers.DenotTransformer import dotty.tools.dotc.profile.Profiler -import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder import dotty.tools.dotc.sbt.interfaces.{IncrementalCallback, ProgressCallback} import util.Property.Key import util.Store @@ -56,9 +54,8 @@ object Contexts { private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) private val (progressCallbackLoc, store11) = store10.newLocation[ProgressCallback | Null]() - private val (tastyPromiseLoc, store12) = store11.newLocation[Option[AsyncTastyHolder]](None) - private val initialStore = store12 + private val initialStore = store11 /** The current context */ inline def ctx(using ctx: Context): Context = ctx @@ -200,8 +197,6 @@ object Contexts { /** The current settings values */ def settingsState: SettingsState = store(settingsStateLoc) - def asyncTastyPromise: Option[AsyncTastyHolder] = store(tastyPromiseLoc) - /** The current compilation unit */ def compilationUnit: CompilationUnit = store(compilationUnitLoc) @@ -690,9 +685,6 @@ object Contexts { updateStore(compilationUnitLoc, compilationUnit) } - def setInitialAsyncTasty(): this.type = - assert(store(tastyPromiseLoc) == None, "trying to set async tasty promise twice!") - updateStore(tastyPromiseLoc, Some(AsyncTastyHolder(settings.YearlyTastyOutput.value, Promise()))) def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 962529e9786a..7da119fedf52 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -90,14 +90,7 @@ class ExtractAPI extends Phase { val sourceFile = cls.source if sourceFile.exists && cls.isDefinedInCurrentRun then recordNonLocalClass(cls, sourceFile, cb) - for holder <- ctx.asyncTastyPromise do - import scala.concurrent.ExecutionContext.Implicits.global - // do not expect to be completed with failure - holder.promise.future.foreach: state => - if !state.hasErrors then - // We also await the promise at GenBCode to emit warnings/errors - cb.apiPhaseCompleted() - cb.dependencyPhaseCompleted() + ctx.run.nn.asyncTasty.foreach(_.signalAPIComplete()) private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: 
interfaces.IncrementalCallback)(using Context): Unit = def registerProductNames(fullClassName: String, binaryClassName: String) = diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala index dc0df381f08f..12e7f6eceac7 100644 --- a/compiler/src/dotty/tools/dotc/sbt/package.scala +++ b/compiler/src/dotty/tools/dotc/sbt/package.scala @@ -6,10 +6,29 @@ import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix import dotty.tools.dotc.core.Names.Name import dotty.tools.dotc.core.Names.termName +import interfaces.IncrementalCallback +import dotty.tools.io.FileWriters.BufferingReporter +import dotty.tools.dotc.core.Decorators.em + +import scala.util.chaining.given +import scala.util.control.NonFatal + inline val TermNameHash = 1987 // 300th prime inline val TypeNameHash = 1993 // 301st prime inline val InlineParamHash = 1997 // 302nd prime +def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): Option[BufferingReporter] = + val zincReporter = pending match + case Some(buffered) => buffered + case None => BufferingReporter() + try + cb.apiPhaseCompleted() + cb.dependencyPhaseCompleted() + catch + case NonFatal(t) => + zincReporter.exception(em"signaling API and Dependencies phases completion", t) + if zincReporter.hasErrors then Some(zincReporter) else None + extension (sym: Symbol) /** Mangle a JVM symbol name in a format better suited for internal uses by sbt. diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 30e6b0c8332f..b7a7d874db3f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -26,7 +26,12 @@ import scala.concurrent.Promise import dotty.tools.dotc.transform.Pickler.writeSigFilesAsync import scala.util.chaining.given -import dotty.tools.io.FileWriters.BufferingDelayedReporting +import dotty.tools.io.FileWriters.{EagerReporter, BufferingReporter} +import dotty.tools.dotc.sbt.interfaces.IncrementalCallback +import dotty.tools.dotc.sbt.asyncZincPhasesCompleted +import scala.concurrent.ExecutionContext +import scala.util.control.NonFatal +import java.util.concurrent.atomic.AtomicBoolean object Pickler { val name: String = "pickler" @@ -38,84 +43,158 @@ object Pickler { */ inline val ParallelPickling = true - class AsyncTastyHolder(val earlyOut: AbstractFile, val promise: Promise[AsyncTastyState]) - class AsyncTastyState(val hasErrors: Boolean, val pending: Option[BufferingDelayedReporting]) - - // Why we only write to early output in the first run? - // =================================================== - // TL;DR the point of pipeline compilation is to start downstream projects early, - // so we don't want to wait for suspended units to be compiled. - // - // But why is it safe to ignore suspended units? - // If this project contains a transparent macro that is called in the same project, - // the compilation unit of that call will be suspended (if the macro implementation - // is also in this project), causing a second run. - // However before we do that run, we will have already requested sbt to begin - // early downstream compilation. This means that the suspended definitions will not - // be visible in *early* downstream compilation. - // - // However, sbt will by default prevent downstream compilation happening in this scenario, - // due to the existence of macro definitions. 
So we are protected from failure if user tries - // to use the suspended definitions. - // - // Additionally, it is recommended for the user to move macro implementations to another project - // if they want to force early output. In this scenario the suspensions will no longer occur, so now - // they will become visible in the early-output. - // - // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` - // for examples of this in action. - // - // Therefore we only need to write to early output in the first run. We also provide the option - // to diagnose suspensions with the `-Yno-suspended-units` flag. - def writeSigFilesAsync( - tasks: List[(String, Array[Byte])], - writer: EarlyFileWriter, - promise: Promise[AsyncTastyState])(using ctx: ReadOnlyContext): Unit = { - try - for (internalName, pickled) <- tasks do - val _ = writer.writeTasty(internalName, pickled) - finally - try - writer.close() - finally - promise.success( - AsyncTastyState( + /** A holder for synchronization points and reports when writing TASTy asynchronously. + * The callbacks should only be called once. + */ + class AsyncTastyHolder private ( + val earlyOut: AbstractFile, incCallback: IncrementalCallback | Null)(using @constructorOnly ex: ExecutionContext): + import scala.concurrent.Future as StdFuture + import scala.concurrent.Await + import scala.concurrent.duration.Duration + import AsyncTastyHolder.Signal + + private val _cancel = AtomicBoolean(false) + + /** Cancel any outstanding work. + * This should be done at the end of a run, e.g. if there were errors that prevented reaching the backend. */ + def cancel(): Unit = + while + val cancelled = _cancel.get() + !cancelled && !_cancel.compareAndSet(false, true) + do () + if incCallback != null then + asyncTastyWritten.trySuccess(None) // cancel the wait for TASTy writing + if incCallback != null then + asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion + + /** Check if the work has been cancelled. */ + def cancelled: Boolean = _cancel.get() + + private val asyncTastyWritten = Promise[Option[AsyncTastyHolder.State]]() + private val asyncAPIComplete = + if incCallback == null then Promise.successful(Signal.Done) // no need to wait for API completion + else Promise[Signal]() + + private val backendFuture: StdFuture[Option[BufferingReporter]] = + val asyncState = asyncTastyWritten.future + .zipWith(asyncAPIComplete.future)((state, api) => state.filterNot(_ => api == Signal.Cancelled)) + asyncState.map: optState => + optState.flatMap: state => + if incCallback != null && state.done && !state.hasErrors then + asyncZincPhasesCompleted(incCallback, state.pending) + else state.pending + + /** Awaits the state of async TASTy operations indefinitely; optionally returns any buffered reports. */ + def sync(): Option[BufferingReporter] = + Await.result(backendFuture, Duration.Inf) + + def signalAPIComplete(): Unit = + if incCallback != null then + asyncAPIComplete.trySuccess(Signal.Done) + + /** Should only be called once. */ + def signalAsyncTastyWritten()(using ctx: ReadOnlyContext): Unit = + val done = !ctx.run.suspendedAtTyperPhase + if done then + try + // when we are done, i.e. no suspended units, + // we should close the file system so it can be read in the same JVM process. + // Note: we close even if we have been cancelled. 
+ earlyOut match + case jar: JarArchive => jar.close() + case _ => + catch + case NonFatal(t) => + ctx.reporter.error(em"Error closing early output: ${t}") + + asyncTastyWritten.trySuccess: + Some( + AsyncTastyHolder.State( hasErrors = ctx.reporter.hasErrors, + done = done, pending = ( ctx.reporter match - case buffered: BufferingDelayedReporting => Some(buffered) - case _ => None + case buffered: BufferingReporter => Some(buffered) + case _: EagerReporter => None // already reported ) ) ) - end try - end try + end signalAsyncTastyWritten + end AsyncTastyHolder + + object AsyncTastyHolder: + /** The state after writing async TASTy. Any errors should have been reported, or are pending. + * If suspendedUnits is true, then we can't signal Zinc yet. + */ + private class State(val hasErrors: Boolean, val done: Boolean, val pending: Option[BufferingReporter]) + private enum Signal: + case Done, Cancelled + + /** Create a holder for the asynchronous state of early-TASTy operations. + * The `ExecutionContext` parameter is used to call into Zinc to signal + * that the API and Dependency phases are complete. + */ + def init(using Context, ExecutionContext): AsyncTastyHolder = + AsyncTastyHolder(ctx.settings.YearlyTastyOutput.value, ctx.incCallback) + + + /** Asynchronously writes TASTy files to the destination -Yearly-tasty-output. + * If no units have been suspended, then we are "done", which enables Zinc to be signalled. + * + * If there are suspended units (due to calling a macro defined in the same run), then the API is incomplete, + * so it would be a mistake to signal Zinc. This is a sensible default, because Zinc by default will ignore the + * signal if there are macros in the API. + * - See `sbt-test/pipelining/pipelining-scala-macro` for an example. + * + * TODO: The user can override this default behaviour in Zinc to always listen to the signal + * (e.g. if they define the macro implementation in an upstream, non-pipelined project). + * - See `sbt-test/pipelining/pipelining-scala-macro-force` where we force Zinc to listen to the signal. + * If the user wants to force early output to be written, then they probably also want to benefit from pipelining, + * which then makes suspension problematic as it increases compilation times. + * Proposal: perhaps we should provide a flag `-Ystrict-pipelining` (as an alternative to `-Yno-suspended-units`), + * which fails on the definition of a macro whose implementation is in the same project + * (regardless of whether it is used); this is also stricter than preventing suspension at typer. + * The user is then certain that they are always benefitting as much as possible from pipelining. 
+ */ + def writeSigFilesAsync( + tasks: List[(String, Array[Byte])], + writer: EarlyFileWriter, + async: AsyncTastyHolder)(using ctx: ReadOnlyContext): Unit = { + try + try + for (internalName, pickled) <- tasks do + if !async.cancelled then + val _ = writer.writeTasty(internalName, pickled) + catch + case NonFatal(t) => ctx.reporter.exception(em"writing TASTy to early output", t) + finally + writer.close() + catch + case NonFatal(t) => ctx.reporter.exception(em"closing early output writer", t) + finally + async.signalAsyncTastyWritten() } - class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile): - def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest), dest) - - export writer.writeTasty + class EarlyFileWriter private (writer: TastyWriter): + def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest)) - def close(): Unit = - writer.close() - origin match { - case jar: JarArchive => jar.close() // also close the file system - case _ => - } + export writer.{writeTasty, close} } /** This phase pickles trees */ class Pickler extends Phase { import ast.tpd.* - def doAsyncTasty(using Context): Boolean = ctx.asyncTastyPromise.isDefined + private def doAsyncTasty(using Context): Boolean = ctx.run.nn.asyncTasty.isDefined + + private var fastDoAsyncTasty: Boolean = false override def phaseName: String = Pickler.name override def description: String = Pickler.description - // No need to repickle trees coming from TASTY + // No need to repickle trees coming from TASTY, however in the case that we need to write TASTy to early-output, + // then we need to run this phase to send the tasty from compilation units to the early-output. override def isRunnable(using Context): Boolean = super.isRunnable && (!ctx.settings.fromTasty.value || doAsyncTasty) @@ -189,7 +268,7 @@ class Pickler extends Phase { // skip the rest of the phase, as tasty is already "pickled", // however we still need to set up tasks to write TASTy to // early output when pipelining is enabled. 
- if doAsyncTasty then + if fastDoAsyncTasty then runFromTasty(unit) return () @@ -226,7 +305,7 @@ class Pickler extends Phase { val positionWarnings = new mutable.ListBuffer[Message]() def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) - val internalName = if doAsyncTasty then computeInternalName(cls) else "" + val internalName = if fastDoAsyncTasty then computeInternalName(cls) else "" def computePickled(): Array[Byte] = inContext(ctx.fresh) { serialized.run { scratch => @@ -258,7 +337,7 @@ println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) println(i"**** end of pickled info of $cls") - if doAsyncTasty then + if fastDoAsyncTasty then serialized.commit(internalName, pickled) pickled @@ -289,19 +368,21 @@ } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val isConcurrent = useExecutor + val useExecutor = this.useExecutor - val writeTask: Option[() => Unit] = ctx.asyncTastyPromise.map: holder => - () => - given ReadOnlyContext = if isConcurrent then ReadOnlyContext.buffered else ReadOnlyContext.eager - val writer = Pickler.EarlyFileWriter(holder.earlyOut) - writeSigFilesAsync(serialized.result(), writer, holder.promise) + val writeTask: Option[() => Unit] = + ctx.run.nn.asyncTasty.map: async => + fastDoAsyncTasty = true + () => + given ReadOnlyContext = if useExecutor then ReadOnlyContext.buffered else ReadOnlyContext.eager + val writer = Pickler.EarlyFileWriter(async.earlyOut) + writeSigFilesAsync(serialized.result(), writer, async) def runPhase(writeCB: (doWrite: () => Unit) => Unit) = super.runOn(units).tap(_ => writeTask.foreach(writeCB)) val result = - if isConcurrent then + if useExecutor then executor.start() try runPhase: doWrite => diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index d8c397a6166f..3d0ddccfd1f8 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -36,15 +36,21 @@ import dotty.tools.dotc.report import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting import scala.annotation.constructorOnly - -/** Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed */ +import java.util.concurrent.atomic.AtomicReference +import java.util.concurrent.atomic.AtomicBoolean + +/** !!! Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed. + * This should probably be changed to wrap that class instead. + * + * Until then, any changes to this file should be copied to `dotty.tools.backend.jvm.ClassfileWriters` as well. 
+ */ object FileWriters { type InternalName = String type NullableFile = AbstractFile | Null inline def ctx(using ReadOnlyContext): ReadOnlyContext = summon[ReadOnlyContext] - sealed trait DelayedReporting { + sealed trait DelayedReporter { def hasErrors: Boolean def error(message: Context ?=> Message, position: SourcePosition): Unit def warning(message: Context ?=> Message, position: SourcePosition): Unit @@ -52,9 +58,14 @@ object FileWriters { def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition) def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition) + final def exception(reason: Context ?=> Message, throwable: Throwable): Unit = + error({ + val trace = throwable.getStackTrace().nn.mkString("\n ") + em"An unhandled exception was thrown in the compiler while\n ${reason.message}.\n${throwable}\n $trace" + }, NoSourcePosition) } - final class EagerDelayedReporting(using captured: Context) extends DelayedReporting: + final class EagerReporter(using captured: Context) extends DelayedReporter: private var _hasErrors = false def hasErrors: Boolean = _hasErrors @@ -68,62 +79,96 @@ object FileWriters { def log(message: String): Unit = report.echo(message) - final class BufferingDelayedReporting extends DelayedReporting { + final class BufferingReporter extends DelayedReporter { // We optimise access to the buffered reports for the common case - that there are no warning/errors to report // We could use a listBuffer etc - but that would be extra allocation in the common case - // Note - all access is externally synchronized, as this allow the reports to be generated in on thread and - // consumed in another - private var bufferedReports = List.empty[Report] - private var _hasErrors = false + // buffered logs are updated atomically. 
+
+    private val _bufferedReports = AtomicReference(List.empty[Report])
+    private val _hasErrors = AtomicBoolean(false)
+
     enum Report(val relay: Context ?=> BackendReporting => Unit):
       case Error(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.error(message(ctx), position))
       case Warning(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.warning(message(ctx), position))
       case Log(message: String) extends Report(_.log(message))

-    def hasErrors: Boolean = synchronized:
-      _hasErrors
+    /** Atomically record that an error occurred */
+    private def recordError(): Unit =
+      while
+        val old = _hasErrors.get
+        !old && !_hasErrors.compareAndSet(old, true)
+      do ()
+
+    /** Atomically add a report to the log */
+    private def recordReport(report: Report): Unit =
+      while
+        val old = _bufferedReports.get
+        !_bufferedReports.compareAndSet(old, report :: old)
+      do ()
+
+    /** atomically extract and clear the buffered reports */
+    private def resetReports(): List[Report] =
+      while
+        val old = _bufferedReports.get
+        if _bufferedReports.compareAndSet(old, Nil) then
+          return old
+        else
+          true
+      do ()
+      throw new AssertionError("Unreachable")
+
+    def hasErrors: Boolean = _hasErrors.get()
+    def hasReports: Boolean = _bufferedReports.get().nonEmpty

-    def error(message: Context ?=> Message, position: SourcePosition): Unit = synchronized:
-      bufferedReports ::= Report.Error({case given Context => message}, position)
-      _hasErrors = true
+    def error(message: Context ?=> Message, position: SourcePosition): Unit =
+      recordReport(Report.Error({case given Context => message}, position))
+      recordError()

-    def warning(message: Context ?=> Message, position: SourcePosition): Unit = synchronized:
-      bufferedReports ::= Report.Warning({case given Context => message}, position)
+    def warning(message: Context ?=> Message, position: SourcePosition): Unit =
+      recordReport(Report.Warning({case given Context => message}, position))

-    def log(message: String): Unit = synchronized:
-      bufferedReports ::= Report.Log(message)
+    def log(message: String): Unit =
+      recordReport(Report.Log(message))
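    // [Editorial sketch, not part of the patch] The three retry loops above are
    // lock-free but verbose; the third patch in this series replaces the first
    // two with the equivalent single-step atomic operations:
    //
    //   private def recordError(): Unit = _hasErrors.set(true)
    //   private def recordReport(report: Report): Unit = _bufferedReports.getAndUpdate(report :: _)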
     /** Should only be called from main compiler thread. */
-    def relayReports(toReporting: BackendReporting)(using Context): Unit = synchronized:
-      if bufferedReports.nonEmpty then
-        bufferedReports.reverse.foreach(_.relay(toReporting))
-        bufferedReports = Nil
+    def relayReports(toReporting: BackendReporting)(using Context): Unit =
+      val reports = resetReports()
+      if reports.nonEmpty then
+        reports.reverse.foreach(_.relay(toReporting))
   }

-  trait ReadSettings:
+  trait ReadOnlySettings:
     def jarCompressionLevel: Int
     def debug: Boolean

+  trait ReadOnlyRun:
+    def suspendedAtTyperPhase: Boolean
+
-    val settings: ReadSettings
-    val reporter: DelayedReporting
+  trait ReadOnlyContext:
+    val run: ReadOnlyRun
+    val settings: ReadOnlySettings
+    val reporter: DelayedReporter

   trait BufferedReadOnlyContext extends ReadOnlyContext:
-    val reporter: BufferingDelayedReporting
+    val reporter: BufferingReporter

   object ReadOnlyContext:
-    def readSettings(using ctx: Context): ReadSettings = new:
+    def readSettings(using ctx: Context): ReadOnlySettings = new:
       val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value
       val debug = ctx.settings.Ydebug.value

+    def readRun(using ctx: Context): ReadOnlyRun = new:
+      val suspendedAtTyperPhase = ctx.run.suspendedAtTyperPhase
+
     def buffered(using Context): BufferedReadOnlyContext = new:
       val settings = readSettings
-      val reporter = BufferingDelayedReporting()
+      val reporter = BufferingReporter()
+      val run = readRun

     def eager(using Context): ReadOnlyContext = new:
       val settings = readSettings
-      val reporter = EagerDelayedReporting()
+      val reporter = EagerReporter()
+      val run = readRun

   /**
    * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the
diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala
new file mode 100644
index 000000000000..35b27f3d4662
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala
@@ -0,0 +1,7 @@
+package a
+
+import scala.util.Success
+
+object A {
+  val foo: (1,2,3) = (1,2,3)
+}
diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala
new file mode 100644
index 000000000000..629f1c0e6cfe
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala
@@ -0,0 +1,10 @@
+package a
+
+import org.junit.Test
+
+class Hello {
+
+  @Test def test(): Unit = {
+    assert(A.foo == (1,2,3))
+  }
+}
diff --git a/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala
new file mode 100644
index 000000000000..bbb4eb5ba7f7
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala
@@ -0,0 +1,9 @@
+package b
+
+import a.A
+
+object Hello {
+  @main def test(): Unit = {
+    assert(A.foo == (1,2,3))
+  }
+}
diff --git a/sbt-test/pipelining/pipelining-cancel/build.sbt b/sbt-test/pipelining/pipelining-cancel/build.sbt
new file mode 100644
index 000000000000..f23e65895c78
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/build.sbt
@@ -0,0 +1,12 @@
+ThisBuild / usePipelining := true
+
+lazy val a = project.in(file("a"))
+  .settings(
+    scalacOptions += "-Ystop-after:pickler", // before ExtractAPI is reached, will cancel the pipeline output
+  )
+
+lazy val b = project.in(file("b"))
+  .dependsOn(a)
+  .settings(
+    scalacOptions += "-Ycheck:all",
+  )
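The `-Ystop-after:pickler` option above ends project `a`'s compile run before the backend, exercising the cancellation path that the third patch below adjusts. The following is a minimal standalone sketch of that idempotent-cancel pattern, assuming nothing from the compiler: `CancellableWork`, `complete`, and `demo` are illustrative names, not part of this patch.

    import java.util.concurrent.atomic.AtomicBoolean
    import scala.concurrent.{Await, Promise}
    import scala.concurrent.duration.*

    // compareAndSet guarantees the promise is resolved at most once,
    // even if cancel() races with a normal completion.
    final class CancellableWork[T]:
      private val cancelled = AtomicBoolean(false)
      val result: Promise[Option[T]] = Promise()

      def cancel(): Unit =
        if cancelled.compareAndSet(false, true) then
          result.trySuccess(None) // waiters observe cancellation as None

      def complete(value: T): Unit =
        if !cancelled.get() then result.trySuccess(Some(value))

    @main def demo(): Unit =
      val work = CancellableWork[String]()
      work.cancel()
      work.complete("tasty written") // lost the race: cancellation already won
      assert(Await.result(work.result.future, 1.second).isEmpty)

As with `AsyncTastyHolder.cancel` in the third patch, the waiting side sees cancellation as an empty result rather than an exception.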
diff --git a/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala
new file mode 100644
index 000000000000..69f15d168bfc
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala
@@ -0,0 +1,12 @@
+import sbt._
+import Keys._
+
+object DottyInjectedPlugin extends AutoPlugin {
+  override def requires = plugins.JvmPlugin
+  override def trigger = allRequirements
+
+  override val projectSettings = Seq(
+    scalaVersion := sys.props("plugin.scalaVersion"),
+    scalacOptions += "-source:3.0-migration"
+  )
+}
diff --git a/sbt-test/pipelining/pipelining-cancel/test b/sbt-test/pipelining/pipelining-cancel/test
new file mode 100644
index 000000000000..d84f55ca3c31
--- /dev/null
+++ b/sbt-test/pipelining/pipelining-cancel/test
@@ -0,0 +1,6 @@
+# - Test depending on a project where the upstream compilation stops short of reaching the backend,
+#   and cancels pipelined TASTy writing.
+# - Because `a` finishes its compile run before sending the signal to Zinc
+#   that the pipeline jar is written, sbt will continue to the downstream project anyway.
+# - Downstream project `b` will fail as it can't find a.A from upstream.
+-> b/compile
diff --git a/sbt-test/pipelining/pipelining-scala-macro/build.sbt b/sbt-test/pipelining/pipelining-scala-macro/build.sbt
index f8576cdae796..5f703bb0d815 100644
--- a/sbt-test/pipelining/pipelining-scala-macro/build.sbt
+++ b/sbt-test/pipelining/pipelining-scala-macro/build.sbt
@@ -6,14 +6,14 @@ ThisBuild / usePipelining := true
 // force a failure by always forcing early output.
 lazy val a = project.in(file("a"))
   .settings(
-    scalacOptions += "-Ycheck:all",
+    // scalacOptions += "-Ycheck:all",
     scalacOptions += "-Xprint-suspension",
     Compile / incOptions := {
       val old = (Compile / incOptions).value
       val hooks = old.externalHooks
       val newHooks = hooks.withExternalLookup(
         new sbt.internal.inc.NoopExternalLookup {
-          @volatile var knownSuspension = false
+          @volatile var earlyOutputChecks = 0

           def didFindMacros(analysis: xsbti.compile.CompileAnalysis) = {
             val foundMacros = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal.values.exists(_.hasMacro)
@@ -23,20 +23,26 @@ lazy val a = project.in(file("a"))

           // force early output, this is safe because the macro class from `macros` will be available.
           override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = {
+            earlyOutputChecks += 1
+            assert(earlyOutputChecks <= 2, "should only be called twice (apiPhaseCompleted, dependencyPhaseCompleted).")
             val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal
             val a_A = internalClasses.get("a.A")
             val a_ASuspendTyper = internalClasses.get("a.ASuspendTyper")
             val a_ASuspendInlining = internalClasses.get("a.ASuspendInlining")
+
+            // Both `a.A` and `a.ASuspendInlining` should be found in the analysis.
+            // Even though `a.ASuspendInlining` suspends, it does so at inlining, so we should still
+            // record API for it in the first run.
             assert(a_A.isDefined, s"`a.A` wasn't found.")
+            assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.")

-            if (!knownSuspension) {
-              // this callback is called multiple times, so we only want to assert the first time,
-              // in subsequent runs the suspended definition will be "resumed", so a.ASuspendTyper be found.
-              knownSuspension = true
-              assert(a_ASuspendTyper.isEmpty, s"`a.ASuspendTyper` should have been suspended initially.")
-            }
+            // In run 1, `a.ASuspendTyper` would have suspended at typer and so not be present in the Analysis,
+            // meaning the early output jar would not yet have been closed.
+            // Because it is present here, we waited until the second run to close the early output jar,
+            // at which point we recorded API for `a.ASuspendTyper`, and, having closed the jar,
+            // we sent the signal to Zinc that the early output was written.
+            assert(a_ASuspendTyper.isDefined, s"`a.ASuspendTyper` wasn't found.")

-            assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.")

             // do what sbt does typically,
             // it will not force early output because macros are found
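One more note before the third patch: the `BufferingReporter` introduced above exists so that worker threads can record diagnostics while only the main compiler thread talks to the real reporting pipeline. Below is a self-contained sketch of that handoff, with an illustrative `Buffer` class and `relayDemo` entry point standing in for the compiler's types.

    import java.util.concurrent.atomic.AtomicReference
    import scala.concurrent.{Await, Future}
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration.*

    // Reports are prepended lock-free by any thread, then drained (and
    // reversed back into arrival order) by the single consuming thread.
    final class Buffer[A]:
      private val ref = AtomicReference(List.empty[A])
      def add(a: A): Unit = ref.getAndUpdate(a :: _)
      def drain(): List[A] = ref.getAndSet(Nil).reverse

    @main def relayDemo(): Unit =
      val buf = Buffer[String]()
      val worker = Future(buf.add("warning: produced on a background thread"))
      Await.ready(worker, 1.second) // the safe point: the worker is done
      buf.drain().foreach(println)  // main thread relays the buffered reports

The prepend-then-reverse shape is the same one `relayReports` uses above: `reports.reverse.foreach(_.relay(toReporting))`.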
From 4bfc43ffa5bbca345ffe41b87bb91f0caf5158ba Mon Sep 17 00:00:00 2001
From: Jamie Thompson
Date: Tue, 16 Apr 2024 18:10:29 +0200
Subject: [PATCH 3/3] adjust cancellation

---
 .../src/dotty/tools/dotc/sbt/package.scala    |  4 +--
 .../dotty/tools/dotc/transform/Pickler.scala  | 27 +++++++----------
 compiler/src/dotty/tools/io/FileWriters.scala | 30 ++++++++-----------
 3 files changed, 26 insertions(+), 35 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala
index 12e7f6eceac7..1c6b38b07a84 100644
--- a/compiler/src/dotty/tools/dotc/sbt/package.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/package.scala
@@ -17,7 +17,7 @@ inline val TermNameHash = 1987 // 300th prime
 inline val TypeNameHash = 1993 // 301st prime
 inline val InlineParamHash = 1997 // 302nd prime

-def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): Option[BufferingReporter] =
+def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): BufferingReporter =
   val zincReporter = pending match
     case Some(buffered) => buffered
     case None => BufferingReporter()
@@ -27,7 +27,7 @@ def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingR
   catch
     case NonFatal(t) => zincReporter.exception(em"signaling API and Dependencies phases completion", t)

-  if zincReporter.hasErrors then Some(zincReporter) else None
+  zincReporter

 extension (sym: Symbol)
diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
index b7a7d874db3f..6fe687072828 100644
--- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
@@ -53,22 +53,21 @@ object Pickler {
     import scala.concurrent.duration.Duration
     import AsyncTastyHolder.Signal

-    private val _cancel = AtomicBoolean(false)
+    private val _cancelled = AtomicBoolean(false)

     /**Cancel any outstanding work.
-     * This should be done at the end of a run, e.g. if there were errors that prevented reaching the backend. */
+     * This should be done at the end of a run, e.g. background work may still be running even though
+     * errors in the main thread will prevent reaching the backend.
+     */
     def cancel(): Unit =
-      while
-        val cancelled = _cancel.get()
-        !cancelled && !_cancel.compareAndSet(false, true)
-      do ()
-      if incCallback != null then
+      if _cancelled.compareAndSet(false, true) then
         asyncTastyWritten.trySuccess(None) // cancel the wait for TASTy writing
-      if incCallback != null then
-        asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion
+        if incCallback != null then
+          asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion
+      else
+        () // nothing else to do

     /** check if the work has been cancelled. */
-    def cancelled: Boolean = _cancel.get()
+    def cancelled: Boolean = _cancelled.get()

     private val asyncTastyWritten = Promise[Option[AsyncTastyHolder.State]]()
     private val asyncAPIComplete =
@@ -81,7 +80,7 @@ object Pickler {
       asyncState.map: optState =>
        optState.flatMap: state =>
          if incCallback != null && state.done && !state.hasErrors then
-            asyncZincPhasesCompleted(incCallback, state.pending)
+            asyncZincPhasesCompleted(incCallback, state.pending).toBuffered
          else state.pending

     /** awaits the state of async TASTy operations indefinitely, optionally returns any buffered reports. */
@@ -112,11 +111,7 @@ object Pickler {
           AsyncTastyHolder.State(
             hasErrors = ctx.reporter.hasErrors,
             done = done,
-            pending = (
-              ctx.reporter match
-                case buffered: BufferingReporter => Some(buffered)
-                case _: EagerReporter => None // already reported
-            )
+            pending = ctx.reporter.toBuffered
           )
         )
     end signalAsyncTastyWritten
diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala
index 3d0ddccfd1f8..87825b025734 100644
--- a/compiler/src/dotty/tools/io/FileWriters.scala
+++ b/compiler/src/dotty/tools/io/FileWriters.scala
@@ -38,6 +38,7 @@ import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting
 import scala.annotation.constructorOnly
 import java.util.concurrent.atomic.AtomicReference
 import java.util.concurrent.atomic.AtomicBoolean
+import java.util.ConcurrentModificationException

 /** !!!Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed.
 *  This should probably be changed to wrap that class instead.
@@ -56,6 +57,11 @@ object FileWriters {
     def warning(message: Context ?=> Message, position: SourcePosition): Unit
     def log(message: String): Unit

+    final def toBuffered: Option[BufferingReporter] = this match
+      case buffered: BufferingReporter =>
+        if buffered.hasReports then Some(buffered) else None
+      case _: EagerReporter => None
+
     def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition)
     def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition)
     final def exception(reason: Context ?=> Message, throwable: Throwable): Unit =
@@ -94,28 +100,18 @@ object FileWriters {

     /** Atomically record that an error occurred */
     private def recordError(): Unit =
-      while
-        val old = _hasErrors.get
-        !old && !_hasErrors.compareAndSet(old, true)
-      do ()
+      _hasErrors.set(true)

     /** Atomically add a report to the log */
     private def recordReport(report: Report): Unit =
-      while
-        val old = _bufferedReports.get
-        !_bufferedReports.compareAndSet(old, report :: old)
-      do ()
+      _bufferedReports.getAndUpdate(report :: _)

-    /** atomically extract and clear the buffered reports */
+    /** atomically extract and clear the buffered reports, must only be called at a synchronization point. */
     private def resetReports(): List[Report] =
-      while
-        val old = _bufferedReports.get
-        if _bufferedReports.compareAndSet(old, Nil) then
-          return old
-        else
-          true
-      do ()
-      throw new AssertionError("Unreachable")
+      val curr = _bufferedReports.get()
+      if curr.nonEmpty && !_bufferedReports.compareAndSet(curr, Nil) then
+        throw ConcurrentModificationException("concurrent modification of buffered reports")
+      else curr

     def hasErrors: Boolean = _hasErrors.get()
     def hasReports: Boolean = _bufferedReports.get().nonEmpty
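A closing note on the contract that the reworked `resetReports` asserts: the single `compareAndSet` succeeds only if no producer touches the buffer between the read and the swap, so draining must happen after every writer has finished. A sketch of that discipline under the same assumption; `DrainOnce` and `drainDemo` are illustrative names, not the compiler's API.

    import java.util.concurrent.atomic.AtomicReference
    import java.util.ConcurrentModificationException

    final class DrainOnce[A]:
      private val buf = AtomicReference(List.empty[A])
      def record(a: A): Unit = buf.getAndUpdate(a :: _)
      // Must only be called at a synchronization point: a concurrent record()
      // between the get and the compareAndSet makes the single CAS fail.
      def drain(): List[A] =
        val curr = buf.get()
        if curr.nonEmpty && !buf.compareAndSet(curr, Nil) then
          throw ConcurrentModificationException("drained while producers were active")
        else curr.reverse

    @main def drainDemo(): Unit =
      val reports = DrainOnce[String]()
      val producers = (1 to 4).map(i => Thread(() => reports.record(s"report $i")))
      producers.foreach(_.start())
      producers.foreach(_.join())      // synchronization point: all writers done
      reports.drain().foreach(println) // a single safe drain on the main thread

Throwing on a failed CAS, rather than retrying as the earlier version did, turns a violated single-consumer assumption into a loud failure instead of a silent loop.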