From defc4551806a3d3c28a51062b47a46f295c16bb7 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 25 Apr 2017 17:13:15 +0200 Subject: [PATCH 01/12] Make sure only aliases in dotty.io are used Previously, we would use `scala.reflect.io._` directly. The first part of abstracting away reflect is to kill this direct dependency. --- compiler/sjs/backend/sjs/JSCodeGen.scala | 2 +- .../tools/backend/jvm/CollectEntryPoints.scala | 2 +- .../backend/jvm/DottyBackendInterface.scala | 2 +- .../src/dotty/tools/backend/jvm/GenBCode.scala | 6 +++--- .../src/dotty/tools/backend/jvm/LabelDefs.scala | 2 +- compiler/src/dotty/tools/dotc/Run.scala | 2 +- .../dotc/classpath/AggregateClassPath.scala | 8 +++----- .../dotty/tools/dotc/classpath/ClassPath.scala | 2 +- .../tools/dotc/classpath/ClassPathFactory.scala | 4 ++-- .../dotc/classpath/DirectoryClassPath.scala | 9 ++++----- .../dotty/tools/dotc/classpath/FileUtils.scala | 6 +++--- .../classpath/VirtualDirectoryClassPath.scala | 2 +- .../classpath/ZipAndJarFileLookupFactory.scala | 3 +-- .../dotc/classpath/ZipArchiveFileLookup.scala | 3 +-- .../dotty/tools/dotc/core/SymDenotations.scala | 2 +- .../tools/dotc/repl/CompilingInterpreter.scala | 2 +- compiler/src/dotty/tools/dotc/repl/REPL.scala | 2 +- compiler/src/dotty/tools/io/package.scala | 4 +++- compiler/src/scala/tools/nsc/io/package.scala | 16 ++++++++-------- compiler/test/dotc/tests.scala | 2 +- .../tools/backend/jvm/DottyBytecodeTest.scala | 2 +- .../test/dotty/tools/dotc/CompilerTest.scala | 2 +- .../dotty/tools/dotc/parsing/DeSugarTest.scala | 2 +- .../dotty/tools/dotc/parsing/ParserTest.scala | 2 +- .../dotty/tools/dotc/parsing/ScannerTest.scala | 2 +- .../dotc/transform/PatmatExhaustivityTest.scala | 2 +- 26 files changed, 45 insertions(+), 48 deletions(-) diff --git a/compiler/sjs/backend/sjs/JSCodeGen.scala b/compiler/sjs/backend/sjs/JSCodeGen.scala index 69a5651fc5c3..25974a641251 100644 --- a/compiler/sjs/backend/sjs/JSCodeGen.scala +++ 
b/compiler/sjs/backend/sjs/JSCodeGen.scala @@ -182,7 +182,7 @@ class JSCodeGen()(implicit ctx: Context) { private def getFileFor(cunit: CompilationUnit, sym: Symbol, suffix: String) = { - import scala.reflect.io._ + import dotty.tools.io._ val outputDirectory: AbstractFile = // TODO Support virtual files new PlainDirectory(new Directory(new java.io.File(ctx.settings.d.value))) diff --git a/compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala b/compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala index abcbbbb83059..bd3a1894c2cc 100644 --- a/compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala +++ b/compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala @@ -15,7 +15,7 @@ import java.io.{File => JFile} import scala.collection.generic.Clearable import scala.collection.mutable import scala.reflect.ClassTag -import scala.reflect.io.{Directory, PlainDirectory, AbstractFile} +import dotty.tools.io.{Directory, PlainDirectory, AbstractFile} import scala.tools.asm.{ClassVisitor, FieldVisitor, MethodVisitor} import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface} import dotty.tools.dotc.core._ diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 923f0a95a632..709ce4616f9b 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -12,7 +12,7 @@ import scala.collection.generic.Clearable import scala.collection.mutable import scala.reflect.ClassTag import scala.reflect.internal.util.WeakHashSet -import scala.reflect.io.{AbstractFile, Directory, PlainDirectory} +import dotty.tools.io.{AbstractFile, Directory, PlainDirectory} import scala.tools.asm.{AnnotationVisitor, ClassVisitor, FieldVisitor, MethodVisitor} import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface} import dotty.tools.dotc.core._ diff --git 
a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 90f13f83a791..c1100f4f1e6b 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -33,7 +33,7 @@ import dotty.tools.dotc.util.{DotClass, Positions} import tpd._ import StdNames._ -import scala.reflect.io.{AbstractFile, Directory, PlainDirectory} +import dotty.tools.io.{AbstractFile, Directory, PlainDirectory} class GenBCode extends Phase { def phaseName: String = "genBCode" @@ -62,10 +62,10 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter val sourceFile = ctx.compilationUnit.source - /** Convert a `scala.reflect.io.AbstractFile` into a + /** Convert a `dotty.tools.io.AbstractFile` into a * `dotty.tools.dotc.interfaces.AbstractFile`. */ - private[this] def convertAbstractFile(absfile: scala.reflect.io.AbstractFile): interfaces.AbstractFile = + private[this] def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = new interfaces.AbstractFile { override def name = absfile.name override def path = absfile.path diff --git a/compiler/src/dotty/tools/backend/jvm/LabelDefs.scala b/compiler/src/dotty/tools/backend/jvm/LabelDefs.scala index 1950d300d915..d4f09e99159a 100644 --- a/compiler/src/dotty/tools/backend/jvm/LabelDefs.scala +++ b/compiler/src/dotty/tools/backend/jvm/LabelDefs.scala @@ -16,7 +16,7 @@ import scala.collection.generic.Clearable import scala.collection.mutable import scala.collection.mutable.{ListBuffer, ArrayBuffer} import scala.reflect.ClassTag -import scala.reflect.io.{Directory, PlainDirectory, AbstractFile} +import dotty.tools.io.{Directory, PlainDirectory, AbstractFile} import scala.tools.asm.{ClassVisitor, FieldVisitor, MethodVisitor} import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface} import dotty.tools.dotc.core._ diff --git a/compiler/src/dotty/tools/dotc/Run.scala 
b/compiler/src/dotty/tools/dotc/Run.scala index 23c45e354b6c..acd885dd3afe 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -18,7 +18,7 @@ import java.io.{BufferedWriter, OutputStreamWriter} import printing.XprintMode import scala.annotation.tailrec -import scala.reflect.io.VirtualFile +import dotty.tools.io.VirtualFile import scala.util.control.NonFatal /** A compiler run. Exports various methods to compile source files */ diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index ec3e8fdf4bdd..c6b15d773614 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -1,15 +1,13 @@ /* * Copyright (c) 2014 Contributor. All rights reserved. */ -package dotty.tools.dotc.classpath +package dotty.tools +package dotc.classpath import java.net.URL import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer -import scala.reflect.internal.FatalError -import scala.reflect.io.AbstractFile -import dotty.tools.io.ClassPath -import dotty.tools.io.ClassRepresentation +import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation } /** * A classpath unifying multiple class- and sourcepath entries. 
diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala index 129c6b9feb25..89c68ae67112 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala @@ -3,7 +3,7 @@ */ package dotty.tools.dotc.classpath -import scala.reflect.io.AbstractFile +import dotty.tools.io.AbstractFile import dotty.tools.io.ClassRepresentation case class ClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepresentation]) diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index ac8fc633fdc0..86dde8a23cd7 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -3,8 +3,8 @@ */ package dotty.tools.dotc.classpath -import scala.reflect.io.{AbstractFile, VirtualDirectory} -import scala.reflect.io.Path.string2path +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import dotty.tools.io.Path.string2path import dotty.tools.dotc.config.Settings import FileUtils.AbstractFileOps import dotty.tools.io.ClassPath diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 1ed233ed72fe..0a3496be4f50 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -10,8 +10,7 @@ import java.util.function.IntFunction import java.util import java.util.Comparator -import scala.reflect.io.{AbstractFile, PlainFile} -import dotty.tools.io.{ClassPath, ClassRepresentation, PlainNioFile} +import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, PlainNioFile} import FileUtils._ import scala.collection.JavaConverters._ @@ -118,7 +117,7 @@ trait 
JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo } else Array() } protected def getName(f: File): String = f.getName - protected def toAbstractFile(f: File): AbstractFile = new PlainFile(new scala.reflect.io.File(f)) + protected def toAbstractFile(f: File): AbstractFile = new PlainFile(new dotty.tools.io.File(f)) protected def isPackage(f: File): Boolean = f.isPackage assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") @@ -208,7 +207,7 @@ case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileE val relativePath = FileUtils.dirPath(className) val classFile = new File(s"$dir/$relativePath.class") if (classFile.exists) { - val wrappedClassFile = new scala.reflect.io.File(classFile) + val wrappedClassFile = new dotty.tools.io.File(classFile) val abstractClassFile = new PlainFile(wrappedClassFile) Some(abstractClassFile) } else None @@ -235,7 +234,7 @@ case class DirectorySourcePath(dir: File) extends JFileDirectoryLookup[SourceFil .collectFirst { case file if file.exists() => file } sourceFile.map { file => - val wrappedSourceFile = new scala.reflect.io.File(file) + val wrappedSourceFile = new dotty.tools.io.File(file) val abstractSourceFile = new PlainFile(wrappedSourceFile) abstractSourceFile } diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index 823efbb9df52..ef571dbbf4b5 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -1,12 +1,12 @@ /* * Copyright (c) 2014 Contributor. All rights reserved. 
*/ -package dotty.tools.dotc.classpath +package dotty.tools +package dotc.classpath import java.io.{File => JFile, FileFilter} import java.net.URL -import scala.reflect.internal.FatalError -import scala.reflect.io.AbstractFile +import dotty.tools.io.AbstractFile /** * Common methods related to Java files and abstract files used in the context of classpath diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 5b08555540b7..c32152028854 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.classpath import dotty.tools.io.ClassRepresentation -import scala.reflect.io.{AbstractFile, Path, PlainFile, VirtualDirectory} +import dotty.tools.io.{AbstractFile, Path, PlainFile, VirtualDirectory} import FileUtils._ import java.net.URL diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 5210c699efc2..648fb59497c0 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -6,8 +6,7 @@ package dotty.tools.dotc.classpath import java.io.File import java.net.URL import scala.annotation.tailrec -import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} -import dotty.tools.io.ClassPath +import dotty.tools.io.{AbstractFile, ClassPath, FileZipArchive, ManifestResources} import dotty.tools.dotc.config.Settings import dotty.tools.dotc.core.Contexts.Context import FileUtils._ diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index 8184708ad704..352a566097fe 100644 --- 
a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -6,8 +6,7 @@ package dotty.tools.dotc.classpath import java.io.File import java.net.URL import scala.collection.Seq -import scala.reflect.io.AbstractFile -import scala.reflect.io.FileZipArchive +import dotty.tools.io.{ AbstractFile, FileZipArchive } import FileUtils.AbstractFileOps import dotty.tools.io.{ClassPath, ClassRepresentation} diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 310838d8aa73..d3865d1e0ce1 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -8,7 +8,7 @@ import NameOps._, NameKinds._ import Scopes.Scope import collection.mutable import collection.BitSet -import scala.reflect.io.AbstractFile +import dotty.tools.io.AbstractFile import Decorators.SymbolIteratorDecorator import ast._ import annotation.tailrec diff --git a/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala index 98f098fbcbf3..3010f7761763 100644 --- a/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala +++ b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala @@ -15,7 +15,7 @@ import scala.collection.mutable.{ListBuffer, HashSet, ArrayBuffer} //import ast.parser.SyntaxAnalyzer import io.{PlainFile, VirtualDirectory} -import scala.reflect.io.{PlainDirectory, Directory} +import dotty.tools.io.{PlainDirectory, Directory} import reporting.{ConsoleReporter, Reporter} import core.Flags import util.{SourceFile, NameTransformer} diff --git a/compiler/src/dotty/tools/dotc/repl/REPL.scala b/compiler/src/dotty/tools/dotc/repl/REPL.scala index 211e3c93196a..11ea7636d1a4 100644 --- a/compiler/src/dotty/tools/dotc/repl/REPL.scala +++ b/compiler/src/dotty/tools/dotc/repl/REPL.scala @@ -5,7 +5,7 @@ 
package repl import core.Contexts.Context import reporting.Reporter import io.{AbstractFile, PlainFile, VirtualDirectory} -import scala.reflect.io.{PlainDirectory, Directory} +import dotty.tools.io.{PlainDirectory, Directory} import java.io.{BufferedReader, File => JFile, FileReader, PrintWriter} import java.net.{URL, URLClassLoader} diff --git a/compiler/src/dotty/tools/io/package.scala b/compiler/src/dotty/tools/io/package.scala index 7acb827c9a97..50451dddf101 100644 --- a/compiler/src/dotty/tools/io/package.scala +++ b/compiler/src/dotty/tools/io/package.scala @@ -18,13 +18,15 @@ package object io { val Directory = scala.reflect.io.Directory type File = scala.reflect.io.File val File = scala.reflect.io.File + type ManifestResources = scala.reflect.io.ManifestResources type Path = scala.reflect.io.Path val Path = scala.reflect.io.Path val Streamable = scala.reflect.io.Streamable type VirtualDirectory = scala.reflect.io.VirtualDirectory type VirtualFile = scala.reflect.io.VirtualFile - val ZipArchive = scala.reflect.io.ZipArchive type ZipArchive = scala.reflect.io.ZipArchive + val ZipArchive = scala.reflect.io.ZipArchive + type FileZipArchive = scala.reflect.io.FileZipArchive type JManifest = java.util.jar.Manifest type JFile = java.io.File diff --git a/compiler/src/scala/tools/nsc/io/package.scala b/compiler/src/scala/tools/nsc/io/package.scala index 6b30e54413ed..f364824c6c83 100644 --- a/compiler/src/scala/tools/nsc/io/package.scala +++ b/compiler/src/scala/tools/nsc/io/package.scala @@ -10,17 +10,17 @@ package scala.tools.nsc * See http://dotty.epfl.ch/docs/contributing/backend.html for more information. 
*/ package object io { - type AbstractFile = scala.reflect.io.AbstractFile - val AbstractFile = scala.reflect.io.AbstractFile + type AbstractFile = dotty.tools.io.AbstractFile + val AbstractFile = dotty.tools.io.AbstractFile - type Directory = scala.reflect.io.Directory - val Directory = scala.reflect.io.Directory + type Directory = dotty.tools.io.Directory + val Directory = dotty.tools.io.Directory - type Path = scala.reflect.io.Path - val Path = scala.reflect.io.Path + type Path = dotty.tools.io.Path + val Path = dotty.tools.io.Path - type File = scala.reflect.io.File - val File = scala.reflect.io.File + type File = dotty.tools.io.File + val File = dotty.tools.io.File type Jar = dotty.tools.io.Jar val Jar = dotty.tools.io.Jar diff --git a/compiler/test/dotc/tests.scala b/compiler/test/dotc/tests.scala index b61747468d8c..cb6bc394db1e 100644 --- a/compiler/test/dotc/tests.scala +++ b/compiler/test/dotc/tests.scala @@ -8,7 +8,7 @@ import org.junit.{Before, Test} import org.junit.Assert._ import java.io.{ File => JFile } -import scala.reflect.io.Directory +import dotty.tools.io.Directory import scala.io.Source /** WARNING diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala index c423089d07a4..799c550ec7e9 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala @@ -5,7 +5,7 @@ import dotc.core.Contexts.{Context, ContextBase} import dotc.core.Phases.Phase import dotc.Compiler -import scala.reflect.io.{VirtualDirectory => Directory} +import dotty.tools.io.{VirtualDirectory => Directory} import scala.tools.asm import asm._ import asm.tree._ diff --git a/compiler/test/dotty/tools/dotc/CompilerTest.scala b/compiler/test/dotty/tools/dotc/CompilerTest.scala index 83e10a4b8261..7075756c04c9 100644 --- a/compiler/test/dotty/tools/dotc/CompilerTest.scala +++ 
b/compiler/test/dotty/tools/dotc/CompilerTest.scala @@ -9,7 +9,7 @@ import util.SourcePosition import config.CompilerCommand import dotty.tools.io.PlainFile import scala.collection.mutable.ListBuffer -import scala.reflect.io.{ Path, Directory, File => SFile, AbstractFile } +import dotty.tools.io.{ Path, Directory, File => SFile, AbstractFile } import scala.annotation.tailrec import java.io.{ RandomAccessFile, File => JFile } diff --git a/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala index 1f79c2cf66dd..4624b39d3e5e 100644 --- a/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala +++ b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala @@ -3,7 +3,7 @@ package dotc package parsing import Tokens._, Parsers._ -import scala.reflect.io._ +import dotty.tools.io._ import util._ import core._ import ast.Trees._ diff --git a/compiler/test/dotty/tools/dotc/parsing/ParserTest.scala b/compiler/test/dotty/tools/dotc/parsing/ParserTest.scala index a89b34512f58..64bd5a81d9ef 100644 --- a/compiler/test/dotty/tools/dotc/parsing/ParserTest.scala +++ b/compiler/test/dotty/tools/dotc/parsing/ParserTest.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package parsing -import scala.reflect.io._ +import dotty.tools.io._ import util._ import core._ import scala.io.Codec diff --git a/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala b/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala index 48ac280d0578..d1e188d2893c 100644 --- a/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala +++ b/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package parsing -import scala.reflect.io._ +import dotty.tools.io._ import scala.io.Codec import util._ import Tokens._, Scanners._ diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index 
354a86e10aff..a370a8404bff 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -6,7 +6,7 @@ package transform import java.io._ import scala.io.Source._ -import scala.reflect.io.Directory +import dotty.tools.io.Directory import org.junit.Test import reporting.TestReporter import vulpix.TestConfiguration From 92263a553386efb1b2c26c8881816fc85c0c111c Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 25 Apr 2017 18:21:27 +0200 Subject: [PATCH 02/12] Add WeakHashSet from scala/scala 2.12.x 5a5ed5826f297bca6291 --- .../dotty/tools/dotc/util/WeakHashSet.scala | 410 ++++++++++++++++++ 1 file changed, 410 insertions(+) create mode 100644 compiler/src/dotty/tools/dotc/util/WeakHashSet.scala diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala new file mode 100644 index 000000000000..712799aaed31 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -0,0 +1,410 @@ +/** Taken from the original implementation of WeakHashSet in scala-reflect + * + * @author: Eugene Burmako + */ +package dotty.tools.dotc.util + +import java.lang.ref.{WeakReference, ReferenceQueue} +import scala.annotation.tailrec +import scala.collection.generic.Clearable +import scala.collection.mutable.{Set => MSet} + +/** + * A HashSet where the elements are stored weakly. Elements in this set are elligible for GC if no other + * hard references are associated with them. Its primary use case is as a canonical reference + * identity holder (aka "hash-consing") via findEntryOrUpdate + * + * This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException + * + * This set implmeentation is not in general thread safe without external concurrency control. However it behaves + * properly when GC concurrently collects elements in this set. 
+ */ +final class WeakHashSet[A >: Null <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] { + + import WeakHashSet._ + + def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor) + + type This = WeakHashSet[A] + + /** + * queue of Entries that hold elements scheduled for GC + * the removeStaleEntries() method works through the queue to remeove + * stale entries from the table + */ + private[this] val queue = new ReferenceQueue[A] + + /** + * the number of elements in this set + */ + private[this] var count = 0 + + /** + * from a specified initial capacity compute the capacity we'll use as being the next + * power of two equal to or greater than the specified initial capacity + */ + private def computeCapacity = { + if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0"); + var candidate = 1 + while (candidate < initialCapacity) { + candidate *= 2 + } + candidate + } + + /** + * the underlying table of entries which is an array of Entry linked lists + */ + private[this] var table = new Array[Entry[A]](computeCapacity) + + /** + * the limit at which we'll increase the size of the hash table + */ + var threshhold = computeThreshHold + + private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt + + /** + * find the bucket associated with an elements's hash code + */ + private[this] def bucketFor(hash: Int): Int = { + // spread the bits around to try to avoid accidental collisions using the + // same algorithm as java.util.HashMap + var h = hash + h ^= h >>> 20 ^ h >>> 12 + h ^= h >>> 7 ^ h >>> 4 + + // this is finding h % table.length, but takes advantage of the + // fact that table length is a power of 2, + // if you don't do bit flipping in your head, if table.length + // is binary 100000.. (with n 0s) then table.length - 1 + // is 1111.. with n 1's. 
+ // In other words this masks on the last n bits in the hash + h & (table.length - 1) + } + + /** + * remove a single entry from a linked list in a given bucket + */ + private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) { + prevEntry match { + case null => table(bucket) = entry.tail + case _ => prevEntry.tail = entry.tail + } + count -= 1 + } + + /** + * remove entries associated with elements that have been gc'ed + */ + private[this] def removeStaleEntries() { + def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]] + + @tailrec + def queueLoop(): Unit = { + val stale = poll() + if (stale != null) { + val bucket = bucketFor(stale.hash) + + @tailrec + def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = if (stale eq entry) remove(bucket, prevEntry, entry) + else if (entry != null) linkedListLoop(entry, entry.tail) + + linkedListLoop(null, table(bucket)) + + queueLoop() + } + } + + queueLoop() + } + + /** + * Double the size of the internal table + */ + private[this] def resize() { + val oldTable = table + table = new Array[Entry[A]](oldTable.size * 2) + threshhold = computeThreshHold + + @tailrec + def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) { + @tailrec + def linkedListLoop(entry: Entry[A]): Unit = entry match { + case null => () + case _ => { + val bucket = bucketFor(entry.hash) + val oldNext = entry.tail + entry.tail = table(bucket) + table(bucket) = entry + linkedListLoop(oldNext) + } + } + linkedListLoop(oldTable(oldBucket)) + + tableLoop(oldBucket + 1) + } + tableLoop(0) + } + + // from scala.reflect.internal.Set, find an element or null if it isn't contained + override def findEntry(elem: A): A = elem match { + case null => throw new NullPointerException("WeakHashSet cannot hold nulls") + case _ => { + removeStaleEntries() + val hash = elem.hashCode + val bucket = bucketFor(hash) + + @tailrec + def linkedListLoop(entry: Entry[A]): A = entry match { + case null => null.asInstanceOf[A] + case _ 
=> { + val entryElem = entry.get + if (elem == entryElem) entryElem + else linkedListLoop(entry.tail) + } + } + + linkedListLoop(table(bucket)) + } + } + // add an element to this set unless it's already in there and return the element + def findEntryOrUpdate(elem: A): A = elem match { + case null => throw new NullPointerException("WeakHashSet cannot hold nulls") + case _ => { + removeStaleEntries() + val hash = elem.hashCode + val bucket = bucketFor(hash) + val oldHead = table(bucket) + + def add() = { + table(bucket) = new Entry(elem, hash, oldHead, queue) + count += 1 + if (count > threshhold) resize() + elem + } + + @tailrec + def linkedListLoop(entry: Entry[A]): A = entry match { + case null => add() + case _ => { + val entryElem = entry.get + if (elem == entryElem) entryElem + else linkedListLoop(entry.tail) + } + } + + linkedListLoop(oldHead) + } + } + + // add an element to this set unless it's already in there and return this set + override def +(elem: A): this.type = elem match { + case null => throw new NullPointerException("WeakHashSet cannot hold nulls") + case _ => { + removeStaleEntries() + val hash = elem.hashCode + val bucket = bucketFor(hash) + val oldHead = table(bucket) + + def add() { + table(bucket) = new Entry(elem, hash, oldHead, queue) + count += 1 + if (count > threshhold) resize() + } + + @tailrec + def linkedListLoop(entry: Entry[A]): Unit = entry match { + case null => add() + case _ if (elem == entry.get) => () + case _ => linkedListLoop(entry.tail) + } + + linkedListLoop(oldHead) + this + } + } + + def +=(elem: A) = this + elem + + // from scala.reflect.interanl.Set + override def addEntry(x: A) { this += x } + + // remove an element from this set and return this set + override def -(elem: A): this.type = elem match { + case null => this + case _ => { + removeStaleEntries() + val bucket = bucketFor(elem.hashCode) + + + + @tailrec + def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = entry match { + case null => () + case 
_ if (elem == entry.get) => remove(bucket, prevEntry, entry) + case _ => linkedListLoop(entry, entry.tail) + } + + linkedListLoop(null, table(bucket)) + this + } + } + + def -=(elem: A) = this - elem + + // empty this set + override def clear(): Unit = { + table = new Array[Entry[A]](table.size) + threshhold = computeThreshHold + count = 0 + + // drain the queue - doesn't do anything because we're throwing away all the values anyway + @tailrec def queueLoop(): Unit = if (queue.poll() != null) queueLoop() + queueLoop() + } + + // true if this set is empty + override def empty: This = new WeakHashSet[A](initialCapacity, loadFactor) + + // the number of elements in this set + override def size: Int = { + removeStaleEntries() + count + } + + override def apply(x: A): Boolean = this contains x + + override def foreach[U](f: A => U): Unit = iterator foreach f + + // It has the `()` because iterator runs `removeStaleEntries()` + override def toList(): List[A] = iterator.toList + + // Iterator over all the elements in this set in no particular order + override def iterator: Iterator[A] = { + removeStaleEntries() + + new Iterator[A] { + + /** + * the bucket currently being examined. 
Initially it's set past the last bucket and will be decremented + */ + private[this] var currentBucket: Int = table.size + + /** + * the entry that was last examined + */ + private[this] var entry: Entry[A] = null + + /** + * the element that will be the result of the next call to next() + */ + private[this] var lookaheadelement: A = null.asInstanceOf[A] + + @tailrec + def hasNext: Boolean = { + while (entry == null && currentBucket > 0) { + currentBucket -= 1 + entry = table(currentBucket) + } + + if (entry == null) false + else { + lookaheadelement = entry.get + if (lookaheadelement == null) { + // element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry + entry = entry.tail + hasNext + } else { + true + } + } + } + + def next(): A = if (lookaheadelement == null) + throw new IndexOutOfBoundsException("next on an empty iterator") + else { + val result = lookaheadelement + lookaheadelement = null.asInstanceOf[A] + entry = entry.tail + result + } + } + } + + /** + * Diagnostic information about the internals of this set. Not normally + * needed by ordinary code, but may be useful for diagnosing performance problems + */ + private[util] class Diagnostics { + /** + * Verify that the internal structure of this hash set is fully consistent. + * Throws an assertion error on any problem. In order for it to be reliable + * the entries must be stable. If any are garbage collected during validation + * then an assertion may inappropriately fire. 
+ */ + def fullyValidate: Unit = { + var computedCount = 0 + var bucket = 0 + while (bucket < table.size) { + var entry = table(bucket) + while (entry != null) { + assert(entry.get != null, s"$entry had a null value indicated that gc activity was happening during diagnostic validation or that a null value was inserted") + computedCount += 1 + val cachedHash = entry.hash + val realHash = entry.get.hashCode + assert(cachedHash == realHash, s"for $entry cached hash was $cachedHash but should have been $realHash") + val computedBucket = bucketFor(realHash) + assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket") + + entry = entry.tail + } + + bucket += 1 + } + + assert(computedCount == count, s"The computed count was $computedCount but should have been $count") + } + + /** + * Produces a diagnostic dump of the table that underlies this hash set. + */ + def dump = table.deep + + /** + * Number of buckets that hold collisions. Useful for diagnosing performance issues. + */ + def collisionBucketsCount: Int = + (table filter (entry => entry != null && entry.tail != null)).size + + /** + * Number of buckets that are occupied in this hash table. + */ + def fullBucketsCount: Int = + (table filter (entry => entry != null)).size + + /** + * Number of buckets in the table + */ + def bucketsCount: Int = table.size + } + + private[util] def diagnostics = new Diagnostics +} + +/** + * Companion object for WeakHashSet + */ +object WeakHashSet { + /** + * A single entry in a WeakHashSet. 
It's a WeakReference plus a cached hash code and + * a link to the next Entry in the same bucket + */ + private class Entry[A](element: A, val hash:Int, var tail: Entry[A], queue: ReferenceQueue[A]) extends WeakReference[A](element, queue) + + val defaultInitialCapacity = 16 + val defaultLoadFactor = .75 + + def apply[A >: Null <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = + new WeakHashSet[A](initialCapacity, loadFactor) +} From 079dfd6b682fc3cacdaa71ce075cd7fbe66ac319 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 25 Apr 2017 18:12:04 +0200 Subject: [PATCH 03/12] Abstract away WeakHashSet --- .../src/dotty/tools/backend/jvm/DottyBackendInterface.scala | 2 +- compiler/src/scala/tools/nsc/util/package.scala | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 compiler/src/scala/tools/nsc/util/package.scala diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 709ce4616f9b..690c62577f9f 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -11,7 +11,7 @@ import java.io.{File => JFile} import scala.collection.generic.Clearable import scala.collection.mutable import scala.reflect.ClassTag -import scala.reflect.internal.util.WeakHashSet +import scala.tools.nsc.util.WeakHashSet import dotty.tools.io.{AbstractFile, Directory, PlainDirectory} import scala.tools.asm.{AnnotationVisitor, ClassVisitor, FieldVisitor, MethodVisitor} import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface} diff --git a/compiler/src/scala/tools/nsc/util/package.scala b/compiler/src/scala/tools/nsc/util/package.scala new file mode 100644 index 000000000000..015164500c21 --- /dev/null +++ b/compiler/src/scala/tools/nsc/util/package.scala @@ -0,0 +1,5 @@ +package scala.tools.nsc + 
+package object util { + type WeakHashSet[T >: Null <: AnyRef] = dotty.tools.dotc.util.WeakHashSet[T] +} From 54a3be6213551d122192a47c2ba414845d465db7 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 25 Apr 2017 18:32:13 +0200 Subject: [PATCH 04/12] Blacklist files not needed to compile the backend --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 38ab9afb212b..1a54323d1652 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -373,7 +373,7 @@ object Build { val files = ((backendDir * (allScalaFiles - "JavaPlatform.scala" - "Platform.scala" - "ScalaPrimitives.scala")) +++ (backendDir / "jvm") * - (allScalaFiles - "BCodeICodeCommon.scala" - "GenASM.scala" - "GenBCode.scala" - "ScalacBackendInterface.scala") + (allScalaFiles - "BCodeICodeCommon.scala" - "GenASM.scala" - "GenBCode.scala" - "ScalacBackendInterface.scala" - "BackendStats.scala" - "BCodeAsmEncode.scala") ).get val pairs = files.pair(sbt.Path.rebase(submoduleCompilerDir, outputDir)) From f35308743e773332cf3c9b680f5a07ecfac21a2e Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 25 Apr 2017 18:46:22 +0200 Subject: [PATCH 05/12] Abstract away `Pickle{Buffer,Format}` --- compiler/src/scala/tools/nsc/util/package.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/compiler/src/scala/tools/nsc/util/package.scala b/compiler/src/scala/tools/nsc/util/package.scala index 015164500c21..1d803f540f08 100644 --- a/compiler/src/scala/tools/nsc/util/package.scala +++ b/compiler/src/scala/tools/nsc/util/package.scala @@ -2,4 +2,9 @@ package scala.tools.nsc package object util { type WeakHashSet[T >: Null <: AnyRef] = dotty.tools.dotc.util.WeakHashSet[T] + + object pickling { + type PickleBuffer = dotty.tools.dotc.core.unpickleScala2.PickleBuffer + val PickleFormat = dotty.tools.dotc.core.unpickleScala2.PickleFormat + } } From bf7704fd4b71aaae10639b5cd420d14f657131a0 Mon Sep 17 00:00:00 2001 From: Felix 
Mulder Date: Tue, 25 Apr 2017 19:00:53 +0200 Subject: [PATCH 06/12] Remove unused files and type aliases --- .../dotty/tools/io/DaemonThreadFactory.scala | 16 --------- compiler/src/dotty/tools/io/Fileish.scala | 34 ------------------- compiler/src/dotty/tools/io/Jar.scala | 3 +- compiler/src/dotty/tools/io/package.scala | 28 --------------- 4 files changed, 2 insertions(+), 79 deletions(-) delete mode 100644 compiler/src/dotty/tools/io/DaemonThreadFactory.scala delete mode 100644 compiler/src/dotty/tools/io/Fileish.scala diff --git a/compiler/src/dotty/tools/io/DaemonThreadFactory.scala b/compiler/src/dotty/tools/io/DaemonThreadFactory.scala deleted file mode 100644 index ae0cda260014..000000000000 --- a/compiler/src/dotty/tools/io/DaemonThreadFactory.scala +++ /dev/null @@ -1,16 +0,0 @@ -package dotty.tools -package io - -import java.util.concurrent._ - -class DaemonThreadFactory extends ThreadFactory { - def newThread(r: Runnable): Thread = { - val thread = new Thread(r) - thread setDaemon true - thread - } -} - -object DaemonThreadFactory { - def newPool() = Executors.newCachedThreadPool(new DaemonThreadFactory) -} diff --git a/compiler/src/dotty/tools/io/Fileish.scala b/compiler/src/dotty/tools/io/Fileish.scala deleted file mode 100644 index 0fcb133075b2..000000000000 --- a/compiler/src/dotty/tools/io/Fileish.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Paul Phillips - */ - -package dotty.tools -package io - -import java.io.{ InputStream } -import java.util.jar.JarEntry -import language.postfixOps - -/** A common interface for File-based things and Stream-based things. - * (In particular, io.File and JarEntry.) 
- */ -class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars { - def inputStream() = input() - - def parent = path.parent - def name = path.name - def isSourceFile = path.hasExtension("java", "scala") - - private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim } - lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".") - lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "." - - override def toString = path.path -} - -object Fileish { - def apply(f: File): Fileish = new Fileish(f, () => f.inputStream()) - def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in) - def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in) -} diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala index 142226ea57a9..850a8f3dbca7 100644 --- a/compiler/src/dotty/tools/io/Jar.scala +++ b/compiler/src/dotty/tools/io/Jar.scala @@ -40,6 +40,8 @@ class Jar(file: File) extends Iterable[JarEntry] { protected def errorFn(msg: String): Unit = Console println msg + private implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m) + lazy val jarFile = new JarFile(file.jfile) lazy val manifest = withJarInput(s => Option(s.getManifest)) @@ -65,7 +67,6 @@ class Jar(file: File) extends Iterable[JarEntry] { Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f } override def iterator: Iterator[JarEntry] = this.toList.iterator - def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x))) private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match { case null => errorFn("No such entry: " + entry) ; null diff --git a/compiler/src/dotty/tools/io/package.scala b/compiler/src/dotty/tools/io/package.scala index 50451dddf101..37f3b0f3b1b3 100644 --- a/compiler/src/dotty/tools/io/package.scala +++ 
b/compiler/src/dotty/tools/io/package.scala @@ -21,38 +21,10 @@ package object io { type ManifestResources = scala.reflect.io.ManifestResources type Path = scala.reflect.io.Path val Path = scala.reflect.io.Path - val Streamable = scala.reflect.io.Streamable type VirtualDirectory = scala.reflect.io.VirtualDirectory type VirtualFile = scala.reflect.io.VirtualFile type ZipArchive = scala.reflect.io.ZipArchive - val ZipArchive = scala.reflect.io.ZipArchive type FileZipArchive = scala.reflect.io.FileZipArchive type JManifest = java.util.jar.Manifest type JFile = java.io.File - - implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m) - private lazy val daemonThreadPool = DaemonThreadFactory.newPool() - - def runnable(body: => Unit): Runnable = new Runnable { override def run() = body } - def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } - def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body) - def submit(runnable: Runnable) = daemonThreadPool submit runnable - - // Create, start, and return a daemon thread - def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body) - def newThread(f: Thread => Unit)(body: => Unit): Thread = { - val thread = new Thread(runnable(body)) - f(thread) - thread.start - thread - } - - // Set a timer to execute the given code. 
- def timer(seconds: Int)(body: => Unit): Timer = { - val alarm = new Timer(true) // daemon - val tt = new TimerTask { def run() = body } - - alarm.schedule(tt, seconds * 1000) - alarm - } } From 02b64507187017a5538d56fd60e3bab32fed33ac Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 25 Apr 2017 19:18:40 +0200 Subject: [PATCH 07/12] Remove unnecessary changes done to scalac --- .../tools/backend/jvm/DottyBackendInterface.scala | 2 +- .../src/scala/reflect/internal/pickling/package.scala | 6 ++++++ compiler/src/scala/reflect/internal/util/package.scala | 5 +++++ compiler/src/scala/tools/nsc/util/package.scala | 10 ---------- 4 files changed, 12 insertions(+), 11 deletions(-) create mode 100644 compiler/src/scala/reflect/internal/pickling/package.scala create mode 100644 compiler/src/scala/reflect/internal/util/package.scala delete mode 100644 compiler/src/scala/tools/nsc/util/package.scala diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 690c62577f9f..709ce4616f9b 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -11,7 +11,7 @@ import java.io.{File => JFile} import scala.collection.generic.Clearable import scala.collection.mutable import scala.reflect.ClassTag -import scala.tools.nsc.util.WeakHashSet +import scala.reflect.internal.util.WeakHashSet import dotty.tools.io.{AbstractFile, Directory, PlainDirectory} import scala.tools.asm.{AnnotationVisitor, ClassVisitor, FieldVisitor, MethodVisitor} import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface} diff --git a/compiler/src/scala/reflect/internal/pickling/package.scala b/compiler/src/scala/reflect/internal/pickling/package.scala new file mode 100644 index 000000000000..9940d4369929 --- /dev/null +++ b/compiler/src/scala/reflect/internal/pickling/package.scala @@ -0,0 +1,6 @@ +package 
scala.reflect.internal + +package object pickling { + type PickleBuffer = dotty.tools.dotc.core.unpickleScala2.PickleBuffer + val PickleFormat = dotty.tools.dotc.core.unpickleScala2.PickleFormat +} diff --git a/compiler/src/scala/reflect/internal/util/package.scala b/compiler/src/scala/reflect/internal/util/package.scala new file mode 100644 index 000000000000..c005f4a41d4f --- /dev/null +++ b/compiler/src/scala/reflect/internal/util/package.scala @@ -0,0 +1,5 @@ +package scala.reflect.internal + +package object util { + type WeakHashSet[T >: Null <: AnyRef] = dotty.tools.dotc.util.WeakHashSet[T] +} diff --git a/compiler/src/scala/tools/nsc/util/package.scala b/compiler/src/scala/tools/nsc/util/package.scala deleted file mode 100644 index 1d803f540f08..000000000000 --- a/compiler/src/scala/tools/nsc/util/package.scala +++ /dev/null @@ -1,10 +0,0 @@ -package scala.tools.nsc - -package object util { - type WeakHashSet[T >: Null <: AnyRef] = dotty.tools.dotc.util.WeakHashSet[T] - - object pickling { - type PickleBuffer = dotty.tools.dotc.core.unpickleScala2.PickleBuffer - val PickleFormat = dotty.tools.dotc.core.unpickleScala2.PickleFormat - } -} From 693351ff85afcba7c3b1483b98247b197876e21e Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Thu, 27 Apr 2017 09:18:23 +0200 Subject: [PATCH 08/12] Inline reflect io sources needed from scala/scala 2.12.x 5a5ed5826f297bca6291 --- .../classpath/VirtualDirectoryClassPath.scala | 1 - .../tools/dotc/sbt/ExtractDependencies.scala | 2 +- .../src/dotty/tools/io/AbstractFile.scala | 266 +++++++++++++++ compiler/src/dotty/tools/io/Directory.scala | 66 ++++ compiler/src/dotty/tools/io/File.scala | 114 +++++++ .../tools/io/FileOperationException.scala | 12 + compiler/src/dotty/tools/io/IOStats.scala | 29 ++ .../src/dotty/tools/io/NoAbstractFile.scala | 33 ++ compiler/src/dotty/tools/io/Path.scala | 266 +++++++++++++++ compiler/src/dotty/tools/io/Statistics.scala | 284 +++++++++++++++ compiler/src/dotty/tools/io/Streamable.scala | 137 
++++++++ .../src/dotty/tools/io/VirtualDirectory.scala | 72 ++++ compiler/src/dotty/tools/io/VirtualFile.scala | 96 ++++++ compiler/src/dotty/tools/io/ZipArchive.scala | 323 ++++++++++++++++++ compiler/src/dotty/tools/io/package.scala | 19 -- doc-tool/test/DottyDocTest.scala | 2 +- project/Build.scala | 2 - 17 files changed, 1700 insertions(+), 24 deletions(-) create mode 100644 compiler/src/dotty/tools/io/AbstractFile.scala create mode 100644 compiler/src/dotty/tools/io/Directory.scala create mode 100644 compiler/src/dotty/tools/io/File.scala create mode 100644 compiler/src/dotty/tools/io/FileOperationException.scala create mode 100644 compiler/src/dotty/tools/io/IOStats.scala create mode 100644 compiler/src/dotty/tools/io/NoAbstractFile.scala create mode 100644 compiler/src/dotty/tools/io/Path.scala create mode 100644 compiler/src/dotty/tools/io/Statistics.scala create mode 100644 compiler/src/dotty/tools/io/Streamable.scala create mode 100644 compiler/src/dotty/tools/io/VirtualDirectory.scala create mode 100644 compiler/src/dotty/tools/io/VirtualFile.scala create mode 100644 compiler/src/dotty/tools/io/ZipArchive.scala diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index c32152028854..51a97ef7ac14 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -5,7 +5,6 @@ import dotty.tools.io.{AbstractFile, Path, PlainFile, VirtualDirectory} import FileUtils._ import java.net.URL -import scala.reflect.internal.util.AbstractFileClassLoader import dotty.tools.io.ClassPath case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 
fcef88eb3481..088b22b5ce3a 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -101,7 +101,7 @@ class ExtractDependencies extends Phase { val classSegments = Path(ze.path).segments binaryDependency(zipFile, className(classSegments)) } - case pf: scala.reflect.io.PlainFile => + case pf: dotty.tools.io.PlainFile => val packages = dep.ownersIterator .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length // We can recover the fully qualified name of a classfile from diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala new file mode 100644 index 000000000000..a026e877c0b1 --- /dev/null +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -0,0 +1,266 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package dotty.tools.io + +import java.io.{ + IOException, InputStream, OutputStream, BufferedOutputStream, + ByteArrayOutputStream +} +import java.net.URL + +/** + * An abstraction over files for use in the reflection/compiler libraries. + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + * + * @author Philippe Altherr + * @version 1.0, 23/03/2004 + */ +object AbstractFile { + /** Returns "getFile(new File(path))". */ + def getFile(path: String): AbstractFile = getFile(File(path)) + def getFile(path: Path): AbstractFile = getFile(path.toFile) + + /** + * If the specified File exists and is a regular file, returns an + * abstract regular file backed by it. Otherwise, returns `null`. + */ + def getFile(file: File): AbstractFile = + if (file.isFile) new PlainFile(file) else null + + /** Returns "getDirectory(new File(path))". 
*/ + def getDirectory(path: Path): AbstractFile = getDirectory(path.toFile) + + /** + * If the specified File exists and is either a directory or a + * readable zip or jar archive, returns an abstract directory + * backed by it. Otherwise, returns `null`. + */ + def getDirectory(file: File): AbstractFile = + if (file.isDirectory) new PlainFile(file) + else if (file.isFile && Path.isExtensionJarOrZip(file.jfile)) ZipArchive fromFile file + else null + + /** + * If the specified URL exists and is a regular file or a directory, returns an + * abstract regular file or an abstract directory, respectively, backed by it. + * Otherwise, returns `null`. + */ + def getURL(url: URL): AbstractFile = + if (url.getProtocol == "file") { + val f = new java.io.File(url.getPath) + if (f.isDirectory) getDirectory(f) + else getFile(f) + } else null + + def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url +} + +/** + *

+ * This class and its children serve to unify handling of files and + * directories. These files and directories may or may not have some + * real counter part within the file system. For example, some file + * handles reference files within a zip archive or virtual ones + * that exist only in memory. + *

+ *

+ * Every abstract file has a path (i.e. a full name) and a name + * (i.e. a short name) and may be backed by some real File. There are + * two different kinds of abstract files: regular files and + * directories. Regular files may be read and have a last modification + * time. Directories may list their content and look for subfiles with + * a specified name or path and of a specified kind. + *

+ *

+ * The interface does not allow to access the content. + * The class `symtab.classfile.AbstractFileReader` accesses + * bytes, knowing that the character set of classfiles is UTF-8. For + * all other cases, the class `SourceFile` is used, which honors + * `global.settings.encoding.value`. + *

+ * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +abstract class AbstractFile extends Iterable[AbstractFile] { + + /** Returns the name of this abstract file. */ + def name: String + + /** Returns the path of this abstract file. */ + def path: String + + /** Returns the path of this abstract file in a canonical form. */ + def canonicalPath: String = if (file == null) path else file.getCanonicalPath + + /** Checks extension case insensitively. */ + def hasExtension(other: String) = extension == other.toLowerCase + private lazy val extension: String = Path.extension(name) + + /** The absolute file, if this is a relative file. */ + def absolute: AbstractFile + + /** Returns the containing directory of this abstract file */ + def container : AbstractFile + + /** Returns the underlying File if any and null otherwise. */ + def file: JFile + + /** An underlying source, if known. Mostly, a zip/jar file. */ + def underlyingSource: Option[AbstractFile] = None + + /** Does this abstract file denote an existing file? */ + def exists: Boolean = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) + (file eq null) || file.exists + } + + /** Does this abstract file represent something which can contain classfiles? */ + def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip")) + + /** Create a file on disk, if one does not exist already. */ + def create(): Unit + + /** Delete the underlying file or directory (recursively). */ + def delete(): Unit + + /** Is this abstract file a directory? */ + def isDirectory: Boolean + + /** Does this abstract file correspond to something on-disk? */ + def isVirtual: Boolean = false + + /** Returns the time that this abstract file was last modified. 
*/ + def lastModified: Long + + /** returns an input stream so the file can be read */ + def input: InputStream + + /** Returns an output stream for writing the file */ + def output: OutputStream + + /** Returns a buffered output stream for writing the file - defaults to out */ + def bufferedOutput: BufferedOutputStream = new BufferedOutputStream(output) + + /** size of this file if it is a concrete file. */ + def sizeOption: Option[Int] = None + + def toURL: URL = if (file == null) null else file.toURI.toURL + + /** Returns contents of file (if applicable) in a Char array. + * warning: use `Global.getSourceFile()` to use the proper + * encoding when converting to the char array. + */ + @throws(classOf[IOException]) + def toCharArray = new String(toByteArray).toCharArray + + /** Returns contents of file (if applicable) in a byte array. + */ + @throws(classOf[IOException]) + def toByteArray: Array[Byte] = { + val in = input + sizeOption match { + case Some(size) => + var rest = size + val arr = new Array[Byte](rest) + while (rest > 0) { + val res = in.read(arr, arr.length - rest, rest) + if (res == -1) + throw new IOException("read error") + rest -= res + } + in.close() + arr + case None => + val out = new ByteArrayOutputStream() + var c = in.read() + while(c != -1) { + out.write(c) + c = in.read() + } + in.close() + out.toByteArray() + } + } + + /** Returns all abstract subfiles of this abstract directory. */ + def iterator: Iterator[AbstractFile] + + /** Returns the abstract file in this abstract directory with the specified + * name. If there is no such file, returns `null`. The argument + * `directory` tells whether to look for a directory or + * a regular file. + */ + def lookupName(name: String, directory: Boolean): AbstractFile + + /** Returns an abstract file with the given name. It does not + * check that it exists. 
+ */ + def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile + + /** Return an abstract file that does not check that `path` denotes + * an existing file. + */ + def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = { + lookup((f, p, dir) => f.lookupNameUnchecked(p, dir), path, directory) + } + + private def lookup(getFile: (AbstractFile, String, Boolean) => AbstractFile, + path0: String, + directory: Boolean): AbstractFile = { + val separator = java.io.File.separatorChar + // trim trailing '/'s + val path: String = if (path0.last == separator) path0 dropRight 1 else path0 + val length = path.length() + assert(length > 0 && !(path.last == separator), path) + var file = this + var start = 0 + while (true) { + val index = path.indexOf(separator, start) + assert(index < 0 || start < index, ((path, directory, start, index))) + val name = path.substring(start, if (index < 0) length else index) + file = getFile(file, name, if (index < 0) directory else true) + if ((file eq null) || index < 0) return file + start = index + 1 + } + file + } + + private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = { + val lookup = lookupName(name, isDir) + if (lookup != null) lookup + else { + val jfile = new JFile(file, name) + if (isDir) jfile.mkdirs() else jfile.createNewFile() + new PlainFile(jfile) + } + } + + /** + * Get the file in this directory with the given name, + * creating an empty file if it does not already existing. + */ + def fileNamed(name: String): AbstractFile = { + assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path)) + fileOrSubdirectoryNamed(name, isDir = false) + } + + /** + * Get the subdirectory with a given name, creating it if it + * does not already exist. 
+ */ + def subdirectoryNamed(name: String): AbstractFile = { + assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path)) + fileOrSubdirectoryNamed(name, isDir = true) + } + + protected def unsupported(): Nothing = unsupported(null) + protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg) + + /** Returns the path of this abstract file. */ + override def toString() = path + +} diff --git a/compiler/src/dotty/tools/io/Directory.scala b/compiler/src/dotty/tools/io/Directory.scala new file mode 100644 index 000000000000..6c23fd413194 --- /dev/null +++ b/compiler/src/dotty/tools/io/Directory.scala @@ -0,0 +1,66 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package dotty.tools.io + +/** + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +object Directory { + import scala.util.Properties.userDir + + private def normalizePath(s: String) = Some(apply(Path(s).normalize)) + def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir) + + def apply(path: Path): Directory = path.toDirectory + + // Like File.makeTemp but creates a directory instead + def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null): Directory = { + val path = File.makeTemp(prefix, suffix, dir) + path.delete() + path.createDirectory() + } +} + +/** An abstraction for directories. 
+ * + * @author Paul Phillips + * @since 2.8 + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +class Directory(jfile: JFile) extends Path(jfile) { + override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory + override def toDirectory: Directory = this + override def toFile: File = new File(jfile) + override def normalize: Directory = super.normalize.toDirectory + + /** An iterator over the contents of this directory. + */ + def list: Iterator[Path] = + jfile.listFiles match { + case null => Iterator.empty + case xs => xs.iterator map Path.apply + } + + def dirs: Iterator[Directory] = list collect { case x: Directory => x } + def files: Iterator[File] = list collect { case x: File => x } + + override def walkFilter(cond: Path => Boolean): Iterator[Path] = + list filter cond flatMap (_ walkFilter cond) + + def deepFiles: Iterator[File] = Path.onlyFiles(deepList()) + + /** If optional depth argument is not given, will recurse + * until it runs out of contents. 
+ */ + def deepList(depth: Int = -1): Iterator[Path] = + if (depth < 0) list ++ (dirs flatMap (_ deepList (depth))) + else if (depth == 0) Iterator.empty + else list ++ (dirs flatMap (_ deepList (depth - 1))) +} diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala new file mode 100644 index 000000000000..61dcb1a1b6e0 --- /dev/null +++ b/compiler/src/dotty/tools/io/File.scala @@ -0,0 +1,114 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package dotty.tools.io + +import java.io.{ + FileInputStream, FileOutputStream, BufferedWriter, OutputStreamWriter, + BufferedOutputStream, IOException, PrintWriter +} + +import scala.io.Codec +/** + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +object File { + def pathSeparator = java.io.File.pathSeparator + def separator = java.io.File.separator + def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec) + + // Create a temporary file, which will be deleted upon jvm exit. + def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null) = { + val jfile = java.io.File.createTempFile(prefix, suffix, dir) + jfile.deleteOnExit() + apply(jfile) + } +} + +/** An abstraction for files. For character data, a Codec + * can be supplied at either creation time or when a method + * involving character data is called (with the latter taking + * precedence if supplied.) If neither is available, the value + * of scala.io.Codec.default is used. 
+ * + * @author Paul Phillips + * @since 2.8 + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars { + override val creationCodec = constructorCodec + + override def addExtension(ext: String): File = super.addExtension(ext).toFile + override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile + override def toDirectory: Directory = new Directory(jfile) + override def toFile: File = this + override def normalize: File = super.normalize.toFile + override def length = super[Path].length + override def walkFilter(cond: Path => Boolean): Iterator[Path] = + if (cond(this)) Iterator.single(this) else Iterator.empty + + /** Obtains an InputStream. */ + def inputStream() = new FileInputStream(jfile) + + /** Obtains an OutputStream. */ + def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append) + def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append)) + + /** Obtains an OutputStreamWriter wrapped around a FileOutputStream. + * This should behave like a less broken version of java.io.FileWriter, + * in that unlike the java version you can specify the encoding. + */ + def writer(append: Boolean, codec: Codec): OutputStreamWriter = + new OutputStreamWriter(outputStream(append), codec.charSet) + + /** Wraps a BufferedWriter around the result of writer(). + */ + def bufferedWriter(): BufferedWriter = bufferedWriter(append = false) + def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec) + def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter = + new BufferedWriter(writer(append, codec)) + + def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true) + + /** Creates a new file and writes all the Strings to it. 
*/ + def writeAll(strings: String*): Unit = { + val out = bufferedWriter() + try strings foreach (out write _) + finally out.close() + } + + def appendAll(strings: String*): Unit = { + val out = bufferedWriter(append = true) + try strings foreach (out write _) + finally out.close() + } + + /** Calls println on each string (so it adds a newline in the PrintWriter fashion.) */ + def printlnAll(strings: String*): Unit = { + val out = printWriter() + try strings foreach (out println _) + finally out.close() + } + + def safeSlurp(): Option[String] = + try Some(slurp()) + catch { case _: IOException => None } + + /** Reflection since we're into the java 6+ API. + */ + def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = { + type JBoolean = java.lang.Boolean + val method = + try classOf[JFile].getMethod("setExecutable", classOf[Boolean], classOf[Boolean]) + catch { case _: NoSuchMethodException => return false } + + try method.invoke(jfile, executable: JBoolean, ownerOnly: JBoolean).asInstanceOf[JBoolean].booleanValue + catch { case _: Exception => false } + } +} diff --git a/compiler/src/dotty/tools/io/FileOperationException.scala b/compiler/src/dotty/tools/io/FileOperationException.scala new file mode 100644 index 000000000000..533384317732 --- /dev/null +++ b/compiler/src/dotty/tools/io/FileOperationException.scala @@ -0,0 +1,12 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + +package dotty.tools.io +/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ +case class FileOperationException(msg: String) extends RuntimeException(msg) diff --git a/compiler/src/dotty/tools/io/IOStats.scala b/compiler/src/dotty/tools/io/IOStats.scala new file mode 100644 index 000000000000..7196b65f4d09 --- /dev/null +++ 
b/compiler/src/dotty/tools/io/IOStats.scala @@ -0,0 +1,29 @@ +package dotty.tools.io + +// Due to limitations in the Statistics machinery, these are only +// reported if this patch is applied. +// +// --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +// +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala +// @@ -109,7 +109,7 @@ quant) +// * Quantities with non-empty prefix are printed in the statistics info. +// */ +// trait Quantity { +// - if (enabled && prefix.nonEmpty) { +// + if (prefix.nonEmpty) { +// val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" +// qs(key) = this +// } +// @@ -243,7 +243,7 @@ quant) +// * +// * to remove all Statistics code from build +// */ +// - final val canEnable = _enabled +// + final val canEnable = true // _enabled +// +// We can commit this change as the first diff reverts a fix for an IDE memory leak. +private[io] object IOStats { + val fileExistsCount = Statistics.newCounter("# File.exists calls") + val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls") + val fileIsFileCount = Statistics.newCounter("# File.isFile calls") +} diff --git a/compiler/src/dotty/tools/io/NoAbstractFile.scala b/compiler/src/dotty/tools/io/NoAbstractFile.scala new file mode 100644 index 000000000000..3b2bbeb58022 --- /dev/null +++ b/compiler/src/dotty/tools/io/NoAbstractFile.scala @@ -0,0 +1,33 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package dotty.tools.io + +import java.io.InputStream + +/** A distinguished object so you can avoid both null + * and Option. + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +object NoAbstractFile extends AbstractFile { + def absolute: AbstractFile = this + def container: AbstractFile = this + def create(): Unit = ??? + def delete(): Unit = ??? 
+ def file: java.io.File = null + def input: InputStream = null + def isDirectory: Boolean = false + override def isVirtual: Boolean = true + def iterator: Iterator[AbstractFile] = Iterator.empty + def lastModified: Long = 0L + def lookupName(name: String, directory: Boolean): AbstractFile = null + def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = null + def name: String = "" + def output: java.io.OutputStream = null + def path: String = "" + override def toByteArray = Array[Byte]() + override def toString = "" +} diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala new file mode 100644 index 000000000000..1786269449d4 --- /dev/null +++ b/compiler/src/dotty/tools/io/Path.scala @@ -0,0 +1,266 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package dotty.tools.io + +import scala.language.implicitConversions + +import java.io.RandomAccessFile +import java.net.{ URI, URL } +import scala.util.Random.alphanumeric + +/** An abstraction for filesystem paths. The differences between + * Path, File, and Directory are primarily to communicate intent. + * Since the filesystem can change at any time, there is no way to + * reliably associate Files only with files and so on. Any Path + * can be converted to a File or Directory (and thus gain access to + * the additional entity specific methods) by calling toFile or + * toDirectory, which has no effect on the filesystem. + * + * Also available are createFile and createDirectory, which attempt + * to create the path in question. 
+ * + * @author Paul Phillips + * @since 2.8 + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +object Path { + def isExtensionJarOrZip(jfile: JFile): Boolean = isExtensionJarOrZip(jfile.getName) + def isExtensionJarOrZip(name: String): Boolean = { + val ext = extension(name) + ext == "jar" || ext == "zip" + } + def extension(name: String): String = { + var i = name.length - 1 + while (i >= 0 && name.charAt(i) != '.') + i -= 1 + + if (i < 0) "" + else name.substring(i + 1).toLowerCase + } + + // not certain these won't be problematic, but looks good so far + implicit def string2path(s: String): Path = apply(s) + implicit def jfile2path(jfile: JFile): Path = apply(jfile) + + def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory) + def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory) + def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile) + + def roots: List[Path] = java.io.File.listRoots().toList map Path.apply + + def apply(path: String): Path = apply(new JFile(path)) + def apply(jfile: JFile): Path = try { + def isFile = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + jfile.isFile + } + + def isDirectory = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + jfile.isDirectory + } + + if (isFile) new File(jfile) + else if (isDirectory) new Directory(jfile) + else new Path(jfile) + } catch { case ex: SecurityException => new Path(jfile) } + + /** Avoiding any shell/path issues by only using alphanumerics. */ + private[io] def randomPrefix = alphanumeric take 6 mkString "" + private[io] def fail(msg: String) = throw FileOperationException(msg) +} +import Path._ + +/** The Path constructor is private so we can enforce some + * semantics regarding how a Path might relate to the world. 
+ * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +class Path private[io] (val jfile: JFile) { + val separator = java.io.File.separatorChar + val separatorStr = java.io.File.separator + + // conversions + def toFile: File = new File(jfile) + def toDirectory: Directory = new Directory(jfile) + def toAbsolute: Path = if (isAbsolute) this else Path(jfile.getAbsolutePath()) + def toCanonical: Path = Path(jfile.getCanonicalPath()) + def toURI: URI = jfile.toURI() + def toURL: URL = toURI.toURL() + + /** If this path is absolute, returns it: otherwise, returns an absolute + * path made up of root / this. + */ + def toAbsoluteWithRoot(root: Path) = if (isAbsolute) this else root.toAbsolute / this + + /** Creates a new Path with the specified path appended. Assumes + * the type of the new component implies the type of the result. + */ + def /(child: Path): Path = if (isEmpty) child else new Path(new JFile(jfile, child.path)) + def /(child: Directory): Directory = /(child: Path).toDirectory + def /(child: File): File = /(child: Path).toFile + + /** If this path is a directory, recursively iterate over its contents. + * The supplied condition is a filter which is applied to each element, + * with that branch of the tree being closed off if it is false. + * So for example if the condition is false for some subdirectory, nothing + * under that directory will be in the Iterator. If it's true, all files for + * which the condition holds and are directly in that subdirectory are in the + * Iterator, and all sub-subdirectories are recursively evaluated + */ + def walkFilter(cond: Path => Boolean): Iterator[Path] = + if (isFile) toFile walkFilter cond + else if (isDirectory) toDirectory walkFilter cond + else Iterator.empty + + /** Equivalent to walkFilter(_ => true). 
+ */ + def walk: Iterator[Path] = walkFilter(_ => true) + + // identity + def name: String = jfile.getName() + def path: String = jfile.getPath() + def normalize: Path = Path(jfile.getAbsolutePath()) + + def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other) + def relativize(other: Path) = { + assert(isAbsolute == other.isAbsolute, "Paths not of same type: "+this+", "+other) + + def createRelativePath(baseSegs: List[String], otherSegs: List[String]) : String = { + (baseSegs, otherSegs) match { + case (b :: bs, o :: os) if b == o => createRelativePath(bs, os) + case (bs, os) => ((".."+separator)*bs.length)+os.mkString(separatorStr) + } + } + + Path(createRelativePath(segments, other.segments)) + } + + def segments: List[String] = (path split separator).toList filterNot (_.length == 0) + + /** + * @return The path of the parent directory, or root if path is already root + */ + def parent: Directory = path match { + case "" | "." => Directory("..") + case _ => + // the only solution <-- a comment which could have used elaboration + if (segments.nonEmpty && segments.last == "..") + (path / "..").toDirectory + else jfile.getParent match { + case null => + if (isAbsolute) toDirectory // it should be a root. BTW, don't need to worry about relative pathed root + else Directory(".") // a dir under pwd + case x => + Directory(x) + } + } + def parents: List[Directory] = { + val p = parent + if (p isSame this) Nil else p :: p.parents + } + // if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise "" + def extension: String = { + var i = name.length - 1 + while (i >= 0 && name.charAt(i) != '.') + i -= 1 + + if (i < 0) "" + else name.substring(i + 1) + } + // compares against extensions in a CASE INSENSITIVE way. + def hasExtension(ext: String, exts: String*) = { + val lower = extension.toLowerCase + ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower) + } + // returns the filename without the extension. 
+ def stripExtension: String = name stripSuffix ("." + extension) + // returns the Path with the extension. + def addExtension(ext: String): Path = Path(path + "." + ext) + // changes the existing extension out for a new one, or adds it + // if the current path has none. + def changeExtension(ext: String): Path = ( + if (extension == "") addExtension(ext) + else Path(path.stripSuffix(extension) + ext) + ) + + // conditionally execute + def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None + def ifDirectory[T](f: Directory => T): Option[T] = if (isDirectory) Some(f(toDirectory)) else None + + // Boolean tests + def canRead = jfile.canRead() + def canWrite = jfile.canWrite() + def exists = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) + try jfile.exists() catch { case ex: SecurityException => false } + } + + def isFile = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + try jfile.isFile() catch { case ex: SecurityException => false } + } + def isDirectory = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." } + } + def isAbsolute = jfile.isAbsolute() + def isEmpty = path.length == 0 + + // Information + def lastModified = jfile.lastModified() + def length = jfile.length() + + // Boolean path comparisons + def endsWith(other: Path) = segments endsWith other.segments + def isSame(other: Path) = toCanonical == other.toCanonical + def isFresher(other: Path) = lastModified > other.lastModified + + // creations + def createDirectory(force: Boolean = true, failIfExists: Boolean = false): Directory = { + val res = if (force) jfile.mkdirs() else jfile.mkdir() + if (!res && failIfExists && exists) fail("Directory '%s' already exists." 
format name) + else if (isDirectory) toDirectory + else new Directory(jfile) + } + def createFile(failIfExists: Boolean = false): File = { + val res = jfile.createNewFile() + if (!res && failIfExists && exists) fail("File '%s' already exists." format name) + else if (isFile) toFile + else new File(jfile) + } + + // deletions + def delete() = jfile.delete() + + /** Deletes the path recursively. Returns false on failure. + * Use with caution! + */ + def deleteRecursively(): Boolean = deleteRecursively(jfile) + private def deleteRecursively(f: JFile): Boolean = { + if (f.isDirectory) f.listFiles match { + case null => + case xs => xs foreach deleteRecursively + } + f.delete() + } + + def truncate() = + isFile && { + val raf = new RandomAccessFile(jfile, "rw") + raf setLength 0 + raf.close() + length == 0 + } + + override def toString() = path + override def equals(other: Any) = other match { + case x: Path => path == x.path + case _ => false + } + override def hashCode() = path.hashCode() +} diff --git a/compiler/src/dotty/tools/io/Statistics.scala b/compiler/src/dotty/tools/io/Statistics.scala new file mode 100644 index 000000000000..9f1c8d9d7340 --- /dev/null +++ b/compiler/src/dotty/tools/io/Statistics.scala @@ -0,0 +1,284 @@ +package dotty.tools.io + +import scala.collection.mutable + +object Statistics { + + type TimerSnapshot = (Long, Long) + + /** If enabled, increment counter by one */ + @inline final def incCounter(c: Counter) { + if (_enabled && c != null) c.value += 1 + } + + /** If enabled, increment counter by given delta */ + @inline final def incCounter(c: Counter, delta: Int) { + if (_enabled && c != null) c.value += delta + } + + /** If enabled, increment counter in map `ctrs` at index `key` by one */ + @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) = + if (_enabled && ctrs != null) ctrs(key).value += 1 + + /** If enabled, start subcounter. While active it will track all increments of + * its base counter. 
+ */ + @inline final def startCounter(sc: SubCounter): (Int, Int) = + if (_enabled && sc != null) sc.start() else null + + /** If enabled, stop subcounter from tracking its base counter. */ + @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) { + if (_enabled && sc != null) sc.stop(start) + } + + /** If enabled, start timer */ + @inline final def startTimer(tm: Timer): TimerSnapshot = + if (_enabled && tm != null) tm.start() else null + + /** If enabled, stop timer */ + @inline final def stopTimer(tm: Timer, start: TimerSnapshot) { + if (_enabled && tm != null) tm.stop(start) + } + + /** If enabled, push and start a new timer in timer stack */ + @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot = + if (_enabled && timers != null) timers.push(timer) else null + + /** If enabled, stop and pop timer from timer stack */ + @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) { + if (_enabled && timers != null) timers.pop(prev) + } + + /** Create a new counter that shows as `prefix` and is active in given phases */ + def newCounter(prefix: String, phases: String*) = new Counter(prefix, phases) + + /** Create a new relative counter that shows as `prefix` and is active + * in the same phases as its base counter. Relative counters print as percentages + * of their base counters. + */ + def newRelCounter(prefix: String, ctr: Counter): Counter = new RelCounter(prefix, ctr) + + /** Create a new subcounter that shows as `prefix` and is active + * in the same phases as its base counter. Subcounters can track + * increments of their base counters and print as percentages + * of their base counters. 
+ */ + def newSubCounter(prefix: String, ctr: Counter): SubCounter = new SubCounter(prefix, ctr) + + /** Create a new counter that shows as `prefix` and is active in given phases */ + def newTimer(prefix: String, phases: String*): Timer = new Timer(prefix, phases) + + /** Create a new subtimer that shows as `prefix` and is active + * in the same phases as its base timer. Subtimers can track + * increments of their base timers and print as percentages + * of their base timers. + */ + def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer) + + /** Create a new stackable that shows as `prefix` and is active + * in the same phases as its base timer. Stackable timers are subtimers + * that can be stacked in a timerstack, and that print aggregate, as well as specific + * durations. + */ + def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer) + + /** Create a new view that shows as `prefix` and is active in given phases. + * The view always reflects the current value of `quant` as a quantity. + */ + def newView(prefix: String, phases: String*)(quant: => Any): View = new View(prefix, phases, +quant) + + /** Create a new quantity map that shows as `prefix` and is active in given phases. 
+ */ + def newQuantMap[K, V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[K, V] = new QuantMap(prefix, phases, initValue) + + /** Same as newQuantMap, where the key type is fixed to be Class[_] */ + def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue) + + /** Create a new timer stack */ + def newTimerStack() = new TimerStack() + + def allQuantities: Iterable[Quantity] = + for ((_, q) <- qs if q.underlying == q; + r <- q :: q.children.toList if r.prefix.nonEmpty) yield r + + private def showPercent(x: Long, base: Long) = + if (base == 0) "" else f" (${x.toDouble / base.toDouble * 100}%2.1f%%)" + + /** The base trait for quantities. + * Quantities with non-empty prefix are printed in the statistics info. + */ + trait Quantity { + if (enabled && prefix.nonEmpty) { + val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" + qs(key) = this + } + val prefix: String + val phases: Seq[String] + def underlying: Quantity = this + def showAt(phase: String) = phases.isEmpty || (phases contains phase) + def line = f"$prefix%-30s: ${this}" + val children = new mutable.ListBuffer[Quantity] + } + + trait SubQuantity extends Quantity { + protected def underlying: Quantity + underlying.children += this + } + + class Counter(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Counter] { + var value: Int = 0 + def compare(that: Counter): Int = + if (this.value < that.value) -1 + else if (this.value > that.value) 1 + else 0 + override def equals(that: Any): Boolean = + that match { + case that: Counter => (this compare that) == 0 + case _ => false + } + override def hashCode = value + override def toString = value.toString + } + + class View(val prefix: String, val phases: Seq[String], quant: => Any) extends Quantity { + override def toString = quant.toString + } + + private class RelCounter(prefix: String, override val 
underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity { + override def toString = + if (value == 0) "0" + else { + assert(underlying.value != 0, prefix+"/"+underlying.line) + f"${value.toFloat / underlying.value}%2.1f" + } + } + + class SubCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity { + def start() = (value, underlying.value) + def stop(prev: (Int, Int)) { + val (value0, uvalue0) = prev + value = value0 + underlying.value - uvalue0 + } + override def toString = + value + showPercent(value.toLong, underlying.value.toLong) + } + + class Timer(val prefix: String, val phases: Seq[String]) extends Quantity { + var nanos: Long = 0 + var timings = 0 + def start() = { + (nanos, System.nanoTime()) + } + def stop(prev: TimerSnapshot) { + val (nanos0, start) = prev + nanos = nanos0 + System.nanoTime() - start + timings += 1 + } + protected def show(ns: Long) = s"${ns/1000000}ms" + override def toString = s"$timings spans, ${show(nanos)}" + } + + class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity { + override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos) + } + + class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] { + var specificNanos: Long = 0 + def compare(that: StackableTimer): Int = + if (this.specificNanos < that.specificNanos) -1 + else if (this.specificNanos > that.specificNanos) 1 + else 0 + override def equals(that: Any): Boolean = + that match { + case that: StackableTimer => (this compare that) == 0 + case _ => false + } + override def hashCode = specificNanos.## + override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific" + } + + /** A mutable map quantity where missing elements are automatically inserted + * on access by executing `initValue`. 
+ */ + class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V) + extends mutable.HashMap[K, V] with mutable.SynchronizedMap[K, V] with Quantity { + override def default(key: K) = { + val elem = initValue + this(key) = elem + elem + } + override def toString = + this.toSeq.sortWith(_._2 > _._2).map { + case (cls: Class[_], elem) => + s"${cls.toString.substring(cls.toString.lastIndexOf("$") + 1)}: $elem" + case (key, elem) => + s"$key: $elem" + }.mkString(", ") + } + + /** A stack of timers, all active, where a timer's specific "clock" + * is stopped as long as it is buried by some other timer in the stack, but + * its aggregate clock keeps on ticking. + */ + class TimerStack { + private var elems: List[(StackableTimer, Long)] = Nil + /** Start given timer and push it onto the stack */ + def push(t: StackableTimer): TimerSnapshot = { + elems = (t, 0L) :: elems + t.start() + } + /** Stop and pop top timer in stack + */ + def pop(prev: TimerSnapshot) = { + val (nanos0, start) = prev + val duration = System.nanoTime() - start + val (topTimer, nestedNanos) :: rest = elems + topTimer.nanos = nanos0 + duration + topTimer.specificNanos += duration - nestedNanos + topTimer.timings += 1 + elems = rest match { + case (outerTimer, outerNested) :: elems1 => + (outerTimer, outerNested + duration) :: elems1 + case Nil => + Nil + } + } + } + + private var _enabled = false + private val qs = new mutable.HashMap[String, Quantity] + + /** replace with + * + * final val canEnable = false + * + * to remove all Statistics code from build + */ + final val canEnable = _enabled + + /** replace with + * + * final def hotEnabled = _enabled + * + * and rebuild, to also count tiny but super-hot methods + * such as phase, flags, owner, name. 
+ */ + final val hotEnabled = false + + def enabled = _enabled + def enabled_=(cond: Boolean) = { + if (cond && !_enabled) { + val start = System.nanoTime() + var total = 0L + for (i <- 1 to 10000) { + val time = System.nanoTime() + total += System.nanoTime() - time + } + val total2 = System.nanoTime() - start + println("Enabling statistics, measuring overhead = "+ + total/10000.0+"ns to "+total2/10000.0+"ns per timer") + _enabled = true + } + } +} diff --git a/compiler/src/dotty/tools/io/Streamable.scala b/compiler/src/dotty/tools/io/Streamable.scala new file mode 100644 index 000000000000..a61a620e06ec --- /dev/null +++ b/compiler/src/dotty/tools/io/Streamable.scala @@ -0,0 +1,137 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package dotty.tools.io + +import java.net.URL +import java.io.{ BufferedInputStream, InputStream } +import java.io.{ BufferedReader, InputStreamReader, Closeable => JCloseable } +import scala.io.{ Codec, BufferedSource, Source } +import scala.collection.mutable.ArrayBuffer +import Path.fail + +/** Traits for objects which can be represented as Streams. + * + * @author Paul Phillips + * @since 2.8 + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +object Streamable { + /** Traits which can be viewed as a sequence of bytes. Source types + * which know their length should override def length: Long for more + * efficient method implementations. + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + * + * Note that this code was not written with resource management in mind. 
+ * Several methods (such as `chars` and `lines`) create InputStreams they + * don't close + */ + trait Bytes { + def inputStream(): InputStream + def length: Long = -1 + + def bufferedInput() = new BufferedInputStream(inputStream()) + def bytes(): Iterator[Byte] = bytesAsInts() map (_.toByte) + def bytesAsInts(): Iterator[Int] = { + val in = bufferedInput() + Iterator continually in.read() takeWhile (_ != -1) + } + + /** This method aspires to be the fastest way to read + * a stream of known length into memory. + */ + def toByteArray(): Array[Byte] = { + // if we don't know the length, fall back on relative inefficiency + if (length == -1L) + return (new ArrayBuffer[Byte]() ++= bytes()).toArray + + val arr = new Array[Byte](length.toInt) + val len = arr.length + lazy val in = bufferedInput() + var offset = 0 + + def loop() { + if (offset < len) { + val read = in.read(arr, offset, len - offset) + if (read >= 0) { + offset += read + loop() + } + } + } + try loop() + finally in.close() + + if (offset == arr.length) arr + else fail("Could not read entire source (%d of %d bytes)".format(offset, len)) + } + } + + /** For objects which can be viewed as Chars. + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ + trait Chars extends Bytes { + /** Calls to methods requiring byte<->char transformations should be offered + * in a form which allows specifying the codec. When it is not specified, + * the one discovered at creation time will be used, which will always find the + * one in scala.io.Codec if no other is available. This can be overridden + * to use a different default. + */ + def creationCodec: Codec = implicitly[Codec] + + /** Caller is responsible for closing the returned BufferedSource. */ + def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec) + + /** Beware! Leaks an InputStream which will not be closed until it gets finalized. 
*/ + def lines(): Iterator[String] = lines(creationCodec) + + /** Beware! Leaks an InputStream which will not be closed until it gets finalized. */ + def lines(codec: Codec): Iterator[String] = chars(codec).getLines() + + /** Obtains an InputStreamReader wrapped around a FileInputStream. + */ + def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream(), codec.charSet) + + /** Wraps a BufferedReader around the result of reader(). + */ + def bufferedReader(): BufferedReader = bufferedReader(creationCodec) + def bufferedReader(codec: Codec) = new BufferedReader(reader(codec)) + + /** Creates a BufferedReader and applies the closure, automatically closing it on completion. + */ + def applyReader[T](f: BufferedReader => T): T = { + val in = bufferedReader() + try f(in) + finally in.close() + } + + /** Convenience function to import entire file into a String. + */ + def slurp(): String = slurp(creationCodec) + def slurp(codec: Codec) = { + val src = chars(codec) + try src.mkString finally src.close() // Always Be Closing + } + } + + /** Call a function on something Closeable, finally closing it. */ + def closing[T <: JCloseable, U](stream: T)(f: T => U): U = + try f(stream) + finally stream.close() + + def bytes(is: => InputStream): Array[Byte] = + (new Bytes { + def inputStream() = is + }).toByteArray() + + def slurp(is: => InputStream)(implicit codec: Codec): String = + new Chars { def inputStream() = is } slurp codec + + def slurp(url: URL)(implicit codec: Codec): String = + slurp(url.openStream()) +} diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala new file mode 100644 index 000000000000..bfd3a3012a39 --- /dev/null +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -0,0 +1,72 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + */ + +package dotty.tools.io + +import scala.collection.mutable + +/** + * An in-memory directory. 
+ * + * @author Lex Spoon + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory]) +extends AbstractFile { + def path: String = + maybeContainer match { + case None => name + case Some(parent) => parent.path+'/'+ name + } + + def absolute = this + + def container = maybeContainer.get + def isDirectory = true + override def isVirtual = true + val lastModified: Long = System.currentTimeMillis + + override def file = null + override def input = sys.error("directories cannot be read") + override def output = sys.error("directories cannot be written") + + /** Does this abstract file denote an existing file? */ + def create() { unsupported() } + + /** Delete the underlying file or directory (recursively). */ + def delete() { unsupported() } + + /** Returns an abstract file with the given name. It does not + * check that it exists. + */ + def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() + + private val files = mutable.Map.empty[String, AbstractFile] + + // the toList is so that the directory may continue to be + // modified while its elements are iterated + def iterator = files.values.toList.iterator + + override def lookupName(name: String, directory: Boolean): AbstractFile = + (files get name filter (_.isDirectory == directory)).orNull + + override def fileNamed(name: String): AbstractFile = + Option(lookupName(name, directory = false)) getOrElse { + val newFile = new VirtualFile(name, path+'/'+name) + files(name) = newFile + newFile + } + + override def subdirectoryNamed(name: String): AbstractFile = + Option(lookupName(name, directory = true)) getOrElse { + val dir = new VirtualDirectory(name, Some(this)) + files(name) = dir + dir + } + + def clear() { + files.clear() + } +} diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala new 
file mode 100644 index 000000000000..3954c64eaab6 --- /dev/null +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -0,0 +1,96 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package dotty.tools.io + +import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream } + +/** This class implements an in-memory file. + * + * @author Philippe Altherr + * @version 1.0, 23/03/2004 + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +class VirtualFile(val name: String, override val path: String) extends AbstractFile { + /** + * Initializes this instance with the specified name and an + * identical path. + * + * @param name the name of the virtual file to be created + * @return the created virtual file + */ + def this(name: String) = this(name, name) + + override def hashCode = path.hashCode + override def equals(that: Any) = that match { + case x: VirtualFile => x.path == path + case _ => false + } + + private var content = Array.emptyByteArray + + def absolute = this + + /** Returns null. */ + def file: JFile = null + + override def sizeOption: Option[Int] = Some(content.length) + + def input : InputStream = new ByteArrayInputStream(content) + + override def output: OutputStream = { + new ByteArrayOutputStream() { + override def close() { + super.close() + content = toByteArray() + } + } + } + + def container: AbstractFile = NoAbstractFile + + /** Is this abstract file a directory? */ + def isDirectory: Boolean = false + + /** @inheritdoc */ + override def isVirtual: Boolean = true + + // private var _lastModified: Long = 0 + // _lastModified + + /** Returns the time that this abstract file was last modified. */ + // !!! Except it doesn't - it's private and never set - so I replaced it + // with constant 0 to save the field. + def lastModified: Long = 0 + + /** Returns all abstract subfiles of this abstract directory. 
*/ + def iterator: Iterator[AbstractFile] = { + assert(isDirectory, "not a directory '" + this + "'") + Iterator.empty + } + + /** Does this abstract file denote an existing file? */ + def create(): Unit = unsupported() + + /** Delete the underlying file or directory (recursively). */ + def delete(): Unit = unsupported() + + /** + * Returns the abstract file in this abstract directory with the + * specified name. If there is no such file, returns null. The + * argument "directory" tells whether to look for a directory or + * or a regular file. + */ + def lookupName(name: String, directory: Boolean): AbstractFile = { + assert(isDirectory, "not a directory '" + this + "'") + null + } + + /** Returns an abstract file with the given name. It does not + * check that it exists. + */ + def lookupNameUnchecked(name: String, directory: Boolean) = unsupported() +} diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala new file mode 100644 index 000000000000..3d1f6b07e60b --- /dev/null +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -0,0 +1,323 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package dotty.tools.io + +import java.net.URL +import java.io.{ IOException, InputStream, ByteArrayInputStream, FilterInputStream } +import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream } +import java.util.jar.Manifest +import scala.collection.mutable +import scala.collection.JavaConverters._ +import scala.annotation.tailrec + +/** An abstraction for zip files and streams. Everything is written the way + * it is for performance: we come through here a lot on every run. Be careful + * about changing it. 
+ * + * @author Philippe Altherr (original version) + * @author Paul Phillips (this one) + * @version 2.0, + * + * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' + */ +object ZipArchive { + private[io] val closeZipFile = sys.props.get("scala.classpath.closeZip").map(_.toBoolean).getOrElse(false) + + /** + * @param file a File + * @return A ZipArchive if `file` is a readable zip file, otherwise null. + */ + def fromFile(file: File): FileZipArchive = fromFile(file.jfile) + def fromFile(file: JFile): FileZipArchive = + try { new FileZipArchive(file) } + catch { case _: IOException => null } + + def fromManifestURL(url: URL): AbstractFile = new ManifestResources(url) + + private def dirName(path: String) = splitPath(path, front = true) + private def baseName(path: String) = splitPath(path, front = false) + private def splitPath(path0: String, front: Boolean): String = { + val isDir = path0.charAt(path0.length - 1) == '/' + val path = if (isDir) path0.substring(0, path0.length - 1) else path0 + val idx = path.lastIndexOf('/') + + if (idx < 0) + if (front) "/" + else path + else + if (front) path.substring(0, idx + 1) + else path.substring(idx + 1) + } +} +import ZipArchive._ +/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ +abstract class ZipArchive(override val file: JFile) extends AbstractFile with Equals { + self => + + override def underlyingSource = Some(this) + def isDirectory = true + def lookupName(name: String, directory: Boolean) = unsupported() + def lookupNameUnchecked(name: String, directory: Boolean) = unsupported() + def create() = unsupported() + def delete() = unsupported() + def output = unsupported() + def container = unsupported() + def absolute = unsupported() + + /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ + sealed abstract class Entry(path: String) 
extends VirtualFile(baseName(path), path) { + // have to keep this name for compat with sbt's compiler-interface + def getArchive: ZipFile = null + override def underlyingSource = Some(self) + override def toString = self.path + "(" + path + ")" + } + + /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ + class DirEntry(path: String) extends Entry(path) { + val entries = mutable.HashMap[String, Entry]() + + override def isDirectory = true + override def iterator: Iterator[Entry] = entries.valuesIterator + override def lookupName(name: String, directory: Boolean): Entry = { + if (directory) entries(name + "/") + else entries(name) + } + } + + private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry = + //OPT inlined from getOrElseUpdate; saves ~50K closures on test run. + // was: + // dirs.getOrElseUpdate(path, { + // val parent = ensureDir(dirs, dirName(path), null) + // val dir = new DirEntry(path) + // parent.entries(baseName(path)) = dir + // dir + // }) + dirs get path match { + case Some(v) => v + case None => + val parent = ensureDir(dirs, dirName(path), null) + val dir = new DirEntry(path) + parent.entries(baseName(path)) = dir + dirs(path) = dir + dir + } + + protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = { + if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) + else ensureDir(dirs, dirName(entry.getName), null) + } +} +/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ +final class FileZipArchive(file: JFile) extends ZipArchive(file) { + private[this] def openZipFile(): ZipFile = try { + new ZipFile(file) + } catch { + case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe) + } + + private[this] class LazyEntry( + name: String, + time: Long, + size: Int + ) extends Entry(name) { + override def 
lastModified: Long = time // could be stale + override def input: InputStream = { + val zipFile = openZipFile() + val entry = zipFile.getEntry(name) + val delegate = zipFile.getInputStream(entry) + new FilterInputStream(delegate) { + override def close(): Unit = { zipFile.close() } + } + } + override def sizeOption: Option[Int] = Some(size) // could be stale + } + + // keeps a file handle open to ZipFile, which forbids file mutation + // on Windows, and leaks memory on all OS (typically by stopping + // classloaders from being garbage collected). But is slightly + // faster than LazyEntry. + private[this] class LeakyEntry( + zipFile: ZipFile, + zipEntry: ZipEntry + ) extends Entry(zipEntry.getName) { + override def lastModified: Long = zipEntry.getTime + override def input: InputStream = zipFile.getInputStream(zipEntry) + override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt) + } + + lazy val (root, allDirs) = { + val root = new DirEntry("/") + val dirs = mutable.HashMap[String, DirEntry]("/" -> root) + val zipFile = openZipFile() + val enum = zipFile.entries() + + try { + while (enum.hasMoreElements) { + val zipEntry = enum.nextElement + val dir = getDir(dirs, zipEntry) + if (zipEntry.isDirectory) dir + else { + val f = + if (ZipArchive.closeZipFile) + new LazyEntry( + zipEntry.getName(), + zipEntry.getTime(), + zipEntry.getSize().toInt + ) + else + new LeakyEntry(zipFile, zipEntry) + + dir.entries(f.name) = f + } + } + } finally { + if (ZipArchive.closeZipFile) zipFile.close() + } + (root, dirs) + } + + def iterator: Iterator[Entry] = root.iterator + + def name = file.getName + def path = file.getPath + def input = File(file).inputStream() + def lastModified = file.lastModified + + override def sizeOption = Some(file.length.toInt) + override def canEqual(other: Any) = other.isInstanceOf[FileZipArchive] + override def hashCode() = file.hashCode + override def equals(that: Any) = that match { + case x: FileZipArchive => file.getAbsoluteFile == 
x.file.getAbsoluteFile + case _ => false + } +} +///** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ +//final class URLZipArchive(val url: URL) extends ZipArchive(null) { +// def iterator: Iterator[Entry] = { +// val root = new DirEntry("/") +// val dirs = mutable.HashMap[String, DirEntry]("/" -> root) +// val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) +// +// @tailrec def loop() { +// val zipEntry = in.getNextEntry() +// class EmptyFileEntry() extends Entry(zipEntry.getName) { +// override def toByteArray: Array[Byte] = null +// override def sizeOption = Some(0) +// } +// class FileEntry() extends Entry(zipEntry.getName) { +// override val toByteArray: Array[Byte] = { +// val len = zipEntry.getSize().toInt +// val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len) +// var offset = 0 +// +// def loop() { +// if (offset < len) { +// val read = in.read(arr, offset, len - offset) +// if (read >= 0) { +// offset += read +// loop() +// } +// } +// } +// loop() +// +// if (offset == arr.length) arr +// else throw new IOException("Input stream truncated: read %d of %d bytes".format(offset, len)) +// } +// override def sizeOption = Some(zipEntry.getSize().toInt) +// } +// +// if (zipEntry != null) { +// val dir = getDir(dirs, zipEntry) +// if (zipEntry.isDirectory) +// dir +// else { +// val f = if (zipEntry.getSize() == 0) new EmptyFileEntry() else new FileEntry() +// dir.entries(f.name) = f +// } +// in.closeEntry() +// loop() +// } +// } +// +// loop() +// try root.iterator +// finally dirs.clear() +// } +// +// def name = url.getFile() +// def path = url.getPath() +// def input = url.openStream() +// def lastModified = +// try url.openConnection().getLastModified() +// catch { case _: IOException => 0 } +// +// override def canEqual(other: Any) = other.isInstanceOf[URLZipArchive] +// override def hashCode() = url.hashCode +// override def equals(that: Any) 
= that match { +// case x: URLZipArchive => url == x.url +// case _ => false +// } +//} + +final class ManifestResources(val url: URL) extends ZipArchive(null) { + def iterator = { + val root = new DirEntry("/") + val dirs = mutable.HashMap[String, DirEntry]("/" -> root) + val manifest = new Manifest(input) + val iter = manifest.getEntries().keySet().iterator().asScala.filter(_.endsWith(".class")).map(new ZipEntry(_)) + + for (zipEntry <- iter) { + val dir = getDir(dirs, zipEntry) + if (!zipEntry.isDirectory) { + class FileEntry() extends Entry(zipEntry.getName) { + override def lastModified = zipEntry.getTime() + override def input = resourceInputStream(path) + override def sizeOption = None + } + val f = new FileEntry() + dir.entries(f.name) = f + } + } + + try root.iterator + finally dirs.clear() + } + + def name = path + def path: String = { + val s = url.getPath + val n = s.lastIndexOf('!') + s.substring(0, n) + } + def input = url.openStream() + def lastModified = + try url.openConnection().getLastModified() + catch { case _: IOException => 0 } + + override def canEqual(other: Any) = other.isInstanceOf[ManifestResources] + override def hashCode() = url.hashCode + override def equals(that: Any) = that match { + case x: ManifestResources => url == x.url + case _ => false + } + + private def resourceInputStream(path: String): InputStream = { + new FilterInputStream(null) { + override def read(): Int = { + if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path) + if(in == null) throw new RuntimeException(path + " not found") + super.read() + } + + override def close(): Unit = { + super.close() + in = null + } + } + } +} diff --git a/compiler/src/dotty/tools/io/package.scala b/compiler/src/dotty/tools/io/package.scala index 37f3b0f3b1b3..7201bf0eb216 100644 --- a/compiler/src/dotty/tools/io/package.scala +++ b/compiler/src/dotty/tools/io/package.scala @@ -5,26 +5,7 @@ package dotty.tools -import java.util.concurrent.{ Future, 
Callable } -import java.util.{ Timer, TimerTask } -import java.util.jar.{ Attributes } -import scala.language.implicitConversions - package object io { - // Forwarders from scala.reflect.io - type AbstractFile = scala.reflect.io.AbstractFile - val AbstractFile = scala.reflect.io.AbstractFile - type Directory = scala.reflect.io.Directory - val Directory = scala.reflect.io.Directory - type File = scala.reflect.io.File - val File = scala.reflect.io.File - type ManifestResources = scala.reflect.io.ManifestResources - type Path = scala.reflect.io.Path - val Path = scala.reflect.io.Path - type VirtualDirectory = scala.reflect.io.VirtualDirectory - type VirtualFile = scala.reflect.io.VirtualFile - type ZipArchive = scala.reflect.io.ZipArchive - type FileZipArchive = scala.reflect.io.FileZipArchive type JManifest = java.util.jar.Manifest type JFile = java.io.File } diff --git a/doc-tool/test/DottyDocTest.scala b/doc-tool/test/DottyDocTest.scala index 9e7b70c8f868..12289baf5671 100644 --- a/doc-tool/test/DottyDocTest.scala +++ b/doc-tool/test/DottyDocTest.scala @@ -71,7 +71,7 @@ trait DottyDocTest extends MessageRendering { } private def sourceFileFromString(name: String, contents: String): SourceFile = { - val virtualFile = new scala.reflect.io.VirtualFile(name) + val virtualFile = new dotty.tools.io.VirtualFile(name) val writer = new BufferedWriter(new OutputStreamWriter(virtualFile.output, "UTF-8")) writer.write(contents) writer.close() diff --git a/project/Build.scala b/project/Build.scala index 1a54323d1652..c96df494aa23 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -420,7 +420,6 @@ object Build { libraryDependencies ++= Seq("com.typesafe.sbt" % "sbt-interface" % sbtVersion.value, "org.scala-lang.modules" % "scala-xml_2.11" % "1.0.1", "com.novocode" % "junit-interface" % "0.11" % "test", - "org.scala-lang" % "scala-reflect" % scalacVersion, "org.scala-lang" % "scala-library" % scalacVersion % "test"), // enable improved incremental compilation algorithm 
@@ -629,7 +628,6 @@ object Build { // Settings shared between dotty-library and dotty-library-bootstrapped lazy val dottyLibrarySettings = Seq( libraryDependencies ++= Seq( - "org.scala-lang" % "scala-reflect" % scalacVersion, "org.scala-lang" % "scala-library" % scalacVersion, "com.novocode" % "junit-interface" % "0.11" % "test" ) From cbc1546f0f713eaa1f3bf7102c802659a7dc4fb3 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Fri, 28 Apr 2017 15:08:44 +0200 Subject: [PATCH 09/12] Remove reflect from scripts and fix incorrect bin-test --- bin/common | 9 +++++---- bin/dotc | 7 +++---- bin/test/TestScripts.scala | 20 +++++++++++++++----- project/Build.scala | 1 + 4 files changed, 24 insertions(+), 13 deletions(-) diff --git a/bin/common b/bin/common index d1a9db482333..2796be5aec56 100755 --- a/bin/common +++ b/bin/common @@ -115,6 +115,11 @@ else echo "Failed to parse .packages file" build_all fi + + if [ ! -f "$INTERFACES_JAR" -o ! -f "$MAIN_JAR" -o ! -f "$DOTTY_LIB_JAR" -o ! -f "$TEST_JAR" ]; then + echo ".packages file corrupted, rebuilding" + build_all + fi fi ################# After this point, jar variables will be set ################# @@ -142,10 +147,6 @@ if [ "$SCALA_LIBRARY_JAR" == "" ]; then SCALA_LIBRARY_JAR=$(find_jar "$HOME/.ivy2/cache/org.scala-lang/scala-library/jars" "scala-library-$SCALA_VERSION.jar") fi -if [ "$SCALA_REFLECT_JAR" == "" ]; then - SCALA_REFLECT_JAR=$(find_jar "$HOME/.ivy2/cache/org.scala-lang/scala-reflect/jars" "scala-reflect-$SCALA_VERSION.jar") -fi - if [ "$SCALA_ASM_JAR" == "" ]; then SCALA_ASM_JAR=$(find_jar "$HOME/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles" "scala-asm-$SCALA_ASM_VERSION.jar") fi diff --git a/bin/dotc b/bin/dotc index 39e3f8074b53..47fe1cea6a46 100755 --- a/bin/dotc +++ b/bin/dotc @@ -20,11 +20,10 @@ CompilerMain=dotty.tools.dotc.Main FromTasty=dotty.tools.dotc.FromTasty ReplMain=dotty.tools.dotc.repl.Main -if [ ! -f "$SCALA_LIBRARY_JAR" -o ! -f "$SCALA_REFLECT_JAR" -o ! -f "$SCALA_ASM_JAR" -o ! 
-f "$SBT_INTERFACE_JAR" ] +if [ ! -f "$SCALA_LIBRARY_JAR" -o ! -f "$SCALA_ASM_JAR" -o ! -f "$SBT_INTERFACE_JAR" ] then echo To use this script please set echo SCALA_LIBRARY_JAR to point to scala-library-$SCALA_VERSION.jar "(currently $SCALA_LIBRARY_JAR)" - echo SCALA_REFLECT_JAR to point to scala-reflect-$SCALA_VERSION.jar "(currently $SCALA_REFLECT_JAR)" echo SCALA_ASM_JAR to point to scala-asm-$SCALA_ASM_VERSION.jar "(currently $SCALA_ASM_JAR)" echo SBT_INTERFACE_JAR to point to interface-$SBT_VERSION.jar "(currently $SBT_INTERFACE_JAR)" fi @@ -116,9 +115,9 @@ trap onExit INT classpathArgs () { if [[ "true" == "$bootstrapped" ]]; then check_jar "dotty-bootstrapped" "$DOTTY_JAR" "target" 'build_jar "test:runMain dotc.build" target' &> /dev/null - toolchain="$DOTTY_JAR:$DOTTY_LIB_JAR:$SCALA_LIBRARY_JAR:$SCALA_REFLECT_JAR:$SCALA_ASM_JAR:$SBT_INTERFACE_JAR" + toolchain="$DOTTY_JAR:$DOTTY_LIB_JAR:$SCALA_LIBRARY_JAR:$SCALA_ASM_JAR:$SBT_INTERFACE_JAR" else - toolchain="$SCALA_LIBRARY_JAR:$DOTTY_LIB_JAR:$SCALA_REFLECT_JAR:$SCALA_ASM_JAR:$SBT_INTERFACE_JAR" + toolchain="$SCALA_LIBRARY_JAR:$DOTTY_LIB_JAR:$SCALA_ASM_JAR:$SBT_INTERFACE_JAR" fi bcpJars="$INTERFACES_JAR:$MAIN_JAR:$DOTTY_LIB_JAR" cpJars="$INTERFACES_JAR:$MAIN_JAR:$DOTTY_LIB_JAR:$TEST_JAR" diff --git a/bin/test/TestScripts.scala b/bin/test/TestScripts.scala index 6543ac7b7899..be0633450e7f 100644 --- a/bin/test/TestScripts.scala +++ b/bin/test/TestScripts.scala @@ -17,9 +17,8 @@ class TestScripts { private def executeScript(script: String): (Int, String) = { val sb = new StringBuilder - val ret = Process(script) ! ProcessLogger { line => println(line); sb.append(line) } + val ret = Process(script) ! 
ProcessLogger { line => println(line); sb.append(line + "\n") } val output = sb.toString - println(output) // For CI, otherwise "terminal inactive for 5m0s, build cancelled" (ret, output) } @@ -59,7 +58,7 @@ class TestScripts { val (retDotr, dotrOutput) = executeScript("./bin/dotr HelloWorld") assert( - retDotr == 0 && dotrOutput == "hello world", + retDotr == 0 && dotrOutput == "hello world\n", s"Running hello world exited with status: $retDotr and output: $dotrOutput" ) } @@ -93,8 +92,19 @@ class TestScripts { /** dotc script should work after corrupting .packages */ @Test def reCreatesPackagesIfNecessary = doUnlessWindows { - executeScript("sed -i.old 's/2.1/2.X/' ./.packages") // That's going to replace 2.11 with 2.X1 - val (retFirstBuild, _) = executeScript("./bin/dotc ./tests/pos/HelloWorld.scala") + import java.nio.file.{Paths, Files} + import java.nio.charset.StandardCharsets + val contents = + """|/Users/fixel/Projects/dotty/interfaces/target/dotty-interfaces-0.1.1-bin-SNAPSHOT-X.jar + |/Users/fixel/Projects/dotty/compiler/target/scala-2.11/dotty-compiler_2.1X-0.1.1-bin-SNAPSHOT.jar + |/Users/fixel/Projects/dotty/library/target/scala-2.11/dotty-library_2.1X-0.1.1-bin-SNAPSHOT.jar + |/Users/fixel/Projects/dotty/doc-tool/target/scala-2.11/dotty-doc_2.1X-0.1.1-bin-SNAPSHOT-tests.jar""" + .stripMargin + + Files.write(Paths.get("./.packages"), contents.getBytes(StandardCharsets.UTF_8)) + + val (retFirstBuild, output) = executeScript("./bin/dotc ./tests/pos/HelloWorld.scala") + assert(output.contains(".packages file corrupted")) assert(retFirstBuild == 0, "building dotc failed") } } diff --git a/project/Build.scala b/project/Build.scala index c96df494aa23..6188eabfd6d5 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -621,6 +621,7 @@ object Build { settings( publishArtifact := false, parallelExecution in Test := false, + testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), libraryDependencies += "com.novocode" % "junit-interface" 
% "0.11" % "test" ) From a16e3802be295258c9716981052becafcce85b02 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Mon, 1 May 2017 14:54:03 +0200 Subject: [PATCH 10/12] Rewrite io files to compile under Dotty --- .../dotty/tools/dotc/util/WeakHashSet.scala | 10 +- .../src/dotty/tools/io/AbstractFile.scala | 1 - compiler/src/dotty/tools/io/IOStats.scala | 29 -- compiler/src/dotty/tools/io/Path.scala | 32 +- compiler/src/dotty/tools/io/Statistics.scala | 284 ------------------ compiler/src/dotty/tools/io/Streamable.scala | 2 +- .../src/dotty/tools/io/VirtualDirectory.scala | 6 +- compiler/src/dotty/tools/io/VirtualFile.scala | 2 +- compiler/src/dotty/tools/io/ZipArchive.scala | 77 +---- 9 files changed, 21 insertions(+), 422 deletions(-) delete mode 100644 compiler/src/dotty/tools/io/IOStats.scala delete mode 100644 compiler/src/dotty/tools/io/Statistics.scala diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index 712799aaed31..641dc7654439 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -86,7 +86,7 @@ final class WeakHashSet[A >: Null <: AnyRef](val initialCapacity: Int, val loadF /** * remove a single entry from a linked list in a given bucket */ - private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) { + private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]): Unit = { prevEntry match { case null => table(bucket) = entry.tail case _ => prevEntry.tail = entry.tail @@ -97,7 +97,7 @@ final class WeakHashSet[A >: Null <: AnyRef](val initialCapacity: Int, val loadF /** * remove entries associated with elements that have been gc'ed */ - private[this] def removeStaleEntries() { + private[this] def removeStaleEntries(): Unit = { def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]] @tailrec @@ -122,7 +122,7 @@ final class WeakHashSet[A >: Null <: AnyRef](val initialCapacity: 
Int, val loadF /** * Double the size of the internal table */ - private[this] def resize() { + private[this] def resize(): Unit = { val oldTable = table table = new Array[Entry[A]](oldTable.size * 2) threshhold = computeThreshHold @@ -207,7 +207,7 @@ final class WeakHashSet[A >: Null <: AnyRef](val initialCapacity: Int, val loadF val bucket = bucketFor(hash) val oldHead = table(bucket) - def add() { + def add() = { table(bucket) = new Entry(elem, hash, oldHead, queue) count += 1 if (count > threshhold) resize() @@ -228,7 +228,7 @@ final class WeakHashSet[A >: Null <: AnyRef](val initialCapacity: Int, val loadF def +=(elem: A) = this + elem // from scala.reflect.interanl.Set - override def addEntry(x: A) { this += x } + override def addEntry(x: A) = { this += x } // remove an element from this set and return this set override def -(elem: A): this.type = elem match { diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index a026e877c0b1..1ae9bcea423b 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -114,7 +114,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file denote an existing file? */ def exists: Boolean = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) (file eq null) || file.exists } diff --git a/compiler/src/dotty/tools/io/IOStats.scala b/compiler/src/dotty/tools/io/IOStats.scala deleted file mode 100644 index 7196b65f4d09..000000000000 --- a/compiler/src/dotty/tools/io/IOStats.scala +++ /dev/null @@ -1,29 +0,0 @@ -package dotty.tools.io - -// Due to limitations in the Statistics machinery, these are only -// reported if this patch is applied. 
-// -// --- a/src/reflect/scala/reflect/internal/util/Statistics.scala -// +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala -// @@ -109,7 +109,7 @@ quant) -// * Quantities with non-empty prefix are printed in the statistics info. -// */ -// trait Quantity { -// - if (enabled && prefix.nonEmpty) { -// + if (prefix.nonEmpty) { -// val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" -// qs(key) = this -// } -// @@ -243,7 +243,7 @@ quant) -// * -// * to remove all Statistics code from build -// */ -// - final val canEnable = _enabled -// + final val canEnable = true // _enabled -// -// We can commit this change as the first diff reverts a fix for an IDE memory leak. -private[io] object IOStats { - val fileExistsCount = Statistics.newCounter("# File.exists calls") - val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls") - val fileIsFileCount = Statistics.newCounter("# File.isFile calls") -} diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index 1786269449d4..034360a67515 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -54,18 +54,8 @@ object Path { def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = try { - def isFile = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) - jfile.isFile - } - - def isDirectory = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) - jfile.isDirectory - } - - if (isFile) new File(jfile) - else if (isDirectory) new Directory(jfile) + if (jfile.isFile) new File(jfile) + else if (jfile.isDirectory) new Directory(jfile) else new Path(jfile) } catch { case ex: SecurityException => new Path(jfile) } @@ -195,19 +185,11 @@ class Path private[io] (val jfile: JFile) { // Boolean tests def canRead = jfile.canRead() def canWrite = jfile.canWrite() - def exists = { - if (Statistics.canEnable) 
Statistics.incCounter(IOStats.fileExistsCount) - try jfile.exists() catch { case ex: SecurityException => false } - } - - def isFile = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) - try jfile.isFile() catch { case ex: SecurityException => false } - } - def isDirectory = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) - try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." } - } + def exists = try jfile.exists() catch { case ex: SecurityException => false } + def isFile = try jfile.isFile() catch { case ex: SecurityException => false } + def isDirectory = + try jfile.isDirectory() + catch { case ex: SecurityException => jfile.getPath == "." } def isAbsolute = jfile.isAbsolute() def isEmpty = path.length == 0 diff --git a/compiler/src/dotty/tools/io/Statistics.scala b/compiler/src/dotty/tools/io/Statistics.scala deleted file mode 100644 index 9f1c8d9d7340..000000000000 --- a/compiler/src/dotty/tools/io/Statistics.scala +++ /dev/null @@ -1,284 +0,0 @@ -package dotty.tools.io - -import scala.collection.mutable - -object Statistics { - - type TimerSnapshot = (Long, Long) - - /** If enabled, increment counter by one */ - @inline final def incCounter(c: Counter) { - if (_enabled && c != null) c.value += 1 - } - - /** If enabled, increment counter by given delta */ - @inline final def incCounter(c: Counter, delta: Int) { - if (_enabled && c != null) c.value += delta - } - - /** If enabled, increment counter in map `ctrs` at index `key` by one */ - @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) = - if (_enabled && ctrs != null) ctrs(key).value += 1 - - /** If enabled, start subcounter. While active it will track all increments of - * its base counter. - */ - @inline final def startCounter(sc: SubCounter): (Int, Int) = - if (_enabled && sc != null) sc.start() else null - - /** If enabled, stop subcounter from tracking its base counter. 
*/ - @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) { - if (_enabled && sc != null) sc.stop(start) - } - - /** If enabled, start timer */ - @inline final def startTimer(tm: Timer): TimerSnapshot = - if (_enabled && tm != null) tm.start() else null - - /** If enabled, stop timer */ - @inline final def stopTimer(tm: Timer, start: TimerSnapshot) { - if (_enabled && tm != null) tm.stop(start) - } - - /** If enabled, push and start a new timer in timer stack */ - @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot = - if (_enabled && timers != null) timers.push(timer) else null - - /** If enabled, stop and pop timer from timer stack */ - @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) { - if (_enabled && timers != null) timers.pop(prev) - } - - /** Create a new counter that shows as `prefix` and is active in given phases */ - def newCounter(prefix: String, phases: String*) = new Counter(prefix, phases) - - /** Create a new relative counter that shows as `prefix` and is active - * in the same phases as its base counter. Relative counters print as percentages - * of their base counters. - */ - def newRelCounter(prefix: String, ctr: Counter): Counter = new RelCounter(prefix, ctr) - - /** Create a new subcounter that shows as `prefix` and is active - * in the same phases as its base counter. Subcounters can track - * increments of their base counters and print as percentages - * of their base counters. - */ - def newSubCounter(prefix: String, ctr: Counter): SubCounter = new SubCounter(prefix, ctr) - - /** Create a new counter that shows as `prefix` and is active in given phases */ - def newTimer(prefix: String, phases: String*): Timer = new Timer(prefix, phases) - - /** Create a new subtimer that shows as `prefix` and is active - * in the same phases as its base timer. Subtimers can track - * increments of their base timers and print as percentages - * of their base timers. 
- */ - def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer) - - /** Create a new stackable that shows as `prefix` and is active - * in the same phases as its base timer. Stackable timers are subtimers - * that can be stacked in a timerstack, and that print aggregate, as well as specific - * durations. - */ - def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer) - - /** Create a new view that shows as `prefix` and is active in given phases. - * The view always reflects the current value of `quant` as a quantity. - */ - def newView(prefix: String, phases: String*)(quant: => Any): View = new View(prefix, phases, -quant) - - /** Create a new quantity map that shows as `prefix` and is active in given phases. - */ - def newQuantMap[K, V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[K, V] = new QuantMap(prefix, phases, initValue) - - /** Same as newQuantMap, where the key type is fixed to be Class[_] */ - def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue) - - /** Create a new timer stack */ - def newTimerStack() = new TimerStack() - - def allQuantities: Iterable[Quantity] = - for ((_, q) <- qs if q.underlying == q; - r <- q :: q.children.toList if r.prefix.nonEmpty) yield r - - private def showPercent(x: Long, base: Long) = - if (base == 0) "" else f" (${x.toDouble / base.toDouble * 100}%2.1f%%)" - - /** The base trait for quantities. - * Quantities with non-empty prefix are printed in the statistics info. 
- */ - trait Quantity { - if (enabled && prefix.nonEmpty) { - val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" - qs(key) = this - } - val prefix: String - val phases: Seq[String] - def underlying: Quantity = this - def showAt(phase: String) = phases.isEmpty || (phases contains phase) - def line = f"$prefix%-30s: ${this}" - val children = new mutable.ListBuffer[Quantity] - } - - trait SubQuantity extends Quantity { - protected def underlying: Quantity - underlying.children += this - } - - class Counter(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Counter] { - var value: Int = 0 - def compare(that: Counter): Int = - if (this.value < that.value) -1 - else if (this.value > that.value) 1 - else 0 - override def equals(that: Any): Boolean = - that match { - case that: Counter => (this compare that) == 0 - case _ => false - } - override def hashCode = value - override def toString = value.toString - } - - class View(val prefix: String, val phases: Seq[String], quant: => Any) extends Quantity { - override def toString = quant.toString - } - - private class RelCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity { - override def toString = - if (value == 0) "0" - else { - assert(underlying.value != 0, prefix+"/"+underlying.line) - f"${value.toFloat / underlying.value}%2.1f" - } - } - - class SubCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity { - def start() = (value, underlying.value) - def stop(prev: (Int, Int)) { - val (value0, uvalue0) = prev - value = value0 + underlying.value - uvalue0 - } - override def toString = - value + showPercent(value.toLong, underlying.value.toLong) - } - - class Timer(val prefix: String, val phases: Seq[String]) extends Quantity { - var nanos: Long = 0 - var timings = 0 - def start() = { - (nanos, System.nanoTime()) - } - def stop(prev: TimerSnapshot) { - 
val (nanos0, start) = prev - nanos = nanos0 + System.nanoTime() - start - timings += 1 - } - protected def show(ns: Long) = s"${ns/1000000}ms" - override def toString = s"$timings spans, ${show(nanos)}" - } - - class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity { - override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos) - } - - class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] { - var specificNanos: Long = 0 - def compare(that: StackableTimer): Int = - if (this.specificNanos < that.specificNanos) -1 - else if (this.specificNanos > that.specificNanos) 1 - else 0 - override def equals(that: Any): Boolean = - that match { - case that: StackableTimer => (this compare that) == 0 - case _ => false - } - override def hashCode = specificNanos.## - override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific" - } - - /** A mutable map quantity where missing elements are automatically inserted - * on access by executing `initValue`. - */ - class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V) - extends mutable.HashMap[K, V] with mutable.SynchronizedMap[K, V] with Quantity { - override def default(key: K) = { - val elem = initValue - this(key) = elem - elem - } - override def toString = - this.toSeq.sortWith(_._2 > _._2).map { - case (cls: Class[_], elem) => - s"${cls.toString.substring(cls.toString.lastIndexOf("$") + 1)}: $elem" - case (key, elem) => - s"$key: $elem" - }.mkString(", ") - } - - /** A stack of timers, all active, where a timer's specific "clock" - * is stopped as long as it is buried by some other timer in the stack, but - * its aggregate clock keeps on ticking. 
- */ - class TimerStack { - private var elems: List[(StackableTimer, Long)] = Nil - /** Start given timer and push it onto the stack */ - def push(t: StackableTimer): TimerSnapshot = { - elems = (t, 0L) :: elems - t.start() - } - /** Stop and pop top timer in stack - */ - def pop(prev: TimerSnapshot) = { - val (nanos0, start) = prev - val duration = System.nanoTime() - start - val (topTimer, nestedNanos) :: rest = elems - topTimer.nanos = nanos0 + duration - topTimer.specificNanos += duration - nestedNanos - topTimer.timings += 1 - elems = rest match { - case (outerTimer, outerNested) :: elems1 => - (outerTimer, outerNested + duration) :: elems1 - case Nil => - Nil - } - } - } - - private var _enabled = false - private val qs = new mutable.HashMap[String, Quantity] - - /** replace with - * - * final val canEnable = false - * - * to remove all Statistics code from build - */ - final val canEnable = _enabled - - /** replace with - * - * final def hotEnabled = _enabled - * - * and rebuild, to also count tiny but super-hot methods - * such as phase, flags, owner, name. 
- */ - final val hotEnabled = false - - def enabled = _enabled - def enabled_=(cond: Boolean) = { - if (cond && !_enabled) { - val start = System.nanoTime() - var total = 0L - for (i <- 1 to 10000) { - val time = System.nanoTime() - total += System.nanoTime() - time - } - val total2 = System.nanoTime() - start - println("Enabling statistics, measuring overhead = "+ - total/10000.0+"ns to "+total2/10000.0+"ns per timer") - _enabled = true - } - } -} diff --git a/compiler/src/dotty/tools/io/Streamable.scala b/compiler/src/dotty/tools/io/Streamable.scala index a61a620e06ec..7143f04062e3 100644 --- a/compiler/src/dotty/tools/io/Streamable.scala +++ b/compiler/src/dotty/tools/io/Streamable.scala @@ -54,7 +54,7 @@ object Streamable { lazy val in = bufferedInput() var offset = 0 - def loop() { + def loop(): Unit = { if (offset < len) { val read = in.read(arr, offset, len - offset) if (read >= 0) { diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala index bfd3a3012a39..a87bb799f8d7 100644 --- a/compiler/src/dotty/tools/io/VirtualDirectory.scala +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -33,10 +33,10 @@ extends AbstractFile { override def output = sys.error("directories cannot be written") /** Does this abstract file denote an existing file? */ - def create() { unsupported() } + def create() = { unsupported() } /** Delete the underlying file or directory (recursively). */ - def delete() { unsupported() } + def delete() = { unsupported() } /** Returns an abstract file with the given name. It does not * check that it exists. 
@@ -66,7 +66,7 @@ extends AbstractFile { dir } - def clear() { + def clear() = { files.clear() } } diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala index 3954c64eaab6..e7af2a778f08 100644 --- a/compiler/src/dotty/tools/io/VirtualFile.scala +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -43,7 +43,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF override def output: OutputStream = { new ByteArrayOutputStream() { - override def close() { + override def close() = { super.close() content = toByteArray() } diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index 3d1f6b07e60b..6b7c838b638d 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -153,11 +153,11 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) { val root = new DirEntry("/") val dirs = mutable.HashMap[String, DirEntry]("/" -> root) val zipFile = openZipFile() - val enum = zipFile.entries() + val entries = zipFile.entries() try { - while (enum.hasMoreElements) { - val zipEntry = enum.nextElement + while (entries.hasMoreElements) { + val zipEntry = entries.nextElement val dir = getDir(dirs, zipEntry) if (zipEntry.isDirectory) dir else { @@ -195,74 +195,6 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) { case _ => false } } -///** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -//final class URLZipArchive(val url: URL) extends ZipArchive(null) { -// def iterator: Iterator[Entry] = { -// val root = new DirEntry("/") -// val dirs = mutable.HashMap[String, DirEntry]("/" -> root) -// val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) -// -// @tailrec def loop() { -// val zipEntry = in.getNextEntry() -// class EmptyFileEntry() extends Entry(zipEntry.getName) { -// override def 
toByteArray: Array[Byte] = null -// override def sizeOption = Some(0) -// } -// class FileEntry() extends Entry(zipEntry.getName) { -// override val toByteArray: Array[Byte] = { -// val len = zipEntry.getSize().toInt -// val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len) -// var offset = 0 -// -// def loop() { -// if (offset < len) { -// val read = in.read(arr, offset, len - offset) -// if (read >= 0) { -// offset += read -// loop() -// } -// } -// } -// loop() -// -// if (offset == arr.length) arr -// else throw new IOException("Input stream truncated: read %d of %d bytes".format(offset, len)) -// } -// override def sizeOption = Some(zipEntry.getSize().toInt) -// } -// -// if (zipEntry != null) { -// val dir = getDir(dirs, zipEntry) -// if (zipEntry.isDirectory) -// dir -// else { -// val f = if (zipEntry.getSize() == 0) new EmptyFileEntry() else new FileEntry() -// dir.entries(f.name) = f -// } -// in.closeEntry() -// loop() -// } -// } -// -// loop() -// try root.iterator -// finally dirs.clear() -// } -// -// def name = url.getFile() -// def path = url.getPath() -// def input = url.openStream() -// def lastModified = -// try url.openConnection().getLastModified() -// catch { case _: IOException => 0 } -// -// override def canEqual(other: Any) = other.isInstanceOf[URLZipArchive] -// override def hashCode() = url.hashCode -// override def equals(that: Any) = that match { -// case x: URLZipArchive => url == x.url -// case _ => false -// } -//} final class ManifestResources(val url: URL) extends ZipArchive(null) { def iterator = { @@ -274,12 +206,11 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { for (zipEntry <- iter) { val dir = getDir(dirs, zipEntry) if (!zipEntry.isDirectory) { - class FileEntry() extends Entry(zipEntry.getName) { + val f = new Entry(zipEntry.getName) { override def lastModified = zipEntry.getTime() override def input = resourceInputStream(path) override def sizeOption = None } - val f = new 
FileEntry() dir.entries(f.name) = f } } From 629fbb37beeff4dc3acbb4227d44b77d462724c4 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 2 May 2017 14:32:36 +0200 Subject: [PATCH 11/12] Sync blacklisted files --- .../dotty/tools/dotc/CompilationTests.scala | 26 ++++++++++++++++++- project/Build.scala | 2 +- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index f16974fdf7be..c069c6a4461f 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -214,6 +214,30 @@ class CompilationTests extends ParallelTesting { compileDir("../library/src", allowDeepSubtypes.and("-Ycheck-reentrant", "-strict", "-priorityclasspath", defaultOutputDir)) + def sources(paths: JStream[Path], excludedFiles: List[String] = Nil): List[String] = + paths.iterator().asScala + .filter(path => + (path.toString.endsWith(".scala") || path.toString.endsWith(".java")) + && !excludedFiles.contains(path.getFileName.toString)) + .map(_.toString).toList + + val compilerDir = Paths.get("../compiler/src") + val compilerSources = sources(Files.walk(compilerDir)) + + val backendDir = Paths.get("../scala-backend/src/compiler/scala/tools/nsc/backend") + val backendJvmDir = Paths.get("../scala-backend/src/compiler/scala/tools/nsc/backend/jvm") + + // NOTE: Keep these exclusions synchronized with the ones in the sbt build (Build.scala) + val backendExcluded = + List("JavaPlatform.scala", "Platform.scala", "ScalaPrimitives.scala") + val backendJvmExcluded = + List("BCodeICodeCommon.scala", "GenASM.scala", "GenBCode.scala", "ScalacBackendInterface.scala", "BackendStats.scala", "BCodeAsmEncode.scala") + + val backendSources = + sources(Files.list(backendDir), excludedFiles = backendExcluded) + val backendJvmSources = + sources(Files.list(backendJvmDir), excludedFiles = backendJvmExcluded) + def dotty1 = { compileList( 
"dotty1", @@ -349,7 +373,7 @@ class CompilationTests extends ParallelTesting { val backendExcluded = List("JavaPlatform.scala", "Platform.scala", "ScalaPrimitives.scala") val backendJvmExcluded = - List("BCodeICodeCommon.scala", "GenASM.scala", "GenBCode.scala", "ScalacBackendInterface.scala") + List("BCodeICodeCommon.scala", "GenASM.scala", "GenBCode.scala", "ScalacBackendInterface.scala", "BackendStats.scala") val backendSources0 = sources(Files.list(backendDir), excludedFiles = backendExcluded) diff --git a/project/Build.scala b/project/Build.scala index 6188eabfd6d5..3b99b1e4b540 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -373,7 +373,7 @@ object Build { val files = ((backendDir * (allScalaFiles - "JavaPlatform.scala" - "Platform.scala" - "ScalaPrimitives.scala")) +++ (backendDir / "jvm") * - (allScalaFiles - "BCodeICodeCommon.scala" - "GenASM.scala" - "GenBCode.scala" - "ScalacBackendInterface.scala" - "BackendStats.scala" - "BCodeAsmEncode.scala") + (allScalaFiles - "BCodeICodeCommon.scala" - "GenASM.scala" - "GenBCode.scala" - "ScalacBackendInterface.scala" - "BackendStats.scala") ).get val pairs = files.pair(sbt.Path.rebase(submoduleCompilerDir, outputDir)) From ea59cb352f94bf75bb22f6617607c63c2e272372 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Tue, 9 May 2017 12:09:12 +0200 Subject: [PATCH 12/12] Address review feedback --- compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala | 4 ++-- project/Build.scala | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 088b22b5ce3a..5992fe350c7d 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -8,7 +8,7 @@ import Names._, NameOps._, StdNames._ import scala.collection.{Set, mutable} -import dotty.tools.io.{AbstractFile, Path, ZipArchive} +import 
dotty.tools.io.{AbstractFile, Path, ZipArchive, PlainFile} import java.io.File import java.util.{Arrays, Comparator} @@ -101,7 +101,7 @@ class ExtractDependencies extends Phase { val classSegments = Path(ze.path).segments binaryDependency(zipFile, className(classSegments)) } - case pf: dotty.tools.io.PlainFile => + case pf: PlainFile => val packages = dep.ownersIterator .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length // We can recover the fully qualified name of a classfile from diff --git a/project/Build.scala b/project/Build.scala index 3b99b1e4b540..2ee71b094796 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -621,6 +621,8 @@ object Build { settings( publishArtifact := false, parallelExecution in Test := false, + // Increase verbosity of test output, started and passed tests are + // logged with: testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test"