From 51337013789f070062230223854eacf4f2e8a1d7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 11:24:15 +0100 Subject: [PATCH 01/22] Fix desugaring of refined types with "&"-parent. We need to generate more than one parent class. --- src/dotty/tools/dotc/ast/Desugar.scala | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala index 7cdedb19a6cb..cd198818aba4 100644 --- a/src/dotty/tools/dotc/ast/Desugar.scala +++ b/src/dotty/tools/dotc/ast/Desugar.scala @@ -864,18 +864,19 @@ object desugar { * @param parentType The type of `parent` */ def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(implicit ctx: Context): TypeDef = { - def stripToCore(tp: Type): Type = tp match { - case tp: RefinedType if tp.argInfos.nonEmpty => tp // parameterized class type - case tp: TypeRef if tp.symbol.isClass => tp // monomorphic class type + def stripToCore(tp: Type): List[Type] = tp match { + case tp: RefinedType if tp.argInfos.nonEmpty => tp :: Nil // parameterized class type + case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type case tp: TypeProxy => stripToCore(tp.underlying) - case _ => defn.AnyType + case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) + case _ => defn.AnyType :: Nil } - val parentCore = stripToCore(parent.tpe) + val parentCores = stripToCore(parent.tpe) val untpdParent = TypedSplice(parent) - val (classParent, self) = - if (parent.tpe eq parentCore) (untpdParent, EmptyValDef) - else (TypeTree(parentCore), ValDef(nme.WILDCARD, untpdParent, EmptyTree)) - val impl = Template(emptyConstructor, classParent :: Nil, self, refinements) + val (classParents, self) = + if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) + else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) + val impl = Template(emptyConstructor, classParents, self, refinements) TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) } From 723cbb184ab174ff0d461403a0b7e813c4964919 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 11:28:35 +0100 Subject: [PATCH 02/22] Disallow refinements of types or methods that do not appear in parent. We planned this for a long time but never implemented it. Instead, we sometimes issued an error in Splitter, namely if reflection would have been needed to access the member. It turns out that some tests (e.g. neg/t0625) fail -Ycheck (we knew that before and disabled it) but also fail Pickling because they generate orphan PolyParams. So rather than patching this up it seems now is a good time to enforce the restriction for real.
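To make the desugaring fix above concrete, here is a small standalone sketch, not part of the patch, in which the toy `Tp` ADT merely stands in for the compiler's `Type`. It shows how the reworked `stripToCore` collects one class core per operand of an intersection:

sealed trait Tp
case class ClassRef(name: String) extends Tp     // stands in for a class TypeRef
case class Alias(underlying: Tp) extends Tp      // stands in for any other TypeProxy
case class And(tp1: Tp, tp2: Tp) extends Tp      // stands in for AndType

object StripToCoreSketch {
  def stripToCore(tp: Tp): List[Tp] = tp match {
    case tp: ClassRef  => tp :: Nil                             // class type: one core
    case Alias(under)  => stripToCore(under)                    // look through proxies
    case And(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) // both sides contribute
  }

  // stripToCore(And(ClassRef("A"), Alias(ClassRef("B"))))
  //   == List(ClassRef("A"), ClassRef("B"))
  // so `A & B { def c: Int }` now desugars to a refinement trait with
  // parents A and B rather than a single parent.
}

With a single-core parent the behavior is unchanged; only intersections now yield more than one parent for the generated refinement trait.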
--- src/dotty/tools/dotc/typer/Typer.scala | 4 ++ test/dotc/tests.scala | 17 ++----- tests/neg/structural.scala | 70 ++++++++++++++++++++++++++ tests/neg/t0586.scala | 9 ---- tests/neg/t0625.scala | 8 --- tests/neg/t1131.scala | 4 -- tests/neg/typers.scala | 4 -- tests/pos/i262-null-subtyping.scala | 11 ++-- tests/pos/structural.scala | 21 -------- tests/pos/t1053.scala | 3 +- tests/pos/t2810.scala | 8 --- tests/pos/typers.scala | 1 + tests/pos/zoo.scala | 27 +++++----- 13 files changed, 98 insertions(+), 89 deletions(-) create mode 100644 tests/neg/structural.scala delete mode 100644 tests/neg/t0586.scala delete mode 100644 tests/neg/t0625.scala delete mode 100644 tests/neg/t1131.scala delete mode 100644 tests/pos/structural.scala delete mode 100644 tests/pos/t2810.scala diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 1e07cbf793b1..7d1e950f4cf5 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -793,6 +793,10 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit typr.println(s"adding refinement $refinement") checkRefinementNonCyclic(refinement, refineCls, seen) val rsym = refinement.symbol + if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty) { + println(refineCls.baseClasses) + ctx.error(i"refinement $rsym without matching type in parent $parent", refinement.pos) + } val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info RefinedType(parent, rsym.name, rt => rinfo.substThis(refineCls, SkolemType(rt))) // todo later: check that refinement is within bounds diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 8b3965e88d1e..2b917b79513f 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -59,7 +59,6 @@ class tests extends CompilerTest { @Test def pos_overrides() = compileFile(posDir, "overrides") @Test def pos_javaOverride() = compileDir(posDir + "java-override") @Test def pos_templateParents() = compileFile(posDir, "templateParents") - @Test def pos_structural() = compileFile(posDir, "structural") @Test def pos_overloadedAccess = compileFile(posDir, "overloadedAccess") @Test def pos_approximateUnion = compileFile(posDir, "approximateUnion") @Test def pos_tailcall = compileDir(posDir + "tailcall/") @@ -87,16 +86,10 @@ class tests extends CompilerTest { @Test def neg_over = compileFile(negDir, "over", xerrors = 3) @Test def neg_overrides = compileFile(negDir, "overrides", xerrors = 11) @Test def neg_projections = compileFile(negDir, "projections", xerrors = 1) - @Test def neg_i39 = compileFile(negDir, "i39", xerrors = 1) - @Test def neg_i50_volatile = compileFile(negDir, "i50-volatile", xerrors = 4) + @Test def neg_i39 = compileFile(negDir, "i39", xerrors = 2) + @Test def neg_i50_volatile = compileFile(negDir, "i50-volatile", xerrors = 6) @Test def neg_t0273_doubledefs = compileFile(negDir, "t0273", xerrors = 1) - @Test def neg_t0586_structural = compileFile(negDir, "t0586", xerrors = 1) - @Test def neg_t0625_structural = compileFile(negDir, "t0625", xerrors = 1)( - defaultOptions = noCheckOptions) - // -Ycheck fails because there are structural types involving higher-kinded types. - // these are illegal, but are tested only later. 
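In source terms, the check added to Typer above means that every refined type or method must already have a matching member in the parent type. A hedged illustration with made-up names, mirroring the tests moved to neg/structural.scala below:

trait Base { def close(): Unit; type T }

object Allowed {
  val a: Base { def close(): Unit } = ???   // ok: `close` has a matching member in Base
  val b: Base { type T = Int }      = ???   // ok: `T` refines an abstract type of Base
}

object Rejected {
  // error: refinement `close` without matching type in parent AnyRef
  val c: AnyRef { def close(): Unit } = ???
}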
- @Test def neg_t1131_structural = compileFile(negDir, "t1131", xerrors = 1) - @Test def neg_zoo = compileFile(negDir, "zoo", xerrors = 1) + @Test def neg_zoo = compileFile(negDir, "zoo", xerrors = 12) @Test def neg_t1192_legalPrefix = compileFile(negDir, "t1192", xerrors = 1) @Test def neg_tailcall_t1672b = compileFile(negDir, "tailcall/t1672b", xerrors = 6) @Test def neg_tailcall_t3275 = compileFile(negDir, "tailcall/t3275", xerrors = 1) @@ -108,13 +101,13 @@ class tests extends CompilerTest { @Test def neg_t1843_variances = compileFile(negDir, "t1843-variances", xerrors = 1) @Test def neg_t2660_ambi = compileFile(negDir, "t2660", xerrors = 2) @Test def neg_t2994 = compileFile(negDir, "t2994", xerrors = 2) - @Test def neg_subtyping = compileFile(negDir, "subtyping", xerrors = 2) + @Test def neg_subtyping = compileFile(negDir, "subtyping", xerrors = 4) @Test def neg_variances = compileFile(negDir, "variances", xerrors = 2) @Test def neg_badAuxConstr = compileFile(negDir, "badAuxConstr", xerrors = 2) @Test def neg_typetest = compileFile(negDir, "typetest", xerrors = 1) @Test def neg_t1569_failedAvoid = compileFile(negDir, "t1569-failedAvoid", xerrors = 1) @Test def neg_cycles = compileFile(negDir, "cycles", xerrors = 8) - @Test def neg_boundspropagation = compileFile(negDir, "boundspropagation", xerrors = 4) + @Test def neg_boundspropagation = compileFile(negDir, "boundspropagation", xerrors = 5) @Test def neg_refinedSubtyping = compileFile(negDir, "refinedSubtyping", xerrors = 2) @Test def neg_i0091_infpaths = compileFile(negDir, "i0091-infpaths", xerrors = 3) @Test def neg_i0248_inherit_refined = compileFile(negDir, "i0248-inherit-refined", xerrors = 4) diff --git a/tests/neg/structural.scala b/tests/neg/structural.scala new file mode 100644 index 000000000000..1d25062909f3 --- /dev/null +++ b/tests/neg/structural.scala @@ -0,0 +1,70 @@ +package p1 { + +object test123 { + type A = { def a: Int } + def f(a: A): A = a +} + +object structural2 { + type A = { def a: Int } + + type B = { + def b: Int + } + + type AB = A & B + + def f(ab: AB): AB = ab + + f(new { + def a = 43 + def b = 42 + }) +} +} + +package p2 { +object RClose { + type ReflectCloseable = { def close(): Unit } + def withReflectCloseable[T <: ReflectCloseable, R](s: T)(action: T => R): R = + try { + action(s) + } finally { + s.close() + } +} +} + +package p3 { +object Test { + def idMap[C[_],T](m: { def map[U](f: T => U): C[U] }): C[T] = m.map(t => t) + + def main(args: Array[String]): Unit = { + idMap(Some(5)) + idMap(Responder.constant(5)) + } +} +} +package p4 { + +trait A { self: Any { def p: Any } => + def f(b: => Unit): Unit = {} + f { p } // error: cannot access member 'p' from structural type +} +} + +package p5 { +// t2810 +object Test { + val closeable1: { def close(): Unit } = new scala.io.Source { val iter: Iterator[Char] = "".iterator } + val closeable2: { def close(): Unit } = new java.io.Closeable { def close() = {} } +} +} + +package p6 { + + class Refinements { + val y: C { val x: T; type T } // was adeprecated warning: illegal forward reference in refinement; now illegal + } + +} diff --git a/tests/neg/t0586.scala b/tests/neg/t0586.scala deleted file mode 100644 index 540e225a1465..000000000000 --- a/tests/neg/t0586.scala +++ /dev/null @@ -1,9 +0,0 @@ -object RClose { - type ReflectCloseable = { def close(): Unit } - def withReflectCloseable[T <: ReflectCloseable, R](s: T)(action: T => R): R = - try { - action(s) - } finally { - s.close() - } -} diff --git a/tests/neg/t0625.scala b/tests/neg/t0625.scala 
deleted file mode 100644 index 56145425998f..000000000000 --- a/tests/neg/t0625.scala +++ /dev/null @@ -1,8 +0,0 @@ -object Test { - def idMap[C[_],T](m: { def map[U](f: T => U): C[U] }): C[T] = m.map(t => t) - - def main(args: Array[String]): Unit = { - idMap(Some(5)) - idMap(Responder.constant(5)) - } -} diff --git a/tests/neg/t1131.scala b/tests/neg/t1131.scala deleted file mode 100644 index f4a7b377d98a..000000000000 --- a/tests/neg/t1131.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait A { self: Any { def p: Any } => - def f(b: => Unit): Unit = {} - f { p } // error: cannot access member 'p' from structural type -} diff --git a/tests/neg/typers.scala b/tests/neg/typers.scala index 226fd2310408..b5bd1fa2c129 100644 --- a/tests/neg/typers.scala +++ b/tests/neg/typers.scala @@ -60,8 +60,4 @@ object typers { 123 } } - - class Refinements { - val y: C { val x: T; type T } // deprecated warning: illegal forward reference in refinement - } } diff --git a/tests/pos/i262-null-subtyping.scala b/tests/pos/i262-null-subtyping.scala index 284be49e8520..5e57fcca0a8f 100644 --- a/tests/pos/i262-null-subtyping.scala +++ b/tests/pos/i262-null-subtyping.scala @@ -1,12 +1,9 @@ object O { - // This compiles - val a: { type T } = null; - val b: Any { type T } = null; + trait Base extends Any { type T } + val a: Base { type T } = null; + val b: Any with Base { type T } = null; - // This doesn't: - // found : Null - // required: AnyRef{T} - val c: AnyRef { type T } = null; + val c: AnyRef with Base { type T } = null; class A class B diff --git a/tests/pos/structural.scala b/tests/pos/structural.scala deleted file mode 100644 index 8afa49ed0e0b..000000000000 --- a/tests/pos/structural.scala +++ /dev/null @@ -1,21 +0,0 @@ -object test123 { - type A = { def a: Int } - def f(a: A): A = a -} - -object structural2 { - type A = { def a: Int } - - type B = { - def b: Int - } - - type AB = A & B - - def f(ab: AB): AB = ab - - f(new { - def a = 43 - def b = 42 - }) -} \ No newline at end of file diff --git a/tests/pos/t1053.scala b/tests/pos/t1053.scala index 1d4dfb637e99..2c5dc1d5a9d8 100644 --- a/tests/pos/t1053.scala +++ b/tests/pos/t1053.scala @@ -1,6 +1,7 @@ trait T[A] { trait U { type W = A; val x = 3 } } +trait Base { type V } object Test { - val x : ({ type V = T[this.type] })#V = null + val x : (Base { type V = T[this.type] })#V = null val y = new x.U { } } diff --git a/tests/pos/t2810.scala b/tests/pos/t2810.scala deleted file mode 100644 index c85eca164aa3..000000000000 --- a/tests/pos/t2810.scala +++ /dev/null @@ -1,8 +0,0 @@ - - - - -object Test { - val closeable1: { def close(): Unit } = new scala.io.Source { val iter: Iterator[Char] = "".iterator } - val closeable2: { def close(): Unit } = new java.io.Closeable { def close() = {} } -} diff --git a/tests/pos/typers.scala b/tests/pos/typers.scala index fe11ca6021be..7f67d2c7265a 100644 --- a/tests/pos/typers.scala +++ b/tests/pos/typers.scala @@ -88,6 +88,7 @@ object typers { } class Refinements { + trait C { type T; def process(x: T): Int } val y: C { type T; val key: T; def process(x: T): Int } = ??? 
} diff --git a/tests/pos/zoo.scala b/tests/pos/zoo.scala index 08f7eba6380f..02dac8f5bf32 100644 --- a/tests/pos/zoo.scala +++ b/tests/pos/zoo.scala @@ -1,40 +1,37 @@ object Test { -type Meat = { +trait FoodStuff +trait Meat extends FoodStuff { type IsMeat = Any } -type Grass = { +trait Grass extends FoodStuff { type IsGrass = Any } -type Animal = { - type Food +trait Animal { + type Food <: FoodStuff def eats(food: Food): Unit def gets: Food } -type Cow = { +trait Cow extends Animal { type IsMeat = Any type Food <: Grass def eats(food: Grass): Unit - def gets: Grass + def gets: Food } -type Lion = { +trait Lion extends Animal { type Food = Meat def eats(food: Meat): Unit def gets: Meat } -def newMeat: Meat = new { - type IsMeat = Any +def newMeat: Meat = new Meat { } -def newGrass: Grass = new { - type IsGrass = Any +def newGrass: Grass = new Grass { } -def newCow: Cow = new { - type IsMeat = Any +def newCow: Cow = new Cow { type Food = Grass def eats(food: Grass) = () def gets = newGrass } -def newLion: Lion = new { - type Food = Meat +def newLion: Lion = new Lion { def eats(food: Meat) = () def gets = newMeat } From 5f337c0adafa1c6eeca6960a238b1e49e9139421 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 8 Feb 2015 10:06:43 +0100 Subject: [PATCH 03/22] First prototype of pickler. --- src/dotty/tools/dotc/core/Flags.scala | 8 +- .../tools/dotc/core/pickling/NameBuffer.scala | 74 ++++ .../dotc/core/pickling/PickleFormat.scala | 317 +++++++++++++++ .../dotc/core/pickling/TastyBuffer.scala | 162 ++++++++ .../tools/dotc/core/pickling/TastyName.scala | 22 ++ .../dotc/core/pickling/TastyPickler.scala | 50 +++ .../tools/dotc/core/pickling/TreeBuffer.scala | 133 +++++++ .../dotc/core/pickling/TreePickler.scala | 371 ++++++++++++++++++ src/dotty/tools/dotc/typer/Namer.scala | 4 +- 9 files changed, 1135 insertions(+), 6 deletions(-) create mode 100644 src/dotty/tools/dotc/core/pickling/NameBuffer.scala create mode 100644 src/dotty/tools/dotc/core/pickling/PickleFormat.scala create mode 100644 src/dotty/tools/dotc/core/pickling/TastyBuffer.scala create mode 100644 src/dotty/tools/dotc/core/pickling/TastyName.scala create mode 100644 src/dotty/tools/dotc/core/pickling/TastyPickler.scala create mode 100644 src/dotty/tools/dotc/core/pickling/TreeBuffer.scala create mode 100644 src/dotty/tools/dotc/core/pickling/TreePickler.scala diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala index 53beae838c68..3a174d95f126 100644 --- a/src/dotty/tools/dotc/core/Flags.scala +++ b/src/dotty/tools/dotc/core/Flags.scala @@ -333,7 +333,7 @@ object Flags { final val JavaStaticType = JavaStatic.toTypeFlags /** Trait is not an interface, but does not have fields or intialization code */ - final val NoInits = typeFlag(32, "") + final val NoInits = typeFlag(32, "") // TODO reconstitute from context /** Variable is accessed from nested function. */ final val Captured = termFlag(32, "") @@ -345,7 +345,7 @@ object Flags { final val Bridge = termFlag(34, "") /** Symbol is a Java varargs bridge */ // (needed?) 
- final val VBridge = termFlag(35, "") + final val VBridge = termFlag(35, "") // TODO remove /** Symbol is a method which should be marked ACC_SYNCHRONIZED */ final val Synchronized = termFlag(36, "") @@ -364,7 +364,7 @@ object Flags { /** Symbol always defines a fresh named type */ final val Fresh = commonFlag(45, "") - /** Symbol is defined in a super call */ + /** Symbol is defined in a super call */ // TODO reconstitute from context final val InSuperCall = commonFlag(46, "") /** Symbol with private access is accessed outside its private scope */ @@ -551,7 +551,7 @@ object Flags { /** A Java interface, potentially with default methods */ final val JavaTrait = allOf(JavaDefined, Trait, NoInits) - /** A Java interface */ + /** A Java interface */ // TODO reconstitute from context final val JavaInterface = allOf(JavaDefined, Trait) /** A Java companion object */ diff --git a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala new file mode 100644 index 000000000000..c9994ecb557f --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala @@ -0,0 +1,74 @@ +package dotty.tools +package dotc +package core +package pickling + +import collection.mutable +import Names.{Name, chrs} +import Decorators._ +import TastyBuffer._ +import scala.io.Codec +import TastyName._ +import PickleFormat._ + +class NameBuffer extends TastyBuffer(100000) { + + private val nameRefs = new mutable.LinkedHashMap[TastyName, Ref] + + def nameIndex(name: TastyName): Ref = nameRefs.get(name) match { + case Some(ref) => + ref + case None => + val ref = new Ref(nameRefs.size) + nameRefs(name) = ref + ref + } + def nameIndex(name: Name): Ref = nameIndex(Simple(name.toTermName)) + def nameIndex(str: String): Ref = nameIndex(str.toTermName) + + private def withLength(op: => Unit): Unit = { + val lengthAddr = currentAddr + writeByte(0) + op + val length = currentAddr.index - lengthAddr.index - 1 + assert(length < 128) + putNat(lengthAddr, length, 1) + } + + def writeRef(ref: Ref) = writeNat(ref.index) + + def pickleName(name: TastyName): Unit = name match { + case Simple(name) => + val bytes = Codec.toUTF8(chrs, name.start, name.length) + writeByte(UTF8) + writeNat(bytes.length) + writeBytes(bytes, bytes.length) + case Qualified(qualified, selector) => + writeByte(QUALIFIED) + withLength { writeRef(qualified); writeRef(selector) } + case Signed(original, params, result) => + writeByte(SIGNED) + withLength { writeRef(original); writeRef(result); params.foreach(writeRef) } + case Expanded(original) => + writeByte(EXPANDED) + withLength { writeRef(original) } + case ModuleClass(module) => + writeByte(MODULECLASS) + withLength { writeRef(module) } + case SuperAccessor(accessed) => + writeByte(SUPERACCESSOR) + withLength { writeRef(accessed) } + case DefaultGetter(method, paramNumer) => + writeByte(DEFAULTGETTER) + withLength { writeRef(method); writeNat(paramNumer) } + } + + override def assemble(): Unit = { + var i = 0 + for ((name, ref) <- nameRefs) { + assert(ref.index == i) + i += 1 + pickleName(name) + } + } +} diff --git a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala new file mode 100644 index 000000000000..16356718c3fd --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala @@ -0,0 +1,317 @@ +package dotty.tools.dotc +package core +package pickling + +/************************************************************ +Notation: + +We use BNF notation. 
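As a usage sketch of the NameBuffer just defined (editorial, not from the patch; in the real format the `scala.Int` and `scala.String` entries would themselves be QUALIFIED names built from refs rather than plain strings):

import dotty.tools.dotc.core.pickling.{NameBuffer, TastyName}

object NameTableSketch {
  val names  = new NameBuffer
  val fRef   = names.nameIndex("f")              // interned once, afterwards referred to by ref
  val intRef = names.nameIndex("scala.Int")
  val strRef = names.nameIndex("scala.String")
  // the signed name of a method `def f(x: Int): String`
  val fSig   = names.nameIndex(TastyName.Signed(fRef, intRef :: Nil, strRef))
  // asking again returns the same table entry, so each distinct name is pickled only once
  val again  = names.nameIndex("f")              // == fRef
}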
Terminal symbols start with at least two +consecutive upper case letters. Each terminal is represented as a +single byte tag. Non-terminals are mixed case. Prefixes of the form +lower case letter*_ are for explanation of semantic content only, they +can be dropped without changing the grammar. + +Micro-syntax: + + LongNat = Digit* StopDigit // big endian, value fits in a Long without overflow + Nat = LongNat // value fits in an Int without overflow + Digit = 0 | ... | 127 + StopDigit = 128 | ... | 255 // value = digit - 128 + FullInt = Byte Byte Byte Byte + FullLong = Byte Byte Byte Byte Byte Byte Byte Byte + Byte - 0 | ... | 255 + +Macro-format: + + File = Header majorVersion_Nat minorVersion_Nat nameTable_Length Name* Section* + Header = "5CA1AB1F" + + Section = NameRef Length Bytes + Length = Nat // length of rest of entry in bytes + + Name = UTF8 Length UTF8-CodePoint* + QUALIFIED Length qualified_NameRef selector_NameRef + SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef* + EXPANDED Length original_NameRef + MODULECLASS Length module_NameRef + SUPERACCESSOR Length accessed_NameRef + DEFAULTGETTER Length method_NameRef paramNumber_Nat + ... + + NameRef = Nat // ordinal number of name in name table, starting from 1. + +Note: Unqualified names in the name table are strings. The context decides whether a name is +a type-name or a term-name. The same string can represent both. + +Standard-Section: "ASTs" Tree* + + Tree = PACKAGE Length Path Tree* + Stat + + Stat = Term + VALDEF Length NameRef Type rhs_Tree Modifier* + DEFDEF Length NameRef TypeParam* Params* return_Type rhs_Tree + Modifier* + TYPEDEF Length NameRef (Type | Template) Modifier* + IMPORT Length qual_Term Selector* + + TypeParam = TYPEPARAM Length NameRef Type Modifier* + Params = PARAMS Length Param* + Param = PARAM Length NameRef Type Modifier + Selector = IMPORTED Length name_NameRef + RENAMED Length from_NameRef to_NameRef + + Term = Path + SELECT qual_Term possiblySigned_NameRef + SUPER Length this_Term mixinTrait_Type? + APPLY Length fn_Term arg_Term* + TYPEAPPLY Length fn_Term arg_Term* + NEW Length cls_Type + PAIR Length left_Term right_Term + TYPED Length expr_Term ascription_Type + NAMEDARG Length paramName_NameRef arg_Term + ASSIGN Length lhs_Term rhs_Term + BLOCK Length expr_Term Stat* + IF Length cond_Term then_Term else_Term + CLOSURE Length meth_Term target_Type env_Term* + MATCH Length sel_Term CaseDef* + RETURN Length meth_ASTRef expr_Term? + TRY Length expr_Term CaseDef* finalizer_Term? + THROW Length expr_Term + SEQLITERAL Length elem_Term* + JSEQLITERAL Length elem_Term* + BIND Length boundName_NameRef pat_Type pat_Term + ALTERNATIVE Length alt_Term* + UNAPPLY Length fun_Term ImplicitArg* pat_Term* + ANNOTATED Length annot_Term underlying_Term + EMPTYTREE + + CaseDef = CASEDEF Length pat_Tree guard_Tree rhs_Tree + ImplicitArg = IMPLICITARG Length arg_Tree + Template = TEMPLATE Length parent_Tree* SelfDef? Stat* +// if there is a primary constructor, it is the first statement in Stat*.. 
+ SelfDef = Param + ASTRef = Nat // byte position in AST payload + + Path = Constant + TERMREFdirect sym_ASTRef + TERMREFsymbol qual_Type sym_ASTRef + TERMREF qual_Type possiblySigned_NameRef + THIS Length clsRef_Type + SHARED path_ASTRef + + Constant = UNITconst + FALSEconst + TRUEconst + BYTEconst Nat + BYTEneg NegNat + SHORTconst Nat + SHORTneg NegNat + CHARconst Nat + INTconst Nat + INTneg NegNat + LONGconst LongNat + LONGneg NegLongNat + FLOATconst FullInt + DOUBLEconst FullLong + STRINGconst NameRef + NULLconst + CLASSconst Length Type + ENUMconst Length Path + NegNat = Nat // negValue = -natValue - 1 + NegLongNat = LongNat // negValue = -natValue - 1 + + Type = Path + TYPEREFdirect sym_ASTRef + TYPEREFsymbol qual_Type sym_ASTRef + TYPEREF qual_Type possiblySigned_NameRef + SUPERtype Length this_Type underlying_Type + SKOLEMtype Length underlying_Type + REFINEDtype Length refinement_NameRef info_Type + APPLIEDtype Length tycon_Type arg_Type* + TYPEBOUNDS Length low_Type high_Type + TYPEALIAS Length alias_Type + ANNOTATED Length annot_Tree underlying_Type + ANDtype Length left_Type right_Type + ORtype Length left_Type right_Type + BYNAMEtype Length underlying_Type + NOTYPE + NOPREFIX + SHARED type_ASTRef + + Modifier = PRIVATE + INTERNAL // package private + PROTECTED + PRIVATEqualified qualifier_ASTRef // will be dropped + PROTECTEDqualified qualifier_ASTRef // will be dropped + ABSTRACT + FINAL + SEALED + CASE + IMPLICIT + LAZY + OVERRIDE + INLINE + ABSOVERRIDE // abstract override + STATIC // mapped to static Java member + MODULE // an object or its class + LOCAL // private[this] or protected[this] + SYNTHETIC // generated by Scala compiler + ARTIFACT // to be tagged Java Synthetic + MUTABLE // a var + LABEL // method generated as a label + FIELDaccessor // getter or setter + PARAMaccessor // getter or setter for class param + CASEaccessor // getter for case class param + COVARIANT // type param marked “+” + CONTRAVARIANT // type param marked “-” + SCALA2X // Imported from Scala2.x + DEFAULTparameterized // Method with default params + DEFAULTinit // variable with “_” initializer + annotation_Term + +Note: Tree tags are grouped into 4 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way. 
+ + Category 1 (tags 0-95): tag + Category 2 (tags 96-127): tag Nat + Category 3 (tags 128-159): tag AST Nat + Category 4 (tags 160-255): tag Length + +Standard Section: "Positions" startPos_Index endPos_Index + + Index = Length Assoc* + Assoc = Delta ASTRef // largest tree starting/ending at offset + Delta = Nat // # chars from last offset or start of file + +**************************************************************************************/ + +object PickleFormat { + + final val header = "5CA1AB1F" + final val MajorVersion = 0 + final val MinorVersion = 1 + + // Name tags + + final val UTF8 = 1 + final val QUALIFIED = 2 + final val SIGNED = 3 + final val EXPANDED = 4 + final val MODULECLASS = 5 + final val SUPERACCESSOR = 6 + final val DEFAULTGETTER = 7 + +// AST tags + + final val EMPTYTREE = 0 + final val NOTYPE = 1 + final val NOPREFIX = 2 + final val UNITconst = 3 + final val FALSEconst = 4 + final val TRUEconst = 5 + final val NULLconst = 6 + final val PRIVATE = 7 + final val INTERNAL = 8 + final val PROTECTED = 9 + final val ABSTRACT = 10 + final val FINAL = 11 + final val SEALED = 12 + final val CASE = 13 + final val IMPLICIT = 14 + final val LAZY = 15 + final val OVERRIDE = 16 + final val INLINE = 17 + final val ABSOVERRIDE = 18 + final val STATIC = 19 + final val MODULE = 20 + final val LOCAL = 21 + final val SYNTHETIC = 22 + final val ARTIFACT = 23 + final val MUTABLE = 24 + final val LABEL = 25 + final val FIELDaccessor = 26 + final val PARAMaccessor = 27 + final val CASEaccessor = 28 + final val COVARIANT = 29 + final val CONTRAVARIANT = 30 + final val SCALA2X = 31 + final val DEFAULTparameterized = 32 + final val DEFAULTinit = 33 + + final val SHARED = 96 + final val TERMREFdirect = 97 + final val TYPEREFdirect = 98 + final val BYTEconst = 99 + final val BYTEneg = 100 + final val SHORTconst = 101 + final val SHORTneg = 102 + final val CHARconst = 103 + final val INTconst = 104 + final val INTneg = 105 + final val LONGconst = 106 + final val LONGneg = 107 + final val FLOATconst = 108 + final val DOUBLEconst = 109 + final val STRINGconst = 110 + final val PRIVATEqualified = 111 + final val PROTECTEDqualified = 112 + + final val SELECT = 128 + final val TERMREFsymbol = 129 + final val TERMREF = 130 + final val TYPEREFsymbol = 131 + final val TYPEREF = 132 + + final val PACKAGE = 160 + final val VALDEF = 161 + final val DEFDEF = 162 + final val TYPEDEF = 163 + final val IMPORT = 164 + final val TYPEPARAM = 165 + final val PARAMS = 166 + final val PARAM = 167 + final val IMPORTED = 168 + final val RENAMED = 169 + final val APPLY = 170 + final val TYPEAPPLY = 171 + final val NEW = 172 + final val PAIR = 173 + final val TYPED = 174 + final val NAMEDARG = 175 + final val ASSIGN = 176 + final val BLOCK = 177 + final val IF = 178 + final val CLOSURE = 179 + final val MATCH = 180 + final val RETURN = 181 + final val TRY = 182 + final val THROW = 183 + final val SEQLITERAL = 184 + final val JSEQLITERAL = 185 + final val BIND = 186 + final val ALTERNATIVE = 187 + final val UNAPPLY = 188 + final val ANNOTATED = 189 + final val CASEDEF = 190 + final val IMPLICITarg = 191 + final val TEMPLATE = 192 + final val THIS = 193 + final val SUPER = 194 + final val CLASSconst = 195 + final val ENUMconst = 196 + final val SUPERtype = 197 + final val SKOLEMtype = 198 + final val REFINEDtype = 199 + final val APPLIEDtype = 200 + final val TYPEBOUNDS = 201 + final val TYPEALIAS = 202 + final val ANDtype = 203 + final val ORtype = 204 + final val BYNAMEtype = 205 + final val IMPLICITARG = 206 + + 
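These category boundaries (the first*TreeTag constants defined just below) are what let a reader skip over any tree without knowing its specific tag. A standalone sketch, not part of the patch, assuming illustrative reader primitives `readByte`, `readNat` and `skipBytes`:

import dotty.tools.dotc.core.pickling.PickleFormat._

object SkipSketch {
  def skipTree(readByte: () => Int, readNat: () => Int, skipBytes: Int => Unit): Unit = {
    val tag = readByte()
    if (tag >= firstLengthTreeTag)
      skipBytes(readNat())                     // category 4: tag Length <Length bytes>
    else if (tag >= firstTreeNatTreeTag) {
      skipTree(readByte, readNat, skipBytes)   // category 3: tag AST Nat
      readNat()
      ()
    }
    else if (tag >= firstNatTreeTag) {
      readNat()                                // category 2: tag Nat
      ()
    }
    // category 1 (tag only): nothing follows
  }
}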
final val firstSimpleTreeTag = EMPTYTREE + final val firstNatTreeTag = SHARED + final val firstTreeNatTreeTag = SELECT + final val firstLengthTreeTag = PACKAGE +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala new file mode 100644 index 000000000000..f6a7a17b4b06 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala @@ -0,0 +1,162 @@ +package dotty.tools +package dotc +package core +package pickling + +import util.Util.dble + +object TastyBuffer { + + /** The number of digits of the natural number `nat`, written in base 128 format. */ + def natSize(nat: Int): Int = + if (nat < 128) 1 else natSize(nat >>> 7) + 1 + + /** An address pointing to an index in a Tasty buffer's byte array */ + class Addr(val index: Int) extends AnyVal { + def -(delta: Int): Addr = new Addr(this.index - delta) + def +(delta: Int): Addr = new Addr(this.index + delta) + + def relativeTo(base: Addr): Addr = this - base.index - AddrWidth + } + + /** The maximal number of address bytes. + * Since addresses are written as base-128 natural numbers, + * the value of 4 gives a maximal array size of 512M. + */ + final val AddrWidth = 4 +} +import TastyBuffer._ + +/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format, + * and that supports reading and patching addresses represented as natural numbers. + */ +class TastyBuffer(initialSize: Int) { + + /** The current byte array, will be expanded as needed */ + var bytes = new Array[Byte](initialSize) + + /** The number of bytes written */ + var length = 0 + + // -- Output routines -------------------------------------------- + + /** Write a byte of data. */ + def writeByte(b: Int): Unit = { + if (length == bytes.length) bytes = dble(bytes) + bytes(length) = b.toByte + length += 1 + } + + /** Write the first `n` bytes of `data`. */ + def writeBytes(data: Array[Byte], n: Int): Unit = { + while (bytes.length < length + data.length) bytes = dble(bytes) + Array.copy(data, 0, bytes, length, n) + length += n + } + + /** Write a natural number in big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def writeNat(x: Int): Unit = + writeLongNat(x.toLong & 0x00000000FFFFFFFFL) + + /** + * Like writeNat, but for longs. This is not the same as + * writeRaw, which writes in base 256. Note that the + * binary representation of LongNat is identical to Nat + * if the long value is in the range Int.MIN_VALUE to + * Int.MAX_VALUE. + */ + def writeLongNat(x: Long): Unit = { + def writeNatPrefix(x: Long): Unit = { + val y = x >>> 7 + if (y != 0L) writeNatPrefix(y) + writeByte(((x & 0x7f) | 0x80).toInt) + } + val y = x >>> 7 + if (y != 0L) writeNatPrefix(y) + writeByte((x & 0x7f).toInt) + } + + /** Write the `nbytes` least significant bytes of `x` in big endian format */ + def writeRaw(x: Long, nbytes: Int): Unit = { + def recur(x: Long, n: Int): Unit = + if (n > 0) { + recur(x >>> 8, n - 1) + writeByte((x & 0xff).toInt) + } + recur(x, nbytes) + } + + // -- Address handling -------------------------------------------- + + /** Write natural number `x` right-adjusted in a field of `width` bytes + * starting with address `at`. 
+ */ + def putNat(at: Addr, x: Int, width: Int): Unit = { + var y = x + var w = width + var digit = y & 0x7f | 0x80 + while (w > 0) { + w -= 1 + bytes(at.index + w) = digit.toByte + y >>>= 7 + digit = y & 0x7f + } + assert(y == 0, s"number $x too large to fit in $width bytes") + } + + /** The byte at given address */ + def getByte(at: Addr): Int = bytes(at.index) + + /** The natural number at address `at` */ + def getNat(at: Addr): Int = getLongNat(at).toInt + + /** The long natural number at address `at` */ + def getLongNat(at: Addr): Long = { + var b = 0L + var x = 0L + var idx = at.index + do { + b = bytes(idx) + x = (x << 7) + (b & 0x7f) + idx += 1 + } while ((b & 0x80) != 0L) + x + } + + /** The address (represented as a natural number) at address `at` */ + def getAddr(at: Addr) = new Addr(getNat(at)) + + /** The smallest address equal to or following `at` which points to a non-zero byte */ + final def skipZeroes(at: Addr): Addr = + if (getByte(at) != 0) at else skipZeroes(at + 1) + + /** The address after the natural number found at address `at`. */ + final def skipNat(at: Addr): Addr = { + val next = at + 1 + if ((getByte(at) & 0x80) != 0) next else skipNat(next) + } + + /** The address referring to the end of data written so far */ + def currentAddr: Addr = new Addr(length) + + /** Reserve `AddrWidth` bytes to write an address into */ + def reserveAddr(): Addr = { + val result = currentAddr + length += AddrWidth + result + } + + /** Fill reserved space at address `at` with address `target` */ + def fillAddr(at: Addr, target: Addr) = + putNat(at, target.index, AddrWidth) + + // -- Finalization -------------------------------------------- + + /** Hook to be overridden in subclasses. + * Perform all actions necessary to assemble the final byte array. + * After `assemble` no more output actions to this buffer are permitted. 
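As an aside, the address machinery above (reserveAddr, currentAddr, fillAddr) is the reserve-then-patch pattern that the pickler's Length fields and forward references are built on. A hedged usage sketch, not from the patch:

import dotty.tools.dotc.core.pickling.TastyBuffer

object ReservePatchSketch {
  val buf  = new TastyBuffer(initialSize = 64)
  val slot = buf.reserveAddr()            // leaves AddrWidth (= 4) zero bytes to patch later
  buf.writeByte(42)                       // ... write the payload whose end address we need ...
  buf.writeNat(12345)
  buf.fillAddr(slot, buf.currentAddr)     // patch the reserved bytes with the end address
}

TreeBuffer and TreePickler below wrap this same pattern in reserveRef/fillRef to emit length prefixes and to resolve forward symbol references.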
+ */ + def assemble(): Unit = () +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyName.scala b/src/dotty/tools/dotc/core/pickling/TastyName.scala new file mode 100644 index 000000000000..911d4c0cd0ee --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyName.scala @@ -0,0 +1,22 @@ +package dotty.tools +package dotc +package core +package pickling + +import core.Names.TermName + +abstract class TastyName + +object TastyName { + + class Ref(val index: Int) extends AnyVal + + case class Simple(name: TermName) extends TastyName + case class Qualified(qualified: Ref, selector: Ref) extends TastyName + case class Signed(original: Ref, params: List[Ref], result: Ref) extends TastyName + case class Expanded(original: Ref) extends TastyName + case class ModuleClass(module: Ref) extends TastyName + case class SuperAccessor(accessed: Ref) extends TastyName + case class DefaultGetter(method: Ref, num: Int) extends TastyName + +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala new file mode 100644 index 000000000000..2ae6848e0788 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala @@ -0,0 +1,50 @@ +package dotty.tools +package dotc +package core +package pickling + +import PickleFormat._ +import collection.mutable +import TastyBuffer._ + +class TastyPickler { + + private val sections = new mutable.ArrayBuffer[(TastyName.Ref, TastyBuffer)] + + private val headerBuffer = { + val buf = new TastyBuffer(16) + for (ch <- header) buf.writeByte(ch.toByte) + buf.writeNat(MajorVersion) + buf.writeNat(MinorVersion) + buf + } + + val nameBuffer = new NameBuffer + + def newSection(name: String, buf: TastyBuffer) = + sections += ((nameBuffer.nameIndex(name), buf)) + + def assembleParts: Array[Byte] = { + def lengthWithLength(buf: TastyBuffer) = { + buf.assemble() + buf.length + natSize(buf.length) + } + val totalSize = + headerBuffer.length + + lengthWithLength(nameBuffer) + { + for ((nameRef, buf) <- sections) yield + natSize(nameRef.index) + lengthWithLength(buf) + }.sum + val all = new TastyBuffer(totalSize) + all.writeBytes(headerBuffer.bytes, headerBuffer.length) + all.writeNat(nameBuffer.length) + all.writeBytes(nameBuffer.bytes, nameBuffer.length) + for ((nameRef, buf) <- sections) { + all.writeNat(nameRef.index) + all.writeNat(buf.length) + all.writeBytes(buf.bytes, buf.length) + } + assert(all.length == totalSize && all.bytes.length == totalSize) + all.bytes + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala new file mode 100644 index 000000000000..5a445124d3ce --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala @@ -0,0 +1,133 @@ +package dotty.tools +package dotc +package core +package pickling + +import util.Util.{bestFit, dble} +import TastyBuffer.{Addr, AddrWidth} + +class TreeBuffer extends TastyBuffer(1000000) { + + private final val ItemsOverOffsets = 2 + + private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets) + private var offsets = new Array[Int](initialOffsetSize) + private var isRelative = new Array[Boolean](initialOffsetSize) + private var delta: Array[Int] = _ + private var numOffsets = 0 + + private def offset(i: Int): Addr = new Addr(offsets(i)) + + private def keepOffset(relative: Boolean): Unit = { + if (numOffsets == offsets.length) { + offsets = dble(offsets) + isRelative = dble(isRelative) + } + offsets(numOffsets) = length + isRelative(numOffsets) = relative + 
numOffsets += 1 + } + + def reserveRef(relative: Boolean): Addr = { + val addr = currentAddr + keepOffset(relative) + reserveAddr() + addr + } + + def writeRef(target: Addr) = { + keepOffset(relative = false) + writeNat(target.index) + } + + def fillRef(at: Addr, target: Addr, relative: Boolean) = { + val addr = if (relative) target.relativeTo(at) else target + fillAddr(at, addr) + } + + def adjusted(x: Addr): Addr = { + val idx = bestFit(offsets, x.index - 1) + if (idx < 0) x else x - delta(idx) + } + + private def computeDeltas() = { + delta = new Array[Int](numOffsets) + var lastDelta = 0 + var i = 0 + while (i < numOffsets) { + val off = offset(i) + val skippedOff = skipZeroes(off) + val skippedCount = skippedOff.index - off.index + assert(skippedCount < AddrWidth, s"unset field at position $off") + lastDelta += skippedCount + delta(i) = lastDelta + i += 1 + } + } + + private def adjustedOffset(at: Addr, isRelative: Boolean): Addr = { + val original = getAddr(at) + if (isRelative) { + val start = skipNat(at).index + adjusted(original + start) - start + } else adjusted(original) + } + + private def adjustOffsets(): Unit = { + for (i <- 0 until numOffsets) { + val off = offset(i) + val original = getAddr(off) + val corrected = adjustedOffset(off, isRelative(i)) + fillAddr(off, corrected) + } + } + + private def adjustDeltas(): Int = { + val delta1 = new Array[Int](delta.length) + var lastDelta = 0 + var i = 0 + while (i < numOffsets) { + val corrected = adjustedOffset(offset(i), isRelative(i)) + lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index) + delta1(i) = lastDelta + i += 1 + } + val saved = + if (numOffsets == 0) 0 + else delta1(numOffsets - 1) - delta(numOffsets - 1) + delta = delta1 + saved + } + + private def compress(): Int = { + var lastDelta = 0 + var start = 0 + var i = 0 + var wasted = 0 + while (i < numOffsets) { + val next = offsets(i) + Array.copy(bytes, start, bytes, start - lastDelta, next - start) + start = next + delta(i) - lastDelta + val pastZeroes = skipZeroes(new Addr(next)).index + assert(pastZeroes >= start, s"something's wrong: eliminated non-zero") + wasted += (pastZeroes - start) + lastDelta = delta(i) + i += 1 + } + length -= lastDelta + wasted + } + + override def assemble(): Unit = { + val origLength = length + computeDeltas() + adjustOffsets() + if (false) { + var saved = 0 + do saved = adjustDeltas() + while (saved > 0 && length / saved < 100) + } + val wasted = compress() + println(s"original length: $origLength, compressed to: $length, wasted: $wasted") + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala new file mode 100644 index 000000000000..8c92e2ed83b1 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala @@ -0,0 +1,371 @@ +package dotty.tools +package dotc +package core +package pickling + +import util.Util.{bestFit, dble} +import ast.Trees._ +import PickleFormat._ +import core._ +import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._ +import collection.mutable +import TastyBuffer._ + +class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { + val buf = new TreeBuffer + pickler.newSection("ASTs", buf) + import buf._ + import pickler.nameBuffer.nameIndex + import ast.tpd._ + + private val symRefs = new mutable.HashMap[Symbol, Addr] + private val forwardSymRefs = new mutable.HashMap[Symbol, List[Addr]] + private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's 
not compatible with null + + private def withLength(op: => Unit) = { + val lengthAddr = reserveRef(relative = true) + op + fillRef(lengthAddr, currentAddr, relative = true) + } + + def registerDef(sym: Symbol) = { + symRefs(sym) = currentAddr + forwardSymRefs.get(sym) match { + case Some(refs) => + refs.foreach(fillRef(_, currentAddr, relative = false)) + forwardSymRefs -= sym + case None => + } + } + + private def pickleName(name: Name) = writeNat(nameIndex(name).index) + private def pickleName(name: TastyName) = writeNat(nameIndex(name).index) + private def pickleNameAndSig(name: Name, sig: Signature) = { + val Signature(params, result) = sig + pickleName(TastyName.Signed(nameIndex(name), params.map(nameIndex), nameIndex(result))) + } + + private def pickleSym(sym: Symbol) = symRefs.get(sym) match { + case Some(label) => + writeRef(label) + case None => + val ref = reserveRef(relative = false) + forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil) + } + + def pickle(tree: Tree)(implicit ctx: Context) = { + + def pickleConstant(c: Constant): Unit = { + def pickleNum(nonNegTag: Int, negTag: Int) = { + val x = c.longValue + if (x < 0) { + writeByte(negTag) + writeLongNat(-(x + 1)) + } + else { + writeByte(nonNegTag) + writeLongNat(x) + } + } + c.tag match { + case UnitTag => + writeByte(UNITconst) + case BooleanTag => + writeByte(if (c.booleanValue) TRUEconst else FALSEconst) + case ByteTag => + pickleNum(BYTEconst, BYTEneg) + case ShortTag => + pickleNum(SHORTconst, SHORTneg) + case CharTag => + writeByte(CHARconst) + writeNat(c.charValue) + case IntTag => + pickleNum(INTconst, INTneg) + case LongTag => + pickleNum(LONGconst, LONGneg) + case FloatTag => + writeByte(FLOATconst) + writeRaw(java.lang.Float.floatToRawIntBits(c.floatValue), 4) + case DoubleTag => + writeByte(DOUBLEconst) + writeRaw(java.lang.Double.doubleToRawLongBits(c.doubleValue), 8) + case StringTag => + writeByte(STRINGconst) + writeNat(nameIndex(c.stringValue).index) + case NullTag => + writeByte(NULLconst) + case ClazzTag => + writeByte(CLASSconst) + withLength { pickleType(c.typeValue) } + case EnumTag => + writeByte(ENUMconst) + withLength { pickleType(c.symbolValue.termRef) } + } + } + + def pickleType(tpe: Type): Unit = { + val prev = pickledTypes.get(tpe) + if (prev == null) { + val addr = currentAddr + pickleNewType(tpe) + pickledTypes.put(tpe, addr) + } + else { + writeByte(SHARED) + writeRef(prev.asInstanceOf[Addr]) + } + } + + def pickleNewType(tpe: Type)= tpe match { + case ConstantType(value) => pickleConstant(value) + case tpe: WithFixedSym => + if (tpe.prefix == NoPrefix) { + writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) + pickleSym(tpe.symbol) + } + else { + writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) + pickleType(tpe.prefix); pickleSym(tpe.symbol) + } + case tpe: TermRefWithSignature => + writeByte(TERMREF) + pickleType(tpe.prefix); pickleNameAndSig(tpe.name, tpe.signature) + case tpe: NamedType => + writeByte(if (tpe.isType) TYPEREF else TERMREF) + pickleType(tpe.prefix); pickleName(tpe.name) + case tpe: ThisType => + writeByte(THIS) + pickleType(tpe.tref) + case tpe: SuperType => + writeByte(SUPERtype) + withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)} + case tpe: SkolemType => + writeByte(SKOLEMtype) + withLength { pickleType(tpe.underlying) } + case tpe: RefinedType => + val args = tpe.argInfos(interpolate = false) + if (args.isEmpty) { + writeByte(REFINEDtype) + withLength { pickleName(tpe.refinedName); pickleType(tpe.refinedInfo) } + } + else { 
+ writeByte(APPLIEDtype) + withLength { pickleType(tpe.withoutArgs(args)); args.foreach(pickleType) } + } + case tpe: TypeAlias => + writeByte(TYPEALIAS) + withLength { pickleType(tpe.alias) } + case tpe: TypeBounds => + writeByte(TYPEBOUNDS) + withLength { pickleType(tpe.lo); pickleType(tpe.hi) } + case tpe: AnnotatedType => + writeByte(ANNOTATED) + withLength { pickleTree(tpe.annot.tree); pickleType(tpe.tpe) } + case tpe: AndOrType => + writeByte(if (tpe.isAnd) ANDtype else ORtype) + withLength { pickleType(tpe.tp1); pickleType(tpe.tp2) } + case tpe: ExprType => + writeByte(BYNAMEtype) + withLength { pickleType(tpe.underlying) } + case NoType => + writeByte(NOTYPE) +// case NoPrefix => // not sure we need this! +// writeByte(NOPREFIX) + } + + def pickleTpt(tpt: Tree): Unit = pickleType(tpt.tpe) // TODO correlate with original when generating positions + + def pickleTreeIfNonEmpty(tree: Tree): Unit = + if (!tree.isEmpty) pickleTree(tree) + + def pickleTree(tree: Tree): Unit = tree match { + case Ident(_) | This(_) => + pickleType(tree.tpe) + case Select(qual, name) => + writeByte(SELECT) + val sig = tree.tpe.signature + if (sig == Signature.NotAMethod) pickleName(name) + else pickleNameAndSig(name, sig) + case Apply(fun, args) => + writeByte(APPLY) + withLength { + pickleTree(fun) + args.foreach(pickleTree) + } + case TypeApply(fun, args) => + writeByte(TYPEAPPLY) + withLength { + pickleTree(fun) + args.foreach(pickleTree) + } + case Literal(const) => + pickleConstant(const) + case Super(qual, mix) => + writeByte(SUPER) + withLength { + pickleTree(qual); + if (!mix.isEmpty) { + val SuperType(_, mixinType) = tree.tpe + pickleType(mixinType) + } + } + case New(tpt) => + writeByte(NEW) + withLength { pickleTpt(tpt) } + case Pair(left, right) => + writeByte(PAIR) + withLength { pickleTree(left); pickleTree(right) } + case Typed(expr, tpt) => + writeByte(TYPED) + withLength { pickleTree(expr); pickleTpt(tpt) } + case NamedArg(name, arg) => + writeByte(NAMEDARG) + withLength { pickleName(name); pickleTree(arg) } + case Assign(lhs, rhs) => + writeByte(ASSIGN) + withLength { pickleTree(lhs); pickleTree(rhs) } + case Block(stats, expr) => + writeByte(BLOCK) + withLength { pickleTree(expr); stats.foreach(pickleTree) } + case If(cond, thenp, elsep) => + writeByte(IF) + withLength{ pickleTree(cond); pickleTree(thenp); pickleTree(elsep) } + case Closure(env, meth, tpt) => + writeByte(CLOSURE) + withLength{ pickleTree(meth); pickleTpt(tpt); env.foreach(pickleTree) } + case Match(selector, cases) => + writeByte(MATCH) + withLength { pickleTree(selector); cases.foreach(pickleTree) } + case CaseDef(pat, guard, rhs) => + writeByte(CASEDEF) + withLength { pickleTree(pat); pickleTree(guard); pickleTree(rhs) } + case Return(expr, from) => + writeByte(RETURN) + withLength { pickleSym(from.symbol); pickleTreeIfNonEmpty(expr) } + case Try(block, cases, finalizer) => + writeByte(TRY) + withLength { pickleTree(block); cases.foreach(pickleTree); pickleTreeIfNonEmpty(finalizer) } + case Throw(expr) => + writeByte(THROW) + withLength { pickleTree(expr) } + case SeqLiteral(elems) => + writeByte(if (tree.isInstanceOf[JavaSeqLiteral]) JSEQLITERAL else SEQLITERAL) + withLength { elems.foreach(pickleTree) } + case TypeTree(original) => + pickleTpt(tree) + case Bind(name, body) => + registerDef(tree.symbol) + writeByte(BIND) + withLength { pickleName(name); pickleType(tree.symbol.info); pickleTree(body) } + case Alternative(alts) => + writeByte(ALTERNATIVE) + withLength { alts.foreach(pickleTree) } + case UnApply(fun, 
implicits, patterns) => + writeByte(UNAPPLY) + withLength { + pickleTree(fun) + for (implicitArg <- implicits) { + writeByte(IMPLICITARG) + withLength { pickleTree(implicitArg) } + } + patterns.foreach(pickleTree) + } + case tree: ValDef => + pickleDef(VALDEF, tree.symbol, tree.rhs) + case tree: DefDef => + def pickleParams = { + for (tparam <- tree.tparams) pickleDef(TYPEPARAM, tparam.symbol, EmptyTree) + for (vparams <- tree.vparamss) { + writeByte(PARAMS) + withLength { + for (vparam <- vparams) pickleDef(PARAM, vparam.symbol, EmptyTree) + } + } + } + pickleDef(DEFDEF, tree.symbol, tree.rhs, pickleParams) + case tree: TypeDef => + pickleDef(TYPEDEF, tree.symbol, tree.rhs) + case tree: Template => + writeByte(TEMPLATE) + withLength { + tree.parents.foreach(pickleTree) + if (!tree.self.isEmpty) + pickleDef(PARAM, tree.self.symbol, EmptyTree) + pickleTreeIfNonEmpty(tree.constr) + tree.body.foreach(pickleTree) + } + case Import(expr, selectors) => + writeByte(IMPORT) + withLength { + pickleTree(expr) + selectors foreach { + case Pair(Ident(from), Ident(to)) => + writeByte(RENAMED) + withLength { pickleName(from); pickleName(to) } + case Ident(name) => + writeByte(IMPORTED) + withLength { pickleName(name) } + } + } + case PackageDef(pid, stats) => + writeByte(PACKAGE) + withLength { pickleType(pid.tpe); stats.foreach(pickleTree) } + case Annotated(annot, arg) => + writeByte(ANNOTATED) + withLength { pickleTree(annot); pickleTree(arg) } + } + + def pickleDef(tag: Int, sym: Symbol, rhs: Tree, pickleParams: => Unit = ()) = { + registerDef(sym) + writeByte(tag) + withLength { + pickleName(sym.name) + pickleParams + if (tag != TYPEDEF) pickleType(sym.info.finalResultType) + if (tag != PARAM && tag != TYPEPARAM) pickleTree(rhs) + pickleModifiers(sym) + } + } + + def pickleModifiers(sym: Symbol): Unit = { + import Flags._ + val flags = sym.flags + val privateWithin = sym.privateWithin + if (privateWithin.exists) { + writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified) + pickleSym(privateWithin) + } + if (flags is Private) writeByte(PRIVATE) + if (flags is Protected) if (!privateWithin.exists) writeByte(PROTECTED) + if (flags is Final) writeByte(FINAL) + if (flags is Case) writeByte(CASE) + if (flags is Override) writeByte(OVERRIDE) + if (flags is Inline) writeByte(INLINE) + if (flags is JavaStatic) writeByte(STATIC) + if (flags is Module) writeByte(MODULE) + if (flags is Local) writeByte(LOCAL) + if (flags is Synthetic) writeByte(SYNTHETIC) + if (flags is Artifact) writeByte(ARTIFACT) + if (flags is Scala2x) writeByte(SCALA2X) + if (sym.isTerm) { + if (flags is Implicit) writeByte(IMPLICIT) + if (flags is Lazy) writeByte(LAZY) + if (flags is AbsOverride) writeByte(ABSOVERRIDE) + if (flags is Mutable) writeByte(MUTABLE) + if (flags is Accessor) writeByte(FIELDaccessor) + if (flags is ParamAccessor) writeByte(PARAMaccessor) + if (flags is CaseAccessor) writeByte(CASEaccessor) + if (flags is DefaultParameterized) writeByte(DEFAULTparameterized) + if (flags is DefaultInit) writeByte(DEFAULTinit) + } else { + if (flags is Sealed) writeByte(SEALED) + if (flags is Abstract) writeByte(ABSTRACT) + if (flags is Covariant) writeByte(COVARIANT) + if (flags is Contravariant) writeByte(CONTRAVARIANT) + } + sym.annotations.foreach(ann => pickleTree(ann.tree)) + } + + pickleTree(tree) + } +} diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index c522a5998e90..95f0b41652a1 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ 
b/src/dotty/tools/dotc/typer/Namer.scala @@ -497,9 +497,9 @@ class Namer { typer: Typer => denot.info = ClassInfo(cls.owner.thisType, cls, parentRefs, decls, selfInfo) if (cls is Trait) { if (body forall isNoInitMember) { - cls.setFlag(NoInits) + cls.setFlag(NoInits) // TODO set when unpickling if (body forall isPureInterfaceMember) - cls.setFlag(PureInterface) + cls.setFlag(PureInterface) // TODO set when unpickling } } } From c288bc5a119da649389cb05bdced5b4400cade70 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 10 Feb 2015 12:31:21 +0100 Subject: [PATCH 04/22] Allow up to 64 phases. Doubles maximal number of phases from 32 to 64. Needed because adding the pickling phase would push us over the edge of 32 phases. --- src/dotty/tools/dotc/core/Periods.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/Periods.scala b/src/dotty/tools/dotc/core/Periods.scala index 66c26e38179b..8b822c718520 100644 --- a/src/dotty/tools/dotc/core/Periods.scala +++ b/src/dotty/tools/dotc/core/Periods.scala @@ -46,9 +46,9 @@ object Periods { * It is coded as follows: * * sign, always 0 1 bit - * runid 21 bits - * last phase id: 5 bits - * #phases before last: 5 bits + * runid 19 bits + * last phase id: 6 bits + * #phases before last: 6 bits * * // Dmitry: sign == 0 isn't actually always true, in some cases phaseId == -1 is used for shifts, that easily creates code < 0 */ @@ -148,7 +148,7 @@ object Periods { final val FirstPhaseId = 1 /** The number of bits needed to encode a phase identifier. */ - final val PhaseWidth = 5 + final val PhaseWidth = 6 final val PhaseMask = (1 << PhaseWidth) - 1 final val MaxPossiblePhaseId = PhaseMask } \ No newline at end of file From ecd7d56e865bbc2e16940e49ecb14e24d9aa2303 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 10 Feb 2015 12:32:22 +0100 Subject: [PATCH 05/22] Make bestFit work for partially filled arrays --- src/dotty/tools/dotc/core/pickling/TreeBuffer.scala | 2 +- src/dotty/tools/dotc/util/SourceFile.scala | 2 +- src/dotty/tools/dotc/util/Util.scala | 12 +++++------- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala index 5a445124d3ce..73b944b92185 100644 --- a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala @@ -46,7 +46,7 @@ class TreeBuffer extends TastyBuffer(1000000) { } def adjusted(x: Addr): Addr = { - val idx = bestFit(offsets, x.index - 1) + val idx = bestFit(offsets, numOffsets, x.index - 1) if (idx < 0) x else x - delta(idx) } diff --git a/src/dotty/tools/dotc/util/SourceFile.scala b/src/dotty/tools/dotc/util/SourceFile.scala index c5d88d7bf95f..45119a881538 100644 --- a/src/dotty/tools/dotc/util/SourceFile.scala +++ b/src/dotty/tools/dotc/util/SourceFile.scala @@ -99,7 +99,7 @@ case class SourceFile(file: AbstractFile, content: Array[Char]) { * Lines are numbered from 0 */ def offsetToLine(offset: Int): Int = { - lastLine = Util.bestFit(lineIndices, offset, lastLine) + lastLine = Util.bestFit(lineIndices, lineIndices.length, offset, lastLine) lastLine } diff --git a/src/dotty/tools/dotc/util/Util.scala b/src/dotty/tools/dotc/util/Util.scala index ed9a54e38d09..98f0b62dbbde 100644 --- a/src/dotty/tools/dotc/util/Util.scala +++ b/src/dotty/tools/dotc/util/Util.scala @@ -11,18 +11,16 @@ object Util { * `candidates.length/2`. 
* @pre candidates is sorted */ - def bestFit(candidates: Array[Int], x: Int, hint: Int = -1): Int = { + def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = { def recur(lo: Int, hi: Int, mid: Int): Int = if (x < candidates(mid)) recur(lo, mid - 1, (lo + mid - 1) / 2) - else if (mid + 1 < candidates.length && x >= candidates(mid + 1)) + else if (mid + 1 < length && x >= candidates(mid + 1)) recur(mid + 1, hi, (mid + 1 + hi) / 2) else mid - val initMid = - if (0 <= hint && hint < candidates.length) hint - else candidates.length / 2 - if (candidates.isEmpty || x < candidates(0)) -1 - else recur(0, candidates.length, initMid) + val initMid = if (0 <= hint && hint < length) hint else length / 2 + if (length == 0 || x < candidates(0)) -1 + else recur(0, length, initMid) } /** An array twice the size of given array, with existing elements copied over */ From 8d3cf74d0ea83c73b1663ff262dde07f7e0fc3a6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 10 Feb 2015 12:33:38 +0100 Subject: [PATCH 06/22] Treat <root> as static --- src/dotty/tools/dotc/core/SymDenotations.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 6b35329442df..ebdbcb4f6eb0 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -405,7 +405,7 @@ object SymDenotations { /** Is this denotation static (i.e. with no outer instance)? */ final def isStatic(implicit ctx: Context) = - (this is JavaStatic) || this.exists && owner.isStaticOwner + (this is JavaStatic) || this.exists && owner.isStaticOwner || this.isRoot /** Is this a package class or module class that defines static symbols? */ final def isStaticOwner(implicit ctx: Context): Boolean = From 354755cf6acbe74a46f9542db8150d8fd327826e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 10 Feb 2015 12:39:15 +0100 Subject: [PATCH 07/22] Add TASTY readers and printers for TASTY info. So far printing is the only reader, i.e. deserializer. Numerous bugfixes to make first tests work.
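Before the TASTY reader changes below, a quick usage sketch of the bestFit revision from patch 05 above: the added `length` parameter restricts the binary search to the filled prefix of an over-allocated array, which is how TreeBuffer now calls it with `numOffsets`. The values here are illustrative only:

import dotty.tools.dotc.util.Util

object BestFitSketch {
  // the array is over-allocated; only the first `numOffsets` slots are in use
  val offsets    = Array(0, 10, 20, 0, 0)
  val numOffsets = 3

  val a = Util.bestFit(offsets, numOffsets, 15)   // == 1  (largest i with offsets(i) <= 15)
  val b = Util.bestFit(offsets, numOffsets, 25)   // == 2
  val c = Util.bestFit(offsets, numOffsets, -1)   // == -1 (x is below all candidates)
}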
--- .../tools/dotc/core/pickling/NameBuffer.scala | 30 +-- .../dotc/core/pickling/PickleFormat.scala | 176 ++++++++++++++++-- .../dotc/core/pickling/TastyBuffer.scala | 20 +- .../tools/dotc/core/pickling/TastyName.scala | 21 ++- .../dotc/core/pickling/TastyPickler.scala | 6 +- .../dotc/core/pickling/TastyPrinter.scala | 107 +++++++++++ .../dotc/core/pickling/TastyReader.scala | 78 ++++++++ .../dotc/core/pickling/TastyUnpickler.scala | 84 +++++++++ .../tools/dotc/core/pickling/TreeBuffer.scala | 54 +++--- .../dotc/core/pickling/TreePickler.scala | 92 +++++++-- src/dotty/tools/dotc/transform/Pickler.scala | 29 +++ 11 files changed, 602 insertions(+), 95 deletions(-) create mode 100644 src/dotty/tools/dotc/core/pickling/TastyPrinter.scala create mode 100644 src/dotty/tools/dotc/core/pickling/TastyReader.scala create mode 100644 src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala create mode 100644 src/dotty/tools/dotc/transform/Pickler.scala diff --git a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala index c9994ecb557f..70d6b9ee17cd 100644 --- a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala @@ -13,18 +13,18 @@ import PickleFormat._ class NameBuffer extends TastyBuffer(100000) { - private val nameRefs = new mutable.LinkedHashMap[TastyName, Ref] + private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef] - def nameIndex(name: TastyName): Ref = nameRefs.get(name) match { + def nameIndex(name: TastyName): NameRef = nameRefs.get(name) match { case Some(ref) => ref case None => - val ref = new Ref(nameRefs.size) + val ref = NameRef(nameRefs.size) nameRefs(name) = ref ref } - def nameIndex(name: Name): Ref = nameIndex(Simple(name.toTermName)) - def nameIndex(str: String): Ref = nameIndex(str.toTermName) + def nameIndex(name: Name): NameRef = nameIndex(Simple(name.toTermName)) + def nameIndex(str: String): NameRef = nameIndex(str.toTermName) private def withLength(op: => Unit): Unit = { val lengthAddr = currentAddr @@ -35,32 +35,34 @@ class NameBuffer extends TastyBuffer(100000) { putNat(lengthAddr, length, 1) } - def writeRef(ref: Ref) = writeNat(ref.index) + def writeNameRef(ref: NameRef) = writeNat(ref.index) def pickleName(name: TastyName): Unit = name match { case Simple(name) => - val bytes = Codec.toUTF8(chrs, name.start, name.length) + val bytes = + if (name.length == 0) new Array[Byte](0) + else Codec.toUTF8(chrs, name.start, name.length) writeByte(UTF8) writeNat(bytes.length) writeBytes(bytes, bytes.length) case Qualified(qualified, selector) => writeByte(QUALIFIED) - withLength { writeRef(qualified); writeRef(selector) } + withLength { writeNameRef(qualified); writeNameRef(selector) } case Signed(original, params, result) => writeByte(SIGNED) - withLength { writeRef(original); writeRef(result); params.foreach(writeRef) } + withLength { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) } case Expanded(original) => writeByte(EXPANDED) - withLength { writeRef(original) } + withLength { writeNameRef(original) } case ModuleClass(module) => writeByte(MODULECLASS) - withLength { writeRef(module) } + withLength { writeNameRef(module) } case SuperAccessor(accessed) => writeByte(SUPERACCESSOR) - withLength { writeRef(accessed) } - case DefaultGetter(method, paramNumer) => + withLength { writeNameRef(accessed) } + case DefaultGetter(method, paramNumber) => writeByte(DEFAULTGETTER) - withLength { writeRef(method); writeNat(paramNumer) } + 
withLength { writeNameRef(method); writeNat(paramNumber) } } override def assemble(): Unit = { diff --git a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala index 16356718c3fd..6769cd016c23 100644 --- a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala +++ b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala @@ -95,6 +95,7 @@ Standard-Section: "ASTs" Tree* Path = Constant TERMREFdirect sym_ASTRef + TERMREFstatic fullyQualified_NameRef TERMREFsymbol qual_Type sym_ASTRef TERMREF qual_Type possiblySigned_NameRef THIS Length clsRef_Type @@ -123,6 +124,7 @@ Standard-Section: "ASTs" Tree* Type = Path TYPEREFdirect sym_ASTRef + TYPEREFstatic fullyQualified_NameRef TYPEREFsymbol qual_Type sym_ASTRef TYPEREF qual_Type possiblySigned_NameRef SUPERtype Length this_Type underlying_Type @@ -135,9 +137,14 @@ Standard-Section: "ASTs" Tree* ANDtype Length left_Type right_Type ORtype Length left_Type right_Type BYNAMEtype Length underlying_Type + POLYtype Length result_Type NamesTypes // needed for refinements + METHODtype Length result_Type NamesTypes // needed for refinements + PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements NOTYPE NOPREFIX SHARED type_ASTRef + NamesTypes = ParamType* + NameType = paramName_NameRef typeOrBounds_ASTRef Modifier = PRIVATE INTERNAL // package private @@ -189,10 +196,10 @@ object PickleFormat { final val header = "5CA1AB1F" final val MajorVersion = 0 - final val MinorVersion = 1 - + final val MinorVersion = 2 + // Name tags - + final val UTF8 = 1 final val QUALIFIED = 2 final val SIGNED = 3 @@ -200,7 +207,7 @@ object PickleFormat { final val MODULECLASS = 5 final val SUPERACCESSOR = 6 final val DEFAULTGETTER = 7 - + // AST tags final val EMPTYTREE = 0 @@ -241,20 +248,22 @@ object PickleFormat { final val SHARED = 96 final val TERMREFdirect = 97 final val TYPEREFdirect = 98 - final val BYTEconst = 99 - final val BYTEneg = 100 - final val SHORTconst = 101 - final val SHORTneg = 102 - final val CHARconst = 103 - final val INTconst = 104 - final val INTneg = 105 - final val LONGconst = 106 - final val LONGneg = 107 - final val FLOATconst = 108 - final val DOUBLEconst = 109 - final val STRINGconst = 110 - final val PRIVATEqualified = 111 - final val PROTECTEDqualified = 112 + final val TERMREFstatic = 99 + final val TYPEREFstatic = 100 + final val BYTEconst = 101 + final val BYTEneg = 102 + final val SHORTconst = 103 + final val SHORTneg = 104 + final val CHARconst = 105 + final val INTconst = 106 + final val INTneg = 107 + final val LONGconst = 108 + final val LONGneg = 109 + final val FLOATconst = 110 + final val DOUBLEconst = 111 + final val STRINGconst = 112 + final val PRIVATEqualified = 113 + final val PROTECTEDqualified = 114 final val SELECT = 128 final val TERMREFsymbol = 129 @@ -308,10 +317,137 @@ object PickleFormat { final val ANDtype = 203 final val ORtype = 204 final val BYNAMEtype = 205 - final val IMPLICITARG = 206 - + final val METHODtype = 206 + final val POLYtype = 207 + final val PARAMtype = 208 + final val IMPLICITARG = 209 + final val firstSimpleTreeTag = EMPTYTREE final val firstNatTreeTag = SHARED final val firstTreeNatTreeTag = SELECT final val firstLengthTreeTag = PACKAGE + + def nameTagToString(tag: Int): String = tag match { + case UTF8 => "UTF8" + case QUALIFIED => "QUALIFIED" + case SIGNED => "SIGNED" + case EXPANDED => "EXPANDED" + case MODULECLASS => "MODULECLASS" + case SUPERACCESSOR => "SUPERACCESSOR" + case DEFAULTGETTER => "DEFAULTGETTER" + } + + def 
astTagToString(tag: Int): String = tag match { + case EMPTYTREE => "EMPTYTREE" + case NOTYPE => "NOTYPE" + case NOPREFIX => "NOPREFIX" + case UNITconst => "UNITconst" + case FALSEconst => "FALSEconst" + case TRUEconst => "TRUEconst" + case NULLconst => "NULLconst" + case PRIVATE => "PRIVATE" + case INTERNAL => "INTERNAL" + case PROTECTED => "PROTECTED" + case ABSTRACT => "ABSTRACT" + case FINAL => "FINAL" + case SEALED => "SEALED" + case CASE => "CASE" + case IMPLICIT => "IMPLICIT" + case LAZY => "LAZY" + case OVERRIDE => "OVERRIDE" + case INLINE => "INLINE" + case ABSOVERRIDE => "ABSOVERRIDE" + case STATIC => "STATIC" + case MODULE => "MODULE" + case LOCAL => "LOCAL" + case SYNTHETIC => "SYNTHETIC" + case ARTIFACT => "ARTIFACT" + case MUTABLE => "MUTABLE" + case LABEL => "LABEL" + case FIELDaccessor => "FIELDaccessor" + case PARAMaccessor => "PARAMaccessor" + case CASEaccessor => "CASEaccessor" + case COVARIANT => "COVARIANT" + case CONTRAVARIANT => "CONTRAVARIANT" + case SCALA2X => "SCALA2X" + case DEFAULTparameterized => "DEFAULTparameterized" + case DEFAULTinit => "DEFAULTinit" + + case SHARED => "SHARED" + case TERMREFdirect => "TERMREFdirect" + case TYPEREFdirect => "TYPEREFdirect" + case TERMREFstatic => "TERMREFstatic" + case TYPEREFstatic => "TYPEREFstatic" + case BYTEconst => "BYTEconst" + case BYTEneg => "BYTEneg" + case SHORTconst => "SHORTconst" + case SHORTneg => "SHORTneg" + case CHARconst => "CHARconst" + case INTconst => "INTconst" + case INTneg => "INTneg" + case LONGconst => "LONGconst" + case LONGneg => "LONGneg" + case FLOATconst => "FLOATconst" + case DOUBLEconst => "DOUBLEconst" + case STRINGconst => "STRINGconst" + case PRIVATEqualified => "PRIVATEqualified" + case PROTECTEDqualified => "PROTECTEDqualified" + + case SELECT => "SELECT" + case TERMREFsymbol => "TERMREFsymbol" + case TERMREF => "TERMREF" + case TYPEREFsymbol => "TYPEREFsymbol" + case TYPEREF => "TYPEREF" + + case PACKAGE => "PACKAGE" + case VALDEF => "VALDEF" + case DEFDEF => "DEFDEF" + case TYPEDEF => "TYPEDEF" + case IMPORT => "IMPORT" + case TYPEPARAM => "TYPEPARAM" + case PARAMS => "PARAMS" + case PARAM => "PARAM" + case IMPORTED => "IMPORTED" + case RENAMED => "RENAMED" + case APPLY => "APPLY" + case TYPEAPPLY => "TYPEAPPLY" + case NEW => "NEW" + case PAIR => "PAIR" + case TYPED => "TYPED" + case NAMEDARG => "NAMEDARG" + case ASSIGN => "ASSIGN" + case BLOCK => "BLOCK" + case IF => "IF" + case CLOSURE => "CLOSURE" + case MATCH => "MATCH" + case RETURN => "RETURN" + case TRY => "TRY" + case THROW => "THROW" + case SEQLITERAL => "SEQLITERAL" + case JSEQLITERAL => "JSEQLITERAL" + case BIND => "BIND" + case ALTERNATIVE => "ALTERNATIVE" + case UNAPPLY => "UNAPPLY" + case ANNOTATED => "ANNOTATED" + case CASEDEF => "CASEDEF" + case IMPLICITarg => "IMPLICITarg" + case TEMPLATE => "TEMPLATE" + case THIS => "THIS" + case SUPER => "SUPER" + case CLASSconst => "CLASSconst" + case ENUMconst => "ENUMconst" + case SUPERtype => "SUPERtype" + case SKOLEMtype => "SKOLEMtype" + case REFINEDtype => "REFINEDtype" + case APPLIEDtype => "APPLIEDtype" + case TYPEBOUNDS => "TYPEBOUNDS" + case TYPEALIAS => "TYPEALIAS" + case ANDtype => "ANDtype" + case ORtype => "ORtype" + case BYNAMEtype => "BYNAMEtype" + case POLYtype => "POLYtype" + case METHODtype => "METHODtype" + case PARAMtype => "PARAMtype" + case IMPLICITARG => "IMPLICITARG" + } } diff --git a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala index f6a7a17b4b06..0e44dbd76e83 100644 --- 
a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala @@ -12,9 +12,9 @@ object TastyBuffer { if (nat < 128) 1 else natSize(nat >>> 7) + 1 /** An address pointing to an index in a Tasty buffer's byte array */ - class Addr(val index: Int) extends AnyVal { - def -(delta: Int): Addr = new Addr(this.index - delta) - def +(delta: Int): Addr = new Addr(this.index + delta) + case class Addr(val index: Int) extends AnyVal { + def -(delta: Int): Addr = Addr(this.index - delta) + def +(delta: Int): Addr = Addr(this.index + delta) def relativeTo(base: Addr): Addr = this - base.index - AddrWidth } @@ -49,7 +49,7 @@ class TastyBuffer(initialSize: Int) { /** Write the first `n` bytes of `data`. */ def writeBytes(data: Array[Byte], n: Int): Unit = { - while (bytes.length < length + data.length) bytes = dble(bytes) + while (bytes.length < length + n) bytes = dble(bytes) Array.copy(data, 0, bytes, length, n) length += n } @@ -71,11 +71,11 @@ class TastyBuffer(initialSize: Int) { def writeNatPrefix(x: Long): Unit = { val y = x >>> 7 if (y != 0L) writeNatPrefix(y) - writeByte(((x & 0x7f) | 0x80).toInt) + writeByte((x & 0x7f).toInt) } val y = x >>> 7 if (y != 0L) writeNatPrefix(y) - writeByte((x & 0x7f).toInt) + writeByte(((x & 0x7f) | 0x80).toInt) } /** Write the `nbytes` least significant bytes of `x` in big endian format */ @@ -119,14 +119,14 @@ class TastyBuffer(initialSize: Int) { var idx = at.index do { b = bytes(idx) - x = (x << 7) + (b & 0x7f) + x = (x << 7) | (b & 0x7f) idx += 1 - } while ((b & 0x80) != 0L) + } while ((b & 0x80) == 0) x } /** The address (represented as a natural number) at address `at` */ - def getAddr(at: Addr) = new Addr(getNat(at)) + def getAddr(at: Addr) = Addr(getNat(at)) /** The smallest address equal to or following `at` which points to a non-zero byte */ final def skipZeroes(at: Addr): Addr = @@ -139,7 +139,7 @@ class TastyBuffer(initialSize: Int) { } /** The address referring to the end of data written so far */ - def currentAddr: Addr = new Addr(length) + def currentAddr: Addr = Addr(length) /** Reserve `AddrWidth` bytes to write an address into */ def reserveAddr(): Addr = { diff --git a/src/dotty/tools/dotc/core/pickling/TastyName.scala b/src/dotty/tools/dotc/core/pickling/TastyName.scala index 911d4c0cd0ee..581a8790152f 100644 --- a/src/dotty/tools/dotc/core/pickling/TastyName.scala +++ b/src/dotty/tools/dotc/core/pickling/TastyName.scala @@ -4,19 +4,26 @@ package core package pickling import core.Names.TermName +import collection.mutable abstract class TastyName object TastyName { - class Ref(val index: Int) extends AnyVal + case class NameRef(val index: Int) extends AnyVal case class Simple(name: TermName) extends TastyName - case class Qualified(qualified: Ref, selector: Ref) extends TastyName - case class Signed(original: Ref, params: List[Ref], result: Ref) extends TastyName - case class Expanded(original: Ref) extends TastyName - case class ModuleClass(module: Ref) extends TastyName - case class SuperAccessor(accessed: Ref) extends TastyName - case class DefaultGetter(method: Ref, num: Int) extends TastyName + case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName + case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName + case class Expanded(original: NameRef) extends TastyName + case class ModuleClass(module: NameRef) extends TastyName + case class SuperAccessor(accessed: NameRef) extends TastyName + case class DefaultGetter(method: NameRef, 
num: Int) extends TastyName + class Table extends (NameRef => TastyName) { + private val names = new mutable.ArrayBuffer[TastyName] + def add(name: TastyName) = names += name + def apply(ref: NameRef) = names(ref.index) + def contents: Iterable[TastyName] = names + } } diff --git a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala index 2ae6848e0788..32cc1ae43c0b 100644 --- a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala +++ b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala @@ -9,7 +9,7 @@ import TastyBuffer._ class TastyPickler { - private val sections = new mutable.ArrayBuffer[(TastyName.Ref, TastyBuffer)] + private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)] private val headerBuffer = { val buf = new TastyBuffer(16) @@ -24,7 +24,7 @@ class TastyPickler { def newSection(name: String, buf: TastyBuffer) = sections += ((nameBuffer.nameIndex(name), buf)) - def assembleParts: Array[Byte] = { + def assembleParts(): Array[Byte] = { def lengthWithLength(buf: TastyBuffer) = { buf.assemble() buf.length + natSize(buf.length) @@ -44,7 +44,7 @@ class TastyPickler { all.writeNat(buf.length) all.writeBytes(buf.bytes, buf.length) } - assert(all.length == totalSize && all.bytes.length == totalSize) + assert(all.length == totalSize && all.bytes.length == totalSize, s"totalSize = $totalSize, all.length = ${all.length}, all.bytes.length = ${all.bytes.length}") all.bytes } } diff --git a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala new file mode 100644 index 000000000000..3e583beb0df1 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala @@ -0,0 +1,107 @@ +package dotty.tools.dotc +package core +package pickling + +import Contexts._, Decorators._ +import printing.Texts._ +import TastyName._ +import TastyUnpickler._ + +class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) { + + val reader = new TastyReader(bytes) + val unpickler = new TastyUnpickler(reader) + import unpickler.{tastyName, unpickled} + + def nameToString(name: TastyName): String = name match { + case Simple(name) => name.toString + case Qualified(qual, name) => nameRefToString(qual) + "." 
+ nameRefToString(name) + case Signed(original, params, result) => + i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}" + case Expanded(original) => nameRefToString(original) + "/EXPANDED" + case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS" + case SuperAccessor(accessed) => nameRefToString(accessed) + "/SUPERACCESSOR" + case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num + } + + def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref)) + + def printNames() = + for ((name, idx) <- tastyName.contents.zipWithIndex) + println(f"$idx%4d: " + nameToString(name)) + + def printContents(): Unit = { + println("Names:") + printNames() + println("Trees:") + unpickled(new TreeUnpickler) + } + + class TreeUnpickler extends SectionUnpickler[Text]("ASTs") { + import PickleFormat._ + def unpickle(reader: TastyReader, tastyName: TastyName.Table): Text = { + import reader._ + val sb = new StringBuilder(s"${reader.end.index - reader.from.index} bytes of AST:") + var indent = 0 + def newLine() = print(f"\n ${currentAddr.index - from.index}%5d:" + " " * indent) + def printNat() = print(" " + readNat()) + def printName() = { + val idx = readNat() + print(" ") ;print(idx); print("["); print(nameRefToString(NameRef(idx))); print("]") + } + def printTree(): Unit = { + newLine() + val tag = readByte() + print(" ");print(astTagToString(tag)) + indent += 2 + if (tag >= firstLengthTreeTag) { + val len = readNat() + print(s"($len)") + val end = currentAddr + len + def printTrees() = until(end)(printTree()) + tag match { + case IMPORTED => + printName() + case RENAMED => + printName(); printName() + case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND | REFINEDtype => + printName(); printTrees() + case RETURN => + printNat(); printTrees() + case METHODtype | POLYtype => + printTree() + until(end) { printName(); printTree() } + case PARAMtype => + printNat(); printNat() + case _ => + printTrees() + } + if (currentAddr != end) { + println(s"incomplete read, current = $currentAddr, end = $end") + skipTo(currentAddr) + } + } + else if (tag >= firstTreeNatTreeTag) { + printTree() + newLine() + tag match { + case SELECT | TERMREF | TYPEREF => printName() + case _ => printNat() + } + } + else if (tag >= firstNatTreeTag) + tag match { + case TERMREFstatic | TYPEREFstatic | STRINGconst => printName() + case _ => printNat() + } + indent -= 2 + } + println(s"base = $currentAddr") + while (!atEnd) { + printTree() + newLine() + } + sb.toString + } + } +} \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/pickling/TastyReader.scala b/src/dotty/tools/dotc/core/pickling/TastyReader.scala new file mode 100644 index 000000000000..659eb8977322 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyReader.scala @@ -0,0 +1,78 @@ +package dotty.tools +package dotc +package core +package pickling + + +import TastyBuffer._ +import TastyName.NameRef +import collection.mutable + +/** A byte array bufferfer that can be filled with bytes or natural numbers in TASTY format, + * and that supports reading and patching addresses represented as natural numbers. + */ +class TastyReader(val bytes: Array[Byte], val from: Addr, val end: Addr) { + + def this(bytes: Array[Byte]) = this(bytes, Addr(0), Addr(bytes.length)) + + private var bp: Int = from.index + + def currentAddr: Addr = Addr(bp) + + def atEnd: Boolean = bp == end.index + + /** Read a byte of data. 
*/ + def readByte(): Int = { + val result = bytes(bp) & 0xff + bp += 1 + result + } + + /** Read the next `n` bytes of `data`. */ + def readBytes(n: Int): Array[Byte] = { + val result = new Array[Byte](n) + Array.copy(bytes, bp, result, 0, n) + bp += n + result + } + + /** Read a natural number fitting in an Int in big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def readNat(): Int = readLongNat.toInt + + /** Read a natural number fitting in a Long in big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def readLongNat(): Long = { + var b = 0L + var x = 0L + do { + b = bytes(bp) + x = (x << 7) | (b & 0x7f) + bp += 1 + } while ((b & 0x80) == 0) + x + } + + /** Read `nbytes` bytes in big endian format into a Long */ + def readRaw(nbytes: Int): Unit = { + def recur(x: Long, n: Int): Long = + if (n == 0) x else recur((x << 8) | (readByte & 0xff), n - 1) + recur(0, nbytes) + } + + def readNameRef() = NameRef(readNat()) + + def readEnd(): Addr = Addr(readNat() + bp) + + def skipTo(addr: Addr): Unit = + bp = addr.index + + def until[T](end: Addr)(op: => T): List[T] = { + val buf = new mutable.ListBuffer[T] + while (bp < end.index) buf += op + assert(bp == end.index) + buf.toList + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala new file mode 100644 index 000000000000..f107c7c83219 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala @@ -0,0 +1,84 @@ +package dotty.tools.dotc +package core +package pickling + +import scala.collection.mutable +import PickleFormat._ +import Names.{Name, termName} + +object TastyUnpickler { + class UnpickleException(msg: String) extends Exception(msg) + + abstract class SectionUnpickler[R](val name: String) { + def unpickle(reader: TastyReader, tastyName: TastyName.Table): R + } +} + +import TastyUnpickler._ + +class TastyUnpickler(reader: TastyReader) { + import reader._ + + private val sectionReader = new mutable.HashMap[String, TastyReader] + val tastyName = new TastyName.Table + + def check(cond: Boolean, msg: => String) = + if (!cond) throw new UnpickleException(msg) + + def readString(): String = { + val TastyName.Simple(name) = tastyName(readNameRef()) + name.toString + } + + def readName(): TastyName = { + import TastyName._ + val tag = readByte() + val length = readNat() + val start = currentAddr + val end = start + length + val result = tag match { + case UTF8 => + skipTo(end) + Simple(termName(bytes, start.index, length)) + case QUALIFIED => + Qualified(readNameRef(), readNameRef()) + case SIGNED => + val original = readNameRef() + val result = readNameRef() + val params = until(end)(readNameRef()) + Signed(original, params, result) + case EXPANDED => + Expanded(readNameRef()) + case MODULECLASS => + ModuleClass(readNameRef()) + case SUPERACCESSOR => + SuperAccessor(readNameRef()) + case DEFAULTGETTER => + DefaultGetter(readNameRef(), readNat()) + } + assert(currentAddr == end, s"bad name $result $start $currentAddr $end") + result + } + + locally { + val magic = readBytes(8) + check(magic.map(_.toChar).mkString == header, "not a TASTy file") + val major = readNat() + val minor = readNat() + check(major == MajorVersion && (major != 0 || minor == MinorVersion), + s"""TASTy signature has wrong version. 
+ | expected: $MajorVersion.$MinorVersion + | found : $major.$minor""".stripMargin) + until(readEnd()) { tastyName.add(readName()) } + while (!atEnd) { + val secName = readString() + val secEnd = readEnd() + sectionReader(secName) = new TastyReader(bytes, currentAddr, secEnd) + skipTo(secEnd) + } + } + + def unpickled[R](sec: SectionUnpickler[R]): Option[R] = + for (reader <- sectionReader.get(sec.name)) yield + sec.unpickle(reader, tastyName) +} diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala index 73b944b92185..a09b5011967e 100644 --- a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala @@ -16,7 +16,7 @@ class TreeBuffer extends TastyBuffer(1000000) { private var delta: Array[Int] = _ private var numOffsets = 0 - private def offset(i: Int): Addr = new Addr(offsets(i)) + private def offset(i: Int): Addr = Addr(offsets(i)) private def keepOffset(relative: Boolean): Unit = { if (numOffsets == offsets.length) { @@ -37,18 +37,20 @@ class TreeBuffer extends TastyBuffer(1000000) { def writeRef(target: Addr) = { keepOffset(relative = false) - writeNat(target.index) + fillAddr(reserveAddr(), target) } def fillRef(at: Addr, target: Addr, relative: Boolean) = { val addr = if (relative) target.relativeTo(at) else target fillAddr(at, addr) } - - def adjusted(x: Addr): Addr = { - val idx = bestFit(offsets, numOffsets, x.index - 1) - if (idx < 0) x else x - delta(idx) + + def deltaAt(at: Addr): Int = { + val idx = bestFit(offsets, numOffsets, at.index - 1) + if (idx < 0) 0 else delta(idx) } + + def adjusted(x: Addr): Addr = x - deltaAt(x) private def computeDeltas() = { delta = new Array[Int](numOffsets) @@ -65,20 +67,23 @@ class TreeBuffer extends TastyBuffer(1000000) { } } - private def adjustedOffset(at: Addr, isRelative: Boolean): Addr = { + private def adjustedOffset(i: Int): Addr = { + val at = offset(i) val original = getAddr(at) - if (isRelative) { - val start = skipNat(at).index - adjusted(original + start) - start + if (isRelative(i)) { + val start = skipNat(at) + val len1 = original + delta(i) - deltaAt(original + start.index) + val len2 = adjusted(original + start.index) - adjusted(start).index + assert(len1 == len2, + s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") + len1 } else adjusted(original) } private def adjustOffsets(): Unit = { for (i <- 0 until numOffsets) { - val off = offset(i) - val original = getAddr(off) - val corrected = adjustedOffset(off, isRelative(i)) - fillAddr(off, corrected) + val corrected = adjustedOffset(i) + fillAddr(offset(i), corrected) } } @@ -87,7 +92,7 @@ class TreeBuffer extends TastyBuffer(1000000) { var lastDelta = 0 var i = 0 while (i < numOffsets) { - val corrected = adjustedOffset(offset(i), isRelative(i)) + val corrected = adjustedOffset(i) lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index) delta1(i) = lastDelta i += 1 @@ -104,16 +109,19 @@ class TreeBuffer extends TastyBuffer(1000000) { var start = 0 var i = 0 var wasted = 0 + def shift(end: Int) = + Array.copy(bytes, start, bytes, start - lastDelta, end - start) while (i < numOffsets) { val next = offsets(i) - Array.copy(bytes, start, bytes, start - lastDelta, next - start) + shift(next) start = next + delta(i) - lastDelta - val pastZeroes = skipZeroes(new Addr(next)).index + val pastZeroes = skipZeroes(Addr(next)).index assert(pastZeroes >= start, s"something's wrong: eliminated non-zero") wasted += (pastZeroes - start) 
lastDelta = delta(i) i += 1 } + shift(length) length -= lastDelta wasted } @@ -121,12 +129,16 @@ class TreeBuffer extends TastyBuffer(1000000) { override def assemble(): Unit = { val origLength = length computeDeltas() - adjustOffsets() - if (false) { + //println(s"offsets: ${offsets.take(numOffsets).deep}") + //println(s"deltas: ${delta.take(numOffsets).deep}") + if (true) { var saved = 0 - do saved = adjustDeltas() - while (saved > 0 && length / saved < 100) + do { + saved = adjustDeltas() + println(s"adjusting deltas, saved = $saved") + } while (saved > 0 && length / saved < 100) } + adjustOffsets() val wasted = compress() println(s"original length: $origLength, compressed to: $length, wasted: $wasted") } diff --git a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala index 8c92e2ed83b1..cc5a8b8f25fa 100644 --- a/src/dotty/tools/dotc/core/pickling/TreePickler.scala +++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala @@ -3,7 +3,6 @@ package dotc package core package pickling -import util.Util.{bestFit, dble} import ast.Trees._ import PickleFormat._ import core._ @@ -55,6 +54,10 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { def pickle(tree: Tree)(implicit ctx: Context) = { + def qualifiedName(sym: Symbol): TastyName = + if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName) + else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name)) + def pickleConstant(c: Constant): Unit = { def pickleNum(nonNegTag: Int, negTag: Int) = { val x = c.longValue @@ -103,12 +106,13 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { } } - def pickleType(tpe: Type): Unit = { + def pickleType(tpe0: Type, richTypes: Boolean = false): Unit = { + val tpe = tpe0.stripTypeVar val prev = pickledTypes.get(tpe) if (prev == null) { val addr = currentAddr - pickleNewType(tpe) pickledTypes.put(tpe, addr) + pickleNewType(tpe, richTypes) } else { writeByte(SHARED) @@ -116,10 +120,15 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { } } - def pickleNewType(tpe: Type)= tpe match { - case ConstantType(value) => pickleConstant(value) + def pickleNewType(tpe: Type, richTypes: Boolean): Unit = tpe match { + case ConstantType(value) => + pickleConstant(value) case tpe: WithFixedSym => - if (tpe.prefix == NoPrefix) { + if (tpe.symbol.isStatic) { + writeByte(if (tpe.isType) TYPEREFstatic else TERMREFstatic) + pickleName(qualifiedName(tpe.symbol)) + } + else if (tpe.prefix == NoPrefix) { writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) pickleSym(tpe.symbol) } @@ -135,7 +144,7 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { pickleType(tpe.prefix); pickleName(tpe.name) case tpe: ThisType => writeByte(THIS) - pickleType(tpe.tref) + withLength { pickleType(tpe.tref) } case tpe: SuperType => writeByte(SUPERtype) withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)} @@ -146,11 +155,11 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { val args = tpe.argInfos(interpolate = false) if (args.isEmpty) { writeByte(REFINEDtype) - withLength { pickleName(tpe.refinedName); pickleType(tpe.refinedInfo) } + withLength { pickleName(tpe.refinedName); pickleType(tpe.refinedInfo, richTypes = true) } } else { writeByte(APPLIEDtype) - withLength { pickleType(tpe.withoutArgs(args)); args.foreach(pickleType) } + withLength { pickleType(tpe.withoutArgs(args)); args.foreach(pickleType(_)) } } case tpe: TypeAlias => 
writeByte(TYPEALIAS) @@ -167,22 +176,56 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { case tpe: ExprType => writeByte(BYNAMEtype) withLength { pickleType(tpe.underlying) } + case tpe: MethodType if richTypes => + writeByte(METHODtype) + pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes) + case tpe: PolyType if richTypes => + writeByte(POLYtype) + pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramBounds) + case tpe: PolyParam => + if (!pickleParamType(tpe)) + // TODO figure out why this case arises in e.g. pickling AbstractFileReader. + ctx.typerState.constraint.entry(tpe) match { + case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes) + } + case tpe: MethodParam => + assert(pickleParamType(tpe), "method parameter in wrong position") case NoType => writeByte(NOTYPE) // case NoPrefix => // not sure we need this! // writeByte(NOPREFIX) } + def pickleMethodic(result: Type, names: List[Name], types: List[Type]) = + withLength { + pickleType(result, richTypes = true) + (names, types).zipped.foreach { (name, tpe) => + pickleName(name); pickleType(tpe) + } + } + + def pickleParamType(tpe: ParamType): Boolean = { + val binder = pickledTypes.get(tpe.binder) + val pickled = binder != null + if (pickled) { + writeByte(PARAMtype) + withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) } + } + pickled + } + def pickleTpt(tpt: Tree): Unit = pickleType(tpt.tpe) // TODO correlate with original when generating positions def pickleTreeIfNonEmpty(tree: Tree): Unit = if (!tree.isEmpty) pickleTree(tree) - def pickleTree(tree: Tree): Unit = tree match { + def pickleTree(tree: Tree): Unit = { + tree match { case Ident(_) | This(_) => pickleType(tree.tpe) case Select(qual, name) => writeByte(SELECT) + pickleTree(qual) val sig = tree.tpe.signature if (sig == Signature.NotAMethod) pickleName(name) else pickleNameAndSig(name, sig) @@ -271,26 +314,29 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { patterns.foreach(pickleTree) } case tree: ValDef => - pickleDef(VALDEF, tree.symbol, tree.rhs) + pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs) case tree: DefDef => def pickleParams = { - for (tparam <- tree.tparams) pickleDef(TYPEPARAM, tparam.symbol, EmptyTree) + for (tparam <- tree.tparams) + pickleDef(TYPEPARAM, tparam.symbol, tparam.rhs, EmptyTree) for (vparams <- tree.vparamss) { writeByte(PARAMS) withLength { - for (vparam <- vparams) pickleDef(PARAM, vparam.symbol, EmptyTree) + for (vparam <- vparams) + pickleDef(PARAM, vparam.symbol, vparam.tpt, EmptyTree) } } } - pickleDef(DEFDEF, tree.symbol, tree.rhs, pickleParams) + pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleParams) case tree: TypeDef => - pickleDef(TYPEDEF, tree.symbol, tree.rhs) + pickleDef(TYPEDEF, tree.symbol, tree.rhs, EmptyTree) case tree: Template => + registerDef(tree.symbol) writeByte(TEMPLATE) withLength { tree.parents.foreach(pickleTree) if (!tree.self.isEmpty) - pickleDef(PARAM, tree.self.symbol, EmptyTree) + pickleDef(PARAM, tree.self.symbol, tree.self.tpt, EmptyTree) pickleTreeIfNonEmpty(tree.constr) tree.body.foreach(pickleTree) } @@ -313,16 +359,21 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { case Annotated(annot, arg) => writeByte(ANNOTATED) withLength { pickleTree(annot); pickleTree(arg) } - } + case EmptyTree => + writeByte(EMPTYTREE) + }} - def pickleDef(tag: Int, sym: Symbol, rhs: Tree, pickleParams: => Unit = ()) = { + def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree, pickleParams: => 
Unit = ()) = { registerDef(sym) writeByte(tag) withLength { pickleName(sym.name) pickleParams - if (tag != TYPEDEF) pickleType(sym.info.finalResultType) - if (tag != PARAM && tag != TYPEPARAM) pickleTree(rhs) + tpt match { + case tpt: TypeTree => pickleTpt(tpt) + case _ => pickleTree(tpt) + } + if (tag == VALDEF || tag == DEFDEF) pickleTree(rhs) pickleModifiers(sym) } } @@ -367,5 +418,6 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { } pickleTree(tree) + assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, %") } } diff --git a/src/dotty/tools/dotc/transform/Pickler.scala b/src/dotty/tools/dotc/transform/Pickler.scala new file mode 100644 index 000000000000..644a66784f9b --- /dev/null +++ b/src/dotty/tools/dotc/transform/Pickler.scala @@ -0,0 +1,29 @@ +package dotty.tools.dotc +package transform + +import core._ +import TreeTransforms._ +import Contexts.Context +import Decorators._ +import pickling._ + +/** This miniphase pickles trees */ +class Pickler extends MiniPhaseTransform { thisTransform => + import ast.tpd._ + + override def phaseName: String = "pickler" + + + override def transformUnit(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = { + val pickler = new TastyPickler + new TreePickler(pickler, picklePositions = false).pickle(tree) + val bytes = pickler.assembleParts() + def rawBytes = + bytes.iterator.grouped(10).toList.zipWithIndex.map { + case (row, i) => s"${i}0: ${row.mkString(" ")}" + } + //println(s"written:\n${rawBytes.mkString("\n")}") + new TastyPrinter(bytes).printContents() + tree + } +} \ No newline at end of file From c312796b3cb591131c4654e8bd4922a9eef1fb3c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 11:38:41 +0100 Subject: [PATCH 08/22] Avoid pickling for Java-parsed compilation units. Their trees make no sense; can't be pickled. --- src/dotty/tools/dotc/transform/Pickler.scala | 21 ++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/transform/Pickler.scala b/src/dotty/tools/dotc/transform/Pickler.scala index 644a66784f9b..4d2e416449ae 100644 --- a/src/dotty/tools/dotc/transform/Pickler.scala +++ b/src/dotty/tools/dotc/transform/Pickler.scala @@ -13,17 +13,18 @@ class Pickler extends MiniPhaseTransform { thisTransform => override def phaseName: String = "pickler" - override def transformUnit(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = { - val pickler = new TastyPickler - new TreePickler(pickler, picklePositions = false).pickle(tree) - val bytes = pickler.assembleParts() - def rawBytes = - bytes.iterator.grouped(10).toList.zipWithIndex.map { - case (row, i) => s"${i}0: ${row.mkString(" ")}" - } - //println(s"written:\n${rawBytes.mkString("\n")}") - new TastyPrinter(bytes).printContents() + if (!ctx.compilationUnit.isJava) { + val pickler = new TastyPickler + new TreePickler(pickler, picklePositions = false).pickle(tree) + val bytes = pickler.assembleParts() + def rawBytes = + bytes.iterator.grouped(10).toList.zipWithIndex.map { + case (row, i) => s"${i}0: ${row.mkString(" ")}" + } + //println(s"written:\n${rawBytes.mkString("\n")}") + new TastyPrinter(bytes).printContents() + } tree } } \ No newline at end of file From 09e1323db55738e043c10ea480117568f46d2b2b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 12:10:53 +0100 Subject: [PATCH 09/22] Eliminate raw numbers from Tasy format They are an irregularity and don't buy us much. 
Might as well pickle these numbers as Nats/Longs. --- .../tools/dotc/core/pickling/PickleFormat.scala | 4 ++-- .../tools/dotc/core/pickling/TastyBuffer.scala | 13 +------------ .../tools/dotc/core/pickling/TreePickler.scala | 4 ++-- 3 files changed, 5 insertions(+), 16 deletions(-) diff --git a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala index 6769cd016c23..338342459b79 100644 --- a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala +++ b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala @@ -113,8 +113,8 @@ Standard-Section: "ASTs" Tree* INTneg NegNat LONGconst LongNat LONGneg NegLongNat - FLOATconst FullInt - DOUBLEconst FullLong + FLOATconst LongNat + DOUBLEconst LongNat STRINGconst NameRef NULLconst CLASSconst Length Type diff --git a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala index 0e44dbd76e83..ba033461e182 100644 --- a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala @@ -61,8 +61,7 @@ class TastyBuffer(initialSize: Int) { writeLongNat(x.toLong & 0x00000000FFFFFFFFL) /** - * Like writeNat, but for longs. This is not the same as - * writeRaw, which writes in base 256. Note that the + * Like writeNat, but for longs. Note that the * binary representation of LongNat is identical to Nat * if the long value is in the range Int.MIN_VALUE to * Int.MAX_VALUE. @@ -77,16 +76,6 @@ class TastyBuffer(initialSize: Int) { if (y != 0L) writeNatPrefix(y) writeByte(((x & 0x7f) | 0x80).toInt) } - - /** Write the `nbytes` least significant bytes of `x` in big endian format */ - def writeRaw(x: Long, nbytes: Int): Unit = { - def recur(x: Long, n: Int): Unit = - if (n > 0) { - recur(x >>> 8, n - 1) - writeByte((x & 0xff).toInt) - } - recur(x, nbytes) - } // -- Address handling -------------------------------------------- diff --git a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala index cc5a8b8f25fa..b3f22804034a 100644 --- a/src/dotty/tools/dotc/core/pickling/TreePickler.scala +++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala @@ -88,10 +88,10 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { pickleNum(LONGconst, LONGneg) case FloatTag => writeByte(FLOATconst) - writeRaw(java.lang.Float.floatToRawIntBits(c.floatValue), 4) + writeNat(java.lang.Float.floatToRawIntBits(c.floatValue)) case DoubleTag => writeByte(DOUBLEconst) - writeRaw(java.lang.Double.doubleToRawLongBits(c.doubleValue), 8) + writeLongNat(java.lang.Double.doubleToRawLongBits(c.doubleValue)) case StringTag => writeByte(STRINGconst) writeNat(nameIndex(c.stringValue).index) From adfa69a79ca93e9dbc0cdd2105a48caa5b8c460e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 12:40:12 +0100 Subject: [PATCH 10/22] Changes to Tasty format: 1) IDENT nodes if types are not termrefs. (otherwise we lose information) 2) PRIVATEqualified and PROTECTDqualified always have a type argument Also, Pickler guards against previously encountered errors: - orphan parameters - trying to pickle packages as internal symbols. Plus some small polishings. 
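The "orphan parameters" guard can be pictured with the small standalone sketch below (illustrative names only, not the pickler's own types): a method or poly parameter is pickled as a PARAMtype node, i.e. a reference to the address at which its binder was pickled plus the parameter's index, so a parameter whose binder was never pickled has nothing to refer to and has to be rejected.

    import scala.collection.mutable

    final class BinderSketch(val paramNames: List[String])
    final case class ParamRefSketch(binder: BinderSketch, paramNum: Int)

    final class ParamPicklerSketch {
      private val pickledAt = mutable.HashMap.empty[BinderSketch, Int]  // binder -> buffer address

      def registerBinder(b: BinderSketch, addr: Int): Unit = pickledAt(b) = addr

      // Some((binderAddr, paramNum)) if the reference can be written as PARAMtype,
      // None if the parameter is an orphan, i.e. its binder was never pickled.
      def pickleParam(p: ParamRefSketch): Option[(Int, Int)] =
        pickledAt.get(p.binder).map(addr => (addr, p.paramNum))
    }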
--- .../dotc/core/pickling/PickleFormat.scala | 30 ++++++------- .../dotc/core/pickling/TastyPrinter.scala | 2 +- .../dotc/core/pickling/TreePickler.scala | 44 ++++++++++++++----- 3 files changed, 49 insertions(+), 27 deletions(-) diff --git a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala index 338342459b79..24b5169e7797 100644 --- a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala +++ b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala @@ -17,9 +17,6 @@ Micro-syntax: Nat = LongNat // value fits in an Int without overflow Digit = 0 | ... | 127 StopDigit = 128 | ... | 255 // value = digit - 128 - FullInt = Byte Byte Byte Byte - FullLong = Byte Byte Byte Byte Byte Byte Byte Byte - Byte - 0 | ... | 255 Macro-format: @@ -62,6 +59,7 @@ Standard-Section: "ASTs" Tree* RENAMED Length from_NameRef to_NameRef Term = Path + IDENT Type NameRef SELECT qual_Term possiblySigned_NameRef SUPER Length this_Term mixinTrait_Type? APPLY Length fn_Term arg_Term* @@ -149,8 +147,8 @@ Standard-Section: "ASTs" Tree* Modifier = PRIVATE INTERNAL // package private PROTECTED - PRIVATEqualified qualifier_ASTRef // will be dropped - PROTECTEDqualified qualifier_ASTRef // will be dropped + PRIVATEqualified Length qualifier_Type // will be dropped + PROTECTEDqualified Length qualifier_Type // will be dropped ABSTRACT FINAL SEALED @@ -262,14 +260,13 @@ object PickleFormat { final val FLOATconst = 110 final val DOUBLEconst = 111 final val STRINGconst = 112 - final val PRIVATEqualified = 113 - final val PROTECTEDqualified = 114 - final val SELECT = 128 - final val TERMREFsymbol = 129 - final val TERMREF = 130 - final val TYPEREFsymbol = 131 - final val TYPEREF = 132 + final val IDENT = 128 + final val SELECT = 129 + final val TERMREFsymbol = 130 + final val TERMREF = 131 + final val TYPEREFsymbol = 132 + final val TYPEREF = 133 final val PACKAGE = 160 final val VALDEF = 161 @@ -321,10 +318,12 @@ object PickleFormat { final val POLYtype = 207 final val PARAMtype = 208 final val IMPLICITARG = 209 + final val PRIVATEqualified = 210 + final val PROTECTEDqualified = 211 final val firstSimpleTreeTag = EMPTYTREE final val firstNatTreeTag = SHARED - final val firstTreeNatTreeTag = SELECT + final val firstTreeNatTreeTag = IDENT final val firstLengthTreeTag = PACKAGE def nameTagToString(tag: Int): String = tag match { @@ -390,9 +389,8 @@ object PickleFormat { case FLOATconst => "FLOATconst" case DOUBLEconst => "DOUBLEconst" case STRINGconst => "STRINGconst" - case PRIVATEqualified => "PRIVATEqualified" - case PROTECTEDqualified => "PROTECTEDqualified" + case IDENT => "IDENT" case SELECT => "SELECT" case TERMREFsymbol => "TERMREFsymbol" case TERMREF => "TERMREF" @@ -449,5 +447,7 @@ object PickleFormat { case METHODtype => "METHODtype" case PARAMtype => "PARAMtype" case IMPLICITARG => "IMPLICITARG" + case PRIVATEqualified => "PRIVATEqualified" + case PROTECTEDqualified => "PROTECTEDqualified" } } diff --git a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala index 3e583beb0df1..58e2499b380b 100644 --- a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala +++ b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala @@ -85,7 +85,7 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) { printTree() newLine() tag match { - case SELECT | TERMREF | TYPEREF => printName() + case IDENT | SELECT | TERMREF | TYPEREF => printName() case _ => printNat() } } diff --git 
a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala index b3f22804034a..662637026f12 100644 --- a/src/dotty/tools/dotc/core/pickling/TreePickler.scala +++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala @@ -44,11 +44,12 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { pickleName(TastyName.Signed(nameIndex(name), params.map(nameIndex), nameIndex(result))) } - private def pickleSym(sym: Symbol) = symRefs.get(sym) match { + private def pickleSym(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match { case Some(label) => writeRef(label) case None => val ref = reserveRef(relative = false) + assert(!sym.is(Flags.Package), sym) forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil) } @@ -106,7 +107,7 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { } } - def pickleType(tpe0: Type, richTypes: Boolean = false): Unit = { + def pickleType(tpe0: Type, richTypes: Boolean = false): Unit = try { val tpe = tpe0.stripTypeVar val prev = pickledTypes.get(tpe) if (prev == null) { @@ -118,6 +119,10 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { writeByte(SHARED) writeRef(prev.asInstanceOf[Addr]) } + } catch { + case ex: AssertionError => + println(i"error when pickling type $tpe0") + throw ex } def pickleNewType(tpe: Type, richTypes: Boolean): Unit = tpe match { @@ -163,16 +168,16 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { } case tpe: TypeAlias => writeByte(TYPEALIAS) - withLength { pickleType(tpe.alias) } + withLength { pickleType(tpe.alias, richTypes) } case tpe: TypeBounds => writeByte(TYPEBOUNDS) - withLength { pickleType(tpe.lo); pickleType(tpe.hi) } + withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) } case tpe: AnnotatedType => writeByte(ANNOTATED) - withLength { pickleTree(tpe.annot.tree); pickleType(tpe.tpe) } + withLength { pickleTree(tpe.annot.tree); pickleType(tpe.tpe, richTypes) } case tpe: AndOrType => writeByte(if (tpe.isAnd) ANDtype else ORtype) - withLength { pickleType(tpe.tp1); pickleType(tpe.tp2) } + withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) } case tpe: ExprType => writeByte(BYNAMEtype) withLength { pickleType(tpe.underlying) } @@ -187,9 +192,12 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { // TODO figure out why this case arises in e.g. pickling AbstractFileReader. ctx.typerState.constraint.entry(tpe) match { case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes) + case _ => assert(false, s"orphan poly parameter: $tpe") } case tpe: MethodParam => - assert(pickleParamType(tpe), "method parameter in wrong position") + assert(pickleParamType(tpe), s"orphan method parameter: $tpe") + case tpe: LazyRef => + pickleType(tpe.ref) case NoType => writeByte(NOTYPE) // case NoPrefix => // not sure we need this! 
@@ -219,9 +227,17 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { def pickleTreeIfNonEmpty(tree: Tree): Unit = if (!tree.isEmpty) pickleTree(tree) - def pickleTree(tree: Tree): Unit = { + def pickleTree(tree: Tree): Unit = try { tree match { - case Ident(_) | This(_) => + case Ident(name) => + tree.tpe match { + case tp: TermRef => pickleType(tp) + case _ => + writeByte(IDENT) + pickleType(tree.tpe) + pickleName(name) + } + case This(_) => pickleType(tree.tpe) case Select(qual, name) => writeByte(SELECT) @@ -239,7 +255,7 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { writeByte(TYPEAPPLY) withLength { pickleTree(fun) - args.foreach(pickleTree) + args.foreach(pickleTpt) } case Literal(const) => pickleConstant(const) @@ -362,6 +378,12 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { case EmptyTree => writeByte(EMPTYTREE) }} + catch { + case ex: AssertionError => + println(i"error when pickling tree $tree") + throw ex + } + def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree, pickleParams: => Unit = ()) = { registerDef(sym) @@ -384,7 +406,7 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { val privateWithin = sym.privateWithin if (privateWithin.exists) { writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified) - pickleSym(privateWithin) + withLength { pickleType(privateWithin.typeRef) } } if (flags is Private) writeByte(PRIVATE) if (flags is Protected) if (!privateWithin.exists) writeByte(PROTECTED) From 87aada4ea5930c4f44933c8814329f849034cc7e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 12:43:11 +0100 Subject: [PATCH 11/22] Turn on full compression of Tasty tree buffers. --- .../tools/dotc/core/pickling/TreeBuffer.scala | 35 ++++++++++++++----- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala index a09b5011967e..870e9772420b 100644 --- a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala @@ -28,6 +28,7 @@ class TreeBuffer extends TastyBuffer(1000000) { numOffsets += 1 } + /** Reserve space for a reference, to be adjusted later */ def reserveRef(relative: Boolean): Addr = { val addr = currentAddr keepOffset(relative) @@ -35,23 +36,28 @@ class TreeBuffer extends TastyBuffer(1000000) { addr } + /** Write reference right adjusted into freshly reserved field. 
*/ def writeRef(target: Addr) = { keepOffset(relative = false) fillAddr(reserveAddr(), target) } + /** Fill previously reserved field with a reference */ def fillRef(at: Addr, target: Addr, relative: Boolean) = { val addr = if (relative) target.relativeTo(at) else target fillAddr(at, addr) } + /** The amount by which the bytes at the given address are shifted under compression */ def deltaAt(at: Addr): Int = { val idx = bestFit(offsets, numOffsets, at.index - 1) if (idx < 0) 0 else delta(idx) } + /** The address to which `x` is translated under compression */ def adjusted(x: Addr): Addr = x - deltaAt(x) + /** Compute all shift-deltas */ private def computeDeltas() = { delta = new Array[Int](numOffsets) var lastDelta = 0 @@ -67,6 +73,7 @@ class TreeBuffer extends TastyBuffer(1000000) { } } + /** The absoluate or relative adjusted address at index `i` of `offsets` array*/ private def adjustedOffset(i: Int): Addr = { val at = offset(i) val original = getAddr(at) @@ -80,6 +87,7 @@ class TreeBuffer extends TastyBuffer(1000000) { } else adjusted(original) } + /** Adjust all offsets according to previously computed deltas */ private def adjustOffsets(): Unit = { for (i <- 0 until numOffsets) { val corrected = adjustedOffset(i) @@ -87,6 +95,10 @@ class TreeBuffer extends TastyBuffer(1000000) { } } + /** Adjust deltas to also take account references that will shrink (and thereby + * generate additional zeroes that can be skipped) due to previously + * computed adjustements. + */ private def adjustDeltas(): Int = { val delta1 = new Array[Int](delta.length) var lastDelta = 0 @@ -103,7 +115,8 @@ class TreeBuffer extends TastyBuffer(1000000) { delta = delta1 saved } - + + /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */ private def compress(): Int = { var lastDelta = 0 var start = 0 @@ -126,20 +139,24 @@ class TreeBuffer extends TastyBuffer(1000000) { wasted } + /** Final assembly, involving the following steps: + * - compute deltas + * - adjust deltas until additional savings are < 1% of total + * - adjust offsets according to the adjusted deltas + * - shrink buffer, skipping zeroes. + */ override def assemble(): Unit = { val origLength = length computeDeltas() //println(s"offsets: ${offsets.take(numOffsets).deep}") //println(s"deltas: ${delta.take(numOffsets).deep}") - if (true) { - var saved = 0 - do { - saved = adjustDeltas() - println(s"adjusting deltas, saved = $saved") - } while (saved > 0 && length / saved < 100) - } + var saved = 0 + do { + saved = adjustDeltas() + println(s"adjusting deltas, saved = $saved") + } while (saved > 0 && length / saved < 100) adjustOffsets() val wasted = compress() - println(s"original length: $origLength, compressed to: $length, wasted: $wasted") + println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now. } } From fceb8cbbdcca67fbed9cc06a3fe250c3a768ba1e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 12:43:53 +0100 Subject: [PATCH 12/22] Disable orphan checks and pickling for tests that currently fail because of tailrec. 
--- .../tools/dotc/transform/TreeChecker.scala | 2 +- test/dotc/tests.scala | 19 ++++++++++--------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/transform/TreeChecker.scala b/src/dotty/tools/dotc/transform/TreeChecker.scala index 7552f1f54020..f4c4bc7df909 100644 --- a/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -139,7 +139,7 @@ class TreeChecker { assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) tree1 } - checkNoOrphans(res.tpe) + if (!ctx.settings.Yskip.value.contains("pickler")) checkNoOrphans(res.tpe) phasesToCheck.foreach(_.checkPostCondition(res)) res } diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 2b917b79513f..589caf0e194c 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -27,6 +27,7 @@ class tests extends CompilerTest { val twice = List("#runs", "2", "-YnoDoubleBindings") val allowDeepSubtypes = defaultOptions diff List("-Yno-deep-subtypes") + val noPickling = allowDeepSubtypes ++ List("-Yskip:pickler") val posDir = "./tests/pos/" val posSpecialDir = "./tests/pos-special/" @@ -51,7 +52,7 @@ class tests extends CompilerTest { @Test def pos_nameddefaults() = compileFile(posDir, "nameddefaults") @Test def pos_desugar() = compileFile(posDir, "desugar") @Test def pos_sigs() = compileFile(posDir, "sigs") - @Test def pos_typers() = compileFile(posDir, "typers") + @Test def pos_typers() = compileFile(posDir, "typers")(noPickling) @Test def pos_typedidents() = compileFile(posDir, "typedIdents") @Test def pos_assignments() = compileFile(posDir, "assignments") @Test def pos_packageobject() = compileFile(posDir, "packageobject") @@ -61,14 +62,14 @@ class tests extends CompilerTest { @Test def pos_templateParents() = compileFile(posDir, "templateParents") @Test def pos_overloadedAccess = compileFile(posDir, "overloadedAccess") @Test def pos_approximateUnion = compileFile(posDir, "approximateUnion") - @Test def pos_tailcall = compileDir(posDir + "tailcall/") + @Test def pos_tailcall = compileDir(posDir + "tailcall/")(noPickling) @Test def pos_nullarify = compileFile(posDir, "nullarify", "-Ycheck:nullarify" :: Nil) @Test def pos_subtyping = compileFile(posDir, "subtyping") @Test def pos_t2613 = compileFile(posSpecialDir, "t2613")(allowDeepSubtypes) @Test def pos_packageObj = compileFile(posDir, "i0239") @Test def pos_anonClassSubtyping = compileFile(posDir, "anonClassSubtyping") - @Test def pos_all = compileFiles(posDir, failedOther) + @Test def pos_all = compileFiles(posDir, failedOther)(noPickling) @Test def new_all = compileFiles(newDir, twice) @@ -116,21 +117,21 @@ class tests extends CompilerTest { @Test def neg_escapingRefs = compileFile(negDir, "escapingRefs", xerrors = 2) @Test def dotc = compileDir(dotcDir + "tools/dotc", failedOther)(allowDeepSubtypes) - @Test def dotc_ast = compileDir(dotcDir + "tools/dotc/ast", failedOther) // similar to dotc_config + @Test def dotc_ast = compileDir(dotcDir + "tools/dotc/ast", failedOther)(noPickling) // similar to dotc_config @Test def dotc_config = compileDir(dotcDir + "tools/dotc/config", failedOther) // seems to mess up stack frames - @Test def dotc_core = compileDir(dotcDir + "tools/dotc/core", failedUnderscore)(allowDeepSubtypes) + @Test def dotc_core = compileDir(dotcDir + "tools/dotc/core", failedUnderscore)(noPickling) // fails due to This refference to a non-eclosing class. 
Need to check - @Test def dotc_core_pickling = compileDir(dotcDir + "tools/dotc/core/pickling", failedOther)(allowDeepSubtypes) // Cannot emit primitive conversion from V to Z + @Test def dotc_core_pickling = compileDir(dotcDir + "tools/dotc/core/pickling", failedOther)(noPickling) // Cannot emit primitive conversion from V to Z - @Test def dotc_transform = compileDir(dotcDir + "tools/dotc/transform", failedbyName) + @Test def dotc_transform = compileDir(dotcDir + "tools/dotc/transform", failedbyName)(noPickling) @Test def dotc_parsing = compileDir(dotcDir + "tools/dotc/parsing", failedOther) // Expected primitive types I - Ljava/lang/Object // Tried to return an object where expected type was Integer @Test def dotc_printing = compileDir(dotcDir + "tools/dotc/printing", twice) @Test def dotc_reporting = compileDir(dotcDir + "tools/dotc/reporting", twice) - @Test def dotc_typer = compileDir(dotcDir + "tools/dotc/typer", failedOther) // similar to dotc_config + @Test def dotc_typer = compileDir(dotcDir + "tools/dotc/typer", failedOther)(noPickling) // similar to dotc_config //@Test def dotc_util = compileDir(dotcDir + "tools/dotc/util") //fails inside ExtensionMethods with ClassCastException @Test def tools_io = compileDir(dotcDir + "tools/io", failedOther) // similar to dotc_config @@ -143,7 +144,7 @@ class tests extends CompilerTest { dotcDir + "tools/dotc/core/Types.scala", dotcDir + "tools/dotc/ast/Trees.scala", failedUnderscore.head, - "-Xprompt", + "-Xprompt", "-Yskip:pickler", "#runs", "2")) @Test def testIssue_34 = compileArgs(Array( From c4b87dd4b1c6a99899da10cf2773e593d5ca1867 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 12:44:07 +0100 Subject: [PATCH 13/22] Turn on pickling. --- src/dotty/tools/dotc/Compiler.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/Compiler.scala b/src/dotty/tools/dotc/Compiler.scala index 403aa1157152..5f8ba9755f9a 100644 --- a/src/dotty/tools/dotc/Compiler.scala +++ b/src/dotty/tools/dotc/Compiler.scala @@ -41,7 +41,7 @@ class Compiler { List(new FirstTransform, new SyntheticMethods), List(new SuperAccessors), - //List(new Pickler), // Pickler needs to come last in a group since it should not pickle trees generated later + List(new Pickler), // Pickler needs to come last in a group since it should not pickle trees generated later List(new RefChecks, new ElimRepeated, new ElimLocals, From dcfd6fe0d448d01627e639d5ca93547ab61fa413 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 12:50:05 +0100 Subject: [PATCH 14/22] Make pickling output printer-dependent. 
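This change relies on the printer idiom used for debug output in dotc; the sketch below gives only the assumed shape of that idiom (it is not the actual config.Printers code): the message is passed by name and the default no-op printer never evaluates it, so the call sites can stay in place at no cost unless a developer swaps the val.

    class PrinterSketch {
      def println(msg: => String): Unit = Console.println(msg)
    }

    object noPrinterSketch extends PrinterSketch {
      override def println(msg: => String): Unit = ()   // by-name argument is never evaluated
    }

    object PrintersSketch {
      // Flip this to `new PrinterSketch` to see the pickling diagnostics,
      // e.g. the "original length ... compressed to ... wasted" line from TreeBuffer.assemble.
      val pickling: PrinterSketch = noPrinterSketch
    }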
--- src/dotty/tools/dotc/config/Printers.scala | 1 + src/dotty/tools/dotc/core/pickling/TreeBuffer.scala | 3 ++- src/dotty/tools/dotc/transform/Pickler.scala | 4 ++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/config/Printers.scala b/src/dotty/tools/dotc/config/Printers.scala index f8d7f8de557a..d1738ee6fccc 100644 --- a/src/dotty/tools/dotc/config/Printers.scala +++ b/src/dotty/tools/dotc/config/Printers.scala @@ -30,4 +30,5 @@ object Printers { val config = noPrinter val transforms = noPrinter val cyclicErrors = noPrinter + val pickling = noPrinter } \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala index 870e9772420b..41a08d2ca2cd 100644 --- a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala @@ -5,6 +5,7 @@ package pickling import util.Util.{bestFit, dble} import TastyBuffer.{Addr, AddrWidth} +import config.Printers.pickling class TreeBuffer extends TastyBuffer(1000000) { @@ -157,6 +158,6 @@ class TreeBuffer extends TastyBuffer(1000000) { } while (saved > 0 && length / saved < 100) adjustOffsets() val wasted = compress() - println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now. + pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now. } } diff --git a/src/dotty/tools/dotc/transform/Pickler.scala b/src/dotty/tools/dotc/transform/Pickler.scala index 4d2e416449ae..c578b56c724c 100644 --- a/src/dotty/tools/dotc/transform/Pickler.scala +++ b/src/dotty/tools/dotc/transform/Pickler.scala @@ -6,6 +6,7 @@ import TreeTransforms._ import Contexts.Context import Decorators._ import pickling._ +import config.Printers /** This miniphase pickles trees */ class Pickler extends MiniPhaseTransform { thisTransform => @@ -22,8 +23,7 @@ class Pickler extends MiniPhaseTransform { thisTransform => bytes.iterator.grouped(10).toList.zipWithIndex.map { case (row, i) => s"${i}0: ${row.mkString(" ")}" } - //println(s"written:\n${rawBytes.mkString("\n")}") - new TastyPrinter(bytes).printContents() + if (Printers.pickling ne Printers.noPrinter) new TastyPrinter(bytes).printContents() } tree } From 70e3562f11c5023f9f1a25dbaf3808df50f0d288 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 15:28:23 +0100 Subject: [PATCH 15/22] Revert "Disable orphan checks and pickling for tests that currently fail because of tailrec." This reverts commit fceb8cbbdcca67fbed9cc06a3fe250c3a768ba1e. 
--- .../tools/dotc/transform/TreeChecker.scala | 2 +- test/dotc/tests.scala | 19 +++++++++---------- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/src/dotty/tools/dotc/transform/TreeChecker.scala b/src/dotty/tools/dotc/transform/TreeChecker.scala index f4c4bc7df909..7552f1f54020 100644 --- a/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -139,7 +139,7 @@ class TreeChecker { assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) tree1 } - if (!ctx.settings.Yskip.value.contains("pickler")) checkNoOrphans(res.tpe) + checkNoOrphans(res.tpe) phasesToCheck.foreach(_.checkPostCondition(res)) res } diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 589caf0e194c..2b917b79513f 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -27,7 +27,6 @@ class tests extends CompilerTest { val twice = List("#runs", "2", "-YnoDoubleBindings") val allowDeepSubtypes = defaultOptions diff List("-Yno-deep-subtypes") - val noPickling = allowDeepSubtypes ++ List("-Yskip:pickler") val posDir = "./tests/pos/" val posSpecialDir = "./tests/pos-special/" @@ -52,7 +51,7 @@ class tests extends CompilerTest { @Test def pos_nameddefaults() = compileFile(posDir, "nameddefaults") @Test def pos_desugar() = compileFile(posDir, "desugar") @Test def pos_sigs() = compileFile(posDir, "sigs") - @Test def pos_typers() = compileFile(posDir, "typers")(noPickling) + @Test def pos_typers() = compileFile(posDir, "typers") @Test def pos_typedidents() = compileFile(posDir, "typedIdents") @Test def pos_assignments() = compileFile(posDir, "assignments") @Test def pos_packageobject() = compileFile(posDir, "packageobject") @@ -62,14 +61,14 @@ class tests extends CompilerTest { @Test def pos_templateParents() = compileFile(posDir, "templateParents") @Test def pos_overloadedAccess = compileFile(posDir, "overloadedAccess") @Test def pos_approximateUnion = compileFile(posDir, "approximateUnion") - @Test def pos_tailcall = compileDir(posDir + "tailcall/")(noPickling) + @Test def pos_tailcall = compileDir(posDir + "tailcall/") @Test def pos_nullarify = compileFile(posDir, "nullarify", "-Ycheck:nullarify" :: Nil) @Test def pos_subtyping = compileFile(posDir, "subtyping") @Test def pos_t2613 = compileFile(posSpecialDir, "t2613")(allowDeepSubtypes) @Test def pos_packageObj = compileFile(posDir, "i0239") @Test def pos_anonClassSubtyping = compileFile(posDir, "anonClassSubtyping") - @Test def pos_all = compileFiles(posDir, failedOther)(noPickling) + @Test def pos_all = compileFiles(posDir, failedOther) @Test def new_all = compileFiles(newDir, twice) @@ -117,21 +116,21 @@ class tests extends CompilerTest { @Test def neg_escapingRefs = compileFile(negDir, "escapingRefs", xerrors = 2) @Test def dotc = compileDir(dotcDir + "tools/dotc", failedOther)(allowDeepSubtypes) - @Test def dotc_ast = compileDir(dotcDir + "tools/dotc/ast", failedOther)(noPickling) // similar to dotc_config + @Test def dotc_ast = compileDir(dotcDir + "tools/dotc/ast", failedOther) // similar to dotc_config @Test def dotc_config = compileDir(dotcDir + "tools/dotc/config", failedOther) // seems to mess up stack frames - @Test def dotc_core = compileDir(dotcDir + "tools/dotc/core", failedUnderscore)(noPickling) + @Test def dotc_core = compileDir(dotcDir + "tools/dotc/core", failedUnderscore)(allowDeepSubtypes) // fails due to This refference to a non-eclosing class. 
Need to check - @Test def dotc_core_pickling = compileDir(dotcDir + "tools/dotc/core/pickling", failedOther)(noPickling) // Cannot emit primitive conversion from V to Z + @Test def dotc_core_pickling = compileDir(dotcDir + "tools/dotc/core/pickling", failedOther)(allowDeepSubtypes) // Cannot emit primitive conversion from V to Z - @Test def dotc_transform = compileDir(dotcDir + "tools/dotc/transform", failedbyName)(noPickling) + @Test def dotc_transform = compileDir(dotcDir + "tools/dotc/transform", failedbyName) @Test def dotc_parsing = compileDir(dotcDir + "tools/dotc/parsing", failedOther) // Expected primitive types I - Ljava/lang/Object // Tried to return an object where expected type was Integer @Test def dotc_printing = compileDir(dotcDir + "tools/dotc/printing", twice) @Test def dotc_reporting = compileDir(dotcDir + "tools/dotc/reporting", twice) - @Test def dotc_typer = compileDir(dotcDir + "tools/dotc/typer", failedOther)(noPickling) // similar to dotc_config + @Test def dotc_typer = compileDir(dotcDir + "tools/dotc/typer", failedOther) // similar to dotc_config //@Test def dotc_util = compileDir(dotcDir + "tools/dotc/util") //fails inside ExtensionMethods with ClassCastException @Test def tools_io = compileDir(dotcDir + "tools/io", failedOther) // similar to dotc_config @@ -144,7 +143,7 @@ class tests extends CompilerTest { dotcDir + "tools/dotc/core/Types.scala", dotcDir + "tools/dotc/ast/Trees.scala", failedUnderscore.head, - "-Xprompt", "-Yskip:pickler", + "-Xprompt", "#runs", "2")) @Test def testIssue_34 = compileArgs(Array( From 91aeb5236cd9c581058f30954a728ffb7a7495b5 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 20:29:11 +0100 Subject: [PATCH 16/22] Fix serialization of Bind-defined types in type patterns. These are now represented as BIND nodes in pickled types. --- src/dotty/tools/dotc/core/Flags.scala | 5 +++- .../tools/dotc/core/pickling/TreeBuffer.scala | 2 +- .../dotc/core/pickling/TreePickler.scala | 23 +++++++++++++++---- src/dotty/tools/dotc/typer/Typer.scala | 3 ++- 4 files changed, 25 insertions(+), 8 deletions(-) diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala index 3a174d95f126..c2e7cd39983c 100644 --- a/src/dotty/tools/dotc/core/Flags.scala +++ b/src/dotty/tools/dotc/core/Flags.scala @@ -300,7 +300,7 @@ object Flags { /** Method is assumed to be stable */ final val Stable = termFlag(24, "") - + /** A case parameter accessor */ final val CaseAccessor = termFlag(25, "") @@ -318,6 +318,9 @@ object Flags { /** A method that has default params */ final val DefaultParameterized = termFlag(27, "") + /** A type that is defined by a type bind */ + final val BindDefinedType = typeFlag(27, "") + /** Symbol is initialized to the default value, e.g. 
var x: T = _ */ final val DefaultInit = termFlag(28, "") diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala index 41a08d2ca2cd..287e2f334b65 100644 --- a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala @@ -154,7 +154,7 @@ class TreeBuffer extends TastyBuffer(1000000) { var saved = 0 do { saved = adjustDeltas() - println(s"adjusting deltas, saved = $saved") + pickling.println(s"adjusting deltas, saved = $saved") } while (saved > 0 && length / saved < 100) adjustOffsets() val wasted = compress() diff --git a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala index 662637026f12..7855f5d3ea5b 100644 --- a/src/dotty/tools/dotc/core/pickling/TreePickler.scala +++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala @@ -129,17 +129,30 @@ class TreePickler(pickler: TastyPickler, picklePositions: Boolean) { case ConstantType(value) => pickleConstant(value) case tpe: WithFixedSym => - if (tpe.symbol.isStatic) { + val sym = tpe.symbol + if (sym.isStatic) { writeByte(if (tpe.isType) TYPEREFstatic else TERMREFstatic) - pickleName(qualifiedName(tpe.symbol)) + pickleName(qualifiedName(sym)) } else if (tpe.prefix == NoPrefix) { - writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) - pickleSym(tpe.symbol) + def pickleRef() = { + writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) + pickleSym(sym) + } + if (sym is Flags.BindDefinedType) { + registerDef(sym) + writeByte(BIND) + withLength { + pickleName(sym.name) + pickleType(sym.info) + pickleRef() + } + } + else pickleRef() } else { writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) - pickleType(tpe.prefix); pickleSym(tpe.symbol) + pickleType(tpe.prefix); pickleSym(sym) } case tpe: TermRefWithSignature => writeByte(TERMREF) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 7d1e950f4cf5..2eb9a0dee42c 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -834,7 +834,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def typedBind(tree: untpd.Bind, pt: Type)(implicit ctx: Context): Bind = track("typedBind") { val body1 = typed(tree.body, pt) typr.println(i"typed bind $tree pt = $pt bodytpe = ${body1.tpe}") - val sym = ctx.newSymbol(ctx.owner, tree.name, EmptyFlags, body1.tpe, coord = tree.pos) + val flags = if (tree.isType) BindDefinedType else EmptyFlags + val sym = ctx.newSymbol(ctx.owner, tree.name, flags, body1.tpe, coord = tree.pos) assignType(cpy.Bind(tree)(tree.name, body1), sym) } From 2d56519cfaac503351f5f32646ee2e1db9a3721a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 20:46:01 +0100 Subject: [PATCH 17/22] Make Definitions#rootPackage a root. Otherwise it is not static, and pickling the symbol leads astray. --- src/dotty/tools/dotc/core/SymDenotations.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index ebdbcb4f6eb0..ec3f551e354d 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -323,7 +323,7 @@ object SymDenotations { /** Is this symbol the root class or its companion object? 
*/ final def isRoot: Boolean = - (name.toTermName == nme.ROOT) && (owner eq NoSymbol) + (name.toTermName == nme.ROOT || name == nme.ROOTPKG) && (owner eq NoSymbol) /** Is this symbol the empty package class or its companion object? */ final def isEmptyPackage(implicit ctx: Context): Boolean = From c46d4b14bb56ea1819c47cc49851a6a4d2510021 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 12 Feb 2015 20:48:16 +0100 Subject: [PATCH 18/22] Change scheme of translating array creations new Array(...) The previous scheme generated too many newGenericArray expressions because at the time newArray was called, the type arguments were not yet determined. Furthermore, the type variables somehow did not have the right positions, which caused them not to be interpolated and led to orphan PolyParams. The new scheme converts the expression when the length parameter has been supplied and it fully determines the array type before converting. --- src/dotty/tools/dotc/typer/Applications.scala | 14 +++++++------- src/dotty/tools/dotc/typer/Typer.scala | 3 +-- tests/pos/new-array.scala | 3 ++- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index c012e8837718..8d6bff6de5ff 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -530,7 +530,7 @@ trait Applications extends Compatibility { self: Typer => if (proto.argsAreTyped) new ApplyToTyped(tree, fun1, funRef, proto.typedArgs, pt) else new ApplyToUntyped(tree, fun1, funRef, proto, pt)(argCtx) val result = app.result - ConstFold(result) + convertNewArray(ConstFold(result)) } { (failedVal, failedState) => val fun2 = tryInsertImplicitOnQualifier(fun1, proto) if (fun1 eq fun2) { @@ -596,14 +596,14 @@ trait Applications extends Compatibility { self: Typer => checkBounds(typedArgs, pt) case _ => } - convertNewArray( - assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)) + assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } - /** Rewrite `new Array[T]` trees to calls of newXYZArray methods. */ - def convertNewArray(tree: Tree)(implicit ctx: Context): Tree = tree match { - case TypeApply(tycon, targs) if tycon.symbol == defn.ArrayConstructor => - newArray(targs.head, tree.pos) + /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. 
*/
+  def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match {
+    case Apply(TypeApply(tycon, targ :: Nil), args) if tycon.symbol == defn.ArrayConstructor =>
+      fullyDefinedType(tree.tpe, "array", tree.pos)
+      tpd.cpy.Apply(tree)(newArray(targ, tree.pos), args)
     case _ => tree
   }
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index 2eb9a0dee42c..8e6741f6de42 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -1364,8 +1364,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
       if (pt.isInstanceOf[PolyProto]) tree
       else {
         val (_, tvars) = constrained(poly, tree)
-        convertNewArray(
-          adaptInterpolated(tree.appliedToTypes(tvars), pt, original))
+        adaptInterpolated(tree.appliedToTypes(tvars), pt, original)
       }
     case wtp =>
       pt match {
diff --git a/tests/pos/new-array.scala b/tests/pos/new-array.scala
index 9deb2330a694..98b8345a037a 100644
--- a/tests/pos/new-array.scala
+++ b/tests/pos/new-array.scala
@@ -6,9 +6,10 @@ object Test {
   val z = new Array[Unit](10)
 }
 object Test2 {
-  val w: Array[String] = new Array(10)
+  val w: Array[Any] = new Array(10)
   val x: Array[Int] = new Array(10)
   def f[T: reflect.ClassTag]: Array[T] = new Array(10)
   val y: Array[Any] = new Array(10)
   val z: Array[Unit] = new Array(10)
 }
+

From b021a47004babd85bab8858c3af3065d9a905c48 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Fri, 13 Feb 2015 11:45:25 +0100
Subject: [PATCH 19/22] Tightening of orphans tests.

Need to always follow TypeVariables, otherwise we will not detect orphan
parameters of uninstantiated type variables.
---
 src/dotty/tools/dotc/transform/TreeChecker.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/dotty/tools/dotc/transform/TreeChecker.scala b/src/dotty/tools/dotc/transform/TreeChecker.scala
index 7552f1f54020..69059168516a 100644
--- a/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -155,6 +155,8 @@ class TreeChecker {
           definedBinders -= tp
         case tp: ParamType =>
           assert(definedBinders.contains(tp.binder), s"orphan param: $tp")
+        case tp: TypeVar =>
+          apply(tp.underlying)
         case _ =>
           mapOver(tp)
       }

From 7855833eb91df689823aca0c4e336dd3166247dc Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Fri, 13 Feb 2015 11:53:09 +0100
Subject: [PATCH 20/22] Stop type inference from creating orphans.

A tweak in the answer to a fundamental question of inference: When should
type variables be instantiated? Example:

In a call

   f [ TVar ] ( g() )

A synthesised type variable TVar can be instantiated as soon as the call is
fully elaborated, but not before - in particular not when typing the nested
call `g()`. This is so far achieved by looking at the `owningTree` of a type
variable (in the example it would be the call above) and instantiating only
if the current tree contains the owning tree.

The problem is that this is fragile. If in the meantime the tree was copied,
say due to eta-expansion, the contains test will fail. Now this is not a big
deal, as long as we instantiate the type variable eventually. But previously
that was never done. With the fix we now instantiate type variables also if
we have fully elaborated the definition that most closely encloses the point
where the type variable is created. This is less fragile, as definitions can
be compared using their symbols instead of looking at trees.
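
As a small, self-contained illustration of the calls this is about (ordinary
user code with made-up names, not part of this patch):

    object InferenceExample {
      def g(): Int = 1
      def f[T](x: T): List[T] = x :: Nil   // a fresh type variable stands in for T
      // While the nested call g() is being typed, the variable for T must stay
      // uninstantiated; it is fixed (here to Int) only once the enclosing
      // application f(g()) - and, at the latest, the enclosing definition xs -
      // has been fully elaborated.
      val xs: List[Int] = f(g())
    }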
--- src/dotty/tools/dotc/core/Types.scala | 6 +++++- src/dotty/tools/dotc/typer/Inferencing.scala | 14 +++++++++++--- src/dotty/tools/dotc/typer/ProtoTypes.scala | 2 +- src/dotty/tools/dotc/typer/Typer.scala | 4 ++-- 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 5f30f484e559..f113307efcfa 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2269,8 +2269,12 @@ object Types { * @param creatorState The typer state in which the variable was created. * @param owningTree The function part of the TypeApply tree tree that introduces * the type variable. + * @paran owner The current owner if the context where the variable was created. + * + * `owningTree` and `owner` are used to determine whether a type-variable can be instantiated + * at some given point. See `Inferencing#interpolateUndetVars`. */ - final class TypeVar(val origin: PolyParam, creatorState: TyperState, val owningTree: untpd.Tree) extends CachedProxyType with ValueType { + final class TypeVar(val origin: PolyParam, creatorState: TyperState, val owningTree: untpd.Tree, val owner: Symbol) extends CachedProxyType with ValueType { /** The permanent instance type of the the variable, or NoType is none is given yet */ private[core] var inst: Type = NoType diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index 38c1e49c5669..753125e6a167 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -163,10 +163,18 @@ trait Inferencing { this: Checking => * If such a variable appears covariantly in type `tp` or does not appear at all, * approximate it by its lower bound. Otherwise, if it appears contravariantly * in type `tp` approximate it by its upper bound. + * @param ownedBy if it is different from NoSymbol, all type variables owned by + * `ownedBy` qualify, independent of position. + * Without that second condition, it can be that certain variables escape + * interpolation, for instance when their tree was eta-lifted, so + * the typechecked tree is no longer the tree in which the variable + * was declared. A concrete example of this phenomenon can be + * observed when compiling core.TypeOps#asSeenFrom. 
*/ - def interpolateUndetVars(tree: Tree)(implicit ctx: Context): Unit = { + def interpolateUndetVars(tree: Tree, ownedBy: Symbol)(implicit ctx: Context): Unit = { val constraint = ctx.typerState.constraint - val qualifies = (tvar: TypeVar) => tree contains tvar.owningTree + val qualifies = (tvar: TypeVar) => + (tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy def interpolate() = Stats.track("interpolateUndetVars") { val tp = tree.tpe.widen constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}") @@ -182,7 +190,7 @@ trait Inferencing { this: Checking => } } if (changed) // instantiations might have uncovered new typevars to interpolate - interpolateUndetVars(tree) + interpolateUndetVars(tree, ownedBy) else for (tvar <- constraint.uninstVars) if (!(vs contains tvar) && qualifies(tvar)) { diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala index c0e30b12ea5b..f646f7ecd2ab 100644 --- a/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -326,7 +326,7 @@ object ProtoTypes { def newTypeVars(pt: PolyType): List[TypeVar] = for (n <- (0 until pt.paramNames.length).toList) - yield new TypeVar(PolyParam(pt, n), state, owningTree) + yield new TypeVar(PolyParam(pt, n), state, owningTree, ctx.owner) val added = if (state.constraint contains pt) pt.duplicate(pt.paramNames, pt.paramBounds, pt.resultType) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 8e6741f6de42..e9b1cb9c1ed8 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -1166,8 +1166,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context) = /*>|>*/ track("adapt") /*<|<*/ { /*>|>*/ ctx.traceIndented(i"adapting $tree of type ${tree.tpe} to $pt", typr, show = true) /*<|<*/ { - interpolateUndetVars(tree) - tree overwriteType tree.tpe.simplified + interpolateUndetVars(tree, if (tree.isDef) tree.symbol else NoSymbol) + tree.overwriteType(tree.tpe.simplified) adaptInterpolated(tree, pt, original) } } From 7b63786c087b09319c78f07b960d0846aa990438 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 13 Feb 2015 12:47:45 +0100 Subject: [PATCH 21/22] Tweaks in comments --- src/dotty/tools/dotc/core/Flags.scala | 8 ++++---- src/dotty/tools/dotc/typer/Namer.scala | 4 ++-- src/dotty/tools/dotc/typer/Typer.scala | 4 +--- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala index c2e7cd39983c..e57ba066e47a 100644 --- a/src/dotty/tools/dotc/core/Flags.scala +++ b/src/dotty/tools/dotc/core/Flags.scala @@ -287,7 +287,7 @@ object Flags { /** A trait that has only abstract methods as members * (and therefore can be represented by a Java interface */ - final val PureInterface = typeFlag(22, "interface") + final val PureInterface = typeFlag(22, "interface") // TODO when unpickling, reconstitute from context /** Labeled with of abstract & override */ final val AbsOverride = termFlag(22, "abstract override") @@ -336,7 +336,7 @@ object Flags { final val JavaStaticType = JavaStatic.toTypeFlags /** Trait is not an interface, but does not have fields or intialization code */ - final val NoInits = typeFlag(32, "") // TODO 
reconstitute from context
+  final val NoInits = typeFlag(32, "")   // TODO when unpickling, reconstitute from context

   /** Variable is accessed from nested function. */
   final val Captured = termFlag(32, "")
@@ -367,7 +367,7 @@ object Flags {
   /** Symbol always defines a fresh named type */
   final val Fresh = commonFlag(45, "")

-  /** Symbol is defined in a super call */ // TODO reconstitute from context
+  /** Symbol is defined in a super call */ // TODO when unpickling, reconstitute from context
   final val InSuperCall = commonFlag(46, "")

   /** Symbol with private access is accessed outside its private scope */
@@ -554,7 +554,7 @@ object Flags {
   /** A Java interface, potentially with default methods */
   final val JavaTrait = allOf(JavaDefined, Trait, NoInits)

-  /** A Java interface */ // TODO reconstitute from context
+  /** A Java interface */ // TODO when unpickling, reconstitute from context
   final val JavaInterface = allOf(JavaDefined, Trait)

   /** A Java companion object */
diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala
index 95f0b41652a1..c522a5998e90 100644
--- a/src/dotty/tools/dotc/typer/Namer.scala
+++ b/src/dotty/tools/dotc/typer/Namer.scala
@@ -497,9 +497,9 @@ class Namer { typer: Typer =>
       denot.info = ClassInfo(cls.owner.thisType, cls, parentRefs, decls, selfInfo)
       if (cls is Trait) {
         if (body forall isNoInitMember) {
-          cls.setFlag(NoInits) // TODO set when unpickling
+          cls.setFlag(NoInits)
           if (body forall isPureInterfaceMember)
-            cls.setFlag(PureInterface) // TODO set when unpickling
+            cls.setFlag(PureInterface)
         }
       }
     }
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index e9b1cb9c1ed8..5e8591ab54e3 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -793,10 +793,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
       typr.println(s"adding refinement $refinement")
       checkRefinementNonCyclic(refinement, refineCls, seen)
       val rsym = refinement.symbol
-      if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty) {
-        println(refineCls.baseClasses)
+      if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty)
         ctx.error(i"refinement $rsym without matching type in parent $parent", refinement.pos)
-      }
       val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info
       RefinedType(parent, rsym.name, rt => rinfo.substThis(refineCls, SkolemType(rt)))
       // todo later: check that refinement is within bounds

From 7459aec08574b411a8b968ed8a7990182aa9e944 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Fri, 13 Feb 2015 13:18:24 +0100
Subject: [PATCH 22/22] Tweaks to format

Bump version to 0.03. Harmonize doc comment and Google doc.
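
One of the tweaks moves the tag category boundaries in the grammar comment
(see the PickleFormat.scala diff below). A hypothetical helper, not part of
the pickler, showing how a reader of the format could classify a tag under
the new boundaries:

    object TagCategory {
      // Boundaries as given in the updated comment: 0-63, 64-99, 100-127, 128-255.
      def astCategory(tag: Int): Int =
        if (tag <= 63) 1         // tag only
        else if (tag <= 99) 2    // tag Nat
        else if (tag <= 127) 3   // tag AST Nat
        else 4                   // tag Length <payload>
    }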
--- .../dotc/core/pickling/PickleFormat.scala | 171 +++++++++--------- src/dotty/tools/dotc/transform/Pickler.scala | 2 +- 2 files changed, 87 insertions(+), 86 deletions(-) diff --git a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala index 24b5169e7797..72c545021230 100644 --- a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala +++ b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala @@ -54,7 +54,7 @@ Standard-Section: "ASTs" Tree* TypeParam = TYPEPARAM Length NameRef Type Modifier* Params = PARAMS Length Param* - Param = PARAM Length NameRef Type Modifier + Param = PARAM Length NameRef Type Modifier* Selector = IMPORTED Length name_NameRef RENAMED Length from_NameRef to_NameRef @@ -63,7 +63,7 @@ Standard-Section: "ASTs" Tree* SELECT qual_Term possiblySigned_NameRef SUPER Length this_Term mixinTrait_Type? APPLY Length fn_Term arg_Term* - TYPEAPPLY Length fn_Term arg_Term* + TYPEAPPLY Length fn_Term arg_Type* NEW Length cls_Type PAIR Length left_Term right_Term TYPED Length expr_Term ascription_Type @@ -78,7 +78,7 @@ Standard-Section: "ASTs" Tree* THROW Length expr_Term SEQLITERAL Length elem_Term* JSEQLITERAL Length elem_Term* - BIND Length boundName_NameRef pat_Type pat_Term + BIND Length boundName_NameRef patType_Type pat_Term ALTERNATIVE Length alt_Term* UNAPPLY Length fun_Term ImplicitArg* pat_Term* ANNOTATED Length annot_Term underlying_Term @@ -134,6 +134,7 @@ Standard-Section: "ASTs" Tree* ANNOTATED Length annot_Tree underlying_Type ANDtype Length left_Type right_Type ORtype Length left_Type right_Type + BIND Length boundName_NameRef underlying_Type selfRef_Type BYNAMEtype Length underlying_Type POLYtype Length result_Type NamesTypes // needed for refinements METHODtype Length result_Type NamesTypes // needed for refinements @@ -177,10 +178,10 @@ Standard-Section: "ASTs" Tree* Note: Tree tags are grouped into 4 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way. 
- Category 1 (tags 0-95): tag - Category 2 (tags 96-127): tag Nat - Category 3 (tags 128-159): tag AST Nat - Category 4 (tags 160-255): tag Length + Category 1 (tags 0-63) : tag + Category 2 (tags 64-99) : tag Nat + Category 3 (tags 100-127): tag AST Nat + Category 4 (tags 128-255): tag Length Standard Section: "Positions" startPos_Index endPos_Index @@ -194,7 +195,7 @@ object PickleFormat { final val header = "5CA1AB1F" final val MajorVersion = 0 - final val MinorVersion = 2 + final val MinorVersion = 3 // Name tags @@ -243,83 +244,83 @@ object PickleFormat { final val DEFAULTparameterized = 32 final val DEFAULTinit = 33 - final val SHARED = 96 - final val TERMREFdirect = 97 - final val TYPEREFdirect = 98 - final val TERMREFstatic = 99 - final val TYPEREFstatic = 100 - final val BYTEconst = 101 - final val BYTEneg = 102 - final val SHORTconst = 103 - final val SHORTneg = 104 - final val CHARconst = 105 - final val INTconst = 106 - final val INTneg = 107 - final val LONGconst = 108 - final val LONGneg = 109 - final val FLOATconst = 110 - final val DOUBLEconst = 111 - final val STRINGconst = 112 - - final val IDENT = 128 - final val SELECT = 129 - final val TERMREFsymbol = 130 - final val TERMREF = 131 - final val TYPEREFsymbol = 132 - final val TYPEREF = 133 - - final val PACKAGE = 160 - final val VALDEF = 161 - final val DEFDEF = 162 - final val TYPEDEF = 163 - final val IMPORT = 164 - final val TYPEPARAM = 165 - final val PARAMS = 166 - final val PARAM = 167 - final val IMPORTED = 168 - final val RENAMED = 169 - final val APPLY = 170 - final val TYPEAPPLY = 171 - final val NEW = 172 - final val PAIR = 173 - final val TYPED = 174 - final val NAMEDARG = 175 - final val ASSIGN = 176 - final val BLOCK = 177 - final val IF = 178 - final val CLOSURE = 179 - final val MATCH = 180 - final val RETURN = 181 - final val TRY = 182 - final val THROW = 183 - final val SEQLITERAL = 184 - final val JSEQLITERAL = 185 - final val BIND = 186 - final val ALTERNATIVE = 187 - final val UNAPPLY = 188 - final val ANNOTATED = 189 - final val CASEDEF = 190 - final val IMPLICITarg = 191 - final val TEMPLATE = 192 - final val THIS = 193 - final val SUPER = 194 - final val CLASSconst = 195 - final val ENUMconst = 196 - final val SUPERtype = 197 - final val SKOLEMtype = 198 - final val REFINEDtype = 199 - final val APPLIEDtype = 200 - final val TYPEBOUNDS = 201 - final val TYPEALIAS = 202 - final val ANDtype = 203 - final val ORtype = 204 - final val BYNAMEtype = 205 - final val METHODtype = 206 - final val POLYtype = 207 - final val PARAMtype = 208 - final val IMPLICITARG = 209 - final val PRIVATEqualified = 210 - final val PROTECTEDqualified = 211 + final val SHARED = 64 + final val TERMREFdirect = 65 + final val TYPEREFdirect = 66 + final val TERMREFstatic = 67 + final val TYPEREFstatic = 68 + final val BYTEconst = 69 + final val BYTEneg = 70 + final val SHORTconst = 71 + final val SHORTneg = 72 + final val CHARconst = 73 + final val INTconst = 74 + final val INTneg = 75 + final val LONGconst = 76 + final val LONGneg = 77 + final val FLOATconst = 78 + final val DOUBLEconst = 79 + final val STRINGconst = 80 + + final val IDENT = 100 + final val SELECT = 101 + final val TERMREFsymbol = 102 + final val TERMREF = 103 + final val TYPEREFsymbol = 104 + final val TYPEREF = 105 + + final val PACKAGE = 128 + final val VALDEF = 129 + final val DEFDEF = 130 + final val TYPEDEF = 131 + final val IMPORT = 132 + final val TYPEPARAM = 133 + final val PARAMS = 134 + final val PARAM = 135 + final val IMPORTED = 136 + final val 
RENAMED = 137 + final val APPLY = 138 + final val TYPEAPPLY = 139 + final val NEW = 140 + final val PAIR = 141 + final val TYPED = 142 + final val NAMEDARG = 143 + final val ASSIGN = 144 + final val BLOCK = 145 + final val IF = 146 + final val CLOSURE = 147 + final val MATCH = 148 + final val RETURN = 149 + final val TRY = 150 + final val THROW = 151 + final val SEQLITERAL = 152 + final val JSEQLITERAL = 153 + final val BIND = 154 + final val ALTERNATIVE = 155 + final val UNAPPLY = 156 + final val ANNOTATED = 157 + final val CASEDEF = 158 + final val IMPLICITarg = 159 + final val TEMPLATE = 160 + final val THIS = 161 + final val SUPER = 162 + final val CLASSconst = 163 + final val ENUMconst = 164 + final val SUPERtype = 165 + final val SKOLEMtype = 166 + final val REFINEDtype = 167 + final val APPLIEDtype = 168 + final val TYPEBOUNDS = 169 + final val TYPEALIAS = 170 + final val ANDtype = 171 + final val ORtype = 172 + final val BYNAMEtype = 173 + final val METHODtype = 174 + final val POLYtype = 175 + final val PARAMtype = 176 + final val IMPLICITARG = 177 + final val PRIVATEqualified = 178 + final val PROTECTEDqualified = 179 final val firstSimpleTreeTag = EMPTYTREE final val firstNatTreeTag = SHARED diff --git a/src/dotty/tools/dotc/transform/Pickler.scala b/src/dotty/tools/dotc/transform/Pickler.scala index c578b56c724c..c21910e658af 100644 --- a/src/dotty/tools/dotc/transform/Pickler.scala +++ b/src/dotty/tools/dotc/transform/Pickler.scala @@ -19,7 +19,7 @@ class Pickler extends MiniPhaseTransform { thisTransform => val pickler = new TastyPickler new TreePickler(pickler, picklePositions = false).pickle(tree) val bytes = pickler.assembleParts() - def rawBytes = + def rawBytes = // not needed right now, but useful to print raw format. bytes.iterator.grouped(10).toList.zipWithIndex.map { case (row, i) => s"${i}0: ${row.mkString(" ")}" }