diff --git a/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 170738de61cb..5fcb7f57be97 100644 --- a/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -295,7 +295,7 @@ class DottyBackendInterface()(implicit ctx: Context) extends BackendInterface{ val t = new TreeTraverser { var outerRhs: Tree = tree - def traverse(tree: tpd.Tree): Unit = tree match { + def traverse(tree: tpd.Tree)(implicit ctx: Context): Unit = tree match { case t: DefDef => if (t.symbol is Flags.Label) res.put(outerRhs, t :: res.getOrElse(outerRhs, Nil)) diff --git a/src/dotty/tools/dotc/CompilationUnit.scala b/src/dotty/tools/dotc/CompilationUnit.scala index 9c14314594cc..44f9b7e5a198 100644 --- a/src/dotty/tools/dotc/CompilationUnit.scala +++ b/src/dotty/tools/dotc/CompilationUnit.scala @@ -13,4 +13,6 @@ class CompilationUnit(val source: SourceFile) { var tpdTree: tpd.Tree = tpd.EmptyTree def isJava = source.file.name.endsWith(".java") + + var pickled: Array[Byte] = Array() } \ No newline at end of file diff --git a/src/dotty/tools/dotc/Compiler.scala b/src/dotty/tools/dotc/Compiler.scala index e8df4900827a..e051e16c8d49 100644 --- a/src/dotty/tools/dotc/Compiler.scala +++ b/src/dotty/tools/dotc/Compiler.scala @@ -41,10 +41,10 @@ class Compiler { List(new FirstTransform, new SyntheticMethods), List(new SuperAccessors), - //List(new Pickler), // Pickler needs to come last in a group since it should not pickle trees generated later + List(new Pickler), // Pickler needs to come last in a group since it should not pickle trees generated later List(new RefChecks, new ElimRepeated, - new ElimLocals, + new NormalizeFlags, new ExtensionMethods), List(new TailRec), // TailRec needs to be in its own group for now. // Otherwise it produces -Ycheck incorrect code for diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala index 7cdedb19a6cb..9bef032febec 100644 --- a/src/dotty/tools/dotc/ast/Desugar.scala +++ b/src/dotty/tools/dotc/ast/Desugar.scala @@ -218,7 +218,7 @@ object desugar { /** The expansion of a class definition. See inline comments for what is involved */ def classDef(cdef: TypeDef)(implicit ctx: Context): Tree = { - val TypeDef(name, impl @ Template(constr0, parents, self, body)) = cdef + val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef val mods = cdef.mods val (constr1, defaultGetters) = defDef(constr0, isPrimaryConstructor = true) match { @@ -242,7 +242,7 @@ object desugar { val constr = cpy.DefDef(constr1)(tparams = constrTparams, vparamss = constrVparamss) // Add constructor type parameters to auxiliary constructors - val normalizedBody = body map { + val normalizedBody = impl.body map { case ddef: DefDef if ddef.name.isConstructorName => cpy.DefDef(ddef)(tparams = constrTparams) case stat => @@ -342,7 +342,10 @@ object desugar { val companions = if (mods is Case) { val parent = - if (constrTparams.nonEmpty || constrVparamss.length > 1) anyRef + if (constrTparams.nonEmpty || + constrVparamss.length > 1 || + mods.is(Abstract) || + constr.mods.is(Private)) anyRef // todo: also use anyRef if constructor has a dependent method type (or rule that out)! 
else (constrVparamss :\ classTypeRef) ((vparams, restpe) => Function(vparams map (_.tpt), restpe)) val applyMeths = @@ -425,10 +428,10 @@ object desugar { val modul = ValDef(name, clsRef, New(clsRef, Nil)) .withMods(mods | ModuleCreationFlags) .withPos(mdef.pos) - val ValDef(selfName, selfTpt, selfRhs) = tmpl.self + val ValDef(selfName, selfTpt, _) = tmpl.self val selfMods = tmpl.self.mods if (!selfTpt.isEmpty) ctx.error("object definition may not have a self type", tmpl.self.pos) - val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(name)), selfRhs) + val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(name)), tmpl.self.rhs) .withMods(selfMods) .withPos(tmpl.self.pos orElse tmpl.pos.startPos) val clsTmpl = cpy.Template(tmpl)(self = clsSelf, body = tmpl.body) @@ -864,18 +867,19 @@ object desugar { * @param parentType The type of `parent` */ def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(implicit ctx: Context): TypeDef = { - def stripToCore(tp: Type): Type = tp match { - case tp: RefinedType if tp.argInfos.nonEmpty => tp // parameterized class type - case tp: TypeRef if tp.symbol.isClass => tp // monomorphic class type + def stripToCore(tp: Type): List[Type] = tp match { + case tp: RefinedType if tp.argInfos.nonEmpty => tp :: Nil // parameterized class type + case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type case tp: TypeProxy => stripToCore(tp.underlying) - case _ => defn.AnyType + case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) + case _ => defn.AnyType :: Nil } - val parentCore = stripToCore(parent.tpe) + val parentCores = stripToCore(parent.tpe) val untpdParent = TypedSplice(parent) - val (classParent, self) = - if (parent.tpe eq parentCore) (untpdParent, EmptyValDef) - else (TypeTree(parentCore), ValDef(nme.WILDCARD, untpdParent, EmptyTree)) - val impl = Template(emptyConstructor, classParent :: Nil, self, refinements) + val (classParents, self) = + if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) + else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) + val impl = Template(emptyConstructor, classParents, self, refinements) TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) } diff --git a/src/dotty/tools/dotc/ast/Positioned.scala b/src/dotty/tools/dotc/ast/Positioned.scala index 59e2e29b50c6..be3f904a9d7d 100644 --- a/src/dotty/tools/dotc/ast/Positioned.scala +++ b/src/dotty/tools/dotc/ast/Positioned.scala @@ -48,6 +48,12 @@ abstract class Positioned extends DotClass with Product { */ def addPos(pos: Position): this.type = withPos(pos union this.pos) + /** Set position of this tree only, without performing + * any checks of consistency with - or updates of - other positions. + * Called from Unpickler when entering positions. + */ + private[dotc] def setPosUnchecked(pos: Position) = curPos = pos + /** If any children of this node do not have positions, set them to the given position, * and transitively visit their children. */ diff --git a/src/dotty/tools/dotc/ast/TreeInfo.scala b/src/dotty/tools/dotc/ast/TreeInfo.scala index 3d633c58d59a..0abd25f515df 100644 --- a/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -24,20 +24,14 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case _ => false } - /** Is tree legal as a member definition of an interface? + /** Does tree contain an initialization part when seen as a member of a class or trait? 
*/ - def isPureInterfaceMember(tree: Tree): Boolean = unsplice(tree) match { + def isNoInitMember(tree: Tree): Boolean = unsplice(tree) match { case EmptyTree | Import(_, _) | TypeDef(_, _) => true - case DefDef(_, _, _, _, rhs) => rhs.isEmpty - case ValDef(_, _, rhs) => rhs.isEmpty + case tree: ValDef => tree.unforcedRhs == EmptyTree case _ => false } - /** Is tree legal as a member definition of a no-init trait? - */ - def isNoInitMember(tree: Tree): Boolean = - isPureInterfaceMember(tree) || unsplice(tree).isInstanceOf[DefDef] - def isOpAssign(tree: Tree) = unsplice(tree) match { case Apply(fn, _ :: Nil) => unsplice(fn) match { @@ -90,8 +84,8 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => } /** If tree is a closure, it's body, otherwise tree itself */ - def closureBody(tree: tpd.Tree): tpd.Tree = tree match { - case Block(DefDef(nme.ANON_FUN, _, _, _, rhs) :: Nil, Closure(_, _, _)) => rhs + def closureBody(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { + case Block((meth @ DefDef(nme.ANON_FUN, _, _, _, _)) :: Nil, Closure(_, _, _)) => meth.rhs case _ => tree } @@ -244,12 +238,14 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => /** Is this case guarded? */ def isGuardedCase(cdef: CaseDef) = cdef.guard ne EmptyTree - /** True iff definition if a val or def with no right-hand-side, or it + /** True iff definition is a val or def with no right-hand-side, or it * is an abstract typoe declaration */ def lacksDefinition(mdef: MemberDef)(implicit ctx: Context) = mdef match { - case mdef: ValOrDefDef => mdef.rhs.isEmpty && !mdef.name.isConstructorName && !mdef.mods.is(ParamAccessor) - case mdef: TypeDef => mdef.rhs.isEmpty || mdef.rhs.isInstanceOf[TypeBoundsTree] + case mdef: ValOrDefDef => + mdef.unforcedRhs == EmptyTree && !mdef.name.isConstructorName && !mdef.mods.is(ParamAccessor) + case mdef: TypeDef => + mdef.rhs.isEmpty || mdef.rhs.isInstanceOf[TypeBoundsTree] case _ => false } @@ -287,8 +283,8 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => | Import(_, _) | DefDef(_, _, _, _, _) => Pure - case vdef @ ValDef(_, _, rhs) => - if (vdef.mods is Mutable) Impure else exprPurity(rhs) + case vdef @ ValDef(_, _, _) => + if (vdef.mods is Mutable) Impure else exprPurity(vdef.rhs) case _ => Impure } @@ -410,7 +406,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => /** The variables defined by a pattern, in reverse order of their appearance. 
*/ def patVars(tree: Tree)(implicit ctx: Context): List[Symbol] = { val acc = new TreeAccumulator[List[Symbol]] { - def apply(syms: List[Symbol], tree: Tree) = tree match { + def apply(syms: List[Symbol], tree: Tree)(implicit ctx: Context) = tree match { case Bind(_, body) => apply(tree.symbol :: syms, body) case _ => foldOver(syms, tree) } @@ -452,7 +448,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => def defPath(sym: Symbol, root: Tree)(implicit ctx: Context): List[Tree] = ctx.debugTraceIndented(s"defpath($sym with position ${sym.pos}, ${root.show})") { require(sym.pos.exists) object accum extends TreeAccumulator[List[Tree]] { - def apply(x: List[Tree], tree: Tree): List[Tree] = { + def apply(x: List[Tree], tree: Tree)(implicit ctx: Context): List[Tree] = { if (tree.envelope.contains(sym.pos)) if (definedSym(tree) == sym) tree :: x else { @@ -478,7 +474,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => if (stats exists (definedSym(_) == sym)) stats else Nil encl match { case Block(stats, _) => verify(stats) - case Template(_, _, _, stats) => verify(stats) + case encl: Template => verify(encl.body) case PackageDef(_, stats) => verify(stats) case _ => Nil } diff --git a/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 98027cd396ba..d2ec3ea105a3 100644 --- a/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -78,22 +78,22 @@ final class TreeTypeMap( } override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = treeMap(tree) match { - case impl @ Template(constr, parents, self, body) => + case impl @ Template(constr, parents, self, _) => val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) cpy.Template(impl)( constr = tmap.transformSub(constr), parents = parents mapconserve transform, self = tmap.transformSub(self), - body = body mapconserve tmap.transform + body = impl.body mapconserve tmap.transform ).withType(tmap.mapType(impl.tpe)) case tree1 => tree1.withType(mapType(tree1.tpe)) match { case id: Ident if tpd.needsSelect(id.tpe) => ref(id.tpe.asInstanceOf[TermRef]).withPos(id.pos) - case ddef @ DefDef(name, tparams, vparamss, tpt, rhs) => + case ddef @ DefDef(name, tparams, vparamss, tpt, _) => val (tmap1, tparams1) = transformDefs(ddef.tparams) val (tmap2, vparamss1) = tmap1.transformVParamss(vparamss) - cpy.DefDef(ddef)(name, tparams1, vparamss1, tmap2.transform(tpt), tmap2.transform(rhs)) + cpy.DefDef(ddef)(name, tparams1, vparamss1, tmap2.transform(tpt), tmap2.transform(ddef.rhs)) case blk @ Block(stats, expr) => val (tmap1, stats1) = transformDefs(stats) val expr1 = tmap1.transform(expr) diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala index ff44a48c5de3..a70869e22656 100644 --- a/src/dotty/tools/dotc/ast/Trees.scala +++ b/src/dotty/tools/dotc/ast/Trees.scala @@ -19,8 +19,8 @@ object Trees { // Note: it would be more logical to make Untyped = Nothing. // However, this interacts in a bad way with Scala's current type inference. - // In fact, we cannot write soemthing like Select(pre, name), where pre is - // of type Tree[Nothing]; type inference will treat the Nothing as an uninstantited + // In fact, we cannot write something like Select(pre, name), where pre is + // of type Tree[Nothing]; type inference will treat the Nothing as an uninstantiated // value and will not infer Nothing as the type parameter for Select. 
// We should come back to this issue once type inference is changed. type Untyped = Null @@ -76,6 +76,9 @@ object Trees { } private var nextId = 0 // for debugging + + type LazyTree = AnyRef /* really: Tree | Lazy[Tree] */ + type LazyTreeList = AnyRef /* really: List[Tree] | Lazy[List[Tree]] */ /** Trees take a parameter indicating what the type of their `tpe` field * is. Two choices: `Type` or `Untyped`. @@ -309,7 +312,7 @@ object Trees { /** Tree defines a new symbol and carries modifiers. * The position of a MemberDef contains only the defined identifier or pattern. - * The envelope of a MemberDef contains the whole definition and his its point + * The envelope of a MemberDef contains the whole definition and has its point * on the opening keyword (or the next token after that if keyword is missing). */ abstract class MemberDef[-T >: Untyped] extends NameTree[T] with DefTree[T] { @@ -330,13 +333,14 @@ object Trees { protected def setMods(mods: Modifiers[T @uncheckedVariance]) = myMods = mods - override def envelope: Position = rawMods.pos union pos union initialPos + override def envelope: Position = rawMods.pos.union(pos).union(initialPos) } /** A ValDef or DefDef tree */ - trait ValOrDefDef[-T >: Untyped] extends MemberDef[T] { + trait ValOrDefDef[-T >: Untyped] extends MemberDef[T] with WithLazyField[Tree[T]] { def tpt: Tree[T] - def rhs: Tree[T] + def unforcedRhs: LazyTree = unforced + def rhs(implicit ctx: Context): Tree[T] = forceIfLazy } // ----------- Tree case classes ------------------------------------ @@ -619,17 +623,22 @@ object Trees { } /** mods val name: tpt = rhs */ - case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], rhs: Tree[T]) + case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree) extends ValOrDefDef[T] { type ThisTree[-T >: Untyped] = ValDef[T] assert(isEmpty || tpt != genericEmptyTree) + def unforced = preRhs + protected def force(x: AnyRef) = preRhs = x } /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ - case class DefDef[-T >: Untyped] private[ast] (name: TermName, tparams: List[TypeDef[T]], vparamss: List[List[ValDef[T]]], tpt: Tree[T], rhs: Tree[T]) + case class DefDef[-T >: Untyped] private[ast] (name: TermName, tparams: List[TypeDef[T]], + vparamss: List[List[ValDef[T]]], tpt: Tree[T], private var preRhs: LazyTree) extends ValOrDefDef[T] { type ThisTree[-T >: Untyped] = DefDef[T] assert(tpt != genericEmptyTree) + def unforced = preRhs + protected def force(x: AnyRef) = preRhs = x } /** mods class name template or @@ -652,9 +661,13 @@ object Trees { } /** extends parents { self => body } */ - case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parents: List[Tree[T]], self: ValDef[T], body: List[Tree[T]]) - extends DefTree[T] { + case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parents: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList) + extends DefTree[T] with WithLazyField[List[Tree[T]]] { type ThisTree[-T >: Untyped] = Template[T] + def unforcedBody = unforced + def unforced = preBody + protected def force(x: AnyRef) = preBody = x + def body(implicit ctx: Context): List[Tree[T]] = forceIfLazy } /** import expr.selectors @@ -746,6 +759,33 @@ object Trees { if (buf != null) buf.toList else trees } + // ----- Lazy trees and tree sequences + + /** A tree that can have a lazy field + * The field is represented by some private `var` which is + * proxied `unforced` and `force`. 
Forcing the field will + * set the `var` to the underlying value. + */ + trait WithLazyField[+T <: AnyRef] { + def unforced: AnyRef + protected def force(x: AnyRef): Unit + def forceIfLazy(implicit ctx: Context): T = unforced match { + case lzy: Lazy[T] => + val x = lzy.complete + force(x) + x + case x: T @ unchecked => x + } + } + + /** A base trait for lazy tree fields. + * These can be instantiated with Lazy instances which + * can delay tree construction until the field is first demanded. + */ + trait Lazy[T <: AnyRef] { + def complete(implicit ctx: Context): T + } + // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. abstract class Instance[T >: Untyped <: Type] extends DotClass { inst => @@ -990,20 +1030,20 @@ object Trees { case tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns)) } - def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: Tree): ValDef = tree match { - case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree + def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree): ValDef = tree match { + case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree case _ => finalize(tree, untpd.ValDef(name, tpt, rhs)) } - def DefDef(tree: Tree)(name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef = tree match { - case tree: DefDef if (name == tree.name) && (tparams eq tree.tparams) && (vparamss eq tree.vparamss) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree + def DefDef(tree: Tree)(name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: LazyTree): DefDef = tree match { + case tree: DefDef if (name == tree.name) && (tparams eq tree.tparams) && (vparamss eq tree.vparamss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree case _ => finalize(tree, untpd.DefDef(name, tparams, vparamss, tpt, rhs)) } def TypeDef(tree: Tree)(name: TypeName, rhs: Tree, tparams: List[untpd.TypeDef]): TypeDef = tree match { case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) && (tparams eq tree.tparams) => tree case _ => finalize(tree, untpd.TypeDef(name, tparams, rhs)) } - def Template(tree: Tree)(constr: DefDef, parents: List[Tree], self: ValDef, body: List[Tree]): Template = tree match { - case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (self eq tree.self) && (body eq tree.body) => tree + def Template(tree: Tree)(constr: DefDef, parents: List[Tree], self: ValDef, body: LazyTreeList): Template = tree match { + case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (self eq tree.self) && (body eq tree.unforcedBody) => tree case _ => finalize(tree, untpd.Template(constr, parents, self, body)) } def Import(tree: Tree)(expr: Tree, selectors: List[untpd.Tree]): Import = tree match { @@ -1035,13 +1075,13 @@ object Trees { Try(tree: Tree)(expr, cases, finalizer) def UnApply(tree: UnApply)(fun: Tree = tree.fun, implicits: List[Tree] = tree.implicits, patterns: List[Tree] = tree.patterns): UnApply = UnApply(tree: Tree)(fun, implicits, patterns) - def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: Tree = tree.rhs): ValDef = + def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs): ValDef = ValDef(tree: Tree)(name, tpt, rhs) - def DefDef(tree: DefDef)(name: TermName = 
tree.name, tparams: List[TypeDef] = tree.tparams, vparamss: List[List[ValDef]] = tree.vparamss, tpt: Tree = tree.tpt, rhs: Tree = tree.rhs): DefDef = + def DefDef(tree: DefDef)(name: TermName = tree.name, tparams: List[TypeDef] = tree.tparams, vparamss: List[List[ValDef]] = tree.vparamss, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs): DefDef = DefDef(tree: Tree)(name, tparams, vparamss, tpt, rhs) def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs, tparams: List[untpd.TypeDef] = tree.tparams): TypeDef = TypeDef(tree: Tree)(name, rhs, tparams) - def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, self: ValDef = tree.self, body: List[Tree] = tree.body): Template = + def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody): Template = Template(tree: Tree)(constr, parents, self, body) } @@ -1114,16 +1154,16 @@ object Trees { cpy.UnApply(tree)(transform(fun), transform(implicits), transform(patterns)) case EmptyValDef => tree - case tree @ ValDef(name, tpt, rhs) => + case tree @ ValDef(name, tpt, _) => val tpt1 = transform(tpt) - val rhs1 = transform(rhs) + val rhs1 = transform(tree.rhs) cpy.ValDef(tree)(name, tpt1, rhs1) - case tree @ DefDef(name, tparams, vparamss, tpt, rhs) => - cpy.DefDef(tree)(name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt), transform(rhs)) + case tree @ DefDef(name, tparams, vparamss, tpt, _) => + cpy.DefDef(tree)(name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt), transform(tree.rhs)) case tree @ TypeDef(name, rhs) => cpy.TypeDef(tree)(name, transform(rhs), tree.tparams) - case Template(constr, parents, self, body) => - cpy.Template(tree)(transformSub(constr), transform(parents), transformSub(self), transformStats(body)) + case tree @ Template(constr, parents, self, _) => + cpy.Template(tree)(transformSub(constr), transform(parents), transformSub(self), transformStats(tree.body)) case Import(expr, selectors) => cpy.Import(tree)(transform(expr), selectors) case PackageDef(pid, stats) => @@ -1145,109 +1185,116 @@ object Trees { transform(trees).asInstanceOf[List[Tr]] } - abstract class TreeAccumulator[X] extends ((X, Tree) => X) { - def apply(x: X, tree: Tree): X - def apply(x: X, trees: Traversable[Tree]): X = (x /: trees)(apply) - def foldOver(x: X, tree: Tree): X = tree match { - case Ident(name) => - x - case Select(qualifier, name) => - this(x, qualifier) - case This(qual) => - x - case Super(qual, mix) => - this(x, qual) - case Apply(fun, args) => - this(this(x, fun), args) - case TypeApply(fun, args) => - this(this(x, fun), args) - case Literal(const) => - x - case New(tpt) => - this(x, tpt) - case Pair(left, right) => - this(this(x, left), right) - case Typed(expr, tpt) => - this(this(x, expr), tpt) - case NamedArg(name, arg) => - this(x, arg) - case Assign(lhs, rhs) => - this(this(x, lhs), rhs) - case Block(stats, expr) => - this(this(x, stats), expr) - case If(cond, thenp, elsep) => - this(this(this(x, cond), thenp), elsep) - case Closure(env, meth, tpt) => - this(this(this(x, env), meth), tpt) - case Match(selector, cases) => - this(this(x, selector), cases) - case CaseDef(pat, guard, body) => - this(this(this(x, pat), guard), body) - case Return(expr, from) => - this(this(x, expr), from) - case Try(block, handler, finalizer) => - this(this(this(x, block), handler), finalizer) - case SeqLiteral(elems) => - this(x, 
elems) - case TypeTree(original) => - x - case SingletonTypeTree(ref) => - this(x, ref) - case SelectFromTypeTree(qualifier, name) => - this(x, qualifier) - case AndTypeTree(left, right) => - this(this(x, left), right) - case OrTypeTree(left, right) => - this(this(x, left), right) - case RefinedTypeTree(tpt, refinements) => - this(this(x, tpt), refinements) - case AppliedTypeTree(tpt, args) => - this(this(x, tpt), args) - case ByNameTypeTree(result) => - this(x, result) - case TypeBoundsTree(lo, hi) => - this(this(x, lo), hi) - case Bind(name, body) => - this(x, body) - case Alternative(trees) => - this(x, trees) - case UnApply(fun, implicits, patterns) => - this(this(this(x, fun), implicits), patterns) - case ValDef(name, tpt, rhs) => - this(this(x, tpt), rhs) - case DefDef(name, tparams, vparamss, tpt, rhs) => - this(this((this(x, tparams) /: vparamss)(apply), tpt), rhs) - case TypeDef(name, rhs) => - this(x, rhs) - case Template(constr, parents, self, body) => - this(this(this(this(x, constr), parents), self), body) - case Import(expr, selectors) => - this(x, expr) - case PackageDef(pid, stats) => - this(this(x, pid), stats) - case Annotated(annot, arg) => - this(this(x, annot), arg) - case Thicket(ts) => - this(x, ts) + abstract class TreeAccumulator[X] { + def apply(x: X, tree: Tree)(implicit ctx: Context): X + def apply(x: X, trees: Traversable[Tree])(implicit ctx: Context): X = (x /: trees)(apply) + def foldOver(x: X, tree: Tree)(implicit ctx: Context): X = { + def localCtx = + if (tree.hasType && tree.symbol.exists) ctx.withOwner(tree.symbol) else ctx + tree match { + case Ident(name) => + x + case Select(qualifier, name) => + this(x, qualifier) + case This(qual) => + x + case Super(qual, mix) => + this(x, qual) + case Apply(fun, args) => + this(this(x, fun), args) + case TypeApply(fun, args) => + this(this(x, fun), args) + case Literal(const) => + x + case New(tpt) => + this(x, tpt) + case Pair(left, right) => + this(this(x, left), right) + case Typed(expr, tpt) => + this(this(x, expr), tpt) + case NamedArg(name, arg) => + this(x, arg) + case Assign(lhs, rhs) => + this(this(x, lhs), rhs) + case Block(stats, expr) => + this(this(x, stats), expr) + case If(cond, thenp, elsep) => + this(this(this(x, cond), thenp), elsep) + case Closure(env, meth, tpt) => + this(this(this(x, env), meth), tpt) + case Match(selector, cases) => + this(this(x, selector), cases) + case CaseDef(pat, guard, body) => + this(this(this(x, pat), guard), body) + case Return(expr, from) => + this(this(x, expr), from) + case Try(block, handler, finalizer) => + this(this(this(x, block), handler), finalizer) + case SeqLiteral(elems) => + this(x, elems) + case TypeTree(original) => + x + case SingletonTypeTree(ref) => + this(x, ref) + case SelectFromTypeTree(qualifier, name) => + this(x, qualifier) + case AndTypeTree(left, right) => + this(this(x, left), right) + case OrTypeTree(left, right) => + this(this(x, left), right) + case RefinedTypeTree(tpt, refinements) => + this(this(x, tpt), refinements) + case AppliedTypeTree(tpt, args) => + this(this(x, tpt), args) + case ByNameTypeTree(result) => + this(x, result) + case TypeBoundsTree(lo, hi) => + this(this(x, lo), hi) + case Bind(name, body) => + this(x, body) + case Alternative(trees) => + this(x, trees) + case UnApply(fun, implicits, patterns) => + this(this(this(x, fun), implicits), patterns) + case tree @ ValDef(name, tpt, _) => + implicit val ctx: Context = localCtx + this(this(x, tpt), tree.rhs) + case tree @ DefDef(name, tparams, vparamss, tpt, _) => + implicit 
val ctx: Context = localCtx + this(this((this(x, tparams) /: vparamss)(apply), tpt), tree.rhs) + case TypeDef(name, rhs) => + implicit val ctx: Context = localCtx + this(x, rhs) + case tree @ Template(constr, parents, self, _) => + this(this(this(this(x, constr), parents), self), tree.body) + case Import(expr, selectors) => + this(x, expr) + case PackageDef(pid, stats) => + this(this(x, pid), stats)(localCtx) + case Annotated(annot, arg) => + this(this(x, annot), arg) + case Thicket(ts) => + this(x, ts) + } } } abstract class TreeTraverser extends TreeAccumulator[Unit] { - def traverse(tree: Tree): Unit - def apply(x: Unit, tree: Tree) = traverse(tree) - protected def traverseChildren(tree: Tree) = foldOver((), tree) + def traverse(tree: Tree)(implicit ctx: Context): Unit + def apply(x: Unit, tree: Tree)(implicit ctx: Context) = traverse(tree) + protected def traverseChildren(tree: Tree)(implicit ctx: Context) = foldOver((), tree) } /** Fold `f` over all tree nodes, in depth-first, prefix order */ class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { - def apply(x: X, tree: Tree): X = foldOver(f(x, tree), tree) + def apply(x: X, tree: Tree)(implicit ctx: Context): X = foldOver(f(x, tree), tree) } /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`. */ class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { - def apply(x: X, tree: Tree): X = { + def apply(x: X, tree: Tree)(implicit ctx: Context): X = { val x1 = f(x, tree) if (x1.asInstanceOf[AnyRef] ne x1.asInstanceOf[AnyRef]) x1 else foldOver(x1, tree) diff --git a/src/dotty/tools/dotc/ast/tpd.scala b/src/dotty/tools/dotc/ast/tpd.scala index 52a617ea2764..d1fa1733f0d3 100644 --- a/src/dotty/tools/dotc/ast/tpd.scala +++ b/src/dotty/tools/dotc/ast/tpd.scala @@ -161,7 +161,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(implicit ctx: Context): UnApply = ta.assignType(untpd.UnApply(fun, implicits, patterns), proto) - def ValDef(sym: TermSymbol, rhs: Tree = EmptyTree)(implicit ctx: Context): ValDef = + def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree)(implicit ctx: Context): ValDef = ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info), rhs), sym) def SyntheticValDef(name: TermName, rhs: Tree)(implicit ctx: Context): ValDef = @@ -183,8 +183,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def valueParamss(tp: Type): (List[List[TermSymbol]], Type) = tp match { case tp @ MethodType(paramNames, paramTypes) => - def valueParam(name: TermName, info: Type): TermSymbol = - ctx.newSymbol(sym, name, TermParam, info) + def valueParam(name: TermName, info: Type): TermSymbol = { + val maybeImplicit = if (tp.isInstanceOf[ImplicitMethodType]) Implicit else EmptyFlags + ctx.newSymbol(sym, name, TermParam | maybeImplicit, info) + } val params = (paramNames, paramTypes).zipped.map(valueParam) val (paramss, rtp) = valueParamss(tp.instantiate(params map (_.termRef))) (params :: paramss, rtp) @@ -234,7 +236,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam)) yield TypeDef(tparam) val findLocalDummy = new FindLocalDummyAccumulator(cls) - val localDummy = ((NoSymbol: Symbol) /: body)(findLocalDummy) + val localDummy = ((NoSymbol: Symbol) /: body)(findLocalDummy.apply) .orElse(ctx.newLocalDummy(cls)) val impl = 
untpd.Template(constr, parents, selfType, newTypeParams ++ body) .withType(localDummy.nonMemberTermRef) @@ -382,7 +384,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else Literal(Constant(null)).select(defn.Any_asInstanceOf).appliedToType(tpe) } private class FindLocalDummyAccumulator(cls: ClassSymbol)(implicit ctx: Context) extends TreeAccumulator[Symbol] { - def apply(sym: Symbol, tree: Tree) = + def apply(sym: Symbol, tree: Tree)(implicit ctx: Context) = if (sym.exists) sym else if (tree.isDef) { val owner = tree.symbol.owner @@ -407,7 +409,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { override def Select(tree: Tree)(qualifier: Tree, name: Name)(implicit ctx: Context): Select = { val tree1 = untpd.cpy.Select(tree)(qualifier, name) tree match { - case tree: Select if (qualifier.tpe eq tree.qualifier.tpe) => tree1.withTypeUnchecked(tree.tpe) + case tree: Select if (qualifier.tpe eq tree.qualifier.tpe) => + tree1.withTypeUnchecked(tree.tpe) case _ => tree.tpe match { case tpe: NamedType => tree1.withType(tpe.derivedSelect(qualifier.tpe)) case _ => tree1.withTypeUnchecked(tree.tpe) @@ -538,13 +541,13 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => false } - def shallowFold[T](z: T)(op: (T, tpd.Tree) => T) = + def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(implicit ctx: Context) = new ShallowFolder(op).apply(z, tree) - def deepFold[T](z: T)(op: (T, tpd.Tree) => T) = + def deepFold[T](z: T)(op: (T, tpd.Tree) => T)(implicit ctx: Context) = new DeepFolder(op).apply(z, tree) - def find[T](pred: (tpd.Tree) => Boolean): Option[tpd.Tree] = + def find[T](pred: (tpd.Tree) => Boolean)(implicit ctx: Context): Option[tpd.Tree] = shallowFold[Option[tpd.Tree]](None)((accum, tree) => if (pred(tree)) Some(tree) else accum) def subst(from: List[Symbol], to: List[Symbol])(implicit ctx: Context): ThisTree = @@ -571,7 +574,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(implicit ctx: Context): ThisTree = { assert(ctx.phase == trans.next) val traverser = new TreeTraverser { - def traverse(tree: Tree) = tree match { + def traverse(tree: Tree)(implicit ctx: Context) = tree match { case tree: DefTree => val sym = tree.symbol if (sym.denot(ctx.withPhase(trans)).owner == from) @@ -657,21 +660,21 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { // --- Higher order traversal methods ------------------------------- - def foreachSubTree(f: Tree => Unit): Unit = { //TODO should go in tpd. + def foreachSubTree(f: Tree => Unit)(implicit ctx: Context): Unit = { //TODO should go in tpd. 
val traverser = new TreeTraverser { - def traverse(tree: Tree) = foldOver(f(tree), tree) + def traverse(tree: Tree)(implicit ctx: Context) = foldOver(f(tree), tree) } traverser.traverse(tree) } - def existsSubTree(p: Tree => Boolean): Boolean = { + def existsSubTree(p: Tree => Boolean)(implicit ctx: Context): Boolean = { val acc = new TreeAccumulator[Boolean] { - def apply(x: Boolean, t: Tree) = x || p(t) || foldOver(x, t) + def apply(x: Boolean, t: Tree)(implicit ctx: Context) = x || p(t) || foldOver(x, t) } acc(false, tree) } - def filterSubTrees(f: Tree => Boolean): List[Tree] = { + def filterSubTrees(f: Tree => Boolean)(implicit ctx: Context): List[Tree] = { val buf = new mutable.ListBuffer[Tree] foreachSubTree { tree => if (f(tree)) buf += tree } buf.toList @@ -770,9 +773,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** A traverser that passes the enlcosing class or method as an argumenr * to the traverse method. */ - abstract class EnclosingMethodTraverser(implicit ctx: Context) extends TreeAccumulator[Symbol] { - def traverse(enclMeth: Symbol, tree: Tree): Unit - def apply(enclMeth: Symbol, tree: Tree) = { + abstract class EnclosingMethodTraverser extends TreeAccumulator[Symbol] { + def traverse(enclMeth: Symbol, tree: Tree)(implicit ctx: Context): Unit + def apply(enclMeth: Symbol, tree: Tree)(implicit ctx: Context) = { tree match { case _: DefTree if tree.symbol.exists => traverse(tree.symbol.enclosingMethod, tree) diff --git a/src/dotty/tools/dotc/ast/untpd.scala b/src/dotty/tools/dotc/ast/untpd.scala index dfff175146f6..1c1f7dd61588 100644 --- a/src/dotty/tools/dotc/ast/untpd.scala +++ b/src/dotty/tools/dotc/ast/untpd.scala @@ -142,10 +142,10 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Bind(name: Name, body: Tree): Bind = new Bind(name, body) def Alternative(trees: List[Tree]): Alternative = new Alternative(trees) def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree]): UnApply = new UnApply(fun, implicits, patterns) - def ValDef(name: TermName, tpt: Tree, rhs: Tree): ValDef = new ValDef(name, tpt, rhs) - def DefDef(name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef = new DefDef(name, tparams, vparamss, tpt, rhs) + def ValDef(name: TermName, tpt: Tree, rhs: LazyTree): ValDef = new ValDef(name, tpt, rhs) + def DefDef(name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: LazyTree): DefDef = new DefDef(name, tparams, vparamss, tpt, rhs) def TypeDef(name: TypeName, rhs: Tree): TypeDef = new TypeDef(name, rhs) - def Template(constr: DefDef, parents: List[Tree], self: ValDef, body: List[Tree]): Template = new Template(constr, parents, self, body) + def Template(constr: DefDef, parents: List[Tree], self: ValDef, body: LazyTreeList): Template = new Template(constr, parents, self, body) def Import(expr: Tree, selectors: List[untpd.Tree]): Import = new Import(expr, selectors) def PackageDef(pid: RefTree, stats: List[Tree]): PackageDef = new PackageDef(pid, stats) def Annotated(annot: Tree, arg: Tree): Annotated = new Annotated(annot, arg) @@ -389,7 +389,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { } abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { - override def foldOver(x: X, tree: Tree): X = tree match { + override def foldOver(x: X, tree: Tree)(implicit ctx: Context): X = tree match { case ModuleDef(name, impl) => this(x, impl) case SymbolLit(str) => diff --git 
a/src/dotty/tools/dotc/config/Printers.scala b/src/dotty/tools/dotc/config/Printers.scala index f8d7f8de557a..d1738ee6fccc 100644 --- a/src/dotty/tools/dotc/config/Printers.scala +++ b/src/dotty/tools/dotc/config/Printers.scala @@ -30,4 +30,5 @@ object Printers { val config = noPrinter val transforms = noPrinter val cyclicErrors = noPrinter + val pickling = noPrinter } \ No newline at end of file diff --git a/src/dotty/tools/dotc/config/ScalaSettings.scala b/src/dotty/tools/dotc/config/ScalaSettings.scala index 51fccd8a8300..1c1f3e49490f 100644 --- a/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -150,6 +150,7 @@ class ScalaSettings extends Settings.SettingGroup { val Yprintpos = BooleanSetting("-Yprintpos", "show tree positions.") val YnoDeepSubtypes = BooleanSetting("-Yno-deep-subtypes", "throw an exception on deep subtyping call stacks.") val YprintSyms = BooleanSetting("-Yprint-syms", "when printing trees print info in symbols instead of corresponding info in trees.") + val YtestPickler = BooleanSetting("-Ytest-pickler", "self-test for pickling functionality; should be used with -Ystop-after:pickler") def stop = YstopAfter /** Area-specific debug output. diff --git a/src/dotty/tools/dotc/core/Annotations.scala b/src/dotty/tools/dotc/core/Annotations.scala index 79aa058efafb..d264483e6e7b 100644 --- a/src/dotty/tools/dotc/core/Annotations.scala +++ b/src/dotty/tools/dotc/core/Annotations.scala @@ -33,12 +33,13 @@ object Annotations { def tree(implicit ctx: Context): Tree = t } - case class LazyAnnotation(sym: Symbol)(treeFn: Context => Tree) extends Annotation { + abstract case class LazyAnnotation(sym: Symbol) extends Annotation { private var myTree: Tree = null def tree(implicit ctx: Context) = { - if (myTree == null) myTree = treeFn(ctx) + if (myTree == null) myTree = complete(ctx) myTree } + def complete(implicit ctx: Context): Tree override def symbol(implicit ctx: Context): Symbol = sym } @@ -74,7 +75,9 @@ object Annotations { } def deferred(sym: Symbol, treeFn: Context => Tree)(implicit ctx: Context): Annotation = - new LazyAnnotation(sym)(treeFn) + new LazyAnnotation(sym) { + def complete(implicit ctx: Context) = treeFn(ctx) + } def deferred(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation = deferred(atp.classSymbol, implicit ctx => New(atp, args)) diff --git a/src/dotty/tools/dotc/core/Decorators.scala b/src/dotty/tools/dotc/core/Decorators.scala index 882729063c29..1ce83442880c 100644 --- a/src/dotty/tools/dotc/core/Decorators.scala +++ b/src/dotty/tools/dotc/core/Decorators.scala @@ -176,7 +176,7 @@ object Decorators { case _ => arg } catch { - case ex: Exception => s"(missing due to $ex)" + case ex: Exception => throw ex // s"(missing due to $ex)" } val prefix :: suffixes = sc.parts.toList diff --git a/src/dotty/tools/dotc/core/Denotations.scala b/src/dotty/tools/dotc/core/Denotations.scala index cd585dea1cae..91cf73404541 100644 --- a/src/dotty/tools/dotc/core/Denotations.scala +++ b/src/dotty/tools/dotc/core/Denotations.scala @@ -120,11 +120,13 @@ object Denotations { /** Is this denotation overloaded? */ final def isOverloaded = isInstanceOf[MultiDenotation] - /** The signature of the denotation */ + /** The signature of the denotation. 
*/ def signature(implicit ctx: Context): Signature - /** Resolve overloaded denotation to pick the one with the given signature */ - def atSignature(sig: Signature)(implicit ctx: Context): SingleDenotation + /** Resolve overloaded denotation to pick the one with the given signature + * when seen from prefix `site`. + */ + def atSignature(sig: Signature, site: Type = NoPrefix)(implicit ctx: Context): SingleDenotation /** The variant of this denotation that's current in the given context. */ def current(implicit ctx: Context): Denotation @@ -207,7 +209,7 @@ object Denotations { */ def matchingDenotation(site: Type, targetType: Type)(implicit ctx: Context): SingleDenotation = if (isOverloaded) - atSignature(targetType.signature).matchingDenotation(site, targetType) + atSignature(targetType.signature, site).matchingDenotation(site, targetType) else if (exists && !site.memberInfo(symbol).matchesLoosely(targetType)) NoDenotation else @@ -343,8 +345,8 @@ object Denotations { final def validFor = denot1.validFor & denot2.validFor final def isType = false final def signature(implicit ctx: Context) = Signature.OverloadedSignature - def atSignature(sig: Signature)(implicit ctx: Context): SingleDenotation = - denot1.atSignature(sig) orElse denot2.atSignature(sig) + def atSignature(sig: Signature, site: Type)(implicit ctx: Context): SingleDenotation = + denot1.atSignature(sig, site) orElse denot2.atSignature(sig, site) def current(implicit ctx: Context): Denotation = derivedMultiDenotation(denot1.current, denot2.current) def altsWith(p: Symbol => Boolean): List[SingleDenotation] = @@ -412,8 +414,10 @@ object Denotations { def accessibleFrom(pre: Type, superAccess: Boolean)(implicit ctx: Context): Denotation = if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation - def atSignature(sig: Signature)(implicit ctx: Context): SingleDenotation = - if (sig matches signature) this else NoDenotation + def atSignature(sig: Signature, site: Type)(implicit ctx: Context): SingleDenotation = { + val situated = if (site == NoPrefix) this else asSeenFrom(site) + if (sig matches situated.signature) this else NoDenotation + } // ------ Forming types ------------------------------------------- diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala index 53beae838c68..108a9edeeac3 100644 --- a/src/dotty/tools/dotc/core/Flags.scala +++ b/src/dotty/tools/dotc/core/Flags.scala @@ -287,7 +287,7 @@ object Flags { /** A trait that has only abstract methods as members * (and therefore can be represented by a Java interface */ - final val PureInterface = typeFlag(22, "interface") + final val PureInterface = typeFlag(22, "interface") // TODO when unpickling, reconstitute from context /** Labeled with of abstract & override */ final val AbsOverride = termFlag(22, "abstract override") @@ -300,7 +300,7 @@ object Flags { /** Method is assumed to be stable */ final val Stable = termFlag(24, "") - + /** A case parameter accessor */ final val CaseAccessor = termFlag(25, "") @@ -318,8 +318,8 @@ object Flags { /** A method that has default params */ final val DefaultParameterized = termFlag(27, "") - /** Symbol is initialized to the default value, e.g. 
var x: T = _ */ - final val DefaultInit = termFlag(28, "") + /** A type that is defined by a type bind */ + final val BindDefinedType = typeFlag(27, "") /** Symbol is inlined */ final val Inline = commonFlag(29, "inline") @@ -332,7 +332,7 @@ object Flags { final val JavaStaticTerm = JavaStatic.toTermFlags final val JavaStaticType = JavaStatic.toTypeFlags - /** Trait is not an interface, but does not have fields or intialization code */ + /** Trait is not an interface, but does not have fields or initialization code */ final val NoInits = typeFlag(32, "") /** Variable is accessed from nested function. */ @@ -345,7 +345,7 @@ object Flags { final val Bridge = termFlag(34, "") /** Symbol is a Java varargs bridge */ // (needed?) - final val VBridge = termFlag(35, "") + final val VBridge = termFlag(35, "") // TODO remove /** Symbol is a method which should be marked ACC_SYNCHRONIZED */ final val Synchronized = termFlag(36, "") @@ -499,6 +499,8 @@ object Flags { /** These flags are pickled */ final val PickledFlags = flagRange(FirstFlag, FirstNotPickledFlag) + + final val AllFlags = flagRange(FirstFlag, MaxFlag) /** An abstract class or a trait */ final val AbstractOrTrait = Abstract | Trait @@ -529,7 +531,10 @@ object Flags { /** A type parameter or type parameter accessor */ final val TypeParamOrAccessor = TypeParam | TypeParamAccessor - + + /** If symbol of a type alias has these flags, prefer the alias */ + final val AliasPreferred = TypeParam | TypeArgument | ExpandedName + /** A covariant type parameter instance */ final val LocalCovariant = allOf(Local, Covariant) @@ -551,7 +556,7 @@ object Flags { /** A Java interface, potentially with default methods */ final val JavaTrait = allOf(JavaDefined, Trait, NoInits) - /** A Java interface */ + /** A Java interface */ // TODO when unpickling, reconstitute from context final val JavaInterface = allOf(JavaDefined, Trait) /** A Java companion object */ diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index c15c4c4c2946..60e429bb3c83 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -75,7 +75,7 @@ object NameOps { def isAvoidClashName = name endsWith AVOID_CLASH_SUFFIX def isImportName = name startsWith IMPORT def isFieldName = name endsWith LOCAL_SUFFIX - def isInheritedName = name.length > 0 && name.head == '(' && name.startsWith(nme.INHERITED) + def isShadowedName = name.length > 0 && name.head == '(' && name.startsWith(nme.SHADOWED) def isDefaultGetterName = name.isTermName && name.asTermName.defaultGetterIndex >= 0 def isScala2LocalSuffix = name.endsWith(" ") def isModuleVarName(name: Name): Boolean = @@ -149,19 +149,22 @@ object NameOps { /** The expanded name of `name` relative to this class `base` with given `separator` */ - def expandedName(base: Symbol, separator: Name = nme.EXPAND_SEPARATOR)(implicit ctx: Context): N = { - val prefix = if (base is Flags.ExpandedName) base.name else base.fullNameSeparated('$') - name.fromName(prefix ++ separator ++ name).asInstanceOf[N] - } + def expandedName(base: Symbol)(implicit ctx: Context): N = + expandedName(if (base is Flags.ExpandedName) base.name else base.fullNameSeparated('$')) + + /** The expanded name of `name` relative to `basename` with given `separator` + */ + def expandedName(prefix: Name)(implicit ctx: Context): N = + name.fromName(prefix ++ nme.EXPAND_SEPARATOR ++ name).asInstanceOf[N] - def unexpandedName(separator: Name = nme.EXPAND_SEPARATOR): N = { - val idx = 
name.lastIndexOfSlice(separator) - if (idx < 0) name else (name drop (idx + separator.length)).asInstanceOf[N] + def unexpandedName: N = { + val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR) + if (idx < 0) name else (name drop (idx + nme.EXPAND_SEPARATOR.length)).asInstanceOf[N] } - def inheritedName: N = likeTyped(nme.INHERITED ++ name) + def shadowedName: N = likeTyped(nme.SHADOWED ++ name) - def revertInherited: N = likeTyped(name.drop(nme.INHERITED.length)) + def revertShadowed: N = likeTyped(name.drop(nme.SHADOWED.length)) /** Translate a name into a list of simple TypeNames and TermNames. * In all segments before the last, type/term is determined by whether @@ -289,11 +292,11 @@ object NameOps { /** The name of an accessor for protected symbols. */ def protectedAccessorName: TermName = - PROTECTED_PREFIX ++ name.unexpandedName() + PROTECTED_PREFIX ++ name.unexpandedName /** The name of a setter for protected symbols. Used for inherited Java fields. */ def protectedSetterName: TermName = - PROTECTED_SET_PREFIX ++ name.unexpandedName() + PROTECTED_SET_PREFIX ++ name.unexpandedName def moduleVarName: TermName = name ++ MODULE_VAR_SUFFIX diff --git a/src/dotty/tools/dotc/core/Periods.scala b/src/dotty/tools/dotc/core/Periods.scala index 0bdbb94bae89..0cd41a7df6f4 100644 --- a/src/dotty/tools/dotc/core/Periods.scala +++ b/src/dotty/tools/dotc/core/Periods.scala @@ -46,7 +46,7 @@ object Periods { * It is coded as follows: * * sign, always 0 1 bit - * runid 21 bits + * runid 19 bits * last phase id: 6 bits * #phases before last: 6 bits * diff --git a/src/dotty/tools/dotc/core/Signature.scala b/src/dotty/tools/dotc/core/Signature.scala index 0f7f33b6bb7d..4e041e629748 100644 --- a/src/dotty/tools/dotc/core/Signature.scala +++ b/src/dotty/tools/dotc/core/Signature.scala @@ -23,7 +23,7 @@ import TypeErasure.sigName * * The signatures of non-method types are always `NotAMethod`. */ -case class Signature private (paramsSig: List[TypeName], resSig: TypeName) { +case class Signature(paramsSig: List[TypeName], resSig: TypeName) { /** Does this signature conincide with that signature on their parameter parts? 
*/ final def sameParams(that: Signature): Boolean = this.paramsSig == that.paramsSig diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index 19a210a32aab..7b730d4d76a4 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -100,7 +100,6 @@ object StdNames { val EXPAND_SEPARATOR: N = "$$" val IMPL_CLASS_SUFFIX: N = "$class" val IMPORT: N = "" - val INHERITED: N = "(inherited)" // tag to be used until we have proper name kinds val INTERPRETER_IMPORT_WRAPPER: N = "$iw" val INTERPRETER_LINE_PREFIX: N = "line" val INTERPRETER_VAR_PREFIX: N = "res" @@ -118,6 +117,7 @@ object StdNames { val PROTECTED_PREFIX: N = "protected$" val PROTECTED_SET_PREFIX: N = PROTECTED_PREFIX + "set" val ROOT: N = "" + val SHADOWED: N = "(shadowed)" // tag to be used until we have proper name kinds val SINGLETON_SUFFIX: N = ".type" val SPECIALIZED_SUFFIX: N = "$sp" val SUPER_PREFIX: N = "super$" diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index c46c40cd026d..44629c036c2f 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -95,7 +95,7 @@ object SymDenotations { private def adaptFlags(flags: FlagSet) = if (isType) flags.toTypeFlags else flags.toTermFlags /** Update the flag set */ - private final def flags_=(flags: FlagSet): Unit = + final def flags_=(flags: FlagSet): Unit = myFlags = adaptFlags(flags) /** Set given flags(s) of this denotation */ @@ -271,7 +271,7 @@ object SymDenotations { /** The name with which the denoting symbol was created */ final def originalName(implicit ctx: Context) = { val d = initial.asSymDenotation - if (d is ExpandedName) d.name.unexpandedName() else d.name // !!!DEBUG, was: effectiveName + if (d is ExpandedName) d.name.unexpandedName else d.name // !!!DEBUG, was: effectiveName } /** The encoded full path name of this denotation, where outer names and inner names @@ -338,7 +338,7 @@ object SymDenotations { /** Is this symbol the root class or its companion object? */ final def isRoot: Boolean = - (name.toTermName == nme.ROOT) && (owner eq NoSymbol) + (name.toTermName == nme.ROOT || name == nme.ROOTPKG) && (owner eq NoSymbol) /** Is this symbol the empty package class or its companion object? */ final def isEmptyPackage(implicit ctx: Context): Boolean = @@ -423,7 +423,7 @@ object SymDenotations { /** Is this denotation static (i.e. with no outer instance)? */ final def isStatic(implicit ctx: Context) = - (this is JavaStatic) || this.exists && owner.isStaticOwner + (this is JavaStatic) || this.exists && owner.isStaticOwner || this.isRoot /** Is this a package class or module class that defines static symbols? */ final def isStaticOwner(implicit ctx: Context): Boolean = @@ -456,8 +456,9 @@ object SymDenotations { } } - /** Is this a user defined "def" method? Excluded are accessors. */ - final def isSourceMethod(implicit ctx: Context) = this is (Method, butNot = AccessorOrLabel) + /** Is this a user defined "def" method? Excluded are accessors and anonymous functions. */ + final def isSourceMethod(implicit ctx: Context) = + this.is(Method, butNot = AccessorOrLabel) && !isAnonymousFunction /** Is this a setter? 
*/ final def isGetter(implicit ctx: Context) = diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 6f75142c4bb9..02526a30ba21 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -132,8 +132,8 @@ class TypeApplications(val self: Type) extends AnyVal { LambdaClass(forcing = false).exists /** Is type `tp` a Lambda with all Arg$ fields fully instantiated? */ - def isInstantiatedLambda(tp: Type)(implicit ctx: Context): Boolean = - tp.isSafeLambda && tp.typeParams.isEmpty + def isInstantiatedLambda(implicit ctx: Context): Boolean = + isSafeLambda && typeParams.isEmpty /** Encode the type resulting from applying this type to given arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { @@ -188,7 +188,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (args.isEmpty || ctx.erasedTypes) self else { val res = instantiate(self, self) - if (isInstantiatedLambda(res)) res.select(tpnme.Apply) else res + if (res.isInstantiatedLambda) res.select(tpnme.Apply) else res } } diff --git a/src/dotty/tools/dotc/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala similarity index 100% rename from src/dotty/tools/dotc/TypeErasure.scala rename to src/dotty/tools/dotc/core/TypeErasure.scala diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 3e04eb037537..e6a81248c4ba 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -10,7 +10,7 @@ import util.SimpleMap import collection.mutable import ast.tpd._ -trait TypeOps { this: Context => +trait TypeOps { this: Context => // TODO: Make standalone object. final def asSeenFrom(tp: Type, pre: Type, cls: Symbol, theMap: AsSeenFromMap): Type = { diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 5f30f484e559..ea4101558c88 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -30,6 +30,7 @@ import config.Config import config.Printers._ import annotation.tailrec import Flags.FlagSet +import typer.Mode import language.implicitConversions object Types { @@ -981,7 +982,7 @@ object Types { * when forming the function type. 
*/ def toFunctionType(dropLast: Int = 0)(implicit ctx: Context): Type = this match { - case mt @ MethodType(_, formals) if !mt.isDependent => + case mt @ MethodType(_, formals) if !mt.isDependent || ctx.mode.is(Mode.AllowDependentFunctions) => val formals1 = if (dropLast == 0) formals else formals dropRight dropLast defn.FunctionType( formals1 mapConserve (_.underlyingIfRepeated(mt.isJava)), mt.resultType) @@ -1006,30 +1007,6 @@ object Types { case _ => show } - type VarianceMap = SimpleMap[TypeVar, Integer] - - /** All occurrences of type vars in this type that satisfy predicate - * `include` mapped to their variances (-1/0/1) in this type, where - * -1 means: only covariant occurrences - * +1 means: only covariant occurrences - * 0 means: mixed or non-variant occurrences - */ - def variances(include: TypeVar => Boolean)(implicit ctx: Context): VarianceMap = track("variances") { - val accu = new TypeAccumulator[VarianceMap] { - def apply(vmap: VarianceMap, t: Type): VarianceMap = t match { - case t: TypeVar - if !t.isInstantiated && (ctx.typerState.constraint contains t) && include(t) => - val v = vmap(t) - if (v == null) vmap.updated(t, variance) - else if (v == variance) vmap - else vmap.updated(t, 0) - case _ => - foldOver(vmap, t) - } - } - accu(SimpleMap.Empty, this) - } - /** A simplified version of this type which is equivalent wrt =:= to this type. * This applies a typemap to the type which (as all typemaps) follows type * variable instances and reduces typerefs over refined types. It also @@ -1343,7 +1320,7 @@ object Types { } protected def asMemberOf(prefix: Type)(implicit ctx: Context) = - if (name.isInheritedName) prefix.nonPrivateMember(name.revertInherited) + if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed) else prefix.member(name) /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type @@ -1441,7 +1418,7 @@ object Types { * the public name. */ final def shadowed(implicit ctx: Context): NamedType = - NamedType(prefix, name.inheritedName) + NamedType(prefix, name.shadowedName) override def equals(that: Any) = that match { case that: NamedType => @@ -1964,7 +1941,13 @@ object Types { } else resType - private[this] var myDependencyStatus: DependencyStatus = Unknown + var myDependencyStatus: DependencyStatus = Unknown + + private def combine(x: DependencyStatus, y: DependencyStatus): DependencyStatus = { + val status = (x & StatusMask) max (y & StatusMask) + val provisional = (x | y) & Provisional + (if (status == TrueDeps) status else status | provisional).toByte + } /** The dependency status of this method. Some examples: * @@ -1975,26 +1958,27 @@ object Types { * // dependency can be eliminated by dealiasing. 
*/ private def dependencyStatus(implicit ctx: Context): DependencyStatus = { - if (myDependencyStatus == Unknown) { + if (myDependencyStatus != Unknown) myDependencyStatus + else { val isDepAcc = new TypeAccumulator[DependencyStatus] { def apply(x: DependencyStatus, tp: Type) = if (x == TrueDeps) x - else x max { + else tp match { case MethodParam(`thisMethodType`, _) => TrueDeps case tp @ TypeRef(MethodParam(`thisMethodType`, _), name) => tp.info match { // follow type alias to avoid dependency - case TypeAlias(alias) => apply(x, alias) max FalseDeps + case TypeAlias(alias) => combine(apply(x, alias), FalseDeps) case _ => TrueDeps } - case _ => - foldOver(x, tp) + case tp: TypeVar if !tp.isInstantiated => combine(x, Provisional) + case _ => foldOver(x, tp) } - } } - myDependencyStatus = isDepAcc(NoDeps, resType) + val result = isDepAcc(NoDeps, resType) + if ((result & Provisional) == 0) myDependencyStatus = result + (result & StatusMask).toByte } - myDependencyStatus } /** Does result type contain references to parameters of this method type, @@ -2086,6 +2070,8 @@ object Types { private final val NoDeps: DependencyStatus = 1 // no dependent parameters found private final val FalseDeps: DependencyStatus = 2 // all dependent parameters are prefixes of non-depended alias types private final val TrueDeps: DependencyStatus = 3 // some truly dependent parameters exist + private final val StatusMask: DependencyStatus = 3 // the bits indicating actual dependency status + private final val Provisional: DependencyStatus = 4 // set if dependency status can still change due to type variable instantiations } object JavaMethodType extends MethodTypeCompanion { @@ -2269,8 +2255,12 @@ object Types { * @param creatorState The typer state in which the variable was created. * @param owningTree The function part of the TypeApply tree tree that introduces * the type variable. + * @paran owner The current owner if the context where the variable was created. + * + * `owningTree` and `owner` are used to determine whether a type-variable can be instantiated + * at some given point. See `Inferencing#interpolateUndetVars`. 
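The new Provisional bit and the `combine` helper above pack two facts into one byte: the dependency answer itself and whether it may still change, and `myDependencyStatus` is only cached once the Provisional bit is clear. A standalone sketch of how the bits interact; the constants mirror the diff, while DepStatusDemo and the demo values are made up here:

object DepStatusDemo {
  type DependencyStatus = Byte
  final val NoDeps: DependencyStatus = 1        // no dependent parameters found
  final val FalseDeps: DependencyStatus = 2     // dependencies only via eliminable aliases
  final val TrueDeps: DependencyStatus = 3      // truly dependent parameters exist
  final val StatusMask: DependencyStatus = 3
  final val Provisional: DependencyStatus = 4   // result may still change (uninstantiated type vars)

  // Mirrors the diff's `combine`: keep the stronger status; keep Provisional
  // unless the result is already TrueDeps, which can no longer change.
  def combine(x: DependencyStatus, y: DependencyStatus): DependencyStatus = {
    val status = (x & StatusMask) max (y & StatusMask)
    val provisional = (x | y) & Provisional
    (if (status == TrueDeps) status else status | provisional).toByte
  }

  def demo(): Unit = {
    assert(combine(NoDeps, (FalseDeps | Provisional).toByte) == (FalseDeps | Provisional))
    assert(combine(TrueDeps, Provisional) == TrueDeps)  // TrueDeps is final, Provisional dropped
  }
}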
*/ - final class TypeVar(val origin: PolyParam, creatorState: TyperState, val owningTree: untpd.Tree) extends CachedProxyType with ValueType { + final class TypeVar(val origin: PolyParam, creatorState: TyperState, val owningTree: untpd.Tree, val owner: Symbol) extends CachedProxyType with ValueType { /** The permanent instance type of the the variable, or NoType is none is given yet */ private[core] var inst: Type = NoType @@ -2602,7 +2592,8 @@ object Types { if ((annot eq this.annot) && (tpe eq this.tpe)) this else AnnotatedType(annot, tpe) - override def stripTypeVar(implicit ctx: Context): Type = tpe.stripTypeVar + override def stripTypeVar(implicit ctx: Context): Type = + derivedAnnotatedType(annot, tpe.stripTypeVar) override def stripAnnots(implicit ctx: Context): Type = tpe.stripAnnots } @@ -2734,7 +2725,13 @@ object Types { tp match { case tp: NamedType => if (stopAtStatic && tp.symbol.isStatic) tp - else tp.derivedSelect(this(tp.prefix)) + else { + val saved = variance + variance = 0 + val result = tp.derivedSelect(this(tp.prefix)) + variance = saved + result + } case _: ThisType | _: BoundType @@ -2867,17 +2864,25 @@ object Types { protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations protected var variance = 1 - + + protected def applyToPrefix(x: T, tp: NamedType) = { + val saved = variance + variance = 0 + val result = this(x, tp.prefix) + variance = saved + result + } + def foldOver(x: T, tp: Type): T = tp match { case tp: TypeRef => if (stopAtStatic && tp.symbol.isStatic) x else { val tp1 = tp.prefix.lookupRefined(tp.name) - this(x, if (tp1.exists) tp1 else tp.prefix) + if (tp1.exists) this(x, tp1) else applyToPrefix(x, tp) } case tp: TermRef => if (stopAtStatic && tp.currentSymbol.isStatic) x - else this(x, tp.prefix) + else applyToPrefix(x, tp) case _: ThisType | _: BoundType diff --git a/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala b/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala index ed3eb7251d1f..3b8daab392cd 100644 --- a/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala +++ b/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala @@ -139,7 +139,7 @@ class ClassfileParser( var sym = classRoot.owner while (sym.isClass && !(sym is Flags.ModuleClass)) { for (tparam <- sym.typeParams) { - classTParams = classTParams.updated(tparam.name.unexpandedName(), tparam) + classTParams = classTParams.updated(tparam.name.unexpandedName, tparam) } sym = sym.owner } diff --git a/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala b/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala new file mode 100644 index 000000000000..84a9a17443d5 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala @@ -0,0 +1,52 @@ +package dotty.tools +package dotc +package core +package pickling + +import Contexts._, SymDenotations._ +import dotty.tools.dotc.ast.tpd +import TastyUnpickler._, TastyBuffer._ +import util.Positions._ +import PositionUnpickler._ + +object DottyUnpickler { + + /** Exception thrown if classfile is corrupted */ + class BadSignature(msg: String) extends RuntimeException(msg) +} + +/** A class for unpickling Tasty trees and symbols. 
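The TypeMap and TypeAccumulator changes earlier in this hunk both wrap prefix traversal in the same save/zero/restore dance, since a prefix is neither a covariant nor a contravariant position. The pattern in isolation, as an illustrative skeleton rather than the compiler's classes:

abstract class VarianceAwareTraverser[T] {
  protected var variance: Int = 1   // +1 covariant, -1 contravariant, 0 nonvariant

  def apply(x: T, part: AnyRef): T

  // Prefixes are traversed at variance 0, then the previous variance is restored,
  // in the same way as applyToPrefix and the derivedSelect case above.
  protected def applyToPrefix(x: T, prefix: AnyRef): T = {
    val saved = variance
    variance = 0
    val result = apply(x, prefix)
    variance = saved
    result
  }
}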
+ * @param bytes the bytearray containing the Tasty file from which we unpickle + */ +class DottyUnpickler(bytes: Array[Byte]) { + import tpd._ + + private val unpickler = new TastyUnpickler(bytes) + private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler).get + + /** Enter all toplevel classes and objects into their scopes + * @param roots a set of SymDenotations that should be overwritten by unpickling + */ + def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit = + treeUnpickler.enterTopLevel(roots) + + /** The unpickled trees + * @param readPositions if true, trees get decorated with position information. + */ + def body(readPositions: Boolean = false)(implicit ctx: Context): List[Tree] = { + if (readPositions) + for ((totalRange, positions) <- unpickler.unpickle(new PositionsSectionUnpickler())) + treeUnpickler.usePositions(totalRange, positions) + treeUnpickler.unpickle() + } + + private class TreeSectionUnpickler extends SectionUnpickler[TreeUnpickler]("ASTs") { + def unpickle(reader: TastyReader, tastyName: TastyName.Table) = + new TreeUnpickler(reader, tastyName) + } + + private class PositionsSectionUnpickler extends SectionUnpickler[(Position, AddrToPosition)]("Positions") { + def unpickle(reader: TastyReader, tastyName: TastyName.Table) = + new PositionUnpickler(reader).unpickle() + } +} diff --git a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala new file mode 100644 index 000000000000..be557f0b199e --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala @@ -0,0 +1,93 @@ +package dotty.tools +package dotc +package core +package pickling + +import collection.mutable +import Names.{Name, chrs} +import Decorators._, NameOps._ +import TastyBuffer._ +import scala.io.Codec +import TastyName._ +import PickleFormat._ + +class NameBuffer extends TastyBuffer(100000) { + + private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef] + + def nameIndex(name: TastyName): NameRef = nameRefs.get(name) match { + case Some(ref) => + ref + case None => + val ref = NameRef(nameRefs.size) + nameRefs(name) = ref + ref + } + def nameIndex(name: Name): NameRef = { + val tname = + if (name.isShadowedName) Shadowed(nameIndex(name.revertShadowed)) + else Simple(name.toTermName) + nameIndex(tname) + } + + def nameIndex(str: String): NameRef = nameIndex(str.toTermName) + + def fullNameIndex(name: Name): NameRef = { + val pos = name.lastIndexOf('.') + if (pos > 0) + nameIndex(Qualified(fullNameIndex(name.take(pos)), nameIndex(name.drop(pos + 1)))) + else + nameIndex(name) + } + + private def withLength(op: => Unit): Unit = { + val lengthAddr = currentAddr + writeByte(0) + op + val length = currentAddr.index - lengthAddr.index - 1 + assert(length < 128) + putNat(lengthAddr, length, 1) + } + + def writeNameRef(ref: NameRef) = writeNat(ref.index) + + def pickleName(name: TastyName): Unit = name match { + case Simple(name) => + val bytes = + if (name.length == 0) new Array[Byte](0) + else Codec.toUTF8(chrs, name.start, name.length) + writeByte(UTF8) + writeNat(bytes.length) + writeBytes(bytes, bytes.length) + case Qualified(qualified, selector) => + writeByte(QUALIFIED) + withLength { writeNameRef(qualified); writeNameRef(selector) } + case Signed(original, params, result) => + writeByte(SIGNED) + withLength { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) } + case Expanded(original) => + writeByte(EXPANDED) + withLength { writeNameRef(original) } + case ModuleClass(module) 
=> + writeByte(OBJECTCLASS) + withLength { writeNameRef(module) } + case SuperAccessor(accessed) => + writeByte(SUPERACCESSOR) + withLength { writeNameRef(accessed) } + case DefaultGetter(method, paramNumber) => + writeByte(DEFAULTGETTER) + withLength { writeNameRef(method); writeNat(paramNumber) } + case Shadowed(original) => + writeByte(SHADOWED) + withLength { writeNameRef(original) } + } + + override def assemble(): Unit = { + var i = 0 + for ((name, ref) <- nameRefs) { + assert(ref.index == i) + i += 1 + pickleName(name) + } + } +} diff --git a/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala b/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala index c16b794b7922..06d02d888c81 100644 --- a/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala +++ b/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala @@ -249,7 +249,6 @@ object PickleBuffer { EXPANDEDNAME -> ExpandedName, IMPLCLASS -> (Scala2PreSuper, ImplClass), SPECIALIZED -> Specialized, - DEFAULTINIT -> DefaultInit, VBRIDGE -> VBridge, VARARGS -> JavaVarargs, ENUM -> Enum) diff --git a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala new file mode 100644 index 000000000000..d37a3673d145 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala @@ -0,0 +1,489 @@ +package dotty.tools.dotc +package core +package pickling + +/************************************************************ +Notation: + +We use BNF notation. Terminal symbols start with at least two +consecutive upper case letters. Each terminal is represented as a +single byte tag. Non-terminals are mixed case. Prefixes of the form +lower case letter*_ are for explanation of semantic content only, they +can be dropped without changing the grammar. + +Micro-syntax: + + LongInt = Digit* StopDigit // big endian 2's complement, value fits in a Long w/o overflow + Int = LongInt // big endian 2's complement, fits in an Int w/o overflow + Nat = LongInt // non-negative value, fits in an Int without overflow + Digit = 0 | ... | 127 + StopDigit = 128 | ... | 255 // value = digit - 128 + +Macro-format: + + File = Header majorVersion_Nat minorVersion_Nat UUID + nameTable_Length Name* Section* + Header = 0x5CA1AB1F + UUID = Byte*16 // random UUID + + Section = NameRef Length Bytes + Length = Nat // length of rest of entry in bytes + + Name = UTF8 Length UTF8-CodePoint* + QUALIFIED Length qualified_NameRef selector_NameRef + SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef* + EXPANDED Length original_NameRef + OBJECTCLASS Length module_NameRef + SUPERACCESSOR Length accessed_NameRef + DEFAULTGETTER Length method_NameRef paramNumber_Nat + SHADOWED Length original_NameRef + MANGLED Length mangle_NameRef name_NameRef + ... + + NameRef = Nat // ordinal number of name in name table, starting from 1. + +Note: Unqualified names in the name table are strings. The context decides whether a name is +a type-name or a term-name. The same string can represent both. + +Standard-Section: "ASTs" TopLevelStat* + + TopLevelStat = PACKAGE Length Path TopLevelStat* + Stat + + Stat = Term + VALDEF Length NameRef Type rhs_Term? Modifier* + DEFDEF Length NameRef TypeParam* Params* return_Type rhs_Term? 
+ Modifier* + TYPEDEF Length NameRef (Type | Template) Modifier* + IMPORT Length qual_Term Selector* + Selector = IMPORTED name_NameRef + RENAMED Length from_NameRef to_NameRef + // Imports are for scala.meta, they are not used in the backend + + TypeParam = TYPEPARAM Length NameRef Type Modifier* + Params = PARAMS Length Param* + Param = PARAM Length NameRef Type rhs_Term? Modifier* // rhs_Term is present in the case of an aliased class parameter + Template = TEMPLATE Length TypeParam* Param* Parent* Self? Stat* // Stat* always starts with the primary constructor. + Parent = Application + Type + Self = SELFDEF selfName_NameRef selfType_Type + + Term = Path + Application + IDENT NameRef Type // used when ident’s type is not a TermRef + SELECT possiblySigned_NameRef qual_Term + NEW cls_Type + SUPER Length this_Term mixinTrait_Type? + PAIR Length left_Term right_Term + TYPED Length expr_Term ascription_Type + NAMEDARG Length paramName_NameRef arg_Term + ASSIGN Length lhs_Term rhs_Term + BLOCK Length expr_Term Stat* + LAMBDA Length meth_Term target_Type + IF Length cond_Term then_Term else_Term + MATCH Length sel_Term CaseDef* + TRY Length expr_Term CaseDef* finalizer_Term? + RETURN Length meth_ASTRef expr_Term? + REPEATED Length elem_Term* + BIND Length boundName_NameRef patType_Type pat_Term + ALTERNATIVE Length alt_Term* + UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term* + EMPTYTREE + SHARED term_ASTRef + Application = APPLY Length fn_Term arg_Term* + + TYPEAPPLY Length fn_Term arg_Type* + CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree? + ImplicitArg = IMPLICITARG arg_Term + ASTRef = Nat // byte position in AST payload + + Path = Constant + TERMREFdirect sym_ASTRef + TERMREFsymbol sym_ASTRef qual_Type + TERMREFpkg fullyQualified_NameRef + TERMREF possiblySigned_NameRef qual_Type + THIS clsRef_Type + SKOLEMtype refinedType_ASTRef + SHARED path_ASTRef + + + Constant = UNITconst + FALSEconst + TRUEconst + BYTEconst Int + SHORTconst Int + CHARconst Nat + INTconst Int + LONGconst LongInt + FLOATconst Int + DOUBLEconst LongInt + STRINGconst NameRef + NULLconst + CLASSconst Type + ENUMconst Path + + Type = Path + TYPEREFdirect sym_ASTRef + TYPEREFsymbol sym_ASTRef qual_Type + TYPEREFpkg fullyQualified_NameRef + TYPEREF possiblySigned_NameRef qual_Type + SUPERtype Length this_Type underlying_Type + REFINEDtype Length underlying_Type refinement_NameRef info_Type + APPLIEDtype Length tycon_Type arg_Type* + TYPEBOUNDS Length low_Type high_Type + TYPEALIAS Length alias_Type (COVARIANT | CONTRAVARIANT)? 
+ ANNOTATED Length fullAnnotation_Term underlying_Type + ANDtype Length left_Type right_Type + ORtype Length left_Type right_Type + BIND Length boundName_NameRef bounds_Type + // for type-variables defined in a type pattern + BYNAMEtype underlying_Type + POLYtype Length result_Type NamesTypes // needed for refinements + METHODtype Length result_Type NamesTypes // needed for refinements + PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements + SHARED type_ASTRef + NamesTypes = ParamType* + NameType = paramName_NameRef typeOrBounds_ASTRef + + Modifier = PRIVATE + INTERNAL // package private + PROTECTED + PRIVATEqualified qualifier_Type // will be dropped + PROTECTEDqualified qualifier_Type // will be dropped + ABSTRACT + FINAL + SEALED + CASE + IMPLICIT + LAZY + OVERRIDE + INLINE // macro + ABSOVERRIDE // abstract override + STATIC // mapped to static Java member + OBJECT // an object or its class + TRAIT // a trait + LOCAL // private[this] or protected[this] + SYNTHETIC // generated by Scala compiler + ARTIFACT // to be tagged Java Synthetic + MUTABLE // a var + LABEL // method generated as a label + FIELDaccessor // getter or setter + CASEaccessor // getter for case class param + COVARIANT // type param marked “+” + CONTRAVARIANT // type param marked “-” + SCALA2X // Imported from Scala2.x + DEFAULTparameterized // Method with default params + INSUPERCALL // defined in the argument of a constructor supercall + Annotation + Annotation = ANNOTATION Length tycon_Type fullAnnotation_Term + +Note: Tree tags are grouped into 5 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way. + + Category 1 (tags 0-63) : tag + Category 2 (tags 64-95) : tag Nat + Category 3 (tags 96-111) : tag AST + Category 4 (tags 112-127): tag Nat AST + Category 5 (tags 128-255): tag Length + +Standard Section: "Positions" sourceLength_Nat Assoc* + + Assoc = addr_Delta offset_Delta offset_Delta? + // addr_Delta : + // Difference of address to last recorded node. + // All but the first addr_Deltas are > 0, the first is >= 0. + // 2nd offset_Delta: + // Difference of end offset of addressed node vs parent node. Always <= 0 + // 1st offset Delta, if delta >= 0 or 2nd offset delta exists + // Difference of start offset of addressed node vs parent node. + // 1st offset Delta, if delta < 0 and 2nd offset delta does not exist: + // Difference of end offset of addressed node vs parent node. + // Offsets and addresses are difference encoded. + // Nodes which have the same positions as their parents are omitted. 
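The Assoc rules above compress most position records down to two or even one delta. A small sketch of the decoding decision, mirroring the branch in PositionUnpickler further down in this patch; the object and tuple layout here are ours:

object AssocDecodeDemo {
  // Given the first two numbers of an Assoc, decide which deltas were written:
  //   delta2 <= 0             : both offset deltas present, the address delta follows
  //   delta2 > 0, delta1 <  0 : only the end-offset delta was written
  //   delta2 > 0, delta1 >= 0 : only the start-offset delta was written
  def decodeAssoc(delta1: Int, delta2: Int, readNext: () => Int): (Int, Int, Int) =
    if (delta2 <= 0) (delta1, -delta2, readNext())    // (startDelta, endDelta, addrDelta)
    else if (delta1 < 0) (0, -delta1, delta2)
    else (delta1, 0, delta2)
}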
+ Delta = Int // Difference between consecutive offsets / tree addresses, + +**************************************************************************************/ + +object PickleFormat { + + final val header = Array(0x5C, 0xA1, 0xAB, 0x1F) + final val MajorVersion = 0 + final val MinorVersion = 5 + + // Name tags + + final val UTF8 = 1 + final val QUALIFIED = 2 + final val SIGNED = 3 + final val EXPANDED = 4 + final val OBJECTCLASS = 5 + final val SUPERACCESSOR = 6 + final val DEFAULTGETTER = 7 + final val SHADOWED = 8 + +// AST tags + + final val UNITconst = 2 + final val FALSEconst = 3 + final val TRUEconst = 4 + final val NULLconst = 5 + final val PRIVATE = 6 + final val INTERNAL = 7 + final val PROTECTED = 8 + final val ABSTRACT = 9 + final val FINAL = 10 + final val SEALED = 11 + final val CASE = 12 + final val IMPLICIT = 13 + final val LAZY = 14 + final val OVERRIDE = 15 + final val INLINE = 16 + final val ABSOVERRIDE = 17 + final val STATIC = 18 + final val OBJECT = 19 + final val TRAIT = 20 + final val LOCAL = 21 + final val SYNTHETIC = 22 + final val ARTIFACT = 23 + final val MUTABLE = 24 + final val LABEL = 25 + final val FIELDaccessor = 26 + final val CASEaccessor = 27 + final val COVARIANT = 28 + final val CONTRAVARIANT = 29 + final val SCALA2X = 30 + final val DEFAULTparameterized = 31 + final val INSUPERCALL = 32 + + final val SHARED = 64 + final val TERMREFdirect = 65 + final val TYPEREFdirect = 66 + final val TERMREFpkg = 67 + final val TYPEREFpkg = 68 + final val SKOLEMtype = 69 + final val BYTEconst = 70 + final val SHORTconst = 71 + final val CHARconst = 72 + final val INTconst = 73 + final val LONGconst = 74 + final val FLOATconst = 75 + final val DOUBLEconst = 76 + final val STRINGconst = 77 + final val IMPORTED = 78 + + final val THIS = 96 + final val CLASSconst = 97 + final val ENUMconst = 98 + final val BYNAMEtype = 99 + final val NEW = 100 + final val IMPLICITarg = 101 + final val PRIVATEqualified = 102 + final val PROTECTEDqualified = 103 + + final val IDENT = 112 + final val SELECT = 113 + final val TERMREFsymbol = 114 + final val TERMREF = 115 + final val TYPEREFsymbol = 116 + final val TYPEREF = 117 + final val SELFDEF = 118 + + final val PACKAGE = 128 + final val VALDEF = 129 + final val DEFDEF = 130 + final val TYPEDEF = 131 + final val IMPORT = 132 + final val TYPEPARAM = 133 + final val PARAMS = 134 + final val PARAM = 136 + final val RENAMED = 138 + final val APPLY = 139 + final val TYPEAPPLY = 140 + final val PAIR = 142 + final val TYPED = 143 + final val NAMEDARG = 144 + final val ASSIGN = 145 + final val BLOCK = 146 + final val IF = 147 + final val LAMBDA = 148 + final val MATCH = 149 + final val RETURN = 150 + final val TRY = 151 + final val REPEATED = 153 + final val BIND = 154 + final val ALTERNATIVE = 155 + final val UNAPPLY = 156 + final val ANNOTATED = 157 + final val CASEDEF = 158 + final val TEMPLATE = 160 + final val SUPER = 163 + final val SUPERtype = 166 + final val REFINEDtype = 167 + final val APPLIEDtype = 168 + final val TYPEBOUNDS = 169 + final val TYPEALIAS = 170 + final val ANDtype = 171 + final val ORtype = 172 + final val METHODtype = 174 + final val POLYtype = 175 + final val PARAMtype = 176 + final val ANNOTATION = 178 + + final val firstSimpleTreeTag = UNITconst + final val firstNatTreeTag = SHARED + final val firstASTTreeTag = THIS + final val firstNatASTTreeTag = IDENT + final val firstLengthTreeTag = PACKAGE + + def isParamTag(tag: Int) = tag == PARAM || tag == TYPEPARAM + + def isModifierTag(tag: Int) = tag match { + case 
PRIVATE + | INTERNAL + | PROTECTED + | ABSTRACT + | FINAL + | SEALED + | CASE + | IMPLICIT + | LAZY + | OVERRIDE + | INLINE + | ABSOVERRIDE + | STATIC + | OBJECT + | TRAIT + | LOCAL + | SYNTHETIC + | ARTIFACT + | MUTABLE + | LABEL + | FIELDaccessor + | CASEaccessor + | COVARIANT + | CONTRAVARIANT + | SCALA2X + | DEFAULTparameterized + | INSUPERCALL + | ANNOTATION + | PRIVATEqualified + | PROTECTEDqualified => true + case _ => false + } + + def nameTagToString(tag: Int): String = tag match { + case UTF8 => "UTF8" + case QUALIFIED => "QUALIFIED" + case SIGNED => "SIGNED" + case EXPANDED => "EXPANDED" + case OBJECTCLASS => "OBJECTCLASS" + case SUPERACCESSOR => "SUPERACCESSOR" + case DEFAULTGETTER => "DEFAULTGETTER" + } + + def astTagToString(tag: Int): String = tag match { + case UNITconst => "UNITconst" + case FALSEconst => "FALSEconst" + case TRUEconst => "TRUEconst" + case NULLconst => "NULLconst" + case PRIVATE => "PRIVATE" + case INTERNAL => "INTERNAL" + case PROTECTED => "PROTECTED" + case ABSTRACT => "ABSTRACT" + case FINAL => "FINAL" + case SEALED => "SEALED" + case CASE => "CASE" + case IMPLICIT => "IMPLICIT" + case LAZY => "LAZY" + case OVERRIDE => "OVERRIDE" + case INLINE => "INLINE" + case ABSOVERRIDE => "ABSOVERRIDE" + case STATIC => "STATIC" + case OBJECT => "OBJECT" + case TRAIT => "TRAIT" + case LOCAL => "LOCAL" + case SYNTHETIC => "SYNTHETIC" + case ARTIFACT => "ARTIFACT" + case MUTABLE => "MUTABLE" + case LABEL => "LABEL" + case FIELDaccessor => "FIELDaccessor" + case CASEaccessor => "CASEaccessor" + case COVARIANT => "COVARIANT" + case CONTRAVARIANT => "CONTRAVARIANT" + case SCALA2X => "SCALA2X" + case DEFAULTparameterized => "DEFAULTparameterized" + case INSUPERCALL => "INSUPERCALL" + + case SHARED => "SHARED" + case TERMREFdirect => "TERMREFdirect" + case TYPEREFdirect => "TYPEREFdirect" + case TERMREFpkg => "TERMREFpkg" + case TYPEREFpkg => "TYPEREFpkg" + case SKOLEMtype => "SKOLEMtype" + case BYTEconst => "BYTEconst" + case SHORTconst => "SHORTconst" + case CHARconst => "CHARconst" + case INTconst => "INTconst" + case LONGconst => "LONGconst" + case FLOATconst => "FLOATconst" + case DOUBLEconst => "DOUBLEconst" + case STRINGconst => "STRINGconst" + + case IDENT => "IDENT" + case SELECT => "SELECT" + case TERMREFsymbol => "TERMREFsymbol" + case TERMREF => "TERMREF" + case TYPEREFsymbol => "TYPEREFsymbol" + case TYPEREF => "TYPEREF" + + case PACKAGE => "PACKAGE" + case VALDEF => "VALDEF" + case DEFDEF => "DEFDEF" + case TYPEDEF => "TYPEDEF" + case IMPORT => "IMPORT" + case TYPEPARAM => "TYPEPARAM" + case PARAMS => "PARAMS" + case PARAM => "PARAM" + case IMPORTED => "IMPORTED" + case RENAMED => "RENAMED" + case APPLY => "APPLY" + case TYPEAPPLY => "TYPEAPPLY" + case NEW => "NEW" + case PAIR => "PAIR" + case TYPED => "TYPED" + case NAMEDARG => "NAMEDARG" + case ASSIGN => "ASSIGN" + case BLOCK => "BLOCK" + case IF => "IF" + case LAMBDA => "LAMBDA" + case MATCH => "MATCH" + case RETURN => "RETURN" + case TRY => "TRY" + case REPEATED => "REPEATED" + case BIND => "BIND" + case ALTERNATIVE => "ALTERNATIVE" + case UNAPPLY => "UNAPPLY" + case ANNOTATED => "ANNOTATED" + case CASEDEF => "CASEDEF" + case IMPLICITarg => "IMPLICITarg" + case TEMPLATE => "TEMPLATE" + case SELFDEF => "SELFDEF" + case THIS => "THIS" + case SUPER => "SUPER" + case CLASSconst => "CLASSconst" + case ENUMconst => "ENUMconst" + case SUPERtype => "SUPERtype" + case REFINEDtype => "REFINEDtype" + case APPLIEDtype => "APPLIEDtype" + case TYPEBOUNDS => "TYPEBOUNDS" + case TYPEALIAS => "TYPEALIAS" + case ANDtype => 
"ANDtype" + case ORtype => "ORtype" + case BYNAMEtype => "BYNAMEtype" + case POLYtype => "POLYtype" + case METHODtype => "METHODtype" + case PARAMtype => "PARAMtype" + case ANNOTATION => "ANNOTATION" + case PRIVATEqualified => "PRIVATEqualified" + case PROTECTEDqualified => "PROTECTEDqualified" + } +} diff --git a/src/dotty/tools/dotc/core/pickling/PositionPickler.scala b/src/dotty/tools/dotc/core/pickling/PositionPickler.scala new file mode 100644 index 000000000000..8ee70719e4eb --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/PositionPickler.scala @@ -0,0 +1,75 @@ +package dotty.tools +package dotc +package core +package pickling + +import ast.tpd._ +import ast.Trees.WithLazyField +import PickleFormat._ +import core._ +import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._ +import collection.mutable +import TastyBuffer._ +import util.Positions._ + +object PositionPickler { + + trait DeferredPosition { + var parentPos: Position = NoPosition + } + + def traverse(x: Any, parentPos: Position, op: (Tree, Position) => Unit)(implicit ctx: Context): Unit = + if (parentPos.exists) + x match { + case x: Tree @unchecked => + op(x, parentPos) + x match { + case x: MemberDef @unchecked => traverse(x.symbol.annotations, x.pos, op) + case _ => + } + traverse(x.productIterator, x.pos, op) + case x: DeferredPosition => + x.parentPos = parentPos + case xs: TraversableOnce[_] => + xs.foreach(traverse(_, parentPos, op)) + case _ => + } +} +import PositionPickler._ + +class PositionPickler(pickler: TastyPickler, addrOfTree: Tree => Option[Addr]) { + val buf = new TastyBuffer(100000) + pickler.newSection("Positions", buf) + import buf._ + + def picklePositions(roots: List[Tree], totalRange: Position)(implicit ctx: Context) = { + var lastIndex = 0 + def record(tree: Tree, parentPos: Position): Unit = + if (tree.pos.exists) { + def msg = s"failure to pickle $tree at ${tree.pos}, parent = $parentPos" + val endPos = tree.pos.end min parentPos.end + // end positions can be larger than their parents + // e.g. in the case of synthetic empty ranges, which are placed at the next token after + // the current construct. + val endDelta = endPos - parentPos.end + val startPos = + if (endDelta == 0) tree.pos.start max parentPos.start else tree.pos.start min endPos + // Since end positions are corrected above, start positions have to follow suit. 
+ val startDelta = startPos - parentPos.start + if (startDelta != 0 || endDelta != 0) + for (addr <- addrOfTree(tree)) { + buf.writeInt(addr.index - lastIndex) + lastIndex = addr.index + if (startDelta != 0) buf.writeInt(startDelta) + if (endDelta != 0) { + assert(endDelta < 0, msg) + buf.writeInt(endDelta) + } else + assert(startDelta >= 0, msg) + } + } + + buf.writeNat(totalRange.end) + traverse(roots, totalRange, record) + } +} \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala b/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala new file mode 100644 index 000000000000..4d06cf7922bb --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala @@ -0,0 +1,37 @@ +package dotty.tools +package dotc +package core +package pickling + +import util.Positions._ +import collection.mutable +import TastyBuffer.Addr + +object PositionUnpickler { + type AddrToPosition = mutable.HashMap[Addr, Position] +} + +/** Unpickler for tree positions */ +class PositionUnpickler(reader: TastyReader) { + import PositionUnpickler._ + import reader._ + + def unpickle(): (Position, AddrToPosition) = { + val positions = new mutable.HashMap[Addr, Position] // Dotty deviation: Can't use new AddrToPosition here. TODO: fix this! + val sourceLength = readNat() + def readDelta() = if (isAtEnd) 0 else readInt() + var curIndex: Addr = Addr(readDelta()) + while (!isAtEnd) { + val delta1 = readDelta() + val delta2 = readDelta() + val (startDelta, endDelta, indexDelta) = + if (delta2 <= 0) (delta1, -delta2, readDelta()) + else if (delta1 < 0) (0, -delta1, delta2) + else (delta1, 0, delta2) + positions(curIndex) = Position(startDelta, endDelta, startDelta) + // make non-synthetic position; will be made synthetic by normalization. + curIndex += indexDelta + } + (Position(0, sourceLength), positions) + } +} \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala new file mode 100644 index 000000000000..9197a2accf57 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala @@ -0,0 +1,185 @@ +package dotty.tools +package dotc +package core +package pickling + +import util.Util.dble + +object TastyBuffer { + + /** The number of digits of the natural number `nat`, written in base 128 format. */ + def natSize(nat: Int): Int = + if (nat < 128) 1 else natSize(nat >>> 7) + 1 + + /** An address pointing to an index in a Tasty buffer's byte array */ + case class Addr(val index: Int) extends AnyVal { + def -(delta: Int): Addr = Addr(this.index - delta) + def +(delta: Int): Addr = Addr(this.index + delta) + + def relativeTo(base: Addr): Addr = this - base.index - AddrWidth + } + + val NoAddr = Addr(-1) + + /** The maximal number of address bytes. + * Since addresses are written as base-128 natural numbers, + * the value of 4 gives a maximal array size of 256M. + */ + final val AddrWidth = 4 +} +import TastyBuffer._ + +/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format, + * and that supports reading and patching addresses represented as natural numbers. + */ +class TastyBuffer(initialSize: Int) { + + /** The current byte array, will be expanded as needed */ + var bytes = new Array[Byte](initialSize) + + /** The number of bytes written */ + var length = 0 + + // -- Output routines -------------------------------------------- + + /** Write a byte of data. 
*/ + def writeByte(b: Int): Unit = { + if (length == bytes.length) bytes = dble(bytes) + bytes(length) = b.toByte + length += 1 + } + + /** Write the first `n` bytes of `data`. */ + def writeBytes(data: Array[Byte], n: Int): Unit = { + while (bytes.length < length + n) bytes = dble(bytes) + Array.copy(data, 0, bytes, length, n) + length += n + } + + /** Write a natural number in big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def writeNat(x: Int): Unit = + writeLongNat(x.toLong & 0x00000000FFFFFFFFL) + + /** Write a natural number in 2's complement big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def writeInt(x: Int): Unit = + writeLongInt(x) + + /** + * Like writeNat, but for longs. Note that the + * binary representation of LongNat is identical to Nat + * if the long value is in the range Int.MIN_VALUE to + * Int.MAX_VALUE. + */ + def writeLongNat(x: Long): Unit = { + def writePrefix(x: Long): Unit = { + val y = x >>> 7 + if (y != 0L) writePrefix(y) + writeByte((x & 0x7f).toInt) + } + val y = x >>> 7 + if (y != 0L) writePrefix(y) + writeByte(((x & 0x7f) | 0x80).toInt) + } + + /** Like writeInt, but for longs */ + def writeLongInt(x: Long): Unit = { + def writePrefix(x: Long): Unit = { + val y = x >> 7 + if (y != 0L - ((x >> 6) & 1)) writePrefix(y) + writeByte((x & 0x7f).toInt) + } + val y = x >> 7 + if (y != 0L - ((x >> 6) & 1)) writePrefix(y) + writeByte(((x & 0x7f) | 0x80).toInt) + } + + /** Write an uncompressed Long stored in 8 bytes in big endian format */ + def writeUncompressedLong(x: Long): Unit = { + var y = x + val bytes = new Array[Byte](8) + for (i <- 7 to 0 by -1) { + bytes(i) = (y & 0xff).toByte + y = y >>> 8 + } + writeBytes(bytes, 8) + } + + // -- Address handling -------------------------------------------- + + /** Write natural number `x` right-adjusted in a field of `width` bytes + * starting with address `at`. + */ + def putNat(at: Addr, x: Int, width: Int): Unit = { + var y = x + var w = width + var digit = y & 0x7f | 0x80 + while (w > 0) { + w -= 1 + bytes(at.index + w) = digit.toByte + y >>>= 7 + digit = y & 0x7f + } + assert(y == 0, s"number $x too large to fit in $width bytes") + } + + /** The byte at given address */ + def getByte(at: Addr): Int = bytes(at.index) + + /** The natural number at address `at` */ + def getNat(at: Addr): Int = getLongNat(at).toInt + + /** The long natural number at address `at` */ + def getLongNat(at: Addr): Long = { + var b = 0L + var x = 0L + var idx = at.index + do { + b = bytes(idx) + x = (x << 7) | (b & 0x7f) + idx += 1 + } while ((b & 0x80) == 0) + x + } + + /** The address (represented as a natural number) at address `at` */ + def getAddr(at: Addr) = Addr(getNat(at)) + + /** The smallest address equal to or following `at` which points to a non-zero byte */ + final def skipZeroes(at: Addr): Addr = + if (getByte(at) != 0) at else skipZeroes(at + 1) + + /** The address after the natural number found at address `at`. 
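writeNat and writeLongNat above realize the Digit/StopDigit micro-syntax from the format spec: big-endian groups of 7 bits, where prefix bytes have bit 0x80 clear and only the terminating byte has it set. A self-contained round-trip sketch of that encoding, separate from TastyBuffer; NatCodecDemo and its helper names are ours:

import scala.collection.mutable.ArrayBuffer

object NatCodecDemo {
  // Write `x` as big-endian base-128: prefix bytes are Digits (0x80 clear),
  // the last byte is a StopDigit (0x80 set), as in writeLongNat above.
  def writeNat(x: Long, out: ArrayBuffer[Byte]): Unit = {
    def writePrefix(y: Long): Unit = {
      val rest = y >>> 7
      if (rest != 0L) writePrefix(rest)
      out += (y & 0x7f).toByte
    }
    val rest = x >>> 7
    if (rest != 0L) writePrefix(rest)
    out += ((x & 0x7f) | 0x80).toByte
  }

  // Read a Nat back, returning the value and the index just after the last byte read.
  def readNat(bytes: Array[Byte], start: Int): (Long, Int) = {
    var x = 0L; var i = start; var b = 0L
    do { b = bytes(i); x = (x << 7) | (b & 0x7f); i += 1 } while ((b & 0x80) == 0)
    (x, i)
  }

  def demo(): Unit = {
    val buf = ArrayBuffer.empty[Byte]
    writeNat(300, buf)                         // 300 = 0b10_0101100 -> bytes 0x02, 0xAC
    assert(readNat(buf.toArray, 0) == ((300L, 2)))
  }
}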
*/ + final def skipNat(at: Addr): Addr = { + val next = at + 1 + if ((getByte(at) & 0x80) != 0) next else skipNat(next) + } + + /** The address referring to the end of data written so far */ + def currentAddr: Addr = Addr(length) + + /** Reserve `AddrWidth` bytes to write an address into */ + def reserveAddr(): Addr = { + val result = currentAddr + length += AddrWidth + result + } + + /** Fill reserved space at address `at` with address `target` */ + def fillAddr(at: Addr, target: Addr) = + putNat(at, target.index, AddrWidth) + + /** Write address without leading zeroes */ + def writeAddr(addr: Addr): Unit = writeNat(addr.index) + + // -- Finalization -------------------------------------------- + + /** Hook to be overridden in subclasses. + * Perform all actions necessary to assemble the final byte array. + * After `assemble` no more output actions to this buffer are permitted. + */ + def assemble(): Unit = () +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyName.scala b/src/dotty/tools/dotc/core/pickling/TastyName.scala new file mode 100644 index 000000000000..8508d7ffaa88 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyName.scala @@ -0,0 +1,30 @@ +package dotty.tools +package dotc +package core +package pickling + +import core.Names.TermName +import collection.mutable + +abstract class TastyName + +object TastyName { + + case class NameRef(val index: Int) extends AnyVal + + case class Simple(name: TermName) extends TastyName + case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName + case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName + case class Expanded(original: NameRef) extends TastyName + case class ModuleClass(module: NameRef) extends TastyName + case class SuperAccessor(accessed: NameRef) extends TastyName + case class DefaultGetter(method: NameRef, num: Int) extends TastyName + case class Shadowed(original: NameRef) extends TastyName + + class Table extends (NameRef => TastyName) { + private val names = new mutable.ArrayBuffer[TastyName] + def add(name: TastyName) = names += name + def apply(ref: NameRef) = names(ref.index) + def contents: Iterable[TastyName] = names + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala new file mode 100644 index 000000000000..f998cf377ba5 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala @@ -0,0 +1,54 @@ +package dotty.tools +package dotc +package core +package pickling + +import PickleFormat._ +import collection.mutable +import TastyBuffer._ +import java.util.UUID + +class TastyPickler { + + private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)] + + private val headerBuffer = { + val buf = new TastyBuffer(24) + for (ch <- header) buf.writeByte(ch.toByte) + buf.writeNat(MajorVersion) + buf.writeNat(MinorVersion) + val uuid = UUID.randomUUID() + buf.writeUncompressedLong(uuid.getMostSignificantBits) + buf.writeUncompressedLong(uuid.getLeastSignificantBits) + buf + } + + val nameBuffer = new NameBuffer + + def newSection(name: String, buf: TastyBuffer) = + sections += ((nameBuffer.nameIndex(name), buf)) + + def assembleParts(): Array[Byte] = { + def lengthWithLength(buf: TastyBuffer) = { + buf.assemble() + buf.length + natSize(buf.length) + } + val totalSize = + headerBuffer.length + + lengthWithLength(nameBuffer) + { + for ((nameRef, buf) <- sections) yield + natSize(nameRef.index) + lengthWithLength(buf) + }.sum + val all = new 
TastyBuffer(totalSize) + all.writeBytes(headerBuffer.bytes, headerBuffer.length) + all.writeNat(nameBuffer.length) + all.writeBytes(nameBuffer.bytes, nameBuffer.length) + for ((nameRef, buf) <- sections) { + all.writeNat(nameRef.index) + all.writeNat(buf.length) + all.writeBytes(buf.bytes, buf.length) + } + assert(all.length == totalSize && all.bytes.length == totalSize, s"totalSize = $totalSize, all.length = ${all.length}, all.bytes.length = ${all.bytes.length}") + all.bytes + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala new file mode 100644 index 000000000000..91cc168ea518 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala @@ -0,0 +1,121 @@ +package dotty.tools.dotc +package core +package pickling + +import Contexts._, Decorators._ +import printing.Texts._ +import TastyName._ +import TastyUnpickler._ +import TastyBuffer.Addr +import util.Positions.{Position, offsetToInt} +import collection.mutable + +class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) { + + val unpickler = new TastyUnpickler(bytes) + import unpickler.{tastyName, unpickle} + + def nameToString(name: TastyName): String = name match { + case Simple(name) => name.toString + case Qualified(qual, name) => nameRefToString(qual) + "." + nameRefToString(name) + case Signed(original, params, result) => + i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}" + case Expanded(original) => nameRefToString(original) + "/EXPANDED" + case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS" + case SuperAccessor(accessed) => nameRefToString(accessed) + "/SUPERACCESSOR" + case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num + case Shadowed(original) => nameRefToString(original) + "/SHADOWED" + } + + def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref)) + + def printNames() = + for ((name, idx) <- tastyName.contents.zipWithIndex) + println(f"$idx%4d: " + nameToString(name)) + + def printContents(): Unit = { + println("Names:") + printNames() + println("Trees:") + unpickle(new TreeSectionUnpickler) + unpickle(new PositionSectionUnpickler) + } + + class TreeSectionUnpickler extends SectionUnpickler[Unit]("ASTs") { + import PickleFormat._ + def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = { + import reader._ + var indent = 0 + def newLine() = print(f"\n ${index(currentAddr) - index(startAddr)}%5d:" + " " * indent) + def printNat() = print(" " + readNat()) + def printName() = { + val idx = readNat() + print(" ") ;print(idx); print("["); print(nameRefToString(NameRef(idx))); print("]") + } + def printTree(): Unit = { + newLine() + val tag = readByte() + print(" ");print(astTagToString(tag)) + indent += 2 + if (tag >= firstLengthTreeTag) { + val len = readNat() + print(s"($len)") + val end = currentAddr + len + def printTrees() = until(end)(printTree()) + tag match { + case RENAMED => + printName(); printName() + case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND => + printName(); printTrees() + case REFINEDtype => + printTree(); printName(); printTrees() + case RETURN => + printNat(); printTrees() + case METHODtype | POLYtype => + printTree() + until(end) { printName(); printTree() } + case PARAMtype => + printNat(); printNat() + case _ => + printTrees() + } + if (currentAddr != end) { + println(s"incomplete read, current = $currentAddr, end = $end") + goto(end) + } + } + else if (tag >= 
firstNatASTTreeTag) { + tag match { + case IDENT | SELECT | TERMREF | TYPEREF | SELFDEF => printName() + case _ => printNat() + } + printTree() + } + else if (tag >= firstASTTreeTag) + printTree() + else if (tag >= firstNatTreeTag) + tag match { + case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName() + case _ => printNat() + } + indent -= 2 + } + println(i"start = ${reader.startAddr}, base = $base, current = $currentAddr, end = $endAddr") + println(s"${endAddr.index - startAddr.index} bytes of AST, base = $currentAddr") + while (!isAtEnd) { + printTree() + newLine() + } + } + } + + class PositionSectionUnpickler extends SectionUnpickler[Unit]("Positions") { + def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = { + print(s"${reader.endAddr.index - reader.currentAddr.index}") + val (totalRange, positions) = new PositionUnpickler(reader).unpickle() + println(s" position bytes in $totalRange:") + val sorted = positions.toSeq.sortBy(_._1.index) + for ((addr, pos) <- sorted) println(s"${addr.index}: ${offsetToInt(pos.start)} .. ${pos.end}") + } + } +} \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/pickling/TastyReader.scala b/src/dotty/tools/dotc/core/pickling/TastyReader.scala new file mode 100644 index 000000000000..0385e9adb344 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyReader.scala @@ -0,0 +1,142 @@ +package dotty.tools +package dotc +package core +package pickling + + +import TastyBuffer._ +import TastyName.NameRef +import collection.mutable + +/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format, + * and that supports reading and patching addresses represented as natural numbers. + * + * @param bytes The array containing data + * @param from The position from which to read + * @param end The position one greater than the last byte to be read + * @param base The index referenced by the logical zero address Addr(0) + */ +class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) { + + def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length) + + private var bp: Int = start + + def addr(idx: Int) = Addr(idx - base) + def index(addr: Addr) = addr.index + base + + /** The address of the first byte to read, respectively byte that was read */ + def startAddr: Addr = addr(start) + + /** The address of the next byte to read */ + def currentAddr: Addr = addr(bp) + + /** the address one greater than the last brte to read */ + def endAddr: Addr = addr(end) + + /** Have all bytes been read? */ + def isAtEnd: Boolean = bp == end + + /** A new reader over the same array with the same address base, but with + * specified start and end positions + */ + def subReader(start: Addr, end: Addr): TastyReader = + new TastyReader(bytes, index(start), index(end), base) + + /** Read a byte of data. */ + def readByte(): Int = { + val result = bytes(bp) & 0xff + bp += 1 + result + } + + /** Returns the next byte of data as a natural number without advancing the read position */ + def nextByte: Int = bytes(bp) & 0xff + + /** Read the next `n` bytes of `data`. */ + def readBytes(n: Int): Array[Byte] = { + val result = new Array[Byte](n) + Array.copy(bytes, bp, result, 0, n) + bp += n + result + } + + /** Read a natural number fitting in an Int in big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def readNat(): Int = readLongNat.toInt + + /** Read an integer number in 2's complement big endian format, base 128. + * All but the last digits have bit 0x80 set. 
+ */ + def readInt(): Int = readLongInt.toInt + + /** Read a natural number fitting in a Long in big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def readLongNat(): Long = { + var b = 0L + var x = 0L + do { + b = bytes(bp) + x = (x << 7) | (b & 0x7f) + bp += 1 + } while ((b & 0x80) == 0) + x + } + + /** Read a long integer number in 2's complement big endian format, base 128. */ + def readLongInt(): Long = { + var b = bytes(bp) + var x: Long = (b << 1).toByte >> 1 // sign extend with bit 6. + bp += 1 + while ((b & 0x80) == 0) { + b = bytes(bp) + x = (x << 7) | (b & 0x7f) + bp += 1 + } + x + } + + /** Read an uncompressed Long stored in 8 bytes in big endian format */ + def readUncompressedLong(): Long = { + var x = 0 + for (i <- 0 to 7) + x = (x << 8) | (readByte() & 0xff) + x + } + + /** Read a natural number and return as a NameRef */ + def readNameRef() = NameRef(readNat()) + + /** Read a natural number and return as an address */ + def readAddr() = Addr(readNat()) + + /** Read a length number and return the absolute end address implied by it, + * given as
+ . + */ + def readEnd(): Addr = addr(readNat() + bp) + + /** Set read position to the one pointed to by `addr` */ + def goto(addr: Addr): Unit = + bp = index(addr) + + /** Perform `op` until `end` address is reached and collect results in a list. */ + def until[T](end: Addr)(op: => T): List[T] = { + val buf = new mutable.ListBuffer[T] + while (bp < index(end)) buf += op + assert(bp == index(end)) + buf.toList + } + + /** If before given `end` address, the result of `op`, otherwise `default` */ + def ifBefore[T](end: Addr)(op: => T, default: T): T = + if (bp < index(end)) op else default + + /** Perform `op` while cindition `cond` holds and collect results in a list. */ + def collectWhile[T](cond: => Boolean)(op: => T): List[T] = { + val buf = new mutable.ListBuffer[T] + while (cond) buf += op + buf.toList + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala new file mode 100644 index 000000000000..e5eabaf361df --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala @@ -0,0 +1,95 @@ +package dotty.tools.dotc +package core +package pickling + +import scala.collection.mutable +import PickleFormat._ +import Names.{Name, termName} +import java.util.UUID + +object TastyUnpickler { + class UnpickleException(msg: String) extends Exception(msg) + + abstract class SectionUnpickler[R](val name: String) { + def unpickle(reader: TastyReader, tastyName: TastyName.Table): R + } +} + +import TastyUnpickler._ + +class TastyUnpickler(reader: TastyReader) { + import reader._ + + def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + + private val sectionReader = new mutable.HashMap[String, TastyReader] + val tastyName = new TastyName.Table + + def check(cond: Boolean, msg: => String) = + if (!cond) throw new UnpickleException(msg) + + def readString(): String = { + val TastyName.Simple(name) = tastyName(readNameRef()) + name.toString + } + + def readName(): TastyName = { + import TastyName._ + val tag = readByte() + val length = readNat() + val start = currentAddr + val end = start + length + val result = tag match { + case UTF8 => + goto(end) + Simple(termName(bytes, start.index, length)) + case QUALIFIED => + Qualified(readNameRef(), readNameRef()) + case SIGNED => + val original = readNameRef() + val result = readNameRef() + val params = until(end)(readNameRef()) + Signed(original, params, result) + case EXPANDED => + Expanded(readNameRef()) + case OBJECTCLASS => + ModuleClass(readNameRef()) + case SUPERACCESSOR => + SuperAccessor(readNameRef()) + case DEFAULTGETTER => + DefaultGetter(readNameRef(), readNat()) + case SHADOWED => + Shadowed(readNameRef()) + } + assert(currentAddr == end, s"bad name $result $start $currentAddr $end") + result + } + + private def readHeader(): UUID = { + for (i <- 0 until header.length) + check(readByte() == header(i), "not a TASTy file") + val major = readNat() + val minor = readNat() + check(major == MajorVersion && minor <= MinorVersion, + s"""TASTy signature has wrong version. 
+ | expected: $MajorVersion.$MinorVersion + | found : $major.$minor""".stripMargin) + new UUID(readUncompressedLong(), readUncompressedLong()) + } + + val uuid = readHeader() + + locally { + until(readEnd()) { tastyName.add(readName()) } + while (!isAtEnd) { + val secName = readString() + val secEnd = readEnd() + sectionReader(secName) = new TastyReader(bytes, currentAddr.index, secEnd.index, currentAddr.index) + goto(secEnd) + } + } + + def unpickle[R](sec: SectionUnpickler[R]): Option[R] = + for (reader <- sectionReader.get(sec.name)) yield + sec.unpickle(reader, tastyName) +} diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala new file mode 100644 index 000000000000..c1eae5014c9a --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala @@ -0,0 +1,180 @@ +package dotty.tools +package dotc +package core +package pickling + +import util.Util.{bestFit, dble} +import TastyBuffer.{Addr, AddrWidth} +import config.Printers.pickling +import ast.tpd.Tree + +class TreeBuffer extends TastyBuffer(1000000) { + + private final val ItemsOverOffsets = 2 + + private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets) + private var offsets = new Array[Int](initialOffsetSize) + private var isRelative = new Array[Boolean](initialOffsetSize) + private var delta: Array[Int] = _ + private var numOffsets = 0 + + private[pickling] val pickledTrees = new java.util.IdentityHashMap[Tree, Any] // Value type is really Addr, but that's not compatible with null + + def addrOfTree(tree: Tree): Option[Addr] = pickledTrees.get(tree) match { + case null => None + case n => Some(n.asInstanceOf[Addr]) + } + + private def offset(i: Int): Addr = Addr(offsets(i)) + + private def keepOffset(relative: Boolean): Unit = { + if (numOffsets == offsets.length) { + offsets = dble(offsets) + isRelative = dble(isRelative) + } + offsets(numOffsets) = length + isRelative(numOffsets) = relative + numOffsets += 1 + } + + /** Reserve space for a reference, to be adjusted later */ + def reserveRef(relative: Boolean): Addr = { + val addr = currentAddr + keepOffset(relative) + reserveAddr() + addr + } + + /** Write reference right adjusted into freshly reserved field. 
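The header check and section loop above are the reading side of TastyPickler.assembleParts. A hedged end-to-end sketch using only classes added in this patch; the section name "Demo" and the payload are made up, and none of this needs a compiler Context:

import dotty.tools.dotc.core.pickling._

object TastyRoundTripDemo {
  def demo(): Option[Int] = {
    val pickler = new TastyPickler
    val buf = new TastyBuffer(16)
    pickler.newSection("Demo", buf)       // registers the section name in the name table
    buf.writeNat(42)
    val bytes = pickler.assembleParts()   // header ++ name table ++ sections

    val unpickler = new TastyUnpickler(bytes)   // re-checks the header, reads the name table
    unpickler.unpickle(new TastyUnpickler.SectionUnpickler[Int]("Demo") {
      def unpickle(reader: TastyReader, names: TastyName.Table): Int = reader.readNat()
    })                                    // expected: Some(42)
  }
}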
*/ + def writeRef(target: Addr) = { + keepOffset(relative = false) + fillAddr(reserveAddr(), target) + } + + /** Fill previously reserved field with a reference */ + def fillRef(at: Addr, target: Addr, relative: Boolean) = { + val addr = if (relative) target.relativeTo(at) else target + fillAddr(at, addr) + } + + /** The amount by which the bytes at the given address are shifted under compression */ + def deltaAt(at: Addr): Int = { + val idx = bestFit(offsets, numOffsets, at.index - 1) + if (idx < 0) 0 else delta(idx) + } + + /** The address to which `x` is translated under compression */ + def adjusted(x: Addr): Addr = x - deltaAt(x) + + /** Compute all shift-deltas */ + private def computeDeltas() = { + delta = new Array[Int](numOffsets) + var lastDelta = 0 + var i = 0 + while (i < numOffsets) { + val off = offset(i) + val skippedOff = skipZeroes(off) + val skippedCount = skippedOff.index - off.index + assert(skippedCount < AddrWidth, s"unset field at position $off") + lastDelta += skippedCount + delta(i) = lastDelta + i += 1 + } + } + + /** The absolute or relative adjusted address at index `i` of `offsets` array*/ + private def adjustedOffset(i: Int): Addr = { + val at = offset(i) + val original = getAddr(at) + if (isRelative(i)) { + val start = skipNat(at) + val len1 = original + delta(i) - deltaAt(original + start.index) + val len2 = adjusted(original + start.index) - adjusted(start).index + assert(len1 == len2, + s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") + len1 + } else adjusted(original) + } + + /** Adjust all offsets according to previously computed deltas */ + private def adjustOffsets(): Unit = { + for (i <- 0 until numOffsets) { + val corrected = adjustedOffset(i) + fillAddr(offset(i), corrected) + } + } + + /** Adjust deltas to also take account references that will shrink (and thereby + * generate additional zeroes that can be skipped) due to previously + * computed adjustements. + */ + private def adjustDeltas(): Int = { + val delta1 = new Array[Int](delta.length) + var lastDelta = 0 + var i = 0 + while (i < numOffsets) { + val corrected = adjustedOffset(i) + lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index) + delta1(i) = lastDelta + i += 1 + } + val saved = + if (numOffsets == 0) 0 + else delta1(numOffsets - 1) - delta(numOffsets - 1) + delta = delta1 + saved + } + + /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */ + private def compress(): Int = { + var lastDelta = 0 + var start = 0 + var i = 0 + var wasted = 0 + def shift(end: Int) = + Array.copy(bytes, start, bytes, start - lastDelta, end - start) + while (i < numOffsets) { + val next = offsets(i) + shift(next) + start = next + delta(i) - lastDelta + val pastZeroes = skipZeroes(Addr(next)).index + assert(pastZeroes >= start, s"something's wrong: eliminated non-zero") + wasted += (pastZeroes - start) + lastDelta = delta(i) + i += 1 + } + shift(length) + length -= lastDelta + wasted + } + + def adjustPickledTrees(): Unit = { + val it = pickledTrees.keySet.iterator + while (it.hasNext) { + val tree = it.next + pickledTrees.put(tree, adjusted(pickledTrees.get(tree).asInstanceOf[Addr])) + } + } + + /** Final assembly, involving the following steps: + * - compute deltas + * - adjust deltas until additional savings are < 1% of total + * - adjust offsets according to the adjusted deltas + * - shrink buffer, skipping zeroes. 
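compactify (below) keeps re-running adjustDeltas because shrinking one reference can free bytes that let other references shrink in turn. The per-reference saving is just the gap between the reserved address width and the final Nat encoding; a tiny standalone illustration, with AddrShrinkDemo and its demo values made up here:

object AddrShrinkDemo {
  final val AddrWidth = 4                                    // bytes reserved per forward reference
  def natSize(nat: Int): Int = if (nat < 128) 1 else natSize(nat >>> 7) + 1

  // Bytes recovered once a reserved 4-byte slot is rewritten as a plain Nat,
  // i.e. the quantity summed up by adjustDeltas above.
  def savedBytes(target: Int): Int = AddrWidth - natSize(target)

  def demo(): Unit = {
    assert(savedBytes(100) == 3)      // small targets shrink to a single byte
    assert(savedBytes(20000) == 1)    // larger targets still save something
  }
}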
+ */ + def compactify(): Unit = { + val origLength = length + computeDeltas() + //println(s"offsets: ${offsets.take(numOffsets).deep}") + //println(s"deltas: ${delta.take(numOffsets).deep}") + var saved = 0 + do { + saved = adjustDeltas() + pickling.println(s"adjusting deltas, saved = $saved") + } while (saved > 0 && length / saved < 100) + adjustOffsets() + adjustPickledTrees() + val wasted = compress() + pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now. + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala new file mode 100644 index 000000000000..f16cb50e6745 --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala @@ -0,0 +1,538 @@ +package dotty.tools +package dotc +package core +package pickling + +import ast.Trees._ +import PickleFormat._ +import core._ +import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, StdNames.tpnme, NameOps._ +import collection.mutable +import TastyBuffer._ + +class TreePickler(pickler: TastyPickler) { + val buf = new TreeBuffer + pickler.newSection("ASTs", buf) + import buf._ + import pickler.nameBuffer.{nameIndex, fullNameIndex} + import ast.tpd._ + + private val symRefs = new mutable.HashMap[Symbol, Addr] + private val forwardSymRefs = new mutable.HashMap[Symbol, List[Addr]] + private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's not compatible with null + + private def withLength(op: => Unit) = { + val lengthAddr = reserveRef(relative = true) + op + fillRef(lengthAddr, currentAddr, relative = true) + } + + private var makeSymbolicRefsTo: Symbol = NoSymbol + + /** All references to members of class `sym` are pickled + * as symbolic references. Used to pickle the self info of a class. + * Without this precaution we get an infinite cycle when unpickling pos/extmethods.scala + * The problem arises when a self type of a trait is a type parameter of the same trait. 
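TreePickler's withLength above is the generic trick behind every Category-5 ("tag Length ...") node: reserve the length field, emit the payload, then backpatch the field with the distance to the current address. Stripped of the Tasty buffers, the shape is as follows; this is an illustrative mock where a single Int slot stands in for the fixed-width slot that the real TreeBuffer reserves and later squeezes during compactify:

object WithLengthDemo {
  import scala.collection.mutable.ArrayBuffer

  class Buf {
    val out = ArrayBuffer.empty[Int]
    def write(b: Int): Unit = out += b
    private def reserve(): Int = { out += -1; out.length - 1 }   // placeholder slot
    def withLength(op: => Unit): Unit = {
      val lengthSlot = reserve()
      op
      out(lengthSlot) = out.length - lengthSlot - 1              // entries written by op
    }
  }

  def demo(): Seq[Int] = {
    val b = new Buf
    b.write(139)                       // e.g. the APPLY tag
    b.withLength { b.write(1); b.write(2); b.write(3) }
    b.out.toSeq                        // Seq(139, 3, 1, 2, 3)
  }
}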
+ */ + private def withSymbolicRefsTo[T](sym: Symbol)(op: => T): T = { + val saved = makeSymbolicRefsTo + makeSymbolicRefsTo = sym + try op + finally makeSymbolicRefsTo = saved + } + + def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match { + case tree: MemberDef => + if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr + case _ => + } + + def registerDef(sym: Symbol): Unit = { + symRefs(sym) = currentAddr + forwardSymRefs.get(sym) match { + case Some(refs) => + refs.foreach(fillRef(_, currentAddr, relative = false)) + forwardSymRefs -= sym + case None => + } + } + + private def pickleName(name: Name) = writeNat(nameIndex(name).index) + private def pickleName(name: TastyName) = writeNat(nameIndex(name).index) + private def pickleNameAndSig(name: Name, sig: Signature) = { + val Signature(params, result) = sig + pickleName(TastyName.Signed(nameIndex(name), params.map(fullNameIndex), fullNameIndex(result))) + } + + private def pickleSymRef(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match { + case Some(label) => + if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym) + case None => + ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos) + pickleForwardSymRef(sym) + } + + private def pickleForwardSymRef(sym: Symbol)(implicit ctx: Context) = { + val ref = reserveRef(relative = false) + assert(!sym.is(Flags.Package), sym) + forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil) + } + + def pickle(trees: List[Tree])(implicit ctx: Context) = { + + def qualifiedName(sym: Symbol): TastyName = + if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName) + else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name)) + + def pickleConstant(c: Constant): Unit = c.tag match { + case UnitTag => + writeByte(UNITconst) + case BooleanTag => + writeByte(if (c.booleanValue) TRUEconst else FALSEconst) + case ByteTag => + writeByte(BYTEconst) + writeInt(c.byteValue) + case ShortTag => + writeByte(SHORTconst) + writeInt(c.shortValue) + case CharTag => + writeByte(CHARconst) + writeNat(c.charValue) + case IntTag => + writeByte(INTconst) + writeInt(c.intValue) + case LongTag => + writeByte(LONGconst) + writeLongInt(c.longValue) + case FloatTag => + writeByte(FLOATconst) + writeInt(java.lang.Float.floatToRawIntBits(c.floatValue)) + case DoubleTag => + writeByte(DOUBLEconst) + writeLongInt(java.lang.Double.doubleToRawLongBits(c.doubleValue)) + case StringTag => + writeByte(STRINGconst) + writeNat(nameIndex(c.stringValue).index) + case NullTag => + writeByte(NULLconst) + case ClazzTag => + writeByte(CLASSconst) + pickleType(c.typeValue) + case EnumTag => + writeByte(ENUMconst) + pickleType(c.symbolValue.termRef) + } + + def pickleType(tpe0: Type, richTypes: Boolean = false): Unit = try { + val tpe = tpe0.stripTypeVar + val prev = pickledTypes.get(tpe) + if (prev == null) { + pickledTypes.put(tpe, currentAddr) + pickleNewType(tpe, richTypes) + } + else { + writeByte(SHARED) + writeRef(prev.asInstanceOf[Addr]) + } + } catch { + case ex: AssertionError => + println(i"error when pickling type $tpe0") + throw ex + } + + def pickleNewType(tpe: Type, richTypes: Boolean): Unit = try { tpe match { + case ConstantType(value) => + pickleConstant(value) + case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) => + pickleType(tpe.info.bounds.hi) + case tpe: WithFixedSym => + val sym = tpe.symbol + if (sym.is(Flags.Package)) { + writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg) + 
pickleName(qualifiedName(sym)) + } + else if (tpe.prefix == NoPrefix) { + def pickleRef() = { + writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) + pickleSymRef(sym) + } + if (sym is Flags.BindDefinedType) { + registerDef(sym) + writeByte(BIND) + withLength { + pickleName(sym.name) + pickleType(sym.info) + pickleRef() + } + } + else pickleRef() + } + else { + writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) + pickleSymRef(sym); pickleType(tpe.prefix) + } + case tpe: TermRefWithSignature => + writeByte(TERMREF) + pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix) + case tpe: NamedType => + if (tpe.name == tpnme.Apply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) + // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will + // be reconstituted when unpickling. + pickleType(tpe.prefix) + else tpe.prefix match { + case prefix: ThisType if prefix.cls == makeSymbolicRefsTo => + pickleType(NamedType.withFixedSym(tpe.prefix, tpe.symbol)) + case _ => + writeByte(if (tpe.isType) TYPEREF else TERMREF) + pickleName(tpe.name); pickleType(tpe.prefix) + } + case tpe: ThisType => + writeByte(THIS) + pickleType(tpe.tref) + case tpe: SuperType => + writeByte(SUPERtype) + withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)} + case tpe: SkolemType => + writeByte(SKOLEMtype) + writeRef(pickledTypes.get(tpe.binder).asInstanceOf[Addr]) + case tpe: RefinedType => + val args = tpe.argInfos(interpolate = false) + if (args.isEmpty) { + writeByte(REFINEDtype) + withLength { + pickleType(tpe.parent) + pickleName(tpe.refinedName) + pickleType(tpe.refinedInfo, richTypes = true) + } + } + else { + writeByte(APPLIEDtype) + withLength { pickleType(tpe.withoutArgs(args)); args.foreach(pickleType(_)) } + } + case tpe: TypeAlias => + writeByte(TYPEALIAS) + withLength { + pickleType(tpe.alias, richTypes) + tpe.variance match { + case 1 => writeByte(COVARIANT) + case -1 => writeByte(CONTRAVARIANT) + case 0 => + } + } + case tpe: TypeBounds => + writeByte(TYPEBOUNDS) + withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) } + case tpe: AnnotatedType => + writeByte(ANNOTATED) + withLength { pickleTree(tpe.annot.tree); pickleType(tpe.tpe, richTypes) } + case tpe: AndOrType => + writeByte(if (tpe.isAnd) ANDtype else ORtype) + withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) } + case tpe: ExprType => + writeByte(BYNAMEtype) + pickleType(tpe.underlying) + case tpe: MethodType if richTypes => + writeByte(METHODtype) + pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes) + case tpe: PolyType if richTypes => + writeByte(POLYtype) + pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramBounds) + case tpe: PolyParam => + if (!pickleParamType(tpe)) + // TODO figure out why this case arises in e.g. pickling AbstractFileReader. 
+ ctx.typerState.constraint.entry(tpe) match { + case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes) + case _ => assert(false, s"orphan poly parameter: $tpe") + } + case tpe: MethodParam => + assert(pickleParamType(tpe), s"orphan method parameter: $tpe") + case tpe: LazyRef => + pickleType(tpe.ref) + }} catch { + case ex: AssertionError => + println(i"error while pickling type $tpe") + throw ex + } + + def pickleMethodic(result: Type, names: List[Name], types: List[Type]) = + withLength { + pickleType(result, richTypes = true) + (names, types).zipped.foreach { (name, tpe) => + pickleName(name); pickleType(tpe) + } + } + + def pickleParamType(tpe: ParamType): Boolean = { + val binder = pickledTypes.get(tpe.binder) + val pickled = binder != null + if (pickled) { + writeByte(PARAMtype) + withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) } + } + pickled + } + + def pickleTpt(tpt: Tree): Unit = pickleType(tpt.tpe) // TODO correlate with original when generating positions + + def pickleTreeUnlessEmpty(tree: Tree): Unit = + if (!tree.isEmpty) pickleTree(tree) + + def pickleTree(tree: Tree): Unit = try { + pickledTrees.put(tree, currentAddr) + tree match { + case Ident(name) => + tree.tpe match { + case tp: TermRef => pickleType(tp) + case _ => + writeByte(IDENT) + pickleName(name) + pickleType(tree.tpe) + } + case This(_) => + pickleType(tree.tpe) + case Select(qual, name) => + writeByte(SELECT) + val realName = tree.tpe match { + case tp: NamedType if tp.name.isShadowedName => tp.name + case _ => name + } + val sig = tree.tpe.signature + if (sig == Signature.NotAMethod) pickleName(realName) + else pickleNameAndSig(realName, sig) + pickleTree(qual) + case Apply(fun, args) => + writeByte(APPLY) + withLength { + pickleTree(fun) + args.foreach(pickleTree) + } + case TypeApply(fun, args) => + writeByte(TYPEAPPLY) + withLength { + pickleTree(fun) + args.foreach(pickleTpt) + } + case Literal(const1) => + pickleConstant { + tree.tpe match { + case ConstantType(const2) => const2 + case _ => const1 + } + } + case Super(qual, mix) => + writeByte(SUPER) + withLength { + pickleTree(qual); + if (!mix.isEmpty) { + val SuperType(_, mixinType) = tree.tpe + pickleType(mixinType) + } + } + case New(tpt) => + writeByte(NEW) + pickleTpt(tpt) + case Pair(left, right) => + writeByte(PAIR) + withLength { pickleTree(left); pickleTree(right) } + case Typed(expr, tpt) => + writeByte(TYPED) + withLength { pickleTree(expr); pickleTpt(tpt) } + case NamedArg(name, arg) => + writeByte(NAMEDARG) + withLength { pickleName(name); pickleTree(arg) } + case Assign(lhs, rhs) => + writeByte(ASSIGN) + withLength { pickleTree(lhs); pickleTree(rhs) } + case Block(stats, expr) => + writeByte(BLOCK) + stats.foreach(preRegister) + withLength { pickleTree(expr); stats.foreach(pickleTree) } + case If(cond, thenp, elsep) => + writeByte(IF) + withLength{ pickleTree(cond); pickleTree(thenp); pickleTree(elsep) } + case Closure(env, meth, tpt) => + writeByte(LAMBDA) + assert(env.isEmpty) + withLength{ + pickleTree(meth) + if (tpt.tpe.exists) pickleTpt(tpt) + } + case Match(selector, cases) => + writeByte(MATCH) + withLength { pickleTree(selector); cases.foreach(pickleTree) } + case CaseDef(pat, guard, rhs) => + writeByte(CASEDEF) + withLength { pickleTree(pat); pickleTree(rhs); pickleTreeUnlessEmpty(guard) } + case Return(expr, from) => + writeByte(RETURN) + withLength { pickleSymRef(from.symbol); pickleTreeUnlessEmpty(expr) } + case Try(block, cases, finalizer) => + writeByte(TRY) + withLength { 
pickleTree(block); cases.foreach(pickleTree); pickleTreeUnlessEmpty(finalizer) } + case SeqLiteral(elems) => + writeByte(REPEATED) + withLength { elems.foreach(pickleTree) } + case TypeTree(original) => + pickleTpt(tree) + case Bind(name, body) => + registerDef(tree.symbol) + writeByte(BIND) + withLength { pickleName(name); pickleType(tree.symbol.info); pickleTree(body) } + case Alternative(alts) => + writeByte(ALTERNATIVE) + withLength { alts.foreach(pickleTree) } + case UnApply(fun, implicits, patterns) => + writeByte(UNAPPLY) + withLength { + pickleTree(fun) + for (implicitArg <- implicits) { + writeByte(IMPLICITarg) + pickleTree(implicitArg) + } + pickleType(tree.tpe) + patterns.foreach(pickleTree) + } + case tree: ValDef => + pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs) + case tree: DefDef => + def pickleAllParams = { + pickleParams(tree.tparams) + for (vparams <- tree.vparamss) { + writeByte(PARAMS) + withLength { pickleParams(vparams) } + } + } + pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleAllParams) + case tree: TypeDef => + pickleDef(TYPEDEF, tree.symbol, tree.rhs) + case tree: Template => + registerDef(tree.symbol) + writeByte(TEMPLATE) + val (params, rest) = tree.body partition { + case stat: TypeDef => stat.symbol is Flags.Param + case stat: ValOrDefDef => + stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter + case _ => false + } + withLength { + pickleParams(params) + tree.parents.foreach(pickleTree) + val cinfo @ ClassInfo(_, _, _, _, selfInfo) = tree.symbol.owner.info + if ((selfInfo ne NoType) || !tree.self.isEmpty) { + writeByte(SELFDEF) + pickleName(tree.self.name) + withSymbolicRefsTo(tree.symbol.owner) { + pickleType { + cinfo.selfInfo match { + case sym: Symbol => sym.info + case tp: Type => tp + } + } + } + } + pickleStats(tree.constr :: rest) + } + case Import(expr, selectors) => + writeByte(IMPORT) + withLength { + pickleTree(expr) + selectors foreach { + case Pair(Ident(from), Ident(to)) => + writeByte(RENAMED) + withLength { pickleName(from); pickleName(to) } + case Ident(name) => + writeByte(IMPORTED) + pickleName(name) + } + } + case PackageDef(pid, stats) => + writeByte(PACKAGE) + withLength { pickleType(pid.tpe); pickleStats(stats) } + }} + catch { + case ex: AssertionError => + println(i"error when pickling tree $tree") + throw ex + } + + def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ()) = { + assert(symRefs(sym) == NoAddr) + registerDef(sym) + writeByte(tag) + withLength { + pickleName(sym.name) + pickleParams + tpt match { + case tpt: TypeTree => pickleTpt(tpt) + case _ => pickleTree(tpt) + } + pickleTreeUnlessEmpty(rhs) + pickleModifiers(sym) + } + } + + def pickleParam(tree: Tree): Unit = tree match { + case tree: ValDef => pickleDef(PARAM, tree.symbol, tree.tpt) + case tree: DefDef => pickleDef(PARAM, tree.symbol, tree.tpt, tree.rhs) + case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs) + } + + def pickleParams(trees: List[Tree]): Unit = { + trees.foreach(preRegister) + trees.foreach(pickleParam) + } + + def pickleStats(stats: List[Tree]) = { + stats.foreach(preRegister) + stats.foreach(stat => if (!stat.isEmpty) pickleTree(stat)) + } + + def pickleModifiers(sym: Symbol): Unit = { + import Flags._ + val flags = sym.flags + val privateWithin = sym.privateWithin + if (privateWithin.exists) { + writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified) + pickleType(privateWithin.typeRef) + } + if (flags is Private) writeByte(PRIVATE) + if (flags is 
Protected) if (!privateWithin.exists) writeByte(PROTECTED) + if ((flags is Final) && !(sym is Module)) writeByte(FINAL) + if (flags is Case) writeByte(CASE) + if (flags is Override) writeByte(OVERRIDE) + if (flags is Inline) writeByte(INLINE) + if (flags is JavaStatic) writeByte(STATIC) + if (flags is Module) writeByte(OBJECT) + if (flags is Local) writeByte(LOCAL) + if (flags is Synthetic) writeByte(SYNTHETIC) + if (flags is Artifact) writeByte(ARTIFACT) + if (flags is Scala2x) writeByte(SCALA2X) + if (flags is InSuperCall) writeByte(INSUPERCALL) + if (sym.isTerm) { + if (flags is Implicit) writeByte(IMPLICIT) + if ((flags is Lazy) && !(sym is Module)) writeByte(LAZY) + if (flags is AbsOverride) writeByte(ABSOVERRIDE) + if (flags is Mutable) writeByte(MUTABLE) + if (flags is Accessor) writeByte(FIELDaccessor) + if (flags is CaseAccessor) writeByte(CASEaccessor) + if (flags is DefaultParameterized) writeByte(DEFAULTparameterized) + } else { + if (flags is Sealed) writeByte(SEALED) + if (flags is Abstract) writeByte(ABSTRACT) + if (flags is Trait) writeByte(TRAIT) + if (flags is Covariant) writeByte(COVARIANT) + if (flags is Contravariant) writeByte(CONTRAVARIANT) + } + sym.annotations.foreach(pickleAnnotation) + } + + def pickleAnnotation(ann: Annotation) = { + writeByte(ANNOTATION) + withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) } + } + + trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree)) + assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, %") + compactify() + } +} diff --git a/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala b/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala new file mode 100644 index 000000000000..7d80065cf2ec --- /dev/null +++ b/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala @@ -0,0 +1,862 @@ +package dotty.tools +package dotc +package core +package pickling + +import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._ +import StdNames._, Denotations._, Flags._, Constants._, Annotations._ +import util.Positions._ +import dotty.tools.dotc.ast.{tpd, Trees, untpd} +import Trees._ +import Decorators._ +import TastyUnpickler._, TastyBuffer._ +import annotation.switch +import scala.collection.{ mutable, immutable } +import typer.Mode +import config.Printers.pickling +import PositionPickler._ + +/** Unpickler for typed trees + * @param reader the reader from which to unpickle + * @param tastyName the nametable + */ +class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { + import dotty.tools.dotc.core.pickling.PickleFormat._ + import TastyName._ + import tpd._ + + private var readPositions = false + private var totalRange = NoPosition + private var positions: collection.Map[Addr, Position] = _ + + /** Make a subsequent call to `unpickle` return trees with positions + * @param totalRange the range position enclosing all returned trees, + * or NoPosition if positions should not be unpickled + * @param positions a map from tree addresses to their positions relative + * to positions of parent nodes. + */ + def usePositions(totalRange: Position, positions: collection.Map[Addr, Position]): Unit = { + readPositions = true + this.totalRange = totalRange + this.positions = positions + } + + private val symAtAddr = new mutable.HashMap[Addr, Symbol] + private val treeAtAddr = new mutable.HashMap[Addr, Tree] + private val typeAtAddr = new mutable.HashMap[Addr, Type] // currently populated only for types that are known to be SHAREd. 
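
// Illustrative sketch (simplified; all names below are invented and are not the
// real TreePickler/TreeUnpickler API): the SHARED mechanism deduplicates pickled
// types. The pickler remembers the address at which a type was first written
// (pickledTypes, keyed by object identity) and emits a back-reference for every
// later occurrence; the unpickler resolves such references per address
// (typeAtAddr above), so both sides agree on a single copy per shared node.
object SharingSketch {
  import scala.collection.mutable

  final case class Addr(index: Int)

  sealed trait Entry
  final case class Full(payload: String) extends Entry  // first occurrence: full encoding
  final case class SharedRef(addr: Addr) extends Entry  // later occurrences: back-reference

  class Writer {
    private val out  = mutable.ArrayBuffer.empty[Entry]
    private val seen = mutable.HashMap.empty[String, Addr] // the real pickler keys on identity

    def write(payload: String): Addr = {
      val here = Addr(out.length)
      seen.get(payload) match {
        case Some(first) => out += SharedRef(first)
        case None        => seen(payload) = here; out += Full(payload)
      }
      here
    }
    def result: Vector[Entry] = out.toVector
  }

  class Reader(entries: Vector[Entry]) {
    def read(addr: Addr): String = entries(addr.index) match {
      case Full(payload)   => payload
      case SharedRef(prev) => read(prev) // follow the back-reference to the first occurrence
    }
  }

  def main(args: Array[String]): Unit = {
    val w  = new Writer
    val a1 = w.write("List[Int]")
    w.write("String")
    val a3 = w.write("List[Int]")     // second occurrence becomes a back-reference to a1
    val r  = new Reader(w.result)
    assert(r.read(a3) == r.read(a1))  // both addresses resolve to the same payload
  }
}
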
+ private var stubs: Set[Symbol] = Set() + + private var roots: Set[SymDenotation] = null + + /** Enter all toplevel classes and objects into their scopes + * @param roots a set of SymDenotations that should be overwritten by unpickling + */ + def enterTopLevel(roots: Set[SymDenotation])(implicit ctx: Context): Unit = { + this.roots = roots + new TreeReader(reader).fork.indexStats(reader.endAddr) + } + + /** The unpickled trees */ + def unpickle()(implicit ctx: Context): List[Tree] = { + assert(roots != null, "unpickle without previous enterTopLevel") + val stats = new TreeReader(reader) + .readIndexedStats(NoSymbol, reader.endAddr)(ctx.addMode(Mode.AllowDependentFunctions)) + normalizePos(stats, totalRange) + stats + } + + def toTermName(tname: TastyName): TermName = tname match { + case Simple(name) => name + case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name) + case Signed(original, params, result) => toTermName(original) + case Shadowed(original) => toTermName(original).shadowedName + case Expanded(original) => ??? + case ModuleClass(original) => toTermName(original).moduleClassName.toTermName + case SuperAccessor(accessed) => ??? + case DefaultGetter(meth, num) => ??? + } + + def toTermName(ref: NameRef): TermName = toTermName(tastyName(ref)) + def toTypeName(ref: NameRef): TypeName = toTermName(ref).toTypeName + + class Completer(reader: TastyReader) extends LazyType { + import reader._ + def complete(denot: SymDenotation)(implicit ctx: Context): Unit = { + treeAtAddr(currentAddr) = new TreeReader(reader).readIndexedDef() + } + } + + class TreeReader(val reader: TastyReader) { + import reader._ + + def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr)) + def fork = forkAt(currentAddr) + + def skipTree(tag: Int): Unit = + if (tag >= firstLengthTreeTag) goto(readEnd()) + else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() } + else if (tag >= firstASTTreeTag) skipTree() + else if (tag >= firstNatTreeTag) readNat() + def skipTree(): Unit = skipTree(readByte()) + + def skipParams(): Unit = + while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree() + + def readName(): TermName = toTermName(readNameRef()) + + def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ = + tastyName(readNameRef()) match { + case Signed(original, params, result) => + var sig = Signature(params map toTypeName, toTypeName(result)) + if (sig == Signature.NotAMethod) sig = Signature.NotAMethod + (toTermName(original), sig) + case name => + toTermName(name) + } + +// ------ Reading types ----------------------------------------------------- + + /** Read names in an interleaved sequence of (parameter) names and types/bounds */ + def readParamNames[N <: Name](end: Addr): List[N] = + until(end) { + val name = readName().asInstanceOf[N] + skipTree() + name + } + + /** Read types or bounds in an interleaved sequence of (parameter) names and types/bounds */ + def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] = + until(end) { readNat(); readType().asInstanceOf[T] } + + /** Read referece to definition and return symbol created at that definition */ + def readSymRef()(implicit ctx: Context): Symbol = { + val start = currentAddr + val addr = readAddr() + symAtAddr get addr match { + case Some(sym) => sym + case None => + // Create a stub; owner might be wrong but will be overwritten later. 
+ forkAt(addr).createSymbol() + val sym = symAtAddr(addr) + ctx.log(i"forward reference to $sym") + stubs += sym + sym + } + } + + /** Read a type */ + def readType()(implicit ctx: Context): Type = { + val start = currentAddr + val tag = readByte() + pickling.println(s"reading type ${astTagToString(tag)} at $start") + + def registeringType[T](tp: Type, op: => T): T = { + typeAtAddr(start) = tp + op + } + + def readLengthType(): Type = { + val end = readEnd() + + def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = { + val nameReader = fork + nameReader.skipTree() // skip result + val paramReader = nameReader.fork + (nameReader.readParamNames[N](end), paramReader) + } + + val result = + (tag: @switch) match { + case SUPERtype => + SuperType(readType(), readType()) + case REFINEDtype => + val parent = readType() + var name: Name = readName() + if (nextByte == SHARED) { + val refinedInfo = readType() + if (refinedInfo.isInstanceOf[TypeBounds]) name = name.toTypeName + RefinedType(parent, name, refinedInfo) + } + else { + if (nextByte == TYPEBOUNDS || nextByte == TYPEALIAS) name = name.toTypeName + RefinedType(parent, name, rt => registeringType(rt, readType())) + // Note that the lambda is not equivalent to a wildcard closure! + // Eta expansion of the latter puts readType() out of the expression. + } + case APPLIEDtype => + readType().appliedTo(until(end)(readType())) + case TYPEBOUNDS => + TypeBounds(readType(), readType()) + case TYPEALIAS => + val alias = readType() + val variance = + if (nextByte == COVARIANT) { readByte(); 1 } + else if (nextByte == CONTRAVARIANT) { readByte(); -1 } + else 0 + TypeAlias(alias, variance) + case ANNOTATED => + AnnotatedType(Annotation(readTerm()), readType()) + case ANDtype => + AndType(readType(), readType()) + case ORtype => + OrType(readType(), readType()) + case BIND => + val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType()) + symAtAddr(start) = sym + TypeRef.withFixedSym(NoPrefix, sym.name, sym) + case POLYtype => + val (names, paramReader) = readNamesSkipParams[TypeName] + val result = PolyType(names)( + pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)), + pt => readType()) + goto(end) + result + case METHODtype => + val (names, paramReader) = readNamesSkipParams[TermName] + val result = MethodType(names, paramReader.readParamTypes[Type](end))( + mt => registeringType(mt, readType())) + goto(end) + result + case PARAMtype => + readTypeRef() match { + case binder: PolyType => PolyParam(binder, readNat()) + case binder: MethodType => MethodParam(binder, readNat()) + } + case CLASSconst => + ConstantType(Constant(readType())) + case ENUMconst => + ConstantType(Constant(readTermRef().termSymbol)) + } + assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") + result + } + + def readSimpleType(): Type = (tag: @switch) match { + case TYPEREFdirect | TERMREFdirect => + NamedType.withFixedSym(NoPrefix, readSymRef()) + case TYPEREFsymbol | TERMREFsymbol => + val sym = readSymRef() + val prefix = readType() + val res = NamedType.withFixedSym(prefix, sym) + if (prefix.isInstanceOf[ThisType]) res.withDenot(sym.denot) else res + // without this precaution we get an infinite cycle when unpickling pos/extmethods.scala + // the problem arises when a self type of a trait is a type parameter of the same trait. 
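
// Illustrative sketch (simplified; Patchable, refer and define are invented names,
// not the actual pickling API): the forward-reference scheme used by
// pickleSymRef/registerDef on the pickler side and by the stub symbols created in
// readSymRef above. A reference to a symbol whose definition has not been written
// yet leaves a hole that is backpatched once the definition is reached; at the end
// of pickling no unresolved references may remain.
object ForwardRefSketch {
  import scala.collection.mutable

  class Patchable {
    private val slots   = mutable.ArrayBuffer.empty[Int]           // the "buffer": each slot holds an address
    private val defined = mutable.HashMap.empty[String, Int]       // name -> address of its definition
    private val pending = mutable.HashMap.empty[String, List[Int]] // name -> slots awaiting a backpatch

    /** Emit a reference to `name`; if it is not defined yet, leave a hole to patch later. */
    def refer(name: String): Unit = {
      val slot = slots.length
      slots += defined.getOrElse(name, -1)
      if (!defined.contains(name))
        pending(name) = slot :: pending.getOrElse(name, Nil)
    }

    /** Record that `name` is defined at `addr` and backpatch every pending reference. */
    def define(name: String, addr: Int): Unit = {
      defined(name) = addr
      pending.remove(name).getOrElse(Nil).foreach(slot => slots(slot) = addr)
    }

    def unresolved: Set[String] = pending.keySet.toSet
    def dump: Vector[Int]       = slots.toVector
  }

  def main(args: Array[String]): Unit = {
    val buf = new Patchable
    buf.refer("C")              // forward reference: definition not seen yet
    buf.define("C", addr = 42)  // definition arrives, the hole above is patched
    buf.refer("C")              // backward reference: resolved immediately
    assert(buf.dump == Vector(42, 42) && buf.unresolved.isEmpty)
  }
}
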
+ case TYPEREFpkg => + val name = readName() + val pkg = + if (name == nme.ROOT) defn.RootClass + else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageClass + else ctx.requiredPackage(name).moduleClass + pkg.typeRef + case TERMREFpkg => + val name = readName() + val pkg = + if (name == nme.ROOT) defn.RootPackage + else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal + else ctx.requiredPackage(name) + pkg.termRef + case TYPEREF => + val name = readName().toTypeName + TypeRef(readType(), name) + case TERMREF => + readNameSplitSig() match { + case name: TermName => TermRef.all(readType(), name) + case (name: TermName, sig: Signature) => TermRef.withSig(readType(), name, sig) + } + case THIS => + ThisType.raw(readType().asInstanceOf[TypeRef]) + case SKOLEMtype => + SkolemType(readTypeRef()) + case SHARED => + val ref = readAddr() + typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) + case UNITconst => + ConstantType(Constant(())) + case TRUEconst => + ConstantType(Constant(true)) + case FALSEconst => + ConstantType(Constant(false)) + case BYTEconst => + ConstantType(Constant(readInt().toByte)) + case SHORTconst => + ConstantType(Constant(readInt().toShort)) + case CHARconst => + ConstantType(Constant(readNat().toChar)) + case INTconst => + ConstantType(Constant(readInt())) + case LONGconst => + ConstantType(Constant(readLongInt())) + case FLOATconst => + ConstantType(Constant(java.lang.Float.intBitsToFloat(readInt()))) + case DOUBLEconst => + ConstantType(Constant(java.lang.Double.longBitsToDouble(readLongInt()))) + case STRINGconst => + ConstantType(Constant(readName().toString)) + case NULLconst => + ConstantType(Constant(null)) + case BYNAMEtype => + ExprType(readType()) + } + + if (tag < firstLengthTreeTag) readSimpleType() else readLengthType() + } + + def readTypeRef(): Type = + typeAtAddr(readAddr()) + + def readPath()(implicit ctx: Context): Type = { + val tp = readType() + assert(tp.isInstanceOf[SingletonType]) + tp + } + + def readTermRef()(implicit ctx: Context): TermRef = + readType().asInstanceOf[TermRef] + +// ------ Reading definitions ----------------------------------------------------- + + private def noRhs(end: Addr): Boolean = + currentAddr == end || isModifierTag(nextByte) + + private def localContext(owner: Symbol)(implicit ctx: Context) = { + val lctx = ctx.fresh.setOwner(owner) + if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope + } + + private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbstractType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = { + val lacksDefinition = + rhsIsEmpty && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) || + isAbstractType + var flags = givenFlags + if (lacksDefinition) flags |= Deferred + if (tag == DEFDEF) flags |= Method + if (givenFlags is Module) + flags = flags | (if (tag == VALDEF) ModuleCreationFlags else ModuleClassCreationFlags) + if (ctx.mode.is(Mode.InSuperCall) && !flags.is(ParamOrAccessor)) flags |= InSuperCall + if (ctx.owner.isClass) { + if (tag == TYPEPARAM) flags |= Param | ExpandedName // TODO check name to determine ExpandedName + else if (tag == PARAM) flags |= ParamAccessor + } + else if (isParamTag(tag)) flags |= Param + flags + } + + /** Create symbol of definition node and enter in symAtAddr map + * @return true iff the definition does not contain initialization code + */ + def createSymbol()(implicit ctx: Context): Boolean = { + val start = currentAddr + val tag = readByte() + val end = readEnd() + val name = if (tag == TYPEDEF || 
tag == TYPEPARAM) readName().toTypeName else readName() + skipParams() + val isAbstractType = nextByte == TYPEBOUNDS + val isClass = nextByte == TEMPLATE + val templateStart = currentAddr + skipTree() // tpt + val rhsIsEmpty = noRhs(end) + if (!rhsIsEmpty) skipTree() + val (givenFlags, annots, privateWithin) = readModifiers(end) + pickling.println(i"creating symbol $name at $start with flags $givenFlags") + val flags = normalizeFlags(tag, givenFlags, name, isAbstractType, rhsIsEmpty) + val nameMatches = (_: Denotation).symbol.name == name + val prevDenot: SymDenotation = + if (ctx.owner.is(Package)) ctx.effectiveScope.lookup(name) + else NoDenotation // TODO check for double reads + var completer: LazyType = + if (prevDenot.exists) new Completer(subReader(start, end)) with SymbolLoaders.SecondCompleter + else new Completer(subReader(start, end)) + if (flags is Module) completer = ctx.adjustModuleCompleter(completer, name) + val sym = + if (roots contains prevDenot) { + pickling.println(i"overwriting ${prevDenot.symbol} # ${prevDenot.hashCode}") + prevDenot.info = completer + prevDenot.flags = flags &~ Touched // allow one more completion + prevDenot.privateWithin = privateWithin + prevDenot.symbol + } else if (isClass) + ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord = start.index) + else { + val sym = symAtAddr.get(start) match { + case Some(preExisting) => + assert(stubs contains preExisting) + stubs -= preExisting + preExisting + case none => + ctx.newNakedSymbol(start.index) + } + val denot = ctx.SymDenotation(symbol = sym, owner = ctx.owner, name, flags, completer, privateWithin) + sym.denot = denot + sym + } // TODO set position + sym.annotations = annots + ctx.enter(sym) + symAtAddr(start) = sym + if (isClass) { + completer.withDecls(newScope) + forkAt(templateStart).indexTemplateParams()(localContext(sym)) + } + tag != VALDEF || rhsIsEmpty + } + + /** Read modifier list into triplet of flags, annotations and a privateWithin + * boindary symbol. + */ + def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = { + var flags: FlagSet = EmptyFlags + var annots = new mutable.ListBuffer[Annotation] + var privateWithin: Symbol = NoSymbol + while (currentAddr.index != end.index) { + def addFlag(flag: FlagSet) = { + flags |= flag + readByte() + } + nextByte match { + case PRIVATE => addFlag(Private) + case INTERNAL => ??? 
// addFlag(Internal)
+          case PROTECTED => addFlag(Protected)
+          case ABSTRACT => addFlag(Abstract)
+          case FINAL => addFlag(Final)
+          case SEALED => addFlag(Sealed)
+          case CASE => addFlag(Case)
+          case IMPLICIT => addFlag(Implicit)
+          case LAZY => addFlag(Lazy)
+          case OVERRIDE => addFlag(Override)
+          case INLINE => addFlag(Inline)
+          case ABSOVERRIDE => addFlag(AbsOverride)
+          case STATIC => addFlag(JavaStatic)
+          case OBJECT => addFlag(Module)
+          case TRAIT => addFlag(Trait)
+          case LOCAL => addFlag(Local)
+          case SYNTHETIC => addFlag(Synthetic)
+          case ARTIFACT => addFlag(Artifact)
+          case MUTABLE => addFlag(Mutable)
+          case LABEL => addFlag(Label)
+          case FIELDaccessor => addFlag(Accessor)
+          case CASEaccessor => addFlag(CaseAccessor)
+          case COVARIANT => addFlag(Covariant)
+          case CONTRAVARIANT => addFlag(Contravariant)
+          case SCALA2X => addFlag(Scala2x)
+          case DEFAULTparameterized => addFlag(DefaultParameterized)
+          case INSUPERCALL => addFlag(InSuperCall)
+          case PRIVATEqualified =>
+            readByte()
+            privateWithin = readType().typeSymbol
+          case PROTECTEDqualified =>
+            addFlag(Protected)
+            privateWithin = readType().typeSymbol
+          case ANNOTATION =>
+            readByte()
+            val end = readEnd()
+            val sym = readType().typeSymbol
+            val lazyAnnotTree = readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+            annots += Annotation.deferred(sym, _ => lazyAnnotTree.complete)
+          case _ =>
+            assert(false, s"illegal modifier tag at $currentAddr")
+        }
+      }
+      (flags, annots.toList, privateWithin)
+    }
+
+    /** Create symbols for all definitions in the statement sequence between
+     *  the current address and `end`.
+     *  @return true iff none of the statements contains initialization code
+     */
+    def indexStats(end: Addr)(implicit ctx: Context): Boolean = {
+      val noInitss =
+        until(end) {
+          nextByte match {
+            case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+              createSymbol()
+            case IMPORT =>
+              skipTree()
+              true
+            case PACKAGE =>
+              processPackage { (pid, end) => implicit ctx => indexStats(end) }
+            case _ =>
+              skipTree()
+              false
+          }
+        }
+      noInitss.forall(_ == true)
+    }
+
+    /** Process package with given operation `op`. The operation takes as arguments
+     *  - a `RefTree` representing the `pid` of the package,
+     *  - an end address,
+     *  - a context which has the processed package as owner
+     */
+    def processPackage[T](op: (RefTree, Addr) => Context => T)(implicit ctx: Context): T = {
+      readByte()
+      val end = readEnd()
+      val pid = ref(readTermRef()).asInstanceOf[RefTree]
+      op(pid, end)(localContext(pid.symbol.moduleClass))
+    }
+
+    /** Create symbols for the longest consecutive sequence of parameters with the given
+     *  `tag`, starting at the current address.
+     */
+    def indexParams(tag: Int)(implicit ctx: Context) =
+      while (nextByte == tag) createSymbol()
+
+    /** Create symbols for all type and value parameters of a template, starting
+     *  at the current address.
+     */
+    def indexTemplateParams()(implicit ctx: Context) = {
+      assert(readByte() == TEMPLATE)
+      readEnd()
+      indexParams(TYPEPARAM)
+      indexParams(PARAM)
+    }
+
+    /** If the definition was already read by a completer, return the previously read tree,
+     *  or else read the definition.
+ */ + def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match { + case Some(tree) => skipTree(); tree + case none => readNewDef() + } + + private def readNewDef()(implicit ctx: Context): Tree = { + val start = currentAddr + val sym = symAtAddr(start) + val tag = readByte() + val end = readEnd() + + def readParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = { + fork.indexParams(tag) + readIndexedParams(tag) + } + + def readParamss(implicit ctx: Context): List[List[ValDef]] = { + collectWhile(nextByte == PARAMS) { + readByte() + readEnd() + readParams[ValDef](PARAM) + } + } + + def readRhs(implicit ctx: Context) = + if (noRhs(end)) EmptyTree + else readLater(end, rdr => ctx => rdr.readTerm()(ctx)) + + def localCtx = localContext(sym) + + def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) = + ta.assignType( + untpd.DefDef( + sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)), + sym) + + def ta = ctx.typeAssigner + + val name = readName() + pickling.println(s"reading def of $name at $start") + val tree: MemberDef = tag match { + case DEFDEF => + val tparams = readParams[TypeDef](TYPEPARAM)(localCtx) + val vparamss = readParamss(localCtx) + val tpt = readTpt() + val typeParams = tparams.map(_.symbol) + val valueParamss = ctx.normalizeIfConstructor( + vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR) + val resType = ctx.effectiveResultType(sym, typeParams, tpt.tpe) + sym.info = ctx.methodType(typeParams, valueParamss, resType) + DefDef(tparams, vparamss, tpt) + case VALDEF => + sym.info = readType() + ValDef(sym.asTerm, readRhs(localCtx)) + case TYPEDEF | TYPEPARAM => + if (sym.isClass) { + val cls = sym.asClass + def setClsInfo(parents: List[TypeRef], selfType: Type) = + cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType) + setClsInfo(Nil, NoType) + val impl = readTemplate(localCtx) + setClsInfo( + ctx.normalizeToClassRefs(impl.parents.map(_.tpe), cls, cls.unforcedDecls), + if (impl.self.isEmpty) NoType else impl.self.tpt.tpe) + ta.assignType(untpd.TypeDef(sym.name.asTypeName, impl), sym) + } + else { + sym.info = readType() + TypeDef(sym.asType) + } + case PARAM => + val info = readType() + if (noRhs(end)) { + sym.info = info + ValDef(sym.asTerm) + } + else { + sym.setFlag(Method) + sym.info = ExprType(info) + pickling.println(i"reading param alias $name -> $currentAddr") + DefDef(Nil, Nil, TypeTree(info)) + } + } + val mods = + if (sym.annotations.isEmpty) EmptyModifiers + else Modifiers(annotations = sym.annotations.map(_.tree)) + tree.withMods(mods) // record annotations in tree so that tree positions can be filled in. 
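
// Illustrative sketch (simplified; SimpleReader and Deferred are invented stand-ins
// for TreeReader and Trees.Lazy, not the actual API): the fork-and-defer pattern
// behind readLater/readRhs used in readNewDef above. A sub-reader is forked at the
// current address, the main reader jumps straight to `end`, and the deferred part
// is only decoded when the lazy value is forced.
object LazyReadSketch {
  class SimpleReader(val data: Vector[String], var pos: Int = 0) {
    def fork: SimpleReader = new SimpleReader(data, pos) // independent cursor at the same position
    def goto(p: Int): Unit = pos = p
    def readOne(): String  = { val x = data(pos); pos += 1; x }
  }

  /** A deferred computation over a forked reader, run at most once when forced. */
  class Deferred[T](reader: SimpleReader, op: SimpleReader => T) {
    lazy val force: T = op(reader)
  }

  def readLater[T](from: SimpleReader, end: Int)(op: SimpleReader => T): Deferred[T] = {
    val local = from.fork // remembers where the deferred part starts
    from.goto(end)        // the main reader skips past it immediately
    new Deferred(local, op)
  }

  def main(args: Array[String]): Unit = {
    val rdr = new SimpleReader(Vector("header", "rhs-part-1", "rhs-part-2", "next-stat"))
    rdr.readOne()                                     // read the definition header eagerly
    val rhs = readLater(rdr, end = 3)(r => (r.readOne(), r.readOne()))
    assert(rdr.readOne() == "next-stat")              // the main reader already skipped the rhs
    assert(rhs.force == ("rhs-part-1", "rhs-part-2")) // the rhs is decoded only when forced
  }
}
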
+ goto(end) + setPos(start, tree) + } + + private def readTemplate(implicit ctx: Context): Template = { + val start = currentAddr + val cls = ctx.owner.asClass + val localDummy = ctx.newLocalDummy(cls) + assert(readByte() == TEMPLATE) + val end = readEnd() + val tparams = readIndexedParams[TypeDef](TYPEPARAM) + val vparams = readIndexedParams[ValDef](PARAM) + val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { + nextByte match { + case APPLY | TYPEAPPLY => readTerm() + case _ => readTpt() + } + } + val self = + if (nextByte == SELFDEF) { + readByte() + untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType) + } + else EmptyValDef + val noInits = fork.indexStats(end) + if (noInits) cls.setFlag(NoInits) + val constr = readIndexedDef().asInstanceOf[DefDef] + + def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) = + (tparams, stats) match { + case (tparam :: tparams1, (alias: TypeDef) :: stats1) + if tparam.name == alias.name.expandedName(cls) => + val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1) + (tparam :: alias :: tas, stats2) + case _ => + (tparams, stats) + } + + val lazyStats = readLater(end, rdr => implicit ctx => { + val stats0 = rdr.readIndexedStats(localDummy, end) + val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0) + tparamsAndAliases ++ vparams ++ stats + }) + setPos(start, + untpd.Template(constr, parents, self, lazyStats) + .withType(localDummy.nonMemberTermRef)) + } + + def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match { + case TYPEDEF | VALDEF | DEFDEF | IMPORT => + readIndexedDef() + case IMPORT => + ??? + case PACKAGE => + val start = currentAddr + processPackage { (pid, end) => implicit ctx => + setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx))) + } + case _ => + readTerm()(ctx.withOwner(exprOwner)) + } + + def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = + until(end)(readIndexedStat(exprOwner)) + + def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = { + fork.indexStats(end) + readIndexedStats(exprOwner, end) + } + + def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = + collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] } + +// ------ Reading terms ----------------------------------------------------- + + def readTerm()(implicit ctx: Context): Tree = { + val start = currentAddr + val tag = readByte() + pickling.println(s"reading term ${astTagToString(tag)} at $start") + + def readPathTerm(): Tree = { + goto(start) + readPath() match { + case path: TermRef => ref(path) + case path: ThisType => This(path.cls) + case path: ConstantType => Literal(path.value) + } + } + + def readSimpleTerm(): Tree = tag match { + case IDENT => + untpd.Ident(readName()).withType(readType()) + case SELECT => + def readQual(name: Name) = { + val localCtx = + if (name == nme.CONSTRUCTOR) ctx.fresh.addMode(Mode.InSuperCall) else ctx + readTerm()(localCtx) + } + def readRest(name: Name, sig: Signature) = { + val unshadowed = if (name.isShadowedName) name.revertShadowed else name + val qual = readQual(name) + untpd.Select(qual, unshadowed) + .withType(TermRef.withSig(qual.tpe.widenIfUnstable, name.asTermName, sig)) + } + readNameSplitSig match { + case name: Name => readRest(name, Signature.NotAMethod) + case (name: Name, sig: Signature) => readRest(name, sig) + } + + case NEW => + New(readTpt()) + case _ => + readPathTerm() 
+ } + + def readLengthTerm(): Tree = { + val end = readEnd() + + val result = + (tag: @switch) match { + case SUPER => + val qual = readTerm() + val mixClass = ifBefore(end)(readType().typeSymbol, NoSymbol) + val mixName = if (mixClass.exists) mixClass.name.asTypeName else tpnme.EMPTY + tpd.Super(qual, mixName, ctx.mode.is(Mode.InSuperCall), mixClass) + case APPLY => + val fn = readTerm() + val isJava = fn.tpe.isInstanceOf[JavaMethodType] + def readArg() = readTerm() match { + case SeqLiteral(elems) if isJava => JavaSeqLiteral(elems) + case arg => arg + } + tpd.Apply(fn, until(end)(readArg())) + case TYPEAPPLY => + tpd.TypeApply(readTerm(), until(end)(readTpt())) + case PAIR => + Pair(readTerm(), readTerm()) + case TYPED => + Typed(readTerm(), readTpt()) + case NAMEDARG => + NamedArg(readName(), readTerm()) + case ASSIGN => + Assign(readTerm(), readTerm()) + case BLOCK => + val exprReader = fork + skipTree() + val localCtx = ctx.fresh.setNewScope + val stats = readStats(ctx.owner, end)(localCtx) + val expr = exprReader.readTerm()(localCtx) + Block(stats, expr) + case IF => + If(readTerm(), readTerm(), readTerm()) + case LAMBDA => + val meth = readTerm() + val tpt = ifBefore(end)(readTpt(), EmptyTree) + Closure(Nil, meth, tpt) + case MATCH => + Match(readTerm(), readCases(end)) + case RETURN => + val from = readSymRef() + val expr = ifBefore(end)(readTerm(), EmptyTree) + Return(expr, Ident(from.termRef)) + case TRY => + Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree)) + case REPEATED => + SeqLiteral(until(end)(readTerm())) + case BIND => + val name = readName() + val info = readType() + val sym = ctx.newSymbol(ctx.owner, name, EmptyFlags, info) + symAtAddr(start) = sym + Bind(sym, readTerm()) + case ALTERNATIVE => + Alternative(until(end)(readTerm())) + case UNAPPLY => + val fn = readTerm() + val implicitArgs = + collectWhile(nextByte == IMPLICITarg) { + readByte() + readTerm() + } + val patType = readType() + val argPats = until(end)(readTerm()) + UnApply(fn, implicitArgs, argPats, patType) + case _ => + readPathTerm() + } + assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") + result + } + + val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() + tree.overwriteType(tree.tpe.simplified) + setPos(start, tree) + } + + def readTpt()(implicit ctx: Context) = { + val start = currentAddr + val tp = readType() + if (tp.exists) setPos(start, TypeTree(tp)) else EmptyTree + } + + def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] = + collectWhile(nextByte == CASEDEF && currentAddr != end) { readCase()(ctx.fresh.setNewScope) } + + def readCase()(implicit ctx: Context): CaseDef = { + val start = currentAddr + readByte() + val end = readEnd() + val pat = readTerm() + val rhs = readTerm() + val guard = ifBefore(end)(readTerm(), EmptyTree) + setPos(start, CaseDef(pat, guard, rhs)) + } + + def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = { + val localReader = fork + goto(end) + new LazyReader(localReader, op) + } + +// ------ Hooks for positions ------------------------------------------------ + + /** Record address from which tree was created as a temporary position in the tree. + * The temporary position contains deltas relative to the position of the (as yet unknown) + * parent node. It is marked as a non-synthetic source position. 
+ */ + def setPos[T <: Tree](addr: Addr, tree: T): T = { + if (readPositions) + tree.setPosUnchecked(positions.getOrElse(addr, Position(0, 0, 0))) + tree + } + } + + private def setNormalized(tree: Tree, parentPos: Position): Unit = { + assert(tree.pos.exists) + val absPos = Position(parentPos.start + offsetToInt(tree.pos.start), parentPos.end - tree.pos.end) + tree.setPosUnchecked(absPos) + } + + def normalizePos(x: Any, parentPos: Position)(implicit ctx: Context): Unit = + traverse(x, parentPos, setNormalized) + + class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] with DeferredPosition { + def complete(implicit ctx: Context): T = { + pickling.println(i"starting to read at ${reader.reader.currentAddr}") + val res = op(reader)(ctx.addMode(Mode.AllowDependentFunctions)) + normalizePos(res, parentPos) + res + } + } + + class LazyAnnotationReader(sym: Symbol, reader: TreeReader) + extends LazyAnnotation(sym) with DeferredPosition { + def complete(implicit ctx: Context) = { + val res = reader.readTerm() + normalizePos(res, parentPos) + res + } + } +} diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala index 238c43854c07..2bb6b974b0a6 100644 --- a/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/src/dotty/tools/dotc/parsing/Parsers.scala @@ -61,7 +61,7 @@ object Parsers { atPos(Position(start, end, point))(t) def atPos[T <: Positioned](start: Offset, point: Offset)(t: T): T = - atPos(start, point, in.lastOffset)(t) + atPos(start, point, in.lastOffset max start)(t) def atPos[T <: Positioned](start: Offset)(t: T): T = atPos(start, start)(t) diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index f34135431e6b..0fd862afb590 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -11,7 +11,7 @@ import config.Config.summarizeDepth import scala.annotation.switch class PlainPrinter(_ctx: Context) extends Printer { - protected[this] implicit val ctx: Context = _ctx + protected[this] implicit def ctx: Context = _ctx protected def maxToTextRecursions = 100 @@ -33,6 +33,14 @@ class PlainPrinter(_ctx: Context) extends Printer { ctx.warning("Exceeded recursion depth attempting to print.") (new Throwable).printStackTrace } + + /** If true, tweak output so it is the same before and after pickling */ + protected def homogenizedView: Boolean = ctx.settings.YtestPickler.value + + def homogenize(tp: Type): Type = tp match { + case tp: TypeVar if homogenizedView && tp.isInstantiated => homogenize(tp.instanceOpt) + case _ => tp + } /** Render elements alternating with `sep` string */ protected def toText(elems: Traversable[Showable], sep: String) = @@ -86,20 +94,18 @@ class PlainPrinter(_ctx: Context) extends Printer { */ private def refinementChain(tp: Type): List[Type] = tp :: (tp match { - case RefinedType(parent, _) => refinementChain(parent) + case RefinedType(parent, _) => refinementChain(parent.stripTypeVar) case _ => Nil }) def toText(tp: Type): Text = controlled { - tp match { + homogenize(tp) match { case tp: TypeType => toTextRHS(tp) - case tp: TermRef if !tp.denotationIsCurrent => + case tp: TermRef if !tp.denotationIsCurrent || tp.symbol.is(Module) => toTextRef(tp) ~ ".type" case tp: TermRef if tp.denot.isOverloaded => "" - case tp: TermRef if tp.symbol is Module => - toText(tp.underlying) ~ ".type" case tp: SingletonType => toText(tp.underlying) ~ "(" ~ toTextRef(tp) ~ ")" case 
tp: TypeRef => @@ -249,8 +255,8 @@ class PlainPrinter(_ctx: Context) extends Printer { else " = " eql ~ toText(lo) } else - (if (lo == defn.NothingType) Text() else " >: " ~ toText(lo)) ~ - (if (hi == defn.AnyType) Text() else " <: " ~ toText(hi)) + (if (lo isRef defn.NothingClass) Text() else " >: " ~ toText(lo)) ~ + (if (hi isRef defn.AnyClass) Text() else " <: " ~ toText(hi)) case tp @ ClassInfo(pre, cls, cparents, decls, selfInfo) => val preText = toTextLocal(pre) val (tparams, otherDecls) = decls.toList partition treatAsTypeParam diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 43dbef890332..d341ce00f5c0 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -5,34 +5,43 @@ import core._ import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._ import Contexts.Context, Scopes.Scope, Denotations._, SymDenotations._, Annotations.Annotation import StdNames.nme -import ast.{Trees, untpd} +import ast.{Trees, untpd, tpd} import typer.Namer import typer.ProtoTypes.{SelectionProto, ViewProto, FunProto, IgnoredProto, dummyTreeOfType} import Trees._ import scala.annotation.switch +import language.implicitConversions class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { - override protected def recursionLimitExceeded() = {} - - protected val PrintableFlags = (SourceModifierFlags | Label | Module | Local).toCommonFlags - - /** The closest enclosing DefDef, TypeDef, or ClassDef node */ - private var currentOwner: untpd.Tree = untpd.EmptyTree - - def atOwner(owner: Tree[_ >: Untyped])(op: => Text): Text = { - val saved = currentOwner - currentOwner = owner - try op - finally { currentOwner = saved } + /** A stack of enclosing DefDef, TypeDef, or ClassDef, or ModuleDefs nodes */ + private var enclosingDef: untpd.Tree = untpd.EmptyTree + + private var myCtx: Context = _ctx + override protected[this] implicit def ctx: Context = myCtx + + def withEnclosingDef(enclDef: Tree[_ >: Untyped])(op: => Text): Text = { + val savedCtx = myCtx + if (enclDef.hasType && enclDef.symbol.exists) + myCtx = ctx.withOwner(enclDef.symbol) + val savedDef = enclosingDef + enclosingDef = enclDef + try op finally { + myCtx = savedCtx + enclosingDef = savedDef + } } - private def ownerIsClass = currentOwner match { + private def enclDefIsClass = enclosingDef match { case owner: TypeDef[_] => owner.isClassDef case owner: untpd.ModuleDef => true case _ => false } + override protected def recursionLimitExceeded() = {} + + protected val PrintableFlags = (SourceModifierFlags | Label | Module | Local).toCommonFlags + override def nameString(name: Name): String = name.decode.toString override protected def simpleNameString(sym: Symbol): String = @@ -75,7 +84,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (!tsym.exists) super.refinementNameString(tp) else { val name = tsym.originalName - nameString(if (tsym is ExpandedTypeParam) name.asTypeName.unexpandedName() else name) + nameString(if (tsym is ExpandedTypeParam) name.asTypeName.unexpandedName else name) } } @@ -95,7 +104,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toTextTuple(args.init) argStr ~ " => " ~ toText(args.last) } - tp match { + homogenize(tp) match { case tp: RefinedType => val args = tp.argInfos(interpolate = false) if (args.nonEmpty) { @@ -109,7 +118,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { return (toTextLocal(tycon) ~ 
"[" ~ Text(args map argText, ", ") ~ "]").close } case tp: TypeRef => - val hideType = tp.symbol is TypeParam | TypeArgument | ExpandedName + val hideType = tp.symbol is AliasPreferred if (hideType && !ctx.phase.erasedTypes && !tp.symbol.isCompleting) { tp.info match { case TypeAlias(alias) => return toText(alias) @@ -128,7 +137,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { return "?{ " ~ toText(tp.name) ~ ": " ~ toText(tp.memberProto) ~ " }" case tp: ViewProto => return toText(tp.argType) ~ " ?=>? " ~ toText(tp.resultType) - case FunProto(args, resultType, _) => + case tp @ FunProto(args, resultType, _) => val argsText = args match { case dummyTreeOfType(tp) :: Nil if !(tp isRef defn.NullClass) => "null: " ~ toText(tp) case _ => toTextGlobal(args, ", ") @@ -146,21 +155,30 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override def toText[T >: Untyped](tree: Tree[T]): Text = controlled { - def optDotPrefix(name: Name) = optText(name)(_ ~ ".") + import untpd.{modsDeco => _, _} + + /** Print modifiers form symbols if tree has type, overriding the untpd behavior. */ + implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDeco = + tpd.modsDeco(mdef.asInstanceOf[tpd.MemberDef]).asInstanceOf[untpd.ModsDeco] + + def isLocalThis(tree: Tree) = tree.typeOpt match { + case tp: ThisType => tp.cls == ctx.owner.enclosingClass + case _ => false + } + + def optDotPrefix(tree: This) = optText(tree.qual)(_ ~ ".") provided !isLocalThis(tree) def optAscription(tpt: untpd.Tree) = optText(tpt)(": " ~ _) // Dotty deviation: called with an untpd.Tree, so cannot be a untpd.Tree[T] (seems to be a Scala2 problem to allow this) // More deviations marked below as // DD - def tparamsText[T >: Untyped](params: List[Tree[T]]): Text = + def tparamsText[T >: Untyped](params: List[Tree]): Text = "[" ~ toText(params, ", ") ~ "]" provided params.nonEmpty - def addVparamssText(txt: Text, vparamss: List[List[ValDef[T]]]): Text = + def addVparamssText(txt: Text, vparamss: List[List[ValDef]]): Text = (txt /: vparamss)((txt, vparams) => txt ~ "(" ~ toText(vparams, ", ") ~ ")") - - - def caseBlockText(tree: Tree[T]): Text = tree match { + def caseBlockText(tree: Tree): Text = tree match { case Block(stats, expr) => toText(stats :+ expr, "\n") case expr => toText(expr) } @@ -184,19 +202,21 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def useSymbol = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value - + def modText(mods: untpd.Modifiers, kw: String): Text = { // DD - val suppressKw = if (ownerIsClass) mods is ParamAndLocal else mods is Param - val flagMask = if (suppressKw) PrintableFlags &~ Private else PrintableFlags - val flagsText: Text = - if (useSymbol) toTextFlags(tree.symbol) - else (mods.flags & flagMask).toString + val suppressKw = if (enclDefIsClass) mods is ParamAndLocal else mods is Param + var flagMask = + if (ctx.settings.debugFlags.value) AllFlags + else if (suppressKw) PrintableFlags &~ Private + else PrintableFlags + if (homogenizedView && mods.flags.isTypeFlags) flagMask &~= Implicit // drop implicit from classes + val flagsText = (mods.flags & flagMask).toString Text(mods.annotations.map(annotText), " ") ~~ flagsText ~~ (kw provided !suppressKw) } - def argText(arg: Tree[T]): Text = arg match { - case arg: TypeBoundsTree[_] => "_" ~ toTextGlobal(arg) - case arg: TypeTree[_] => + def argText(arg: Tree): Text = arg match { + case arg: TypeBoundsTree => "_" ~ toTextGlobal(arg) + case arg: TypeTree => 
arg.typeOpt match { case tp: TypeBounds => "_" ~ toTextGlobal(arg) case _ => toTextGlobal(arg) @@ -217,10 +237,32 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def nameIdText(tree: untpd.NameTree): Text = toText(tree.name) ~ idText(tree) - import untpd._ + def toTextTemplate(impl: Template, ofNew: Boolean = false): Text = { + val Template(constr @ DefDef(_, tparams, vparamss, _, _), parents, self, _) = impl + val tparamsTxt = withEnclosingDef(constr) { tparamsText(tparams) } + val primaryConstrs = if (constr.rhs.isEmpty) Nil else constr :: Nil + val prefix: Text = + if (vparamss.isEmpty || primaryConstrs.nonEmpty) tparamsTxt + else { + var modsText = modText(constr.mods, "") + if (constr.mods.hasAnnotations && !constr.mods.hasFlags) modsText = modsText ~~ " this" + withEnclosingDef(constr) { addVparamssText(tparamsTxt ~~ modsText, vparamss) } + } + val parentsText = Text(parents map constrText, " with ") + val selfText = { + val selfName = if (self.name == nme.WILDCARD) "this" else self.name.toString + (selfName ~ optText(self.tpt)(": " ~ _) ~ " =>").close + } provided !self.isEmpty + val bodyText = "{" ~~ selfText ~~ toTextGlobal(primaryConstrs ::: impl.body, "\n") ~ "}" + prefix ~ (" extends" provided !ofNew) ~~ parentsText ~~ bodyText + } + + def toTextPackageId(pid: Tree): Text = + if (homogenizedView) toTextLocal(pid.tpe) + else toTextLocal(pid) var txt: Text = tree match { - case id: Trees.BackquotedIdent[_] => + case id: Trees.BackquotedIdent[_] if !homogenizedView => "`" ~ toText(id.name) ~ "`" case Ident(name) => tree.typeOpt match { @@ -231,10 +273,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } case tree @ Select(qual, name) => toTextLocal(qual) ~ ("." ~ nameIdText(tree) provided name != nme.CONSTRUCTOR) - case This(name) => - optDotPrefix(name) ~ "this" ~ idText(tree) - case Super(This(name), mix) => - optDotPrefix(name) ~ "super" ~ optText(mix)("[" ~ _ ~ "]") + case tree: This => + optDotPrefix(tree) ~ "this" ~ idText(tree) + case Super(qual: This, mix) => + optDotPrefix(qual) ~ "super" ~ optText(mix)("[" ~ _ ~ "]") case Apply(fun, args) => if (fun.hasType && fun.symbol == defn.throwMethod) changePrec (GlobalPrec) { @@ -245,9 +287,17 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case TypeApply(fun, args) => toTextLocal(fun) ~ "[" ~ toTextGlobal(args, ", ") ~ "]" case Literal(c) => - toText(c) + tree.typeOpt match { + case ConstantType(tc) => toText(tc) + case _ => toText(c) + } case New(tpt) => - "new " ~ toTextLocal(tpt) + "new " ~ { + tpt match { + case tpt: Template => toTextTemplate(tpt, ofNew = true) + case _ => toTextLocal(tpt) + } + } case Pair(l, r) => "(" ~ toTextGlobal(l) ~ ", " ~ toTextGlobal(r) ~ ")" case Typed(expr, tpt) => @@ -312,52 +362,39 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toTextLocal(extractor) ~ "(" ~ toTextGlobal(patterns, ", ") ~ ")" ~ ("(" ~ toTextGlobal(implicits, ", ") ~ ")" provided implicits.nonEmpty) - case tree @ ValDef(name, tpt, rhs) => + case tree @ ValDef(name, tpt, _) => dclTextOr { - modText(tree.mods, if (tree.mods is Mutable) "var" else "val") ~~ nameIdText(tree) ~ - optAscription(tpt) - } ~ optText(rhs)(" = " ~ _) - case tree @ DefDef(name, tparams, vparamss, tpt, rhs) => - atOwner(tree) { - dclTextOr { - val first = modText(tree.mods, "def") ~~ nameIdText(tree) ~ tparamsText(tparams) - addVparamssText(first, vparamss) ~ optAscription(tpt) - } ~ optText(rhs)(" = " ~ _) + modText(tree.mods, if (tree.mods is Mutable) "var" else "val") ~~ + 
nameIdText(tree) ~ optAscription(tpt) ~ + withEnclosingDef(tree) { optText(tree.rhs)(" = " ~ _) } + } + case tree @ DefDef(name, tparams, vparamss, tpt, _) => + dclTextOr { + val prefix = modText(tree.mods, "def") ~~ nameIdText(tree) + withEnclosingDef(tree) { + addVparamssText(prefix ~ tparamsText(tparams), vparamss) ~ optAscription(tpt) ~ + optText(tree.rhs)(" = " ~ _) + } } case tree @ TypeDef(name, rhs) => - atOwner(tree) { - def typeDefText(rhsText: Text) = - dclTextOr { + def typeDefText(rhsText: Text) = + dclTextOr { + modText(tree.mods, "type") ~~ nameIdText(tree) ~ + withEnclosingDef(tree) { val rhsText1 = if (tree.hasType) toText(tree.symbol.info) else rhsText - modText(tree.mods, "type") ~~ nameIdText(tree) ~ tparamsText(tree.tparams) ~ rhsText1 + tparamsText(tree.tparams) ~ rhsText1 } - rhs match { - case impl: Template => - modText(tree.mods, if (tree.mods is Trait) "trait" else "class") ~~ nameIdText(tree) ~ toText(impl) ~ - (if (tree.hasType && ctx.settings.verbose.value) s"[decls = ${tree.symbol.info.decls}]" else "") - case rhs: TypeBoundsTree => - typeDefText(toText(rhs)) - case _ => - typeDefText(optText(rhs)(" = " ~ _)) } + rhs match { + case impl: Template => + modText(tree.mods, if ((tree).mods is Trait) "trait" else "class") ~~ + nameIdText(tree) ~ withEnclosingDef(tree) { toTextTemplate(impl) } ~ + (if (tree.hasType && ctx.settings.verbose.value) s"[decls = ${tree.symbol.info.decls}]" else "") + case rhs: TypeBoundsTree => + typeDefText(toText(rhs)) + case _ => + typeDefText(optText(rhs)(" = " ~ _)) } - case Template(constr @ DefDef(_, tparams, vparamss, _, rhs), parents, self, stats) => - val tparamsTxt = tparamsText(tparams) - val primaryConstrs = if (rhs.isEmpty) Nil else constr :: Nil - val prefix: Text = - if (vparamss.isEmpty || primaryConstrs.nonEmpty) tparamsTxt - else { - var modsText = modText(constr.mods, "") - if (constr.mods.hasAnnotations && !constr.mods.hasFlags) modsText = modsText ~~ " this" - addVparamssText(tparamsTxt ~~ modsText, vparamss) - } - val parentsText = Text(parents map constrText, " with ") - val selfText = { - val selfName = if (self.name == nme.WILDCARD) "this" else self.name.toString - (selfName ~ optText(self.tpt)(": " ~ _) ~ " =>").close - } provided !self.isEmpty - val bodyText = "{" ~~ selfText ~~ toTextGlobal(primaryConstrs ::: stats, "\n") ~ "}" - prefix ~~ (" extends" provided ownerIsClass) ~~ parentsText ~~ bodyText case Import(expr, selectors) => def selectorText(sel: Tree): Text = sel match { case Pair(l, r) => toTextGlobal(l) ~ " => " ~ toTextGlobal(r) @@ -375,7 +412,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } val bodyText = if (currentPrecedence == TopLevelPrec) "\n" ~ statsText else " {" ~ statsText ~ "}" - "package " ~ toTextLocal(pid) ~ bodyText + "package " ~ toTextPackageId(pid) ~ bodyText + case tree: Template => + toTextTemplate(tree) case Annotated(annot, arg) => toTextLocal(arg) ~~ annotText(annot) case EmptyTree => @@ -383,8 +422,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case TypedSplice(t) => toText(t) case tree @ ModuleDef(name, impl) => - atOwner(tree) { - modText(tree.mods, "object") ~~ nameIdText(tree) ~ toText(impl) + withEnclosingDef(tree) { + modText(tree.mods, "object") ~~ nameIdText(tree) ~ toTextTemplate(impl) } case SymbolLit(str) => "'" + str @@ -445,6 +484,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case PatDef(mods, pats, tpt, rhs) => modText(mods, "val") ~~ toText(pats, ", ") ~ optAscription(tpt) ~ optText(rhs)(" 
= " ~ _) + case ParsedTry(expr, handler, finalizer) => + changePrec(GlobalPrec) { + "try " ~ toText(expr) ~ " catch {" ~ toText(handler) ~ "}" ~ optText(finalizer)(" finally " ~ _) + } case Thicket(trees) => "Thicket {" ~~ toTextGlobal(trees, "\n") ~~ "}" case _ => @@ -474,9 +517,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else "" - + override protected def polyParamName(name: TypeName): TypeName = - name.unexpandedName() + name.unexpandedName override protected def treatAsTypeParam(sym: Symbol): Boolean = sym is TypeParam @@ -518,7 +561,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else { var flags = sym.flagsUNSAFE if (flags is TypeParam) flags = flags &~ Protected - Text((flags & SourceModifierFlags).flagStrings map stringToText, " ") + Text((flags & PrintableFlags).flagStrings map stringToText, " ") } override def toText(denot: Denotation): Text = denot match { diff --git a/src/dotty/tools/dotc/reporting/Reporter.scala b/src/dotty/tools/dotc/reporting/Reporter.scala index 5bb660cdb356..223fc33d5bf5 100644 --- a/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/src/dotty/tools/dotc/reporting/Reporter.scala @@ -109,9 +109,9 @@ trait Reporting { this: Context => * See [[config.CompilerCommand#explainAdvanced]] for the exact meaning of * "contains" here. */ - def log(msg: => String): Unit = + def log(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = if (this.settings.log.value.containsPhase(phase)) - echo(s"[log ${ctx.phasesStack.reverse.mkString(" -> ")}] $msg") + echo(s"[log ${ctx.phasesStack.reverse.mkString(" -> ")}] $msg", pos) def debuglog(msg: => String): Unit = if (ctx.debug) log(msg) diff --git a/src/dotty/tools/dotc/transform/CapturedVars.scala b/src/dotty/tools/dotc/transform/CapturedVars.scala index 68bda9782ec0..77b912f08ec7 100644 --- a/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -29,7 +29,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisTransfo private class CollectCaptured(implicit ctx: Context) extends EnclosingMethodTraverser { private val captured = mutable.HashSet[Symbol]() - def traverse(enclMeth: Symbol, tree: Tree) = tree match { + def traverse(enclMeth: Symbol, tree: Tree)(implicit ctx: Context) = tree match { case id: Ident => val sym = id.symbol if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm && sym.enclosingMethod != enclMeth) { diff --git a/src/dotty/tools/dotc/transform/Constructors.scala b/src/dotty/tools/dotc/transform/Constructors.scala index d68f20696223..165210cfbfa1 100644 --- a/src/dotty/tools/dotc/transform/Constructors.scala +++ b/src/dotty/tools/dotc/transform/Constructors.scala @@ -114,7 +114,7 @@ class Constructors extends MiniPhaseTransform with SymTransformer { thisTransfor private val seen = mutable.Set[Symbol](accessors: _*) val retained = mutable.Set[Symbol]() def dropped: collection.Set[Symbol] = seen -- retained - override def traverse(tree: Tree) = { + override def traverse(tree: Tree)(implicit ctx: Context) = { val sym = tree.symbol tree match { case Ident(_) | Select(This(_), _) if inConstr && seen(tree.symbol) => @@ -149,14 +149,14 @@ class Constructors extends MiniPhaseTransform with SymTransformer { thisTransfor def splitStats(stats: List[Tree]): Unit = stats match { case stat :: stats1 => stat match { - case stat @ ValDef(name, tpt, rhs) if 
!stat.symbol.is(Lazy) => + case stat @ ValDef(name, tpt, _) if !stat.symbol.is(Lazy) => val sym = stat.symbol if (isRetained(sym)) { - if (!rhs.isEmpty && !isWildcardArg(rhs)) - constrStats += Assign(ref(sym), intoConstr(rhs)).withPos(stat.pos) + if (!stat.rhs.isEmpty && !isWildcardArg(stat.rhs)) + constrStats += Assign(ref(sym), intoConstr(stat.rhs)).withPos(stat.pos) clsStats += cpy.ValDef(stat)(rhs = EmptyTree) } - else if (!rhs.isEmpty) { + else if (!stat.rhs.isEmpty) { sym.copySymDenotation( initFlags = sym.flags &~ Private, owner = constr.symbol).installAfter(thisTransform) diff --git a/src/dotty/tools/dotc/transform/ElimLocals.scala b/src/dotty/tools/dotc/transform/ElimLocals.scala deleted file mode 100644 index d18ad0288f9c..000000000000 --- a/src/dotty/tools/dotc/transform/ElimLocals.scala +++ /dev/null @@ -1,22 +0,0 @@ -package dotty.tools.dotc -package transform - -import core._ -import DenotTransformers.SymTransformer -import Phases.Phase -import Contexts.Context -import SymDenotations.SymDenotation -import TreeTransforms.MiniPhaseTransform -import Flags.Local - -/** Widens all private[this] and protected[this] qualifiers to just private/protected */ -class ElimLocals extends MiniPhaseTransform with SymTransformer { thisTransformer => - override def phaseName = "elimLocals" - - def transformSym(ref: SymDenotation)(implicit ctx: Context) = - dropLocal(ref) - - private def dropLocal(ref: SymDenotation)(implicit ctx: Context) = - if (ref.flags is Local) ref.copySymDenotation(initFlags = ref.flags &~ Local) - else ref -} diff --git a/src/dotty/tools/dotc/transform/Erasure.scala b/src/dotty/tools/dotc/transform/Erasure.scala index 7fd5109c449b..3a4487af5260 100644 --- a/src/dotty/tools/dotc/transform/Erasure.scala +++ b/src/dotty/tools/dotc/transform/Erasure.scala @@ -278,7 +278,7 @@ object Erasure extends TypeTestsCasts{ def select(qual: Tree, sym: Symbol): Tree = { val name = tree.typeOpt match { - case tp: NamedType if tp.name.isInheritedName => sym.name.inheritedName + case tp: NamedType if tp.name.isShadowedName => sym.name.shadowedName case _ => sym.name } untpd.cpy.Select(tree)(qual, sym.name) diff --git a/src/dotty/tools/dotc/transform/FirstTransform.scala b/src/dotty/tools/dotc/transform/FirstTransform.scala index bde17c854639..0de261149e1b 100644 --- a/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -119,7 +119,7 @@ class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer wi /* A this reference hide in a self ident, and be subsequently missed when deciding on whether outer accessors are needed and computing outer paths. 
- sWe do this normalization directly after Typer, because during typer the + We do this normalization directly after Typer, because during typer the ident should rest available for hyperlinking.*/ This(tpe.cls).withPos(tree.pos) case _ => normalizeType(tree) diff --git a/src/dotty/tools/dotc/transform/FullParameterization.scala b/src/dotty/tools/dotc/transform/FullParameterization.scala index 1fc9591d2cb0..2e30152754ec 100644 --- a/src/dotty/tools/dotc/transform/FullParameterization.scala +++ b/src/dotty/tools/dotc/transform/FullParameterization.scala @@ -92,7 +92,7 @@ trait FullParameterization { case _ => (0, info) } val ctparams = if(abstractOverClass) clazz.typeParams else Nil - val ctnames = ctparams.map(_.name.unexpandedName()) + val ctnames = ctparams.map(_.name.unexpandedName) /** The method result type */ def resultType(mapClassParams: Type => Type) = { diff --git a/src/dotty/tools/dotc/transform/LambdaLift.scala b/src/dotty/tools/dotc/transform/LambdaLift.scala index 1363615a57ad..a42e0cc352eb 100644 --- a/src/dotty/tools/dotc/transform/LambdaLift.scala +++ b/src/dotty/tools/dotc/transform/LambdaLift.scala @@ -161,8 +161,8 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisTransform if (callee.enclosingClass != caller.enclosingClass) calledFromInner += callee } - private class CollectDependencies(implicit ctx: Context) extends EnclosingMethodTraverser { - def traverse(enclMeth: Symbol, tree: Tree) = try { //debug + private class CollectDependencies extends EnclosingMethodTraverser { + def traverse(enclMeth: Symbol, tree: Tree)(implicit ctx: Context) = try { //debug val enclosure = enclMeth.skipConstructor val sym = tree.symbol def narrowTo(thisClass: ClassSymbol) = { diff --git a/src/dotty/tools/dotc/transform/LazyVals.scala b/src/dotty/tools/dotc/transform/LazyVals.scala index dde086089309..87644d2f9154 100644 --- a/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/src/dotty/tools/dotc/transform/LazyVals.scala @@ -83,7 +83,8 @@ class LazyVals extends MiniPhaseTransform with SymTransformer { * dotty.runtime(eg dotty.runtime.LazyInt) */ def transformLocalValDef(x: ValDef)(implicit ctx: Context) = x match { - case x@ValDef(name, tpt, valueInitter) => + case ValDef(name, tpt, _) => + val valueInitter = x.rhs val holderName = ctx.freshName(name.toString + StdNames.nme.LAZY_LOCAL).toTermName val initName = ctx.freshName(name.toString + StdNames.nme.LAZY_LOCAL_INIT).toTermName val tpe = x.tpe.widen @@ -162,7 +163,7 @@ class LazyVals extends MiniPhaseTransform with SymTransformer { } def transformFieldValDefNonVolatile(x: ValDef)(implicit ctx: Context) = x match { - case x@ValDef(name, tpt, rhs) if (x.mods is Flags.Lazy) => + case ValDef(name, tpt, _) if (x.mods is Flags.Lazy) => val claz = x.symbol.owner.asClass val tpe = x.tpe.widen assert(!(x.mods is Flags.Mutable)) @@ -171,17 +172,16 @@ class LazyVals extends MiniPhaseTransform with SymTransformer { val containerTree = ValDef(containerSymbol, initValue(tpe)) if (x.tpe.isNotNull && tpe <:< defn.AnyRefType) { // can use 'null' value instead of flag - val slowPath = DefDef(x.symbol.asTerm, mkDefNonThreadSafeNonNullable(containerSymbol, rhs)) + val slowPath = DefDef(x.symbol.asTerm, mkDefNonThreadSafeNonNullable(containerSymbol, x.rhs)) Thicket(List(containerTree, slowPath)) } else { val flagName = ctx.freshName(name.toString + StdNames.nme.BITMAP_PREFIX).toTermName val flagSymbol = ctx.newSymbol(x.symbol.owner, flagName, containerFlags, defn.BooleanType) val flag = ValDef(flagSymbol, 
Literal(Constants.Constant(false))) - val slowPath = DefDef(x.symbol.asTerm, mkNonThreadSafeDef(ref(containerSymbol), ref(flagSymbol), rhs)) + val slowPath = DefDef(x.symbol.asTerm, mkNonThreadSafeDef(ref(containerSymbol), ref(flagSymbol), x.rhs)) Thicket(List(containerTree, flag, slowPath)) } - } /** Create non-threadsafe lazy accessor equivalent to such code @@ -281,7 +281,7 @@ class LazyVals extends MiniPhaseTransform with SymTransformer { } def transformFieldValDefVolatile(x: ValDef)(implicit ctx: Context) = x match { - case x@ValDef(name, tpt, rhs) if (x.mods is Flags.Lazy) => + case ValDef(name, tpt, _) if (x.mods is Flags.Lazy) => assert(!(x.mods is Flags.Mutable)) val tpe = x.tpe.widen @@ -334,7 +334,7 @@ class LazyVals extends MiniPhaseTransform with SymTransformer { val state = Select(ref(helperModule), RLazyVals.Names.state.toTermName) val cas = Select(ref(helperModule), RLazyVals.Names.cas.toTermName) - val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, rhs, tpe, offset, getFlag, state, cas, setFlag, wait) + val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) if(flag eq EmptyTree) Thicket(List(containerTree, accessor)) else Thicket(List(containerTree, flag, accessor)) diff --git a/src/dotty/tools/dotc/transform/MacroTransform.scala b/src/dotty/tools/dotc/transform/MacroTransform.scala index 3a8bcc92071c..0f57c3ff5f6a 100644 --- a/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -58,12 +58,12 @@ abstract class MacroTransform extends Phase { tree case _: PackageDef | _: MemberDef => super.transform(tree)(localCtx(tree)) - case Template(constr, parents, self, body) => + case impl @ Template(constr, parents, self, _) => cpy.Template(tree)( transformSub(constr), transform(parents)(ctx.superCallContext), transformSelf(self), - transformStats(body, tree.symbol)) + transformStats(impl.body, tree.symbol)) case _ => super.transform(tree) } diff --git a/src/dotty/tools/dotc/transform/NormalizeFlags.scala b/src/dotty/tools/dotc/transform/NormalizeFlags.scala new file mode 100644 index 000000000000..fce2c3317078 --- /dev/null +++ b/src/dotty/tools/dotc/transform/NormalizeFlags.scala @@ -0,0 +1,30 @@ +package dotty.tools.dotc +package transform + +import core._ +import DenotTransformers.SymTransformer +import Phases.Phase +import Contexts.Context +import SymDenotations.SymDenotation +import TreeTransforms.MiniPhaseTransform +import Flags._, Symbols._ + +/** 1. Widens all private[this] and protected[this] qualifiers to just private/protected + * 2. Sets PureInterface flag for traits that only have pure interface members and that + * do not have initialization code. A pure interface member is either an abstract + * or alias type definition or a deferred val or def. 
+ */ +class NormalizeFlags extends MiniPhaseTransform with SymTransformer { thisTransformer => + override def phaseName = "elimLocals" + + def transformSym(ref: SymDenotation)(implicit ctx: Context) = { + var newFlags = ref.flags &~ Local + if (ref.is(NoInitsTrait) && ref.info.decls.forall(isPureInterfaceMember)) + newFlags |= PureInterface + if (newFlags != ref.flags) ref.copySymDenotation(initFlags = newFlags) + else ref + } + + private def isPureInterfaceMember(sym: Symbol)(implicit ctx: Context) = + if (sym.isTerm) sym.is(Deferred) else !sym.isClass +} diff --git a/src/dotty/tools/dotc/transform/Pickler.scala b/src/dotty/tools/dotc/transform/Pickler.scala new file mode 100644 index 000000000000..fb6b58e36474 --- /dev/null +++ b/src/dotty/tools/dotc/transform/Pickler.scala @@ -0,0 +1,81 @@ +package dotty.tools.dotc +package transform + +import core._ +import Contexts.Context +import Decorators._ +import pickling._ +import config.Printers.{noPrinter, pickling} +import java.io.PrintStream +import Periods._ +import Phases._ +import collection.mutable + +/** This miniphase pickles trees */ +class Pickler extends Phase { + import ast.tpd._ + + override def phaseName: String = "pickler" + + private def output(name: String, msg: String) = { + val s = new PrintStream(name) + s.print(msg) + s.close + } + + private val beforePickling = new mutable.HashMap[CompilationUnit, String] + + override def run(implicit ctx: Context): Unit = { + val unit = ctx.compilationUnit + if (!unit.isJava) { + val tree = unit.tpdTree + pickling.println(i"unpickling in run ${ctx.runId}") + if (ctx.settings.YtestPickler.value) beforePickling(unit) = tree.show + + val pickler = new TastyPickler + val treePkl = new TreePickler(pickler) + treePkl.pickle(tree :: Nil) + if (tree.pos.exists) + new PositionPickler(pickler, treePkl.buf.addrOfTree).picklePositions(tree :: Nil, tree.pos) + + unit.pickled = pickler.assembleParts() + def rawBytes = // not needed right now, but useful to print raw format. 
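[Editor's illustration — a minimal source-level sketch of the PureInterface criterion applied by the new NormalizeFlags phase above; the trait names are invented for illustration:]

trait PureIfc {                  // no initialization code; every member is a "pure interface" member:
  type T                         //   abstract type definition
  type Strings = List[String]    //   alias type definition
  def size: Int                  //   deferred def
  val name: String               //   deferred val
}                                // Namer marks it NoInits; NormalizeFlags then adds PureInterface

trait ImpureIfc {
  val cache = scala.collection.mutable.Map.empty[String, Int]   // concrete val: initialization code
  def size: Int = cache.size                                     // concrete def: not a deferred member
}                                // gets neither flag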
+ unit.pickled.iterator.grouped(10).toList.zipWithIndex.map { + case (row, i) => s"${i}0: ${row.mkString(" ")}" + } + // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG + if (pickling ne noPrinter) new TastyPrinter(unit.pickled).printContents() + } + } + + override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = { + val result = super.runOn(units) + if (ctx.settings.YtestPickler.value) + testUnpickler(units)(ctx.fresh.setPeriod(Period(ctx.runId + 1, FirstPhaseId))) + result + } + + private def testUnpickler(units: List[CompilationUnit])(implicit ctx: Context): Unit = { + pickling.println(i"testing unpickler at run ${ctx.runId}") + ctx.definitions.init + val unpicklers = + for (unit <- units) yield { + val unpickler = new DottyUnpickler(unit.pickled) + unpickler.enter(roots = Set()) + unpickler + } + for ((unpickler, unit) <- unpicklers zip units) { + val unpickled = unpickler.body(readPositions = false) + testSame(i"$unpickled%\n%", beforePickling(unit), unit) + } + } + + private def testSame(unpickled: String, previous: String, unit: CompilationUnit)(implicit ctx: Context) = + if (previous != unpickled) { + output("before-pickling.txt", previous) + output("after-pickling.txt", unpickled) + ctx.error(s"""pickling difference for $unit, for details: + | + | diff before-pickling.txt after-pickling.txt""".stripMargin) + } +} \ No newline at end of file diff --git a/src/dotty/tools/dotc/transform/RestoreScopes.scala b/src/dotty/tools/dotc/transform/RestoreScopes.scala index 5f2fd689dc12..8a6bb15ba89a 100644 --- a/src/dotty/tools/dotc/transform/RestoreScopes.scala +++ b/src/dotty/tools/dotc/transform/RestoreScopes.scala @@ -20,9 +20,9 @@ class RestoreScopes extends MiniPhaseTransform with IdentityDenotTransformer { t override def phaseName = "restoreScopes" override def transformTypeDef(tree: TypeDef)(implicit ctx: Context, info: TransformerInfo) = { - val TypeDef(_, Template(constr, _, _, body)) = tree + val TypeDef(_, impl: Template) = tree val restoredDecls = newScope - for (stat <- constr :: body) + for (stat <- impl.constr :: impl.body) if (stat.isInstanceOf[MemberDef] && stat.symbol.exists) restoredDecls.enter(stat.symbol) val cls = tree.symbol.asClass diff --git a/src/dotty/tools/dotc/transform/SuperAccessors.scala b/src/dotty/tools/dotc/transform/SuperAccessors.scala index 0d89e9d74848..b556288682a9 100644 --- a/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -76,6 +76,7 @@ class SuperAccessors extends MacroTransform with IdentityDenotTransformer { this buf += tree } + /** Turn types which are not methodic into ExprTypes. 
*/ private def ensureMethodic(tpe: Type)(implicit ctx: Context) = tpe match { case tpe: MethodicType => tpe case _ => ExprType(tpe) @@ -293,9 +294,17 @@ class SuperAccessors extends MacroTransform with IdentityDenotTransformer { this def transformTemplate = { val ownStats = new ListBuffer[Tree] accDefs(currentClass) = ownStats - val body1 = forwardParamAccessors(transformStats(impl.body, tree.symbol)) + // write super accessors after parameters and type aliases (so + // that order is stable under pickling/unpickling) + val (params, rest) = impl.body span { + case td: TypeDef => !td.isClassDef + case vd: ValOrDefDef => vd.symbol.flags is ParamAccessor + case _ => false + } + ownStats ++= params + val rest1 = forwardParamAccessors(transformStats(rest, tree.symbol)) accDefs -= currentClass - ownStats ++= body1 + ownStats ++= rest1 cpy.Template(impl)(body = ownStats.toList) } transformTemplate @@ -369,9 +378,9 @@ class SuperAccessors extends MacroTransform with IdentityDenotTransformer { this } transformSelect - case tree @ DefDef(_, _, _, _, rhs) => + case tree: DefDef => cpy.DefDef(tree)( - rhs = if (isMethodWithExtension(sym)) withInvalidOwner(transform(rhs)) else transform(rhs)) + rhs = if (isMethodWithExtension(sym)) withInvalidOwner(transform(tree.rhs)) else transform(tree.rhs)) case TypeApply(sel @ Select(qual, name), args) => mayNeedProtectedAccessor(sel, args, goToSuper = true) diff --git a/src/dotty/tools/dotc/transform/TailRec.scala b/src/dotty/tools/dotc/transform/TailRec.scala index b747636c74b3..2fd0c439c716 100644 --- a/src/dotty/tools/dotc/transform/TailRec.scala +++ b/src/dotty/tools/dotc/transform/TailRec.scala @@ -83,8 +83,8 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = { val sym = tree.symbol tree match { - case dd@DefDef(name, tparams, vparamss0, tpt, rhs0) - if (sym.isEffectivelyFinal) && !((sym is Flags.Accessor) || (rhs0 eq EmptyTree) || (sym is Flags.Label)) => + case dd@DefDef(name, tparams, vparamss0, tpt, _) + if (sym.isEffectivelyFinal) && !((sym is Flags.Accessor) || (dd.rhs eq EmptyTree) || (sym is Flags.Label)) => val mandatory = sym.hasAnnotation(defn.TailrecAnnotationClass) atGroupEnd { implicit ctx: Context => @@ -104,7 +104,7 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete // now this speculatively transforms tree and throws away result in many cases val rhsSemiTransformed = { val transformer = new TailRecElimination(origMeth, owner, thisTpe, mandatory, label, abstractOverClass = defIsTopLevel) - val rhs = atGroupEnd(transformer.transform(rhs0)(_)) + val rhs = atGroupEnd(transformer.transform(dd.rhs)(_)) rewrote = transformer.rewrote rhs } @@ -117,7 +117,7 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete } else { if (mandatory) ctx.error("TailRec optimisation not applicable, method not tail recursive", dd.pos) - rhs0 + dd.rhs } }) } diff --git a/src/dotty/tools/dotc/transform/TreeChecker.scala b/src/dotty/tools/dotc/transform/TreeChecker.scala index ce8f972544ef..b2d4abe10b54 100644 --- a/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -199,6 +199,8 @@ class TreeChecker extends Phase with SymTransformer { definedBinders -= tp case tp: ParamType => assert(definedBinders.contains(tp.binder), s"orphan param: $tp") + case tp: TypeVar => + apply(tp.underlying) case _ => mapOver(tp) } @@ -277,8 
+279,8 @@ class TreeChecker extends Phase with SymTransformer { super.typedStats(trees, exprOwner) } - override def ensureNoLocalRefs(block: Block, pt: Type, forcedDefined: Boolean = false)(implicit ctx: Context): Tree = - block + override def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol], forcedDefined: Boolean = false)(implicit ctx: Context): Tree = + tree override def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context) = { def isPrimaryConstructorReturn = diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index 47dfe157d305..582642325769 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -530,7 +530,7 @@ trait Applications extends Compatibility { self: Typer => if (proto.argsAreTyped) new ApplyToTyped(tree, fun1, funRef, proto.typedArgs, pt) else new ApplyToUntyped(tree, fun1, funRef, proto, pt)(argCtx) val result = app.result - ConstFold(result) + convertNewArray(ConstFold(result)) } { (failedVal, failedState) => val fun2 = tryInsertImplicitOnQualifier(fun1, proto) if (fun1 eq fun2) { @@ -596,14 +596,14 @@ trait Applications extends Compatibility { self: Typer => checkBounds(typedArgs, pt) case _ => } - convertNewArray( - assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)) + assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } - /** Rewrite `new Array[T]` trees to calls of newXYZArray methods. */ - def convertNewArray(tree: Tree)(implicit ctx: Context): Tree = tree match { - case TypeApply(tycon, targs) if tycon.symbol == defn.ArrayConstructor => - newArray(targs.head, tree.pos) + /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. 
*/ + def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { + case Apply(TypeApply(tycon, targ :: Nil), args) if tycon.symbol == defn.ArrayConstructor => + fullyDefinedType(tree.tpe, "array", tree.pos) + tpd.cpy.Apply(tree)(newArray(targ, tree.pos), args) case _ => tree } diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 1354b8926183..f7502d56dab4 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -169,7 +169,7 @@ object Checking { val checkTree = new TreeAccumulator[Unit] { def checkRef(tree: Tree, sym: Symbol) = if (sym.maybeOwner == refineCls && !seen(sym)) forwardRef(tree) - def apply(x: Unit, tree: Tree) = tree match { + def apply(x: Unit, tree: Tree)(implicit ctx: Context) = tree match { case tree: MemberDef => foldOver(x, tree) seen += tree.symbol diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala index 53968ae91ea7..23938ea998dd 100644 --- a/src/dotty/tools/dotc/typer/Implicits.scala +++ b/src/dotty/tools/dotc/typer/Implicits.scala @@ -501,7 +501,7 @@ trait Implicits { self: Typer => if (ctx.typerState.reporter.hasErrors) nonMatchingImplicit(ref) else if (contextual && !shadowing.tpe.isError && !refMatches(shadowing)) { - implicits.println(i"SHADOWING $ref is shadowed by $shadowing") + implicits.println(i"SHADOWING $ref in ${ref.termSymbol.owner} is shadowed by $shadowing in ${shadowing.symbol.owner}") shadowedImplicit(ref, methPart(shadowing).tpe) } else diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index 38c1e49c5669..424575f8c238 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -163,16 +163,24 @@ trait Inferencing { this: Checking => * If such a variable appears covariantly in type `tp` or does not appear at all, * approximate it by its lower bound. Otherwise, if it appears contravariantly * in type `tp` approximate it by its upper bound. + * @param ownedBy if it is different from NoSymbol, all type variables owned by + * `ownedBy` qualify, independent of position. + * Without that second condition, it can be that certain variables escape + * interpolation, for instance when their tree was eta-lifted, so + * the typechecked tree is no longer the tree in which the variable + * was declared. A concrete example of this phenomenon can be + * observed when compiling core.TypeOps#asSeenFrom. 
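[Editor's illustration — a small self-contained sketch of the source forms targeted by the `new Array[T](...)` rewrite in Applications above; the object and value names are invented:]

object NewArrayDemo {
  def main(args: Array[String]): Unit = {
    val ints  = new Array[Int](3)     // after Typer, Apply(TypeApply(Array, Int), 3) ...
    val names = new Array[String](2)  // ... is rewritten to the matching newXYZArray call
    ints(0) = 1
    names(0) = "a"
    println(ints.mkString(","))       // 1,0,0
    println(names.mkString(","))      // a,null
  }
}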
*/ - def interpolateUndetVars(tree: Tree)(implicit ctx: Context): Unit = { + def interpolateUndetVars(tree: Tree, ownedBy: Symbol)(implicit ctx: Context): Unit = { val constraint = ctx.typerState.constraint - val qualifies = (tvar: TypeVar) => tree contains tvar.owningTree + val qualifies = (tvar: TypeVar) => + (tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy def interpolate() = Stats.track("interpolateUndetVars") { val tp = tree.tpe.widen constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}") constr.println(s"qualifying undet vars: ${constraint.uninstVars filter qualifies map (tvar => s"$tvar / ${tvar.show}")}, constraint: ${constraint.show}") - val vs = tp.variances(qualifies) + val vs = variances(tp, qualifies) var changed = false vs foreachBinding { (tvar, v) => if (v != 0) { @@ -182,7 +190,7 @@ trait Inferencing { this: Checking => } } if (changed) // instantiations might have uncovered new typevars to interpolate - interpolateUndetVars(tree) + interpolateUndetVars(tree, ownedBy) else for (tvar <- constraint.uninstVars) if (!(vs contains tvar) && qualifies(tvar)) { @@ -198,7 +206,7 @@ trait Inferencing { this: Checking => * typevar is not uniquely determined, return that typevar in a Some. */ def maximizeType(tp: Type)(implicit ctx: Context): Option[TypeVar] = Stats.track("maximizeType") { - val vs = tp.variances(alwaysTrue) + val vs = variances(tp, alwaysTrue) var result: Option[TypeVar] = None vs foreachBinding { (tvar, v) => if (v == 1) tvar.instantiate(fromBelow = false) @@ -211,6 +219,41 @@ trait Inferencing { this: Checking => } result } + + type VarianceMap = SimpleMap[TypeVar, Integer] + + /** All occurrences of type vars in this type that satisfy predicate + * `include` mapped to their variances (-1/0/1) in this type, where + * -1 means: only covariant occurrences + * +1 means: only covariant occurrences + * 0 means: mixed or non-variant occurrences + * + * Note: We intentionally use a relaxed version of variance here, + * where the variance does not change under a prefix of a named type + * (the strict version makes prefixes invariant). This turns out to be + * better for type inference. In a nutshell, if a type variable occurs + * like this: + * + * (U? >: x.type) # T + * + * we want to instantiate U to x.type right away. No need to wait further. + */ + private def variances(tp: Type, include: TypeVar => Boolean)(implicit ctx: Context): VarianceMap = Stats.track("variances") { + val accu = new TypeAccumulator[VarianceMap] { + def apply(vmap: VarianceMap, t: Type): VarianceMap = t match { + case t: TypeVar if !t.isInstantiated && (ctx.typerState.constraint contains t) && include(t) => + val v = vmap(t) + if (v == null) vmap.updated(t, variance) + else if (v == variance) vmap + else vmap.updated(t, 0) + case _ => + foldOver(vmap, t) + } + override def applyToPrefix(vmap: VarianceMap, t: NamedType) = + apply(vmap, t.prefix) + } + accu(SimpleMap.Empty, tp) + } } /** An enumeration controlling the degree of forcing in "is-dully-defined" checks. 
*/ diff --git a/src/dotty/tools/dotc/typer/Mode.scala b/src/dotty/tools/dotc/typer/Mode.scala index 1a57fdc5420d..d7f3420ccd33 100644 --- a/src/dotty/tools/dotc/typer/Mode.scala +++ b/src/dotty/tools/dotc/typer/Mode.scala @@ -56,6 +56,12 @@ object Mode { /** Allow GADTFlexType labelled types to have their bounds adjusted */ val GADTflexible = newMode(8, "GADTflexible") + + /** Allow dependent functions. This is currently necessary for unpickling, because + * some dependent functions are passed through from the front end(s?), even though they + * are technically speaking illegal. + */ + val AllowDependentFunctions = newMode(9, "AllowDependentFunctions") val PatternOrType = Pattern | Type } \ No newline at end of file diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index c522a5998e90..357860290e3e 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -18,7 +18,7 @@ import config.Printers._ import language.implicitConversions trait NamerContextOps { this: Context => - + /** Enter symbol into current class, if current class is owner of current context, * or into current scope, if not. Should always be called instead of scope.enter * in order to make sure that updates to class members are reflected in @@ -79,6 +79,56 @@ trait NamerContextOps { this: Context => .dropWhile(_.owner != sym) .dropWhile(_.owner == sym) .next + + /** The given type, unless `sym` is a constructor, in which case the + * type of the constructed instance is returned + */ + def effectiveResultType(sym: Symbol, typeParams: List[Symbol], given: Type) = + if (sym.name == nme.CONSTRUCTOR) sym.owner.typeRef.appliedTo(typeParams map (_.typeRef)) + else given + + /** if isConstructor, make sure it has one non-implicit parameter list */ + def normalizeIfConstructor(paramSymss: List[List[Symbol]], isConstructor: Boolean) = + if (isConstructor && + (paramSymss.isEmpty || paramSymss.head.nonEmpty && (paramSymss.head.head is Implicit))) + Nil :: paramSymss + else + paramSymss + + /** The method type corresponding to given parameters and result type */ + def methodType(typeParams: List[Symbol], valueParamss: List[List[Symbol]], resultType: Type, isJava: Boolean = false)(implicit ctx: Context): Type = { + val monotpe = + (valueParamss :\ resultType) { (params, resultType) => + val make = + if (params.nonEmpty && (params.head is Implicit)) ImplicitMethodType + else if (isJava) JavaMethodType + else MethodType + if (isJava) + for (param <- params) + if (param.info.isDirectRef(defn.ObjectClass)) param.info = defn.AnyType + make.fromSymbols(params, resultType) + } + if (typeParams.nonEmpty) PolyType.fromSymbols(typeParams, monotpe) + else if (valueParamss.isEmpty) ExprType(monotpe) + else monotpe + } + + /** Find moduleClass/sourceModule in effective scope */ + private def findModuleBuddy(name: Name)(implicit ctx: Context) = { + val scope = effectiveScope + val it = scope.lookupAll(name).filter(_ is Module) + assert(it.hasNext, s"no companion $name in $scope") + it.next + } + + /** Add moduleClass or sourceModule functionality to completer + * for a module or module class + */ + def adjustModuleCompleter(completer: LazyType, name: Name) = + if (name.isTermName) + completer withModuleClass (_ => findModuleBuddy(name.moduleClassName)) + else + completer withSourceModule (_ => findModuleBuddy(name.sourceModuleName)) } /** This class creates symbols from definitions and imports and gives them @@ -163,14 +213,6 @@ class Namer { typer: Typer => } } - /** 
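[Editor's illustration — a source-level view of the signatures that the factored-out ctx.methodType above constructs; the object and method names are invented:]

object MethodTypeShapes {
  def poly[A](x: A, y: A): A = x                 // PolyType wrapping a MethodType
  def parameterless: Int = 1                     // ExprType: no value parameter list at all
  def withImplicit(x: Int)(implicit s: String): String = s * x  // outer MethodType, inner ImplicitMethodType
}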
Find moduleClass/sourceModule in effective scope */ - private def findModuleBuddy(name: Name)(implicit ctx: Context) = { - val scope = ctx.effectiveScope - val it = scope.lookupAll(name).filter(_ is Module) - assert(it.hasNext, s"no companion $name in $scope") - it.next - } - /** If this tree is a member def or an import, create a symbol of it * and store in symOfTree map. */ @@ -191,15 +233,9 @@ class Namer { typer: Typer => /** Add moduleClass/sourceModule to completer if it is for a module val or class */ def adjustIfModule(completer: LazyType, tree: MemberDef) = - if (tree.mods is Module) { - val name = tree.name.encode - if (name.isTermName) - completer withModuleClass (_ => findModuleBuddy(name.moduleClassName)) - else - completer withSourceModule (_ => findModuleBuddy(name.sourceModuleName)) - } + if (tree.mods is Module) ctx.adjustModuleCompleter(completer, tree.name.encode) else completer - + typr.println(i"creating symbol for $tree in ${ctx.mode}") def checkNoConflict(name: Name): Unit = { @@ -395,7 +431,7 @@ class Namer { typer: Typer => private def typeSig(sym: Symbol): Type = original match { case original: ValDef => if (sym is Module) moduleValSig(sym) - else valOrDefDefSig(original, sym, Nil, identity)(localContext(sym).setNewScope) + else valOrDefDefSig(original, sym, Nil, Nil, identity)(localContext(sym).setNewScope) case original: DefDef => val typer1 = new Typer nestedTyper(sym) = typer1 @@ -436,11 +472,11 @@ class Namer { typer: Typer => protected implicit val ctx: Context = localContext(cls).setMode(ictx.mode &~ Mode.InSuperCall) - val TypeDef(name, impl @ Template(constr, parents, self, body)) = original + val TypeDef(name, impl @ Template(constr, parents, self, _)) = original - val (params, rest) = body span { + val (params, rest) = impl.body span { case td: TypeDef => td.mods is Param - case td: ValDef => td.mods is ParamAccessor + case vd: ValDef => vd.mods is ParamAccessor case _ => false } @@ -495,13 +531,7 @@ class Namer { typer: Typer => index(rest)(inClassContext(selfInfo)) denot.info = ClassInfo(cls.owner.thisType, cls, parentRefs, decls, selfInfo) - if (cls is Trait) { - if (body forall isNoInitMember) { - cls.setFlag(NoInits) - if (body forall isPureInterfaceMember) - cls.setFlag(PureInterface) - } - } + if (impl.body forall isNoInitMember) cls.setFlag(NoInits) } } @@ -546,7 +576,7 @@ class Namer { typer: Typer => * @param paramFn A wrapping function that produces the type of the * defined symbol, given its final return type */ - def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, typeParams: List[Symbol], paramFn: Type => Type)(implicit ctx: Context): Type = { + def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, typeParams: List[Symbol], paramss: List[List[Symbol]], paramFn: Type => Type)(implicit ctx: Context): Type = { def inferredType = { /** A type for this definition that might be inherited from elsewhere: @@ -575,7 +605,7 @@ class Namer { typer: Typer => } val iResType = iInstInfo.finalResultType.asSeenFrom(site, cls) if (iResType.exists) - typr.println(s"using inherited type; raw: $iRawInfo, inst: $iInstInfo, inherited: $iResType") + typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inst: $iInstInfo, inherited: $iResType") tp & iResType } } @@ -627,14 +657,30 @@ class Namer { typer: Typer => lhsType orElse WildcardType } } - - val pt = mdef.tpt match { - case _: untpd.DerivedTypeTree => WildcardType - case TypeTree(untpd.EmptyTree) => inferredType - case _ => WildcardType + + val tptProto = mdef.tpt match { + case _: 
untpd.DerivedTypeTree => + WildcardType + case TypeTree(untpd.EmptyTree) => + inferredType + case TypedSplice(tpt: TypeTree) if !isFullyDefined(tpt.tpe, ForceDegree.none) => + val rhsType = typedAheadExpr(mdef.rhs, tpt.tpe).tpe + mdef match { + case mdef: DefDef if mdef.name == nme.ANON_FUN => + val hygienicType = avoid(rhsType, paramss.flatten) + if (!(hygienicType <:< tpt.tpe)) + ctx.error(i"return type ${tpt.tpe} of lambda cannot be made hygienic;\n" + + i"it is not a supertype of the hygienic type $hygienicType", mdef.pos) + //println(i"lifting $rhsType over $paramss -> $hygienicType = ${tpt.tpe}") + //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe }) + case _ => + } + WildcardType + case _ => + WildcardType } - paramFn(typedAheadType(mdef.tpt, pt).tpe) - } + paramFn(typedAheadType(mdef.tpt, tptProto).tpe) + } /** The type signature of a DefDef with given symbol */ def defDefSig(ddef: DefDef, sym: Symbol)(implicit ctx: Context) = { @@ -643,37 +689,19 @@ class Namer { typer: Typer => vparamss foreach completeParams val isConstructor = name == nme.CONSTRUCTOR def typeParams = tparams map symbolOfTree + val paramSymss = ctx.normalizeIfConstructor(vparamss.nestedMap(symbolOfTree), isConstructor) def wrapMethType(restpe: Type): Type = { - var paramSymss = vparamss.nestedMap(symbolOfTree) - // Make sure constructor has one non-implicit parameter list - if (isConstructor && - (paramSymss.isEmpty || paramSymss.head.nonEmpty && (paramSymss.head.head is Implicit))) - paramSymss = Nil :: paramSymss val restpe1 = // try to make anonymous functions non-dependent, so that they can be used in closures if (name == nme.ANON_FUN) avoid(restpe, paramSymss.flatten) else restpe - val monotpe = - (paramSymss :\ restpe1) { (params, restpe) => - val isJava = ddef.mods is JavaDefined - val make = - if (params.nonEmpty && (params.head is Implicit)) ImplicitMethodType - else if(isJava) JavaMethodType - else MethodType - if(isJava) params.foreach { symbol => - if(symbol.info.isDirectRef(defn.ObjectClass)) symbol.info = defn.AnyType - } - make.fromSymbols(params, restpe) - } - if (typeParams.nonEmpty) PolyType.fromSymbols(typeParams, monotpe) - else if (vparamss.isEmpty) ExprType(monotpe) - else monotpe + ctx.methodType(tparams map symbolOfTree, paramSymss, restpe1, isJava = ddef.mods is JavaDefined) } if (isConstructor) { // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) - wrapMethType(sym.owner.typeRef.appliedTo(typeParams map (_.typeRef))) + wrapMethType(ctx.effectiveResultType(sym, typeParams, NoType)) } - else valOrDefDefSig(ddef, sym, typeParams, wrapMethType) + else valOrDefDefSig(ddef, sym, typeParams, paramSymss, wrapMethType) } def typeDefSig(tdef: TypeDef, sym: Symbol)(implicit ctx: Context): Type = { diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala index c0e30b12ea5b..f646f7ecd2ab 100644 --- a/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -326,7 +326,7 @@ object ProtoTypes { def newTypeVars(pt: PolyType): List[TypeVar] = for (n <- (0 until pt.paramNames.length).toList) - yield new TypeVar(PolyParam(pt, n), state, owningTree) + yield new TypeVar(PolyParam(pt, n), state, owningTree, ctx.owner) val added = if (state.constraint contains pt) pt.duplicate(pt.paramNames, pt.paramBounds, pt.resultType) diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala 
b/src/dotty/tools/dotc/typer/TypeAssigner.scala index ea489f7b0f83..5ef3a768c5f2 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -47,6 +47,8 @@ trait TypeAssigner { sym.owner.isTerm && (forbidden contains sym) || !(sym.owner is Package) && toAvoid(tp.prefix) ) + case tp: TypeRef => + forbidden contains tp.symbol case _ => false } @@ -57,7 +59,7 @@ trait TypeAssigner { tp.info match { case TypeAlias(ref) => apply(ref) - case info: ClassInfo => + case info: ClassInfo if variance > 0 => val parentType = info.instantiatedParents.reduceLeft(ctx.typeComparer.andType(_, _)) def addRefinement(parent: Type, decl: Symbol) = { val inherited = parentType.findMember(decl.name, info.cls.thisType, Private) @@ -73,16 +75,22 @@ trait TypeAssigner { sym => sym.is(TypeParamAccessor | Private) || sym.isConstructor) val fullType = (parentType /: refinableDecls)(addRefinement) mapOver(fullType) + case TypeBounds(lo, hi) if variance > 0 => + apply(hi) case _ => mapOver(tp) } case tp: RefinedType => val tp1 @ RefinedType(parent1, _) = mapOver(tp) - if (tp1.refinedInfo existsPart toAvoid) { + if (tp1.refinedInfo.existsPart(toAvoid) && variance > 0) { typr.println(s"dropping refinement from $tp1") parent1 } else tp1 + case tp: TypeVar if ctx.typerState.constraint.contains(tp) => + val lo = ctx.typerState.constraint.fullLowerBound(tp.origin) + val lo1 = avoid(lo, symsToAvoid) + if (lo1 ne lo) lo1 else tp case _ => mapOver(tp) } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 5dc0452d1934..ceab2fd2b003 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -188,7 +188,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def wildImportRef(imp: ImportInfo): Type = { if (imp.isWildcardImport) { val pre = imp.site - if (!isDisabled(imp, pre) && !(imp.excluded contains name.toTermName)) { + if (!isDisabled(imp, pre) && !(imp.excluded contains name.toTermName) && name != nme.CONSTRUCTOR) { val denot = pre.member(name).accessibleFrom(pre)(refctx) if (reallyExists(denot)) return pre.select(name, denot) } @@ -445,12 +445,13 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit val exprCtx = index(tree.stats) val stats1 = typedStats(tree.stats, ctx.owner) val expr1 = typedExpr(tree.expr, pt)(exprCtx) - ensureNoLocalRefs(assignType(cpy.Block(tree)(stats1, expr1), stats1, expr1), pt) + ensureNoLocalRefs( + assignType(cpy.Block(tree)(stats1, expr1), stats1, expr1), pt, localSyms(stats1)) } - def escapingRefs(block: Block)(implicit ctx: Context): collection.Set[NamedType] = { + def escapingRefs(block: Tree, localSyms: => List[Symbol])(implicit ctx: Context): collection.Set[NamedType] = { var hoisted: Set[Symbol] = Set() - lazy val locals = localSyms(block.stats).toSet + lazy val locals = localSyms.toSet def leakingTypes(tp: Type): collection.Set[NamedType] = tp namedPartsWith (tp => locals.contains(tp.symbol)) def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty @@ -461,28 +462,31 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit leakingTypes(block.tpe) } - /** Check that block's type can be expressed without references to locally defined + /** Check that expression's type can be expressed without references to locally defined * symbols. The following two remedies are tried before giving up: - * 1. 
If the expected type of the block is fully defined, pick it as the + * 1. If the expected type of the expression is fully defined, pick it as the * type of the result expressed by adding a type ascription. * 2. If (1) fails, force all type variables so that the block's type is * fully defined and try again. */ - protected def ensureNoLocalRefs(block: Block, pt: Type, forcedDefined: Boolean = false)(implicit ctx: Context): Tree = { - val Block(stats, expr) = block - val leaks = escapingRefs(block) - if (leaks.isEmpty) block - else if (isFullyDefined(pt, ForceDegree.none)) { - val expr1 = Typed(expr, TypeTree(pt)) - cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant - } else if (!forcedDefined) { - fullyDefinedType(block.tpe, "block", block.pos) - val expr1 = Typed(expr, TypeTree(avoid(block.tpe, localSyms(stats)))) - val block1 = cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is already done - ensureNoLocalRefs(block1, pt, forcedDefined = true) + protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol], forcedDefined: Boolean = false)(implicit ctx: Context): Tree = { + def ascribeType(tree: Tree, pt: Type): Tree = tree match { + case block @ Block(stats, expr) => + val expr1 = ascribeType(expr, pt) + cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant + case _ => + Typed(tree, TypeTree(pt.simplified)) + } + val leaks = escapingRefs(tree, localSyms) + if (leaks.isEmpty) tree + else if (isFullyDefined(pt, ForceDegree.none)) ascribeType(tree, pt) + else if (!forcedDefined) { + fullyDefinedType(tree.tpe, "block", tree.pos) + val tree1 = ascribeType(tree, avoid(tree.tpe, localSyms)) + ensureNoLocalRefs(tree1, pt, localSyms, forcedDefined = true) } else - errorTree(block, - d"local definition of ${leaks.head.name} escapes as part of block's type ${block.tpe}"/*; full type: ${result.tpe.toString}"*/) + errorTree(tree, + d"local definition of ${leaks.head.name} escapes as part of expression's type ${tree.tpe}"/*; full type: ${result.tpe.toString}"*/) } def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context) = track("typedIf") { @@ -611,6 +615,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit throw new Error(i"internal error: closing over non-method $tp, pos = ${tree.pos}") } else typed(tree.tpt) + //println(i"typing closure $tree : ${meth1.tpe.widen}") assignType(cpy.Closure(tree)(env1, meth1, target), meth1, target) } @@ -667,7 +672,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case _ => } val guard1 = typedExpr(tree.guard, defn.BooleanType) - val body1 = typedExpr(tree.body, pt) + val body1 = ensureNoLocalRefs(typedExpr(tree.body, pt), pt, ctx.scope.toList) .ensureConforms(pt)(originalCtx) // insert a cast if body does not conform to expected type if we disregard gadt bounds assignType(cpy.CaseDef(tree)(pat, guard1, body1), body1) } @@ -794,13 +799,16 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements) val refineCls = createSymbol(refineClsDef).asClass - val TypeDef(_, Template(_, _, _, refinements1)) = typed(refineClsDef) + val TypeDef(_, impl: Template) = typed(refineClsDef) + val refinements1 = impl.body val seen = mutable.Set[Symbol]() assert(tree.refinements.length == 
refinements1.length, s"${tree.refinements} != $refinements1") def addRefinement(parent: Type, refinement: Tree): Type = { typr.println(s"adding refinement $refinement") checkRefinementNonCyclic(refinement, refineCls, seen) val rsym = refinement.symbol + if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty) + ctx.error(i"refinement $rsym without matching type in parent $parent", refinement.pos) val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info RefinedType(parent, rsym.name, rt => rinfo.substThis(refineCls, SkolemType(rt))) // todo later: check that refinement is within bounds @@ -838,7 +846,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def typedBind(tree: untpd.Bind, pt: Type)(implicit ctx: Context): Bind = track("typedBind") { val body1 = typed(tree.body, pt) typr.println(i"typed bind $tree pt = $pt bodytpe = ${body1.tpe}") - val sym = ctx.newSymbol(ctx.owner, tree.name, EmptyFlags, body1.tpe, coord = tree.pos) + val flags = if (tree.isType) BindDefinedType else EmptyFlags + val sym = ctx.newSymbol(ctx.owner, tree.name, flags, body1.tpe, coord = tree.pos) assignType(cpy.Bind(tree)(tree.name, body1), sym) } @@ -863,24 +872,24 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit } def typedValDef(vdef: untpd.ValDef, sym: Symbol)(implicit ctx: Context) = track("typedValDef") { - val ValDef(name, tpt, rhs) = vdef + val ValDef(name, tpt, _) = vdef addTypedModifiersAnnotations(vdef, sym) val tpt1 = typedType(tpt) - val rhs1 = rhs match { - case Ident(nme.WILDCARD) => rhs withType tpt1.tpe - case _ => typedExpr(rhs, tpt1.tpe) + val rhs1 = vdef.rhs match { + case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe + case rhs => typedExpr(rhs, tpt1.tpe) } assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) } def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) = track("typedDefDef") { - val DefDef(name, tparams, vparamss, tpt, rhs) = ddef + val DefDef(name, tparams, vparamss, tpt, _) = ddef addTypedModifiersAnnotations(ddef, sym) val tparams1 = tparams mapconserve (typed(_).asInstanceOf[TypeDef]) val vparamss1 = vparamss nestedMapconserve (typed(_).asInstanceOf[ValDef]) if (sym is Implicit) checkImplicitParamsNotSingletons(vparamss1) val tpt1 = typedType(tpt) - val rhs1 = typedExpr(rhs, tpt1.tpe) + val rhs1 = typedExpr(ddef.rhs, tpt1.tpe) assignType(cpy.DefDef(ddef)(name, tparams1, vparamss1, tpt1, rhs1), sym) //todo: make sure dependent method types do not depend on implicits or by-name params } @@ -893,7 +902,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit } def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(implicit ctx: Context) = track("typedClassDef") { - val TypeDef(name, impl @ Template(constr, parents, self, body)) = cdef + val TypeDef(name, impl @ Template(constr, parents, self, _)) = cdef val superCtx = ctx.superCallContext def typedParent(tree: untpd.Tree): Tree = if (tree.isType) typedType(tree)(superCtx) @@ -910,7 +919,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit val parents1 = ensureConstrCall(cls, parentsWithClass)(superCtx) val self1 = typed(self)(ctx.outer).asInstanceOf[ValDef] // outer context where class members are not visible val dummy = localDummy(cls, impl) - val body1 = typedStats(body, dummy)(inClassContext(self1.symbol)) + val body1 = typedStats(impl.body, dummy)(inClassContext(self1.symbol)) checkNoDoubleDefs(cls) val impl1 = cpy.Template(impl)(constr1, 
parents1, self1, body1) .withType(dummy.nonMemberTermRef) @@ -1169,8 +1178,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context) = /*>|>*/ track("adapt") /*<|<*/ { /*>|>*/ ctx.traceIndented(i"adapting $tree of type ${tree.tpe} to $pt", typr, show = true) /*<|<*/ { - interpolateUndetVars(tree) - tree overwriteType tree.tpe.simplified + interpolateUndetVars(tree, if (tree.isDef) tree.symbol else NoSymbol) + tree.overwriteType(tree.tpe.simplified) adaptInterpolated(tree, pt, original) } } @@ -1351,7 +1360,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case SearchSuccess(inferred, _, _) => adapt(inferred, pt) case failure: SearchFailure => - if (pt.isInstanceOf[ProtoType]) tree + if (pt.isInstanceOf[ProtoType] && !failure.isInstanceOf[AmbiguousImplicits]) tree else err.typeMismatch(tree, pt, failure) } } diff --git a/src/dotty/tools/dotc/typer/VarianceChecker.scala b/src/dotty/tools/dotc/typer/VarianceChecker.scala index 8fff5217056d..1d3ceaa578b6 100644 --- a/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -116,7 +116,7 @@ class VarianceChecker()(implicit ctx: Context) { case None => } - override def traverse(tree: Tree) = { + override def traverse(tree: Tree)(implicit ctx: Context) = { def sym = tree.symbol // No variance check for private/protected[this] methods/values. def skip = !sym.exists || sym.is(Local) diff --git a/src/dotty/tools/dotc/util/Positions.scala b/src/dotty/tools/dotc/util/Positions.scala index 46e4f4ee7db6..744f97340660 100644 --- a/src/dotty/tools/dotc/util/Positions.scala +++ b/src/dotty/tools/dotc/util/Positions.scala @@ -12,9 +12,18 @@ import language.implicitConversions object Positions { private val StartEndBits = 26 - private val StartEndMask: Long = (1L << StartEndBits) - 1 + val StartEndMask: Long = (1L << StartEndBits) - 1 private val SyntheticPointDelta = (1 << (64 - StartEndBits * 2)) - 1 + /** The maximal representable offset in a position */ + val MaxOffset = StartEndMask + + /** Convert offset `x` to an integer by sign extending the original + * field of `StartEndBits` width. + */ + def offsetToInt(x: Int) = + x << (32 - StartEndBits) >> (32 - StartEndBits) + /** A position indicates a range between a start offset and an end offset. * Positions can be synthetic or source-derived. A source-derived position * has in addition a point lies somewhere between start and end. 
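[Editor's illustration — a tiny self-contained check of the sign-extension trick used by the new offsetToInt helper in Positions above; the object name is invented:]

object OffsetDemo {
  val StartEndBits = 26
  // Re-interpret a value stored in a 26-bit field as a signed Int, as offsetToInt does.
  def offsetToInt(x: Int): Int = x << (32 - StartEndBits) >> (32 - StartEndBits)

  def main(args: Array[String]): Unit = {
    println(offsetToInt(5))              // 5: small offsets are unchanged
    println(offsetToInt((1 << 26) - 1))  // -1: the all-ones 26-bit pattern sign-extends to -1
  }
}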
The point @@ -108,12 +117,14 @@ object Positions { } } - private def fromOffsets(start: Int, end: Int, pointDelta: Int) = + private def fromOffsets(start: Int, end: Int, pointDelta: Int) = { + //assert(start <= end || start == 1 && end == 0, s"$start..$end") new Position( (start & StartEndMask).toLong | ((end & StartEndMask).toLong << StartEndBits) | (pointDelta.toLong << (StartEndBits * 2))) - + } + /** A synthetic position with given start and end */ def Position(start: Int, end: Int): Position = { val pos = fromOffsets(start, end, SyntheticPointDelta) diff --git a/src/dotty/tools/dotc/util/SourceFile.scala b/src/dotty/tools/dotc/util/SourceFile.scala index c5d88d7bf95f..45119a881538 100644 --- a/src/dotty/tools/dotc/util/SourceFile.scala +++ b/src/dotty/tools/dotc/util/SourceFile.scala @@ -99,7 +99,7 @@ case class SourceFile(file: AbstractFile, content: Array[Char]) { * Lines are numbered from 0 */ def offsetToLine(offset: Int): Int = { - lastLine = Util.bestFit(lineIndices, offset, lastLine) + lastLine = Util.bestFit(lineIndices, lineIndices.length, offset, lastLine) lastLine } diff --git a/src/dotty/tools/dotc/util/Util.scala b/src/dotty/tools/dotc/util/Util.scala index ed9a54e38d09..98f0b62dbbde 100644 --- a/src/dotty/tools/dotc/util/Util.scala +++ b/src/dotty/tools/dotc/util/Util.scala @@ -11,18 +11,16 @@ object Util { * `candidates.length/2`. * @pre candidates is sorted */ - def bestFit(candidates: Array[Int], x: Int, hint: Int = -1): Int = { + def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = { def recur(lo: Int, hi: Int, mid: Int): Int = if (x < candidates(mid)) recur(lo, mid - 1, (lo + mid - 1) / 2) - else if (mid + 1 < candidates.length && x >= candidates(mid + 1)) + else if (mid + 1 < length && x >= candidates(mid + 1)) recur(mid + 1, hi, (mid + 1 + hi) / 2) else mid - val initMid = - if (0 <= hint && hint < candidates.length) hint - else candidates.length / 2 - if (candidates.isEmpty || x < candidates(0)) -1 - else recur(0, candidates.length, initMid) + val initMid = if (0 <= hint && hint < length) hint else length / 2 + if (length == 0 || x < candidates(0)) -1 + else recur(0, length, initMid) } /** An array twice the size of given array, with existing elements copied over */ diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 3babe7bc686f..2a1ef08ee4d1 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -19,6 +19,11 @@ class tests extends CompilerTest { "-d", "./out/" ) + val doEmitBytecode = List("-Ystop-before:terminal") + val failedbyName = List("-Ystop-before:collectEntryPoints") // #288 + val failedUnderscore = List("-Ystop-before:collectEntryPoints") // #289 + val testPickling = List("-Xprint-types", "-Ytest-pickler", "-Ystop-after:pickler") + val failedOther = List("-Ystop-before:collectEntryPoints") // some non-obvious reason. 
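[Editor's illustration — the Util.bestFit change above threads an explicit `length` so callers can restrict the search to an initial prefix of the array; a small runnable sketch of its behaviour, using a standalone copy of the modified method (names invented):]

object BestFitDemo {
  // Standalone copy of Util.bestFit as modified above, for illustration only.
  def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = {
    def recur(lo: Int, hi: Int, mid: Int): Int =
      if (x < candidates(mid)) recur(lo, mid - 1, (lo + mid - 1) / 2)
      else if (mid + 1 < length && x >= candidates(mid + 1)) recur(mid + 1, hi, (mid + 1 + hi) / 2)
      else mid
    val initMid = if (0 <= hint && hint < length) hint else length / 2
    if (length == 0 || x < candidates(0)) -1
    else recur(0, length, initMid)
  }

  def main(args: Array[String]): Unit = {
    val lineStarts = Array(0, 10, 25, 40, 0, 0)   // only the first 4 slots are populated
    println(bestFit(lineStarts, 4, 26))           // 2: offset 26 falls on the line starting at 25
    println(bestFit(lineStarts, 4, 5, hint = 0))  // 0: a hint seeds the binary search
    println(bestFit(lineStarts, 4, -1))           // -1: before the first candidate
  }
}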
need to look deeper val twice = List("#runs", "2", "-YnoDoubleBindings") val staleSymbolError: List[String] = List() @@ -30,6 +35,12 @@ class tests extends CompilerTest { val negDir = "./tests/neg/" val newDir = "./tests/new/" val dotcDir = "./src/dotty/" + val picklingDir = "./tests/pickling" + + @Test def pickle_pickleOK = compileDir(picklingDir, testPickling) + @Test def pickle_pickling = compileDir(dotcDir + "tools/dotc/core/pickling/", testPickling) + + //@Test def pickle_core = compileDir(dotcDir + "tools/dotc/core", testPickling, xerrors = 2) // two spurious comparison errors in Types and TypeOps @Test def pos_t2168_pat = compileFile(posDir, "t2168") @Test def pos_erasure = compileFile(posDir, "erasure") @@ -56,7 +67,6 @@ class tests extends CompilerTest { @Test def pos_overrides() = compileFile(posDir, "overrides") @Test def pos_javaOverride() = compileDir(posDir + "java-override") @Test def pos_templateParents() = compileFile(posDir, "templateParents") - @Test def pos_structural() = compileFile(posDir, "structural") @Test def pos_overloadedAccess = compileFile(posDir, "overloadedAccess") @Test def pos_approximateUnion = compileFile(posDir, "approximateUnion") @Test def pos_tailcall = compileDir(posDir + "tailcall/") @@ -87,16 +97,10 @@ class tests extends CompilerTest { @Test def neg_over = compileFile(negDir, "over", xerrors = 3) @Test def neg_overrides = compileFile(negDir, "overrides", xerrors = 11) @Test def neg_projections = compileFile(negDir, "projections", xerrors = 1) - @Test def neg_i39 = compileFile(negDir, "i39", xerrors = 1) - @Test def neg_i50_volatile = compileFile(negDir, "i50-volatile", xerrors = 4) + @Test def neg_i39 = compileFile(negDir, "i39", xerrors = 2) + @Test def neg_i50_volatile = compileFile(negDir, "i50-volatile", xerrors = 6) @Test def neg_t0273_doubledefs = compileFile(negDir, "t0273", xerrors = 1) - @Test def neg_t0586_structural = compileFile(negDir, "t0586", xerrors = 1) - @Test def neg_t0625_structural = compileFile(negDir, "t0625", xerrors = 1)( - defaultOptions = noCheckOptions) - // -Ycheck fails because there are structural types involving higher-kinded types. - // these are illegal, but are tested only later. 
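[Editor's note — further directories could be round-tripped in the same way as the pickling tests added above; a hypothetical extra test, assuming the chosen directory currently pickles without differences:]

  @Test def pickle_ast = compileDir(dotcDir + "tools/dotc/ast/", testPickling)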
- @Test def neg_t1131_structural = compileFile(negDir, "t1131", xerrors = 1) - @Test def neg_zoo = compileFile(negDir, "zoo", xerrors = 1) + @Test def neg_zoo = compileFile(negDir, "zoo", xerrors = 12) @Test def neg_t1192_legalPrefix = compileFile(negDir, "t1192", xerrors = 1) @Test def neg_tailcall_t1672b = compileFile(negDir, "tailcall/t1672b", xerrors = 6) @Test def neg_tailcall_t3275 = compileFile(negDir, "tailcall/t3275", xerrors = 1) @@ -108,13 +112,13 @@ class tests extends CompilerTest { @Test def neg_t1843_variances = compileFile(negDir, "t1843-variances", xerrors = 1) @Test def neg_t2660_ambi = compileFile(negDir, "t2660", xerrors = 2) @Test def neg_t2994 = compileFile(negDir, "t2994", xerrors = 2) - @Test def neg_subtyping = compileFile(negDir, "subtyping", xerrors = 2) + @Test def neg_subtyping = compileFile(negDir, "subtyping", xerrors = 4) @Test def neg_variances = compileFile(negDir, "variances", xerrors = 2) @Test def neg_badAuxConstr = compileFile(negDir, "badAuxConstr", xerrors = 2) @Test def neg_typetest = compileFile(negDir, "typetest", xerrors = 1) @Test def neg_t1569_failedAvoid = compileFile(negDir, "t1569-failedAvoid", xerrors = 1) @Test def neg_cycles = compileFile(negDir, "cycles", xerrors = 8) - @Test def neg_boundspropagation = compileFile(negDir, "boundspropagation", xerrors = 4) + @Test def neg_boundspropagation = compileFile(negDir, "boundspropagation", xerrors = 5) @Test def neg_refinedSubtyping = compileFile(negDir, "refinedSubtyping", xerrors = 2) @Test def neg_i0091_infpaths = compileFile(negDir, "i0091-infpaths", xerrors = 3) @Test def neg_i0248_inherit_refined = compileFile(negDir, "i0248-inherit-refined", xerrors = 4) @@ -178,7 +182,6 @@ class tests extends CompilerTest { val javaDir = "./tests/pos/java-interop/" @Test def java_all = compileFiles(javaDir) - - + //@Test def dotc_compilercommand = compileFile(dotcDir + "tools/dotc/config/", "CompilerCommand") } diff --git a/test/test/DeSugarTest.scala b/test/test/DeSugarTest.scala index 66fb70158787..016ab536140b 100644 --- a/test/test/DeSugarTest.scala +++ b/test/test/DeSugarTest.scala @@ -57,14 +57,14 @@ class DeSugarTest extends ParserTest { cpy.SeqLiteral(tree1)(transform(elems)) case UnApply(fun, implicits, patterns) => cpy.UnApply(tree1)(transform(fun, Expr), transform(implicits), transform(patterns)) - case ValDef(name, tpt, rhs) => - cpy.ValDef(tree1)(name, transform(tpt, Type), transform(rhs)) - case DefDef(name, tparams, vparamss, tpt, rhs) => - cpy.DefDef(tree1)(name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt, Type), transform(rhs)) + case tree1 @ ValDef(name, tpt, _) => + cpy.ValDef(tree1)(name, transform(tpt, Type), transform(tree1.rhs)) + case tree1 @ DefDef(name, tparams, vparamss, tpt, _) => + cpy.DefDef(tree1)(name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt, Type), transform(tree1.rhs)) case tree1 @ TypeDef(name, rhs) => cpy.TypeDef(tree1)(name, transform(rhs, Type), transformSub(tree1.tparams)) - case Template(constr, parents, self, body) => - cpy.Template(tree1)(transformSub(constr), transform(parents), transformSub(self), transform(body, Expr)) + case impl @ Template(constr, parents, self, _) => + cpy.Template(tree1)(transformSub(constr), transform(parents), transformSub(self), transform(impl.body, Expr)) case Thicket(trees) => Thicket(flatten(trees mapConserve super.transform)) case tree1 => diff --git a/test/test/DottyTest.scala b/test/test/DottyTest.scala index 21eebd660c59..fc9cff7822d6 100644 --- 
a/test/test/DottyTest.scala +++ b/test/test/DottyTest.scala @@ -27,7 +27,7 @@ class DottyTest /*extends ContextEscapeDetection*/ { // .withSetting(debugTrace, true) // .withSetting(prompt, true) //.setSetting(Ylogcp, true) - .setSetting(printtypes, true) + //.setSetting(printtypes, true) .setSetting(pageWidth, 90) .setSetting(log, List(" R): R = + try { + action(s) + } finally { + s.close() + } +} +} + +package p3 { +object Test { + def idMap[C[_],T](m: { def map[U](f: T => U): C[U] }): C[T] = m.map(t => t) + + def main(args: Array[String]): Unit = { + idMap(Some(5)) + idMap(Responder.constant(5)) + } +} +} +package p4 { + +trait A { self: Any { def p: Any } => + def f(b: => Unit): Unit = {} + f { p } // error: cannot access member 'p' from structural type +} +} + +package p5 { +// t2810 +object Test { + val closeable1: { def close(): Unit } = new scala.io.Source { val iter: Iterator[Char] = "".iterator } + val closeable2: { def close(): Unit } = new java.io.Closeable { def close() = {} } +} +} + +package p6 { + + class Refinements { + val y: C { val x: T; type T } // was adeprecated warning: illegal forward reference in refinement; now illegal + } + +} diff --git a/tests/neg/t0586.scala b/tests/neg/t0586.scala deleted file mode 100644 index 540e225a1465..000000000000 --- a/tests/neg/t0586.scala +++ /dev/null @@ -1,9 +0,0 @@ -object RClose { - type ReflectCloseable = { def close(): Unit } - def withReflectCloseable[T <: ReflectCloseable, R](s: T)(action: T => R): R = - try { - action(s) - } finally { - s.close() - } -} diff --git a/tests/neg/t0625.scala b/tests/neg/t0625.scala deleted file mode 100644 index 56145425998f..000000000000 --- a/tests/neg/t0625.scala +++ /dev/null @@ -1,8 +0,0 @@ -object Test { - def idMap[C[_],T](m: { def map[U](f: T => U): C[U] }): C[T] = m.map(t => t) - - def main(args: Array[String]): Unit = { - idMap(Some(5)) - idMap(Responder.constant(5)) - } -} diff --git a/tests/neg/t1131.scala b/tests/neg/t1131.scala deleted file mode 100644 index f4a7b377d98a..000000000000 --- a/tests/neg/t1131.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait A { self: Any { def p: Any } => - def f(b: => Unit): Unit = {} - f { p } // error: cannot access member 'p' from structural type -} diff --git a/tests/neg/typers.scala b/tests/neg/typers.scala index 226fd2310408..b5bd1fa2c129 100644 --- a/tests/neg/typers.scala +++ b/tests/neg/typers.scala @@ -60,8 +60,4 @@ object typers { 123 } } - - class Refinements { - val y: C { val x: T; type T } // deprecated warning: illegal forward reference in refinement - } } diff --git a/tests/pickling/Coder.scala b/tests/pickling/Coder.scala new file mode 100644 index 000000000000..77bbd134c150 --- /dev/null +++ b/tests/pickling/Coder.scala @@ -0,0 +1,59 @@ +import collection.mutable.HashMap + +class Coder(words: List[String]) { + + (2 -> "ABC", new ArrowAssoc('3') -> "DEF") + + private val mnemonics = Map( + '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL", + '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ") + + + ('1', "1") match { + case (digit, str) => true + case _ => false + } + + /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... 
'9' */ + private val charCode0: Map[Char, Char] = mnemonics withFilter { + case (digit, str) => true + case _ => false + } flatMap { x$1 => + x$1 match { + case (digit, str) => str map (ltr => ltr -> digit) + } + } + + private val charCode: Map[Char, Char] = + for ((digit, str) <- mnemonics; ltr <- str) yield ltr -> digit + + /** Maps a word to the digit string it can represent */ + private def wordCode(word: String): String = word map charCode + + /** A map from digit strings to the words that represent them */ + private val wordsForNum: Map[String, List[String]] = + words groupBy wordCode withDefaultValue Nil + + /** All ways to encode a number as a list of words */ + def encode(number: String): Set[List[String]] = + if (number.isEmpty) Set(Nil) + else { + for { + splitPoint <- 1 to number.length + word <- wordsForNum(number take splitPoint) + rest <- encode(number drop splitPoint) + } yield word :: rest + }.toSet + + /** Maps a number to a list of all word phrases that can represent it */ + def translate(number: String): Set[String] = encode(number) map (_ mkString " ") + +} + +object Coder { + def main(args : Array[String]) : Unit = { + val coder = new Coder(List("Scala", "sobls", "Python", "Ruby", "C", "A", "rocks", "sucks", "works", "Racka")) +// println(coder.wordsForNum) + println(coder.translate("7225276257")) + } +} diff --git a/tests/pickling/Labels.scala b/tests/pickling/Labels.scala new file mode 100644 index 000000000000..4a84175aff86 --- /dev/null +++ b/tests/pickling/Labels.scala @@ -0,0 +1,21 @@ +object Labels { + def main(args: Array[String]): Unit = { + var i = 10 + while(i>0) { + var j = 0 + while(j0) => println("one") + case t@2 => println("two" + t) + case _ => println("default") + } +} diff --git a/tests/pickling/alias.scala b/tests/pickling/alias.scala new file mode 100644 index 000000000000..a66edc73a121 --- /dev/null +++ b/tests/pickling/alias.scala @@ -0,0 +1,3 @@ +class A(val x: Int) + +class B(x: Int) extends A(x) diff --git a/tests/pickling/arrays2.scala b/tests/pickling/arrays2.scala new file mode 100644 index 000000000000..1a0d3e660c95 --- /dev/null +++ b/tests/pickling/arrays2.scala @@ -0,0 +1,32 @@ +package arrays + +case class C(); + +object arrays2 { + + def main(args: Array[String]): Unit = { + val a: Array[Array[C]] = new Array[Array[C]](2); + a(0) = new Array[C](2); + a(0)(0) = new C(); + } +} + +// #2422 +object arrays4 { + val args = Array[String]("World") + "Hello %1$s".format(args: _*) +} + +// #2461 +object arrays3 { + import scala.collection.JavaConversions._ + def apply[X](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*) + + def apply1[X <: String](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*) + def apply2[X <: AnyVal](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*) + def apply3(xs : Int*) : java.util.List[Int] = java.util.Arrays.asList(xs: _*) + def apply4(xs : Unit*) : java.util.List[Unit] = java.util.Arrays.asList(xs: _*) + def apply5(xs : Null*) : java.util.List[Null] = java.util.Arrays.asList(xs: _*) + def apply6(xs : Nothing*) : java.util.List[Nothing] = java.util.Arrays.asList(xs: _*) +} + diff --git a/tests/pickling/desugar.scala b/tests/pickling/desugar.scala new file mode 100644 index 000000000000..0d3b6d8ca624 --- /dev/null +++ b/tests/pickling/desugar.scala @@ -0,0 +1,88 @@ +object desugar { + + // variables + var x: Int = 2 + var y = x * x + val list = List(1, 2, 3) + + { var z: Int = y } + + def foo0(first: Int, second: Int = 2, third: Int = 3) = first + second + def foo1(first: Int, 
second: Int = 2)(third: Int = 3) = first + second + def foo2(first: Int)(second: Int = 2)(third: Int = 3) = first + second + + object caseClasses { self => + trait List[+T] { + def head: T + def tail: List[T] + } + + case class Cons[+T](val head: T, val tail: List[T]) extends List[T] + + object Cons { + def apply[T](head: T): Cons[T] = apply(head, Nil) + } + + case object Nil extends List[Nothing] { + def head = throw new Error() + def tail = throw new Error() + } + } + + object patDefs { + + import caseClasses._ + + val xs: List[Int] = Cons(1, Cons(2, Nil)) + + val Cons(y, ys) = xs + val Cons(z, _) = xs + val Cons(_, _) = xs + + val (cons: Cons[Int]) = xs + + val x1, y1, z1: Int = 1 + } + + object Binops { + + x :: y :: Nil + + val x :: y :: Nil = list + + } + + object fors { + + for (x <- List(1, 2, 3)) yield 2 + for (x <- List(1, 2, 3) if x % 2 == 0) yield x * x + for (x <- List(1, 2, 3); y <- 0 to x) yield x * y + for (x <- List(1, 2, 3); y <- 0 to x; if x + y % 2 == 0) yield x * y + for (x <- List(1, 2, 3); y = x * x; if x + y % 2 == 0) yield x * y + for (x <- List(1, 2, 3); y = x * x; z = x * y; u <- 0 to y) yield x * y * z * u + + for (x <- List(1, 2, 3)) println(x) + for (x <- List(1, 2, 3) if x % 2 == 0) println(x * x) + for (x <- List(1, 2, 3); y <- 0 to x) println(x * y) + for (x <- List(1, 2, 3); y <- 0 to x; if x + y % 2 == 0) println(x * y) + for (x <- List(1, 2, 3); y = x * x; if x + y % 2 == 0) println(x * y) + for (x <- List(1, 2, 3); y = x * x; z = x * y; u <- 0 to y) println(x * y * z * u) + } + + object misc { + 'hello + s"this is a $x + ${x + y} string" + type ~ = Tuple2 + val pair: Int ~ String = 1 -> "abc" + def foo(xs: Int*) = xs.length + foo(list: _*) + println(list: _*) + (list length) + - desugar.x + def bar(x: => Int) = x + (x + y) + 1 + while (x < 10) x += 1 + do x -= 1 while (x > 0) + } + +} diff --git a/tests/pickling/extmethods.scala b/tests/pickling/extmethods.scala new file mode 100644 index 000000000000..1cbc9f2eef85 --- /dev/null +++ b/tests/pickling/extmethods.scala @@ -0,0 +1,8 @@ +package extMethods + +trait That1[A] +class T[A, This <: That1[A]](val x: Int) extends AnyVal { + self: This => + var next: This = _ + final def loop(x: This, cnt: Int): Int = loop(x, cnt + 1) +} diff --git a/tests/pickling/hk.scala b/tests/pickling/hk.scala new file mode 100644 index 000000000000..9fdaf94f6ca8 --- /dev/null +++ b/tests/pickling/hk.scala @@ -0,0 +1,56 @@ +import language.higherKinds + +object hk0 { + + abstract class Base { + type Rep[T] + val strRep: Rep[String] + } + + class Sub extends Base { + type Rep[T] = T + val strRep = "abc" + val sr: Rep[String] = "" + } + + abstract class Functor[F[_]] { + def map[A, B](f: A => B): F[A] => F[B] + } + val ml: Functor[List] = ??? 
+ val mx = ml + val mm: (Int => Boolean) => List[Int] => List[Boolean] = mx.map +} + +object higherKinded { + + type Untyped = Null + + class Tree[-T >: Untyped] { + type ThisType[-U >: Untyped] <: Tree[U] + def withString(s: String): ThisType[String] = withString(s) + } + + class Ident[-T >: Untyped] extends Tree[T] { + type ThisType[-U] = Ident[U] + } + + val id = new Ident[Integer] + + val y = id.withString("abc") + + val z: Ident[String] = y + + val zz: tpd.Tree = y + + abstract class Instance[T >: Untyped] { + type Tree = higherKinded.Tree[T] + } + + object tpd extends Instance[String] + + def transform(tree: Tree[String]) = { + val tree1 = tree.withString("") + tree1: Tree[String] + } + +} diff --git a/tests/pickling/i94-nada.scala b/tests/pickling/i94-nada.scala new file mode 100644 index 000000000000..ce8dc98adb56 --- /dev/null +++ b/tests/pickling/i94-nada.scala @@ -0,0 +1,45 @@ +package i94 + +import scala.language.higherKinds + +trait Base { + type Rep[T] +} + +trait BaseExp extends Base { + type Rep[T] = Exp[T] + case class Exp[T](v: T) +} + +trait BaseStr extends Base { + type Rep[T] = String +} + +trait BaseDirect extends Base { + type Rep[T] = T +} + +trait Test1 { + trait Monad[X] { + def x: X + } + sealed abstract class Either[A,B] + case class Left[A,B](x: A) extends Either[A,B] with Monad[A] + case class Right[A,B](x: B) extends Either[A,B] with Monad[B] + def flatMap[X,Y,M[X]<:Monad[X]](m: M[X], f: X => M[Y]): M[Y] = f(m.x) + println(flatMap(Left(1), {x: Int => Left(x)})) +} +trait Test2 { + trait Monad[X] { + def x: X + } + sealed abstract class Either[A,B] + case class Left[A,B](x: A) extends Either[A,B] with Monad[A] + case class Right[A,B](x: B) extends Either[A,B] with Monad[B] + def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] + println(flatMap(Left(1), {x: Int => Left(x)})) +} +trait Test3 { + def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] + println(flatMap(Some(1), {x: Int => Some(x)})) +} diff --git a/tests/pickling/nameddefaults.scala b/tests/pickling/nameddefaults.scala new file mode 100644 index 000000000000..671f14a07386 --- /dev/null +++ b/tests/pickling/nameddefaults.scala @@ -0,0 +1,63 @@ +object nameddefaults { + + def foo(first: Int, second: Int = 2, third: Int = 3) = first + second + + var x = 1 + var y = 2 + + foo(1, 2, 3) + + foo(1, 2) + + foo(1) + + // named and missing arguments + + foo(first = 1, second = 3) + + foo(second = 3, first = 1) + + foo(first = 2, third = 3) + + foo(2, third = 3) + + // same but with non-idempotent expressions + + foo(first = x, second = y) + + foo(second = x, first = y) + + foo(first = x, third = y) + + foo(x, third = y) + +// The same thing, but for classes + + class C(first: Int, second: Int = 2, third: Int = 3) {} + + new C(1, 2, 3) + + new C(1, 2) + + new C(1) + + // named and missing arguments + + new C(first = 1, second = 3) + + new C(second = 3, first = 1) + + new C(first = 2, third = 3) + + new C(2, third = 3) + + // same but with non-idempotent expressions + + new C(first = x, second = y) + + new C(second = x, first = y) + + new C(first = x, third = y) + + +} diff --git a/tests/pickling/new-array.scala b/tests/pickling/new-array.scala new file mode 100644 index 000000000000..a323783de945 --- /dev/null +++ b/tests/pickling/new-array.scala @@ -0,0 +1,17 @@ +package newArray + +object Test { + val w = new Array[String](10) + val x = new Array[Int](10) + def f[T: reflect.ClassTag] = new Array[T](10) + val y = new Array[Any](10) + val z = new Array[Unit](10) +} +object Test2 { + val w: Array[Any] = new 
Array(10) + val x: Array[Int] = new Array(10) + def f[T: reflect.ClassTag]: Array[T] = new Array(10) + val y: Array[Any] = new Array(10) + val z: Array[Unit] = new Array(10) +} + diff --git a/tests/pickling/partialApplications.scala b/tests/pickling/partialApplications.scala new file mode 100644 index 000000000000..f517011b995a --- /dev/null +++ b/tests/pickling/partialApplications.scala @@ -0,0 +1,13 @@ +object PartialApplications { + + type Histogram = Map[_, Int] + + type StringlyHistogram = Histogram[_ >: String] + + val xs: Histogram[String] = Map[String, Int]() + + val ys: StringlyHistogram[String] = xs + + val zs: StringlyHistogram = xs + +} diff --git a/tests/pickling/selftypes.scala b/tests/pickling/selftypes.scala new file mode 100644 index 000000000000..243405f77d12 --- /dev/null +++ b/tests/pickling/selftypes.scala @@ -0,0 +1,20 @@ +object selftypes { + + trait A { self: AB => + + type AA = List[this.BX] + + class AX + + } + + trait B { self: AB => + + type BB = AA + + class BX + } + + class AB extends A with B + +} \ No newline at end of file diff --git a/tests/pickling/t1957.scala b/tests/pickling/t1957.scala new file mode 100644 index 000000000000..5fcc1723bff5 --- /dev/null +++ b/tests/pickling/t1957.scala @@ -0,0 +1,65 @@ +package t1957 + +// See comment at end of file. +object Test { + abstract class Settings {} + + abstract class Grist + { self => + type settingsType <: Settings + type moduleType <: Module {type settingsType = self.settingsType} + val module: moduleType + } + + abstract class Tool + { self => + type settingsType <: Settings + type moduleType = Module { type settingsType = self.settingsType } + type gristType = Grist { type moduleType <: self.moduleType; type settingsType <: self.settingsType } + + def inputGrist: List[gristType] + } + + abstract class Module + { self => + type settingsType <: Settings + final type commonModuleType = Module {type settingsType = self.settingsType} + type selfType >: self.type <: commonModuleType + + // BTW: if we use the commented out type decls, the code compiles successfully + // type gristType = Grist {type settingsType <: self.settingsType; type moduleType <: commonModuleType } + + val tools: List[Tool {type settingsType = self.settingsType}] + + protected def f: List[commonModuleType] = + { + val inputGrists = tools.flatMap(_.inputGrist) + // This produces an unhygienic closure for _.inputGrist. + // Pickling will log: + // + // [...] pickling reference to as yet undefined value _$1 in method $anonfun + // + // More info can be produced by uncommenting these two lines in + // Namer#valOrDefDefSig: + // + //println(i"lifting $rhsType over $paramss -> $hygienicType = ${tpt.tpe}") + //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe }) + // + // Tracing the subtype statement (over 1600+ lines!) shows that the TypeComparer thinks that the + // following subtype judgement is true: + // + // Test.Grist{ + // moduleType <: Test.Module{settingsType = Module.this.settingsType}; + // settingsType <: Module.this.settingsType + // } <:< Test.Grist{moduleType <: _$1.moduleType; settingsType <: _$1.settingsType} + // + // Therefore, a type variable which has the second type as lower bound does not get + // the (hygienic) first type as new lower bound. Clearly something is wrong in the subtype + // derivation here. It would be important to figure out what. + + ??? 
+// inputGrists.map(_.module) + } + + } +} diff --git a/tests/pickling/templateParents.scala b/tests/pickling/templateParents.scala new file mode 100644 index 000000000000..153c4b4da728 --- /dev/null +++ b/tests/pickling/templateParents.scala @@ -0,0 +1,25 @@ +object templateParents { + + // traits do not call a constructor + class C[+T](val x: T) + trait D extends C[String] + trait E extends C[Int] + class F extends C[Boolean](true) { + def foo = x + } + val cd = new C("abc") with D + cd.x + +} + +object templateParents1 { + // tests inference of synthesized class type + class C[+T] + trait D extends C[String] + trait E extends C[Int] + + val x = new D with E + + val y: C[Int & String] = x +} + diff --git a/tests/pickling/traits.scala b/tests/pickling/traits.scala new file mode 100644 index 000000000000..e93ebc46b34e --- /dev/null +++ b/tests/pickling/traits.scala @@ -0,0 +1,27 @@ +package traits + +trait B extends Object { + + val z: Int + +} + +trait T extends B { + + var x = 2 + + private var xp = 2 + + val y = 3 + + private val yp = 3 + + val z = 4 + + x = 4 + + xp = 4 + +} + +class C extends T diff --git a/tests/pickling/tryTyping.scala b/tests/pickling/tryTyping.scala new file mode 100644 index 000000000000..a2aeb17c8edc --- /dev/null +++ b/tests/pickling/tryTyping.scala @@ -0,0 +1,20 @@ +object tryTyping{ + def foo: Int = { + try{???; 1} + catch { + case e: Exception => 2 + } + } + + def foo2: Int = { + val a2: (Throwable => Int) = _ match {case _ => 2} + try{???; 1} + catch a2 + } + + def foo3: Int = { + val a3: (Int => Throwable => Int) = (b: Int) => _ match {case _ => b} + try{???; 1} + catch a3(3) + } +} \ No newline at end of file diff --git a/tests/pickling/typers.scala b/tests/pickling/typers.scala new file mode 100644 index 000000000000..56007729e8b7 --- /dev/null +++ b/tests/pickling/typers.scala @@ -0,0 +1,153 @@ +package typers + +import annotation.{tailrec, switch} +import collection.mutable._ + +object typers { + + val names = List("a", "b", "c") + val ints = List(1, 2, 3) + + object Inference { + + for ((name, n) <- (names, ints).zipped) + println(name.length + n) + + def double(x: Char): String = s"$x$x" + + "abc" flatMap double + + } + object Eta { + + def fun(x: Int): Int = x + 1 + val foo = fun(_) + } + + case class DefaultParams(init: String => String = identity) + object DefaultParams { + def foo(x: String => String = identity) = x("abc") + + foo() + } + + class List[+T] { + def :: [U >: T](x: U): List[U] = new :: (x, this) + + def len: Int = this match { + case x :: xs1 => 1 + xs1.len + case Nil => 0 + } + } + + object Nil extends List[Nothing] + + case class :: [+T] (hd: T, tl: List[T]) extends List[T] + + def len[U](xs: List[U]): Int = xs match { + case x :: xs1 => 1 + len(xs1) + case Nil => 0 + } + + object returns { + + def foo(x: Int): Int = { + return 3 + } + } + + object tries { + + val x = try { + "abc" + } catch { + case ex: java.io.IOException => + 123 + } finally { + println("done") + } + + val y = try 2 catch Predef.identity + + val z = try 3 finally "abc" + + println("abc".toString) + + } + + class C { + + @tailrec final def factorial(acc: Int, n: Int): Int = (n: @switch) match { + case 0 => acc + case _ => factorial(acc * n, n - 1) + } + + println(factorial(1, 10)) + + + } + + class Refinements { + trait C { type T; def process(x: T): Int } + val y: C { type T; val key: T; def process(x: T): Int } = ??? 
+ } + + object Accessibility { + + class A { + val x: String = "abc" + } + + class B extends A { + private def x: Int = 1 + } + + val b: B = new B + val y = b.x + val z: String = y + + } + + object Self { + + class A(self1: Int) { self => + + def self1(x: Int) = x + + class B { + val b = self + val c: A = b + } + + val a = self + val c: A = a + } + + + } + + object Arrays { + + val arr = List("a", "b", "c").toArray + val i = 2 + arr(i).charAt(0) + + val x = new ArrayBuffer[String] // testing overloaded polymorphic constructors + + val entries = Array("abc", "def") + + for ((x, i) <- entries.zipWithIndex) + println(x) + } + + object SeqExtractors { + val y = names match { + case List(x, z) => x + case List(x) => x + case List() => "" + } + val yy: String = y + } + + +} diff --git a/tests/pickling/unapply.scala b/tests/pickling/unapply.scala new file mode 100644 index 000000000000..ba885be7375a --- /dev/null +++ b/tests/pickling/unapply.scala @@ -0,0 +1,11 @@ +object test { + class Foo[T](val arg : T) + + object Foo { + def unapply [a](m : Foo[a]) = Some (m.arg) + } + def matchAndGetArgFromFoo[b]( e:Foo[b]):b = {e match { case Foo(x) => x }} +// Unapply node here will have type argument [a] instantiated to scala.Nothing: +// UnApply(TypeApply(Select(Ident(Foo),unapply),List(TypeTree[TypeVar(PolyParam(a) -> TypeRef(ThisType(TypeRef(NoPrefix,scala)),Nothing))])),List(),List(Bind(x,Ident(_)))) +// but the type of the UnApply node itself is correct: RefinedType(TypeRef(ThisType(TypeRef(ThisType(TypeRef(NoPrefix,)),test$)),Foo), test$$Foo$$a, TypeAlias(TypeRef(NoPrefix,a))) +} diff --git a/tests/pickling/unions.scala b/tests/pickling/unions.scala new file mode 100644 index 000000000000..22e6391e3f57 --- /dev/null +++ b/tests/pickling/unions.scala @@ -0,0 +1,33 @@ +object unions { + + class A { + def f: String = "abc" + + def g(x: Int): Int = x + def g(x: Double): Double = x + } + + class B { + def f: String = "bcd" + + def g(x: Int) = -x + def g(x: Double): Double = -x + } + + val x: A | B = if (true) new A else new B + def y: B | A = if (true) new A else new B + println(x.f) + println(x.g(2)) + println(y.f) + println(y.g(1.0)) + println(y.g(1.0f)) + + class C { + private def foo = 0 + class D extends C { + private def foo = 1 + def test(cd: C | D, dc: D | C) = (cd.foo, dc.foo) + } + } + +} diff --git a/tests/pickling/varargs.scala b/tests/pickling/varargs.scala new file mode 100644 index 000000000000..3739636b8247 --- /dev/null +++ b/tests/pickling/varargs.scala @@ -0,0 +1,13 @@ +object varargs { + List(1, 2, 3) + def g(x: Int*) = x.length + g(1, 2, 3, 4) + val x = if (true) 1 else 2 + def foo[T](x: T, y: T): T = x + foo(1, 2) + val xs = 1 :: 2 :: Nil + g(xs: _*) + g(Nil: _*) + g(1) + g() +} \ No newline at end of file diff --git a/tests/pickling/zoo.scala b/tests/pickling/zoo.scala new file mode 100644 index 000000000000..02dac8f5bf32 --- /dev/null +++ b/tests/pickling/zoo.scala @@ -0,0 +1,41 @@ +object Test { +trait FoodStuff +trait Meat extends FoodStuff { + type IsMeat = Any +} +trait Grass extends FoodStuff { + type IsGrass = Any +} +trait Animal { + type Food <: FoodStuff + def eats(food: Food): Unit + def gets: Food +} +trait Cow extends Animal { + type IsMeat = Any + type Food <: Grass + def eats(food: Grass): Unit + def gets: Food +} +trait Lion extends Animal { + type Food = Meat + def eats(food: Meat): Unit + def gets: Meat +} +def newMeat: Meat = new Meat { +} +def newGrass: Grass = new Grass { +} +def newCow: Cow = new Cow { + type Food = Grass + def eats(food: Grass) = () + def 
gets = newGrass +} +def newLion: Lion = new Lion { + def eats(food: Meat) = () + def gets = newMeat +} +val milka = newCow +val leo = newLion +//leo.eats(milka) // structural select not supported +} diff --git a/tests/pos/flow.scala b/tests/pos/flow.scala new file mode 100644 index 000000000000..76c0d372c8a3 --- /dev/null +++ b/tests/pos/flow.scala @@ -0,0 +1,26 @@ +trait FlowOps[+Out] { + type Repr[+O] <: FlowOps[O] +} + +trait Flow[-In, +Out] extends FlowOps[Out] { + override type Repr[+O] <: Flow[In, O] + def map[T](f: Out => T): Repr[T] /* workaround: expand alias Flow[In, T] */ +} + +class Test { + def slowFlow: Unit = { + (null: Flow[String, String]) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) + .map(b => b) // takes an age to compile + } +} diff --git a/tests/pos/i262-null-subtyping.scala b/tests/pos/i262-null-subtyping.scala index 284be49e8520..5e57fcca0a8f 100644 --- a/tests/pos/i262-null-subtyping.scala +++ b/tests/pos/i262-null-subtyping.scala @@ -1,12 +1,9 @@ object O { - // This compiles - val a: { type T } = null; - val b: Any { type T } = null; + trait Base extends Any { type T } + val a: Base { type T } = null; + val b: Any with Base { type T } = null; - // This doesn't: - // found : Null - // required: AnyRef{T} - val c: AnyRef { type T } = null; + val c: AnyRef with Base { type T } = null; class A class B diff --git a/tests/pos/new-array.scala b/tests/pos/new-array.scala index 9deb2330a694..98b8345a037a 100644 --- a/tests/pos/new-array.scala +++ b/tests/pos/new-array.scala @@ -6,9 +6,10 @@ object Test { val z = new Array[Unit](10) } object Test2 { - val w: Array[String] = new Array(10) + val w: Array[Any] = new Array(10) val x: Array[Int] = new Array(10) def f[T: reflect.ClassTag]: Array[T] = new Array(10) val y: Array[Any] = new Array(10) val z: Array[Unit] = new Array(10) } + diff --git a/tests/pos/printTest.scala b/tests/pos/printTest.scala new file mode 100644 index 000000000000..81730f82bef4 --- /dev/null +++ b/tests/pos/printTest.scala @@ -0,0 +1,13 @@ +// tests printing functionality + +class C[X](x: Int, val y: String) { + +} + +object test extends C[String](1, "") { + + new C[Int](1, "") {} + + def foo(x: C[Int]) = new C[String](1, "") {} + +} diff --git a/tests/pos/structural.scala b/tests/pos/structural.scala deleted file mode 100644 index 8afa49ed0e0b..000000000000 --- a/tests/pos/structural.scala +++ /dev/null @@ -1,21 +0,0 @@ -object test123 { - type A = { def a: Int } - def f(a: A): A = a -} - -object structural2 { - type A = { def a: Int } - - type B = { - def b: Int - } - - type AB = A & B - - def f(ab: AB): AB = ab - - f(new { - def a = 43 - def b = 42 - }) -} \ No newline at end of file diff --git a/tests/pos/t1053.scala b/tests/pos/t1053.scala index 1d4dfb637e99..2c5dc1d5a9d8 100644 --- a/tests/pos/t1053.scala +++ b/tests/pos/t1053.scala @@ -1,6 +1,7 @@ trait T[A] { trait U { type W = A; val x = 3 } } +trait Base { type V } object Test { - val x : ({ type V = T[this.type] })#V = null + val x : (Base { type V = T[this.type] })#V = null val y = new x.U { } } diff --git a/tests/pos/t1957.scala b/tests/pos/t1957.scala index 711ce17deb0b..2ac7f041441e 100644 --- a/tests/pos/t1957.scala +++ b/tests/pos/t1957.scala @@ -1,3 +1,4 @@ +// See comment at end of file. 
object Test { abstract class Settings {} @@ -30,8 +31,32 @@ object Test { protected def f: List[commonModuleType] = { - val inputGrists = tools.flatMap(_.inputGrist) // val inputGrists: List[gristType] = - inputGrists.map(_.module) + val inputGrists = tools.flatMap(_.inputGrist) + // This produces an unhygienic closure for _.inputGrist. + // Pickling will log: + // + // [...] pickling reference to as yet undefined value _$1 in method $anonfun + // + // More info can be produced by uncommenting these two lines in + // Namer#valOrDefDefSig: + // + //println(i"lifting $rhsType over $paramss -> $hygienicType = ${tpt.tpe}") + //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe }) + // + // Tracing the subtype statement (over 1600+ lines!) shows that the TypeComparer thinks that the + // following subtype judgement is true: + // + // Test.Grist{ + // moduleType <: Test.Module{settingsType = Module.this.settingsType}; + // settingsType <: Module.this.settingsType + // } <:< Test.Grist{moduleType <: _$1.moduleType; settingsType <: _$1.settingsType} + // + // Therefore, a type variable which has the second type as lower bound does not get + // the (hygienic) first type as new lower bound. Clearly something is wrong in the subtype + // derivation here. It would be important to figure out what. + + ??? +// inputGrists.map(_.module) } } diff --git a/tests/pos/t252.scala b/tests/pos/t252.scala index d51b5511ebac..ac4e971062e5 100644 --- a/tests/pos/t252.scala +++ b/tests/pos/t252.scala @@ -11,6 +11,18 @@ abstract class Base { } abstract class Derived extends Base { + + val t: T = ??? + + // trying a simple dependent closure body first + def cont1[X, Y](x: X)(f: X => Y): Y = f(x) + cont1(t)(x => x.module) + + // trying an indirectly dependent closure body first + def cont2[X, Y](x: X)(f: X => Int => Y): Y = f(x)(1) + cont2(t)(x => z => x.module) + + // trying the original, harder case next def f(inputs: List[tType]): Unit = { for (t <- inputs; m = t.module) { } } diff --git a/tests/pos/t2810.scala b/tests/pos/t2810.scala deleted file mode 100644 index c85eca164aa3..000000000000 --- a/tests/pos/t2810.scala +++ /dev/null @@ -1,8 +0,0 @@ - - - - -object Test { - val closeable1: { def close(): Unit } = new scala.io.Source { val iter: Iterator[Char] = "".iterator } - val closeable2: { def close(): Unit } = new java.io.Closeable { def close() = {} } -} diff --git a/tests/pos/t5070.scala b/tests/pos/t5070.scala new file mode 100644 index 000000000000..c236b4f9ea21 --- /dev/null +++ b/tests/pos/t5070.scala @@ -0,0 +1,18 @@ +trait Web { + type LocalName +} +trait Companion1[A] +trait WebDSL[W <: Web] { + trait LocalNameCompanion extends Companion1[W#LocalName] { + type A = String + } + implicit val LocalName: LocalNameCompanion +} +object Test { + def t[W <: Web](implicit webDSL: WebDSL[W]): Unit = { + import webDSL._ + implicitly[LocalNameCompanion] // succeeds + implicitly[Companion1[W#LocalName]] // fails + } +} + diff --git a/tests/pos/typers.scala b/tests/pos/typers.scala index fe11ca6021be..7f67d2c7265a 100644 --- a/tests/pos/typers.scala +++ b/tests/pos/typers.scala @@ -88,6 +88,7 @@ object typers { } class Refinements { + trait C { type T; def process(x: T): Int } val y: C { type T; val key: T; def process(x: T): Int } = ??? 
} diff --git a/tests/pos/zoo.scala b/tests/pos/zoo.scala index 08f7eba6380f..02dac8f5bf32 100644 --- a/tests/pos/zoo.scala +++ b/tests/pos/zoo.scala @@ -1,40 +1,37 @@ object Test { -type Meat = { +trait FoodStuff +trait Meat extends FoodStuff { type IsMeat = Any } -type Grass = { +trait Grass extends FoodStuff { type IsGrass = Any } -type Animal = { - type Food +trait Animal { + type Food <: FoodStuff def eats(food: Food): Unit def gets: Food } -type Cow = { +trait Cow extends Animal { type IsMeat = Any type Food <: Grass def eats(food: Grass): Unit - def gets: Grass + def gets: Food } -type Lion = { +trait Lion extends Animal { type Food = Meat def eats(food: Meat): Unit def gets: Meat } -def newMeat: Meat = new { - type IsMeat = Any +def newMeat: Meat = new Meat { } -def newGrass: Grass = new { - type IsGrass = Any +def newGrass: Grass = new Grass { } -def newCow: Cow = new { - type IsMeat = Any +def newCow: Cow = new Cow { type Food = Grass def eats(food: Grass) = () def gets = newGrass } -def newLion: Lion = new { - type Food = Meat +def newLion: Lion = new Lion { def eats(food: Meat) = () def gets = newMeat }
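For reference, the structural-type tests touched above (t0586, t0625, t1131, t2810, pos/structural.scala, zoo.scala) are deleted, moved to neg/, or rewritten with nominal traits for one underlying reason: at this stage dotc reports errors such as "cannot access member 'p' from structural type" when a member is selected through a structural refinement. A minimal sketch of the distinction, not part of the patch and with made-up names, assuming only that structural selection is rejected while ordinary trait dispatch is accepted:

    object StructuralSketch {
      type Closeable = { def close(): Unit }      // structural refinement type
      def shut(c: Closeable): Unit = c.close()    // rejected here: structural select not supported

      trait NamedCloseable { def close(): Unit }       // nominal trait carrying the same member
      def shutT(c: NamedCloseable): Unit = c.close()   // accepted: ordinary virtual dispatch
    }

This mirrors the zoo.scala rewrite just above, where the structural Meat/Grass/Animal aliases become traits so that eats and gets can be called nominally.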