From cf794033ae42df0251fe2bc60051cb7dafb38023 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 30 Nov 2015 20:11:06 +0100 Subject: [PATCH 01/41] Drop requirement that an isRef type cannot be refined. It seems to complicate things with no real purpose. --- src/dotty/tools/dotc/core/Types.scala | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 2558dcbb7de4..61d9093bc201 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -100,7 +100,7 @@ object Types { case _ => false } - /** Is this type a (possibly aliased and/or partially applied) type reference + /** Is this type a (possibly refined or applied or aliased) type reference * to the given type symbol? * @sym The symbol to compare to. It must be a class symbol or abstract type. * It makes no sense for it to be an alias type because isRef would always @@ -113,8 +113,7 @@ object Types { case _ => this1.symbol eq sym } case this1: RefinedType => - // make sure all refinements are type arguments - this1.parent.isRef(sym) && this.argInfos.nonEmpty + this1.parent.isRef(sym) case _ => false } From 4c8db87e499e3a0f56a89ed0824b27230db32997 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 1 Dec 2015 11:08:15 +0100 Subject: [PATCH 02/41] Don't count partial applications as applications. --- src/dotty/tools/dotc/core/TypeApplications.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 661975dabce9..1c1717649f8a 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -389,7 +389,7 @@ class TypeApplications(val self: Type) extends AnyVal { else new mutable.ListBuffer[Type] } val buf = recur(self, 0) - if (buf == null) Nil else buf.toList + if (buf == null || buf.size != tparams.length) Nil else buf.toList } /** Argument types where existential types in arguments are disallowed */ From df00eb195d86a3d1425029f2f2436029a757ee5d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 1 Dec 2015 12:32:59 +0100 Subject: [PATCH 03/41] Disallow existentially bound parameters as type parameters Done in order to keep the basics as simple as possible. Treating existentially bound parameters as still instantiable type parameters does not seem to add anything fundamental, and makes the type system less regular.
--- .../tools/dotc/core/TypeApplications.scala | 13 +--------- test/dotc/tests.scala | 1 + tests/neg/partialApplications.scala | 16 +++++++++++++ tests/pickling/partialApplications.scala | 24 +++++++++++++++---- tests/pos/collections.scala | 2 +- tests/pos/partialApplications.scala | 17 ------------- 6 files changed, 39 insertions(+), 34 deletions(-) create mode 100644 tests/neg/partialApplications.scala diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 1c1717649f8a..c2738f4be234 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -69,18 +69,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (lam.exists) lam.typeParams else Nil } case self: RefinedType => - def isBoundedLambda(tp: Type): Boolean = tp match { - case tp: TypeRef => tp.typeSymbol.isLambdaTrait - case tp: RefinedType => tp.refinedName.isLambdaArgName && isBoundedLambda(tp.parent) - case _ => false - } - val tparams = self.parent.typeParams - self.refinedInfo match { - case rinfo: TypeBounds if rinfo.isAlias || isBoundedLambda(self) => - tparams.filterNot(_.name == self.refinedName) - case _ => - tparams - } + self.parent.typeParams.filterNot(_.name == self.refinedName) case self: SingletonType => Nil case self: TypeProxy => diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 0ac043daf5e0..de4705dac1e6 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -158,6 +158,7 @@ class tests extends CompilerTest { @Test def neg_moduleSubtyping = compileFile(negDir, "moduleSubtyping", xerrors = 4) @Test def neg_escapingRefs = compileFile(negDir, "escapingRefs", xerrors = 2) @Test def neg_instantiateAbstract = compileFile(negDir, "instantiateAbstract", xerrors = 8) + @Test def neg_partialApplications = compileFile(negDir, "partialApplications", xerrors = 3) @Test def neg_selfInheritance = compileFile(negDir, "selfInheritance", xerrors = 6) @Test def neg_selfreq = compileFile(negDir, "selfreq", xerrors = 3) @Test def neg_singletons = compileFile(negDir, "singletons", xerrors = 8) diff --git a/tests/neg/partialApplications.scala b/tests/neg/partialApplications.scala new file mode 100644 index 000000000000..d186273aa2e3 --- /dev/null +++ b/tests/neg/partialApplications.scala @@ -0,0 +1,16 @@ +object Test2 { + type Histogram = Map[_, Int] // this is now an existential type! 
+ + type StringlyHistogram = Histogram[_ >: String] // Error: Test2.Histogram does not take type parameters + + val xs: Histogram[String] = Map[String, Int]() // Error: Test2.Histogram does not take type parameters + + val ys: StringlyHistogram[String] = xs // Error: Test2.StringlyHistogram does not take type parameters + + val zs: StringlyHistogram = xs + + val xs1 = xs + val ys1 = ys + val zs1 = zs + +} diff --git a/tests/pickling/partialApplications.scala b/tests/pickling/partialApplications.scala index f517011b995a..c4c4328f6509 100644 --- a/tests/pickling/partialApplications.scala +++ b/tests/pickling/partialApplications.scala @@ -1,13 +1,29 @@ -object PartialApplications { +object partialApplications { - type Histogram = Map[_, Int] + type Histogram[X] = Map[X, Int] - type StringlyHistogram = Histogram[_ >: String] + type StringlyHistogram[X >: String] = Histogram[X] val xs: Histogram[String] = Map[String, Int]() val ys: StringlyHistogram[String] = xs - val zs: StringlyHistogram = xs + def e = xs + + val zs: StringlyHistogram[_] = e + + type IntMap[Y] = Map[Int, Y] + + val is = Map[Int, Boolean]() + + val js: IntMap[Boolean] = is + + val ks: IntMap[_] = is + + type RMap[X, Y] = Map[Y, X] + + val rs = Map[Int, Float]() + + val ss: RMap[Float, Int] = rs } diff --git a/tests/pos/collections.scala b/tests/pos/collections.scala index 08c3010c8605..5edcff986aab 100644 --- a/tests/pos/collections.scala +++ b/tests/pos/collections.scala @@ -10,7 +10,7 @@ object collections { val s = Set(1, 2, 3) val ss = s map (_ + 1) - val cbf: CanBuildFrom[List, Int, List[Int]] = scala.collection.immutable.List.canBuildFrom + val cbf: CanBuildFrom[List[_], Int, List[Int]] = scala.collection.immutable.List.canBuildFrom val nil = Nil val ints1 = 1 :: Nil diff --git a/tests/pos/partialApplications.scala b/tests/pos/partialApplications.scala index 696c544e78eb..fae6849fc3fc 100644 --- a/tests/pos/partialApplications.scala +++ b/tests/pos/partialApplications.scala @@ -27,20 +27,3 @@ object Test { val ss: RMap[Float, Int] = rs } - -object Test2 { - type Histogram = Map[_, Int] - - type StringlyHistogram = Histogram[_ >: String] - - val xs: Histogram[String] = Map[String, Int]() - - val ys: StringlyHistogram[String] = xs - - val zs: StringlyHistogram = xs - - val xs1 = xs - val ys1 = ys - val zs1 = zs - -} From 0fde4b4902f81f9a020dbf460925596a276ab328 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 10:56:49 +0100 Subject: [PATCH 04/41] Change hk naming scheme --- src/dotty/tools/dotc/core/Definitions.scala | 10 +++++----- src/dotty/tools/dotc/core/NameOps.scala | 15 +++++++++------ src/dotty/tools/dotc/core/StdNames.scala | 20 ++++++++++---------- 3 files changed, 24 insertions(+), 21 deletions(-) diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 4556dd9d59fc..650cfe79c7f8 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -642,9 +642,9 @@ class Definitions { * to be the type parameters of a higher-kided type). This is a class symbol that * would be generated by the following schema. * - * class LambdaXYZ extends Object with P1 with ... with Pn { - * type v_1 $hk$Arg0; ...; type v_N $hk$ArgN; - * type Apply + * trait LambdaXYZ extends Object with P1 with ... 
with Pn { + * type v_1 hk$0; ...; type v_N hk$N; + * type +$Apply * } * * Here: @@ -669,7 +669,7 @@ class Definitions { val cls = denot.asClass.classSymbol val paramDecls = newScope for (i <- 0 until vcs.length) - newTypeParam(cls, tpnme.LambdaArgName(i), varianceFlags(vcs(i)), paramDecls) + newTypeParam(cls, tpnme.hkArg(i), varianceFlags(vcs(i)), paramDecls) newTypeField(cls, tpnme.hkApply, Covariant, paramDecls) val parentTraitRefs = for (i <- 0 until vcs.length if vcs(i) != 0) @@ -679,7 +679,7 @@ class Definitions { } } - val traitName = tpnme.LambdaTraitName(vcs) + val traitName = tpnme.hkLambda(vcs) def createTrait = { val cls = newClassSymbol( diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index 1a26463473ac..7dbd492b401e 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -99,19 +99,22 @@ object NameOps { } /** Is this the name of a higher-kinded type parameter of a Lambda? */ - def isLambdaArgName = + def isHkArgName = name.length > 0 && - name.head == tpnme.LAMBDA_ARG_PREFIXhead && - name.startsWith(tpnme.LAMBDA_ARG_PREFIX) && { - val digits = name.drop(tpnme.LAMBDA_ARG_PREFIX.length) + name.head == tpnme.hkArgPrefixHead && + name.startsWith(tpnme.hkArgPrefix) && { + val digits = name.drop(tpnme.hkArgPrefixLength) digits.length <= 4 && digits.forall(_.isDigit) } /** The index of the higher-kinded type parameter with this name. * Pre: isLambdaArgName. */ - def LambdaArgIndex: Int = - name.drop(tpnme.LAMBDA_ARG_PREFIX.length).toString.toInt + def hkArgIndex: Int = + name.drop(tpnme.hkArgPrefixLength).toString.toInt + + def isLambdaTraitName(implicit ctx: Context): Boolean = + name.startsWith(tpnme.hkLambdaPrefix) /** If the name ends with $nn where nn are * all digits, strip the $ and the digits. 
diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index e8cddd3d4743..e2add1a520a4 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -173,8 +173,6 @@ object StdNames { final val WILDCARD_STAR: N = "_*" final val REIFY_TREECREATOR_PREFIX: N = "$treecreator" final val REIFY_TYPECREATOR_PREFIX: N = "$typecreator" - final val LAMBDA_ARG_PREFIX: N = "hk$" - final val LAMBDA_ARG_PREFIXhead: Char = LAMBDA_ARG_PREFIX.head final val AbstractFunction: N = "AbstractFunction" final val Any: N = "Any" @@ -314,7 +312,6 @@ object StdNames { val AnnotatedType: N = "AnnotatedType" val AppliedTypeTree: N = "AppliedTypeTree" - val hkApply: N = "$apply" val ArrayAnnotArg: N = "ArrayAnnotArg" val Constant: N = "Constant" val ConstantType: N = "ConstantType" @@ -322,7 +319,6 @@ object StdNames { val Flag : N = "Flag" val Ident: N = "Ident" val Import: N = "Import" - val LambdaPrefix: N = "Lambda$" val Literal: N = "Literal" val LiteralAnnotArg: N = "LiteralAnnotArg" val Modifiers: N = "Modifiers" @@ -530,9 +526,14 @@ object StdNames { val nothingRuntimeClass: N = "scala.runtime.Nothing$" val nullRuntimeClass: N = "scala.runtime.Null$" - val synthSwitch: N = "$synthSwitch" + val hkApply: N = "$Apply" + val hkArgPrefix: N = "$hk" + val hkLambdaPrefix: N = "Lambda$" + val hkArgPrefixHead: Char = hkArgPrefix.head + val hkArgPrefixLength: Int = hkArgPrefix.length + // unencoded operators object raw { final val AMP : N = "&" @@ -742,14 +743,13 @@ object StdNames { def syntheticTypeParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticTypeParamName)(breakOut) - def LambdaTraitName(vcs: List[Int]): TypeName = LambdaPrefix ++ vcs.map(varianceSuffix).mkString - def LambdaArgName(n: Int) = LAMBDA_ARG_PREFIX ++ n.toString - - final val Conforms = encode("<:<") + def hkLambda(vcs: List[Int]): TypeName = hkLambdaPrefix ++ vcs.map(varianceSuffix).mkString + def hkArg(n: Int): TypeName = hkArgPrefix ++ n.toString def varianceSuffix(v: Int): Char = varianceSuffixes.charAt(v + 1) - val varianceSuffixes = "NIP" + + final val Conforms = encode("<:<") } abstract class JavaNames[N <: Name] extends DefinedNames[N] { From f89b5628a66442edcc8c31193a559c6b5c32e837 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 10:58:44 +0100 Subject: [PATCH 05/41] Add missing type params in test --- src/dotty/tools/dotc/core/SymDenotations.scala | 2 +- src/dotty/tools/dotc/core/TypeApplications.scala | 8 ++++---- src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- src/dotty/tools/dotc/core/Types.scala | 12 ++++++------ src/dotty/tools/dotc/printing/RefinedPrinter.scala | 4 ++-- tests/pos/desugar.scala | 2 +- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index d7fa183c9ed7..cc201b66b04b 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -446,7 +446,7 @@ object SymDenotations { /** is this symbol a trait representing a type lambda? */ final def isLambdaTrait(implicit ctx: Context): Boolean = - isClass && name.startsWith(tpnme.LambdaPrefix) && owner == defn.ScalaPackageClass + isClass && name.startsWith(tpnme.hkLambdaPrefix) && owner == defn.ScalaPackageClass /** Is this symbol a package object or its module class? 
*/ def isPackageObject(implicit ctx: Context): Boolean = { diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index c2738f4be234..8417620f1019 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -143,7 +143,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** Is receiver type higher-kinded (i.e. of kind != "*")? */ def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK - case RefinedType(_, name) => name == tpnme.hkApply || name.isLambdaArgName + case RefinedType(_, name) => name == tpnme.hkApply || name.isHkArgName case TypeBounds(_, hi) => hi.isHK case _ => false } @@ -465,7 +465,7 @@ class TypeApplications(val self: Type) extends AnyVal { val lambda = defn.LambdaTrait(boundSyms.map(_.variance)) def toHK(tp: Type) = (rt: RefinedType) => { val argRefs = boundSyms.indices.toList.map(i => - RefinedThis(rt).select(tpnme.LambdaArgName(i))) + RefinedThis(rt).select(tpnme.hkArg(i))) val substituted = if (cycleParanoid) new ctx.SafeSubstMap(boundSyms, argRefs).apply(tp) else tp.subst(boundSyms, argRefs) @@ -576,8 +576,8 @@ class TypeApplications(val self: Type) extends AnyVal { new TypeMap { def apply(tp: Type): Type = tp match { case TypeRef(RefinedThis(binder), name) if binder eq self => - assert(name.isLambdaArgName) - RefinedThis(reduced).select(reduced.typeParams.apply(name.LambdaArgIndex)) + assert(name.isHkArgName) + RefinedThis(reduced).select(reduced.typeParams.apply(name.hkArgIndex)) case _ => mapOver(tp) } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 1d9928e2dc57..6b37227b26b7 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -669,7 +669,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { private def needsEtaLift(tp: Type, target: RefinedType): Boolean = { // if (tp.isLambda != tp.isHK) println(i"discrepancy for $tp, isLambda = ${tp.isLambda}, isHK = ${tp.isHK}") val name = target.refinedName - (name.isLambdaArgName || (name eq tpnme.hkApply)) && + (name.isHkArgName || (name eq tpnme.hkApply)) && tp.exists && !tp.isLambda // we do encounter Lambda classes without any arguments here } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 61d9093bc201..5662684551d1 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -855,7 +855,7 @@ object Types { object instantiate extends TypeMap { var isSafe = true def apply(tp: Type): Type = tp match { - case TypeRef(RefinedThis(`pre`), name) if name.isLambdaArgName => + case TypeRef(RefinedThis(`pre`), name) if name.isHkArgName => member(name).info match { case TypeAlias(alias) => alias case _ => isSafe = false; tp @@ -868,7 +868,7 @@ object Types { } } def instArg(tp: Type): Type = tp match { - case tp @ TypeAlias(TypeRef(RefinedThis(`pre`), name)) if name.isLambdaArgName => + case tp @ TypeAlias(TypeRef(RefinedThis(`pre`), name)) if name.isHkArgName => member(name).info match { case TypeAlias(alias) => tp.derivedTypeAlias(alias) // needed to keep variance case bounds => bounds @@ -1899,15 +1899,15 @@ object Types { private def checkInst(implicit ctx: Context): this.type = { if (Config.checkLambdaVariance) refinedInfo match { - case refinedInfo: TypeBounds if refinedInfo.variance != 0 && refinedName.isLambdaArgName => 
+ case refinedInfo: TypeBounds if refinedInfo.variance != 0 && refinedName.isHkArgName => val cls = parent.LambdaClass(forcing = false) if (cls.exists) - assert(refinedInfo.variance == cls.typeParams.apply(refinedName.LambdaArgIndex).variance, - s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.LambdaArgIndex).variance}, ${refinedInfo.variance}") + assert(refinedInfo.variance == cls.typeParams.apply(refinedName.hkArgIndex).variance, + s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.hkArgIndex).variance}, ${refinedInfo.variance}") case _ => } if (Config.checkProjections && - (refinedName == tpnme.hkApply || refinedName.isLambdaArgName) && + (refinedName == tpnme.hkApply || refinedName.isHkArgName) && parent.noHK) assert(false, s"illegal refinement of first-order type: $this") this diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 06fe0c9eff17..04c9252eb737 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -190,14 +190,14 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { // LambdaI{...}.HK$i val simplifyArgs = new TypeMap { override def apply(tp: Type) = tp match { - case tp @ TypeRef(RefinedThis(_), name) if name.isLambdaArgName => + case tp @ TypeRef(RefinedThis(_), name) if name.isHkArgName => TypeRef(NoPrefix, tp.symbol.asType) case _ => mapOver(tp) } } (parent, simplifyArgs(tp.refinedInfo.followTypeAlias), Nil) - } else if (name.isLambdaArgName) { + } else if (name.isHkArgName) { val (prefix, body, argBindings) = decomposeHKApply(parent) (prefix, body, (name, tp.refinedInfo) :: argBindings) } else (tp, NoType, Nil) diff --git a/tests/pos/desugar.scala b/tests/pos/desugar.scala index 0d3b6d8ca624..cc679498578f 100644 --- a/tests/pos/desugar.scala +++ b/tests/pos/desugar.scala @@ -72,7 +72,7 @@ object desugar { object misc { 'hello s"this is a $x + ${x + y} string" - type ~ = Tuple2 + type ~[X, Y] = Tuple2[X, Y] val pair: Int ~ String = 1 -> "abc" def foo(xs: Int*) = xs.length foo(list: _*) From 7eeb5657497536ebafd7bbf80ea74fbc805a3b77 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 13:13:19 +0100 Subject: [PATCH 06/41] Extractors and other new functionality for type applications --- .../tools/dotc/core/TypeApplications.scala | 205 ++++++++++++++++++ 1 file changed, 205 insertions(+) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 8417620f1019..3ce1c1cc4ec2 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -36,6 +36,140 @@ object TypeApplications { case tp: TypeBounds => tp.hi case _ => tp } + + /** Extractor for + * + * [v1 X1: B1, ..., vn Xn: Bn] -> T + * ==> + * Lambda$_v1...vn { type $hk_i: B_i, type $Apply = [X_i := this.$Arg_i] T } + */ + object TypeLambda { + def apply(variances: List[Int], + argBoundss: List[TypeBounds], + bodyFn: RefinedType => Type)(implicit ctx: Context): Type = { + def argRefinements(parent: Type, i: Int, bs: List[TypeBounds]): Type = bs match { + case b :: bs1 => + argRefinements(RefinedType(parent, tpnme.hkArg(i), b), i + 1, bs1) + case nil => + parent + } + RefinedType( + argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundss), + tpnme.hkApply, rt => bodyFn(rt).bounds) + } + + def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], 
List[TypeBounds], Type)] = tp match { + case app @ RefinedType(prefix, tpnme.hkApply) => + val cls = prefix.classSymbol + val variances = cls.typeParams.map(_.variance) + val argBounds = prefix.argInfos.map(_.bounds) + Some((variances, argBounds, app.refinedInfo)) + case TypeBounds(lo, hi) => + unapply(hi) + case _ => + None + } + } + + /** Extractor for + * + * [v1 X1: B1, ..., vn Xn: Bn] -> C[X1, ..., Xn] + * + * where v1, ..., vn and B1, ..., Bn are the variances and bounds of the type parameters + * of the class C. + * + * @param tycon C + */ + object EtaExpansion { + def apply(tycon: TypeRef)(implicit ctx: Context) = { + assert(tycon.isEtaExpandable) + val tparams = tycon.typeParams + val variances = tycon.typeParams.map(_.variance) + TypeLambda(tparams.map(_.variance), tycon.paramBounds, + rt => tycon.appliedTo(tparams.map(RefinedThis(rt).select(_)))) + } + + def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = { + def argsAreForwarders(args: List[Type], n: Int): Boolean = args match { + case TypeRef(RefinedThis(rt), sel) :: args1 => + rt.eq(tp) && sel == tpnme.hkArg(n) && argsAreForwarders(args1, n - 1) + case nil => + n == 0 + } + tp match { + case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args)) + if argsAreForwarders(args, tp.typeParams.length - 1) => Some(fn) + case _ => None + } + } + } + + /** Extractor for type application T[U_1, ..., U_n]. This is the refined type + * + * T { type p_1 v_1= U_1; ...; type p_n v_n= U_n } + * + * where v_i, p_i are the variances and names of the type parameters of T, + * If `T`'s class symbol is a lambda trait, follow the refined type with a + * projection + * + * T { ... } # $Apply + */ + object AppliedType { + def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = { + def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { + case arg :: args1 => + try { + val tparam :: tparams1 = tparams + matchParams(RefinedType(tp, tparam.name, arg.toBounds(tparam)), tparams1, args1) + } catch { + case ex: MatchError => + println(s"applied type mismatch: $tp $args, typeParams = ${tp.typeSymbol.typeParams}") // !!! 
DEBUG + println(s"precomplete decls = ${tp.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") + throw ex + } + case nil => tp + } + assert(args.nonEmpty) + val cls = tp.classSymbol + val refined = matchParams(tp, cls.typeParams, args) + if (cls.isLambdaTrait) TypeRef(refined, tpnme.hkApply) else refined + } + + def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { + case TypeRef(prefix, tpnme.hkApply) => unapp(prefix) + case _ => unapp(tp) + } + + private def unapp(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { + case _: RefinedType => + val tparams = tp.typeSymbol.typeParams + if (tparams.isEmpty) None + else { + val argBuf = new mutable.ListBuffer[Type] + def stripArgs(tp: Type, n: Int): Type = + if (n == 0) tp + else tp match { + case tp @ RefinedType(parent, pname) if pname == tparams(n - 1).name => + val res = stripArgs(parent, n - 1) + if (res.exists) argBuf += tp.refinedInfo.argInfo + res + case _ => + NoType + } + val res = stripArgs(tp, tparams.length) + if (res.exists) Some((res, argBuf.toList)) else None + } + case _ => None + } + } + + /** Adapt all arguments to possible higher-kinded type parameters using adaptIfHK + */ + def adaptArgs(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = + args.zipWithConserve(tparams)((arg, tparam) => arg.adaptIfHK(tparam.infoOrCompleter)) + + def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) = + List.range(0, n).map(i => RefinedThis(rt).select(tpnme.hkArg(i))) } import TypeApplications._ @@ -88,6 +222,77 @@ class TypeApplications(val self: Type) extends AnyVal { * Third, it won't return abstract higher-kinded type parameters, i.e. the type parameters of * an abstract type are always empty. */ + final def hkTypeParams(implicit ctx: Context): List[TypeSymbol] = self match { + case TypeLambda(_, _, body) => self.typeSymbol.typeParams.head :: body.hkTypeParams + case TypeBounds(lo, hi) => hi.hkTypeParams + case _ => Nil + } + + final def paramBounds(implicit ctx: Context): List[TypeBounds] = + typeParams.map(self.memberInfo(_).bounds) + + def LambdaTrait(implicit ctx: Context) = { + def skipArgs(tp: Type): Type = tp match { + case RefinedType(parent, pname) if pname.isHkArgName => skipArgs(parent) + case _ => tp + } + self.stripTypeVar match { + case RefinedType(parent, tpnme.hkApply) => skipArgs(parent).stripTypeVar match { + case ref @ TypeRef(_, lam) if lam.isLambdaTraitName => ref.symbol + case _ => NoSymbol + } + case _ => NoSymbol + } + } + + def isEtaExpandable(implicit ctx: Context) = self match { + case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitName + case _ => false + } + + /** Adapt argument A to type parameter P in the case P is higher-kinded. + * This means: + * (1) Make sure that A is a type lambda, if necessary by eta-expanding it. + * (2) Make sure the variances of the type lambda + * agrees with variances of corresponding higherkinded type parameters. Example: + * + * class Companion[+CC[X]] + * Companion[List] + * + * with adaptArgs, this will expand to + * + * Companion[[X] => List[X]] + * + * instead of + * + * Companion[[+X] => List[X]] + * + * even though `List` is covariant. This adaptation is necessary to ignore conflicting + * variances in overriding members that have types of hk-type parameters such as `Companion[GenTraversable]` + * or `Companion[ListBuffer]`. 
Without the adaptation we would end up with + * + * Companion[[+X] => GenTraversable[X]] + * Companion[[X] => List[X]] + * + * and the second is not a subtype of the first. So if we have overridding memebrs of the two + * types we get an error. + */ + def adaptIfHK(bound: Type)(implicit ctx: Context): Type = { + val boundLambda = bound.LambdaTrait + val hkParams = boundLambda.typeParams + if (hkParams.isEmpty) self + else self match { + case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length => + EtaExpansion(self).adaptIfHK(bound) + case arg: TypeRef + if arg.symbol.isLambdaTrait && + !arg.symbol.typeParams.corresponds(boundLambda.typeParams)(_.variance == _.variance) => + arg.prefix.select(boundLambda) + case _ => + self + } + } + final def rawTypeParams(implicit ctx: Context): List[TypeSymbol] = { self match { case self: ClassInfo => From e63f0585c7f120dbc8d9965d8729e831efd2f2ae Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 13:21:43 +0100 Subject: [PATCH 07/41] Use typeapp extractors for printing --- .../tools/dotc/core/TypeApplications.scala | 5 +- .../tools/dotc/printing/PlainPrinter.scala | 8 +- .../tools/dotc/printing/RefinedPrinter.scala | 105 +++++------------- 3 files changed, 38 insertions(+), 80 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 3ce1c1cc4ec2..1d11645a4147 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -53,6 +53,8 @@ object TypeApplications { case nil => parent } + assert(variances.nonEmpty) + assert(argBoundss.length == variances.length) RefinedType( argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundss), tpnme.hkApply, rt => bodyFn(rt).bounds) @@ -60,7 +62,8 @@ object TypeApplications { def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match { case app @ RefinedType(prefix, tpnme.hkApply) => - val cls = prefix.classSymbol + println(s"type lam $tp") + val cls = prefix.typeSymbol val variances = cls.typeParams.map(_.variance) val argBounds = prefix.argInfos.map(_.bounds) Some((variances, argBounds, app.refinedInfo)) diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 0d925a27dc7e..4f92f836c572 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -50,8 +50,10 @@ class PlainPrinter(_ctx: Context) extends Printer { case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2) case _ => - val tp1 = tp.simplifyApply + tp + /*val tp1 = tp.simplifyApply if (tp1 eq tp) tp else homogenize(tp1) + */ } else tp @@ -344,7 +346,9 @@ class PlainPrinter(_ctx: Context) extends Printer { Text(sym.flagsUNSAFE.flagStrings map stringToText, " ") /** String representation of symbol's variance or "" if not applicable */ - protected def varianceString(sym: Symbol): String = sym.variance match { + protected def varianceString(sym: Symbol): String = varianceString(sym.variance) + + protected def varianceString(v: Int): String = v match { case -1 => "-" case 1 => "+" case _ => "" diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 04c9252eb737..e80b7d48d108 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -10,6 +10,8 @@ import ast.{Trees, untpd, tpd} import 
typer.Namer import typer.ProtoTypes.{SelectionProto, ViewProto, FunProto, IgnoredProto, dummyTreeOfType} import Trees._ +import TypeApplications._ +import Decorators._ import scala.annotation.switch import language.implicitConversions @@ -108,26 +110,23 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { argStr ~ " => " ~ toText(args.last) } homogenize(tp) match { - case tp: RefinedType => - val args = tp.argInfos - if (args.nonEmpty) { - val tycon = tp.unrefine - val cls = tycon.typeSymbol - if (cls.typeParams.length == args.length) { - if (tycon.isRepeatedParam) return toTextLocal(args.head) ~ "*" - if (defn.isFunctionClass(cls)) return toTextFunction(args) - if (defn.isTupleClass(cls)) return toTextTuple(args) - } - return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close - } - if (tp.isSafeLambda) { - val (prefix, body, bindings) = decomposeHKApply(tp) - prefix match { - case prefix: TypeRef if prefix.symbol.isLambdaTrait && body.exists => - return typeLambdaText(prefix.symbol, body, bindings) - case _ => + case AppliedType(tycon, args) => + val cls = tycon.typeSymbol + if (tycon.isRepeatedParam) return toTextLocal(args.head) ~ "*" + if (defn.isFunctionClass(cls)) return toTextFunction(args) + if (defn.isTupleClass(cls)) return toTextTuple(args) + return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close + case TypeLambda(variances, argBoundss, body) => + val paramNames = variances.indices.toList.map("X" + _) + val instantiate = new TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(RefinedThis(rt), name) if name.isHkArgName && rt.eq(tp) => + TypeRef(NoPrefix, paramNames(name.hkArgIndex).toTypeName) + case _ => mapOver(tp) } } + return typeLambdaText(paramNames, variances, argBoundss, + instantiate(body).argInfo) case tp: TypeRef => val hideType = tp.symbol is AliasPreferred if (hideType && !ctx.phase.erasedTypes && !tp.symbol.isCompleting) { @@ -167,71 +166,23 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def blockText[T >: Untyped](trees: List[Tree[T]]): Text = "{" ~ toText(trees, "\n") ~ "}" - /** If type `tp` represents a potential type Lambda of the form - * - * parent { type Apply = body; argBindings? } - * - * split it into - * - * - the `parent` - * - the simplified `body` - * - the bindings HK$ members, if there are any - * - * The body is simplified as follows - * - if it is a TypeAlias, follow it - * - replace all references to of the form .HK$i by references - * without a prefix, because the latter print nicer. - * - */ - def decomposeHKApply(tp: Type): (Type, Type, List[(Name, Type)]) = tp.stripTypeVar match { - case tp @ RefinedType(parent, name) => - if (name == tpnme.hkApply) { - // simplify arguments so that parameters just print HK$i and not - // LambdaI{...}.HK$i - val simplifyArgs = new TypeMap { - override def apply(tp: Type) = tp match { - case tp @ TypeRef(RefinedThis(_), name) if name.isHkArgName => - TypeRef(NoPrefix, tp.symbol.asType) - case _ => - mapOver(tp) - } - } - (parent, simplifyArgs(tp.refinedInfo.followTypeAlias), Nil) - } else if (name.isHkArgName) { - val (prefix, body, argBindings) = decomposeHKApply(parent) - (prefix, body, (name, tp.refinedInfo) :: argBindings) - } else (tp, NoType, Nil) - case _ => - (tp, NoType, Nil) - } - /** The text for a TypeLambda * - * LambdaXYZ { type Apply = body'; bindings? 
} + * [v_1 p_1: B_1, ..., v_n p_n: B_n] -> T * * where - * @param lambdaCls The class symbol for `LambdaXYZ` - * @param body The simplified lambda body - * @param bindings The bindings of any HK$i arguments - * - * @return A text of the form - * - * [HK$0, ..., HK$n] => body - * - * possibly followed by bindings - * - * [HK$i = arg_i, ..., HK$k = arg_k] + * @param paramNames = p_1, ..., p_n + * @param variances = v_1, ..., v_n + * @param argBoundss = B_1, ..., B_n + * @param body = T */ - def typeLambdaText(lambdaCls: Symbol, body: Type, bindings: List[(Name, Type)]): Text = { - def lambdaParamText(tparam: Symbol): Text = { - varianceString(tparam) ~ nameString(tparam.name) - } - def lambdaText = changePrec(GlobalPrec) { - "[" ~ Text(lambdaCls.typeParams.map(lambdaParamText), ", ") ~ "] => " ~ toTextGlobal(body) + def typeLambdaText(paramNames: List[String], variances: List[Int], argBoundss: List[TypeBounds], body: Type): Text = { + def lambdaParamText(variance: Int, name: String, bounds: TypeBounds): Text = + varianceString(variance) ~ name ~ toText(bounds) + changePrec(GlobalPrec) { + "[" ~ Text((variances, paramNames, argBoundss).zipped.map(lambdaParamText), ", ") ~ + "] -> " ~ toTextGlobal(body) } - def bindingText(binding: (Name, Type)) = binding._1.toString ~ toTextGlobal(binding._2) - if (bindings.isEmpty) lambdaText - else atPrec(DotPrec)(lambdaText) ~ "[" ~ Text(bindings.map(bindingText), ", ") ~ "]" } override def toText[T >: Untyped](tree: Tree[T]): Text = controlled { From e14be2d90f330835af86e85b2bca97a6d1b7cf06 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 14:06:18 +0100 Subject: [PATCH 08/41] Fix adaptIfHK and drop rawTypeParams --- .../tools/dotc/core/TypeApplications.scala | 34 ++++++------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 1d11645a4147..0cf40d818340 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -287,30 +287,18 @@ class TypeApplications(val self: Type) extends AnyVal { else self match { case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length => EtaExpansion(self).adaptIfHK(bound) - case arg: TypeRef - if arg.symbol.isLambdaTrait && - !arg.symbol.typeParams.corresponds(boundLambda.typeParams)(_.variance == _.variance) => - arg.prefix.select(boundLambda) case _ => - self - } - } - - final def rawTypeParams(implicit ctx: Context): List[TypeSymbol] = { - self match { - case self: ClassInfo => - self.cls.typeParams - case self: TypeRef => - val tsym = self.typeSymbol - if (tsym.isClass) tsym.typeParams - else if (tsym.isAliasType) self.underlying.rawTypeParams - else Nil - case _: BoundType | _: SingletonType => - Nil - case self: TypeProxy => - self.underlying.rawTypeParams - case _ => - Nil + def adaptArg(arg: Type): Type = arg match { + case arg: TypeRef + if arg.symbol.isLambdaTrait && + !arg.symbol.typeParams.corresponds(boundLambda.typeParams)(_.variance == _.variance) => + arg.prefix.select(boundLambda) + case arg: RefinedType => + arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo) + case _ => + arg + } + adaptArg(self) } } From 67d28339d91f912c2894a05110d713f077458feb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 14:13:15 +0100 Subject: [PATCH 09/41] Drop redundant check in beta reduce --- src/dotty/tools/dotc/core/Types.scala | 10 ++++------ 1 file 
changed, 4 insertions(+), 6 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 5662684551d1..84d74b0f4196 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -883,12 +883,10 @@ object Types { instantiate(tp) } /** Reduce rhs of $hkApply to make it stand alone */ - def betaReduce(tp: Type) = - if (pre.parent.isSafeLambda) { - val reduced = instTop(tp) - if (instantiate.isSafe) reduced else NoType - } - else NoType + def betaReduce(tp: Type) = { + val reduced = instTop(tp) + if (instantiate.isSafe) reduced else NoType + } pre.refinedInfo match { case TypeAlias(alias) => if (pre.refinedName ne name) loop(pre.parent) From 2abcd02d2a2067ba78430262664f5d3ab9b61d00 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 15:42:56 +0100 Subject: [PATCH 10/41] Use new adaptArgs for Scala2Unpickler Also: fix adaptArgs and LambdaTrait to make it work. --- .../tools/dotc/core/TypeApplications.scala | 22 ++++----- .../core/unpickleScala2/Scala2Unpickler.scala | 46 +------------------ 2 files changed, 10 insertions(+), 58 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 0cf40d818340..b1ffe0169c4e 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -169,7 +169,8 @@ object TypeApplications { /** Adapt all arguments to possible higher-kinded type parameters using adaptIfHK */ def adaptArgs(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = - args.zipWithConserve(tparams)((arg, tparam) => arg.adaptIfHK(tparam.infoOrCompleter)) + if (tparams.isEmpty) args + else args.zipWithConserve(tparams)((arg, tparam) => arg.adaptIfHK(tparam.infoOrCompleter)) def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) = List.range(0, n).map(i => RefinedThis(rt).select(tpnme.hkArg(i))) @@ -234,18 +235,13 @@ class TypeApplications(val self: Type) extends AnyVal { final def paramBounds(implicit ctx: Context): List[TypeBounds] = typeParams.map(self.memberInfo(_).bounds) - def LambdaTrait(implicit ctx: Context) = { - def skipArgs(tp: Type): Type = tp match { - case RefinedType(parent, pname) if pname.isHkArgName => skipArgs(parent) - case _ => tp - } - self.stripTypeVar match { - case RefinedType(parent, tpnme.hkApply) => skipArgs(parent).stripTypeVar match { - case ref @ TypeRef(_, lam) if lam.isLambdaTraitName => ref.symbol - case _ => NoSymbol - } - case _ => NoSymbol - } + /** The Lambda trait underlying a type lambda */ + def LambdaTrait(implicit ctx: Context): Symbol = self.stripTypeVar match { + case RefinedType(parent, tpnme.hkApply) => + val sym = self.classSymbol + if (sym.isLambdaTrait) sym else NoSymbol + case TypeBounds(lo, hi) => hi.LambdaTrait + case _ => NoSymbol } def isEtaExpandable(implicit ctx: Context) = self match { diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index cf7b487bbf85..747d73ea99f9 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -21,6 +21,7 @@ import typer.Mode import PickleBuffer._ import scala.reflect.internal.pickling.PickleFormat._ import Decorators._ +import TypeApplications._ import classfile.ClassfileParser import scala.collection.{ mutable, immutable } import scala.collection.mutable.ListBuffer @@ -142,51 
+143,6 @@ object Scala2Unpickler { denot.info = ClassInfo( // final info denot.owner.thisType, denot.classSymbol, parentRefs, declsInRightOrder, ost) } - - /** Adapt arguments to type parameters so that variance of type lambda arguments - * agrees with variance of corresponding higherkinded type parameters. Example: - * - * class Companion[+CC[X]] - * Companion[List] - * - * with adaptArgs, this will expand to - * - * Companion[[X] => List[X]] - * - * instead of - * - * Companion[[+X] => List[X]] - * - * even though `List` is covariant. This adaptation is necessary to ignore conflicting - * variances in overriding members that have types of hk-type parameters such as `Companion[GenTraversable]` - * or `Companion[ListBuffer]`. Without the adaptation we would end up with - * - * Companion[[+X] => GenTraversable[X]] - * Companion[[X] => List[X]] - * - * and the second is not a subtype of the first. So if we have overridding memebrs of the two - * types we get an error. - */ - def adaptArgs(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = tparams match { - case tparam :: tparams1 => - val boundLambda = tparam.infoOrCompleter match { - case TypeBounds(_, hi) => hi.LambdaClass(forcing = false) - case _ => NoSymbol - } - def adaptArg(arg: Type): Type = arg match { - case arg: TypeRef if arg.symbol.isLambdaTrait => - assert(arg.symbol.typeParams.length == boundLambda.typeParams.length) - arg.prefix.select(boundLambda) - case arg: RefinedType => - arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo) - case _ => - arg - } - val arg = args.head - val adapted = if (boundLambda.exists) adaptArg(arg) else arg - adapted :: adaptArgs(tparams1, args.tail) - case nil => args - } } /** Unpickle symbol table information descending from a class and/or module root From 73bf06b4735b8a74c99ed185e52ae3350181f426 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 16:00:07 +0100 Subject: [PATCH 11/41] More uses of adaptIfHK Also: fix EtaExpansion. Also: Add some debug code to Applications, awaiting further fixes. 
--- .../tools/dotc/core/TypeApplications.scala | 3 +- src/dotty/tools/dotc/typer/Applications.scala | 8 +++-- src/dotty/tools/dotc/typer/Namer.scala | 30 +++++++++---------- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index b1ffe0169c4e..618dd35e8333 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -62,7 +62,6 @@ object TypeApplications { def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match { case app @ RefinedType(prefix, tpnme.hkApply) => - println(s"type lam $tp") val cls = prefix.typeSymbol val variances = cls.typeParams.map(_.variance) val argBounds = prefix.argInfos.map(_.bounds) @@ -89,7 +88,7 @@ object TypeApplications { val tparams = tycon.typeParams val variances = tycon.typeParams.map(_.variance) TypeLambda(tparams.map(_.variance), tycon.paramBounds, - rt => tycon.appliedTo(tparams.map(RefinedThis(rt).select(_)))) + rt => tycon.appliedTo(argRefs(rt, tparams.length))) } def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = { diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index 4ca9c39af5b3..e8ba3b07b4c1 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -613,8 +613,12 @@ trait Applications extends Compatibility { self: Typer => assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } - def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = - tree.withType(tree.tpe.EtaExpandIfHK(bound)) + def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = { + val was = tree.tpe.EtaExpandIfHK(bound) + //val now = tree.tpe.adaptIfHK(bound) // ### + //if (was != now) println(i"diff adapt ${tree.tpe} to $bound, was: $was, now: $now") + tree.withType(was)//tree.tpe.adaptIfHK(bound)) + } /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. */ def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 5eebdbad1cc8..eecbd13474df 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -18,6 +18,7 @@ import config.Printers._ import Annotations._ import Inferencing._ import transform.ValueClasses._ +import TypeApplications._ import language.implicitConversions trait NamerContextOps { this: Context => @@ -840,26 +841,23 @@ class Namer { typer: Typer => /** Eta expand all class types C appearing as arguments to a higher-kinded * type parameter to type lambdas, e.g. [HK0] => C[HK0]. This is necessary - * because in `typedAppliedTypeTree` we might ahve missed some eta expansions + * because in `typedAppliedTypeTree` we might have missed some eta expansions * of arguments in F-bounds, because the recursive type was initialized with * TypeBounds.empty. */ + // ### Check whether this is still needed! 
def etaExpandArgs(implicit ctx: Context) = new TypeMap { - def apply(tp: Type): Type = { - tp match { - case tp: RefinedType => - val args = tp.argInfos.mapconserve(this) - if (args.nonEmpty) { - val tycon = tp.withoutArgs(args) - val tparams = tycon.typeParams - if (args.length == tparams.length) { // if lengths differ, problem is caught in typedTypeApply - val args1 = args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.info)) - if (args1 ne args) return this(tycon).appliedTo(args1) - } - } - case _ => - } - mapOver(tp) + def apply(tp: Type): Type = tp match { + case tp: RefinedType => + val args = tp.argInfos.mapconserve(this) + if (args.nonEmpty) { + val tycon = tp.withoutArgs(args) + val tycon1 = this(tycon) + val tparams = tycon.typeParams + val args1 = if (args.length == tparams.length) adaptArgs(tparams, args) else args + if ((tycon1 eq tycon) && (args1 eq args)) tp else tycon1.appliedTo(args1) + } else mapOver(tp) + case _ => mapOver(tp) } } } From 91a865cf9e5fff097249b3f14ced9deb0006fafe Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 17:29:44 +0100 Subject: [PATCH 12/41] Base Application extractor on classSymbols typeSymbols always have empty type parameter list. --- src/dotty/tools/dotc/core/TypeApplications.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 618dd35e8333..3cd9727e72f2 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -125,7 +125,7 @@ object TypeApplications { matchParams(RefinedType(tp, tparam.name, arg.toBounds(tparam)), tparams1, args1) } catch { case ex: MatchError => - println(s"applied type mismatch: $tp $args, typeParams = ${tp.typeSymbol.typeParams}") // !!! DEBUG + println(s"applied type mismatch: $tp $args, typeParams = ${tp.classSymbol.typeParams}") // !!! DEBUG println(s"precomplete decls = ${tp.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") throw ex } @@ -144,7 +144,7 @@ object TypeApplications { private def unapp(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { case _: RefinedType => - val tparams = tp.typeSymbol.typeParams + val tparams = tp.classSymbol.typeParams if (tparams.isEmpty) None else { val argBuf = new mutable.ListBuffer[Type] From 261087a97fc981dbc5cd2c57eab736aca668c3bc Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 2 Dec 2015 17:30:19 +0100 Subject: [PATCH 13/41] Fix problem with printing lambdas. Previous implementation died because TermRef had no denotation. 
--- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index e80b7d48d108..dd485ad60669 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -119,10 +119,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case TypeLambda(variances, argBoundss, body) => val paramNames = variances.indices.toList.map("X" + _) val instantiate = new TypeMap { - def apply(tp: Type): Type = tp match { + def apply(t: Type): Type = t match { case TypeRef(RefinedThis(rt), name) if name.isHkArgName && rt.eq(tp) => - TypeRef(NoPrefix, paramNames(name.hkArgIndex).toTypeName) - case _ => mapOver(tp) + TypeRef.withFixedSym( + NoPrefix, paramNames(name.hkArgIndex).toTypeName, defn.AnyClass) + case _ => mapOver(t) } } return typeLambdaText(paramNames, variances, argBoundss, From 56e63ed4b081c0e4696a475aa4a468baa6491e4a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Dec 2015 09:45:54 +0100 Subject: [PATCH 14/41] Don't recognize type bounds as lambda abstractions Printing bounds omits the "<:" otherwise. --- src/dotty/tools/dotc/core/TypeApplications.scala | 2 -- src/dotty/tools/dotc/printing/PlainPrinter.scala | 11 ++++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 3cd9727e72f2..c6abc9360c4a 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -66,8 +66,6 @@ object TypeApplications { val variances = cls.typeParams.map(_.variance) val argBounds = prefix.argInfos.map(_.bounds) Some((variances, argBounds, app.refinedInfo)) - case TypeBounds(lo, hi) => - unapply(hi) case _ => None } diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 4f92f836c572..857a32cccdc3 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -266,12 +266,13 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp) match { case tp @ TypeBounds(lo, hi) => if (lo eq hi) { - val eql = - if (tp.variance == 1) " =+ " - else if (tp.variance == -1) " =- " - else " = " + val eql = + if (tp.variance == 1) " =+ " + else if (tp.variance == -1) " =- " + else " = " eql ~ toText(lo) - } else + } + else (if (lo isRef defn.NothingClass) Text() else " >: " ~ toText(lo)) ~ (if (hi isRef defn.AnyClass) Text() else " <: " ~ toText(hi)) case tp @ ClassInfo(pre, cls, cparents, decls, selfInfo) => From 05c0e55850a5b086e38d3a54e937106f727fbe8e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Dec 2015 10:17:28 +0100 Subject: [PATCH 15/41] Fix bug computing typeParams for hk types Arg bounds do not count is bindings. 
--- src/dotty/tools/dotc/core/TypeApplications.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index c6abc9360c4a..5b34fe662d98 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -204,7 +204,9 @@ class TypeApplications(val self: Type) extends AnyVal { if (lam.exists) lam.typeParams else Nil } case self: RefinedType => - self.parent.typeParams.filterNot(_.name == self.refinedName) + val hkParams = self.hkTypeParams + if (hkParams.nonEmpty) hkParams + else self.parent.typeParams.filterNot(_.name == self.refinedName) case self: SingletonType => Nil case self: TypeProxy => From 8db7a9b8914e0f66207db9d998f1186f8e195f85 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Dec 2015 10:31:15 +0100 Subject: [PATCH 16/41] Fix bug computing typeParams for hk types Arg bounds do not count is bindings. Also: TypeLambda's $Apply binding should be covariant, because the parameter is (not sure it matters though). --- src/dotty/tools/dotc/core/TypeApplications.scala | 2 +- src/dotty/tools/dotc/core/Types.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 5b34fe662d98..5f7b730d2b52 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -57,7 +57,7 @@ object TypeApplications { assert(argBoundss.length == variances.length) RefinedType( argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundss), - tpnme.hkApply, rt => bodyFn(rt).bounds) + tpnme.hkApply, rt => TypeAlias(bodyFn(rt), 1)) } def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match { diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 84d74b0f4196..4af4d0c14101 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1929,7 +1929,7 @@ object Types { false } override def computeHash = doHash(refinedName, refinedInfo, parent) - override def toString = s"RefinedType($parent, $refinedName, $refinedInfo | $hashCode)" // !!! TODO: remove + override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)" } class CachedRefinedType(parent: Type, refinedName: Name, infoFn: RefinedType => Type) extends RefinedType(parent, refinedName) { From 130c24c233e0b2b41be8e11ece1bd4cd6ebff00f Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Dec 2015 16:12:11 +0100 Subject: [PATCH 17/41] Fix to TypeLambda over with a TypeBounds body Taking typeAlias is illegal in that case. 
--- src/dotty/tools/dotc/core/TypeApplications.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 5f7b730d2b52..d45e11d59b3d 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -57,7 +57,7 @@ object TypeApplications { assert(argBoundss.length == variances.length) RefinedType( argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundss), - tpnme.hkApply, rt => TypeAlias(bodyFn(rt), 1)) + tpnme.hkApply, bodyFn(_).bounds.withVariance(1)) } def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match { From 1fe3310c2008a29f9e3cf9e067005bf52527a0d3 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Dec 2015 16:41:34 +0100 Subject: [PATCH 18/41] Fixes to TypeLambda and EtaExpansion extractors. Makes i815 compile. --- .../tools/dotc/core/TypeApplications.scala | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index d45e11d59b3d..4c9746c39096 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -65,7 +65,7 @@ object TypeApplications { val cls = prefix.typeSymbol val variances = cls.typeParams.map(_.variance) val argBounds = prefix.argInfos.map(_.bounds) - Some((variances, argBounds, app.refinedInfo)) + Some((variances, argBounds, app.refinedInfo.argInfo)) case _ => None } @@ -91,14 +91,22 @@ object TypeApplications { def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = { def argsAreForwarders(args: List[Type], n: Int): Boolean = args match { - case TypeRef(RefinedThis(rt), sel) :: args1 => - rt.eq(tp) && sel == tpnme.hkArg(n) && argsAreForwarders(args1, n - 1) - case nil => + case Nil => n == 0 + case TypeRef(RefinedThis(rt), sel) :: args1 => + rt.eq(tp) && sel == tpnme.hkArg(n - 1) && argsAreForwarders(args1, n - 1) + case _ => + false } tp match { case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args)) - if argsAreForwarders(args, tp.typeParams.length - 1) => Some(fn) + if argsAreForwarders(args, tp.typeParams.length) => Some(fn) + //case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args)) => + // println(i"eta expansion failed because args $args are not forwarders for ${tp.toString}") + // None + //case TypeLambda(_, argBounds, _) => + // println(i"eta expansion failed because body is not applied type") + // None case _ => None } } From 8cd50f7132fae4ca795d5fb5811421a43b79eb46 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 3 Dec 2015 16:43:05 +0100 Subject: [PATCH 19/41] Fully type annotate Printers This prevents propagation changes leading to long recompiles when a printer is changed. 
--- src/dotty/tools/dotc/config/Printers.scala | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/config/Printers.scala b/src/dotty/tools/dotc/config/Printers.scala index 8b9cc45c4ce4..21147fe6fa86 100644 --- a/src/dotty/tools/dotc/config/Printers.scala +++ b/src/dotty/tools/dotc/config/Printers.scala @@ -22,13 +22,13 @@ object Printers { val implicitsDetailed: Printer = noPrinter val subtyping: Printer = noPrinter val unapp: Printer = noPrinter - val gadts = noPrinter - val hk = noPrinter - val variances = noPrinter - val incremental = noPrinter - val config = noPrinter - val transforms = noPrinter - val completions = noPrinter - val cyclicErrors = noPrinter - val pickling = noPrinter + val gadts: Printer = noPrinter + val hk: Printer = noPrinter + val variances: Printer = noPrinter + val incremental: Printer = noPrinter + val config: Printer = noPrinter + val transforms: Printer = noPrinter + val completions: Printer = noPrinter + val cyclicErrors: Printer = noPrinter + val pickling: Printer = noPrinter } From dec21f5ea39a2c2c059c829ac16edd68f71793ee Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 4 Dec 2015 17:46:57 +0100 Subject: [PATCH 20/41] Better diagnostics for applyOverloaded. --- src/dotty/tools/dotc/ast/tpd.scala | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/ast/tpd.scala b/src/dotty/tools/dotc/ast/tpd.scala index feeaad49dc36..b78e4c79f3b2 100644 --- a/src/dotty/tools/dotc/ast/tpd.scala +++ b/src/dotty/tools/dotc/ast/tpd.scala @@ -845,15 +845,22 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def applyOverloaded(receiver: Tree, method: TermName, args: List[Tree], targs: List[Type], expectedType: Type, isAnnotConstructor: Boolean = false)(implicit ctx: Context): Tree = { val typer = ctx.typer val proto = new FunProtoTyped(args, expectedType, typer) - val alts = receiver.tpe.member(method).alternatives.map(_.termRef) - - val alternatives = ctx.typer.resolveOverloaded(alts, proto, Nil) - assert(alternatives.size == 1, - i"multiple overloads available for $method on ${receiver.tpe.widenDealias} with targs: $targs, args: $args and expectedType: $expectedType." + - i" isAnnotConstructor = $isAnnotConstructor.\n" + - i"alternatives: $alternatives") // this is parsed from bytecode tree. there's nothing user can do about it - - val selected = alternatives.head + val denot = receiver.tpe.member(method) + assert(denot.exists, i"no member $receiver . $method, members = ${receiver.tpe.decls}") + val selected = + if (denot.isOverloaded) { + val allAlts = denot.alternatives.map(_.termRef) + val alternatives = + ctx.typer.resolveOverloaded(allAlts, proto, Nil) + assert(alternatives.size == 1, + i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " + + i"$method on ${receiver.tpe.widenDealias} with targs: $targs, args: $args and expectedType: $expectedType." + + i" isAnnotConstructor = $isAnnotConstructor.\n" + + i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" + + i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. 
there's nothing user can do about it + alternatives.head + } + else denot.asSingleDenotation.termRef val fun = receiver .select(TermRef.withSig(receiver.tpe, selected.termSymbol.asTerm)) .appliedToTypes(targs) From f8d82c2634dcf9d94037b107d49d088e89f7070f Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 4 Dec 2015 17:55:21 +0100 Subject: [PATCH 21/41] Switch to new hk scheme. --- .../tools/dotc/core/ConstraintHandling.scala | 10 + .../tools/dotc/core/TypeApplications.scala | 303 +++++++++--------- src/dotty/tools/dotc/core/TypeComparer.scala | 146 ++++++--- src/dotty/tools/dotc/core/Types.scala | 36 +-- .../tools/dotc/core/tasty/TreePickler.scala | 42 ++- .../core/unpickleScala2/Scala2Unpickler.scala | 7 +- src/dotty/tools/dotc/typer/Applications.scala | 4 +- src/dotty/tools/dotc/typer/Typer.scala | 7 +- tests/pos/t2693.scala | 6 +- 9 files changed, 310 insertions(+), 251 deletions(-) diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index 577b958c848e..ff7afe99aeff 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -232,6 +232,16 @@ trait ConstraintHandling { } } + /** Instantiate `param` to `tp` if the constraint stays satisfiable */ + protected def tryInstantiate(param: PolyParam, tp: Type): Boolean = { + val saved = constraint + constraint = + if (addConstraint(param, tp, fromBelow = true) && + addConstraint(param, tp, fromBelow = false)) constraint.replace(param, tp) + else saved + constraint ne saved + } + /** Check that constraint is fully propagated. See comment in Config.checkConstraintsPropagated */ def checkPropagated(msg: => String)(result: Boolean): Boolean = { if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) { diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 4c9746c39096..0f9cb8230c44 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -37,6 +37,15 @@ object TypeApplications { case _ => tp } + /** Does the variance of `sym1` conform to the variance of `sym2`? + * This is the case if the variances are the same or `sym` is nonvariant. + */ + def varianceConforms(sym1: TypeSymbol, sym2: TypeSymbol)(implicit ctx: Context) = + sym1.variance == sym2.variance || sym2.variance == 0 + + def variancesConform(syms1: List[TypeSymbol], syms2: List[TypeSymbol])(implicit ctx: Context) = + syms1.corresponds(syms2)(varianceConforms) + /** Extractor for * * [v1 X1: B1, ..., vn Xn: Bn] -> T @@ -123,25 +132,7 @@ object TypeApplications { * T { ... } # $Apply */ object AppliedType { - def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = { - def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { - case arg :: args1 => - try { - val tparam :: tparams1 = tparams - matchParams(RefinedType(tp, tparam.name, arg.toBounds(tparam)), tparams1, args1) - } catch { - case ex: MatchError => - println(s"applied type mismatch: $tp $args, typeParams = ${tp.classSymbol.typeParams}") // !!! 
DEBUG - println(s"precomplete decls = ${tp.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") - throw ex - } - case nil => tp - } - assert(args.nonEmpty) - val cls = tp.classSymbol - val refined = matchParams(tp, cls.typeParams, args) - if (cls.isLambdaTrait) TypeRef(refined, tpnme.hkApply) else refined - } + def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = tp.appliedTo(args) def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { case TypeRef(prefix, tpnme.hkApply) => unapp(prefix) @@ -204,18 +195,22 @@ class TypeApplications(val self: Type) extends AnyVal { case self: ClassInfo => self.cls.typeParams case self: TypeRef => - val tsym = self.typeSymbol + val tsym = self.symbol if (tsym.isClass) tsym.typeParams else if (tsym.isAliasType) self.underlying.typeParams - else { - val lam = LambdaClass(forcing = false) - if (lam.exists) lam.typeParams else Nil - } + else if (tsym.isCompleting) + // We are facing a problem when computing the type parameters of an uncompleted + // abstract type. We can't access the bounds of the symbol yet because that + // would cause a cause a cyclic reference. So we return `Nil` instead + // and try to make up for it later. The acrobatics in Scala2Unpicker#readType + // for reading a TypeRef show what's neeed. + Nil + else tsym.info.typeParams case self: RefinedType => val hkParams = self.hkTypeParams if (hkParams.nonEmpty) hkParams else self.parent.typeParams.filterNot(_.name == self.refinedName) - case self: SingletonType => + case self: SingletonType => Nil case self: TypeProxy => self.underlying.typeParams @@ -224,20 +219,15 @@ class TypeApplications(val self: Type) extends AnyVal { } } - /** The type parameters of the underlying class. - * This is like `typeParams`, except for 3 differences. - * First, it does not adjust type parameters in refined types. I.e. type arguments - * do not remove corresponding type parameters. - * Second, it will return Nil for BoundTypes because we might get a NullPointer exception - * on PolyParam#underlying otherwise (demonstrated by showClass test). - * Third, it won't return abstract higher-kinded type parameters, i.e. the type parameters of - * an abstract type are always empty. + /** The higherkinded type parameters in case this is a type lambda + * + * [X1, ..., Xn] -> T + * + * These are the parameters of the underlying lambda class. + * Returns `Nil` for all other types. */ - final def hkTypeParams(implicit ctx: Context): List[TypeSymbol] = self match { - case TypeLambda(_, _, body) => self.typeSymbol.typeParams.head :: body.hkTypeParams - case TypeBounds(lo, hi) => hi.hkTypeParams - case _ => Nil - } + final def hkTypeParams(implicit ctx: Context): List[TypeSymbol] = + self.LambdaTrait.typeParams final def paramBounds(implicit ctx: Context): List[TypeBounds] = typeParams.map(self.memberInfo(_).bounds) @@ -251,11 +241,41 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => NoSymbol } + /** A type ref is eta expandable if it refers to a non-lambda class. + * In that case we can look for parameterized base types fo the type + * to eta expand them. + */ def isEtaExpandable(implicit ctx: Context) = self match { case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitName case _ => false } + /** Lambda abstract `self` with given type parameters. 
Examples: + * + * type T[X] = U becomes type T = [X] -> U + * type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U) + */ + def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { + def expand(tp: Type) = { + TypeLambda(tparams.map(_.variance), tparams.map(_.info.bounds), + rt => new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)).apply(tp)) + } + self match { + case self: TypeAlias => + self.derivedTypeAlias(expand(self.alias)) + case self @ TypeBounds(lo, hi) => + self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi))) + case _ => expand(self) + } + } +/* + def betaReduce(implicit ctx: Context): Type = self.stripTypeVar match { + case TypeRef(prefix, tpnme.hkApply) => + prefix.betaReduce + case self @ RefinedType(parent, tpnme.hkArg) if parent.isTypeLambda => + HKApplication(parent, self.refinedInfo.dropAlias) + } +*/ /** Adapt argument A to type parameter P in the case P is higher-kinded. * This means: * (1) Make sure that A is a type lambda, if necessary by eta-expanding it. @@ -305,6 +325,7 @@ class TypeApplications(val self: Type) extends AnyVal { } } + /* /** If type `self` is equal, aliased-to, or upperbounded-by a type of the form * `LambdaXYZ { ... }`, the class symbol of that type, otherwise NoSymbol. * symbol of that type, otherwise NoSymbol. @@ -338,11 +359,11 @@ class TypeApplications(val self: Type) extends AnyVal { /** Is type `self` a Lambda with all hk$i fields fully instantiated? */ def isInstantiatedLambda(implicit ctx: Context): Boolean = isSafeLambda && typeParams.isEmpty - +*/ /** Is receiver type higher-kinded (i.e. of kind != "*")? */ def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK - case RefinedType(_, name) => name == tpnme.hkApply || name.isHkArgName + case RefinedType(_, name) => name == tpnme.hkApply case TypeBounds(_, hi) => hi.isHK case _ => false } @@ -361,14 +382,14 @@ class TypeApplications(val self: Type) extends AnyVal { * * but without forcing anything. */ - def noHK(implicit ctx: Context): Boolean = self.stripTypeVar match { + def classNotLambda(implicit ctx: Context): Boolean = self.stripTypeVar match { case self: RefinedType => - self.parent.noHK + self.parent.classNotLambda case self: TypeRef => - (self.denot.exists) && { + self.denot.exists && { val sym = self.symbol if (sym.isClass) !sym.isLambdaTrait - else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.noHK + else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.classNotLambda } case _ => false @@ -376,72 +397,66 @@ class TypeApplications(val self: Type) extends AnyVal { /** Encode the type resulting from applying this type to given arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { - def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { - case arg :: args1 => - if (tparams.isEmpty) { - println(s"applied type mismatch: $self $args, typeParams = $typeParams, tsym = ${self.typeSymbol.debugString}") // !!! 
DEBUG - println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") - } - val tparam = tparams.head - val tp1 = RefinedType(tp, tparam.name, arg.toBounds(tparam)) - matchParams(tp1, tparams.tail, args1) - case nil => tp + def substHkArgs = new TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(RefinedThis(rt), name) if rt.eq(self) && name.isHkArgName => + args(name.hkArgIndex) + case _ => + mapOver(tp) + } + } + if (args.isEmpty || ctx.erasedTypes) self + else self.stripTypeVar match { + case EtaExpansion(self1) => + self1.appliedTo(args) + case TypeLambda(_, _, body) if !args.exists(_.isInstanceOf[TypeBounds]) => + substHkArgs(body) + case self: PolyType => + self.instantiate(args) + case _ => + appliedTo(args, typeParams) } + } - /** Instantiate type `tp` with `args`. - * @param original The original type for which we compute the type parameters - * This makes a difference for refinement types, because - * refinements bind type parameters and thereby remove them - * from `typeParams`. - */ - def instantiate(tp: Type, original: Type): Type = tp match { - case tp: TypeRef => - val tsym = tp.symbol - if (tsym.isAliasType) tp.underlying.appliedTo(args) - else { - val safeTypeParams = - if (tsym.isClass || !tp.typeSymbol.isCompleting) original.typeParams - else { - ctx.warning(i"encountered F-bounded higher-kinded type parameters for $tsym; assuming they are invariant") - defn.LambdaTrait(args map alwaysZero).typeParams // @@@ can we force? - } - matchParams(tp, safeTypeParams, args) + def appliedTo(args: List[Type], typParams: List[TypeSymbol])(implicit ctx: Context): Type = { + def matchParams(t: Type, tparams: List[TypeSymbol], args: List[Type])(implicit ctx: Context): Type = args match { + case arg :: args1 => + try { + val tparam :: tparams1 = tparams + matchParams(RefinedType(t, tparam.name, arg.toBounds(tparam)), tparams1, args1) + } catch { + case ex: MatchError => + println(s"applied type mismatch: $self $args, typeParams = $typParams") // !!! DEBUG + //println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") + throw ex } - case tp: RefinedType => - val redux = tp.EtaReduce - if (redux.exists) redux.appliedTo(args) // Rewrite ([hk$0] => C[hk$0])(T) to C[T] - else tp.derivedRefinedType( - instantiate(tp.parent, original), - tp.refinedName, - tp.refinedInfo) - case tp: TypeProxy => - instantiate(tp.underlying, original) - case tp: PolyType => - tp.instantiate(args) - case ErrorType => - tp + case nil => t } - - /** Same as isHK, except we classify all abstract types as HK, - * (they must be, because they are applied). This avoids some forcing and - * CyclicReference errors of the standard isHK. 
- */ - def isKnownHK(tp: Type): Boolean = tp match { - case tp: TypeRef => - val sym = tp.symbol - if (sym.isClass) sym.isLambdaTrait - else !sym.isAliasType || isKnownHK(tp.info) - case tp: TypeProxy => isKnownHK(tp.underlying) - case _ => false + assert(args.nonEmpty) + matchParams(self, typParams, args) match { + case refined @ RefinedType(_, pname) if pname.isHkArgName => + TypeRef(refined, tpnme.hkApply) + case refined => + refined } + } - if (args.isEmpty || ctx.erasedTypes) self - else { - val res = instantiate(self, self) - if (isKnownHK(res)) TypeRef(res, tpnme.hkApply) else res + + final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil) + final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil) + + final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = { + val safeTypeParams = self match { + case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => + // This happens when unpickling e.g. scala$collection$generic$GenMapFactory$$CC + ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant") + defn.LambdaTrait(args map alwaysZero).typeParams + case _ => + typeParams } + appliedTo(args, safeTypeParams) } - +/* /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`. */ def simplifyApply(implicit ctx: Context): Type = self match { @@ -452,9 +467,7 @@ class TypeApplications(val self: Type) extends AnyVal { } case _ => self } - - final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil) - final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil) +*/ /** Turn this type, which is used as an argument for * type parameter `tparam`, into a TypeBounds RHS @@ -558,26 +571,9 @@ class TypeApplications(val self: Type) extends AnyVal { * otherwise return Nil. * Existential types in arguments are returned as TypeBounds instances. */ - final def argInfos(implicit ctx: Context): List[Type] = { - var tparams: List[TypeSymbol] = null - def recur(tp: Type, refineCount: Int): mutable.ListBuffer[Type] = tp.stripTypeVar match { - case tp @ RefinedType(tycon, name) => - val buf = recur(tycon, refineCount + 1) - if (buf == null) null - else { - if (tparams == null) tparams = tycon.typeParams - if (buf.size < tparams.length) { - val tparam = tparams(buf.size) - if (name == tparam.name) buf += tp.refinedInfo.argInfo - else null - } else null - } - case _ => - if (refineCount == 0) null - else new mutable.ListBuffer[Type] - } - val buf = recur(self, 0) - if (buf == null || buf.size != tparams.length) Nil else buf.toList + final def argInfos(implicit ctx: Context): List[Type] = self match { + case AppliedType(tycon, args) => args + case _ => Nil } /** Argument types where existential types in arguments are disallowed */ @@ -600,6 +596,11 @@ class TypeApplications(val self: Type) extends AnyVal { self } + final def typeConstructor(implicit ctx: Context): Type = self.stripTypeVar match { + case AppliedType(tycon, _) => tycon + case self => self + } + /** If this is the image of a type argument; recover the type argument, * otherwise NoType. 
*/ @@ -609,6 +610,11 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => NoType } + def dropAlias(implicit ctx: Context): Type = self match { + case TypeAlias(alias) => alias + case _ => self + } + /** The element type of a sequence or array */ def elemType(implicit ctx: Context): Type = self match { case defn.ArrayOf(elemtp) => elemtp @@ -639,7 +645,7 @@ class TypeApplications(val self: Type) extends AnyVal { } recur(self) } - +/* /** The typed lambda abstraction of this type `T` relative to `boundSyms`. * This is: * @@ -690,7 +696,7 @@ class TypeApplications(val self: Type) extends AnyVal { expand(self) } } - +*/ /** Convert a type constructor `TC` which has type parameters `T1, ..., Tn` * in a context where type parameters `U1,...,Un` are expected to * @@ -701,20 +707,17 @@ class TypeApplications(val self: Type) extends AnyVal { * - `T1,...,Tn` otherwise. * v1 is compatible with v2, if v1 = v2 or v2 is non-variant. */ - def EtaExpand(tparams: List[Symbol])(implicit ctx: Context): Type = { - def varianceCompatible(actual: Symbol, formal: Symbol) = - formal.variance == 0 || actual.variance == formal.variance - val tparamsToUse = - if (typeParams.corresponds(tparams)(varianceCompatible)) tparams else typeParams + def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = { + val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParams self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse) //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") } - +/* /** Eta expand if `bound` is a higher-kinded type */ def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = if (bound.isHK && !isHK && self.typeSymbol.isClass && typeParams.nonEmpty) EtaExpand(bound.typeParams) else self - +*/ /** Eta expand the prefix in front of any refinements. */ def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match { case self: RefinedType => @@ -722,7 +725,7 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => self.EtaExpand(self.typeParams) } - +/* /** If `self` is a (potentially partially instantiated) eta expansion of type T, return T, * otherwise NoType. More precisely if `self` is of the form * @@ -809,31 +812,21 @@ class TypeApplications(val self: Type) extends AnyVal { * @param classBounds A hint to bound the search. Only types that derive from one of the * classes in classBounds are considered. 
*/ - def testLifted(tparams: List[Symbol], p: Type => Boolean, classBounds: List[ClassSymbol])(implicit ctx: Context): Boolean = { - def tryLift(bcs: List[ClassSymbol]): Boolean = bcs match { + def testLifted(tparams: List[Symbol], p: Type => Boolean, classBounds: List[ClassSymbol] = Nil)(implicit ctx: Context): Boolean = { + def recur(bcs: List[ClassSymbol]): Boolean = bcs match { case bc :: bcs1 => - val tp = self.baseTypeWithArgs(bc) - val targs = tp.argInfos - val tycon = tp.withoutArgs(targs) + val baseRef = self.baseTypeRef(bc) def variancesMatch(param1: Symbol, param2: Symbol) = param2.variance == param2.variance || param2.variance == 0 - if (classBounds.exists(tycon.derivesFrom(_)) && - tycon.typeParams.corresponds(tparams)(variancesMatch)) { - val expanded = tycon.EtaExpand(tparams) - val lifted = (expanded /: targs) { (partialInst, targ) => - val tparam = partialInst.typeParams.head - RefinedType(partialInst, tparam.name, targ.bounds.withVariance(tparam.variance)) - } - ctx.traceIndented(i"eta lifting $self --> $lifted", hk) { - p(lifted) || tryLift(bcs1) - } - } - else tryLift(bcs1) + (classBounds.exists(bc.derivesFrom) && + baseRef.typeParams.corresponds(tparams)(variancesMatch) && + p(baseRef.appliedTo(self.baseArgInfos(bc))) + || + recur(bcs1)) case nil => false } - tparams.nonEmpty && - (typeParams.hasSameLengthAs(tparams) && p(EtaExpand(tparams)) || - classBounds.nonEmpty && tryLift(self.baseClasses)) + classBounds.nonEmpty && recur(self.baseClasses) } + */ } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 6b37227b26b7..bdabca8df675 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -12,6 +12,7 @@ import util.{Stats, DotClass, SimpleMap} import config.Config import config.Printers._ import TypeErasure.{erasedLub, erasedGlb} +import TypeApplications._ import scala.util.control.NonFatal /** Provides methods to compare types. 
@@ -150,20 +151,21 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case info1: TypeAlias => compareNamed(info1.alias, tp2) case _ => val sym1 = tp1.symbol - (if ((sym1 ne NoSymbol) && (sym1 eq tp2.symbol)) - ctx.erasedTypes || - sym1.isStaticOwner || - isSubType(tp1.prefix, tp2.prefix) || - thirdTryNamed(tp1, tp2) - else - (tp1.name eq tp2.name) && - isSubType(tp1.prefix, tp2.prefix) && - (tp1.signature == tp2.signature) && - !tp1.isInstanceOf[WithFixedSym] && - !tp2.isInstanceOf[WithFixedSym] || - compareHK(tp1, tp2, inOrder = true) || - compareHK(tp2, tp1, inOrder = false) || - thirdTryNamed(tp1, tp2)) + if ((sym1 ne NoSymbol) && (sym1 eq tp2.symbol)) + ctx.erasedTypes || + sym1.isStaticOwner || + isSubType(tp1.prefix, tp2.prefix) || + thirdTryNamed(tp1, tp2) + else + ( (tp1.name eq tp2.name) + && isSubType(tp1.prefix, tp2.prefix) + && tp1.signature == tp2.signature + && !tp1.isInstanceOf[WithFixedSym] + && !tp2.isInstanceOf[WithFixedSym] + ) || + compareHK(tp1, tp2, inOrder = true) || + compareHK(tp2, tp1, inOrder = false) || + thirdTryNamed(tp1, tp2) } case _ => compareHK(tp2, tp1, inOrder = false) || @@ -239,9 +241,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp1: NamedType => tp1.info match { case info1: TypeAlias => isSubType(info1.alias, tp2) - case _ => compareHK(tp1, tp2, inOrder = true) || thirdTry(tp1, tp2) - // Note: If we change the order here, doing compareHK first and following aliases second, - // we get a -Ycheck error when compiling dotc/transform. Need to investigate. + case _ => + compareHK(tp1, tp2, inOrder = true) || + thirdTry(tp1, tp2) } case tp1: PolyParam => def flagNothingBound = { @@ -344,12 +346,16 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp1: AndType => // Delay calling `compareRefinedSlow` because looking up a member // of an `AndType` can lead to a cascade of subtyping checks + // This twist is needed to make collection/generic/ParFactory.scala compile fourthTry(tp1, tp2) || compareRefinedSlow case _ => compareRefinedSlow || fourthTry(tp1, tp2) } + def etaExpandedSubType(tp1: Type) = + isSubType(tp1.typeConstructor.EtaExpand(tp2.typeParams), tp2) normalPath || - needsEtaLift(tp1, tp2) && tp1.testLifted(tp2.typeParams, isSubType(_, tp2), classBounds(tp2)) + needsEtaLift(tp1, tp2) && + testLifted(tp1, tp2, tp2.typeParams, etaExpandedSubType) } else // fast path, in particular for refinements resulting from parameterization. isSubType(tp1, skipped2) && @@ -471,7 +477,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => isNewSubType(tp1.parent, tp2) || - needsEtaLift(tp2, tp1) && tp2.testLifted(tp1.typeParams, isSubType(tp1, _), Nil) + needsEtaLift(tp2, tp1) && + tp2.typeParams.length == tp1.typeParams.length && + isSubType(tp1, tp2.EtaExpand(tp1.typeParams)) case AndType(tp11, tp12) => // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2 // and analogously for T11 & (T121 | T122) & T12 <: T2 @@ -502,19 +510,89 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } - /** If `projection` is a hk projection T#$apply - * and `other` is not a hk projection, then convert `other` to a hk projection `U`, and - * continue with `T <:< U` if `inOrder` is true and `U <:< T` otherwise. + /** Does `tp` need to be eta lifted to be comparable to `target`? 
+ * This is the case if: + * - target is a type lambda, and + * - `tp` is eta-expandable (i.e. is a non-lambda class ref) */ - def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = - projection.name == tpnme.hkApply && - !other.isHKApply && - other.testLifted(projection.prefix.LambdaClass(forcing = true).typeParams, - if (inOrder) isSubType(projection.prefix, _) else isSubType(_, projection.prefix), - if (inOrder) Nil else classBounds(projection.prefix)) + private def needsEtaLift(tp: Type, target: RefinedType): Boolean = + target.refinedName == tpnme.hkApply && tp.isEtaExpandable - /** The class symbols bounding the type of the `Apply` member of `tp` */ - private def classBounds(tp: Type) = tp.member(tpnme.hkApply).info.classSymbols + /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where + * - `B` derives from one of the class symbols of `tp2`, + * - the type parameters of `B` match one-by-one the variances of `tparams`, + * - `B` satisfies predicate `p`. + */ + private def testLifted[T](tp1: Type, tp2: Type, tparams: List[TypeSymbol], p: Type => Boolean): Boolean = { + val classBounds = tp2.member(tpnme.hkApply).info.classSymbols + def recur(bcs: List[ClassSymbol]): Boolean = bcs match { + case bc :: bcs1 => + val baseRef = tp1.baseTypeRef(bc) + (classBounds.exists(bc.derivesFrom) && + variancesConform(baseRef.typeParams, tparams) && + p(baseRef.appliedTo(tp1.baseArgInfos(bc))) + || + recur(bcs1)) + case nil => + false + } + recur(tp1.baseClasses) + } + + /** If `projection` is a hk projection T#$apply with a constrainable poly param + * as type constructor and `other` is not a hk projection, then perform the following + * steps: + * + * (1) If not `inOrder` then perform the next steps until they all succeed + * for each base type of other which + * - derives from a class bound of `projection`, + * - has the same number of type parameters than `projection` + * - has type parameter variances which conform to those of `projection`. + * If `inOrder` then perform the same steps on the original `other` type. + * + * (2) Try to eta expand the constructor of `other`. + * + * (3a) In mode `TypeVarsMissConetxt` replace the projection's hk constructor parameter + * by the eta expansion of step (2) reapplied to the projection's arguments. + * (3b) In normal mode, try to unify the projection's hk constructor parameter with + * the eta expansion of step(2) + * + * (4) If `inOrder`, test `projection <: other` else test `other <: projection`. + */ + def compareHK(projection: NamedType, other: Type, inOrder: Boolean): Boolean = { + def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($projection, $other, inOrder = $inOrder, constr = $tp)", subtyping) { + tp match { + case tp: TypeVar => tryInfer(tp.underlying) + case param: PolyParam if canConstrain(param) => + + def unifyWith(liftedOther: Type): Boolean = { + subtyping.println(i"unify with $liftedOther") + liftedOther.typeConstructor.widen match { + case tycon: TypeRef if tycon.isEtaExpandable && tycon.typeParams.nonEmpty => + val (ok, projection1) = + if (ctx.mode.is(Mode.TypevarsMissContext)) + (true, EtaExpansion(tycon).appliedTo(projection.argInfos)) + else + (tryInstantiate(param, EtaExpansion(tycon)), projection) + ok && + (if (inOrder) isSubType(projection1, other) else isSubType(other, projection1)) // ### move out? 
+ case _ => + false + } + } + val hkTypeParams = param.typeParams + subtyping.println(i"classBounds = ${projection.prefix.member(tpnme.hkApply).info.classSymbols}") + subtyping.println(i"base classes = ${other.baseClasses}") + subtyping.println(i"type params = $hkTypeParams") + if (inOrder) unifyWith(other) + else testLifted(other, projection.prefix, hkTypeParams, unifyWith) + case _ => + false + } + } + projection.name == tpnme.hkApply && !other.isHKApply && + tryInfer(projection.prefix.typeConstructor.dealias) + } /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time * to keep the constraint as wide as possible. Specifically, if @@ -665,14 +743,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => false } - /** Does `tp` need to be eta lifted to be comparable to `target`? */ - private def needsEtaLift(tp: Type, target: RefinedType): Boolean = { - // if (tp.isLambda != tp.isHK) println(i"discrepancy for $tp, isLambda = ${tp.isLambda}, isHK = ${tp.isHK}") - val name = target.refinedName - (name.isHkArgName || (name eq tpnme.hkApply)) && - tp.exists && !tp.isLambda // we do encounter Lambda classes without any arguments here - } - /** Narrow gadt.bounds for the type parameter referenced by `tr` to include * `bound` as an upper or lower bound (which depends on `isUpper`). * Test that the resulting bounds are still satisfiable. diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 4af4d0c14101..54e6397bbcf0 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -893,7 +893,7 @@ object Types { else if (!pre.refinementRefersToThis) alias else alias match { case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1) - case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) + case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) // ### use TypeApplication's betaReduce } case _ => loop(pre.parent) } @@ -1513,7 +1513,12 @@ object Types { else { val res = prefix.lookupRefined(name) if (res.exists) res - else if (name == tpnme.hkApply && prefix.noHK) derivedSelect(prefix.EtaExpandCore) + else if (name == tpnme.hkApply && prefix.classNotLambda) { + // After substitution we might end up with a type like + // `C { type hk$0 = T0; ...; type hk$n = Tn } # $Apply` + // where C is a class. In that case we eta expand `C`. 
+ derivedSelect(prefix.EtaExpandCore) + } else newLikeThis(prefix) } @@ -1753,8 +1758,8 @@ object Types { object TypeRef { def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) = - if (name == tpnme.hkApply && prefix.noHK) - assert(false, s"bad type : $prefix.$name should not be $$applied") + if (name == tpnme.hkApply && prefix.classNotLambda) + assert(false, s"bad type : $prefix.$name does not allow $$Apply projection") /** Create type ref with given prefix and name */ def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = { @@ -1894,22 +1899,7 @@ object Types { override def underlying(implicit ctx: Context) = parent - private def checkInst(implicit ctx: Context): this.type = { - if (Config.checkLambdaVariance) - refinedInfo match { - case refinedInfo: TypeBounds if refinedInfo.variance != 0 && refinedName.isHkArgName => - val cls = parent.LambdaClass(forcing = false) - if (cls.exists) - assert(refinedInfo.variance == cls.typeParams.apply(refinedName.hkArgIndex).variance, - s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.hkArgIndex).variance}, ${refinedInfo.variance}") - case _ => - } - if (Config.checkProjections && - (refinedName == tpnme.hkApply || refinedName.isHkArgName) && - parent.noHK) - assert(false, s"illegal refinement of first-order type: $this") - this - } + private def checkInst(implicit ctx: Context): this.type = this def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this @@ -2743,9 +2733,9 @@ object Types { abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias) { /** pre: this is a type alias */ - def derivedTypeAlias(tp: Type, variance: Int = this.variance)(implicit ctx: Context) = - if ((lo eq tp) && (variance == this.variance)) this - else TypeAlias(tp, variance) + def derivedTypeAlias(alias: Type, variance: Int = this.variance)(implicit ctx: Context) = + if ((alias eq this.alias) && (variance == this.variance)) this + else TypeAlias(alias, variance) override def & (that: TypeBounds)(implicit ctx: Context): TypeBounds = { val v = this commonVariance that diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index c0136538bb47..8fccb89731b5 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -9,6 +9,7 @@ import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annot import collection.mutable import NameOps._ import TastyBuffer._ +import TypeApplications._ class TreePickler(pickler: TastyPickler) { val buf = new TreeBuffer @@ -142,6 +143,9 @@ class TreePickler(pickler: TastyPickler) { } def pickleNewType(tpe: Type, richTypes: Boolean): Unit = try { tpe match { + case AppliedType(tycon, args) => + writeByte(APPLIEDtype) + withLength { pickleType(tycon); args.foreach(pickleType(_)) } case ConstantType(value) => pickleConstant(value) case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) => @@ -181,17 +185,16 @@ class TreePickler(pickler: TastyPickler) { pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix) } case tpe: NamedType => - if (tpe.name == tpnme.hkApply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) - // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will - // be 
reconstituted when unpickling. - pickleType(tpe.prefix) - else if (isLocallyDefined(tpe.symbol)) { - writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) - pickleSymRef(tpe.symbol); pickleType(tpe.prefix) - } - else { - writeByte(if (tpe.isType) TYPEREF else TERMREF) - pickleName(tpe.name); pickleType(tpe.prefix) + tpe match { + case _ => + if (isLocallyDefined(tpe.symbol)) { + writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) + pickleSymRef(tpe.symbol); pickleType(tpe.prefix) + } + else { + writeByte(if (tpe.isType) TYPEREF else TERMREF) + pickleName(tpe.name); pickleType(tpe.prefix) + } } case tpe: ThisType => if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot) @@ -211,18 +214,11 @@ class TreePickler(pickler: TastyPickler) { case tpe: SkolemType => pickleType(tpe.info) case tpe: RefinedType => - val args = tpe.argInfos - if (args.isEmpty) { - writeByte(REFINEDtype) - withLength { - pickleType(tpe.parent) - pickleName(tpe.refinedName) - pickleType(tpe.refinedInfo, richTypes = true) - } - } - else { - writeByte(APPLIEDtype) - withLength { pickleType(tpe.withoutArgs(args)); args.foreach(pickleType(_)) } + writeByte(REFINEDtype) + withLength { + pickleType(tpe.parent) + pickleName(tpe.refinedName) + pickleType(tpe.refinedInfo, richTypes = true) } case tpe: TypeAlias => writeByte(TYPEALIAS) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 747d73ea99f9..b0e31202f11c 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -53,7 +53,7 @@ object Scala2Unpickler { case TempPolyType(tparams, restpe) => if (denot.isType) { assert(!denot.isClass) - restpe.LambdaAbstract(tparams, cycleParanoid = true) + restpe.LambdaAbstract(tparams) } else PolyType.fromSymbols(tparams, restpe) @@ -715,8 +715,9 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else TypeRef(pre, sym.name.asTypeName) val args = until(end, readTypeRef) if (sym == defn.ByNameParamClass2x) ExprType(args.head) - else if (args.isEmpty && sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams) - else tycon.appliedTo(adaptArgs(sym.typeParams, args)) + else if (args.nonEmpty) tycon.safeAppliedTo(adaptArgs(sym.typeParams, args)) + else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams) + else tycon case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) case REFINEDtpe => diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index e8ba3b07b4c1..d119658be078 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -614,10 +614,10 @@ trait Applications extends Compatibility { self: Typer => } def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = { - val was = tree.tpe.EtaExpandIfHK(bound) + //val was = tree.tpe.EtaExpandIfHK(bound) //val now = tree.tpe.adaptIfHK(bound) // ### //if (was != now) println(i"diff adapt ${tree.tpe} to $bound, was: $was, now: $now") - tree.withType(was)//tree.tpe.adaptIfHK(bound)) + tree.withType(tree.tpe.adaptIfHK(bound)) } /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. 
*/ diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 3f5c4f47ef89..f857a2504988 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -348,10 +348,9 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt) case _ => var tpt1 = typedType(tree.tpt) - if (tpt1.tpe.isHK) { - val deAliased = tpt1.tpe.dealias.EtaReduce - if (deAliased.exists && deAliased.ne(tpt1.tpe)) - tpt1 = tpt1.withType(deAliased) + tpt1.tpe.dealias match { + case TypeApplications.EtaExpansion(tycon) => tpt1 = tpt1.withType(tycon) + case _ => } checkClassTypeWithStablePrefix(tpt1.tpe, tpt1.pos, traitReq = false) assignType(cpy.New(tree)(tpt1), tpt1) diff --git a/tests/pos/t2693.scala b/tests/pos/t2693.scala index 5d4d0380c418..537e6d8abdf9 100644 --- a/tests/pos/t2693.scala +++ b/tests/pos/t2693.scala @@ -1,6 +1,6 @@ class A { - trait T[A] + trait Tr[A] def usetHk[T[_], A](ta: T[A]) = 0 - usetHk(new T[Int]{}: T[Int]) - usetHk(new T[Int]{}) // fails with: found: java.lang.Object with T[Int], required: ?T[ ?A ] + usetHk(new Tr[Int]{}: Tr[Int]) + usetHk(new Tr[Int]{}) // fails with: found: java.lang.Object with T[Int], required: ?T[ ?A ] } From d186a333c55b738ee35a81cb185da2f3e7ab4742 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 5 Dec 2015 18:21:20 +0100 Subject: [PATCH 22/41] Reset constraint when one of several implicit arguments is not found. --- src/dotty/tools/dotc/typer/Typer.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index f857a2504988..0d7db4f9ab6c 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -1415,7 +1415,13 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit implicitArgError(d"no implicit argument of type $formal found for $where" + failure.postscript) } } - if (args.exists(_.isEmpty)) { assert(constr eq ctx.typerState.constraint); tree } + if (args.exists(_.isEmpty)) { + // If there are several arguments, some arguments might already + // have influenced the context, binfing variables, but later ones + // might fail. In that case the constraint needs to be reset. 
+ ctx.typerState.constraint = constr + tree + } else adapt(tpd.Apply(tree, args), pt) } if ((pt eq WildcardType) || original.isEmpty) addImplicitArgs From 56dd29d3d3b0524d03a93965160e651a5d757b92 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 5 Dec 2015 18:21:56 +0100 Subject: [PATCH 23/41] Fix printing of type lambdas --- .../tools/dotc/printing/RefinedPrinter.scala | 35 ++++++++++++++----- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index dd485ad60669..e015fd4cf805 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -19,7 +19,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { /** A stack of enclosing DefDef, TypeDef, or ClassDef, or ModuleDefs nodes */ private var enclosingDef: untpd.Tree = untpd.EmptyTree - + private var lambdaNestingLevel: Int = 0 private var myCtx: Context = _ctx override protected[this] implicit def ctx: Context = myCtx @@ -116,18 +116,35 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (defn.isFunctionClass(cls)) return toTextFunction(args) if (defn.isTupleClass(cls)) return toTextTuple(args) return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close - case TypeLambda(variances, argBoundss, body) => - val paramNames = variances.indices.toList.map("X" + _) + case tp @ TypeLambda(variances, argBoundss, body) => + val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar + val paramNames = variances.indices.toList.map(prefix.toString + _) val instantiate = new TypeMap { + def contains(tp1: Type, tp2: Type): Boolean = + tp1.eq(tp2) || { + tp1.stripTypeVar match { + case RefinedType(parent, _) => contains(parent, tp2) + case _ => false + } + } def apply(t: Type): Type = t match { - case TypeRef(RefinedThis(rt), name) if name.isHkArgName && rt.eq(tp) => - TypeRef.withFixedSym( - NoPrefix, paramNames(name.hkArgIndex).toTypeName, defn.AnyClass) - case _ => mapOver(t) + case TypeRef(RefinedThis(rt), name) if name.isHkArgName && contains(tp, rt) => + // Make up a name that prints as "Xi". Need to be careful we do not + // accidentally unique-hash to something else. That's why we can't + // use prefix = NoPrefix or a WithFixedSym instance. 
+ TypeRef.withSymAndName( + defn.EmptyPackageClass.thisType, defn.AnyClass, + paramNames(name.hkArgIndex).toTypeName) + case _ => + mapOver(t) } } - return typeLambdaText(paramNames, variances, argBoundss, - instantiate(body).argInfo) + val instArgs = argBoundss.map(instantiate).asInstanceOf[List[TypeBounds]] + val instBody = instantiate(body).dropAlias + lambdaNestingLevel += 1 + try + return typeLambdaText(paramNames, variances, instArgs, instBody) + finally lambdaNestingLevel -=1 case tp: TypeRef => val hideType = tp.symbol is AliasPreferred if (hideType && !ctx.phase.erasedTypes && !tp.symbol.isCompleting) { From 651930949e370b21e46bedba0f58fc92e37a33f6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 5 Dec 2015 23:52:41 +0100 Subject: [PATCH 24/41] Allow for F-bounded bounds in TypeLambda --- .../tools/dotc/core/TypeApplications.scala | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 0f9cb8230c44..b86c58030c91 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -54,18 +54,18 @@ object TypeApplications { */ object TypeLambda { def apply(variances: List[Int], - argBoundss: List[TypeBounds], + argBoundsFns: List[RefinedType => TypeBounds], bodyFn: RefinedType => Type)(implicit ctx: Context): Type = { - def argRefinements(parent: Type, i: Int, bs: List[TypeBounds]): Type = bs match { + def argRefinements(parent: Type, i: Int, bs: List[RefinedType => TypeBounds]): Type = bs match { case b :: bs1 => argRefinements(RefinedType(parent, tpnme.hkArg(i), b), i + 1, bs1) case nil => parent } assert(variances.nonEmpty) - assert(argBoundss.length == variances.length) + assert(argBoundsFns.length == variances.length) RefinedType( - argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundss), + argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundsFns), tpnme.hkApply, bodyFn(_).bounds.withVariance(1)) } @@ -94,7 +94,9 @@ object TypeApplications { assert(tycon.isEtaExpandable) val tparams = tycon.typeParams val variances = tycon.typeParams.map(_.variance) - TypeLambda(tparams.map(_.variance), tycon.paramBounds, + TypeLambda( + tparams.map(_.variance), + tycon.paramBounds.map(internalize(_, tycon.typeParams)), rt => tycon.appliedTo(argRefs(rt, tparams.length))) } @@ -162,7 +164,11 @@ object TypeApplications { } } - /** Adapt all arguments to possible higher-kinded type parameters using adaptIfHK + private def internalize[T <: Type](tp: T, tparams: List[Symbol])(implicit ctx: Context) = + (rt: RefinedType) => + new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)).apply(tp).asInstanceOf[T] + + /** Adapt all arguments to possible higher-kinded type parameters using adaptIfHK */ def adaptArgs(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = if (tparams.isEmpty) args @@ -257,8 +263,10 @@ class TypeApplications(val self: Type) extends AnyVal { */ def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { def expand(tp: Type) = { - TypeLambda(tparams.map(_.variance), tparams.map(_.info.bounds), - rt => new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)).apply(tp)) + TypeLambda( + tparams.map(_.variance), + tparams.map(tparam => internalize(tparam.info.bounds, tparams)), + internalize(tp, tparams)) } self match { case self: TypeAlias => From 50d265299ab99ace68e3813794559de2bbcdc91c Mon Sep 17 00:00:00 2001 From: Martin 
Odersky Date: Sun, 6 Dec 2015 09:24:43 +0100 Subject: [PATCH 25/41] Be DRY betweeen EtaExpand and LambdaAbstract All Lambda abstractions, not just eta expansions, should use actual parameter bounds, not the one retrieved from the parameter symbols. --- .../tools/dotc/core/TypeApplications.scala | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index b86c58030c91..db7550402e82 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -92,12 +92,7 @@ object TypeApplications { object EtaExpansion { def apply(tycon: TypeRef)(implicit ctx: Context) = { assert(tycon.isEtaExpandable) - val tparams = tycon.typeParams - val variances = tycon.typeParams.map(_.variance) - TypeLambda( - tparams.map(_.variance), - tycon.paramBounds.map(internalize(_, tycon.typeParams)), - rt => tycon.appliedTo(argRefs(rt, tparams.length))) + tycon.EtaExpand(tycon.typeParams) } def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = { @@ -164,10 +159,6 @@ object TypeApplications { } } - private def internalize[T <: Type](tp: T, tparams: List[Symbol])(implicit ctx: Context) = - (rt: RefinedType) => - new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)).apply(tp).asInstanceOf[T] - /** Adapt all arguments to possible higher-kinded type parameters using adaptIfHK */ def adaptArgs(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = @@ -235,9 +226,6 @@ class TypeApplications(val self: Type) extends AnyVal { final def hkTypeParams(implicit ctx: Context): List[TypeSymbol] = self.LambdaTrait.typeParams - final def paramBounds(implicit ctx: Context): List[TypeBounds] = - typeParams.map(self.memberInfo(_).bounds) - /** The Lambda trait underlying a type lambda */ def LambdaTrait(implicit ctx: Context): Symbol = self.stripTypeVar match { case RefinedType(parent, tpnme.hkApply) => @@ -262,11 +250,15 @@ class TypeApplications(val self: Type) extends AnyVal { * type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U) */ def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { + def internalize[T <: Type](tp: T) = + (rt: RefinedType) => + new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) + .apply(tp).asInstanceOf[T] def expand(tp: Type) = { TypeLambda( tparams.map(_.variance), - tparams.map(tparam => internalize(tparam.info.bounds, tparams)), - internalize(tp, tparams)) + tparams.map(tparam => internalize(self.memberInfo(tparam).bounds)), + internalize(tp)) } self match { case self: TypeAlias => From f6d1153e193045ad4d8a1564e3c0cdb49b853a5b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 6 Dec 2015 12:44:00 +0100 Subject: [PATCH 26/41] Avoid false positives when extracting AppliedType --- src/dotty/tools/dotc/core/TypeApplications.scala | 8 +++++++- src/dotty/tools/dotc/core/Types.scala | 12 +++++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index db7550402e82..ab15b3e1a8d5 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -133,7 +133,13 @@ object TypeApplications { def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { case TypeRef(prefix, tpnme.hkApply) => unapp(prefix) - case _ => unapp(tp) + case _ => + unapp(tp) match { + case 
Some((tycon: TypeRef, _)) if tycon.symbol.isLambdaTrait => + // We are seeing part of a lambda abstraction, not an applied type + None + case x => x + } } private def unapp(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 54e6397bbcf0..b5d5c864bc2b 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1899,7 +1899,17 @@ object Types { override def underlying(implicit ctx: Context) = parent - private def checkInst(implicit ctx: Context): this.type = this + private def badInst = + throw new AssertionError(s"bad instantiation: $this") + + def checkInst(implicit ctx: Context): this.type = { + if (refinedName == tpnme.hkApply) + parent.stripTypeVar match { + case RefinedType(_, name) if name.isHkArgName => // ok + case _ => badInst + } + this + } def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this From 378b96fc15eda32d5b645aa6b02442509b209147 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 6 Dec 2015 12:45:00 +0100 Subject: [PATCH 27/41] Fix tasty test failures by bringing homogenization of # $Apply projections back. --- src/dotty/tools/dotc/printing/PlainPrinter.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 857a32cccdc3..9e5ab5d8ccbe 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -4,7 +4,7 @@ package printing import core._ import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, Denotations._ import Contexts.Context, Scopes.Scope, Denotations.Denotation, Annotations.Annotation -import StdNames.nme +import StdNames.{nme, tpnme} import ast.Trees._, ast._ import java.lang.Integer.toOctalString import config.Config.summarizeDepth @@ -49,11 +49,11 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp1) & homogenize(tp2) case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2) + case tp @ TypeRef(_, tpnme.hkApply) => + val tp1 = tp.reduceProjection + if (tp1 eq tp) tp else homogenize(tp1) case _ => tp - /*val tp1 = tp.simplifyApply - if (tp1 eq tp) tp else homogenize(tp1) - */ } else tp From b8c6e73eb970894c12eb11f3b99123f502d52ae7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 6 Dec 2015 12:49:57 +0100 Subject: [PATCH 28/41] Move failing test to pending Seems to be a hk-type inference issue. Needs further investigation but is not high priority right now. 
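The failing case has the usual shape of higher-kinded inference problems: a type constructor parameter must be solved against an applied type, much as in the t2693 example above. A schematic sketch of that shape, with made-up names rather than the actual t3152 source:

    object HkInferSketch {
      class Functor[F[_]]

      // Accepting the call below means solving ?F[?A] against List[Int],
      // i.e. instantiating F := List and A := Int. How well this works
      // depends on the hk-inference scheme being reworked in these patches.
      def mapped[F[_], A](fa: F[A])(implicit ev: Functor[F]): F[A] = fa

      implicit val listFunctor: Functor[List] = new Functor[List]

      val ok = mapped(List(1, 2, 3))
    }
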
--- tests/{ => pending}/pos/t3152.scala | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{ => pending}/pos/t3152.scala (100%) diff --git a/tests/pos/t3152.scala b/tests/pending/pos/t3152.scala similarity index 100% rename from tests/pos/t3152.scala rename to tests/pending/pos/t3152.scala From 7f6decfe9b30396fb788a16639eb9e4700f532b0 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 6 Dec 2015 15:36:34 +0100 Subject: [PATCH 29/41] Code and documentation cleanups --- .../tools/dotc/core/TypeApplications.scala | 407 +++++------------- 1 file changed, 107 insertions(+), 300 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index ab15b3e1a8d5..684e836330d4 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -107,12 +107,6 @@ object TypeApplications { tp match { case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args)) if argsAreForwarders(args, tp.typeParams.length) => Some(fn) - //case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args)) => - // println(i"eta expansion failed because args $args are not forwarders for ${tp.toString}") - // None - //case TypeLambda(_, argBounds, _) => - // println(i"eta expansion failed because body is not applied type") - // None case _ => None } } @@ -210,9 +204,14 @@ class TypeApplications(val self: Type) extends AnyVal { Nil else tsym.info.typeParams case self: RefinedType => - val hkParams = self.hkTypeParams - if (hkParams.nonEmpty) hkParams - else self.parent.typeParams.filterNot(_.name == self.refinedName) + // inlined and optimized version of + // val sym = self.LambdaTrait + // if (sym.exists) return sym.typeParams + if (self.refinedName == tpnme.hkApply) { + val sym = self.parent.classSymbol + if (sym.isLambdaTrait) return sym.typeParams + } + self.parent.typeParams.filterNot(_.name == self.refinedName) case self: SingletonType => Nil case self: TypeProxy => @@ -222,16 +221,6 @@ class TypeApplications(val self: Type) extends AnyVal { } } - /** The higherkinded type parameters in case this is a type lambda - * - * [X1, ..., Xn] -> T - * - * These are the parameters of the underlying lambda class. - * Returns `Nil` for all other types. - */ - final def hkTypeParams(implicit ctx: Context): List[TypeSymbol] = - self.LambdaTrait.typeParams - /** The Lambda trait underlying a type lambda */ def LambdaTrait(implicit ctx: Context): Symbol = self.stripTypeVar match { case RefinedType(parent, tpnme.hkApply) => @@ -241,25 +230,56 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => NoSymbol } - /** A type ref is eta expandable if it refers to a non-lambda class. - * In that case we can look for parameterized base types fo the type - * to eta expand them. - */ - def isEtaExpandable(implicit ctx: Context) = self match { - case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitName + /** Is receiver type higher-kinded (i.e. of kind != "*")? */ + def isHK(implicit ctx: Context): Boolean = self.dealias match { + case self: TypeRef => self.info.isHK + case RefinedType(_, name) => name == tpnme.hkApply + case TypeBounds(_, hi) => hi.isHK + case _ => false + } + + /** is receiver of the form T#$Apply? */ + def isHKApply: Boolean = self match { + case TypeRef(_, name) => name == tpnme.hkApply case _ => false } + /** True if it can be determined without forcing that the class symbol + * of this application exists and is not a lambda trait. 
+ * Equivalent to + * + * self.classSymbol.exists && !self.classSymbol.isLambdaTrait + * + * but without forcing anything. + */ + def classNotLambda(implicit ctx: Context): Boolean = self.stripTypeVar match { + case self: RefinedType => + self.parent.classNotLambda + case self: TypeRef => + self.denot.exists && { + val sym = self.symbol + if (sym.isClass) !sym.isLambdaTrait + else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.classNotLambda + } + case _ => + false + } + /** Lambda abstract `self` with given type parameters. Examples: * * type T[X] = U becomes type T = [X] -> U * type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U) */ def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { + + /** Replace references to type parameters with references to hk arguments `this.$hk_i` + * Care is needed not to cause cycles, hence `SafeSubstMap`. + */ def internalize[T <: Type](tp: T) = (rt: RefinedType) => new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) .apply(tp).asInstanceOf[T] + def expand(tp: Type) = { TypeLambda( tparams.map(_.variance), @@ -274,15 +294,41 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => expand(self) } } -/* - def betaReduce(implicit ctx: Context): Type = self.stripTypeVar match { - case TypeRef(prefix, tpnme.hkApply) => - prefix.betaReduce - case self @ RefinedType(parent, tpnme.hkArg) if parent.isTypeLambda => - HKApplication(parent, self.refinedInfo.dropAlias) + + /** A type ref is eta expandable if it refers to a non-lambda class. + * In that case we can look for parameterized base types of the type + * to eta expand them. + */ + def isEtaExpandable(implicit ctx: Context) = self match { + case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitName + case _ => false + } + + /** Convert a type constructor `TC` which has type parameters `T1, ..., Tn` + * in a context where type parameters `U1,...,Un` are expected to + * + * LambdaXYZ { Apply = TC[hk$0, ..., hk$n] } + * + * Here, XYZ corresponds to the variances of + * - `U1,...,Un` if the variances of `T1,...,Tn` are pairwise compatible with `U1,...,Un`, + * - `T1,...,Tn` otherwise. + * v1 is compatible with v2, if v1 = v2 or v2 is non-variant. + */ + def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = { + val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParams + self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse) + //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") + } + + /** Eta expand the prefix in front of any refinements. */ + def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match { + case self: RefinedType => + self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo) + case _ => + self.EtaExpand(self.typeParams) } -*/ - /** Adapt argument A to type parameter P in the case P is higher-kinded. + + /** Adapt argument A to type parameter P in the case P is higher-kinded. * This means: * (1) Make sure that A is a type lambda, if necessary by eta-expanding it. * (2) Make sure the variances of the type lambda @@ -331,77 +377,21 @@ class TypeApplications(val self: Type) extends AnyVal { } } - /* - /** If type `self` is equal, aliased-to, or upperbounded-by a type of the form - * `LambdaXYZ { ... }`, the class symbol of that type, otherwise NoSymbol. - * symbol of that type, otherwise NoSymbol. - * @param forcing if set, might force completion. 
If not, never forces - * but returns NoSymbol when it would have to otherwise. - */ - def LambdaClass(forcing: Boolean)(implicit ctx: Context): Symbol = track("LambdaClass") { self.stripTypeVar match { - case self: TypeRef => - val sym = self.symbol - if (sym.isLambdaTrait) sym - else if (sym.isClass || sym.isCompleting && !forcing) NoSymbol - else self.info.LambdaClass(forcing) - case self: TypeProxy => - self.underlying.LambdaClass(forcing) - case _ => - NoSymbol - }} - - /** Is type `self` equal, aliased-to, or upperbounded-by a type of the form - * `LambdaXYZ { ... }`? - */ - def isLambda(implicit ctx: Context): Boolean = - LambdaClass(forcing = true).exists - - /** Same is `isLambda`, except that symbol denotations are not forced - * Symbols in completion count as not lambdas. - */ - def isSafeLambda(implicit ctx: Context): Boolean = - LambdaClass(forcing = false).exists - - /** Is type `self` a Lambda with all hk$i fields fully instantiated? */ - def isInstantiatedLambda(implicit ctx: Context): Boolean = - isSafeLambda && typeParams.isEmpty -*/ - /** Is receiver type higher-kinded (i.e. of kind != "*")? */ - def isHK(implicit ctx: Context): Boolean = self.dealias match { - case self: TypeRef => self.info.isHK - case RefinedType(_, name) => name == tpnme.hkApply - case TypeBounds(_, hi) => hi.isHK - case _ => false - } - - /** is receiver of the form T#$apply? */ - def isHKApply: Boolean = self match { - case TypeRef(_, name) => name == tpnme.hkApply - case _ => false - } - - /** True if it can be determined without forcing that the class symbol - * of this application exists and is not a lambda trait. - * Equivalent to + /** Encode * - * self.classSymbol.exists && !self.classSymbol.isLambdaTrait + * T[U1, ..., Un] * - * but without forcing anything. + * where + * @param self = `T` + * @param args = `U1,...,Un` + * performing the following simplifications + * + * 1. If `T` is an eta expansion `[X1,..,Xn] -> C[X1,...,Xn]` of class `C` compute + * `C[U1, ..., Un]` instead. + * 2. If `T` is some other type lambda `[X1,...,Xn] -> S` none of the arguments + * `U1,...,Un` is a wildcard, compute `[X1:=U1, ..., Xn:=Un]S` instead. + * 3. If `T` is a polytype, instantiate it to `U1,...,Un`. */ - def classNotLambda(implicit ctx: Context): Boolean = self.stripTypeVar match { - case self: RefinedType => - self.parent.classNotLambda - case self: TypeRef => - self.denot.exists && { - val sym = self.symbol - if (sym.isClass) !sym.isLambdaTrait - else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.classNotLambda - } - case _ => - false - } - - /** Encode the type resulting from applying this type to given arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { def substHkArgs = new TypeMap { def apply(tp: Type): Type = tp match { @@ -424,7 +414,12 @@ class TypeApplications(val self: Type) extends AnyVal { } } - def appliedTo(args: List[Type], typParams: List[TypeSymbol])(implicit ctx: Context): Type = { + /** Encode application `T[U1, ..., Un]` without simplifications, where + * @param self = `T` + * @param args = `U1, ..., Un` + * @param tparams are assumed to be the type parameters of `T`. 
+ */ + final def appliedTo(args: List[Type], typParams: List[TypeSymbol])(implicit ctx: Context): Type = { def matchParams(t: Type, tparams: List[TypeSymbol], args: List[Type])(implicit ctx: Context): Type = args match { case arg :: args1 => try { @@ -447,10 +442,14 @@ class TypeApplications(val self: Type) extends AnyVal { } } - final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil) final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil) + /** A cycle-safe version of `appliedTo` where computing type parameters do not force + * the typeconstructor. Instead, if the type constructor is completing, we make + * up hk type parameters matching the arguments. This is needed when unpickling + * Scala2 files such as `scala.collection.generic.Mapfactory`. + */ final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = { val safeTypeParams = self match { case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => @@ -462,18 +461,6 @@ class TypeApplications(val self: Type) extends AnyVal { } appliedTo(args, safeTypeParams) } -/* - /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`. - */ - def simplifyApply(implicit ctx: Context): Type = self match { - case self @ TypeRef(prefix, tpnme.hkApply) if prefix.isInstantiatedLambda => - prefix.member(tpnme.hkApply).info match { - case TypeAlias(alias) => alias - case _ => self - } - case _ => self - } -*/ /** Turn this type, which is used as an argument for * type parameter `tparam`, into a TypeBounds RHS @@ -488,7 +475,7 @@ class TypeApplications(val self: Type) extends AnyVal { else TypeAlias(self, v) } - /** The type arguments of this type's base type instance wrt.`base`. + /** The type arguments of this type's base type instance wrt. `base`. * Existential types in arguments are returned as TypeBounds instances. */ final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] = @@ -616,6 +603,7 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => NoType } + /** If this is a type alias, its underlying type, otherwise the type itself */ def dropAlias(implicit ctx: Context): Type = self match { case TypeAlias(alias) => alias case _ => self @@ -628,6 +616,9 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => firstBaseArgInfo(defn.SeqClass) } + /** Does this type contain RefinedThis type with `target` as its underling + * refinement type? + */ def containsRefinedThis(target: Type)(implicit ctx: Context): Boolean = { def recur(tp: Type): Boolean = tp.stripTypeVar match { case RefinedThis(tp) => @@ -651,188 +642,4 @@ class TypeApplications(val self: Type) extends AnyVal { } recur(self) } -/* - /** The typed lambda abstraction of this type `T` relative to `boundSyms`. - * This is: - * - * LambdaXYZ{ bounds }{ type Apply = toHK(T) } - * - * where - * - XYZ reflects the variances of the bound symbols, - * - `bounds` consists of type declarations `type hk$i >: toHK(L) <: toHK(U), - * one for each type parameter in `T` with non-trivial bounds L,U. - * - `toHK` is a substitution that replaces every bound symbol sym_i by - * `this.hk$i`. - * - * TypeBounds are lambda abstracting by lambda abstracting their upper bound. - * - * @param cycleParanoid If `true` don't force denotation of a TypeRef unless - * its name matches one of `boundSyms`. Needed to avoid cycles - * involving F-boundes hk-types when reading Scala2 collection classes - * with new hk-scheme. 
- */ - def LambdaAbstract(boundSyms: List[Symbol], cycleParanoid: Boolean = false)(implicit ctx: Context): Type = { - def expand(tp: Type): Type = { - val lambda = defn.LambdaTrait(boundSyms.map(_.variance)) - def toHK(tp: Type) = (rt: RefinedType) => { - val argRefs = boundSyms.indices.toList.map(i => - RefinedThis(rt).select(tpnme.hkArg(i))) - val substituted = - if (cycleParanoid) new ctx.SafeSubstMap(boundSyms, argRefs).apply(tp) - else tp.subst(boundSyms, argRefs) - substituted.bounds.withVariance(1) - } - val boundNames = new mutable.ListBuffer[Name] - val boundss = new mutable.ListBuffer[TypeBounds] - for (sym <- boundSyms) { - val bounds = sym.info.bounds - if (!(TypeBounds.empty frozen_<:< bounds)) { - boundNames += sym.name - boundss += bounds - } - } - val lambdaWithBounds = - RefinedType.make(lambda.typeRef, boundNames.toList, boundss.toList.map(toHK)) - RefinedType(lambdaWithBounds, tpnme.hkApply, toHK(tp)) - } - self match { - case self @ TypeBounds(lo, hi) => - self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi))) - case _ => - expand(self) - } - } -*/ - /** Convert a type constructor `TC` which has type parameters `T1, ..., Tn` - * in a context where type parameters `U1,...,Un` are expected to - * - * LambdaXYZ { Apply = TC[hk$0, ..., hk$n] } - * - * Here, XYZ corresponds to the variances of - * - `U1,...,Un` if the variances of `T1,...,Tn` are pairwise compatible with `U1,...,Un`, - * - `T1,...,Tn` otherwise. - * v1 is compatible with v2, if v1 = v2 or v2 is non-variant. - */ - def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = { - val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParams - self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse) - //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") - } -/* - /** Eta expand if `bound` is a higher-kinded type */ - def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = - if (bound.isHK && !isHK && self.typeSymbol.isClass && typeParams.nonEmpty) EtaExpand(bound.typeParams) - else self -*/ - /** Eta expand the prefix in front of any refinements. */ - def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match { - case self: RefinedType => - self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo) - case _ => - self.EtaExpand(self.typeParams) - } -/* - /** If `self` is a (potentially partially instantiated) eta expansion of type T, return T, - * otherwise NoType. More precisely if `self` is of the form - * - * T { type $apply = U[T1, ..., Tn] } - * - * where - * - * - hk$0, ..., hk${m-1} are the type parameters of T - * - a sublist of the arguments Ti_k (k = 0,...,m_1) are of the form T{...}.this.hk$i_k - * - * rewrite `self` to - * - * U[T'1,...T'j] - * - * where - * - * T'j = _ >: Lj <: Uj if j is in the i_k list defined above - * where Lj and Uj are the bounds of hk$j mapped using `fromHK`. - * = fromHK(Tj) otherwise. 
- * - * `fromHK` is the function that replaces every occurrence of `.this.hk$i` by the - * corresponding parameter reference in `U[T'1,...T'j]` - */ - def EtaReduce(implicit ctx: Context): Type = { - def etaCore(tp: Type, tparams: List[Symbol]): Type = tparams match { - case Nil => tp - case tparam :: otherParams => - tp match { - case tp: RefinedType => - tp.refinedInfo match { - case TypeAlias(TypeRef(RefinedThis(rt), rname)) - if (rname == tparam.name) && (rt eq self) => - // we have a binding T = Lambda$XYZ{...}.this.hk$i where hk$i names the current `tparam`. - val pcore = etaCore(tp.parent, otherParams) - val hkBounds = self.member(rname).info.bounds - if (TypeBounds.empty frozen_<:< hkBounds) pcore - else tp.derivedRefinedType(pcore, tp.refinedName, hkBounds) - case _ => - val pcore = etaCore(tp.parent, tparams) - if (pcore.exists) tp.derivedRefinedType(pcore, tp.refinedName, tp.refinedInfo) - else NoType - } - case _ => - NoType - } - } - // Map references `Lambda$XYZ{...}.this.hk$i to corresponding parameter references of the reduced core. - def fromHK(reduced: Type) = reduced match { - case reduced: RefinedType => - new TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(RefinedThis(binder), name) if binder eq self => - assert(name.isHkArgName) - RefinedThis(reduced).select(reduced.typeParams.apply(name.hkArgIndex)) - case _ => - mapOver(tp) - } - }.apply(reduced) - case _ => - reduced - } - - self match { - case self @ RefinedType(parent, tpnme.hkApply) => - val lc = parent.LambdaClass(forcing = false) - self.refinedInfo match { - case TypeAlias(alias) if lc.exists => - fromHK(etaCore(alias, lc.typeParams.reverse)) - case _ => NoType - } - case _ => NoType - } - } - - /** Test whether this type has a base type of the form `B[T1, ..., Tn]` where - * the type parameters of `B` match one-by-one the variances of `tparams`, - * and where the lambda abstracted type - * - * LambdaXYZ { type Apply = B[hk$0, ..., hk${n-1}] } - * { type hk$0 = T1; ...; type hk${n-1} = Tn } - * - * satisfies predicate `p`. Try base types in the order of their occurrence in `baseClasses`. - * A type parameter matches a variance V if it has V as its variance or if V == 0. - * @param classBounds A hint to bound the search. Only types that derive from one of the - * classes in classBounds are considered. - */ - def testLifted(tparams: List[Symbol], p: Type => Boolean, classBounds: List[ClassSymbol] = Nil)(implicit ctx: Context): Boolean = { - def recur(bcs: List[ClassSymbol]): Boolean = bcs match { - case bc :: bcs1 => - val baseRef = self.baseTypeRef(bc) - def variancesMatch(param1: Symbol, param2: Symbol) = - param2.variance == param2.variance || param2.variance == 0 - (classBounds.exists(bc.derivesFrom) && - baseRef.typeParams.corresponds(tparams)(variancesMatch) && - p(baseRef.appliedTo(self.baseArgInfos(bc))) - || - recur(bcs1)) - case nil => - false - } - classBounds.nonEmpty && recur(self.baseClasses) - } - */ } From 169c8dc0dd06349b50bc8c059566d5b66314a133 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 6 Dec 2015 16:21:54 +0100 Subject: [PATCH 30/41] Cleanup of RefinedType subtype comparison. 
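A source-level sketch of the kind of comparison that exercises the eta-expansion fallback kept below (the names are illustrative only, this is not a test from the series):

    trait Bldr {
      type CC[X] <: Iterable[X]    // higher-kinded abstract member, its bound is hk
    }
    class ListBldr extends Bldr {
      // overriding the alias needs  [X] -> List[X]  <:  [X] -> Iterable[X];
      // the class constructor List gets eta-expanded for that comparison
      type CC[X] = List[X]
    }

The cleanup keeps that fallback, but only tries it after compareRefinedSlow and fourthTry.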
--- src/dotty/tools/dotc/core/TypeComparer.scala | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index bdabca8df675..866c0112680c 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -338,25 +338,23 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isSubType(tp1, tp2.parent) && (name2 == nme.WILDCARD || hasMatchingMember(name2, tp1, tp2)) } + def etaExpandedSubType(tp1: Type) = + isSubType(tp1.typeConstructor.EtaExpand(tp2.typeParams), tp2) def compareRefined: Boolean = { val tp1w = tp1.widen val skipped2 = skipMatching(tp1w, tp2) - if ((skipped2 eq tp2) || !Config.fastPathForRefinedSubtype) { - val normalPath = tp1 match { + if ((skipped2 eq tp2) || !Config.fastPathForRefinedSubtype) + tp1 match { case tp1: AndType => // Delay calling `compareRefinedSlow` because looking up a member // of an `AndType` can lead to a cascade of subtyping checks // This twist is needed to make collection/generic/ParFactory.scala compile fourthTry(tp1, tp2) || compareRefinedSlow case _ => - compareRefinedSlow || fourthTry(tp1, tp2) + compareRefinedSlow || + fourthTry(tp1, tp2) || + needsEtaLift(tp1, tp2) && testLifted(tp1, tp2, tp2.typeParams, etaExpandedSubType) } - def etaExpandedSubType(tp1: Type) = - isSubType(tp1.typeConstructor.EtaExpand(tp2.typeParams), tp2) - normalPath || - needsEtaLift(tp1, tp2) && - testLifted(tp1, tp2, tp2.typeParams, etaExpandedSubType) - } else // fast path, in particular for refinements resulting from parameterization. isSubType(tp1, skipped2) && isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) From 1c77b03cb9909c3a1bf4477e49e58d81b25f47b2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 10 Dec 2015 11:17:33 +0100 Subject: [PATCH 31/41] Fix TypeLambda extractor. As remarked by @smarter, argInfos does not work for type lambdas, so argBoundss is always Nil. --- src/dotty/tools/dotc/core/TypeApplications.scala | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 684e836330d4..dabd2f2cd92e 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -70,10 +70,18 @@ object TypeApplications { } def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match { - case app @ RefinedType(prefix, tpnme.hkApply) => - val cls = prefix.typeSymbol + case app @ RefinedType(parent, tpnme.hkApply) => + val cls = parent.typeSymbol val variances = cls.typeParams.map(_.variance) - val argBounds = prefix.argInfos.map(_.bounds) + def collectBounds(t: Type, acc: List[TypeBounds]): List[TypeBounds] = t match { + case t @ RefinedType(p, rname) => + assert(rname.isHkArgName) + collectBounds(p, t.refinedInfo.bounds :: acc) + case TypeRef(_, lname) => + assert(lname.isLambdaTraitName) + acc + } + val argBounds = collectBounds(parent, Nil) Some((variances, argBounds, app.refinedInfo.argInfo)) case _ => None From fd44a171340a87359e267aeb63072bc5a8873b10 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 11 Dec 2015 09:56:40 +0100 Subject: [PATCH 32/41] Disallow hk type parameters in lower bounds. Also: various cleanups to comments. 
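Under the current encoding, LambdaAbstract only abstracts the upper bound (type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U)), so a lower bound that mentions the type parameter has no direct representation; checkLowerNotHK below rejects such definitions. Roughly (the error cases themselves are covered by the new neg test hklower.scala):

    class Outer {
      type T[X]                  // abstract type constructor: fine
      type Alias[X] = T[X]       // alias: fine
      type Upper[X] <: T[X]      // parameter only in the upper bound: fine
      // type Lower[X] >: T[X]   // now rejected: X occurs in the lower bound
    }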
--- src/dotty/tools/dotc/core/TypeComparer.scala | 4 ++-- src/dotty/tools/dotc/typer/Applications.scala | 6 +----- src/dotty/tools/dotc/typer/Checking.scala | 12 ++++++++++++ src/dotty/tools/dotc/typer/Namer.scala | 1 - src/dotty/tools/dotc/typer/Typer.scala | 1 + test/dotc/tests.scala | 1 + tests/neg/hklower.scala | 11 +++++++++++ 7 files changed, 28 insertions(+), 8 deletions(-) create mode 100644 tests/neg/hklower.scala diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 866c0112680c..bf82bfe099eb 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -550,7 +550,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * * (2) Try to eta expand the constructor of `other`. * - * (3a) In mode `TypeVarsMissConetxt` replace the projection's hk constructor parameter + * (3a) In mode `TypevarsMissConetxt` replace the projection's hk constructor parameter * by the eta expansion of step (2) reapplied to the projection's arguments. * (3b) In normal mode, try to unify the projection's hk constructor parameter with * the eta expansion of step(2) @@ -573,7 +573,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { else (tryInstantiate(param, EtaExpansion(tycon)), projection) ok && - (if (inOrder) isSubType(projection1, other) else isSubType(other, projection1)) // ### move out? + (if (inOrder) isSubType(projection1, other) else isSubType(other, projection1)) case _ => false } diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index d119658be078..c6053d0fd6e8 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -613,12 +613,8 @@ trait Applications extends Compatibility { self: Typer => assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } - def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = { - //val was = tree.tpe.EtaExpandIfHK(bound) - //val now = tree.tpe.adaptIfHK(bound) // ### - //if (was != now) println(i"diff adapt ${tree.tpe} to $bound, was: $was, now: $now") + def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = tree.withType(tree.tpe.adaptIfHK(bound)) - } /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. 
*/ def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 9dfe462329ee..57032c4d9ecb 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -401,6 +401,17 @@ trait Checking { errorTree(tpt, d"missing type parameter for ${tpt.tpe}") } else tpt + + def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) = + if (tparams.nonEmpty) + sym.info match { + case info: TypeAlias => // ok + case TypeBounds(lo, _) => + for (tparam <- tparams) + if (tparam.typeRef.occursIn(lo)) + ctx.error(i"type parameter ${tparam.name} may not occur in lower bound $lo", pos) + case _ => + } } trait NoChecking extends Checking { @@ -414,4 +425,5 @@ trait NoChecking extends Checking { override def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = () override def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context) = () override def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree = tpt + override def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) = () } diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index eecbd13474df..fedbc98b8e42 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -845,7 +845,6 @@ class Namer { typer: Typer => * of arguments in F-bounds, because the recursive type was initialized with * TypeBounds.empty. */ - // ### Check whether this is still needed! def etaExpandArgs(implicit ctx: Context) = new TypeMap { def apply(tp: Type): Type = tp match { case tp: RefinedType => diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 0d7db4f9ab6c..08ed04286b17 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -930,6 +930,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(implicit ctx: Context): Tree = track("typedTypeDef") { val TypeDef(name, rhs) = tdef + checkLowerNotHK(sym, tdef.tparams.map(symbolOfTree), tdef.pos) completeAnnotations(tdef, sym) val _ = typedType(rhs) // unused, typecheck only to remove from typedTree assignType(cpy.TypeDef(tdef)(name, TypeTree(sym.info), Nil), sym) diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index de4705dac1e6..5993f4064efb 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -147,6 +147,7 @@ class tests extends CompilerTest { @Test def neg_cycles = compileFile(negDir, "cycles", xerrors = 8) @Test def neg_boundspropagation = compileFile(negDir, "boundspropagation", xerrors = 5) @Test def neg_refinedSubtyping = compileFile(negDir, "refinedSubtyping", xerrors = 2) + @Test def neg_hklower = compileFile(negDir, "hklower", xerrors = 3) @Test def neg_i0091_infpaths = compileFile(negDir, "i0091-infpaths", xerrors = 3) @Test def neg_i0248_inherit_refined = compileFile(negDir, "i0248-inherit-refined", xerrors = 4) @Test def neg_i0281 = compileFile(negDir, "i0281-null-primitive-conforms", xerrors = 3) diff --git a/tests/neg/hklower.scala b/tests/neg/hklower.scala new file mode 100644 index 000000000000..e29a1545e8d1 --- /dev/null +++ b/tests/neg/hklower.scala @@ -0,0 +1,11 @@ +class Test { + + type T[X] // OK + type U[X] = T[X] // OK + + type V[X] >: T[X] // error + type W[X] >: T[X] <: 
T[X] // error + + def f[C[X] >: T[X]]() = ??? // error + +} From 6cb2a3bcee8bc4fb9ae4d148c8e620d3ae41040f Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 11 Dec 2015 10:31:10 +0100 Subject: [PATCH 33/41] Remove extraneous match --- .../tools/dotc/core/tasty/TreePickler.scala | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 8fccb89731b5..d11d6f4b737b 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -185,16 +185,12 @@ class TreePickler(pickler: TastyPickler) { pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix) } case tpe: NamedType => - tpe match { - case _ => - if (isLocallyDefined(tpe.symbol)) { - writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) - pickleSymRef(tpe.symbol); pickleType(tpe.prefix) - } - else { - writeByte(if (tpe.isType) TYPEREF else TERMREF) - pickleName(tpe.name); pickleType(tpe.prefix) - } + if (isLocallyDefined(tpe.symbol)) { + writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) + pickleSymRef(tpe.symbol); pickleType(tpe.prefix) + } else { + writeByte(if (tpe.isType) TYPEREF else TERMREF) + pickleName(tpe.name); pickleType(tpe.prefix) } case tpe: ThisType => if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot) From b8e05d5b955de664e4a5872f43c8184014cecf05 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 11 Dec 2015 13:48:51 +0100 Subject: [PATCH 34/41] Add isBottomType/Class tests to Definitions --- src/dotty/tools/dotc/core/Definitions.scala | 6 ++++++ src/dotty/tools/dotc/typer/Inferencing.scala | 5 +---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 650cfe79c7f8..5f794f2d51de 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -588,6 +588,12 @@ class Definitions { name.startsWith(prefix) && name.drop(prefix.length).forall(_.isDigit) } + def isBottomClass(cls: Symbol) = cls == NothingClass || cls == NullClass + def isBottomType(tp: Type) = tp match { + case tp: TypeRef => isBottomClass(tp.symbol) + case _ => false + } + def isFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.Function) def isAbstractFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.AbstractFunction) def isTupleClass(cls: Symbol) = isVarArityClass(cls, tpnme.Tuple) diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index 1e8dcf4b27a1..9717b5625d05 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -89,7 +89,7 @@ object Inferencing { val minimize = variance >= 0 && !( force == ForceDegree.noBottom && - isBottomType(ctx.typeComparer.approximation(tvar.origin, fromBelow = true))) + defn.isBottomType(ctx.typeComparer.approximation(tvar.origin, fromBelow = true))) if (minimize) instantiate(tvar, fromBelow = true) else toMaximize = true } @@ -173,9 +173,6 @@ object Inferencing { approxAbove - approxBelow } - def isBottomType(tp: Type)(implicit ctx: Context) = - tp == defn.NothingType || tp == defn.NullType - /** Recursively widen and also follow type declarations and type aliases. 
*/ def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match { case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi) From 88f24efb2858f46c146214bc7e51f5de17c31bc0 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 11 Dec 2015 14:39:41 +0100 Subject: [PATCH 35/41] Allow bottom types as hk type arguments Fixes problem raised in #966. --- src/dotty/tools/dotc/core/TypeApplications.scala | 10 +++++++--- src/dotty/tools/dotc/core/Types.scala | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index dabd2f2cd92e..b2f22da209cf 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -328,10 +328,14 @@ class TypeApplications(val self: Type) extends AnyVal { //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") } - /** Eta expand the prefix in front of any refinements. */ - def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match { + /** Eta expand the prefix in front of any refinements. + * @param tparamsForBottom Type parameters to use if core is a bottom type + */ + def EtaExpandCore(tparamsForBottom: List[TypeSymbol])(implicit ctx: Context): Type = self.stripTypeVar match { case self: RefinedType => - self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo) + self.derivedRefinedType(self.parent.EtaExpandCore(tparamsForBottom), self.refinedName, self.refinedInfo) + case tp: TypeRef if defn.isBottomClass(tp.symbol) => + self.LambdaAbstract(tparamsForBottom) case _ => self.EtaExpand(self.typeParams) } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index b5d5c864bc2b..a7c457d8fecc 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1517,7 +1517,7 @@ object Types { // After substitution we might end up with a type like // `C { type hk$0 = T0; ...; type hk$n = Tn } # $Apply` // where C is a class. In that case we eta expand `C`. - derivedSelect(prefix.EtaExpandCore) + derivedSelect(prefix.EtaExpandCore(this.prefix.typeConstructor.typeParams)) } else newLikeThis(prefix) } From b350d209a20ebdf583d1dd2d3fdcd3be5073d2ef Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 13 Dec 2015 13:21:54 +0100 Subject: [PATCH 36/41] Perform variance adaptation only when needed in isSubType Previously adaptIfHK was performed on every type application. This made t3152 fail. We now do this only on demand, in isSubType. t3152 now passes again. But the change unmasked another error, which makes Iter2 fail to compile. 
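The overriding scenario that adaptHkVariances (below) has to keep accepting can be sketched at source level as follows; the standard collection classes are only stand-ins here, this is not a test from the series:

    import scala.collection.Iterable
    import scala.collection.mutable.ListBuffer

    class Companion[+CC[X]]              // CC's own parameter X is non-variant
    trait Coll {
      def companion: Companion[Iterable] = new Companion[Iterable]               // Iterable is covariant
    }
    class Buf extends Coll {
      override def companion: Companion[ListBuffer] = new Companion[ListBuffer]  // ListBuffer is invariant
    }

Accepting the override means ignoring the variance difference between the eta-expansions of ListBuffer and Iterable; after this patch that adjustment happens on demand in isSubType instead of at every type application.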
--- .../tools/dotc/core/TypeApplications.scala | 59 +++++++++++-------- src/dotty/tools/dotc/core/TypeComparer.scala | 12 +++- .../core/unpickleScala2/Scala2Unpickler.scala | 2 +- src/dotty/tools/dotc/typer/Applications.scala | 2 +- src/dotty/tools/dotc/typer/Namer.scala | 2 +- tests/{pending => }/pos/t3152.scala | 0 6 files changed, 48 insertions(+), 29 deletions(-) rename tests/{pending => }/pos/t3152.scala (100%) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index b2f22da209cf..f7727779df3b 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -167,11 +167,11 @@ object TypeApplications { } } - /** Adapt all arguments to possible higher-kinded type parameters using adaptIfHK + /** Adapt all arguments to possible higher-kinded type parameters using etaExpandIfHK */ - def adaptArgs(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = + def etaExpandIfHK(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = if (tparams.isEmpty) args - else args.zipWithConserve(tparams)((arg, tparam) => arg.adaptIfHK(tparam.infoOrCompleter)) + else args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(tparam.infoOrCompleter)) def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) = List.range(0, n).map(i => RefinedThis(rt).select(tpnme.hkArg(i))) @@ -340,11 +340,21 @@ class TypeApplications(val self: Type) extends AnyVal { self.EtaExpand(self.typeParams) } - /** Adapt argument A to type parameter P in the case P is higher-kinded. - * This means: - * (1) Make sure that A is a type lambda, if necessary by eta-expanding it. - * (2) Make sure the variances of the type lambda - * agrees with variances of corresponding higherkinded type parameters. Example: + /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ + def etaExpandIfHK(bound: Type)(implicit ctx: Context): Type = { + val boundLambda = bound.LambdaTrait + val hkParams = boundLambda.typeParams + if (hkParams.isEmpty) self + else self match { + case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length => + EtaExpansion(self) + case _ => self + } + } + + /** If argument A and type parameter P are higher-kinded, adapt the variances + * of A to those of P, ensuring that the variances of the type lambda A + * agree with the variances of corresponding higherkinded type parameters of P. Example: * * class Companion[+CC[X]] * Companion[List] @@ -367,25 +377,26 @@ class TypeApplications(val self: Type) extends AnyVal { * and the second is not a subtype of the first. So if we have overridding memebrs of the two * types we get an error. 
*/ - def adaptIfHK(bound: Type)(implicit ctx: Context): Type = { + def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = { val boundLambda = bound.LambdaTrait val hkParams = boundLambda.typeParams if (hkParams.isEmpty) self - else self match { - case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length => - EtaExpansion(self).adaptIfHK(bound) - case _ => - def adaptArg(arg: Type): Type = arg match { - case arg: TypeRef - if arg.symbol.isLambdaTrait && - !arg.symbol.typeParams.corresponds(boundLambda.typeParams)(_.variance == _.variance) => - arg.prefix.select(boundLambda) - case arg: RefinedType => - arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo) - case _ => - arg - } - adaptArg(self) + else { + def adaptArg(arg: Type): Type = arg match { + case arg: TypeRef if arg.symbol.isLambdaTrait && + !arg.symbol.typeParams.corresponds(hkParams)(_.variance == _.variance) && + arg.symbol.typeParams.corresponds(hkParams)(varianceConforms) => + arg.prefix.select(boundLambda) + case arg: RefinedType => + arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo) + case arg @ TypeAlias(alias) => + arg.derivedTypeAlias(adaptArg(alias)) + case arg @ TypeBounds(lo, hi) => + arg.derivedTypeBounds(lo, adaptArg(hi)) + case _ => + arg + } + adaptArg(self) } } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index bf82bfe099eb..14b5a403cda0 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -703,11 +703,19 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * Further, no refinement refers back to the refined type via a refined this. * The precondition is established by `skipMatching`. */ - private def isSubRefinements(tp1: RefinedType, tp2: RefinedType, limit: Type): Boolean = - isSubType(tp1.refinedInfo, tp2.refinedInfo) && ( + private def isSubRefinements(tp1: RefinedType, tp2: RefinedType, limit: Type): Boolean = { + def hasSubRefinement(tp1: RefinedType, refine2: Type): Boolean = { + isSubType(tp1.refinedInfo, refine2) || { + // last effort: try to adapt variances of higher-kinded types if this is sound. 
+ val adapted2 = refine2.adaptHkVariances(tp1.parent.member(tp1.refinedName).symbol.info) + adapted2.ne(refine2) && hasSubRefinement(tp1, adapted2) + } + } + hasSubRefinement(tp1, tp2.refinedInfo) && ( (tp2.parent eq limit) || isSubRefinements( tp1.parent.asInstanceOf[RefinedType], tp2.parent.asInstanceOf[RefinedType], limit)) + } /** A type has been covered previously in subtype checking if it * is some combination of TypeRefs that point to classes, where the diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index b0e31202f11c..618e3ceeac43 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -715,7 +715,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else TypeRef(pre, sym.name.asTypeName) val args = until(end, readTypeRef) if (sym == defn.ByNameParamClass2x) ExprType(args.head) - else if (args.nonEmpty) tycon.safeAppliedTo(adaptArgs(sym.typeParams, args)) + else if (args.nonEmpty) tycon.safeAppliedTo(etaExpandIfHK(sym.typeParams, args)) else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams) else tycon case TYPEBOUNDStpe => diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index c6053d0fd6e8..941b35d71eee 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -614,7 +614,7 @@ trait Applications extends Compatibility { self: Typer => } def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = - tree.withType(tree.tpe.adaptIfHK(bound)) + tree.withType(tree.tpe.etaExpandIfHK(bound)) /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. */ def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index fedbc98b8e42..ca37614bf95d 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -853,7 +853,7 @@ class Namer { typer: Typer => val tycon = tp.withoutArgs(args) val tycon1 = this(tycon) val tparams = tycon.typeParams - val args1 = if (args.length == tparams.length) adaptArgs(tparams, args) else args + val args1 = if (args.length == tparams.length) etaExpandIfHK(tparams, args) else args if ((tycon1 eq tycon) && (args1 eq args)) tp else tycon1.appliedTo(args1) } else mapOver(tp) case _ => mapOver(tp) diff --git a/tests/pending/pos/t3152.scala b/tests/pos/t3152.scala similarity index 100% rename from tests/pending/pos/t3152.scala rename to tests/pos/t3152.scala From 2703543fb6e86603bae040fa4cf1ddf93498ef3f Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 13 Dec 2015 15:17:02 +0100 Subject: [PATCH 37/41] In isSubType, follow aliases only if prefix is a path. Comment explains why following aliases in general is incomplete and potentially unsound. This makes Iter2 compile, but causes cyclic reference errors for pos/sets.scala. 
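The distinction in source terms, as an illustrative sketch only (not part of the patch):

    object Paths {
      object Stable { type A = Int }
      class Box     { type A = Int }

      val x: Stable.A = 1            // the prefix Stable.type is a path: the alias can be followed
      def f(a: Box#A): Box#A = a     // Box#A is a true projection, i.e. ex(b: Box)b.A; its alias
    }                                // is no longer followed eagerly when comparing named types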
--- src/dotty/tools/dotc/core/TypeComparer.scala | 22 ++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 14b5a403cda0..d798e0c31b6c 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -144,11 +144,29 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { def compareNamed(tp1: Type, tp2: NamedType): Boolean = { implicit val ctx: Context = this.ctx tp2.info match { - case info2: TypeAlias => firstTry(tp1, info2.alias) + case info2: TypeAlias if tp2.prefix.isStable => + // If prefix is not stable (i.e. is not a path), then we have a true + // projection `T # A` which is treated as the existential type + // `ex(x: T)x.A`. We need to deal with the existential first before + // following the alias. If we did follow the alias we could be + // unsound as well as incomplete. An example of this was discovered in Iter2.scala. + // It failed to validate the subtype test + // + // (([+X] -> Seq[X]) & C)[SA] <: C[SA] + // + // Both sides are projections of $Apply. The left $Apply does have an + // aliased info, namely, Seq[SA]. But that is not a subtype of C[SA]. + // The problem is that, with the prefix not being a path, an aliased info + // does not necessarily give all of the information of the original projection. + // So we can't follow the alias without a backup strategy. If the alias + // would appear on the right then I believe this can be turned into a case + // of unsoundness. + isSubType(tp1, info2.alias) case _ => tp1 match { case tp1: NamedType => tp1.info match { - case info1: TypeAlias => compareNamed(info1.alias, tp2) + case info1: TypeAlias if tp1.prefix.isStable => + isSubType(info1.alias, tp2) case _ => val sym1 = tp1.symbol if ((sym1 ne NoSymbol) && (sym1 eq tp2.symbol)) From dba4b94443a8066df23ee7ccdb890ad048a06a2c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 13 Dec 2015 15:32:17 +0100 Subject: [PATCH 38/41] Avoid cycle when computing sets The tightened subtyping algorithm led to a cycle in baseTypeRef when compiling sets.scala. This commit fixes the problem. --- src/dotty/tools/dotc/core/Types.scala | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index a7c457d8fecc..f65633595df7 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -447,7 +447,18 @@ object Types { if (rinfo.isAlias) rinfo else if (pdenot.info.isAlias) pdenot.info else if (ctx.pendingMemberSearches.contains(name)) safeAnd(pdenot.info, rinfo) - else pdenot.info & rinfo + else + try pdenot.info & rinfo + catch { + case ex: CyclicReference => + // happens for tests/pos/sets.scala. findMember is called from baseTypeRef. + // The & causes a subtype check which calls baseTypeRef again with the same + // superclass. In the observed case, the superclass was Any, and + // the special shortcut for Any in derivesFrom was as yet absent. To reproduce, + // remove the special treatment of Any in derivesFrom and compile + // sets.scala. + safeAnd(pdenot.info, rinfo) + } pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo) } else pdenot & (new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId)), pre) @@ -1562,6 +1573,15 @@ object Types { case _ => false } + + /* A version of toString which also prints aliases. 
Can be used for debugging + override def toString = + if (isTerm) s"TermRef($prefix, $name)" + else s"TypeRef($prefix, $name)${ + if (lastDenotation != null && lastDenotation.infoOrCompleter.isAlias) + s"@@@ ${lastDenotation.infoOrCompleter.asInstanceOf[TypeAlias].hi}" + else ""}" + */ } abstract case class TermRef(override val prefix: Type, name: TermName) extends NamedType with SingletonType { From b59395843b872d5c95052aa1e2f81c2c2fc172f1 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 13 Dec 2015 15:32:57 +0100 Subject: [PATCH 39/41] Better explanation for adaptHkVariances --- .../tools/dotc/core/TypeApplications.scala | 32 +++++++++++-------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index f7727779df3b..fbab4ee39744 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -353,29 +353,35 @@ class TypeApplications(val self: Type) extends AnyVal { } /** If argument A and type parameter P are higher-kinded, adapt the variances - * of A to those of P, ensuring that the variances of the type lambda A - * agree with the variances of corresponding higherkinded type parameters of P. Example: + * of A to those of P, ensuring that the variances of the type lambda A + * agree with the variances of corresponding higher-kinded type parameters of P. Example: * - * class Companion[+CC[X]] - * Companion[List] + * class GenericCompanion[+CC[X]] + * GenericCompanion[List] * - * with adaptArgs, this will expand to + * with adaptHkVariances, the argument `List` will expand to * - * Companion[[X] => List[X]] + * [X] => List[X] * * instead of * - * Companion[[+X] => List[X]] + * [+X] => List[X] * * even though `List` is covariant. This adaptation is necessary to ignore conflicting - * variances in overriding members that have types of hk-type parameters such as `Companion[GenTraversable]` - * or `Companion[ListBuffer]`. Without the adaptation we would end up with + * variances in overriding members that have types of hk-type parameters such as + * `GenericCompanion[GenTraversable]` or `GenericCompanion[ListBuffer]`. + * When checking overriding, we need to validate the subtype relationship * - * Companion[[+X] => GenTraversable[X]] - * Companion[[X] => List[X]] + * GenericCompanion[[X] -> ListBuffer[X]] <: GenericCompanion[[+X] -> GenTraversable[X]] * - * and the second is not a subtype of the first. So if we have overridding memebrs of the two - * types we get an error. + * Without adaptation, this would be false, and hence an overriding error would + * result. But with adaptation, the rhs argument will be adapted to + * + * [X] -> GenTraversable[X] + * + * which makes the subtype test succeed. The crucial point here is that, since + * GenericCompanion only expects a non-variant CC, the fact that GenTraversable + * is covariant is irrelevant, so can be ignored. */ def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = { val boundLambda = bound.LambdaTrait From 95098ba498b5317002395008376a565550189673 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 13 Dec 2015 15:34:38 +0100 Subject: [PATCH 40/41] Shortcut in derivesFrom for high bound Any. Any is a supertype of every other type, so no need to analyze types in detail. This also fixes the cyclic reference error observed for sets.scala, but only for the special case where the base class is Any. 
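In source terms the shortcut only encodes that every type conforms to Any; a trivial illustration (not from the test suite):

    object DerivesFromAny {
      trait A; trait B
      val ab: A with B = new A with B {}   // an intersection derives from A through its left part
      val top: Any = ab                    // and everything derives from Any, which is now answered
    }                                      // without inspecting the type at all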
--- src/dotty/tools/dotc/core/Types.scala | 31 +++++++++++++++------------ 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index f65633595df7..596d1ba24d72 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -141,20 +141,23 @@ object Types { } /** Is this type an instance of a non-bottom subclass of the given class `cls`? */ - final def derivesFrom(cls: Symbol)(implicit ctx: Context): Boolean = this match { - case tp: TypeRef => - val sym = tp.symbol - if (sym.isClass) sym.derivesFrom(cls) else tp.underlying.derivesFrom(cls) - case tp: TypeProxy => - tp.underlying.derivesFrom(cls) - case tp: AndType => - tp.tp1.derivesFrom(cls) || tp.tp2.derivesFrom(cls) - case tp: OrType => - tp.tp1.derivesFrom(cls) && tp.tp2.derivesFrom(cls) - case tp: JavaArrayType => - cls == defn.ObjectClass - case _ => - false + final def derivesFrom(cls: Symbol)(implicit ctx: Context): Boolean = { + def loop(tp: Type) = tp match { + case tp: TypeRef => + val sym = tp.symbol + if (sym.isClass) sym.derivesFrom(cls) else tp.underlying.derivesFrom(cls) + case tp: TypeProxy => + tp.underlying.derivesFrom(cls) + case tp: AndType => + tp.tp1.derivesFrom(cls) || tp.tp2.derivesFrom(cls) + case tp: OrType => + tp.tp1.derivesFrom(cls) && tp.tp2.derivesFrom(cls) + case tp: JavaArrayType => + cls == defn.ObjectClass + case _ => + false + } + cls == defn.AnyClass || loop(this) } /** Is this type guaranteed not to have `null` as a value? From 8a9e89afffb501ed7b312c21aa5e9981cd4e2b23 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 13 Dec 2015 15:36:04 +0100 Subject: [PATCH 41/41] Allow deep subtype for sets and related code in dotty/transform. The change in subtyping led to a deep subtype recursion for sets.scala. It seems legit, so the -Yno-deep-subtypes check is disabled. 
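For reference, the per-test compiler flags are passed in a second argument list. A hypothetical extra test following the same pattern as the changes below, written inside the same tests class (test and file names invented):

    @Test def pos_moreDeepSubtypes = compileFile(posSpecialDir, "moreDeepSubtypes")(allowDeepSubtypes)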
--- test/dotc/tests.scala | 5 +++-- tests/{pos => pos-special}/sets.scala | 0 2 files changed, 3 insertions(+), 2 deletions(-) rename tests/{pos => pos-special}/sets.scala (100%) diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 5993f4064efb..9662d09d65b1 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -100,6 +100,7 @@ class tests extends CompilerTest { @Test def pos_all = compileFiles(posDir) // twice omitted to make tests run faster + @Test def pos_sets = compileFile(posSpecialDir, "sets")(allowDeepSubtypes) @Test def pos_t2613 = compileFile(posSpecialDir, "t2613")(allowDeepSubtypes) @Test def pos_i871 = compileFile(posSpecialDir, "i871", scala2mode) @Test def pos_variancesConstr = compileFile(posSpecialDir, "variances-constr", scala2mode) @@ -182,7 +183,7 @@ class tests extends CompilerTest { .toList @Test def compileStdLib = compileList("compileStdLib", stdlibFiles, "-migration" :: scala2mode) - @Test def dotty = compileDir(dottyDir, ".", "-deep" :: "-Ycheck-reentrant" :: allowDeepSubtypes) // note the -deep argument + @Test def dotty = compileDir(dottyDir, ".", List("-deep", "-Ycheck-reentrant"))(allowDeepSubtypes) // note the -deep argument @Test def dotc_ast = compileDir(dotcDir, "ast") @Test def dotc_config = compileDir(dotcDir, "config") @@ -191,7 +192,7 @@ class tests extends CompilerTest { // This directory doesn't exist anymore // @Test def dotc_core_pickling = compileDir(coreDir, "pickling")(allowDeepSubtypes)// twice omitted to make tests run faster - @Test def dotc_transform = compileDir(dotcDir, "transform")// twice omitted to make tests run faster + @Test def dotc_transform = compileDir(dotcDir, "transform")(allowDeepSubtypes)// twice omitted to make tests run faster @Test def dotc_parsing = compileDir(dotcDir, "parsing") // twice omitted to make tests run faster diff --git a/tests/pos/sets.scala b/tests/pos-special/sets.scala similarity index 100% rename from tests/pos/sets.scala rename to tests/pos-special/sets.scala