diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 7ed0a26e018c..a75602187fa9 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -493,7 +493,7 @@ class Definitions { /** The set of HigherKindedXYZ traits encountered so far */ def lambdaTraits: Set[Symbol] = myLambdaTraits - private var lambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() + private var LambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() /** The HigherKinded trait corresponding to symbols `boundSyms` (which are assumed * to be the type parameters of a higher-kided type). This is a class symbol that @@ -512,7 +512,7 @@ class Definitions { * - for each positive or negative variance v_i there is a parent trait Pj which * is the same as LambdaXYZ except that it has `I` in i-th position. */ - def lambdaTrait(vcs: List[Int]): ClassSymbol = { + def LambdaTrait(vcs: List[Int]): ClassSymbol = { assert(vcs.nonEmpty) def varianceFlags(v: Int) = v match { @@ -526,17 +526,17 @@ class Definitions { val cls = denot.asClass.classSymbol val paramDecls = newScope for (i <- 0 until vcs.length) - newTypeParam(cls, tpnme.lambdaArgName(i), varianceFlags(vcs(i)), paramDecls) - newTypeField(cls, tpnme.Apply, Covariant, paramDecls) + newTypeParam(cls, tpnme.LambdaArgName(i), varianceFlags(vcs(i)), paramDecls) + newTypeField(cls, tpnme.hkApply, Covariant, paramDecls) val parentTraitRefs = for (i <- 0 until vcs.length if vcs(i) != 0) - yield lambdaTrait(vcs.updated(i, 0)).typeRef + yield LambdaTrait(vcs.updated(i, 0)).typeRef denot.info = ClassInfo( ScalaPackageClass.thisType, cls, ObjectClass.typeRef :: parentTraitRefs.toList, paramDecls) } } - val traitName = tpnme.lambdaTraitName(vcs) + val traitName = tpnme.LambdaTraitName(vcs) def createTrait = { val cls = newClassSymbol( @@ -548,7 +548,7 @@ class Definitions { cls } - lambdaTraitForVariances.getOrElseUpdate(vcs, createTrait) + LambdaTraitForVariances.getOrElseUpdate(vcs, createTrait) } // ----- primitive value class machinery ------------------------------------------ diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index 4d6cca61dc08..77a14767ec3b 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -109,7 +109,7 @@ object NameOps { /** The index of the higher-kinded type parameter with this name. * Pre: isLambdaArgName. 
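* For example, with LAMBDA_ARG_PREFIX == "hk$" (see StdNames below), the name
* `hk$2` has index 2: dropping the prefix leaves "2", which is parsed as an Int.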
*/ - def lambdaArgIndex: Int = + def LambdaArgIndex: Int = name.drop(tpnme.LAMBDA_ARG_PREFIX.length).toString.toInt /** If the name ends with $nn where nn are diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index eb1a73625381..25457f676c29 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -173,7 +173,7 @@ object StdNames { final val WILDCARD_STAR: N = "_*" final val REIFY_TREECREATOR_PREFIX: N = "$treecreator" final val REIFY_TYPECREATOR_PREFIX: N = "$typecreator" - final val LAMBDA_ARG_PREFIX: N = "HK$" + final val LAMBDA_ARG_PREFIX: N = "hk$" final val LAMBDA_ARG_PREFIXhead: Char = LAMBDA_ARG_PREFIX.head final val Any: N = "Any" @@ -311,7 +311,7 @@ object StdNames { val AnnotatedType: N = "AnnotatedType" val AppliedTypeTree: N = "AppliedTypeTree" - val Apply: N = "Apply" + val hkApply: N = "$apply" val ArrayAnnotArg: N = "ArrayAnnotArg" val Constant: N = "Constant" val ConstantType: N = "ConstantType" @@ -737,8 +737,8 @@ object StdNames { def syntheticTypeParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticTypeParamName)(breakOut) - def lambdaTraitName(vcs: List[Int]): TypeName = LambdaPrefix ++ vcs.map(varianceSuffix).mkString - def lambdaArgName(n: Int) = LAMBDA_ARG_PREFIX ++ n.toString + def LambdaTraitName(vcs: List[Int]): TypeName = LambdaPrefix ++ vcs.map(varianceSuffix).mkString + def LambdaArgName(n: Int) = LAMBDA_ARG_PREFIX ++ n.toString final val Conforms = encode("<:<") diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala index e4bbf2305805..0083ac626fae 100644 --- a/src/dotty/tools/dotc/core/Substituters.scala +++ b/src/dotty/tools/dotc/core/Substituters.scala @@ -277,4 +277,31 @@ trait Substituters { this: Context => final class SubstParamsMap(from: BindingType, to: List[Type]) extends DeepTypeMap { def apply(tp: Type) = substParams(tp, from, to, this) } + + /** A map for "cycle safe substitutions" which do not force the denotation + * of a TypeRef unless the name matches up with one of the substituted symbols. + */ + final class SafeSubstMap(from: List[Symbol], to: List[Type]) extends TypeMap { + def apply(tp: Type): Type = tp match { + case tp: NamedType => + try { + var sym: Symbol = null + var fs = from + var ts = to + while (fs.nonEmpty) { + if (fs.head.name == tp.name) { + if (sym == null) sym = tp.symbol + if (fs.head eq sym) return ts.head + } + fs = fs.tail + ts = ts.tail + } + tp.newLikeThis(apply(tp.prefix)) + } + catch { + case ex: CyclicReference => tp.derivedSelect(apply(tp.prefix)) + } + case _ => mapOver(tp) + } + } } diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index f466cee7705a..42da617d624f 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -14,6 +14,7 @@ import StdNames.tpnme import util.Positions.Position import config.Printers._ import collection.mutable +import java.util.NoSuchElementException object TypeApplications { @@ -44,7 +45,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** The type parameters of this type are: * For a ClassInfo type, the type parameters of its class. * For a typeref referring to a class, the type parameters of the class. - * For a typeref referring to an alias or abstract type, the type parameters of + * For a typeref referring to a Lambda class, the type parameters of * its right hand side or upper bound. 
* For a refinement type, the type parameters of its parent, unless the refinement * re-binds the type parameter with a type-alias. @@ -58,7 +59,11 @@ class TypeApplications(val self: Type) extends AnyVal { case tp: TypeRef => val tsym = tp.typeSymbol if (tsym.isClass) tsym.typeParams - else tp.underlying.typeParams + else if (tsym.isAliasType) tp.underlying.typeParams + else { + val lam = LambdaClass(forcing = false) + if (lam.exists) lam.typeParams else Nil + } case tp: RefinedType => val tparams = tp.parent.typeParams tp.refinedInfo match { @@ -80,7 +85,7 @@ class TypeApplications(val self: Type) extends AnyVal { * do not remove corresponding type parameters. * Second, it will return Nil for BoundTypes because we might get a NullPointer exception * on PolyParam#underlying otherwise (demonstrated by showClass test). - * Third, it won't return higher-kinded type parameters, i.e. the type parameters of + * Third, it won't return abstract higher-kinded type parameters, i.e. the type parameters of * an abstract type are always empty. */ final def rawTypeParams(implicit ctx: Context): List[TypeSymbol] = { @@ -135,6 +140,20 @@ class TypeApplications(val self: Type) extends AnyVal { def isInstantiatedLambda(implicit ctx: Context): Boolean = isSafeLambda && typeParams.isEmpty + /** Is receiver type higher-kinded (i.e. of kind != "*")? */ + def isHK(implicit ctx: Context): Boolean = self.dealias match { + case self: TypeRef => self.info.isHK + case RefinedType(_, name) => name == tpnme.hkApply || name.isLambdaArgName + case TypeBounds(_, hi) => hi.isHK + case _ => false + } + + /** is receiver of the form T#$apply? */ + def isHKApply: Boolean = self match { + case TypeRef(_, name) => name == tpnme.hkApply + case _ => false + } + /** Encode the type resulting from applying this type to given arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { @@ -144,11 +163,7 @@ class TypeApplications(val self: Type) extends AnyVal { println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") } val tparam = tparams.head - val arg1 = - if ((tparam is HigherKinded) && !arg.isLambda && arg.typeParams.nonEmpty) - arg.EtaExpand - else arg - val tp1 = RefinedType(tp, tparam.name, arg1.toBounds(tparam)) + val tp1 = RefinedType(tp, tparam.name, arg.toBounds(tparam)) matchParams(tp1, tparams.tail, args1) case nil => tp } @@ -168,12 +183,14 @@ class TypeApplications(val self: Type) extends AnyVal { if (tsym.isClass || !tp.typeSymbol.isCompleting) original.typeParams else { ctx.warning(i"encountered F-bounded higher-kinded type parameters for $tsym; assuming they are invariant") - defn.lambdaTrait(args map alwaysZero).typeParams + defn.LambdaTrait(args map alwaysZero).typeParams // @@@ can we force? } matchParams(tp, safeTypeParams, args) } case tp: RefinedType => - tp.derivedRefinedType( + val redux = tp.EtaReduce + if (redux.exists) redux.appliedTo(args) // Rewrite ([HK$0] => C[HK$0])(T) to C[T] + else tp.derivedRefinedType( instantiate(tp.parent, original), tp.refinedName, tp.refinedInfo) @@ -185,18 +202,31 @@ class TypeApplications(val self: Type) extends AnyVal { tp } + /** Same as isHK, except we classify all abstract types as HK, + * (they must be, because the are applied). This avoids some forcing and + * CyclicReference errors of the standard isHK. 
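+ *
+ * For instance (a sketch): applying an abstract type member `type F[X]` as
+ * `F[Int]` is classified as higher-kinded here without forcing `F`'s bounds,
+ * so the application gets the `# $apply` projection below; an application of
+ * a class constructor such as `List[Int]` does not, since `List`'s symbol is
+ * a class and not a lambda trait.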
+ */ + def isKnownHK(tp: Type): Boolean = tp match { + case tp: TypeRef => + val sym = tp.symbol + if (sym.isClass) sym.isLambdaTrait + else !sym.isAliasType || isKnownHK(tp.info) + case tp: TypeProxy => isKnownHK(tp.underlying) + case _ => false + } + if (args.isEmpty || ctx.erasedTypes) self else { val res = instantiate(self, self) - if (res.isInstantiatedLambda) res.select(tpnme.Apply) else res + if (isKnownHK(res)) TypeRef(res, tpnme.hkApply) else res } } /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`. */ def simplifyApply(implicit ctx: Context): Type = self match { - case self @ TypeRef(prefix, tpnme.Apply) if prefix.isInstantiatedLambda => - prefix.member(tpnme.Apply).info match { + case self @ TypeRef(prefix, tpnme.hkApply) if prefix.isInstantiatedLambda => + prefix.member(tpnme.hkApply).info match { case TypeAlias(alias) => alias case _ => self } @@ -392,9 +422,10 @@ class TypeApplications(val self: Type) extends AnyVal { case RefinedThis(tp) => tp eq target case tp: NamedType => - tp.info match { + if (tp.symbol.isClass) !tp.symbol.isStatic && recur(tp.prefix) + else tp.info match { case TypeAlias(alias) => recur(alias) - case _ => !tp.symbol.isStatic && recur(tp.prefix) + case _ => recur(tp.prefix) } case tp: RefinedType => recur(tp.refinedInfo) || recur(tp.parent) @@ -410,91 +441,48 @@ class TypeApplications(val self: Type) extends AnyVal { recur(self) } - /** Given a type alias - * - * type T[boundSyms] = p.C[targs] - * - * produce its equivalent right hand side RHS that makes no reference to the bound - * symbols on the left hand side. I.e. the type alias can be replaced by - * - * type T = RHS - * - * There are two strategies how this is achieved. - - * 1st strategy: Applies if `C` is a class such that every bound symbol in `boundSyms` - * appears as an argument in `targs`, and in the same order. Then the rewriting replaces - * bound symbols by references to the parameters of class C. Example: - * - * Say we have: - * - * class Triple[type T1, type T2, type T3] - * type A[X] = Triple[(X, X), X, String] - * - * Then this is rewritable, as `X` appears as second type argument to `Triple`. - * Occurrences of `X` are rewritten to `this.T2` and the whole definition becomes: - * - * type A = Triple { type T1 = (this.T2, this.T2); type T3 = String } - * - * 2nd strategy: Used as a fallback if 1st strategy does not apply. It rewrites - * the RHS to a typed lambda abstraction. 
- */ - def parameterizeWith(boundSyms: List[Symbol])(implicit ctx: Context): Type = { - def matchParams(bsyms: List[Symbol], tparams: List[Symbol], targs: List[Type], - correspondingParamName: Map[Symbol, TypeName]): Type = { - if (bsyms.isEmpty) { - val correspondingNames = correspondingParamName.values.toSet - - def replacements(rt: RefinedType): List[Type] = - for (sym <- boundSyms) - yield TypeRef(RefinedThis(rt), correspondingParamName(sym)) - - def rewrite(tp: Type): Type = tp match { - case tp @ RefinedType(parent, name: TypeName) => - if (correspondingNames contains name) rewrite(parent) - else RefinedType( - rewrite(parent), name, - rt => tp.refinedInfo.subst(boundSyms, replacements(rt))) - case tp => - tp - } - - rewrite(self) - } - else if (tparams.isEmpty || targs.isEmpty) - LambdaAbstract(boundSyms) - else if (bsyms.head == targs.head.typeSymbol) - matchParams(bsyms.tail, tparams.tail, targs.tail, - correspondingParamName + (bsyms.head -> tparams.head.name.asTypeName)) - else - matchParams(bsyms, tparams.tail, targs.tail, correspondingParamName) - } - val cls = self.typeSymbol - if (cls.isClass) matchParams(boundSyms, cls.typeParams, argInfos, Map()) - else LambdaAbstract(boundSyms) - } - /** The typed lambda abstraction of this type `T` relative to `boundSyms`. * This is: * - * LambdaXYZ{ type Apply = subst(T) } + * LambdaXYZ{ bounds }{ type Apply = toHK(T) } * - * where XYZ reflets that variances of the bound symbols and - * `subst` is a substitution that replaces every bound symbol sym_i by - * `this.Arg$i`. + * where + * - XYZ reflects the variances of the bound symbols, + * - `bounds` consists of type declarations `type hk$i >: toHK(L) <: toHK(U), + * one for each type parameter in `T` with non-trivial bounds L,U. + * - `toHK` is a substitution that replaces every bound symbol sym_i by + * `this.Arg$i`. * * TypeBounds are lambda abstracting by lambda abstracting their upper bound. + * + * @param cycleParanoid If `true` don't force denotation of a TypeRef unless + * its name matches one of `boundSyms`. Needed to avoid cycles + * involving F-boundes hk-types when reading Scala2 collection classes + * with new hk-scheme. 
*/ - def LambdaAbstract(boundSyms: List[Symbol])(implicit ctx: Context): Type = { - def expand(tp: Type) = { - val lambda = defn.lambdaTrait(boundSyms.map(_.variance)) - val substitutedRHS = (rt: RefinedType) => { + def LambdaAbstract(boundSyms: List[Symbol], cycleParanoid: Boolean = false)(implicit ctx: Context): Type = { + def expand(tp: Type): Type = { + val lambda = defn.LambdaTrait(boundSyms.map(_.variance)) + def toHK(tp: Type) = (rt: RefinedType) => { val argRefs = boundSyms.indices.toList.map(i => - RefinedThis(rt).select(tpnme.lambdaArgName(i))) - tp.subst(boundSyms, argRefs).bounds.withVariance(1) + RefinedThis(rt).select(tpnme.LambdaArgName(i))) + val substituted = + if (cycleParanoid) new ctx.SafeSubstMap(boundSyms, argRefs).apply(tp) + else tp.subst(boundSyms, argRefs) + substituted.bounds.withVariance(1) + } + val boundNames = new mutable.ListBuffer[Name] + val boundss = new mutable.ListBuffer[TypeBounds] + for (sym <- boundSyms) { + val bounds = sym.info.bounds + if (!(TypeBounds.empty <:< bounds)) { + boundNames += sym.name + boundss += bounds + } } - val res = RefinedType(lambda.typeRef, tpnme.Apply, substitutedRHS) - //println(i"lambda abstract $self wrt $boundSyms%, % --> $res") - res + val lambdaWithBounds = + RefinedType.make(lambda.typeRef, boundNames.toList, boundss.toList.map(toHK)) + RefinedType(lambdaWithBounds, tpnme.hkApply, toHK(tp)) } self match { case self @ TypeBounds(lo, hi) => @@ -513,14 +501,94 @@ class TypeApplications(val self: Type) extends AnyVal { def EtaExpand(implicit ctx: Context): Type = { val tparams = typeParams self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparams) + //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") + } + + /** Eta expand if `bound` is a higher-kinded type */ + def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = + if (bound.isHK && !isHK && self.typeSymbol.isClass && typeParams.nonEmpty) EtaExpand + else self + + /** If `self` is a (potentially partially instantiated) eta expansion of type T, return T, + * otherwise NoType. More precisely if `self` is of the form + * + * T { type $apply = U[T1, ..., Tn] } + * + * where + * + * - hk$0, ..., hk${m-1} are the type parameters of T + * - a sublist of the arguments Ti_k (k = 0,...,m_1) are of the form T{...}.this.hk$i_k + * + * rewrite `self` to + * + * U[T'1,...T'j] + * + * where + * + * T'j = _ >: Lj <: Uj if j is in the i_k list defined above + * where Lj and Uj are the bounds of hk$j mapped using `fromHK`. + * = fromHK(Tj) otherwise. + * + * `fromHK` is the function that replaces every occurrence of `.this.hk$i` by the + * corresponding parameter reference in `U[T'1,...T'j]` + */ + def EtaReduce(implicit ctx: Context): Type = { + def etaCore(tp: Type, tparams: List[Symbol]): Type = tparams match { + case Nil => tp + case tparam :: otherParams => + tp match { + case tp: RefinedType => + tp.refinedInfo match { + case TypeAlias(TypeRef(RefinedThis(rt), rname)) + if (rname == tparam.name) && (rt eq self) => + // we have a binding T = Lambda$XYZ{...}.this.hk$i where hk$i names the current `tparam`. 
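+ // For example (sketch): if self is `LambdaXYZ { type $apply = C[this.hk$0] }`,
+ // the argument refinement for C's parameter is exactly `this.hk$0`, so it is
+ // dropped here, or replaced by hk$0's declared bounds if those are non-trivial,
+ // reducing the core towards plain `C`.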
+ val pcore = etaCore(tp.parent, otherParams) + val hkBounds = self.member(rname).info.bounds + if (TypeBounds.empty <:< hkBounds) pcore + else tp.derivedRefinedType(pcore, tp.refinedName, hkBounds) + case _ => + val pcore = etaCore(tp.parent, tparams) + if (pcore.exists) tp.derivedRefinedType(pcore, tp.refinedName, tp.refinedInfo) + else NoType + } + case _ => + NoType + } + } + // Map references `Lambda$XYZ{...}.this.hk$i to corresponding parameter references of the reduced core. + def fromHK(reduced: Type) = reduced match { + case reduced: RefinedType => + new TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(RefinedThis(binder), name) if binder eq self => + assert(name.isLambdaArgName) + RefinedThis(reduced).select(reduced.typeParams.apply(name.LambdaArgIndex)) + case _ => + mapOver(tp) + } + }.apply(reduced) + case _ => + reduced + } + + self match { + case self @ RefinedType(parent, tpnme.hkApply) => + val lc = parent.LambdaClass(forcing = false) + self.refinedInfo match { + case TypeAlias(alias) if lc.exists => + fromHK(etaCore(alias, lc.typeParams.reverse)) + case _ => NoType + } + case _ => NoType + } } - /** Test whether this type has a base type of the form `B[T1, ..., Bn]` where + /** Test whether this type has a base type of the form `B[T1, ..., Tn]` where * the type parameters of `B` match one-by-one the variances of `tparams`, * and where the lambda abstracted type * - * LambdaXYZ { type Apply = B[$hkArg$0, ..., $hkArg$n] } - * { type $hkArg$0 = T1; ...; type $hkArg$n = Tn } + * LambdaXYZ { type Apply = B[$hkArg$0, ..., $hkArg${n-1}] } + * { type $hkArg$0 = T1; ...; type $hkArg${n-1} = Tn } * * satisfies predicate `p`. Try base types in the order of their occurrence in `baseClasses`. * A type parameter matches a variance V if it has V as its variance or if V == 0. @@ -550,8 +618,8 @@ class TypeApplications(val self: Type) extends AnyVal { case nil => false } - if (tparams.isEmpty) false - else if (typeParams.nonEmpty) p(EtaExpand) || classBounds.nonEmpty && tryLift(self.baseClasses) - else classBounds.nonEmpty && tryLift(self.baseClasses) + tparams.nonEmpty && + (typeParams.nonEmpty && p(EtaExpand) || + classBounds.nonEmpty && tryLift(self.baseClasses)) } } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index ea815f6c0778..fce803c56866 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -466,21 +466,19 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } - /** If `projection` is of the form T # Apply where `T` is an instance of a Lambda class, - * and `other` is not a type lambda projection, then convert `other` to a type lambda `U`, and + /** If `projection` is a hk projection T#$apply + * and `other` is not a hk projection, then convert `other` to a hk projection `U`, and * continue with `T <:< U` if `inOrder` is true and `U <:< T` otherwise. 
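*
* For instance (a sketch): when `projection` stands for an application of an
* hk-abstracted `F[_, _]` and `other` is `Map[Int, String]`, the comparison
* lifts `Map[Int, String]` to a lambda over a matching base type via
* `testLifted` and then compares that lambda with the projection's prefix.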
*/ def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = - projection.name == tpnme.Apply && { - val lambda = projection.prefix.LambdaClass(forcing = true) - lambda.exists && !other.isLambda && - other.testLifted(lambda.typeParams, - if (inOrder) isSubType(projection.prefix, _) else isSubType(_, projection.prefix), - if (inOrder) Nil else classBounds(projection.prefix)) - } + projection.name == tpnme.hkApply && + !other.isHKApply && + other.testLifted(projection.prefix.LambdaClass(forcing = true).typeParams, + if (inOrder) isSubType(projection.prefix, _) else isSubType(_, projection.prefix), + if (inOrder) Nil else classBounds(projection.prefix)) /** The class symbols bounding the type of the `Apply` member of `tp` */ - private def classBounds(tp: Type) = tp.member(tpnme.Apply).info.classSymbols + private def classBounds(tp: Type) = tp.member(tpnme.hkApply).info.classSymbols /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time * to keep the constraint as wide as possible. Specifically, if @@ -633,11 +631,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** Does `tp` need to be eta lifted to be comparable to `target`? */ private def needsEtaLift(tp: Type, target: RefinedType): Boolean = { - //default.echo(i"needs eta $tp $target?", { + // if (tp.isLambda != tp.isHK) println(i"discrepancy for $tp, isLambda = ${tp.isLambda}, isHK = ${tp.isHK}") val name = target.refinedName - (name.isLambdaArgName || (name eq tpnme.Apply)) && target.isLambda && - tp.exists && !tp.isLambda - //}) + (name.isLambdaArgName || (name eq tpnme.hkApply)) && + tp.exists && !tp.isLambda // we do encounter Lambda classes without any arguments here } /** Narrow gadt.bounds for the type parameter referenced by `tr` to include @@ -932,7 +929,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tp1.derivedRefinedType( tp1.parent & tp2.parent, tp1.refinedName, - tp1.refinedInfo & tp2.refinedInfo) + tp1.refinedInfo & tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1))) case _ => NoType } @@ -998,7 +995,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tp1.derivedRefinedType( tp1.parent | tp2.parent, tp1.refinedName, - tp1.refinedInfo | tp2.refinedInfo) + tp1.refinedInfo | tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1))) case _ => NoType } @@ -1235,5 +1232,12 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) + override def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = + if (projection.name == tpnme.hkApply) + traceIndented(i"compareHK $projection, $other, $inOrder") { + super.compareHK(projection, other, inOrder) + } + else super.compareHK(projection, other, inOrder) + override def toString = "Subtype trace:" + { try b.toString finally b.clear() } } diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala index 0ef31015c2de..262fff2ed04b 100644 --- a/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/src/dotty/tools/dotc/core/TypeErasure.scala @@ -441,7 +441,11 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean case tp: TypeRef => if (!tp.denot.exists) throw new MissingType(tp.prefix, tp.name) val sym = tp.symbol - if (!sym.isClass) sigName(tp.info) + if (!sym.isClass) { + val info = tp.info + if (!info.exists) assert(false, "undefined: $tp with symbol $sym") + 
sigName(info) + } else if (isDerivedValueClass(sym)) sigName(eraseDerivedValueClassRef(tp)) else normalizeClass(sym.asClass).fullName.asTypeName case defn.ArrayType(elem) => diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index b61d39749a4b..eb46ef25df89 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -21,7 +21,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. * class C { type T; def f(x: T): T } * * and an expression `e` of type `C`. Then computing the type of `e.f` leads - * to the query asSeenFrom(`C`, `(x: T)T`). What should it's result be? The + * to the query asSeenFrom(`C`, `(x: T)T`). What should its result be? The * naive answer `(x: C.T)C.T` is incorrect given that we treat `C.T` as the existential * `exists(c: C)c.T`. What we need to do instead is to skolemize the existential. So * the answer would be `(x: c.T)c.T` for some (unknown) value `c` of type `C`. diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 4d6a7e22b719..ab6eb03ad776 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -826,7 +826,7 @@ object Types { * * P { type T = String, type R = P{...}.T } # R --> String * - * (2) The refinement is a fully instantiated type lambda, and the projected name is "Apply". + * (2) The refinement is a fully instantiated type lambda, and the projected name is "$apply". * In this case the rhs of the apply is returned with all references to lambda argument types * substituted by their definitions. * @@ -862,7 +862,7 @@ object Types { else if (!pre.refinementRefersToThis) alias else alias match { case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1) - case _ => if (name == tpnme.Apply) betaReduce(alias) else NoType // (2) + case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) } case _ => loop(pre.parent, resolved) } @@ -1485,7 +1485,7 @@ object Types { /** Create a NamedType of the same kind as this type, but with a new prefix. */ - protected def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType = + def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType = NamedType(prefix, name) /** Create a NamedType of the same kind as this type, but with a "inherited name". 
@@ -1808,7 +1808,16 @@ object Types { } case class LazyRef(refFn: () => Type) extends UncachedProxyType with ValueType { - lazy val ref = refFn() + private var myRef: Type = null + private var computed = false + lazy val ref = { + if (computed) assert(myRef != null) + else { + computed = true + myRef = refFn() + } + myRef + } override def underlying(implicit ctx: Context) = ref override def toString = s"LazyRef($ref)" override def equals(other: Any) = other match { @@ -1849,30 +1858,16 @@ object Types { case refinedInfo: TypeBounds if refinedInfo.variance != 0 && refinedName.isLambdaArgName => val cls = parent.LambdaClass(forcing = false) if (cls.exists) - assert(refinedInfo.variance == cls.typeParams.apply(refinedName.lambdaArgIndex).variance, - s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.lambdaArgIndex).variance}, ${refinedInfo.variance}") + assert(refinedInfo.variance == cls.typeParams.apply(refinedName.LambdaArgIndex).variance, + s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.LambdaArgIndex).variance}, ${refinedInfo.variance}") case _ => } this } - /** Derived refined type, with a twist: A refinement with a higher-kinded type param placeholder - * is transformed to a refinement of the original type parameter if that one exists. - */ - def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = { - lazy val underlyingTypeParams = parent.rawTypeParams - - if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) - this - else if ( refinedName.isLambdaArgName - //&& { println(s"deriving $refinedName $parent $underlyingTypeParams"); true } - && refinedName.lambdaArgIndex < underlyingTypeParams.length - && !parent.isLambda) - derivedRefinedType(parent.EtaExpand, refinedName, refinedInfo) - else - if (false) RefinedType(parent, refinedName, refinedInfo) - else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) - } + def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = + if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this + else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) /** Add this refinement to `parent`, provided If `refinedName` is a member of `parent`. */ def wrapIfMember(parent: Type)(implicit ctx: Context): Type = diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 8847d2de3c40..58697c196b36 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -176,7 +176,7 @@ class TreePickler(pickler: TastyPickler) { pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix) } case tpe: NamedType => - if (tpe.name == tpnme.Apply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) + if (tpe.name == tpnme.hkApply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will // be reconstituted when unpickling. 
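// E.g. (sketch): for an instantiated lambda such as
//   LambdaXYZ { type $apply = List[this.hk$0] } { type hk$0 = String } # $apply
// only the prefix is pickled; the projection is added back by the unpickler.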
pickleType(tpe.prefix) @@ -200,7 +200,9 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)} case tpe: RefinedThis => writeByte(REFINEDthis) - writeRef(pickledTypes.get(tpe.binder).asInstanceOf[Addr]) + val binderAddr = pickledTypes.get(tpe.binder) + assert(binderAddr != null, tpe.binder) + writeRef(binderAddr.asInstanceOf[Addr]) case tpe: SkolemType => pickleType(tpe.info) case tpe: RefinedType => diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 53e8478fa66f..74e9b21eac0a 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -49,18 +49,9 @@ object Scala2Unpickler { */ def depoly(tp: Type, denot: SymDenotation)(implicit ctx: Context): Type = tp match { case TempPolyType(tparams, restpe) => - if (denot.isAbstractType) - restpe.LambdaAbstract(tparams) // bounds needed? - else if (denot.isAliasType) { - var err: Option[(String, Position)] = None - val result = restpe.parameterizeWith(tparams) - for ((msg, pos) <- err) - ctx.warning( - sm"""$msg - |originally parsed type : ${tp.show} - |will be approximated by: ${result.show}. - |Proceed at own risk.""") - result + if (denot.isType) { + assert(!denot.isClass) + restpe.LambdaAbstract(tparams, cycleParanoid = true) } else PolyType.fromSymbols(tparams, restpe) @@ -126,9 +117,11 @@ object Scala2Unpickler { val companionClassMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, claz, module) if (companionClassMethod.exists) companionClassMethod.entered - val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, module, claz) - if (companionModuleMethod.exists) - companionModuleMethod.entered + if (claz.isClass) { + val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, module, claz) + if (companionModuleMethod.exists) + companionModuleMethod.entered + } } if (denot.flagsUNSAFE is Module) { @@ -586,29 +579,48 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas * tp { name: T } */ def elimExistentials(boundSyms: List[Symbol], tp: Type)(implicit ctx: Context): Type = { + // Need to be careful not to run into cyclic references here (observed when + // comiling t247.scala). That's why we avoiud taking `symbol` of a TypeRef + // unless names match up. + val isBound = (tp: Type) => { + def refersTo(tp: Type, sym: Symbol): Boolean = tp match { + case tp @ TypeRef(_, name) => sym.name == name && sym == tp.symbol + case tp: TypeVar => refersTo(tp.underlying, sym) + case tp : LazyRef => refersTo(tp.ref, sym) + case _ => false + } + boundSyms.exists(refersTo(tp, _)) + } + // Cannot use standard `existsPart` method because it calls `lookupRefined` + // which can cause CyclicReference errors. 
+ val isBoundAccumulator = new ExistsAccumulator(isBound) { + override def foldOver(x: Boolean, tp: Type): Boolean = tp match { + case tp: TypeRef => applyToPrefix(x, tp) + case _ => super.foldOver(x, tp) + } + } def removeSingleton(tp: Type): Type = if (tp isRef defn.SingletonClass) defn.AnyType else tp def elim(tp: Type): Type = tp match { case tp @ RefinedType(parent, name) => val parent1 = elim(tp.parent) tp.refinedInfo match { - case TypeAlias(info: TypeRef) if boundSyms contains info.symbol => + case TypeAlias(info: TypeRef) if isBound(info) => RefinedType(parent1, name, info.symbol.info) - case info: TypeRef if boundSyms contains info.symbol => + case info: TypeRef if isBound(info) => val info1 = info.symbol.info assert(info1.derivesFrom(defn.SingletonClass)) RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _)) case info => tp.derivedRefinedType(parent1, name, info) } - case tp @ TypeRef(pre, tpnme.Apply) if pre.isLambda => + case tp @ TypeRef(pre, tpnme.hkApply) => elim(pre) case _ => tp } val tp1 = elim(tp) - val isBound = (tp: Type) => boundSyms contains tp.typeSymbol - if (tp1 existsPart isBound) { + if (isBoundAccumulator(false, tp1)) { val anyTypes = boundSyms map (_ => defn.AnyType) val boundBounds = boundSyms map (_.info.bounds.hi) val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes) @@ -676,6 +688,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else TypeRef(pre, sym.name.asTypeName) val args = until(end, readTypeRef) if (sym == defn.ByNameParamClass2x) ExprType(args.head) + else if (args.isEmpty && sym.typeParams.nonEmpty) tycon.EtaExpand else tycon.appliedTo(args) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 70fab7e0fc5a..a46665ec02c8 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -122,7 +122,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close } if (tp.isSafeLambda) { - val (prefix, body, bindings) = extractApply(tp) + val (prefix, body, bindings) = decomposeHKApply(tp) prefix match { case prefix: TypeRef if prefix.symbol.isLambdaTrait && body.exists => return typeLambdaText(prefix.symbol, body, bindings) @@ -184,9 +184,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { * without a prefix, because the latter print nicer. 
* */ - def extractApply(tp: Type): (Type, Type, List[(Name, Type)]) = tp.stripTypeVar match { + def decomposeHKApply(tp: Type): (Type, Type, List[(Name, Type)]) = tp.stripTypeVar match { case tp @ RefinedType(parent, name) => - if (name == tpnme.Apply) { + if (name == tpnme.hkApply) { // simplify arguments so that parameters just print HK$i and not // LambdaI{...}.HK$i val simplifyArgs = new TypeMap { @@ -199,7 +199,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } (parent, simplifyArgs(tp.refinedInfo.followTypeAlias), Nil) } else if (name.isLambdaArgName) { - val (prefix, body, argBindings) = extractApply(parent) + val (prefix, body, argBindings) = decomposeHKApply(parent) (prefix, body, (name, tp.refinedInfo) :: argBindings) } else (tp, NoType, Nil) case _ => diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index c45db4ccc827..b2d2b9bd31ee 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -604,10 +604,6 @@ trait Applications extends Compatibility { self: Typer => val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt)) typedFn.tpe.widen match { case pt: PolyType => - def adaptTypeArg(tree: tpd.Tree, bound: Type): tpd.Tree = - if (bound.isLambda && !tree.tpe.isLambda && tree.tpe.typeParams.nonEmpty) - tree.withType(tree.tpe.EtaExpand) - else tree if (typedArgs.length <= pt.paramBounds.length) typedArgs = typedArgs.zipWithConserve(pt.paramBounds)(adaptTypeArg) checkBounds(typedArgs, pt) @@ -616,6 +612,9 @@ trait Applications extends Compatibility { self: Typer => assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } + def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = + tree.withType(tree.tpe.EtaExpandIfHK(bound)) + /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. */ def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { case Apply(TypeApply(tycon, targ :: Nil), args) if tycon.symbol == defn.ArrayConstructor => diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index cdce11f224b9..506b40651a30 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -768,11 +768,11 @@ class Namer { typer: Typer => completeParams(tdef.tparams) val tparamSyms = tdef.tparams map symbolOfTree val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree] - val toParameterize = tparamSyms.nonEmpty && !isDerived - val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived + //val toParameterize = tparamSyms.nonEmpty && !isDerived + //val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived def abstracted(tp: Type): Type = - if (needsLambda) tp.LambdaAbstract(tparamSyms) - else if (toParameterize) tp.parameterizeWith(tparamSyms) + if (tparamSyms.nonEmpty && !isDerived) tp.LambdaAbstract(tparamSyms) + //else if (toParameterize) tp.parameterizeWith(tparamSyms) else tp sym.info = abstracted(TypeBounds.empty) // Temporarily set info of defined type T to ` >: Nothing <: Any. @@ -785,12 +785,38 @@ class Namer { typer: Typer => // // The scheme critically relies on an implementation detail of isRef, which // inspects a TypeRef's info, instead of simply dealiasing alias types. 
- val rhsType = typedAheadType(tdef.rhs).tpe + val rhsType = abstracted(typedAheadType(tdef.rhs).tpe) val unsafeInfo = rhsType match { - case _: TypeBounds => abstracted(rhsType).asInstanceOf[TypeBounds] - case _ => TypeAlias(abstracted(rhsType), if (sym is Local) sym.variance else 0) + case bounds: TypeBounds => bounds + case alias => TypeAlias(alias, if (sym is Local) sym.variance else 0) } sym.info = NoCompleter - checkNonCyclic(sym, unsafeInfo, reportErrors = true) + sym.info = checkNonCyclic(sym, unsafeInfo, reportErrors = true) + etaExpandArgs.apply(sym.info) + } + + /** Eta expand all class types C appearing as arguments to a higher-kinded + * type parameter to type lambdas, e.g. [HK0] => C[HK0]. This is necessary + * because in `typedAppliedTypeTree` we might ahve missed some eta expansions + * of arguments in F-bounds, because the recursive type was initialized with + * TypeBounds.empty. + */ + def etaExpandArgs(implicit ctx: Context) = new TypeMap { + def apply(tp: Type): Type = { + tp match { + case tp: RefinedType => + val args = tp.argInfos(interpolate = false).mapconserve(this) + if (args.nonEmpty) { + val tycon = tp.withoutArgs(args) + val tparams = tycon.typeParams + if (args.length == tparams.length) { // if lengths differ, problem is caught in typedTypeApply + val args1 = args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.info)) + if (args1 ne args) return this(tycon).appliedTo(args1) + } + } + case _ => + } + mapOver(tp) + } } } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 33ec156a1d72..868ef4e16303 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -822,14 +822,54 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit res } - def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): AppliedTypeTree = track("typedAppliedTypeTree") { + def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") { val tpt1 = typed(tree.tpt) - val argPts = - if (ctx.mode is Mode.Pattern) tpt1.tpe.typeParams.map(_.info) - else tree.args.map(_ => WildcardType) - val args1 = tree.args.zipWithConserve(argPts)(typed(_, _)).asInstanceOf[List[Tree]] - // check that arguments conform to bounds is done in phase FirstTransform - assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) + val tparams = tpt1.tpe.typeParams + var args = tree.args + if (tparams.isEmpty) { + ctx.error(d"${tpt1.tpe} does not take type parameters", tree.pos) + tpt1 + } + else { + if (args.length != tparams.length) { + ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) + args = args.take(tparams.length) + } + def typedArg(arg: untpd.Tree, tparam: Symbol) = { + val arg1 = typed(arg, if (ctx.mode is Mode.Pattern) tparam.info else WildcardType) + adaptTypeArg(arg1, if (tparam.isCompleted) tparam.info else WildcardType) + } + val args1 = args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] + // check that arguments conform to bounds is done in phase PostTyper + val tree1 = assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) + if (tree1.tpe.isHKApply) + for (arg @ TypeBoundsTree(_, _) <- args1) + ctx.error("illegal wildcard type argument; does not correspond to type parameter of a class", arg.pos) + // The reason for outlawing such arguments is illustrated by the following example. 
+ // Say we have + // + // type RMap[A, B] = Map[B, A] + // + // Then + // + // Rmap[_, Int] + // + // translates to + // + // Lambda$I { type hk$0; type hk$1 = Int; type $apply = Map[$hk1, $hk0] } # $apply + // + // Let's call the last type T. You would expect that + // + // Map[Int, String] <: RMap[_, Int] + // + // But that's not the case given the standard subtyping rules. In fact, the rhs reduces to + // + // Map[Int, T # $hk0] + // + // That means the second argument to `Map` is unknown and String is certainly not a subtype of it. + // To avoid the surprise we outlaw problematic wildcard arguments from the start. + tree1 + } } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") { diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index fbf26044cf59..8150be6b1a99 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -51,6 +51,8 @@ class tests extends CompilerTest { //@Test def pickle_core = compileDir(dotcDir, "core", testPickling, xerrors = 2) // two spurious comparison errors in Types and TypeOps + @Test def pos_arraycopy = + compileFile(runDir, "arraycopy", List("-Ylog-classpath")) @Test def pos_t2168_pat = compileFile(posDir, "t2168", twice) @Test def pos_erasure = compileFile(posDir, "erasure", twice) @Test def pos_Coder() = compileFile(posDir, "Coder", twice) @@ -145,6 +147,7 @@ class tests extends CompilerTest { @Test def neg_traitParamsMixin = compileFile(negDir, "traitParamsMixin", xerrors = 2) @Test def neg_firstError = compileFile(negDir, "firstError", xerrors = 3) @Test def neg_implicitLowerBound = compileFile(negDir, "implicit-lower-bound", xerrors = 1) + @Test def neg_partialApplications = compileFile(negDir, "partialApplications", xerrors = 8) @Test def run_all = runFiles(runDir) diff --git a/tests/neg/partialApplications.scala b/tests/neg/partialApplications.scala new file mode 100644 index 000000000000..67f6cf059ad0 --- /dev/null +++ b/tests/neg/partialApplications.scala @@ -0,0 +1,11 @@ +object Test { + + type RMap[X, Y] = Map[Y, X] + val m = Map[Int, String]() + val ts: RMap[_, Int] = m // erorr // error + val us: RMap[String, _] = m // error // error + val vs: RMap[_, _] = m // error // error // error + val zz: RMap = m // error + +} + diff --git a/tests/pos/IterableSelfRec.scala b/tests/pending/pos/IterableSelfRec.scala similarity index 84% rename from tests/pos/IterableSelfRec.scala rename to tests/pending/pos/IterableSelfRec.scala index bba7a82d252a..a97833991785 100644 --- a/tests/pos/IterableSelfRec.scala +++ b/tests/pending/pos/IterableSelfRec.scala @@ -22,9 +22,9 @@ trait Seq[T] extends Iterable[T] { self => def apply(x: Int): T } -abstract class CollectionCompanion[+CC <: Collection { type This <: CC }] +abstract class CollectionCompanion[+CC[X] <: Collection[X] { type This <: CC }] -abstract class IterableCompanion[+CC <: Iterable { type This <: CC }] extends CollectionCompanion[CC] { +abstract class IterableCompanion[+CC[X] <: Iterable[X] { type This <: CC }] extends CollectionCompanion[CC] { def fromIterator[T](it: Iterator[T]): CC[T] def map[T, U](xs: Iterable[T], f: T => U): CC[U] = fromIterator(xs.iterator.map(f)) @@ -36,7 +36,7 @@ abstract class IterableCompanion[+CC <: Iterable { type This <: CC }] extends Co implicit def transformOps[T](xs: CC[T] @uncheckedVariance): TransformOps[CC, T] = ??? 
// new TransformOps[CC, T](xs) } -class TransformOps[+CC <: Iterable { type This <: CC }, T] (val xs: CC[T]) extends AnyVal { +class TransformOps[+CC[X] <: Iterable[X] { type This <: CC }, T] (val xs: CC[T]) extends AnyVal { def companion[T](xs: CC[T] @uncheckedVariance): IterableCompanion[CC] = xs.companion def map[U](f: T => U): CC[U] = companion(xs).map(xs, f) def filter(p: T => Boolean): CC[T] = companion(xs).filter(xs, p) diff --git a/tests/pos/partialApplications.scala b/tests/pos/partialApplications.scala index b68c4b945152..285dc86613cc 100644 --- a/tests/pos/partialApplications.scala +++ b/tests/pos/partialApplications.scala @@ -1,13 +1,46 @@ object Test { - type Histogram = Map[_, Int] + type Histogram[X] = Map[X, Int] - type StringlyHistogram = Histogram[_ >: String] + type StringlyHistogram[X >: String] = Histogram[X] val xs: Histogram[String] = Map[String, Int]() val ys: StringlyHistogram[String] = xs - val zs: StringlyHistogram = xs + def e = xs + + val zs: StringlyHistogram[_] = e + + type IntMap[Y] = Map[Int, Y] + + val is = Map[Int, Boolean]() + + val js: IntMap[Boolean] = is + + val ks: IntMap[_] = is + + type RMap[X, Y] = Map[Y, X] + + val rs = Map[Int, Float]() + + val ss: RMap[Float, Int] = rs + +} + +object Test2 { + type Histogram = Map[_, Int] + + type StringlyHistogram = Histogram[_ >: String] // error + + val xs: Histogram[String] = Map[String, Int]() // error + + val ys: StringlyHistogram[String] = xs // error + + val zs: StringlyHistogram = xs // error + + val xs1 = xs + val ys1 = ys + val zs1 = zs }
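// Closing sketch (illustrative only, using the LambdaXYZ/hk$/$apply encoding
// this patch introduces): the alias
//
//   type IntMap[Y] = Map[Int, Y]
//
// from tests/pos/partialApplications.scala is recorded as the type lambda
//
//   LambdaXYZ { type $apply = Map[Int, this.hk$0] }
//
// and the application `IntMap[Boolean]` as
//
//   LambdaXYZ { type $apply = Map[Int, this.hk$0] } { type hk$0 = Boolean } # $apply
//
// which lookupRefined/betaReduce simplifies back to `Map[Int, Boolean]`, as
// exercised by `val js: IntMap[Boolean] = is` above.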