diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 97893647c7fb..d66d1ecdb898 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -71,6 +71,11 @@ object Config { /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ final val fastPathForRefinedSubtype = true + /** If this flag is set, $apply projections are checked that they apply to a + * higher-kinded type. + */ + final val checkProjections = false + /** When set, use new signature-based matching. * Advantage of doing so: It's supposed to be faster * Disadvantage: It might hide inconsistencies, so while debugging it's better to turn it off diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index fcd9ef224f10..a503a2d23cb0 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -494,7 +494,7 @@ class Definitions { /** The set of HigherKindedXYZ traits encountered so far */ def lambdaTraits: Set[Symbol] = myLambdaTraits - private var lambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() + private var LambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() /** The HigherKinded trait corresponding to symbols `boundSyms` (which are assumed * to be the type parameters of a higher-kided type). This is a class symbol that @@ -513,7 +513,7 @@ class Definitions { * - for each positive or negative variance v_i there is a parent trait Pj which * is the same as LambdaXYZ except that it has `I` in i-th position. 
*/ - def lambdaTrait(vcs: List[Int]): ClassSymbol = { + def LambdaTrait(vcs: List[Int]): ClassSymbol = { assert(vcs.nonEmpty) def varianceFlags(v: Int) = v match { @@ -527,17 +527,17 @@ class Definitions { val cls = denot.asClass.classSymbol val paramDecls = newScope for (i <- 0 until vcs.length) - newTypeParam(cls, tpnme.lambdaArgName(i), varianceFlags(vcs(i)), paramDecls) - newTypeField(cls, tpnme.Apply, Covariant, paramDecls) + newTypeParam(cls, tpnme.LambdaArgName(i), varianceFlags(vcs(i)), paramDecls) + newTypeField(cls, tpnme.hkApply, Covariant, paramDecls) val parentTraitRefs = for (i <- 0 until vcs.length if vcs(i) != 0) - yield lambdaTrait(vcs.updated(i, 0)).typeRef + yield LambdaTrait(vcs.updated(i, 0)).typeRef denot.info = ClassInfo( ScalaPackageClass.thisType, cls, ObjectClass.typeRef :: parentTraitRefs.toList, paramDecls) } } - val traitName = tpnme.lambdaTraitName(vcs) + val traitName = tpnme.LambdaTraitName(vcs) def createTrait = { val cls = newClassSymbol( @@ -549,7 +549,7 @@ class Definitions { cls } - lambdaTraitForVariances.getOrElseUpdate(vcs, createTrait) + LambdaTraitForVariances.getOrElseUpdate(vcs, createTrait) } // ----- primitive value class machinery ------------------------------------------ diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index bf5b68ce95fb..1a26463473ac 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -110,7 +110,7 @@ object NameOps { /** The index of the higher-kinded type parameter with this name. * Pre: isLambdaArgName. 
*/ - def lambdaArgIndex: Int = + def LambdaArgIndex: Int = name.drop(tpnme.LAMBDA_ARG_PREFIX.length).toString.toInt /** If the name ends with $nn where nn are diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index 52318a386bd1..7c73d95da242 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -173,7 +173,7 @@ object StdNames { final val WILDCARD_STAR: N = "_*" final val REIFY_TREECREATOR_PREFIX: N = "$treecreator" final val REIFY_TYPECREATOR_PREFIX: N = "$typecreator" - final val LAMBDA_ARG_PREFIX: N = "HK$" + final val LAMBDA_ARG_PREFIX: N = "hk$" final val LAMBDA_ARG_PREFIXhead: Char = LAMBDA_ARG_PREFIX.head final val Any: N = "Any" @@ -311,7 +311,7 @@ object StdNames { val AnnotatedType: N = "AnnotatedType" val AppliedTypeTree: N = "AppliedTypeTree" - val Apply: N = "Apply" + val hkApply: N = "$apply" val ArrayAnnotArg: N = "ArrayAnnotArg" val Constant: N = "Constant" val ConstantType: N = "ConstantType" @@ -739,8 +739,8 @@ object StdNames { def syntheticTypeParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticTypeParamName)(breakOut) - def lambdaTraitName(vcs: List[Int]): TypeName = LambdaPrefix ++ vcs.map(varianceSuffix).mkString - def lambdaArgName(n: Int) = LAMBDA_ARG_PREFIX ++ n.toString + def LambdaTraitName(vcs: List[Int]): TypeName = LambdaPrefix ++ vcs.map(varianceSuffix).mkString + def LambdaArgName(n: Int) = LAMBDA_ARG_PREFIX ++ n.toString final val Conforms = encode("<:<") diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala index e4bbf2305805..0083ac626fae 100644 --- a/src/dotty/tools/dotc/core/Substituters.scala +++ b/src/dotty/tools/dotc/core/Substituters.scala @@ -277,4 +277,31 @@ trait Substituters { this: Context => final class SubstParamsMap(from: BindingType, to: List[Type]) extends DeepTypeMap { def apply(tp: Type) = substParams(tp, from, to, this) } + + /** A map for "cycle safe 
substitutions" which do not force the denotation + * of a TypeRef unless the name matches up with one of the substituted symbols. + */ + final class SafeSubstMap(from: List[Symbol], to: List[Type]) extends TypeMap { + def apply(tp: Type): Type = tp match { + case tp: NamedType => + try { + var sym: Symbol = null + var fs = from + var ts = to + while (fs.nonEmpty) { + if (fs.head.name == tp.name) { + if (sym == null) sym = tp.symbol + if (fs.head eq sym) return ts.head + } + fs = fs.tail + ts = ts.tail + } + tp.newLikeThis(apply(tp.prefix)) + } + catch { + case ex: CyclicReference => tp.derivedSelect(apply(tp.prefix)) + } + case _ => mapOver(tp) + } + } } diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 617e4eb71222..927c4fcc5522 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -15,6 +15,7 @@ import typer.Mode import util.Positions.Position import config.Printers._ import collection.mutable +import java.util.NoSuchElementException object TypeApplications { @@ -45,7 +46,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** The type parameters of this type are: * For a ClassInfo type, the type parameters of its class. * For a typeref referring to a class, the type parameters of the class. - * For a typeref referring to an alias or abstract type, the type parameters of + * For a typeref referring to a Lambda class, the type parameters of * its right hand side or upper bound. * For a refinement type, the type parameters of its parent, unless the refinement * re-binds the type parameter with a type-alias. 
@@ -59,7 +60,11 @@ class TypeApplications(val self: Type) extends AnyVal { case tp: TypeRef => val tsym = tp.typeSymbol if (tsym.isClass) tsym.typeParams - else tp.underlying.typeParams + else if (tsym.isAliasType) tp.underlying.typeParams + else { + val lam = LambdaClass(forcing = false) + if (lam.exists) lam.typeParams else Nil + } case tp: RefinedType => val tparams = tp.parent.typeParams tp.refinedInfo match { @@ -81,7 +86,7 @@ class TypeApplications(val self: Type) extends AnyVal { * do not remove corresponding type parameters. * Second, it will return Nil for BoundTypes because we might get a NullPointer exception * on PolyParam#underlying otherwise (demonstrated by showClass test). - * Third, it won't return higher-kinded type parameters, i.e. the type parameters of + * Third, it won't return abstract higher-kinded type parameters, i.e. the type parameters of * an abstract type are always empty. */ final def rawTypeParams(implicit ctx: Context): List[TypeSymbol] = { @@ -132,10 +137,45 @@ class TypeApplications(val self: Type) extends AnyVal { def isSafeLambda(implicit ctx: Context): Boolean = LambdaClass(forcing = false).exists - /** Is type `tp` a Lambda with all Arg$ fields fully instantiated? */ + /** Is type `tp` a Lambda with all hk$i fields fully instantiated? */ def isInstantiatedLambda(implicit ctx: Context): Boolean = isSafeLambda && typeParams.isEmpty + /** Is receiver type higher-kinded (i.e. of kind != "*")? */ + def isHK(implicit ctx: Context): Boolean = self.dealias match { + case self: TypeRef => self.info.isHK + case RefinedType(_, name) => name == tpnme.hkApply || name.isLambdaArgName + case TypeBounds(_, hi) => hi.isHK + case _ => false + } + + /** is receiver of the form T#$apply? */ + def isHKApply: Boolean = self match { + case TypeRef(_, name) => name == tpnme.hkApply + case _ => false + } + + /** True if it can be determined without forcing that the class symbol + * of this application exists and is not a lambda trait. 
+ * Equivalent to + * + * self.classSymbol.exists && !self.classSymbol.isLambdaTrait + * + * but without forcing anything. + */ + def noHK(implicit ctx: Context): Boolean = self.stripTypeVar match { + case self: RefinedType => + self.parent.noHK + case self: TypeRef => + (self.denot.exists) && { + val sym = self.symbol + if (sym.isClass) !sym.isLambdaTrait + else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.noHK + } + case _ => + false + } + /** Encode the type resulting from applying this type to given arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { @@ -145,24 +185,7 @@ class TypeApplications(val self: Type) extends AnyVal { println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") } val tparam = tparams.head - def needsEtaExpand = - try { - (tparam is HigherKinded) && !arg.isLambda && arg.typeParams.nonEmpty - } - catch { - case ex: CyclicReference => - if (ctx.mode.is(Mode.Scala2Unpickling)) - // When unpickling Scala2, we might run into cyclic references when - // checking whether eta expansion is needed or eta expanding. - // (e.g. try compile collection/generic/GenericTraversableTemplate.scala). - // In that case, back out gracefully. Ideally, we should not have - // underdefined pickling data that requires post-transformations like - // eta expansion, but we can't change Scala2's. 
- false - else throw ex - } - val arg1 = if (needsEtaExpand) arg.EtaExpand else arg - val tp1 = RefinedType(tp, tparam.name, arg1.toBounds(tparam)) + val tp1 = RefinedType(tp, tparam.name, arg.toBounds(tparam)) matchParams(tp1, tparams.tail, args1) case nil => tp } @@ -182,12 +205,14 @@ class TypeApplications(val self: Type) extends AnyVal { if (tsym.isClass || !tp.typeSymbol.isCompleting) original.typeParams else { ctx.warning(i"encountered F-bounded higher-kinded type parameters for $tsym; assuming they are invariant") - defn.lambdaTrait(args map alwaysZero).typeParams + defn.LambdaTrait(args map alwaysZero).typeParams // @@@ can we force? } matchParams(tp, safeTypeParams, args) } case tp: RefinedType => - tp.derivedRefinedType( + val redux = tp.EtaReduce + if (redux.exists) redux.appliedTo(args) // Rewrite ([hk$0] => C[hk$0])(T) to C[T] + else tp.derivedRefinedType( instantiate(tp.parent, original), tp.refinedName, tp.refinedInfo) @@ -199,28 +224,31 @@ class TypeApplications(val self: Type) extends AnyVal { tp } + /** Same as isHK, except we classify all abstract types as HK, + * (they must be, because the are applied). This avoids some forcing and + * CyclicReference errors of the standard isHK. + */ + def isKnownHK(tp: Type): Boolean = tp match { + case tp: TypeRef => + val sym = tp.symbol + if (sym.isClass) sym.isLambdaTrait + else !sym.isAliasType || isKnownHK(tp.info) + case tp: TypeProxy => isKnownHK(tp.underlying) + case _ => false + } + if (args.isEmpty || ctx.erasedTypes) self else { val res = instantiate(self, self) - if (res.isInstantiatedLambda) - // Note: isInstantiatedLambda needs to be conservative, using isSafeLambda - // in order to avoid cyclic reference errors. But this means that some fully - // instantiated types will remain unprojected, which essentially means - // that they stay as higher-kinded types. checkNonCyclic checks the type again - // and potentially inserts an #Apply then. 
Hopefully, this catches all types - // that fall through the hole. Not adding an #Apply typically manifests itself - // with a <:< failure of two types that "look the same". An example is #779, - // where compiling scala.immutable.Map gives a bounds violation. - res.select(tpnme.Apply) - else res + if (isKnownHK(res)) TypeRef(res, tpnme.hkApply) else res } } /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`. */ def simplifyApply(implicit ctx: Context): Type = self match { - case self @ TypeRef(prefix, tpnme.Apply) if prefix.isInstantiatedLambda => - prefix.member(tpnme.Apply).info match { + case self @ TypeRef(prefix, tpnme.hkApply) if prefix.isInstantiatedLambda => + prefix.member(tpnme.hkApply).info match { case TypeAlias(alias) => alias case _ => self } @@ -416,9 +444,10 @@ class TypeApplications(val self: Type) extends AnyVal { case RefinedThis(tp) => tp eq target case tp: NamedType => - tp.info match { + if (tp.symbol.isClass) !tp.symbol.isStatic && recur(tp.prefix) + else tp.info match { case TypeAlias(alias) => recur(alias) - case _ => !tp.symbol.isStatic && recur(tp.prefix) + case _ => recur(tp.prefix) } case tp: RefinedType => recur(tp.refinedInfo) || recur(tp.parent) @@ -434,91 +463,48 @@ class TypeApplications(val self: Type) extends AnyVal { recur(self) } - /** Given a type alias - * - * type T[boundSyms] = p.C[targs] - * - * produce its equivalent right hand side RHS that makes no reference to the bound - * symbols on the left hand side. I.e. the type alias can be replaced by - * - * type T = RHS - * - * There are two strategies how this is achieved. - - * 1st strategy: Applies if `C` is a class such that every bound symbol in `boundSyms` - * appears as an argument in `targs`, and in the same order. Then the rewriting replaces - * bound symbols by references to the parameters of class C. 
Example: - * - * Say we have: - * - * class Triple[type T1, type T2, type T3] - * type A[X] = Triple[(X, X), X, String] - * - * Then this is rewritable, as `X` appears as second type argument to `Triple`. - * Occurrences of `X` are rewritten to `this.T2` and the whole definition becomes: - * - * type A = Triple { type T1 = (this.T2, this.T2); type T3 = String } - * - * 2nd strategy: Used as a fallback if 1st strategy does not apply. It rewrites - * the RHS to a typed lambda abstraction. - */ - def parameterizeWith(boundSyms: List[Symbol])(implicit ctx: Context): Type = { - def matchParams(bsyms: List[Symbol], tparams: List[Symbol], targs: List[Type], - correspondingParamName: Map[Symbol, TypeName]): Type = { - if (bsyms.isEmpty) { - val correspondingNames = correspondingParamName.values.toSet - - def replacements(rt: RefinedType): List[Type] = - for (sym <- boundSyms) - yield TypeRef(RefinedThis(rt), correspondingParamName(sym)) - - def rewrite(tp: Type): Type = tp match { - case tp @ RefinedType(parent, name: TypeName) => - if (correspondingNames contains name) rewrite(parent) - else RefinedType( - rewrite(parent), name, - rt => tp.refinedInfo.subst(boundSyms, replacements(rt))) - case tp => - tp - } - - rewrite(self) - } - else if (tparams.isEmpty || targs.isEmpty) - LambdaAbstract(boundSyms) - else if (bsyms.head == targs.head.typeSymbol) - matchParams(bsyms.tail, tparams.tail, targs.tail, - correspondingParamName + (bsyms.head -> tparams.head.name.asTypeName)) - else - matchParams(bsyms, tparams.tail, targs.tail, correspondingParamName) - } - val cls = self.typeSymbol - if (cls.isClass) matchParams(boundSyms, cls.typeParams, argInfos, Map()) - else LambdaAbstract(boundSyms) - } - /** The typed lambda abstraction of this type `T` relative to `boundSyms`. 
* This is: * - * LambdaXYZ{ type Apply = subst(T) } + * LambdaXYZ{ bounds }{ type Apply = toHK(T) } * - * where XYZ reflets that variances of the bound symbols and - * `subst` is a substitution that replaces every bound symbol sym_i by - * `this.Arg$i`. + * where + * - XYZ reflects the variances of the bound symbols, + * - `bounds` consists of type declarations `type hk$i >: toHK(L) <: toHK(U)`, + * one for each type parameter in `T` with non-trivial bounds L,U. + * - `toHK` is a substitution that replaces every bound symbol sym_i by + * `this.hk$i`. * * TypeBounds are lambda abstracting by lambda abstracting their upper bound. + * + * @param cycleParanoid If `true` don't force denotation of a TypeRef unless + * its name matches one of `boundSyms`. Needed to avoid cycles + * involving F-bounded hk-types when reading Scala2 collection classes + * with new hk-scheme. */ - def LambdaAbstract(boundSyms: List[Symbol])(implicit ctx: Context): Type = { - def expand(tp: Type) = { - val lambda = defn.lambdaTrait(boundSyms.map(_.variance)) - val substitutedRHS = (rt: RefinedType) => { + def LambdaAbstract(boundSyms: List[Symbol], cycleParanoid: Boolean = false)(implicit ctx: Context): Type = { + def expand(tp: Type): Type = { + val lambda = defn.LambdaTrait(boundSyms.map(_.variance)) + def toHK(tp: Type) = (rt: RefinedType) => { val argRefs = boundSyms.indices.toList.map(i => - RefinedThis(rt).select(tpnme.lambdaArgName(i))) - tp.subst(boundSyms, argRefs).bounds.withVariance(1) + RefinedThis(rt).select(tpnme.LambdaArgName(i))) + val substituted = + if (cycleParanoid) new ctx.SafeSubstMap(boundSyms, argRefs).apply(tp) + else tp.subst(boundSyms, argRefs) + substituted.bounds.withVariance(1) } - val res = RefinedType(lambda.typeRef, tpnme.Apply, substitutedRHS) - //println(i"lambda abstract $self wrt $boundSyms%, % --> $res") - res + val boundNames = new mutable.ListBuffer[Name] + val boundss = new mutable.ListBuffer[TypeBounds] + for (sym <- boundSyms) { + val bounds = 
sym.info.bounds + if (!(TypeBounds.empty <:< bounds)) { + boundNames += sym.name + boundss += bounds + } + } + val lambdaWithBounds = + RefinedType.make(lambda.typeRef, boundNames.toList, boundss.toList.map(toHK)) + RefinedType(lambdaWithBounds, tpnme.hkApply, toHK(tp)) } self match { case self @ TypeBounds(lo, hi) => @@ -530,21 +516,109 @@ class TypeApplications(val self: Type) extends AnyVal { /** Convert a type constructor `TC` with type parameters `T1, ..., Tn` to * - * LambdaXYZ { Apply = TC[$hkArg$0, ..., $hkArg$n] } + * LambdaXYZ { Apply = TC[hk$0, ..., hk$n] } * * where XYZ is a corresponds to the variances of the type parameters. */ def EtaExpand(implicit ctx: Context): Type = { val tparams = typeParams self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparams) + //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") + } + + /** Eta expand if `bound` is a higher-kinded type */ + def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = + if (bound.isHK && !isHK && self.typeSymbol.isClass && typeParams.nonEmpty) EtaExpand + else self + + /** Eta expand the prefix in front of any refinements. */ + def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match { + case self: RefinedType => + self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo) + case _ => + self.EtaExpand + } + + /** If `self` is a (potentially partially instantiated) eta expansion of type T, return T, + * otherwise NoType. More precisely if `self` is of the form + * + * T { type $apply = U[T1, ..., Tn] } + * + * where + * + * - hk$0, ..., hk${m-1} are the type parameters of T + * - a sublist of the arguments Ti_k (k = 0,...,m_1) are of the form T{...}.this.hk$i_k + * + * rewrite `self` to + * + * U[T'1,...T'j] + * + * where + * + * T'j = _ >: Lj <: Uj if j is in the i_k list defined above + * where Lj and Uj are the bounds of hk$j mapped using `fromHK`. 
+ * = fromHK(Tj) otherwise. + * + * `fromHK` is the function that replaces every occurrence of `.this.hk$i` by the + * corresponding parameter reference in `U[T'1,...T'j]` + */ + def EtaReduce(implicit ctx: Context): Type = { + def etaCore(tp: Type, tparams: List[Symbol]): Type = tparams match { + case Nil => tp + case tparam :: otherParams => + tp match { + case tp: RefinedType => + tp.refinedInfo match { + case TypeAlias(TypeRef(RefinedThis(rt), rname)) + if (rname == tparam.name) && (rt eq self) => + // we have a binding T = Lambda$XYZ{...}.this.hk$i where hk$i names the current `tparam`. + val pcore = etaCore(tp.parent, otherParams) + val hkBounds = self.member(rname).info.bounds + if (TypeBounds.empty <:< hkBounds) pcore + else tp.derivedRefinedType(pcore, tp.refinedName, hkBounds) + case _ => + val pcore = etaCore(tp.parent, tparams) + if (pcore.exists) tp.derivedRefinedType(pcore, tp.refinedName, tp.refinedInfo) + else NoType + } + case _ => + NoType + } + } + // Map references `Lambda$XYZ{...}.this.hk$i to corresponding parameter references of the reduced core. 
+ def fromHK(reduced: Type) = reduced match { + case reduced: RefinedType => + new TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(RefinedThis(binder), name) if binder eq self => + assert(name.isLambdaArgName) + RefinedThis(reduced).select(reduced.typeParams.apply(name.LambdaArgIndex)) + case _ => + mapOver(tp) + } + }.apply(reduced) + case _ => + reduced + } + + self match { + case self @ RefinedType(parent, tpnme.hkApply) => + val lc = parent.LambdaClass(forcing = false) + self.refinedInfo match { + case TypeAlias(alias) if lc.exists => + fromHK(etaCore(alias, lc.typeParams.reverse)) + case _ => NoType + } + case _ => NoType + } } - /** Test whether this type has a base type of the form `B[T1, ..., Bn]` where + /** Test whether this type has a base type of the form `B[T1, ..., Tn]` where * the type parameters of `B` match one-by-one the variances of `tparams`, * and where the lambda abstracted type * - * LambdaXYZ { type Apply = B[$hkArg$0, ..., $hkArg$n] } - * { type $hkArg$0 = T1; ...; type $hkArg$n = Tn } + * LambdaXYZ { type Apply = B[hk$0, ..., hk${n-1}] } + * { type hk$0 = T1; ...; type hk${n-1} = Tn } * * satisfies predicate `p`. Try base types in the order of their occurrence in `baseClasses`. * A type parameter matches a variance V if it has V as its variance or if V == 0. 
@@ -574,8 +648,8 @@ class TypeApplications(val self: Type) extends AnyVal { case nil => false } - if (tparams.isEmpty) false - else if (typeParams.nonEmpty) p(EtaExpand) || classBounds.nonEmpty && tryLift(self.baseClasses) - else classBounds.nonEmpty && tryLift(self.baseClasses) + tparams.nonEmpty && + (typeParams.nonEmpty && p(EtaExpand) || + classBounds.nonEmpty && tryLift(self.baseClasses)) } } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index ea815f6c0778..fce803c56866 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -466,21 +466,19 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } - /** If `projection` is of the form T # Apply where `T` is an instance of a Lambda class, - * and `other` is not a type lambda projection, then convert `other` to a type lambda `U`, and + /** If `projection` is a hk projection T#$apply + * and `other` is not a hk projection, then convert `other` to a hk projection `U`, and * continue with `T <:< U` if `inOrder` is true and `U <:< T` otherwise. 
*/ def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = - projection.name == tpnme.Apply && { - val lambda = projection.prefix.LambdaClass(forcing = true) - lambda.exists && !other.isLambda && - other.testLifted(lambda.typeParams, - if (inOrder) isSubType(projection.prefix, _) else isSubType(_, projection.prefix), - if (inOrder) Nil else classBounds(projection.prefix)) - } + projection.name == tpnme.hkApply && + !other.isHKApply && + other.testLifted(projection.prefix.LambdaClass(forcing = true).typeParams, + if (inOrder) isSubType(projection.prefix, _) else isSubType(_, projection.prefix), + if (inOrder) Nil else classBounds(projection.prefix)) /** The class symbols bounding the type of the `Apply` member of `tp` */ - private def classBounds(tp: Type) = tp.member(tpnme.Apply).info.classSymbols + private def classBounds(tp: Type) = tp.member(tpnme.hkApply).info.classSymbols /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time * to keep the constraint as wide as possible. Specifically, if @@ -633,11 +631,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** Does `tp` need to be eta lifted to be comparable to `target`? 
*/ private def needsEtaLift(tp: Type, target: RefinedType): Boolean = { - //default.echo(i"needs eta $tp $target?", { + // if (tp.isLambda != tp.isHK) println(i"discrepancy for $tp, isLambda = ${tp.isLambda}, isHK = ${tp.isHK}") val name = target.refinedName - (name.isLambdaArgName || (name eq tpnme.Apply)) && target.isLambda && - tp.exists && !tp.isLambda - //}) + (name.isLambdaArgName || (name eq tpnme.hkApply)) && + tp.exists && !tp.isLambda // we do encounter Lambda classes without any arguments here } /** Narrow gadt.bounds for the type parameter referenced by `tr` to include @@ -932,7 +929,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tp1.derivedRefinedType( tp1.parent & tp2.parent, tp1.refinedName, - tp1.refinedInfo & tp2.refinedInfo) + tp1.refinedInfo & tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1))) case _ => NoType } @@ -998,7 +995,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tp1.derivedRefinedType( tp1.parent | tp2.parent, tp1.refinedName, - tp1.refinedInfo | tp2.refinedInfo) + tp1.refinedInfo | tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1))) case _ => NoType } @@ -1235,5 +1232,12 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) + override def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = + if (projection.name == tpnme.hkApply) + traceIndented(i"compareHK $projection, $other, $inOrder") { + super.compareHK(projection, other, inOrder) + } + else super.compareHK(projection, other, inOrder) + override def toString = "Subtype trace:" + { try b.toString finally b.clear() } } diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala index f27b2fd1ea5f..616e222e0b55 100644 --- a/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/src/dotty/tools/dotc/core/TypeErasure.scala @@ -445,7 +445,11 @@ class 
TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean case tp: TypeRef => if (!tp.denot.exists) throw new MissingType(tp.prefix, tp.name) val sym = tp.symbol - if (!sym.isClass) sigName(tp.info) + if (!sym.isClass) { + val info = tp.info + if (!info.exists) assert(false, s"undefined: $tp with symbol $sym") + sigName(info) + } else if (isDerivedValueClass(sym)) sigName(eraseDerivedValueClassRef(tp)) else normalizeClass(sym.asClass).fullName.asTypeName case defn.ArrayType(elem) => diff --git a/src/dotty/tools/dotc/core/TypeOps.scala index 2a9dbd09c4e3..77c6805f01a9 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -21,7 +21,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. * class C { type T; def f(x: T): T } * * and an expression `e` of type `C`. Then computing the type of `e.f` leads - * to the query asSeenFrom(`C`, `(x: T)T`). What should it's result be? The + * to the query asSeenFrom(`C`, `(x: T)T`). What should its result be? The * naive answer `(x: C.T)C.T` is incorrect given that we treat `C.T` as the existential * `exists(c: C)c.T`. What we need to do instead is to skolemize the existential. So * the answer would be `(x: c.T)c.T` for some (unknown) value `c` of type `C`. diff --git a/src/dotty/tools/dotc/core/Types.scala index 358720836787..e545066af221 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -833,7 +833,7 @@ object Types { * * P { type T = String, type R = P{...}.T } # R --> String * - * (2) The refinement is a fully instantiated type lambda, and the projected name is "Apply". + * (2) The refinement is a fully instantiated type lambda, and the projected name is "$apply". * In this case the rhs of the apply is returned with all references to lambda argument types * substituted by their definitions. 
* @@ -869,7 +869,7 @@ object Types { else if (!pre.refinementRefersToThis) alias else alias match { case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1) - case _ => if (name == tpnme.Apply) betaReduce(alias) else NoType // (2) + case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) } case _ => loop(pre.parent, resolved) } @@ -1487,12 +1487,14 @@ object Types { if (prefix eq this.prefix) this else { val res = prefix.lookupRefined(name) - if (res.exists) res else newLikeThis(prefix) + if (res.exists) res + else if (name == tpnme.hkApply && prefix.noHK) derivedSelect(prefix.EtaExpandCore) + else newLikeThis(prefix) } /** Create a NamedType of the same kind as this type, but with a new prefix. */ - protected def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType = + def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType = NamedType(prefix, name) /** Create a NamedType of the same kind as this type, but with a "inherited name". @@ -1725,9 +1727,15 @@ object Types { } object TypeRef { + def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) = + if (name == tpnme.hkApply && prefix.noHK) + assert(false, s"bad type : $prefix.$name should not be $$applied") + /** Create type ref with given prefix and name */ - def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = + def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = { + if (Config.checkProjections) checkProjection(prefix, name) ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TypeRef] + } /** Create type ref to given symbol */ def apply(prefix: Type, sym: TypeSymbol)(implicit ctx: Context): TypeRef = @@ -1736,8 +1744,10 @@ object Types { /** Create a non-member type ref (which cannot be reloaded using `member`), * with given prefix, name, and symbol. 
*/ - def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = + def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = { + if (Config.checkProjections) checkProjection(prefix, name) unique(new TypeRefWithFixedSym(prefix, name, sym)) + } /** Create a type ref referring to given symbol with given name. * This is very similar to TypeRef(Type, Symbol), @@ -1815,7 +1825,16 @@ object Types { } case class LazyRef(refFn: () => Type) extends UncachedProxyType with ValueType { - lazy val ref = refFn() + private var myRef: Type = null + private var computed = false + lazy val ref = { + if (computed) assert(myRef != null) + else { + computed = true + myRef = refFn() + } + myRef + } override def underlying(implicit ctx: Context) = ref override def toString = s"LazyRef($ref)" override def equals(other: Any) = other match { @@ -1856,30 +1875,16 @@ object Types { case refinedInfo: TypeBounds if refinedInfo.variance != 0 && refinedName.isLambdaArgName => val cls = parent.LambdaClass(forcing = false) if (cls.exists) - assert(refinedInfo.variance == cls.typeParams.apply(refinedName.lambdaArgIndex).variance, - s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.lambdaArgIndex).variance}, ${refinedInfo.variance}") + assert(refinedInfo.variance == cls.typeParams.apply(refinedName.LambdaArgIndex).variance, + s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.LambdaArgIndex).variance}, ${refinedInfo.variance}") case _ => } this } - /** Derived refined type, with a twist: A refinement with a higher-kinded type param placeholder - * is transformed to a refinement of the original type parameter if that one exists. 
- */ - def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = { - lazy val underlyingTypeParams = parent.rawTypeParams - - if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) - this - else if ( refinedName.isLambdaArgName - //&& { println(s"deriving $refinedName $parent $underlyingTypeParams"); true } - && refinedName.lambdaArgIndex < underlyingTypeParams.length - && !parent.isLambda) - derivedRefinedType(parent.EtaExpand, refinedName, refinedInfo) - else - if (false) RefinedType(parent, refinedName, refinedInfo) - else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) - } + def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = + if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this + else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) /** Add this refinement to `parent`, provided If `refinedName` is a member of `parent`. 
*/ def wrapIfMember(parent: Type)(implicit ctx: Context): Type = @@ -3203,7 +3208,9 @@ object Types { class MissingType(pre: Type, name: Name)(implicit ctx: Context) extends TypeError( i"""cannot resolve reference to type $pre.$name - |the classfile defining the type might be missing from the classpath${otherReason(pre)}""".stripMargin) + |the classfile defining the type might be missing from the classpath${otherReason(pre)}""".stripMargin) { + printStackTrace() + } private def otherReason(pre: Type)(implicit ctx: Context): String = pre match { case pre: ThisType if pre.givenSelfType.exists => diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 8847d2de3c40..58697c196b36 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -176,7 +176,7 @@ class TreePickler(pickler: TastyPickler) { pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix) } case tpe: NamedType => - if (tpe.name == tpnme.Apply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) + if (tpe.name == tpnme.hkApply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will // be reconstituted when unpickling. 
pickleType(tpe.prefix) @@ -200,7 +200,9 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)} case tpe: RefinedThis => writeByte(REFINEDthis) - writeRef(pickledTypes.get(tpe.binder).asInstanceOf[Addr]) + val binderAddr = pickledTypes.get(tpe.binder) + assert(binderAddr != null, tpe.binder) + writeRef(binderAddr.asInstanceOf[Addr]) case tpe: SkolemType => pickleType(tpe.info) case tpe: RefinedType => diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index e6eb89008bdc..af33ce3c2723 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -50,18 +50,9 @@ object Scala2Unpickler { */ def depoly(tp: Type, denot: SymDenotation)(implicit ctx: Context): Type = tp match { case TempPolyType(tparams, restpe) => - if (denot.isAbstractType) - restpe.LambdaAbstract(tparams) // bounds needed? - else if (denot.isAliasType) { - var err: Option[(String, Position)] = None - val result = restpe.parameterizeWith(tparams) - for ((msg, pos) <- err) - ctx.warning( - sm"""$msg - |originally parsed type : ${tp.show} - |will be approximated by: ${result.show}. 
- |Proceed at own risk.""") - result + if (denot.isType) { + assert(!denot.isClass) + restpe.LambdaAbstract(tparams, cycleParanoid = true) } else PolyType.fromSymbols(tparams, restpe) @@ -127,9 +118,11 @@ object Scala2Unpickler { val companionClassMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, claz, module) if (companionClassMethod.exists) companionClassMethod.entered - val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, module, claz) - if (companionModuleMethod.exists) - companionModuleMethod.entered + if (claz.isClass) { + val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, module, claz) + if (companionModuleMethod.exists) + companionModuleMethod.entered + } } if (denot.flagsUNSAFE is Module) { @@ -591,29 +584,48 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas * tp { name: T } */ def elimExistentials(boundSyms: List[Symbol], tp: Type)(implicit ctx: Context): Type = { + // Need to be careful not to run into cyclic references here (observed when + // comiling t247.scala). That's why we avoiud taking `symbol` of a TypeRef + // unless names match up. + val isBound = (tp: Type) => { + def refersTo(tp: Type, sym: Symbol): Boolean = tp match { + case tp @ TypeRef(_, name) => sym.name == name && sym == tp.symbol + case tp: TypeVar => refersTo(tp.underlying, sym) + case tp : LazyRef => refersTo(tp.ref, sym) + case _ => false + } + boundSyms.exists(refersTo(tp, _)) + } + // Cannot use standard `existsPart` method because it calls `lookupRefined` + // which can cause CyclicReference errors. 
+ val isBoundAccumulator = new ExistsAccumulator(isBound) { + override def foldOver(x: Boolean, tp: Type): Boolean = tp match { + case tp: TypeRef => applyToPrefix(x, tp) + case _ => super.foldOver(x, tp) + } + } def removeSingleton(tp: Type): Type = if (tp isRef defn.SingletonClass) defn.AnyType else tp def elim(tp: Type): Type = tp match { case tp @ RefinedType(parent, name) => val parent1 = elim(tp.parent) tp.refinedInfo match { - case TypeAlias(info: TypeRef) if boundSyms contains info.symbol => + case TypeAlias(info: TypeRef) if isBound(info) => RefinedType(parent1, name, info.symbol.info) - case info: TypeRef if boundSyms contains info.symbol => + case info: TypeRef if isBound(info) => val info1 = info.symbol.info assert(info1.derivesFrom(defn.SingletonClass)) RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _)) case info => tp.derivedRefinedType(parent1, name, info) } - case tp @ TypeRef(pre, tpnme.Apply) if pre.isLambda => + case tp @ TypeRef(pre, tpnme.hkApply) => elim(pre) case _ => tp } val tp1 = elim(tp) - val isBound = (tp: Type) => boundSyms contains tp.typeSymbol - if (tp1 existsPart isBound) { + if (isBoundAccumulator(false, tp1)) { val anyTypes = boundSyms map (_ => defn.AnyType) val boundBounds = boundSyms map (_.info.bounds.hi) val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes) @@ -681,6 +693,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else TypeRef(pre, sym.name.asTypeName) val args = until(end, readTypeRef) if (sym == defn.ByNameParamClass2x) ExprType(args.head) + else if (args.isEmpty && sym.typeParams.nonEmpty) tycon.EtaExpand else tycon.appliedTo(args) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 70fab7e0fc5a..a46665ec02c8 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ 
b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -122,7 +122,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close } if (tp.isSafeLambda) { - val (prefix, body, bindings) = extractApply(tp) + val (prefix, body, bindings) = decomposeHKApply(tp) prefix match { case prefix: TypeRef if prefix.symbol.isLambdaTrait && body.exists => return typeLambdaText(prefix.symbol, body, bindings) @@ -184,9 +184,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { * without a prefix, because the latter print nicer. * */ - def extractApply(tp: Type): (Type, Type, List[(Name, Type)]) = tp.stripTypeVar match { + def decomposeHKApply(tp: Type): (Type, Type, List[(Name, Type)]) = tp.stripTypeVar match { case tp @ RefinedType(parent, name) => - if (name == tpnme.Apply) { + if (name == tpnme.hkApply) { // simplify arguments so that parameters just print HK$i and not // LambdaI{...}.HK$i val simplifyArgs = new TypeMap { @@ -199,7 +199,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } (parent, simplifyArgs(tp.refinedInfo.followTypeAlias), Nil) } else if (name.isLambdaArgName) { - val (prefix, body, argBindings) = extractApply(parent) + val (prefix, body, argBindings) = decomposeHKApply(parent) (prefix, body, (name, tp.refinedInfo) :: argBindings) } else (tp, NoType, Nil) case _ => diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index 40029c42b805..c7d8acb371e7 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -604,10 +604,6 @@ trait Applications extends Compatibility { self: Typer => val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt)) typedFn.tpe.widen match { case pt: PolyType => - def adaptTypeArg(tree: tpd.Tree, bound: Type): tpd.Tree = - if (bound.isLambda && !tree.tpe.isLambda && tree.tpe.typeParams.nonEmpty) - 
tree.withType(tree.tpe.EtaExpand) - else tree if (typedArgs.length <= pt.paramBounds.length) typedArgs = typedArgs.zipWithConserve(pt.paramBounds)(adaptTypeArg) checkBounds(typedArgs, pt) @@ -616,6 +612,9 @@ trait Applications extends Compatibility { self: Typer => assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } + def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = + tree.withType(tree.tpe.EtaExpandIfHK(bound)) + /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. */ def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { case Apply(TypeApply(tycon, targ :: Nil), args) if tycon.symbol == defn.ArrayConstructor => diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 3847cb5bef22..8376dd4e9619 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -115,18 +115,8 @@ object Checking { val parent1 = this(parent) val saved = cycleOK cycleOK = nestedCycleOK - - /** A derived refined type with two possible tweaks: - * (1) LazyRefs in parents are pulled out, - * (2) #Apply is added if the type is a fully applied type lambda. 
- */ - def derivedType(p: Type): Type = p match { - case p: LazyRef => LazyRef(() => derivedType(p.ref)) - case _ => - val res = tp.derivedRefinedType(p, name, this(tp.refinedInfo)) - if (res.isSafeLambda && res.typeParams.isEmpty) res.select(tpnme.Apply) else res - } - try derivedType(parent1) finally cycleOK = saved + try tp.derivedRefinedType(parent1, name, this(tp.refinedInfo)) + finally cycleOK = saved case tp @ TypeRef(pre, name) => try { // A prefix is interesting if it might contain (transitively) a reference diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 99119acb3275..c1341a9ae769 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -780,11 +780,11 @@ class Namer { typer: Typer => completeParams(tdef.tparams) val tparamSyms = tdef.tparams map symbolOfTree val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree] - val toParameterize = tparamSyms.nonEmpty && !isDerived - val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived + //val toParameterize = tparamSyms.nonEmpty && !isDerived + //val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived def abstracted(tp: Type): Type = - if (needsLambda) tp.LambdaAbstract(tparamSyms) - else if (toParameterize) tp.parameterizeWith(tparamSyms) + if (tparamSyms.nonEmpty && !isDerived) tp.LambdaAbstract(tparamSyms) + //else if (toParameterize) tp.parameterizeWith(tparamSyms) else tp sym.info = abstracted(TypeBounds.empty) // Temporarily set info of defined type T to ` >: Nothing <: Any. @@ -797,12 +797,38 @@ class Namer { typer: Typer => // // The scheme critically relies on an implementation detail of isRef, which // inspects a TypeRef's info, instead of simply dealiasing alias types. 
- val rhsType = typedAheadType(tdef.rhs).tpe + val rhsType = abstracted(typedAheadType(tdef.rhs).tpe) val unsafeInfo = rhsType match { - case _: TypeBounds => abstracted(rhsType).asInstanceOf[TypeBounds] - case _ => TypeAlias(abstracted(rhsType), if (sym is Local) sym.variance else 0) + case bounds: TypeBounds => bounds + case alias => TypeAlias(alias, if (sym is Local) sym.variance else 0) } sym.info = NoCompleter - checkNonCyclic(sym, unsafeInfo, reportErrors = true) + sym.info = checkNonCyclic(sym, unsafeInfo, reportErrors = true) + etaExpandArgs.apply(sym.info) + } + + /** Eta expand all class types C appearing as arguments to a higher-kinded + * type parameter to type lambdas, e.g. [HK0] => C[HK0]. This is necessary + * because in `typedAppliedTypeTree` we might ahve missed some eta expansions + * of arguments in F-bounds, because the recursive type was initialized with + * TypeBounds.empty. + */ + def etaExpandArgs(implicit ctx: Context) = new TypeMap { + def apply(tp: Type): Type = { + tp match { + case tp: RefinedType => + val args = tp.argInfos(interpolate = false).mapconserve(this) + if (args.nonEmpty) { + val tycon = tp.withoutArgs(args) + val tparams = tycon.typeParams + if (args.length == tparams.length) { // if lengths differ, problem is caught in typedTypeApply + val args1 = args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.info)) + if (args1 ne args) return this(tycon).appliedTo(args1) + } + } + case _ => + } + mapOver(tp) + } } } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index a2c49cdd9751..a3c64f526379 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -825,14 +825,54 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit res } - def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): AppliedTypeTree = track("typedAppliedTypeTree") { + def typedAppliedTypeTree(tree: 
untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") { val tpt1 = typed(tree.tpt) - val argPts = - if (ctx.mode is Mode.Pattern) tpt1.tpe.typeParams.map(_.info) - else tree.args.map(_ => WildcardType) - val args1 = tree.args.zipWithConserve(argPts)(typed(_, _)).asInstanceOf[List[Tree]] - // check that arguments conform to bounds is done in phase FirstTransform - assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) + val tparams = tpt1.tpe.typeParams + var args = tree.args + if (tparams.isEmpty) { + ctx.error(d"${tpt1.tpe} does not take type parameters", tree.pos) + tpt1 + } + else { + if (args.length != tparams.length) { + ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) + args = args.take(tparams.length) + } + def typedArg(arg: untpd.Tree, tparam: Symbol) = { + val arg1 = typed(arg, if (ctx.mode is Mode.Pattern) tparam.info else WildcardType) + adaptTypeArg(arg1, if (tparam.isCompleted) tparam.info else WildcardType) + } + val args1 = args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] + // check that arguments conform to bounds is done in phase PostTyper + val tree1 = assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) + if (tree1.tpe.isHKApply) + for (arg @ TypeBoundsTree(_, _) <- args1) + ctx.error("illegal wildcard type argument; does not correspond to type parameter of a class", arg.pos) + // The reason for outlawing such arguments is illustrated by the following example. + // Say we have + // + // type RMap[A, B] = Map[B, A] + // + // Then + // + // Rmap[_, Int] + // + // translates to + // + // Lambda$I { type hk$0; type hk$1 = Int; type $apply = Map[$hk1, $hk0] } # $apply + // + // Let's call the last type T. You would expect that + // + // Map[Int, String] <: RMap[_, Int] + // + // But that's not the case given the standard subtyping rules. 
In fact, the rhs reduces to + // + // Map[Int, T # $hk0] + // + // That means the second argument to `Map` is unknown and String is certainly not a subtype of it. + // To avoid the surprise we outlaw problematic wildcard arguments from the start. + tree1 + } } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") { diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 0838874c9553..b8423db38969 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -56,6 +56,8 @@ class tests extends CompilerTest { //@Test def pickle_core = compileDir(dotcDir, "core", testPickling, xerrors = 2) // two spurious comparison errors in Types and TypeOps + @Test def pos_arraycopy = + compileFile(runDir, "arraycopy", List("-Ylog-classpath")) @Test def pos_t2168_pat = compileFile(posDir, "t2168", twice) @Test def pos_erasure = compileFile(posDir, "erasure", twice) @Test def pos_Coder() = compileFile(posDir, "Coder", twice) @@ -153,6 +155,7 @@ class tests extends CompilerTest { @Test def neg_traitParamsMixin = compileFile(negDir, "traitParamsMixin", xerrors = 2) @Test def neg_firstError = compileFile(negDir, "firstError", xerrors = 3) @Test def neg_implicitLowerBound = compileFile(negDir, "implicit-lower-bound", xerrors = 1) + @Test def neg_partialApplications = compileFile(negDir, "partialApplications", xerrors = 8) @Test def run_all = runFiles(runDir) diff --git a/tests/neg/partialApplications.scala b/tests/neg/partialApplications.scala new file mode 100644 index 000000000000..67f6cf059ad0 --- /dev/null +++ b/tests/neg/partialApplications.scala @@ -0,0 +1,11 @@ +object Test { + + type RMap[X, Y] = Map[Y, X] + val m = Map[Int, String]() + val ts: RMap[_, Int] = m // erorr // error + val us: RMap[String, _] = m // error // error + val vs: RMap[_, _] = m // error // error // error + val zz: RMap = m // error + +} + diff --git a/tests/pos/IterableSelfRec.scala b/tests/pending/pos/IterableSelfRec.scala 
similarity index 84% rename from tests/pos/IterableSelfRec.scala rename to tests/pending/pos/IterableSelfRec.scala index bba7a82d252a..a97833991785 100644 --- a/tests/pos/IterableSelfRec.scala +++ b/tests/pending/pos/IterableSelfRec.scala @@ -22,9 +22,9 @@ trait Seq[T] extends Iterable[T] { self => def apply(x: Int): T } -abstract class CollectionCompanion[+CC <: Collection { type This <: CC }] +abstract class CollectionCompanion[+CC[X] <: Collection[X] { type This <: CC }] -abstract class IterableCompanion[+CC <: Iterable { type This <: CC }] extends CollectionCompanion[CC] { +abstract class IterableCompanion[+CC[X] <: Iterable[X] { type This <: CC }] extends CollectionCompanion[CC] { def fromIterator[T](it: Iterator[T]): CC[T] def map[T, U](xs: Iterable[T], f: T => U): CC[U] = fromIterator(xs.iterator.map(f)) @@ -36,7 +36,7 @@ abstract class IterableCompanion[+CC <: Iterable { type This <: CC }] extends Co implicit def transformOps[T](xs: CC[T] @uncheckedVariance): TransformOps[CC, T] = ??? 
// new TransformOps[CC, T](xs) } -class TransformOps[+CC <: Iterable { type This <: CC }, T] (val xs: CC[T]) extends AnyVal { +class TransformOps[+CC[X] <: Iterable[X] { type This <: CC }, T] (val xs: CC[T]) extends AnyVal { def companion[T](xs: CC[T] @uncheckedVariance): IterableCompanion[CC] = xs.companion def map[U](f: T => U): CC[U] = companion(xs).map(xs, f) def filter(p: T => Boolean): CC[T] = companion(xs).filter(xs, p) diff --git a/tests/pos/GenericTraversableTemplate.scala b/tests/pos/GenericTraversableTemplate.scala new file mode 100644 index 000000000000..cd48cd23f4b3 --- /dev/null +++ b/tests/pos/GenericTraversableTemplate.scala @@ -0,0 +1,232 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala +package collection +package generic + +import mutable.Builder +import scala.annotation.migration +import scala.annotation.unchecked.uncheckedVariance +import scala.language.higherKinds + +/** A template class for companion objects of ``regular`` collection classes + * that represent an unconstrained higher-kinded type. + * + * @tparam A The type of the collection elements. + * @tparam CC The type constructor representing the collection class. + * @author Martin Odersky + * @since 2.8 + * @define coll collection + * @define Coll CC + */ +trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { + + /** Applies a function `f` to all elements of this $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. 
+ * + * @usecase def foreach(f: A => Unit): Unit + */ + def foreach[U](f: A => U): Unit + + /** Selects the first element of this $coll. + * + * @return the first element of this $coll. + * @throws `NoSuchElementException` if the $coll is empty. + */ + def head: A + + /** Tests whether this $coll is empty. + * + * @return `true` if the $coll contain no elements, `false` otherwise. + */ + def isEmpty: Boolean + + /** The factory companion object that builds instances of class $Coll. + * (or its `Iterable` superclass where class $Coll is not a `Seq`.) + */ + def companion: GenericCompanion[CC] + + /** The builder that builds instances of type $Coll[A] + */ + protected[this] def newBuilder: Builder[A, CC[A]] = companion.newBuilder[A] + + /** The generic builder that builds instances of $Coll + * at arbitrary element types. + */ + def genericBuilder[B]: Builder[B, CC[B]] = companion.newBuilder[B] + + private def sequential: TraversableOnce[A] = this.asInstanceOf[GenTraversableOnce[A]].seq + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. + * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + for (xy <- sequential) { + val (x, y) = asPair(xy) + b1 += x + b2 += y + } + (b1.result(), b2.result()) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. 
+ * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + val b3 = genericBuilder[A3] + + for (xyz <- sequential) { + val (x, y, z) = asTriple(xyz) + b1 += x + b2 += y + b3 += z + } + (b1.result(), b2.result(), b3.result()) + } + + /** Converts this $coll of traversable collections into + * a $coll formed by the elements of these traversable + * collections. + * + * @tparam B the type of the elements of each traversable collection. + * @param asTraversable an implicit conversion which asserts that the element + * type of this $coll is a `GenTraversable`. + * @return a new $coll resulting from concatenating all element ${coll}s. + * + * @usecase def flatten[B]: $Coll[B] + * + * @inheritdoc + * + * The resulting collection's type will be guided by the + * static type of $coll. 
For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(1, 2, 3) + * ).flatten + * // xs == List(1, 2, 3, 1, 2, 3) + * + * val ys = Set( + * List(1, 2, 3), + * List(3, 2, 1) + * ).flatten + * // ys == Set(1, 2, 3) + * }}} + */ + def flatten[B](implicit asTraversable: A => /*<: /*<:= headSize) fail + bs(i) += x + i += 1 + } + if (i != headSize) + fail + } + val bb = genericBuilder[CC[B]] + for (b <- bs) bb += b.result + bb.result() + } +} + diff --git a/tests/pos/Iter3.scala b/tests/pos/Iter3.scala new file mode 100644 index 000000000000..d0ae79f1f213 --- /dev/null +++ b/tests/pos/Iter3.scala @@ -0,0 +1,199 @@ +package dotty1.collections +package immutable + +import annotation.unchecked.uncheckedVariance + +// Like Iter2, but with non-variant types only. +object Iter2 { + + trait Iterator[A] extends IterableOnce[A] { + def hasNext: Boolean + def next: A + def iterator = this + def foreach(f: A => Unit): Unit = ??? + def map[B](f: A => B): Iterator[B] = ??? + def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = ??? + def ++[B >: A](xs: IterableOnce[B]): Iterator[B] = ??? + def drop(n: Int): Iterator[A] = ??? + def indexWhere(p: A => Boolean): Int = { + var i = 0 + while (hasNext) { + if (p(next)) return i + i += 1 + } + -1 + } + def zip[B](that: Iterator[B]): Iterator[(A, B)] = ??? 
+ } + + trait IterableOnce[A] { + def iterator: Iterator[A] + def buildIterator: Iterator[A] = iterator + } + + trait FromIterator[C[X] <: Iterable[X]] { + def fromIterator[B](it: Iterator[B]): C[B] + } + + trait Iterable[IA] extends IterableOnce[IA] with FromIterator[Iterable] + + trait Seq[AA] extends Iterable[AA] with FromIterator[Seq] { + def apply(i: Int): AA + def length: Int + } + + sealed trait List[A] extends Seq[A] with FromIterator[List] { + def isEmpty: Boolean + def head: A + def tail: List[A] + def iterator = new ListIterator[A](this) + def fromIterator[B](it: Iterator[B]): List[B] = it match { + case ListIterator(xs) => xs + case _ => if (it.hasNext) Cons(it.next, fromIterator(it)) else Nil.asInstanceOf[List[B]] + } + def apply(i: Int): A = { + require(!isEmpty) + if (i == 0) head else tail.apply(i - 1) + } + def length: Int = + if (isEmpty) 0 else 1 + tail.length + } + + case class Cons[A](x: A, xs: List[A]) extends List[A] { + def isEmpty = false + def head = x + def tail = xs + } + + case object Nil extends List[Nothing] { + def isEmpty = true + def head = ??? + def tail = ??? 
+  }
+
+  class ArrayBuffer[A] private (initElems: Array[AnyRef], initLen: Int) extends Seq[A] with FromIterator[ArrayBuffer] {
+    def this() = this(new Array[AnyRef](16), 0)
+    def this(it: ArrayIterator[A]) = this(it.elems, it.len)
+    private var elems: Array[AnyRef] = initElems
+    private var len = 0
+    def iterator =
+      elems.iterator.take(len).asInstanceOf[Iterator[A]]
+    override def buildIterator =
+      new ArrayIterator(elems, len).asInstanceOf[Iterator[A]]
+    def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] =
+      new ArrayBuffer(ArrayIterator.fromIterator(it))
+    def apply(i: Int) = elems(i).asInstanceOf[A]
+    def length = len
+  }
+
+  implicit class IterableTransforms[A, C[X] <: Iterable[X]](val c: Iterable[A] & FromIterator[C]) extends AnyVal {
+    def map[B](f: A => B): C[B] = c.fromIterator(c.buildIterator.map(f))
+    def flatMap[B](f: A => IterableOnce[B]): C[B] = c.fromIterator(c.buildIterator.flatMap(f(_).buildIterator))
+    def ++[B >: A](xs: IterableOnce[B]): C[B] = c.fromIterator(c.buildIterator ++ xs.buildIterator)
+    def drop(n: Int): C[A] = c.fromIterator(c.buildIterator.drop(n))
+    def head: A = c.iterator.next
+    def zip[B](xs: IterableOnce[B]): C[(A, B)] = c.fromIterator(c.iterator.zip(xs.iterator))
+  }
+
+  implicit class SeqTransforms[SA, C[X] <: Seq[X]](val c: Seq[SA] & FromIterator[C]) extends AnyVal {
+    def reverse: C[SA] = {
+      val elems = new Array[AnyRef](c.length)
+      var i = elems.length
+      val it = c.iterator
+      while (it.hasNext) {
+        i -= 1
+        elems(i) = it.next.asInstanceOf[AnyRef]
+      }
+      val xzz = c.fromIterator(ArrayIterator[SA](elems, c.length))
+      xzz
+    }
+    def indexWhere(p: SA => Boolean): Int = c.iterator.indexWhere(p)
+  }
+
+  case class ListIterator[A](xs: List[A]) extends Iterator[A] {
+    private[this] var current: List[A] = xs
+    def hasNext = !current.isEmpty
+    def next = { val res = current.head; current = current.tail; res }
+  }
+
+  case class ArrayIterator[A](elems: Array[AnyRef], len: Int) extends Iterator[A] {
+    import ArrayIterator._
+
+    private def elem(i: Int) = elems(i).asInstanceOf[A]
+
+    private var cur = 0
+
+    def hasNext = cur < len
+    def next = { val res = elem(cur); cur += 1; res }
+
+    override def foreach(f: A => Unit): Unit =
+      for (i <- 0 until len) f(elem(i))
+
+    override def map[B](f: A => B): ArrayIterator[B] = {
+      var mapped = elems
+      for (i <- 0 until len) {
+        val x = elem(i)
+        val y = widen(f(x))
+        if (widen(x) ne y) {
+          if (mapped eq elems) mapped = new Array[AnyRef](len)
+          mapped(i) = y
+        }
+      }
+      if (mapped eq elems) this.asInstanceOf[ArrayIterator[B]]
+      else new ArrayIterator(mapped, len)
+    }
+
+    override def flatMap[B](f: A => IterableOnce[B]): ArrayIterator[B] =
+      flatten(map(f(_).buildIterator))
+
+    override def ++[B >: A](that: IterableOnce[B]): ArrayIterator[B] = {
+      val thatIterator @ ArrayIterator(elems2, len2) = fromIterator(that.iterator)
+      if (len == 0) thatIterator
+      else if (len2 == 0) this.asInstanceOf[ArrayIterator[B]]
+      else {
+        val resLen = len + len2
+        val resElems = new Array[AnyRef](resLen)
+        Array.copy(elems, 0, resElems, 0, len)
+        Array.copy(elems2, 0, resElems, len, len2)
+        new ArrayIterator(resElems, resLen)
+      }
+    }
+  }
+
+  object ArrayIterator {
+    private def widen(x: Any): AnyRef = x.asInstanceOf[AnyRef]
+
+    def fromIterator[A](it: Iterator[A]): ArrayIterator[A] = it match {
+      case it: ArrayIterator[A] => it
+      case _ =>
+        var elems = new Array[AnyRef](32)
+        var len = 0
+        def ensureCapacity() = {
+          while (len > elems.length) {
+            val newElems = new Array[AnyRef](elems.length * 2)
+            Array.copy(elems, 0, newElems, 0, elems.length)
+            elems = newElems
+          }
+        }
+        while (it.hasNext) {
+          len += 1
+          ensureCapacity()
+          elems(len - 1) = widen(it.next)
+        }
+        ArrayIterator(elems, len)
+    }
+
+    def flatten[A](its: ArrayIterator[Iterator[A]]): ArrayIterator[A] = {
+      var arrayIts = its.map(fromIterator)
+      var totalLen = 0
+      arrayIts.foreach(totalLen += _.len)
+      val allElems = new Array[AnyRef](totalLen)
+      var j = 0
+      arrayIts.foreach { it =>
+        Array.copy(it.elems, 0, allElems, j, it.len)
+        j += it.len
+      }
+      new ArrayIterator(allElems, totalLen)
+    }
+  }
+}
diff --git a/tests/pos/Map.scala b/tests/pos/Map.scala
new file mode 100644
index 000000000000..5178d5a862cf
--- /dev/null
+++ b/tests/pos/Map.scala
@@ -0,0 +1,194 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+
+package scala
+package collection
+package immutable
+
+import generic._
+
+/**
+ *  A generic trait for immutable maps. Concrete classes have to provide
+ *  functionality for the abstract methods in `Map`:
+ *
+ *  {{{
+ *    def get(key: A): Option[B]
+ *    def iterator: Iterator[(A, B)]
+ *    def + [B1 >: B](kv: (A, B1)): Map[A, B1]
+ *    def -(key: A): Map[A, B]
+ *  }}}
+ *
+ *  @since 1
+ */
+trait Map[A, +B] extends Iterable[(A, B)]
+//                    with GenMap[A, B]
+                      with scala.collection.Map[A, B]
+                      with MapLike[A, B, Map[A, B]] { self =>
+
+  override def empty: Map[A, B] = Map.empty
+
+  /** Returns this $coll as an immutable map.
+   *
+   *  A new map will not be built; lazy collections will stay lazy.
+   */
+  @deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.", "2.11.0")
+  override def toMap[T, U](implicit ev: (A, B) <:< (T, U)): immutable.Map[T, U] =
+    self.asInstanceOf[immutable.Map[T, U]]
+
+  override def seq: Map[A, B] = this
+
+  /** The same map with a given default function.
+   *  Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`.
+   *
+   *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
+   *
+   *  @param d     the function mapping keys to values, used for non-present keys
+   *  @return      a wrapper of the map with a default value
+   */
+  def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, d)
+
+  /** The same map with a given default value.
+   *  Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`.
+   *
+   *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
+   *
+   *  @param d     default value used for non-present keys
+   *  @return      a wrapper of the map with a default value
+   */
+  def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, x => d)
+
+  /** Add a key/value pair to this map.
+   *  @param    key the key
+   *  @param    value the value
+   *  @return   A new map with the new binding added to this map
+   */
+  override def updated [B1 >: B](key: A, value: B1): Map[A, B1]
+  def + [B1 >: B](kv: (A, B1)): Map[A, B1]
+}
+
+/** $factoryInfo
+ *  @define Coll `immutable.Map`
+ *  @define coll immutable map
+ */
+object Map extends ImmutableMapFactory[Map] {
+
+  /** $mapCanBuildFromInfo */
+  implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
+
+  def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]]
+
+  class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
+    override def empty = new WithDefault(underlying.empty, d)
+    override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d)
+    override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2)
+    override def - (key: A): WithDefault[A, B] = new WithDefault(underlying - key, d)
+    override def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, d)
+    override def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, x => d)
+  }
+
+  private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable {
+    override def size: Int = 0
+    def get(key: Any): Option[Nothing] = None
+    def iterator: Iterator[(Any, Nothing)] = Iterator.empty
+    override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
+    def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2)
+    def - (key: Any): Map[Any, Nothing] = this
+  }
+
+  class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+    override def size = 1
+    def get(key: A): Option[B] =
+      if (key == key1) Some(value1) else None
+    def iterator = Iterator((key1, value1))
+    override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+      if (key == key1) new Map1(key1, value)
+      else new Map2(key1, value1, key, value)
+    def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
+    def - (key: A): Map[A, B] =
+      if (key == key1) Map.empty else this
+    override def foreach[U](f: ((A, B)) => U): Unit = {
+      f((key1, value1))
+    }
+  }
+
+  class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+    override def size = 2
+    def get(key: A): Option[B] =
+      if (key == key1) Some(value1)
+      else if (key == key2) Some(value2)
+      else None
+    def iterator = Iterator((key1, value1), (key2, value2))
+    override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+      if (key == key1) new Map2(key1, value, key2, value2)
+      else if (key == key2) new Map2(key1, value1, key2, value)
+      else new Map3(key1, value1, key2, value2, key, value)
+    def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
+    def - (key: A): Map[A, B] =
+      if (key == key1) new Map1(key2, value2)
+      else if (key == key2) new Map1(key1, value1)
+      else this
+    override def foreach[U](f: ((A, B)) => U): Unit = {
+      f((key1, value1)); f((key2, value2))
+    }
+  }
+
+  class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+    override def size = 3
+    def get(key: A): Option[B] =
+      if (key == key1) Some(value1)
+      else if (key == key2) Some(value2)
+      else if (key == key3) Some(value3)
+      else None
+    def iterator = Iterator((key1, value1), (key2, value2), (key3, value3))
+    override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+      if (key == key1) new Map3(key1, value, key2, value2, key3, value3)
+      else if (key == key2) new Map3(key1, value1, key2, value, key3, value3)
+      else if (key == key3) new Map3(key1, value1, key2, value2, key3, value)
+      else new Map4(key1, value1, key2, value2, key3, value3, key, value)
+    def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
+    def - (key: A): Map[A, B] =
+      if (key == key1) new Map2(key2, value2, key3, value3)
+      else if (key == key2) new Map2(key1, value1, key3, value3)
+      else if (key == key3) new Map2(key1, value1, key2, value2)
+      else this
+    override def foreach[U](f: ((A, B)) => U): Unit = {
+      f((key1, value1)); f((key2, value2)); f((key3, value3))
+    }
+  }
+
+  class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+    override def size = 4
+    def get(key: A): Option[B] =
+      if (key == key1) Some(value1)
+      else if (key == key2) Some(value2)
+      else if (key == key3) Some(value3)
+      else if (key == key4) Some(value4)
+      else None
+    def iterator = Iterator((key1, value1), (key2, value2), (key3, value3), (key4, value4))
+    override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+      if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4)
+      else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4)
+      else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4)
+      else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value)
+      else new HashMap + ((key1, value1), (key2, value2), (key3, value3), (key4, value4), (key, value))
+    def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
+    def - (key: A): Map[A, B] =
+      if (key == key1) new Map3(key2, value2, key3, value3, key4, value4)
+      else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4)
+      else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4)
+      else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3)
+      else this
+    override def foreach[U](f: ((A, B)) => U): Unit = {
+      f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4))
+    }
+  }
+}
+
+/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
+abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
diff --git a/tests/pos/partialApplications.scala b/tests/pos/partialApplications.scala
index b68c4b945152..696c544e78eb 100644
--- a/tests/pos/partialApplications.scala
+++ b/tests/pos/partialApplications.scala
@@ -1,5 +1,34 @@
 object Test {
 
+  type Histogram[X] = Map[X, Int]
+
+  type StringlyHistogram[X >: String] = Histogram[X]
+
+  val xs: Histogram[String] = Map[String, Int]()
+
+  val ys: StringlyHistogram[String] = xs
+
+  def e = xs
+
+  val zs: StringlyHistogram[_] = e
+
+  type IntMap[Y] = Map[Int, Y]
+
+  val is = Map[Int, Boolean]()
+
+  val js: IntMap[Boolean] = is
+
+  val ks: IntMap[_] = is
+
+  type RMap[X, Y] = Map[Y, X]
+
+  val rs = Map[Int, Float]()
+
+  val ss: RMap[Float, Int] = rs
+
+}
+
+object Test2 {
   type Histogram = Map[_, Int]
 
   type StringlyHistogram = Histogram[_ >: String]
@@ -10,4 +39,8 @@
 
   val zs: StringlyHistogram = xs
 
+  val xs1 = xs
+  val ys1 = ys
+  val zs1 = zs
+
 }