diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 46b1896f1add..b04099af8b48 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -70,11 +70,18 @@ object Config { final val traceDeepSubTypeRecursions = false /** When explaining subtypes and this flag is set, also show the classes of the compared types. */ - final val verboseExplainSubtype = true + final val verboseExplainSubtype = false /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ final val fastPathForRefinedSubtype = true + /** If this flag is set, `TypeOps.normalizeToClassRefs` will insert forwarders + * for type parameters of base classes. This is an optimization, which avoids + * long alias chains. We should not rely on the optimization, though. So changing + * the flag to false can be used for checking that everything works OK without it. + */ + final val forwardTypeParams = true + /** If this flag is set, and we compute `T1 { X = S1 }` & `T2 { X = S2 }` as a new * upper bound of a constrained parameter, try to align the refinements by computing * `S1 =:= S2` (which might instantiate type parameters). diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 941434dd5ff3..5f34c08ddeeb 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -24,7 +24,8 @@ class ScalaSettings extends Settings.SettingGroup { val deprecation = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.") val migration = BooleanSetting("-migration", "Emit warning and location for migration issues from Scala 2.") val encoding = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding) - val explaintypes = BooleanSetting("-explaintypes", "Explain type errors in more detail.") + val explainTypes = BooleanSetting("-explain-types", "Explain type errors in more detail.") + val explainImplicits = BooleanSetting("-explain-implicits", "Explain implicit search errors in more detail.") val explain = BooleanSetting("-explain", "Explain errors in more detail.") val feature = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.") val help = BooleanSetting("-help", "Print a synopsis of standard options") diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index de96f644a91c..fae46c12ce85 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -230,12 +230,47 @@ trait ConstraintHandling { } /** The instance type of `param` in the current constraint (which contains `param`). - * If `fromBelow` is true, the instance type is the lub of the parameter's - * lower bounds; otherwise it is the glb of its upper bounds. However, - * a lower bound instantiation can be a singleton type only if the upper bound - * is also a singleton type. + * The instance type TI is computed depending on the given variance as follows: + * + * If `variance < 0`, `TI` is the glb of `param`'s upper bounds. + * If `variance > 0`, `TI` is the lub of `param`'s lower bounds. 
+ * However, if `TI` would be a singleton type, and the upper bound is + * not a singleton type, `TI` is widened to a non-singleton type. + * If `variance = 0`, let `Lo1` be the lower bound of `param` and let `Lo2` be the + * |-dominator of `Lo1`. If `Lo2` is not more ugly than `Lo1`, we add the + * additional constraint that `param` should be a supertype of `Lo2`. + * We then proceed as if `variance > 0`. + * + * The reason for tweaking the `variance = 0` case is that in this case we have to make + * a guess, since no possible instance type is better than the other. We apply the heuristic + * that usually an |-type is not what is intended. E.g. given two cases of types + * `Some[Int]` and `None`, we'd naturally want `Option[Int]` as the inferred type, not + * `Some[Int] | None`. If `variance > 0`, it's OK to infer the smaller `|-type` since + * we can always widen later to the intended type. But in non-variant situations, we + * have to stick to the initial guess. + * + * On the other hand, sometimes the |-dominator is _not_ what we want. For instance, taking + * run/hmap.scala as an example, we have a lower bound + * + * HEntry[$param$5, V] | HEntry[String("name"), V] + * + * Its |-dominator is + * + * HEntry[_ >: $param$11 & String("cat") <: String, V] + * + * If we apply the additional constraint we get compilation failures because while + * we can find implicits for the |-type above, we cannot find implicits for the |-dominator. + * There's no hard criterion for what should be considered ugly or not. For now we + * pick the degree of undefinedness of type parameters, i.e. how many type + * parameters are instantiated with wildcard types. Maybe we have to refine this + * in the future. + * + * The whole thing is clearly not nice, and I would love to have a better criterion. + * In principle we are grappling with the fundamental shortcoming that local type inference + * sometimes has to guess what was intended. The more refined our type lattice becomes, + * the harder it is to make a choice. */ - def instanceType(param: TypeParamRef, fromBelow: Boolean): Type = { + def instanceType(param: TypeParamRef, variance: Int): Type = { def upperBound = constraint.fullUpperBound(param) def isSingleton(tp: Type): Boolean = tp match { case tp: SingletonType => true @@ -257,22 +292,48 @@ trait ConstraintHandling { case _ => false } - // First, solve the constraint. + // First, consider whether we need to constrain with |-dominator + if (variance == 0) { + val lo1 = ctx.typeComparer.bounds(param).lo + val lo2 = ctx.orDominator(lo1) + if (lo1 ne lo2) { + val ugliness = new TypeAccumulator[Int] { + def apply(x: Int, tp: Type) = tp match { + case tp: TypeBounds if !tp.isAlias => apply(apply(x + 1, tp.lo), tp.hi) + case tp: AndOrType => apply(x, tp.tp1) max apply(x, tp.tp2) + case _ => foldOver(x, tp) + } + } + if (ugliness(0, lo2) <= ugliness(0, lo1)) { + constr.println(i"apply additional constr $lo2 <: $param, was $lo1") + lo2 <:< param + } + else + constr.println(i"""refrain from adding constraint. + |ugliness($lo1) = ${ugliness(0, lo1)} + |ugliness($lo2) = ${ugliness(0, lo2)}""") + } + } + + // Then, solve the constraint. + val fromBelow = variance >= 0 var inst = approximation(param, fromBelow) - // Then, approximate by (1.) - (3.) and simplify as follows. - // 1. If instance is from below and is a singleton type, yet + // Then, if instance is from below and is a singleton type, yet // upper bound is not a singleton type, widen the instance.
if (fromBelow && isSingleton(inst) && !isSingleton(upperBound)) inst = inst.widen + // Then, simplify. inst = inst.simplified - // 2. If instance is from below and is a fully-defined union type, yet upper bound - // is not a union type, approximate the union type from above by an intersection - // of all common base types. - if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound)) + // Finally, if the instance is from below and is a fully-defined union type, yet upper bound + // is not a union type, harmonize the union type. + // TODO: See whether we can merge this with the special treatment of dependent params in + // simplified. + if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound)) { inst = ctx.harmonizeUnion(inst) + } inst } diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index c835f677ef96..60ab93dba24d 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -39,6 +39,7 @@ object Mode { val TypevarsMissContext = newMode(4, "TypevarsMissContext") val CheckCyclic = newMode(5, "CheckCyclic") + /** We are looking at the arguments of a supercall */ val InSuperCall = newMode(6, "InSuperCall") /** Allow GADTFlexType labelled types to have their bounds adjusted */ @@ -81,7 +82,7 @@ object Mode { val ReadPositions = newMode(16, "ReadPositions") val PatternOrType = Pattern | Type - + /** We are elaborating the fully qualified name of a package clause. * In this case, identifiers should never be imported. */ diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index e5cc94883dd1..7688b60aa184 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1381,12 +1381,15 @@ object SymDenotations { /** Invalidate baseTypeRefCache, baseClasses and superClassBits on new run */ private def checkBasesUpToDate()(implicit ctx: Context) = if (baseTypeRefValid != ctx.runId) { - baseTypeRefCache = new java.util.HashMap[CachedType, Type] + invalidateBaseTypeRefCache() myBaseClasses = null mySuperClassBits = null baseTypeRefValid = ctx.runId } + def invalidateBaseTypeRefCache() = + baseTypeRefCache = new java.util.HashMap[CachedType, Type] + private def computeBases(implicit ctx: Context): (List[ClassSymbol], BitSet) = { if (myBaseClasses eq Nil) throw CyclicReference(this) myBaseClasses = Nil @@ -1712,18 +1715,23 @@ object SymDenotations { /*>|>*/ ctx.debugTraceIndented(s"$tp.baseTypeRef($this)") /*<|<*/ { tp match { case tp: CachedType => - checkBasesUpToDate() - var basetp = baseTypeRefCache get tp - if (basetp == null) { - baseTypeRefCache.put(tp, NoPrefix) - basetp = computeBaseTypeRefOf(tp) - if (isCachable(tp)) baseTypeRefCache.put(tp, basetp) - else baseTypeRefCache.remove(tp) - } else if (basetp == NoPrefix) { - baseTypeRefCache.put(tp, null) - throw CyclicReference(this) + try { + checkBasesUpToDate() + var basetp = baseTypeRefCache get tp + if (basetp == null) { + baseTypeRefCache.put(tp, NoPrefix) + basetp = computeBaseTypeRefOf(tp) + if (isCachable(tp)) baseTypeRefCache.put(tp, basetp) + else baseTypeRefCache.remove(tp) + } else if (basetp == NoPrefix) + throw CyclicReference(this) + basetp + } + catch { + case ex: Throwable => + baseTypeRefCache.put(tp, null) + throw ex } - basetp case _ => computeBaseTypeRefOf(tp) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala 
b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 54b96a2530b3..8c245f6c81b6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -574,8 +574,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isNewSubType(tp1.parent, tp2) case tp1 @ HKApply(tycon1, args1) => compareHkApply1(tp1, tycon1, args1, tp2) - case EtaExpansion(tycon1) => - isSubType(tycon1, tp2) + case tp1: HKTypeLambda => + def compareHKLambda = tp1 match { + case EtaExpansion(tycon1) => isSubType(tycon1, tp2) + case _ => tp2 match { + case tp2: HKTypeLambda => false // this case was covered in thirdTry + case _ => tp2.isHK && isSubType(tp1.resultType, tp2.appliedTo(tp1.paramRefs)) + } + } + compareHKLambda case AndType(tp11, tp12) => // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2 // and analogously for T11 & (T121 | T122) & T12 <: T2 @@ -775,7 +782,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { (v < 0 || isSubType(tp1, tp2)) } isSub(args1.head, args2.head) - } && isSubArgs(args1.tail, args2.tail, tparams) + } && isSubArgs(args1.tail, args2.tail, tparams.tail) /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where * - `B` derives from one of the class symbols of `tp2`, @@ -1287,8 +1294,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tl.integrate(tparams1, tparam1.paramInfoAsSeenFrom(tp1)).bounds & tl.integrate(tparams2, tparam2.paramInfoAsSeenFrom(tp2)).bounds), resultTypeExp = tl => - original(tl.integrate(tparams1, tp1).appliedTo(tl.paramRefs), - tl.integrate(tparams2, tp2).appliedTo(tl.paramRefs))) + original(tp1.appliedTo(tl.paramRefs), tp2.appliedTo(tl.paramRefs))) } /** Try to distribute `&` inside type, detect and handle conflicts diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 4a1c3d04469d..0570a05812ec 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -352,72 +352,11 @@ trait TypeOps { this: Context => // TODO: Make standalone object. } } - /** If we have member definitions - * - * type argSym v= from - * type from v= to - * - * where the variances of both alias are the same, then enter a new definition - * - * type argSym v= to - * - * unless a definition for `argSym` already exists in the current scope. - */ - def forwardRef(argSym: Symbol, from: Symbol, to: TypeBounds, cls: ClassSymbol, decls: Scope) = - argSym.info match { - case info @ TypeBounds(lo2 @ TypeRef(_: ThisType, name), hi2) => - if (name == from.name && - (lo2 eq hi2) && - info.variance == to.variance && - !decls.lookup(argSym.name).exists) { - // println(s"short-circuit ${argSym.name} was: ${argSym.info}, now: $to") - enterArgBinding(argSym, to, cls, decls) - } - case _ => - } - - /** Normalize a list of parent types of class `cls` that may contain refinements * to a list of typerefs referring to classes, by converting all refinements to member * definitions in scope `decls`. Can add members to `decls` as a side-effect. 
*/ def normalizeToClassRefs(parents: List[Type], cls: ClassSymbol, decls: Scope): List[TypeRef] = { - - /** If we just entered the type argument binding - * - * type From = To - * - * and there is a type argument binding in a parent in `prefs` of the form - * - * type X = From - * - * then also add the binding - * - * type X = To - * - * to the current scope, provided (1) variances of both aliases are the same, and - * (2) X is not yet defined in current scope. This "short-circuiting" prevents - * long chains of aliases which would have to be traversed in type comparers. - * - * Note: Test i1401.scala shows that `forwardRefs` is also necessary - * for typechecking in the case where self types refer to type parameters - * that are upper-bounded by subclass instances. - */ - def forwardRefs(from: Symbol, to: Type, prefs: List[TypeRef]) = to match { - case to @ TypeBounds(lo1, hi1) if lo1 eq hi1 => - for (pref <- prefs) { - def forward()(implicit ctx: Context): Unit = - for (argSym <- pref.decls) - if (argSym is BaseTypeArg) - forwardRef(argSym, from, to, cls, decls) - pref.info match { - case info: TempClassInfo => info.addSuspension(implicit ctx => forward()) - case _ => forward() - } - } - case _ => - } - // println(s"normalizing $parents of $cls in ${cls.owner}") // !!! DEBUG // A map consolidating all refinements arising from parent type parameters @@ -460,16 +399,70 @@ trait TypeOps { this: Context => // TODO: Make standalone object. s"redefinition of ${decls.lookup(name).debugString} in ${cls.showLocated}") enterArgBinding(formals(name), refinedInfo, cls, decls) } - // Forward definitions in super classes that have one of the refined parameters - // as aliases directly to the refined info. - // Note that this cannot be fused with the previous loop because we now - // assume that all arguments have been entered in `decls`. - refinements foreachBinding { (name, refinedInfo) => - forwardRefs(formals(name), refinedInfo, parentRefs) - } + + if (Config.forwardTypeParams) + forwardParamBindings(parentRefs, refinements, cls, decls) + parentRefs } + /** Forward parameter bindings in baseclasses to argument types of + * class `cls` if possible. + * If there are member definitions + * + * type param v= middle + * type middle v= to + * + * where the variances of both aliases are the same, then enter a new definition + * + * type param v= to + * + * If multiple forwarders would be generated, join their `to` types with an `&`.
+ * + * @param cls The class for which parameter bindings should be forwarded + * @param decls Its scope + * @param parentRefs The parent type references of `cls` + * @param paramBindings The type parameter bindings generated for `cls` + * + */ + def forwardParamBindings(parentRefs: List[TypeRef], + paramBindings: SimpleMap[TypeName, Type], + cls: ClassSymbol, decls: Scope)(implicit ctx: Context) = { + + def forwardRef(argSym: Symbol, from: TypeName, to: TypeAlias) = argSym.info match { + case info @ TypeAlias(TypeRef(_: ThisType, `from`)) if info.variance == to.variance => + val existing = decls.lookup(argSym.name) + if (existing.exists) existing.info = existing.info & to + else enterArgBinding(argSym, to, cls, decls) + case _ => + } + + def forwardRefs(from: TypeName, to: Type) = to match { + case to: TypeAlias => + for (pref <- parentRefs) { + def forward()(implicit ctx: Context): Unit = + for (argSym <- pref.decls) + if (argSym is BaseTypeArg) forwardRef(argSym, from, to) + pref.info match { + case info: TempClassInfo => info.addSuspension(implicit ctx => forward()) + case _ => forward() + } + } + case _ => + } + + paramBindings.foreachBinding(forwardRefs) + } + + /** Used only for debugging: All BaseTypeArg definitions in + * `cls` and all its base classes. + */ + def allBaseTypeArgs(cls: ClassSymbol)(implicit ctx: Context) = + for { bc <- cls.baseClasses + sym <- bc.info.decls.toList + if sym.is(BaseTypeArg) + } yield sym + /** An argument bounds violation is a triple consisting of * - the argument tree * - a string "upper" or "lower" indicating which bound is violated diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 955a5a11c6f2..a650c20c41dd 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3001,6 +3001,8 @@ object Types { override def hashCode: Int = identityHash override def equals(that: Any) = this eq that.asInstanceOf[AnyRef] + def withName(name: Name): this.type = { myRepr = name; this } + private var myRepr: Name = null def repr(implicit ctx: Context): Name = { if (myRepr == null) myRepr = SkolemName.fresh() @@ -3073,14 +3075,14 @@ object Types { } /** Instantiate variable from the constraints over its `origin`. - * If `fromBelow` is true, the variable is instantiated to the lub + * If `variance >= 0` the variable is instantiated to the lub * of its lower bounds in the current constraint; otherwise it is - * instantiated to the glb of its upper bounds. However, a lower bound - * instantiation can be a singleton type only if the upper bound - * is also a singleton type. + * instantiated to the glb of its upper bounds. However, lower bound + * singleton types and |-types are sometimes widened; for details see + * TypeComparer#instanceType. 
*/ - def instantiate(fromBelow: Boolean)(implicit ctx: Context): Type = - instantiateWith(ctx.typeComparer.instanceType(origin, fromBelow)) + def instantiate(variance: Int)(implicit ctx: Context): Type = + instantiateWith(ctx.typeComparer.instanceType(origin, variance)) /** Unwrap to instance (if instantiated) or origin (if not), until result * is no longer a TypeVar diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 5d33738c2b59..1ac090da8718 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -119,24 +119,26 @@ class TreePickler(pickler: TastyPickler) { pickleType(c.symbolValue.termRef) } - def pickleType(tpe0: Type, richTypes: Boolean = false)(implicit ctx: Context): Unit = try { + def pickleType(tpe0: Type, richTypes: Boolean = false)(implicit ctx: Context): Unit = { val tpe = tpe0.stripTypeVar - val prev = pickledTypes.get(tpe) - if (prev == null) { - pickledTypes.put(tpe, currentAddr) - pickleNewType(tpe, richTypes) - } - else { - writeByte(SHARED) - writeRef(prev.asInstanceOf[Addr]) + try { + val prev = pickledTypes.get(tpe) + if (prev == null) { + pickledTypes.put(tpe, currentAddr) + pickleNewType(tpe, richTypes) + } + else { + writeByte(SHARED) + writeRef(prev.asInstanceOf[Addr]) + } + } catch { + case ex: AssertionError => + println(i"error when pickling type $tpe") + throw ex } - } catch { - case ex: AssertionError => - println(i"error when pickling type $tpe0") - throw ex } - private def pickleNewType(tpe: Type, richTypes: Boolean)(implicit ctx: Context): Unit = try { tpe match { + private def pickleNewType(tpe: Type, richTypes: Boolean)(implicit ctx: Context): Unit = tpe match { case AppliedType(tycon, args) => writeByte(APPLIEDtype) withLength { pickleType(tycon); args.foreach(pickleType(_)) } @@ -241,21 +243,10 @@ class TreePickler(pickler: TastyPickler) { pickleMethodic(POLYtype, tpe) case tpe: MethodType if richTypes => pickleMethodic(METHODtype, tpe) - case tpe: TypeParamRef => - if (!pickleParamRef(tpe)) - // TODO figure out why this case arises in e.g. pickling AbstractFileReader. 
- ctx.typerState.constraint.entry(tpe) match { - case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes) - case _ => assert(false, s"orphan poly parameter: $tpe") - } - case tpe: TermParamRef => - assert(pickleParamRef(tpe), s"orphan method parameter: $tpe") + case tpe: ParamRef => + assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe") case tpe: LazyRef => pickleType(tpe.ref) - }} catch { - case ex: AssertionError => - println(i"error while pickling type $tpe") - throw ex } def picklePackageRef(pkg: Symbol)(implicit ctx: Context): Unit = { diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 375edc3cb4db..03e83e93e001 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -174,7 +174,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def paramText(name: Name, bounds: TypeBounds): Text = name.toString ~ toText(bounds) changePrec(GlobalPrec) { "[" ~ Text((tp.paramNames, tp.paramInfos).zipped.map(paramText), ", ") ~ - "]" ~ (" => " provided !tp.resultType.isInstanceOf[MethodType]) ~ + "]" ~ lambdaHash(tp) ~ (" => " provided !tp.resultType.isInstanceOf[MethodType]) ~ toTextGlobal(tp.resultType) } case tp: TypeParamRef => diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index eb7773ef3869..1e7ecbe29eb1 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -18,6 +18,7 @@ import core.Decorators._ import core.TypeErasure.isErasedType import core.Phases.Phase import core.Mode +import SymUtils._ import typer._ import typer.ErrorReporting._ import reporting.ThrowingReporter @@ -45,7 +46,7 @@ import scala.util.control.NonFatal */ class TreeChecker extends Phase with SymTransformer { import ast.tpd._ - + import TreeChecker._ private val seenClasses = collection.mutable.HashMap[String, Symbol]() private val seenModuleVals = collection.mutable.HashMap[String, Symbol]() @@ -288,26 +289,6 @@ class TreeChecker extends Phase with SymTransformer { res } - /** Check that TypeParamRefs and MethodParams refer to an enclosing type */ - def checkNoOrphans(tp: Type)(implicit ctx: Context) = new TypeMap() { - val definedBinders = mutable.Set[Type]() - def apply(tp: Type): Type = { - tp match { - case tp: BindingType => - definedBinders += tp - mapOver(tp) - definedBinders -= tp - case tp: ParamRef => - assert(definedBinders.contains(tp.binder), s"orphan param: $tp") - case tp: TypeVar => - apply(tp.underlying) - case _ => - mapOver(tp) - } - tp - } - }.apply(tp) - def checkNotRepeated(tree: Tree)(implicit ctx: Context): tree.type = { def allowedRepeated = (tree.symbol.flags is Case) && tree.tpe.widen.isRepeatedParam @@ -467,3 +448,27 @@ class TreeChecker extends Phase with SymTransformer { } } } + +object TreeChecker { + /** Check that TypeParamRefs and MethodParams refer to an enclosing type */ + def checkNoOrphans(tp0: Type, tree: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context) = new TypeMap() { + val definedBinders = new java.util.IdentityHashMap[Type, Any] + def apply(tp: Type): Type = { + tp match { + case tp: BindingType => + definedBinders.put(tp, tp) + mapOver(tp) + definedBinders.remove(tp) + case tp: ParamRef => + assert(definedBinders.get(tp.binder) != null, s"orphan param: ${tp.show}, hash of binder = ${System.identityHashCode(tp.binder)}, 
tree = ${tree.show}, type = $tp0") + case tp: TypeVar => + apply(tp.underlying) + case tp: TypeRef if tp.info.isAlias && tp.symbol.isAliasPreferred => + apply(tp.superType) + case _ => + mapOver(tp) + } + tp + } + }.apply(tp0) +} diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 5d1c44efcf21..37bc8c774d53 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -130,6 +130,9 @@ object Checking { */ class CheckNonCyclicMap(sym: Symbol, reportErrors: Boolean)(implicit ctx: Context) extends TypeMap { + /** Set of type references whose info is currently checked */ + private val locked = mutable.Set[TypeRef]() + /** Are cycles allowed within nested refinedInfos of currently checked type? */ private var nestedCycleOK = false @@ -212,7 +215,10 @@ object Checking { } if (isInteresting(pre)) { val pre1 = this(pre, false, false) - checkInfo(tp.info) + if (locked.contains(tp)) throw CyclicReference(tp.symbol) + locked += tp + try checkInfo(tp.info) + finally locked -= tp if (pre1 eq pre) tp else tp.newLikeThis(pre1) } else tp diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 6c72c16e0680..11472e0e3dff 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -114,7 +114,7 @@ object ErrorReporting { val expected1 = dropJavaMethod(expected) if ((found1 eq found) != (expected eq expected1) && (found1 <:< expected1)) "\n (Note that Scala's and Java's representation of this type differs)" - else if (ctx.settings.explaintypes.value) + else if (ctx.settings.explainTypes.value) "\n" + ctx.typerState.show + "\n" + TypeComparer.explained((found <:< expected)(_)) else "" diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 6c0be51dfcfd..fe63b3b7504c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -334,11 +334,14 @@ object Implicits { class FailedImplicit(failures: List[ExplainedSearchFailure], val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure { def explanation(implicit ctx: Context): String = if (failures.isEmpty) s" No implicit candidates were found that $qualify" - else " " + (failures map (_.explanation) mkString "\n ") - override def postscript(implicit ctx: Context): String = + else failures.map(_.explanation).mkString("\n").replace("\n", "\n ") + override def postscript(implicit ctx: Context): String = { + val what = + if (argument.isEmpty) i"value of type $pt" + else i"conversion from ${argument.tpe.widen} to $pt" i""" - |Implicit search failure summary: |$explanation""" + } } } @@ -671,7 +674,7 @@ trait Implicits { self: Typer => ctx.traceIndented(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { assert(!pt.isInstanceOf[ExprType]) val isearch = - if (ctx.settings.explaintypes.value) new ExplainedImplicitSearch(pt, argument, pos) + if (ctx.settings.explainImplicits.value) new ExplainedImplicitSearch(pt, argument, pos) else new ImplicitSearch(pt, argument, pos) val result = isearch.bestImplicit result match { diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 632dbff76c89..9400b3f10d12 100644 --- 
a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -45,9 +45,12 @@ object Inferencing { else throw new Error(i"internal error: type of $what $tp is not fully defined, pos = $pos") // !!! DEBUG - /** Instantiate selected type variables `tvars` in type `tp` */ + /** Instantiate selected type variables `tvars` in type `tp`. Instantiation + * proceeds with `implicitMode = true`, that is, type variables are approximated + * from below using or-dominators. + */ def instantiateSelected(tp: Type, tvars: List[Type])(implicit ctx: Context): Unit = - new IsFullyDefinedAccumulator(new ForceDegree.Value(tvars.contains, minimizeAll = true)).process(tp) + new IsFullyDefinedAccumulator(new ForceDegree.Value(tvars.contains, implicitMode = true)).process(tp) /** The accumulator which forces type variables using the policy encoded in `force` * and returns whether the type is fully defined. The direction in which @@ -69,8 +72,8 @@ object Inferencing { * to their upper bound. */ private class IsFullyDefinedAccumulator(force: ForceDegree.Value)(implicit ctx: Context) extends TypeAccumulator[Boolean] { - private def instantiate(tvar: TypeVar, fromBelow: Boolean): Type = { - val inst = tvar.instantiate(fromBelow) + private def instantiate(tvar: TypeVar, v: Int): Type = { + val inst = tvar.instantiate(v) typr.println(i"forced instantiation of ${tvar.origin} = $inst") inst } @@ -81,18 +84,20 @@ object Inferencing { case tvar: TypeVar if !tvar.isInstantiated && ctx.typerState.constraint.contains(tvar) => force.appliesTo(tvar) && { - val direction = instDirection(tvar.origin) - if (direction != 0) { + var constrVariance = instVariance(tvar.origin) + if (variance == 0) constrVariance = constrVariance min 0 + if (constrVariance != 0) { //if (direction > 0) println(s"inst $tvar dir = up") - instantiate(tvar, direction < 0) + instantiate(tvar, constrVariance) } else { - val minimize = - force.minimizeAll || - variance >= 0 && !( - force == ForceDegree.noBottom && - defn.isBottomType(ctx.typeComparer.approximation(tvar.origin, fromBelow = true))) - if (minimize) instantiate(tvar, fromBelow = true) + val effectiveVariance = + if (force.implicitMode) 0 + else if (force == ForceDegree.noBottom && + defn.isBottomType(ctx.typeComparer.approximation(tvar.origin, fromBelow = true))) + -1 + else variance + if (effectiveVariance >= 0) instantiate(tvar, effectiveVariance) else toMaximize = true } foldOver(x, tvar) @@ -105,7 +110,7 @@ object Inferencing { def apply(x: Unit, tp: Type): Unit = { tp match { case tvar: TypeVar if !tvar.isInstantiated => - instantiate(tvar, fromBelow = false) + instantiate(tvar, -1) case _ => } foldOver(x, tp) @@ -170,13 +175,15 @@ object Inferencing { occurring(tree, boundVars(tree, Nil), Nil) } - /** The instantiation direction for given poly param computed + /** The preferred instantiation variance for given param ref computed * from the constraint: - * @return 1 (maximize) if constraint is uniformly from above, - * -1 (minimize) if constraint is uniformly from below, + * @return -1 (maximize) if constraint is uniformly from above, + * +1 (minimize) if constraint is uniformly from below, * 0 if unconstrained, or constraint is from below and above. + * Note that the instantiation direction of the parameter is the reverse + * of the variance: -1 means maximize, +1 means minimize. 
*/ - private def instDirection(param: TypeParamRef)(implicit ctx: Context): Int = { + private def instVariance(param: TypeParamRef)(implicit ctx: Context): Int = { val constrained = ctx.typerState.constraint.fullBounds(param) val original = param.binder.paramInfos(param.paramNum) val cmp = ctx.typeComparer @@ -184,7 +191,7 @@ object Inferencing { if (!cmp.isSubTypeWhenFrozen(constrained.lo, original.lo)) 1 else 0 val approxAbove = if (!cmp.isSubTypeWhenFrozen(original.hi, constrained.hi)) 1 else 0 - approxAbove - approxBelow + approxBelow - approxAbove } /** Recursively widen and also follow type declarations and type aliases. */ @@ -263,13 +270,13 @@ object Inferencing { vs foreachBinding { (tvar, v) => if (v != 0) { typr.println(s"interpolate ${if (v == 1) "co" else "contra"}variant ${tvar.show} in ${tp.show}") - tvar.instantiate(fromBelow = v == 1) + tvar.instantiate(v) } } for (tvar <- constraint.uninstVars) if (!(vs contains tvar) && qualifies(tvar)) { typr.println(s"instantiating non-occurring ${tvar.show} in ${tp.show} / $tp") - tvar.instantiate(fromBelow = true) + tvar.instantiate(0) } } if (constraint.uninstVars exists qualifies) interpolate() @@ -283,12 +290,11 @@ object Inferencing { val vs = variances(tp, alwaysTrue) var result: Option[TypeVar] = None vs foreachBinding { (tvar, v) => - if (v == 1) tvar.instantiate(fromBelow = false) - else if (v == -1) tvar.instantiate(fromBelow = true) + if (v != 0) tvar.instantiate(-v) else { val bounds = ctx.typerState.constraint.fullBounds(tvar.origin) if (!(bounds.hi <:< bounds.lo)) result = Some(tvar) - tvar.instantiate(fromBelow = false) + tvar.instantiate(-1) } } result @@ -366,9 +372,9 @@ object Inferencing { /** An enumeration controlling the degree of forcing in "is-dully-defined" checks. */ @sharable object ForceDegree { - class Value(val appliesTo: TypeVar => Boolean, val minimizeAll: Boolean) - val none = new Value(_ => false, minimizeAll = false) - val all = new Value(_ => true, minimizeAll = false) - val noBottom = new Value(_ => true, minimizeAll = false) + class Value(val appliesTo: TypeVar => Boolean, val implicitMode: Boolean) + val none = new Value(_ => false, implicitMode = false) + val all = new Value(_ => true, implicitMode = false) + val noBottom = new Value(_ => true, implicitMode = false) } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index da9f9f6ac218..2c0e53d14d22 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -915,6 +915,7 @@ class Namer { typer: Typer => Checking.checkWellFormed(cls) if (isDerivedValueClass(cls)) cls.setFlag(Final) cls.info = avoidPrivateLeaks(cls, cls.pos) + cls.baseClasses.foreach(_.invalidateBaseTypeRefCache) } } diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 4715873e5483..34eadb318333 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -73,6 +73,22 @@ object RefChecks { } } + /** The this-type of `cls` which should be used when looking at the types of + * inherited members. If `cls` has a non-trivial self type, this returns a skolem + * with the class type instead of the `this-type` of the class as usual. + * This is done because otherwise we want to understand inherited infos + * as they are written, whereas with the this-type they could be + * more special. 
A test where this makes a difference is pos/i1401.scala. + * This one used to succeed only if forwarding parameters is on. + * (Forwarding tends to hide problems by binding parameter names). + */ + private def upwardsThisType(cls: Symbol)(implicit ctx: Context) = cls.info match { + case ClassInfo(_, _, _, _, tp: Type) if (tp ne cls.typeRef) && !cls.is(ModuleOrFinal) => + SkolemType(cls.typeRef).withName(nme.this_) + case _ => + cls.thisType + } + /** Check that self type of this class conforms to self types of parents. * and required classes. */ @@ -134,7 +150,7 @@ object RefChecks { * before, but it looks too complicated and method bodies are far too large. */ private def checkAllOverrides(clazz: Symbol)(implicit ctx: Context): Unit = { - val self = clazz.thisType + val self = upwardsThisType(clazz) var hasErrors = false case class MixinOverrideError(member: Symbol, msg: String) @@ -201,7 +217,14 @@ object RefChecks { infoStringWithLocation(other), infoString(member), msg, addendum) } - def emitOverrideError(fullmsg: String) = + def emitOverrideError(fullmsg: String) = { + if (false) { + val tparams = clazz.info.typeMembers.map(_.symbol) + .filter(_.is(BaseTypeArg | TypeParamOrAccessor)) + println(i"*** type param members of $clazz") + for (tparam <- tparams) + println(i" ${tparam.showLocated}: ${tparam.info}") + } if (!(hasErrors && member.is(Synthetic) && member.is(Module))) { // suppress errors relating toi synthetic companion objects if other override // errors (e.g. relating to the companion class) have already been reported. @@ -209,6 +232,7 @@ object RefChecks { else mixinOverrideErrors += new MixinOverrideError(member, fullmsg) hasErrors = true } + } def overrideError(msg: String) = { if (noErrorType) diff --git a/tests/pos/i1401.scala b/tests/pos/i1401.scala index 140d78e7f621..03dd684fe421 100644 --- a/tests/pos/i1401.scala +++ b/tests/pos/i1401.scala @@ -1,7 +1,7 @@ package i1401 -trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { - def -(elem: A): Repr +trait Subtractable[AS, +Repr <: Subtractable[AS, Repr]] { + def -(elem: AS): Repr } trait BufferLike[BA, +This <: BufferLike[BA, This] with Buffer[BA]] diff --git a/tests/pos/implicits1.scala b/tests/pos/implicits1.scala index eda1346630d2..b8e6ef4172c3 100644 --- a/tests/pos/implicits1.scala +++ b/tests/pos/implicits1.scala @@ -54,4 +54,8 @@ object Implicits { val xx: Int = (1: Byte) + // Problem with implicits over or types + def useOrd[T: math.Ordering](xs: T*) = () + useOrd(Some(1), None) + }
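The new test in tests/pos/implicits1.scala is the user-facing motivation for the `variance = 0` handling in `ConstraintHandling.instanceType`: the arguments only give `T` the lower bound `Some[Int] | None`, and an `Ordering` instance exists for the |-dominator `Option[Int]` but not for the |-type itself. Below is that test again with explanatory comments (the comments and the enclosing object are mine, not part of the patch):

```scala
// The constraint collected for T is  Some[Int] | None <: T.  Instantiating T with
// the new variance-0 treatment first constrains T from below by the |-dominator
// Option[Int], so scala.math.Ordering.Option can supply the required instance;
// without that step T would stay a |-type, for which no Ordering exists.
object OrTypeInferenceDemo {
  def useOrd[T: math.Ordering](xs: T*) = ()
  useOrd(Some(1), None)   // T is inferred as Option[Int], not Some[Int] | None
}
```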
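The "ugliness" accumulator inside `instanceType` decides whether that |-dominator constraint is actually added: it counts how many type arguments end up as wildcards. A minimal standalone model of the count, applied to the run/hmap.scala bound quoted in the doc comment (the `Ty` mini-AST and every name below are mine, not compiler types):

```scala
// Toy model of the ugliness count: a wildcard argument (a non-alias TypeBounds)
// costs 1, and for | and & types we take the worse branch, mirroring the
// TypeAccumulator used in ConstraintHandling.instanceType.
object UglinessDemo {
  sealed trait Ty
  case class Ref(name: String, args: List[Ty] = Nil) extends Ty
  case class Wild(lo: Ty, hi: Ty) extends Ty   // a wildcard argument `_ >: lo <: hi`
  case class Or(tp1: Ty, tp2: Ty) extends Ty
  case class And(tp1: Ty, tp2: Ty) extends Ty

  def ugliness(tp: Ty): Int = tp match {
    case Wild(lo, hi)  => 1 + ugliness(lo) + ugliness(hi)
    case Or(tp1, tp2)  => ugliness(tp1) max ugliness(tp2)
    case And(tp1, tp2) => ugliness(tp1) max ugliness(tp2)
    case Ref(_, args)  => args.map(ugliness).sum
  }

  // Lower bound: HEntry[$param$5, V] | HEntry[String("name"), V]         -- ugliness 0
  val lo1 = Or(Ref("HEntry", List(Ref("$param$5"), Ref("V"))),
               Ref("HEntry", List(Ref("name"), Ref("V"))))
  // Its |-dominator: HEntry[_ >: $param$11 & String("cat") <: String, V] -- ugliness 1
  val lo2 = Ref("HEntry", List(Wild(And(Ref("$param$11"), Ref("cat")), Ref("String")), Ref("V")))

  assert(ugliness(lo1) == 0 && ugliness(lo2) == 1)
}
```

Since the dominator is strictly uglier here, `instanceType` refrains from adding the extra constraint and keeps the |-type that hmap.scala's implicit search needs.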
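Renaming `instDirection` to `instVariance` in Inferencing.scala also flips the sign, so the result now reads as a variance (+1 minimize / constrained from below, -1 maximize / constrained from above); that is why the final expression becomes `approxBelow - approxAbove`. A small model of just that arithmetic (function and parameter names are mine):

```scala
// +1: only the lower bound was tightened  -> instantiate from below (minimize)
// -1: only the upper bound was tightened  -> instantiate from above (maximize)
//  0: neither or both                     -> fall through to the variance heuristics
object InstVarianceDemo {
  def instVariance(tightenedBelow: Boolean, tightenedAbove: Boolean): Int = {
    val approxBelow = if (tightenedBelow) 1 else 0
    val approxAbove = if (tightenedAbove) 1 else 0
    approxBelow - approxAbove
  }

  assert(instVariance(tightenedBelow = true,  tightenedAbove = false) ==  1)
  assert(instVariance(tightenedBelow = false, tightenedAbove = true)  == -1)
  assert(instVariance(tightenedBelow = true,  tightenedAbove = true)  ==  0)
}
```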
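The one-token fix in `TypeComparer.isSubArgs` (recursing with `tparams.tail` rather than `tparams`) matters because each argument must be compared under the variance of its own type parameter; before the fix, every argument after the first was checked with the variance of the first parameter. A source-level picture of what the per-parameter variances should allow (the class is mine, for illustration only):

```scala
// With a covariant first parameter and an invariant second one, only the first
// argument may vary between the two sides; the corrected isSubArgs consults B's
// invariance for the second argument instead of reusing A's covariance.
object SubArgsDemo {
  class Pair[+A, B]
  val p: Pair[Int, String] = new Pair[Int, String]
  val widened: Pair[Any, String] = p   // fine: A is covariant
  // val broken: Pair[Int, Any] = p    // rejected: B is invariant
}
```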
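`Config.forwardTypeParams` together with `TypeOps.forwardParamBindings` short-circuits the alias chains that arise when every class in a hierarchy rebinds its parent's type parameter. A sketch of the situation being optimized (the hierarchy and names are illustrative, not taken from the patch):

```scala
// Each parent type parameter is recorded in a subclass as a type-member alias
// (a BaseTypeArg). Without forwarding, Leaf's binding for Base's parameter forms
// a chain  A = B = Int  that subtype checks must walk; with forwarding the
// compiler also enters the direct binding  A = Int  (and joins with `&` if two
// parents would forward different types for the same name).
object ForwardingDemo {
  trait Base[A] { def get: A }
  trait Mid[B] extends Base[B]
  class Leaf extends Mid[Int] { def get = 0 }

  val n: Int = (new Leaf).get   // relies on Leaf's binding of Base's A being Int
}
```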
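The `locked` set added to `Checking.CheckNonCyclicMap` is a plain re-entrancy guard: a `TypeRef` whose info is already being checked is reported as a cyclic reference instead of being followed again. The same pattern in isolation (a generic sketch, not compiler code):

```scala
import scala.collection.mutable

// Remember what is currently being checked, fail fast on re-entry, and always
// release in a finally so that later, unrelated checks are unaffected.
object CycleGuardDemo {
  final class Node(val name: String) { var deps: List[Node] = Nil }

  private val locked = mutable.Set[Node]()

  def check(n: Node): Unit = {
    if (locked.contains(n))
      throw new IllegalStateException(s"cyclic reference involving ${n.name}")
    locked += n
    try n.deps.foreach(check)
    finally locked -= n
  }
}
```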
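Finally, splitting `-explaintypes` into `-explain-types` and `-explain-implicits` in ScalaSettings.scala means implicit-search diagnostics no longer piggyback on the subtype-explanation switch used by `ErrorReporting`. As a rough illustration (the snippet and its error are mine, not part of the patch), a program like the one below fails implicit search; compiling it with the new `-explain-implicits` flag should explain the failed search in detail, while `-explain-types` now only controls the detailed subtype traces:

```scala
// Deliberately does not compile: there is no Ordering[Box[Int]] in scope, so the
// implicit search for the context bound fails; -explain-implicits is meant to
// explain such failures without also enabling subtype tracing.
object ExplainImplicitsDemo {
  class Box[T](val value: T)
  def largest[T: Ordering](xs: List[T]): T = xs.max
  val b = largest(List(new Box(1), new Box(2)))   // error: no implicit Ordering[Box[Int]]
}
```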