Bug fixes to make collection strawman compile #2331


Merged
merged 13 commits into from
May 1, 2017
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/config/Config.scala
@@ -77,7 +77,7 @@ object Config {
final val traceDeepSubTypeRecursions = false

/** When explaining subtypes and this flag is set, also show the classes of the compared types. */
final val verboseExplainSubtype = true
final val verboseExplainSubtype = false

/** If this flag is set, take the fast path when comparing same-named type-aliases and types */
final val fastPathForRefinedSubtype = true
4 changes: 3 additions & 1 deletion compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -24,7 +24,8 @@ class ScalaSettings extends Settings.SettingGroup {
val deprecation = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.")
val migration = BooleanSetting("-migration", "Emit warning and location for migration issues from Scala 2.")
val encoding = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding)
val explaintypes = BooleanSetting("-explaintypes", "Explain type errors in more detail.")
val explainTypes = BooleanSetting("-explain-types", "Explain type errors in more detail.")
val explainImplicits = BooleanSetting("-explain-implicits", "Explain implicit search errors in more detail.")
val explain = BooleanSetting("-explain", "Explain errors in more detail.")
val feature = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
val help = BooleanSetting("-help", "Print a synopsis of standard options")
@@ -95,6 +96,7 @@ class ScalaSettings extends Settings.SettingGroup {
val YforceSbtPhases = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is run outside of sbt, for debugging.")
val YdumpSbtInc = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.")
val YcheckAllPatmat = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm)")
val YsuppressParamForwarding = BooleanSetting("-Ysuppress-param-forwarding", "Don't install aliases for base type parameters.")

/** Area-specific debug output */
val Yexplainlowlevel = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.")
3 changes: 2 additions & 1 deletion compiler/src/dotty/tools/dotc/core/Mode.scala
@@ -39,6 +39,7 @@ object Mode {
val TypevarsMissContext = newMode(4, "TypevarsMissContext")
val CheckCyclic = newMode(5, "CheckCyclic")

/** We are looking at the arguments of a supercall */
val InSuperCall = newMode(6, "InSuperCall")

/** Allow GADTFlexType labelled types to have their bounds adjusted */
@@ -81,7 +82,7 @@ object Mode {
val ReadPositions = newMode(16, "ReadPositions")

val PatternOrType = Pattern | Type

/** We are elaborating the fully qualified name of a package clause.
* In this case, identifiers should never be imported.
*/
32 changes: 20 additions & 12 deletions compiler/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -1381,12 +1381,15 @@ object SymDenotations {
/** Invalidate baseTypeRefCache, baseClasses and superClassBits on new run */
private def checkBasesUpToDate()(implicit ctx: Context) =
if (baseTypeRefValid != ctx.runId) {
baseTypeRefCache = new java.util.HashMap[CachedType, Type]
invalidateBaseTypeRefCache()
myBaseClasses = null
mySuperClassBits = null
baseTypeRefValid = ctx.runId
}

def invalidateBaseTypeRefCache() =
baseTypeRefCache = new java.util.HashMap[CachedType, Type]

private def computeBases(implicit ctx: Context): (List[ClassSymbol], BitSet) = {
if (myBaseClasses eq Nil) throw CyclicReference(this)
myBaseClasses = Nil
@@ -1712,18 +1715,23 @@
/*>|>*/ ctx.debugTraceIndented(s"$tp.baseTypeRef($this)") /*<|<*/ {
tp match {
case tp: CachedType =>
checkBasesUpToDate()
var basetp = baseTypeRefCache get tp
if (basetp == null) {
baseTypeRefCache.put(tp, NoPrefix)
basetp = computeBaseTypeRefOf(tp)
if (isCachable(tp)) baseTypeRefCache.put(tp, basetp)
else baseTypeRefCache.remove(tp)
} else if (basetp == NoPrefix) {
baseTypeRefCache.put(tp, null)
throw CyclicReference(this)
try {
checkBasesUpToDate()
var basetp = baseTypeRefCache get tp
if (basetp == null) {
baseTypeRefCache.put(tp, NoPrefix)
basetp = computeBaseTypeRefOf(tp)
if (isCachable(tp)) baseTypeRefCache.put(tp, basetp)
else baseTypeRefCache.remove(tp)
} else if (basetp == NoPrefix)
throw CyclicReference(this)
basetp
}
catch {
case ex: Throwable =>
baseTypeRefCache.put(tp, null)
throw ex
}
basetp
case _ =>
computeBaseTypeRefOf(tp)
}
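
A note on the caching change above: the cache entry is now reset inside a catch-all, not only when a cycle is detected, so a computation that fails for any reason cannot leave the NoPrefix sentinel behind and poison later lookups. The following is a standalone, simplified sketch of that discipline, not the compiler's code: names such as CycleAwareMemo are invented, and it removes the entry on failure where the real code stores a null marker, which has the same effect of forcing recomputation on the next lookup.

import scala.collection.mutable

// Thrown when a computation re-enters itself; stands in for dotty's CyclicReference.
final class CyclicSketch(key: Any) extends RuntimeException(s"cyclic computation for $key")

// Memo table with a sentinel for in-flight computations: re-entry on the same key
// is reported as a cycle, and the entry is cleared on *any* failure so a stale
// sentinel cannot poison later lookups.
class CycleAwareMemo[K, V <: AnyRef](compute: K => V) {
  private val InFlight = new AnyRef
  private val cache = mutable.HashMap.empty[K, AnyRef]

  def apply(key: K): V = cache.get(key) match {
    case Some(InFlight) => throw new CyclicSketch(key)
    case Some(value)    => value.asInstanceOf[V]
    case None =>
      cache(key) = InFlight             // sentinel: computation has started
      try {
        val value = compute(key)
        cache(key) = value              // replace the sentinel with the real result
        value
      } catch {
        case ex: Throwable =>
          cache.remove(key)             // never leave the sentinel behind
          throw ex
      }
  }
}
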
16 changes: 11 additions & 5 deletions compiler/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -574,8 +574,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
isNewSubType(tp1.parent, tp2)
case tp1 @ HKApply(tycon1, args1) =>
compareHkApply1(tp1, tycon1, args1, tp2)
case EtaExpansion(tycon1) =>
isSubType(tycon1, tp2)
case tp1: HKTypeLambda =>
def compareHKLambda = tp1 match {
case EtaExpansion(tycon1) => isSubType(tycon1, tp2)
case _ => tp2 match {
case tp2: HKTypeLambda => false // this case was covered in thirdTry
case _ => tp2.isHK && isSubType(tp1.resultType, tp2.appliedTo(tp1.paramRefs))
}
}
compareHKLambda
case AndType(tp11, tp12) =>
// Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2
// and analogously for T11 & (T121 | T122) & T12 <: T2
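
For intuition on the new HKTypeLambda case above: an eta-expansion such as [X] =>> List[X] is compared simply as its underlying constructor List, and otherwise the lambda's result type is checked against tp2 applied to the lambda's own parameters. Below is a rough source-level illustration of the kind of higher-kinded conformance question this part of the comparer decides; it uses today's Scala 3 surface syntax, which postdates this PR, and the names are illustrative.

// A higher-kinded member bounded by Iterable...
trait HasContainer {
  type C[X] <: Iterable[X]
}

// ...implemented by an explicit type lambda. Checking that the alias conforms to
// the bound is a lambda-versus-higher-kinded comparison that reduces to
// List[X] <: Iterable[X] under the lambda's own parameter X.
object ListContainer extends HasContainer {
  type C = [X] =>> List[X]
}
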
@@ -775,7 +782,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
(v < 0 || isSubType(tp1, tp2))
}
isSub(args1.head, args2.head)
} && isSubArgs(args1.tail, args2.tail, tparams)
} && isSubArgs(args1.tail, args2.tail, tparams.tail)
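
The change from tparams to tparams.tail above matters because the recursion walks the two argument lists and the parameter list in lock-step; without taking the tail of the parameter list, every argument pair after the first would be judged against the first type parameter's variance. A self-contained sketch of the pattern, with illustrative names rather than the compiler's actual signature:

// Pairwise check over corresponding arguments: the parameter list must be consumed
// together with the argument lists, or later arguments see the wrong parameter.
def isSubArgsSketch[A, P](args1: List[A], args2: List[A], params: List[P])
                         (isSubArg: (A, A, P) => Boolean): Boolean =
  (args1, args2, params) match {
    case (Nil, Nil, _) => true
    case (a1 :: rest1, a2 :: rest2, p :: restP) =>
      isSubArg(a1, a2, p) && isSubArgsSketch(rest1, rest2, restP)(isSubArg)
    case _ => false // length mismatch
  }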

/** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where
* - `B` derives from one of the class symbols of `tp2`,
Expand Down Expand Up @@ -1287,8 +1294,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
tl.integrate(tparams1, tparam1.paramInfoAsSeenFrom(tp1)).bounds &
tl.integrate(tparams2, tparam2.paramInfoAsSeenFrom(tp2)).bounds),
resultTypeExp = tl =>
original(tl.integrate(tparams1, tp1).appliedTo(tl.paramRefs),
tl.integrate(tparams2, tp2).appliedTo(tl.paramRefs)))
original(tp1.appliedTo(tl.paramRefs), tp2.appliedTo(tl.paramRefs)))
}

/** Try to distribute `&` inside type, detect and handle conflicts
129 changes: 61 additions & 68 deletions compiler/src/dotty/tools/dotc/core/TypeOps.scala
@@ -321,72 +321,11 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
}
}

/** If we have member definitions
*
* type argSym v= from
* type from v= to
*
* where the variances of both alias are the same, then enter a new definition
*
* type argSym v= to
*
* unless a definition for `argSym` already exists in the current scope.
*/
def forwardRef(argSym: Symbol, from: Symbol, to: TypeBounds, cls: ClassSymbol, decls: Scope) =
argSym.info match {
case info @ TypeBounds(lo2 @ TypeRef(_: ThisType, name), hi2) =>
if (name == from.name &&
(lo2 eq hi2) &&
info.variance == to.variance &&
!decls.lookup(argSym.name).exists) {
// println(s"short-circuit ${argSym.name} was: ${argSym.info}, now: $to")
enterArgBinding(argSym, to, cls, decls)
}
case _ =>
}


/** Normalize a list of parent types of class `cls` that may contain refinements
* to a list of typerefs referring to classes, by converting all refinements to member
* definitions in scope `decls`. Can add members to `decls` as a side-effect.
*/
def normalizeToClassRefs(parents: List[Type], cls: ClassSymbol, decls: Scope): List[TypeRef] = {

/** If we just entered the type argument binding
*
* type From = To
*
* and there is a type argument binding in a parent in `prefs` of the form
*
* type X = From
*
* then also add the binding
*
* type X = To
*
* to the current scope, provided (1) variances of both aliases are the same, and
* (2) X is not yet defined in current scope. This "short-circuiting" prevents
* long chains of aliases which would have to be traversed in type comparers.
*
* Note: Test i1401.scala shows that `forwardRefs` is also necessary
* for typechecking in the case where self types refer to type parameters
* that are upper-bounded by subclass instances.
*/
def forwardRefs(from: Symbol, to: Type, prefs: List[TypeRef]) = to match {
case to @ TypeBounds(lo1, hi1) if lo1 eq hi1 =>
for (pref <- prefs) {
def forward()(implicit ctx: Context): Unit =
for (argSym <- pref.decls)
if (argSym is BaseTypeArg)
forwardRef(argSym, from, to, cls, decls)
pref.info match {
case info: TempClassInfo => info.addSuspension(implicit ctx => forward())
case _ => forward()
}
}
case _ =>
}

// println(s"normalizing $parents of $cls in ${cls.owner}") // !!! DEBUG

// A map consolidating all refinements arising from parent type parameters
@@ -429,16 +368,70 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
s"redefinition of ${decls.lookup(name).debugString} in ${cls.showLocated}")
enterArgBinding(formals(name), refinedInfo, cls, decls)
}
// Forward definitions in super classes that have one of the refined parameters
// as aliases directly to the refined info.
// Note that this cannot be fused with the previous loop because we now
// assume that all arguments have been entered in `decls`.
refinements foreachBinding { (name, refinedInfo) =>
forwardRefs(formals(name), refinedInfo, parentRefs)
}

if (!ctx.settings.YsuppressParamForwarding.value)
forwardParamBindings(parentRefs, refinements, cls, decls)

parentRefs
}

/** Forward parameter bindings in base classes to argument types of
* class `cls` if possible.
* If there are member definitions
*
* type param v= middle
* type middle v= to
*
* where the variances of both aliases are the same, then enter a new definition
*
* type param v= to
*
* If multiple forwarders would be generated, join their `to` types with an `&`.
*
* @param cls The class for which parameter bindings should be forwarded
Review comment (Member): The params are not in the same order in the doc as in the declaration.

* @param decls Its scope
* @param parentRefs The parent type references of `cls`
* @param paramBindings The type parameter bindings generated for `cls`
*
*/
def forwardParamBindings(parentRefs: List[TypeRef],
paramBindings: SimpleMap[TypeName, Type],
cls: ClassSymbol, decls: Scope)(implicit ctx: Context) = {

def forwardRef(argSym: Symbol, from: TypeName, to: TypeAlias) = argSym.info match {
case info @ TypeAlias(TypeRef(_: ThisType, `from`)) if info.variance == to.variance =>
val existing = decls.lookup(argSym.name)
if (existing.exists) existing.info = existing.info & to
Review comment (Member): enterArgBinding sets the info using a LazyType; shouldn't the same thing be done here?

Reply (Contributor, author): It seems we get away with it. I have a tendency to wait for an actual CyclicReference to pop up before we take that measure.

else enterArgBinding(argSym, to, cls, decls)
case _ =>
}

def forwardRefs(from: TypeName, to: Type) = to match {
case to: TypeAlias =>
for (pref <- parentRefs) {
def forward()(implicit ctx: Context): Unit =
for (argSym <- pref.decls)
if (argSym is BaseTypeArg) forwardRef(argSym, from, to)
pref.info match {
case info: TempClassInfo => info.addSuspension(implicit ctx => forward())
case _ => forward()
}
}
case _ =>
}

paramBindings.foreachBinding(forwardRefs)
}
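
In source terms, the bindings forwarded here arise when class type parameters are represented as BaseTypeArg type members. A hypothetical hierarchy illustrating the alias chain that forwardParamBindings short-circuits (the class names are invented for illustration):

class Base[T]                     // Base's parameter is also visible as a member  type T
class Middle[X] extends Base[X]   // Middle's scope holds the parameter binding    type T = X
class Leaf extends Middle[Int]    // Leaf's own parameter binding is               type X = Int
// Forwarding additionally enters  type T = Int  directly in Leaf's scope (when the
// variances agree), so the comparer can resolve T without walking T -> X -> Int; if
// several forwarders target the same name, their right-hand sides are joined with &.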

/** Used only for debugging: All BaseTypeArg definitions in
* `cls` and all its base classes.
*/
def allBaseTypeArgs(cls: ClassSymbol)(implicit ctx: Context) =
for { bc <- cls.baseClasses
sym <- bc.info.decls.toList
if sym.is(BaseTypeArg)
} yield sym

/** An argument bounds violation is a triple consisting of
* - the argument tree
* - a string "upper" or "lower" indicating which bound is violated
2 changes: 2 additions & 0 deletions compiler/src/dotty/tools/dotc/core/Types.scala
@@ -3030,6 +3030,8 @@ object Types {
override def hashCode: Int = identityHash
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]

def withName(name: Name): this.type = { myRepr = name; this }

private var myRepr: Name = null
def repr(implicit ctx: Context): Name = {
if (myRepr == null) myRepr = SkolemName.fresh()
45 changes: 18 additions & 27 deletions compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -119,24 +119,26 @@ class TreePickler(pickler: TastyPickler) {
pickleType(c.symbolValue.termRef)
}

def pickleType(tpe0: Type, richTypes: Boolean = false)(implicit ctx: Context): Unit = try {
def pickleType(tpe0: Type, richTypes: Boolean = false)(implicit ctx: Context): Unit = {
val tpe = tpe0.stripTypeVar
val prev = pickledTypes.get(tpe)
if (prev == null) {
pickledTypes.put(tpe, currentAddr)
pickleNewType(tpe, richTypes)
}
else {
writeByte(SHARED)
writeRef(prev.asInstanceOf[Addr])
try {
val prev = pickledTypes.get(tpe)
if (prev == null) {
pickledTypes.put(tpe, currentAddr)
pickleNewType(tpe, richTypes)
}
else {
writeByte(SHARED)
writeRef(prev.asInstanceOf[Addr])
}
} catch {
case ex: AssertionError =>
println(i"error when pickling type $tpe")
throw ex
}
} catch {
case ex: AssertionError =>
println(i"error when pickling type $tpe0")
throw ex
}
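
Beside the relocated error reporting, pickleType's core mechanism is structure sharing: the first occurrence of a type is pickled at a recorded address, and later occurrences write only a SHARED back-reference to it. A standalone sketch of that scheme follows; it is not the TASTY format itself, and SharingWriter and its members are invented names.

import scala.collection.mutable

// First occurrence of a value is written in full at a remembered slot; repeated
// occurrences emit only a reference to that slot.
class SharingWriter[T] {
  private val addrOf = mutable.HashMap.empty[T, Int]
  private val out = mutable.ArrayBuffer.empty[String]

  def write(t: T)(render: T => String): Unit = addrOf.get(t) match {
    case Some(addr) => out += s"SHARED -> $addr"   // back-reference only
    case None =>
      addrOf(t) = out.length                       // the slot this entry will occupy
      out += render(t)
  }

  def result: Vector[String] = out.toVector
}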

private def pickleNewType(tpe: Type, richTypes: Boolean)(implicit ctx: Context): Unit = try { tpe match {
private def pickleNewType(tpe: Type, richTypes: Boolean)(implicit ctx: Context): Unit = tpe match {
case AppliedType(tycon, args) =>
writeByte(APPLIEDtype)
withLength { pickleType(tycon); args.foreach(pickleType(_)) }
@@ -241,21 +243,10 @@ class TreePickler(pickler: TastyPickler) {
pickleMethodic(POLYtype, tpe)
case tpe: MethodType if richTypes =>
pickleMethodic(METHODtype, tpe)
case tpe: TypeParamRef =>
if (!pickleParamRef(tpe))
// TODO figure out why this case arises in e.g. pickling AbstractFileReader.
ctx.typerState.constraint.entry(tpe) match {
case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes)
case _ => assert(false, s"orphan poly parameter: $tpe")
}
case tpe: TermParamRef =>
assert(pickleParamRef(tpe), s"orphan method parameter: $tpe")
case tpe: ParamRef =>
assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe")
case tpe: LazyRef =>
pickleType(tpe.ref)
}} catch {
case ex: AssertionError =>
println(i"error while pickling type $tpe")
throw ex
}

def picklePackageRef(pkg: Symbol)(implicit ctx: Context): Unit = {
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -174,7 +174,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
def paramText(name: Name, bounds: TypeBounds): Text = name.toString ~ toText(bounds)
changePrec(GlobalPrec) {
"[" ~ Text((tp.paramNames, tp.paramInfos).zipped.map(paramText), ", ") ~
"]" ~ (" => " provided !tp.resultType.isInstanceOf[MethodType]) ~
"]" ~ lambdaHash(tp) ~ (" => " provided !tp.resultType.isInstanceOf[MethodType]) ~
toTextGlobal(tp.resultType)
}
case tp: TypeParamRef =>