From 7fe7954174f54cae10bba28026042d33c6706565 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Wed, 17 Apr 2019 19:02:33 +0200
Subject: [PATCH 1/7] Optimize interpolateTypeVars

There seemed to have been a large performance regression due to its
recent changes. Let's find out whether that's really the case.
---
 .../dotty/tools/dotc/typer/Inferencing.scala  | 103 +++++++++---------
 .../tools/dotc/util/SimpleIdentitySet.scala   |  29 +++--
 2 files changed, 75 insertions(+), 57 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
index eea23ca62c33..2c25272349ec 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -410,57 +410,60 @@ trait Inferencing { this: Typer =>
       // anymore if they've been garbage-collected, so we can't use
       // `state.ownedVars.size > locked.size` as an early check to avoid computing
       // `qualifying`.
-    val qualifying = state.ownedVars -- locked
-
-    if (!qualifying.isEmpty) {
-      typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
-      val resultAlreadyConstrained =
-        tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly]
-      if (!resultAlreadyConstrained)
-        constrainResult(tree.symbol, tree.tpe, pt)
-          // This is needed because it could establish singleton type upper bounds. See i2998.scala.
-
-      val tp = tree.tpe.widen
-      val vs = variances(tp)
-
-      // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors.
-      // Reason: The errors might reflect unsatisfiable constraints. In that
-      // case interpolating without taking account the constraints risks producing
-      // nonsensical types that then in turn produce incomprehensible errors.
-      // An example is in neg/i1240.scala. Without the condition in the next code line
-      // we get for
-      //
-      //     val y: List[List[String]] = List(List(1))
-      //
-      //     i1430.scala:5: error: type mismatch:
-      //     found   : Int(1)
-      //     required: Nothing
-      //     val y: List[List[String]] = List(List(1))
-      //                                          ^
-      // With the condition, we get the much more sensical:
-      //
-      //     i1430.scala:5: error: type mismatch:
-      //     found   : Int(1)
-      //     required: String
-      //     val y: List[List[String]] = List(List(1))
-      val hasUnreportedErrors = state.reporter.hasUnreportedErrors
-      def constraint = state.constraint
-      for (tvar <- qualifying)
-        if (!tvar.isInstantiated && state.constraint.contains(tvar)) {
-          // Needs to be checked again, since previous interpolations could already have
-          // instantiated `tvar` through unification.
-          val v = vs(tvar)
-          if (v == null) {
-            typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint")
-            tvar.instantiate(fromBelow = tvar.hasLowerBound)
-          }
-          else if (!hasUnreportedErrors)
-            if (v.intValue != 0) {
-              typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
-              tvar.instantiate(fromBelow = v.intValue == 1)
+
+    val ownedVars = state.ownedVars
+    if (ownedVars.size > 0) {
+      val qualifying = ownedVars -- locked
+      if (!qualifying.isEmpty) {
+        typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
+        val resultAlreadyConstrained =
+          tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly]
+        if (!resultAlreadyConstrained)
+          constrainResult(tree.symbol, tree.tpe, pt)
+            // This is needed because it could establish singleton type upper bounds. See i2998.scala.
+
+        val tp = tree.tpe.widen
+        val vs = variances(tp)
+
+        // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors.
+        // Reason: The errors might reflect unsatisfiable constraints. In that
+        // case interpolating without taking account the constraints risks producing
+        // nonsensical types that then in turn produce incomprehensible errors.
+        // An example is in neg/i1240.scala. Without the condition in the next code line
+        // we get for
+        //
+        //     val y: List[List[String]] = List(List(1))
+        //
+        //     i1430.scala:5: error: type mismatch:
+        //     found   : Int(1)
+        //     required: Nothing
+        //     val y: List[List[String]] = List(List(1))
+        //                                          ^
+        // With the condition, we get the much more sensical:
+        //
+        //     i1430.scala:5: error: type mismatch:
+        //     found   : Int(1)
+        //     required: String
+        //     val y: List[List[String]] = List(List(1))
+        val hasUnreportedErrors = state.reporter.hasUnreportedErrors
+        def constraint = state.constraint
+        for (tvar <- qualifying)
+          if (!tvar.isInstantiated && state.constraint.contains(tvar)) {
+            // Needs to be checked again, since previous interpolations could already have
+            // instantiated `tvar` through unification.
+            val v = vs(tvar)
+            if (v == null) {
+              typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint")
+              tvar.instantiate(fromBelow = tvar.hasLowerBound)
             }
-            else typr.println(i"no interpolation for nonvariant $tvar in $state")
-        }
+            else if (!hasUnreportedErrors)
+              if (v.intValue != 0) {
+                typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
+                tvar.instantiate(fromBelow = v.intValue == 1)
+              }
+              else typr.println(i"no interpolation for nonvariant $tvar in $state")
+          }
+      }
     }
     tree
   }
diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
index 2161d597e150..60a772450ea3 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
@@ -12,15 +12,12 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] {
   def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem]
   def contains[E >: Elem <: AnyRef](x: E): Boolean
   def foreach(f: Elem => Unit): Unit
-  def toList: List[Elem] = {
-    val buf = new ListBuffer[Elem]
-    foreach(buf += _)
-    buf.toList
-  }
+  def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A
+  def toList: List[Elem]
   def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
-    ((this: SimpleIdentitySet[E]) /: that.toList)(_ + _)
+    ((this: SimpleIdentitySet[E]) /: that)(_ + _)
   def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[Elem] =
-    (this /: that.toList)(_ - _)
+    (this /: that)(_ - _)
   override def toString: String = toList.mkString("(", ", ", ")")
 }
 
@@ -33,6 +30,8 @@ object SimpleIdentitySet {
       this
     def contains[E <: AnyRef](x: E): Boolean = false
     def foreach(f: Nothing => Unit): Unit = ()
+    def /: [A, E <: AnyRef](z: A)(f: (A, E) => A): A = z
+    def toList = Nil
   }
 
   private class Set1[+Elem <: AnyRef](x0: AnyRef) extends SimpleIdentitySet[Elem] {
@@ -43,6 +42,9 @@ object SimpleIdentitySet {
       if (x `eq` x0) empty else this
     def contains[E >: Elem <: AnyRef](x: E): Boolean = x `eq` x0
     def foreach(f: Elem => Unit): Unit = f(x0.asInstanceOf[Elem])
+    def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
+      f(z, x0.asInstanceOf[E])
+    def toList = x0.asInstanceOf[Elem] :: Nil
   }
 
   private class Set2[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef) extends SimpleIdentitySet[Elem] {
@@ -55,6 +57,9 @@ object SimpleIdentitySet {
       else this
     def contains[E >: Elem <: AnyRef](x: E): Boolean = (x `eq` x0) || (x `eq` x1)
     def foreach(f: Elem => Unit): Unit = { f(x0.asInstanceOf[Elem]); f(x1.asInstanceOf[Elem]) }
+    def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
+      f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E])
+    def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: Nil
   }
 
   private class Set3[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef, x2: AnyRef) extends SimpleIdentitySet[Elem] {
@@ -78,6 +83,9 @@ object SimpleIdentitySet {
     def foreach(f: Elem => Unit): Unit = {
       f(x0.asInstanceOf[Elem]); f(x1.asInstanceOf[Elem]); f(x2.asInstanceOf[Elem])
     }
+    def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
+      f(f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]), x2.asInstanceOf[E])
+    def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: x2.asInstanceOf[Elem] :: Nil
   }
 
   private class SetN[+Elem <: AnyRef](xs: Array[AnyRef]) extends SimpleIdentitySet[Elem] {
@@ -115,5 +123,12 @@ object SimpleIdentitySet {
       var i = 0
       while (i < size) { f(xs(i).asInstanceOf[Elem]); i += 1 }
     }
+    def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
+      (z /: xs.asInstanceOf[Array[E]])(f)
+    def toList: List[Elem] = {
+      val buf = new ListBuffer[Elem]
+      foreach(buf += _)
+      buf.toList
+    }
   }
 }
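For illustration only (not part of the patch series): the point of adding `/:` above is that `++` and `--` can fold over the other set directly instead of first allocating `that.toList`. A minimal stand-alone sketch of that shape, using a hypothetical list-backed identity set (`TinyIdSet` is made up; only the fold-based `++`/`--` mirror the patch):

  // A made-up stand-in for SimpleIdentitySet, just to show the fold-based ops.
  final class TinyIdSet[A <: AnyRef](val elems: List[A]) {
    def contains(x: A): Boolean = elems.exists(_ eq x)          // identity-based membership
    def + (x: A): TinyIdSet[A] = if (contains(x)) this else new TinyIdSet(x :: elems)
    def - (x: A): TinyIdSet[A] = new TinyIdSet(elems.filterNot(_ eq x))
    def /: [B](z: B)(f: (B, A) => B): B = elems.foldLeft(z)(f)  // fold supplied by the set itself
    def ++ (that: TinyIdSet[A]): TinyIdSet[A] = (this /: that)(_ + _)  // no intermediate List
    def -- (that: TinyIdSet[A]): TinyIdSet[A] = (this /: that)(_ - _)
    override def toString: String = elems.reverse.mkString("(", ", ", ")")
  }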
From 35e631833752837a72d758c44a2238a3cca4d515 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Wed, 17 Apr 2019 19:05:34 +0200
Subject: [PATCH 2/7] Another optimization

---
 compiler/src/dotty/tools/dotc/typer/Inferencing.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
index 2c25272349ec..8507fcd19c6d 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -413,7 +413,7 @@ trait Inferencing { this: Typer =>
 
     val ownedVars = state.ownedVars
     if (ownedVars.size > 0) {
-      val qualifying = ownedVars -- locked
+      val qualifying = if (locked.isEmpty) ownedVars else ownedVars -- locked
       if (!qualifying.isEmpty) {
         typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
         val resultAlreadyConstrained =

From f72522ca7a30371ef5009dc4fe0fddca2630088f Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Thu, 18 Apr 2019 10:35:58 +0200
Subject: [PATCH 3/7] Avoid constructing intermediate sets in
 interpolateTypeVars

---
 .../dotty/tools/dotc/typer/Inferencing.scala  | 99 +++++++++----------
 .../tools/dotc/util/SimpleIdentitySet.scala   | 16 ++-
 2 files changed, 61 insertions(+), 54 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
index 8507fcd19c6d..3e138ca5ea5c 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -412,58 +412,55 @@ trait Inferencing { this: Typer =>
       // `qualifying`.
 
     val ownedVars = state.ownedVars
-    if (ownedVars.size > 0) {
-      val qualifying = if (locked.isEmpty) ownedVars else ownedVars -- locked
-      if (!qualifying.isEmpty) {
-        typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
-        val resultAlreadyConstrained =
-          tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly]
-        if (!resultAlreadyConstrained)
-          constrainResult(tree.symbol, tree.tpe, pt)
-            // This is needed because it could establish singleton type upper bounds. See i2998.scala.
-
-        val tp = tree.tpe.widen
-        val vs = variances(tp)
-
-        // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors.
-        // Reason: The errors might reflect unsatisfiable constraints. In that
-        // case interpolating without taking account the constraints risks producing
-        // nonsensical types that then in turn produce incomprehensible errors.
-        // An example is in neg/i1240.scala. Without the condition in the next code line
-        // we get for
-        //
-        //     val y: List[List[String]] = List(List(1))
-        //
-        //     i1430.scala:5: error: type mismatch:
-        //     found   : Int(1)
-        //     required: Nothing
-        //     val y: List[List[String]] = List(List(1))
-        //                                          ^
-        // With the condition, we get the much more sensical:
-        //
-        //     i1430.scala:5: error: type mismatch:
-        //     found   : Int(1)
-        //     required: String
-        //     val y: List[List[String]] = List(List(1))
-        val hasUnreportedErrors = state.reporter.hasUnreportedErrors
-        def constraint = state.constraint
-        for (tvar <- qualifying)
-          if (!tvar.isInstantiated && state.constraint.contains(tvar)) {
-            // Needs to be checked again, since previous interpolations could already have
-            // instantiated `tvar` through unification.
-            val v = vs(tvar)
-            if (v == null) {
-              typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint")
-              tvar.instantiate(fromBelow = tvar.hasLowerBound)
-            }
-            else if (!hasUnreportedErrors)
-              if (v.intValue != 0) {
-                typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
-                tvar.instantiate(fromBelow = v.intValue == 1)
+    if (ownedVars.size > 0 && (locked.size == 0 || ownedVars.exists(!locked.contains(_)))) {
+      typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
+      val resultAlreadyConstrained =
+        tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly]
+      if (!resultAlreadyConstrained)
+        constrainResult(tree.symbol, tree.tpe, pt)
+          // This is needed because it could establish singleton type upper bounds. See i2998.scala.
+
+      val tp = tree.tpe.widen
+      val vs = variances(tp)
+
+      // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors.
+      // Reason: The errors might reflect unsatisfiable constraints. In that
+      // case interpolating without taking account the constraints risks producing
+      // nonsensical types that then in turn produce incomprehensible errors.
+      // An example is in neg/i1240.scala. Without the condition in the next code line
+      // we get for
+      //
+      //     val y: List[List[String]] = List(List(1))
+      //
+      //     i1430.scala:5: error: type mismatch:
+      //     found   : Int(1)
+      //     required: Nothing
+      //     val y: List[List[String]] = List(List(1))
+      //                                          ^
+      // With the condition, we get the much more sensical:
+      //
+      //     i1430.scala:5: error: type mismatch:
+      //     found   : Int(1)
+      //     required: String
+      //     val y: List[List[String]] = List(List(1))
+      val hasUnreportedErrors = state.reporter.hasUnreportedErrors
+      def constraint = state.constraint
+      for (tvar <- ownedVars)
+        if (!locked.contains(tvar) && !tvar.isInstantiated && state.constraint.contains(tvar)) {
+          // Needs to be checked again, since previous interpolations could already have
+          // instantiated `tvar` through unification.
+          val v = vs(tvar)
+          if (v == null) {
+            typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint")
+            tvar.instantiate(fromBelow = tvar.hasLowerBound)
           }
-            else typr.println(i"no interpolation for nonvariant $tvar in $state")
-          }
-      }
+          else if (!hasUnreportedErrors)
+            if (v.intValue != 0) {
+              typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
+              tvar.instantiate(fromBelow = v.intValue == 1)
+            }
+            else typr.println(i"no interpolation for nonvariant $tvar in $state")
+        }
     }
     tree
   }
diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
index 60a772450ea3..95b0c2293831 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
@@ -7,16 +7,17 @@ import collection.mutable.ListBuffer
  */
 abstract class SimpleIdentitySet[+Elem <: AnyRef] {
   def size: Int
-  def isEmpty: Boolean = size == 0
+  final def isEmpty: Boolean = size == 0
   def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E]
   def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem]
   def contains[E >: Elem <: AnyRef](x: E): Boolean
   def foreach(f: Elem => Unit): Unit
+  def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean
   def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A
   def toList: List[Elem]
-  def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+  final def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     ((this: SimpleIdentitySet[E]) /: that)(_ + _)
-  def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[Elem] =
+  final def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[Elem] =
     (this /: that)(_ - _)
   override def toString: String = toList.mkString("(", ", ", ")")
 }
@@ -30,6 +31,7 @@ object SimpleIdentitySet {
       this
     def contains[E <: AnyRef](x: E): Boolean = false
     def foreach(f: Nothing => Unit): Unit = ()
+    def exists[E <: AnyRef](p: E => Boolean): Boolean = false
     def /: [A, E <: AnyRef](z: A)(f: (A, E) => A): A = z
     def toList = Nil
   }
@@ -42,6 +44,8 @@ object SimpleIdentitySet {
       if (x `eq` x0) empty else this
     def contains[E >: Elem <: AnyRef](x: E): Boolean = x `eq` x0
     def foreach(f: Elem => Unit): Unit = f(x0.asInstanceOf[Elem])
+    def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
+      p(x0.asInstanceOf[E])
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       f(z, x0.asInstanceOf[E])
     def toList = x0.asInstanceOf[Elem] :: Nil
@@ -57,6 +61,8 @@ object SimpleIdentitySet {
       else this
     def contains[E >: Elem <: AnyRef](x: E): Boolean = (x `eq` x0) || (x `eq` x1)
     def foreach(f: Elem => Unit): Unit = { f(x0.asInstanceOf[Elem]); f(x1.asInstanceOf[Elem]) }
+    def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
+      p(x0.asInstanceOf[E]) || p(x1.asInstanceOf[E])
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E])
     def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: Nil
@@ -83,6 +89,8 @@ object SimpleIdentitySet {
     def foreach(f: Elem => Unit): Unit = {
       f(x0.asInstanceOf[Elem]); f(x1.asInstanceOf[Elem]); f(x2.asInstanceOf[Elem])
     }
+    def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
+      p(x0.asInstanceOf[E]) || p(x1.asInstanceOf[E]) || p(x2.asInstanceOf[E])
    def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       f(f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]), x2.asInstanceOf[E])
     def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: x2.asInstanceOf[Elem] :: Nil
@@ -123,6 +131,8 @@ object SimpleIdentitySet {
       var i = 0
       while (i < size) { f(xs(i).asInstanceOf[Elem]); i += 1 }
     }
+    def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
+      xs.asInstanceOf[Array[E]].exists(p)
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       (z /: xs.asInstanceOf[Array[E]])(f)
     def toList: List[Elem] = {
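For illustration only (standard collections, hypothetical names; not compiler types): the rewrite of interpolateTypeVars in this patch visits the same type variables as before, it just tests `locked` membership per element instead of materializing `ownedVars -- locked` first, with `exists` providing the cheap early-exit guard:

  object DiffLoopSketch {
    def main(args: Array[String]): Unit = {
      val owned  = List("a", "b", "c", "d")
      val locked = Set("b", "d")
      // old shape: build the difference set `qualifying`, then loop over it
      val viaDifference = owned.filterNot(locked)
      // new shape: loop over `owned`, skipping locked elements (guarded by an `exists` check)
      val viaGuard =
        if (owned.exists(x => !locked(x))) owned.filter(x => !locked(x)) else Nil
      assert(viaDifference == viaGuard)   // same elements, no intermediate set
    }
  }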
From 85f79cb96afd336be24b21ae8b7102a7d6dfa9a6 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Thu, 18 Apr 2019 12:57:00 +0200
Subject: [PATCH 4/7] Optimize SimpleIdentitySet some more

- drop -- since it is not used anymore.
- optimize ++ by special casing empty sets and large, array-backed sets.
---
 .../tools/dotc/util/SimpleIdentitySet.scala   | 43 ++++++++++++++++---
 1 file changed, 37 insertions(+), 6 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
index 95b0c2293831..6c6fc378458a 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
@@ -1,6 +1,6 @@
 package dotty.tools.dotc.util
 
-import collection.mutable.ListBuffer
+import collection.mutable
 
 /** A simple linked set with `eq` as the comparison, optimized for small sets.
  *  It has linear complexity for `contains`, `+`, and `-`.
@@ -15,10 +15,13 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] {
   def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean
   def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A
   def toList: List[Elem]
-  final def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+  def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] = {
+    if (this.size == 0) that
+    else if (that.size == 0) this
+    else concat(that)
+  }
+  protected def concat[E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     ((this: SimpleIdentitySet[E]) /: that)(_ + _)
-  final def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[Elem] =
-    (this /: that)(_ - _)
   override def toString: String = toList.mkString("(", ", ", ")")
 }
 
@@ -96,7 +99,7 @@ object SimpleIdentitySet {
     def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: x2.asInstanceOf[Elem] :: Nil
   }
 
-  private class SetN[+Elem <: AnyRef](xs: Array[AnyRef]) extends SimpleIdentitySet[Elem] {
+  private class SetN[+Elem <: AnyRef](val xs: Array[AnyRef]) extends SimpleIdentitySet[Elem] {
     def size = xs.length
     def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] =
       if (contains(x)) this
@@ -136,9 +139,37 @@ object SimpleIdentitySet {
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       (z /: xs.asInstanceOf[Array[E]])(f)
     def toList: List[Elem] = {
-      val buf = new ListBuffer[Elem]
+      val buf = new mutable.ListBuffer[Elem]
       foreach(buf += _)
       buf.toList
     }
+    override def concat [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+      that match {
+        case that: SetN[_] =>
+          var toAdd: mutable.ArrayBuffer[AnyRef] = null
+          var i = 0
+          val limit = that.xs.length
+          while (i < limit) {
+            val elem = that.xs(i)
+            if (!contains(elem)) {
+              if (toAdd == null) toAdd = new mutable.ArrayBuffer
+              toAdd += elem
+            }
+            i += 1
+          }
+          if (toAdd == null) this
+          else {
+            val numAdded = toAdd.size
+            val xs1 = new Array[AnyRef](size + numAdded)
+            System.arraycopy(xs, 0, xs1, 0, size)
+            var i = 0
+            while (i < numAdded) {
+              xs1(i + size) = toAdd(i)
+              i += 1
+            }
+            new SetN[E](xs1)
+          }
+        case _ => super.concat(that)
+      }
   }
 }
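For illustration only (plain arrays and reference equality; not the compiler class): the SetN special case above collects the elements of `that` that are missing from `this` and then does a single arraycopy. Roughly:

  def unionByIdentity(xs: Array[AnyRef], ys: Array[AnyRef]): Array[AnyRef] = {
    val toAdd = new scala.collection.mutable.ArrayBuffer[AnyRef]
    var i = 0
    while (i < ys.length) {               // for each element of the other set
      val y = ys(i)
      var j = 0
      var found = false
      while (!found && j < xs.length) {   // linear identity scan, like SetN.contains
        found = xs(j) eq y
        j += 1
      }
      if (!found) toAdd += y
      i += 1
    }
    if (toAdd.isEmpty) xs                 // nothing new to add: reuse the left operand
    else {
      val res = new Array[AnyRef](xs.length + toAdd.size)
      System.arraycopy(xs, 0, res, 0, xs.length)
      var k = 0
      while (k < toAdd.size) { res(xs.length + k) = toAdd(k); k += 1 }
      res
    }
  }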
From ad2776ccf008863fbc5b84387cc08c7852a4e506 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Thu, 18 Apr 2019 17:53:08 +0200
Subject: [PATCH 5/7] More optimizations

Bring back -- but optimize it for the case where the sets are similar.
---
 .../dotty/tools/dotc/typer/Inferencing.scala  | 99 ++++++++++---------
 .../tools/dotc/util/SimpleIdentitySet.scala   | 47 ++++++++-
 2 files changed, 95 insertions(+), 51 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
index 3e138ca5ea5c..eb0674802cf6 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -412,55 +412,58 @@ trait Inferencing { this: Typer =>
       // `qualifying`.
 
     val ownedVars = state.ownedVars
-    if (ownedVars.size > 0 && (locked.size == 0 || ownedVars.exists(!locked.contains(_)))) {
-      typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
-      val resultAlreadyConstrained =
-        tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly]
-      if (!resultAlreadyConstrained)
-        constrainResult(tree.symbol, tree.tpe, pt)
-          // This is needed because it could establish singleton type upper bounds. See i2998.scala.
-
-      val tp = tree.tpe.widen
-      val vs = variances(tp)
-
-      // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors.
-      // Reason: The errors might reflect unsatisfiable constraints. In that
-      // case interpolating without taking account the constraints risks producing
-      // nonsensical types that then in turn produce incomprehensible errors.
-      // An example is in neg/i1240.scala. Without the condition in the next code line
-      // we get for
-      //
-      //     val y: List[List[String]] = List(List(1))
-      //
-      //     i1430.scala:5: error: type mismatch:
-      //     found   : Int(1)
-      //     required: Nothing
-      //     val y: List[List[String]] = List(List(1))
-      //                                          ^
-      // With the condition, we get the much more sensical:
-      //
-      //     i1430.scala:5: error: type mismatch:
-      //     found   : Int(1)
-      //     required: String
-      //     val y: List[List[String]] = List(List(1))
-      val hasUnreportedErrors = state.reporter.hasUnreportedErrors
-      def constraint = state.constraint
-      for (tvar <- ownedVars)
-        if (!locked.contains(tvar) && !tvar.isInstantiated && state.constraint.contains(tvar)) {
-          // Needs to be checked again, since previous interpolations could already have
-          // instantiated `tvar` through unification.
-          val v = vs(tvar)
-          if (v == null) {
-            typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint")
-            tvar.instantiate(fromBelow = tvar.hasLowerBound)
-          }
-          else if (!hasUnreportedErrors)
-            if (v.intValue != 0) {
-              typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
-              tvar.instantiate(fromBelow = v.intValue == 1)
+    if ((ownedVars ne locked) && !ownedVars.isEmpty) {
+      val qualifying = ownedVars -- locked
+      if (!qualifying.isEmpty) {
+        typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
+        val resultAlreadyConstrained =
+          tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly]
+        if (!resultAlreadyConstrained)
+          constrainResult(tree.symbol, tree.tpe, pt)
+            // This is needed because it could establish singleton type upper bounds. See i2998.scala.
+
+        val tp = tree.tpe.widen
+        val vs = variances(tp)
+
+        // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors.
+        // Reason: The errors might reflect unsatisfiable constraints. In that
+        // case interpolating without taking account the constraints risks producing
+        // nonsensical types that then in turn produce incomprehensible errors.
+        // An example is in neg/i1240.scala. Without the condition in the next code line
+        // we get for
+        //
+        //     val y: List[List[String]] = List(List(1))
+        //
+        //     i1430.scala:5: error: type mismatch:
+        //     found   : Int(1)
+        //     required: Nothing
+        //     val y: List[List[String]] = List(List(1))
+        //                                          ^
+        // With the condition, we get the much more sensical:
+        //
+        //     i1430.scala:5: error: type mismatch:
+        //     found   : Int(1)
+        //     required: String
+        //     val y: List[List[String]] = List(List(1))
+        val hasUnreportedErrors = state.reporter.hasUnreportedErrors
+        def constraint = state.constraint
+        for (tvar <- qualifying)
+          if (!tvar.isInstantiated && state.constraint.contains(tvar)) {
+            // Needs to be checked again, since previous interpolations could already have
+            // instantiated `tvar` through unification.
+            val v = vs(tvar)
+            if (v == null) {
+              typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint")
+              tvar.instantiate(fromBelow = tvar.hasLowerBound)
             }
-            else typr.println(i"no interpolation for nonvariant $tvar in $state")
-        }
+            else if (!hasUnreportedErrors)
+              if (v.intValue != 0) {
+                typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
+                tvar.instantiate(fromBelow = v.intValue == 1)
+              }
+              else typr.println(i"no interpolation for nonvariant $tvar in $state")
+          }
+      }
     }
     tree
   }
diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
index 6c6fc378458a..5ec20839790b 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
@@ -15,11 +15,16 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] {
   def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean
   def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A
   def toList: List[Elem]
-  def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] = {
+  def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     if (this.size == 0) that
     else if (that.size == 0) this
    else concat(that)
-  }
+  def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+    if (that.size == 0) this
+    else
+      ((SimpleIdentitySet.empty: SimpleIdentitySet[E]) /: this) { (s, x) =>
+        if (that.contains(x)) s else s + x
+      }
   protected def concat[E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     ((this: SimpleIdentitySet[E]) /: that)(_ + _)
   override def toString: String = toList.mkString("(", ", ", ")")
@@ -143,7 +148,7 @@ object SimpleIdentitySet {
       foreach(buf += _)
       buf.toList
     }
-    override def concat [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+    override def concat[E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
       that match {
         case that: SetN[_] =>
           var toAdd: mutable.ArrayBuffer[AnyRef] = null
@@ -171,5 +176,41 @@ object SimpleIdentitySet {
           }
         case _ => super.concat(that)
       }
+    override def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+      that match {
+        case that: SetN[_] =>
+          // both sets are large, optimize assuming they are similar
+          // by starting from empty set and adding elements
+          var toAdd: mutable.ArrayBuffer[AnyRef] = null
+          val thisSize = this.size
+          val thatSize = that.size
+          val thatElems = that.xs
+          var i = 0
+          var searchStart = 0
+          while (i < thisSize) {
+            val elem = this.xs(i)
+            var j = searchStart // search thatElems in round robin fashion, starting one after latest hit
+            var missing = false
+            while (!missing && elem != thatElems(j)) {
+              j = (j + 1) % thatSize
+              missing = j == searchStart
+            }
+            if (missing) {
+              if (toAdd == null) toAdd = new mutable.ArrayBuffer
+              toAdd += elem
+            }
+            else searchStart = (j + 1) % thatSize
+            i += 1
+          }
+          if (toAdd == null) empty
+          else toAdd.size match {
+            case 1 => new Set1[E](toAdd(0))
+            case 2 => new Set2[E](toAdd(0), toAdd(1))
+            case 3 => new Set3[E](toAdd(0), toAdd(1), toAdd(2))
+            case _ => new SetN[E](toAdd.toArray)
+          }
+        case _ => // this set is large, that set is small: reduce from above using `-`
+          ((this: SimpleIdentitySet[E]) /: that)(_ - _)
+      }
   }
 }
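For illustration only (plain arrays; assumes the other array is non-empty, as a SetN always is): the new SetN.-- above walks `this` once and looks each element up in `that` starting one slot after the previous hit, so two sets listing the same elements in the same order are compared in roughly linear time. A stand-alone sketch of that search, written with the reference-equality test and wrap-around that patch 7 below settles on:

  def missingFrom(xs: Array[AnyRef], that: Array[AnyRef]): List[AnyRef] = {
    var missingElems = List.empty[AnyRef]
    var searchStart = 0
    for (elem <- xs) {
      var j = searchStart
      var notFound = false
      while (!notFound && (elem ne that(j))) {
        j += 1
        if (j == that.length) j = 0        // wrap around
        notFound = j == searchStart        // scanned the whole array without a hit
      }
      if (notFound) missingElems ::= elem
      else searchStart = (j + 1) % that.length
    }
    missingElems.reverse
  }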
From 792714bbf473e7f6305862e12343167ddb352abd Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Fri, 19 Apr 2019 14:56:58 +0200
Subject: [PATCH 6/7] Drop concat as separate method in SimpleIdentitySet

---
 .../src/dotty/tools/dotc/util/SimpleIdentitySet.scala | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
index 5ec20839790b..b73f1e47fc1c 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
@@ -18,15 +18,13 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] {
   def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     if (this.size == 0) that
     else if (that.size == 0) this
-    else concat(that)
+    else ((this: SimpleIdentitySet[E]) /: that)(_ + _)
   def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     if (that.size == 0) this
     else
       ((SimpleIdentitySet.empty: SimpleIdentitySet[E]) /: this) { (s, x) =>
         if (that.contains(x)) s else s + x
       }
-  protected def concat[E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
-    ((this: SimpleIdentitySet[E]) /: that)(_ + _)
   override def toString: String = toList.mkString("(", ", ", ")")
 }
 
@@ -148,7 +146,7 @@ object SimpleIdentitySet {
       foreach(buf += _)
       buf.toList
     }
-    override def concat[E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+    override def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
       that match {
         case that: SetN[_] =>
           var toAdd: mutable.ArrayBuffer[AnyRef] = null
@@ -174,7 +172,7 @@ object SimpleIdentitySet {
             }
             new SetN[E](xs1)
           }
-        case _ => super.concat(that)
+        case _ => super.++(that)
       }
     override def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
       that match {

From 4819f278f7f9eae6d23d0a7dcdad5ff765106517 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Fri, 19 Apr 2019 14:59:25 +0200
Subject: [PATCH 7/7] Address review comments

---
 compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
index b73f1e47fc1c..e71e1ce8a65d 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
@@ -189,8 +189,9 @@ object SimpleIdentitySet {
           val elem = this.xs(i)
           var j = searchStart // search thatElems in round robin fashion, starting one after latest hit
           var missing = false
-          while (!missing && elem != thatElems(j)) {
-            j = (j + 1) % thatSize
+          while (!missing && (elem ne thatElems(j))) {
+            j += 1
+            if (j == thatSize) j = 0
             missing = j == searchStart
           }
           if (missing) {
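For illustration only (hypothetical values): the switch from `!=` to `ne` in this last patch matters because SimpleIdentitySet is identity-based, so element comparison must use reference equality (`eq`/`ne`) rather than `==`; two equal but distinct objects are different elements:

  object EqVsEqualsSketch {
    def main(args: Array[String]): Unit = {
      val a = new String("tvar")   // two distinct objects that happen to be ==
      val b = new String("tvar")
      assert(a == b)               // value equality holds
      assert(!(a eq b))            // reference equality does not: an identity set keeps both
    }
  }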