From ef5c96d4c831ddb719f205a71ff882bbd1ef6e4f Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sat, 6 Apr 2019 16:33:19 +0200 Subject: [PATCH 1/3] Fix type lambda reductions involving refinements Given: type F[X] = Foo[X] We've always been able to reduce `F[_]` to `Foo[_]` if `Foo` is a class. This commit does something similar for refinements, given: type G[X] = Bar { type R = X } We can reduce `G[_]` to `Bar { type R }` (previously we reduced it to `Bar { type R = Any }` which is incorrect). --- .../src/dotty/tools/dotc/core/TypeApplications.scala | 2 ++ tests/pos/hkRefAlias.scala | 10 ++++++++++ 2 files changed, 12 insertions(+) create mode 100644 tests/pos/hkRefAlias.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 45c5ce8586bc..85c24a7dbe10 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -129,6 +129,8 @@ object TypeApplications { def apply(t: Type): Type = t match { case t @ AppliedType(tycon, args1) if tycon.typeSymbol.isClass => t.derivedAppliedType(apply(tycon), args1.mapConserve(applyArg)) + case t @ RefinedType(parent, name, TypeAlias(info)) => + t.derivedRefinedType(apply(parent), name, applyArg(info).bounds) case p: TypeParamRef if p.binder == tycon => args(p.paramNum) match { case TypeBounds(lo, hi) => diff --git a/tests/pos/hkRefAlias.scala b/tests/pos/hkRefAlias.scala new file mode 100644 index 000000000000..58ec787def1c --- /dev/null +++ b/tests/pos/hkRefAlias.scala @@ -0,0 +1,10 @@ +class Bar +class X +class Y extends X + +object Test { + type G[X] = Bar { type R = X } + + implicitly[G[_] =:= (Bar { type R })] + implicitly[G[_ >: Y <: X] =:= (Bar { type R >: Y <: X })] +} From 2f13c2abfc3e6a507fa8d74cdbb31db250c830bc Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sat, 6 Apr 2019 13:05:15 +0200 Subject: [PATCH 2/3] Fix #6238: Strip WildcardType when 
applying arguments I'm not entirely sure why this fixes the issue but the previous behavior seems dubious to me: what's the meaning of applying a WildcardType argument to a type lambda ? By contrast, applying a TypeBounds has a well-defined behavior (`appliedTo` will call `TypeApplication#Reducer` to get rid of the type lambda). --- .../dotty/tools/dotc/typer/ProtoTypes.scala | 6 +- tests/pos/i6238.scala | 121 ++++++++++++++++++ 2 files changed, 126 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i6238.scala diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 7e6e801c357f..f7e431325320 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -586,10 +586,14 @@ object ProtoTypes { else if (tp.symbol.isStatic || (tp.prefix `eq` NoPrefix)) tp else tp.derivedSelect(wildApprox(tp.prefix, theMap, seen)) case tp @ AppliedType(tycon, args) => + def wildToBounds(tp: Type) = tp match { + case WildcardType(tp: TypeBounds) => tp + case tp => tp + } wildApprox(tycon, theMap, seen) match { case _: WildcardType => WildcardType // this ensures we get a * type case tycon1 => tp.derivedAppliedType(tycon1, - args.mapConserve(arg => wildApprox(arg, theMap, seen))) + args.mapConserve(arg => wildToBounds(wildApprox(arg, theMap, seen)))) } case tp: RefinedType => // default case, inlined for speed tp.derivedRefinedType( diff --git a/tests/pos/i6238.scala b/tests/pos/i6238.scala new file mode 100644 index 000000000000..9cd6d56d95ed --- /dev/null +++ b/tests/pos/i6238.scala @@ -0,0 +1,121 @@ +object K1 { + class Foo[T] + + class Bar[F[_]] + object Bar { + implicit def barF[F[_]](implicit fooF: Foo[Bar[F]]): Bar[F] = null + } + + class A[T] + object A { + implicit def fooA[F[_[_]]](implicit barB: F[B]): Foo[F[A]] = null + } + + class B[T] + object B { + implicit def fooB[F[_[_]]]: Foo[F[B]] = null + } +} + +object K1U { + class Foo[T] + 
+ class Bar[F[_ <: Int]] + object Bar { + implicit def barF[F[_ <: Int]](implicit fooF: Foo[Bar[F]]): Bar[F] = null + } + + class A[T <: Int] + object A { + implicit def fooA[F[_[_ <: Int]]](implicit barB: F[B]): Foo[F[A]] = null + } + + class B[T <: Int] + object B { + implicit def fooB[F[_[_ <: Int]]]: Foo[F[B]] = null + } +} + +object K1L { + class Foo[T] + + class Bar[F[_ >: Int]] + object Bar { + implicit def barF[F[_ >: Int]](implicit fooF: Foo[Bar[F]]): Bar[F] = null + } + + class A[T >: Int] + object A { + implicit def fooA[F[_[_ >: Int]]](implicit barB: F[B]): Foo[F[A]] = null + } + + class B[T >: Int] + object B { + implicit def fooB[F[_[_ >: Int]]]: Foo[F[B]] = null + } +} + +object K11 { + class Foo[T] + + class Bar[F[_[_]]] + object Bar { + implicit def barF[F[_[_]]](implicit fooF: Foo[Bar[F]]): Bar[F] = null + } + + class A[T[_]] + object A { + implicit def fooA[F[_[_[_]]]](implicit barB: F[B]): Foo[F[A]] = null + } + + class B[T[_]] + object B { + implicit def fooB[F[_[_[_]]]]: Foo[F[B]] = null + } +} + +object K2 { + class Foo[T] + + class Bar[F[_, _]] + object Bar { + implicit def barF[F[_, _]](implicit fooF: Foo[Bar[F]]): Bar[F] = null + } + + class A[T, U] + object A { + implicit def fooA[F[_[_, _]]](implicit barB: F[B]): Foo[F[A]] = null + } + + class B[T, U] + object B { + implicit def fooB[F[_[_, _]]]: Foo[F[B]] = null + } +} + +object Test { + { + import K1._ + implicitly[Bar[A]] + } + + { + import K1U._ + implicitly[Bar[A]] + } + + { + import K1L._ + implicitly[Bar[A]] + } + + { + import K11._ + implicitly[Bar[A]] + } + + { + import K2._ + implicitly[Bar[A]] + } +} From b111fdfc8848fb599aa4194c55d389426255561d Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sat, 6 Apr 2019 17:02:41 +0200 Subject: [PATCH 3/3] filterConserve: Reduce the chances of stack overflow Observed when running the pos tests after the previous commit, the test suite didn't say which test caused the stackoverflow, but the stack trace ended with: at 
dotty.tools.dotc.core.Decorators$ListDecorator$.loop$2(Decorators.scala:90) at dotty.tools.dotc.core.Decorators$ListDecorator$.loop$2(Decorators.scala:90) at dotty.tools.dotc.core.Decorators$ListDecorator$.filterConserve$extension(Decorators.scala:97) at dotty.tools.dotc.core.OrderingConstraint.$anonfun$remove$1(OrderingConstraint.scala:453) at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237) at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36) at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33) at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198) at scala.collection.TraversableLike.map(TraversableLike.scala:237) at scala.collection.TraversableLike.map$(TraversableLike.scala:230) at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:198) at dotty.tools.dotc.core.OrderingConstraint.removeFromBoundss$1(OrderingConstraint.scala:453) at dotty.tools.dotc.core.OrderingConstraint.$anonfun$remove$4(OrderingConstraint.scala:456) at dotty.tools.dotc.util.SimpleIdentityMap$MapMore.mapValuesNow(SimpleIdentityMap.scala:209) at dotty.tools.dotc.core.OrderingConstraint.removeFromOrdering$1(OrderingConstraint.scala:456) at dotty.tools.dotc.core.OrderingConstraint.remove(OrderingConstraint.scala:458) at dotty.tools.dotc.core.OrderingConstraint.replace(OrderingConstraint.scala:440) at dotty.tools.dotc.core.OrderingConstraint.replace(OrderingConstraint.scala:132) at dotty.tools.dotc.core.Types$TypeVar.instantiateWith(Types.scala:3712) at dotty.tools.dotc.core.Types$TypeVar.instantiate(Types.scala:3724) at dotty.tools.dotc.typer.Inferencing.$anonfun$interpolateTypeVars$2(Inferencing.scala:450) ... 
--- compiler/src/dotty/tools/dotc/core/Decorators.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index ccf02262a4df..037104d5b864 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -44,7 +44,7 @@ object Decorators { } } - final val MaxFilterRecursions = 1000 + final val MaxFilterRecursions = 100 /** Implements filterConserve, zipWithConserve methods * on lists that avoid duplication of list nodes where feasible.