From 8e1f0da3a22fb282e7bbde728c488d417aff5358 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 11 Jul 2015 12:21:33 +0200 Subject: [PATCH 01/44] Clean up of logic in typeDefSig --- src/dotty/tools/dotc/typer/Namer.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 99119acb3275..7349b3198fa4 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -797,10 +797,10 @@ class Namer { typer: Typer => // // The scheme critically relies on an implementation detail of isRef, which // inspects a TypeRef's info, instead of simply dealiasing alias types. - val rhsType = typedAheadType(tdef.rhs).tpe + val rhsType = abstracted(typedAheadType(tdef.rhs).tpe) val unsafeInfo = rhsType match { - case _: TypeBounds => abstracted(rhsType).asInstanceOf[TypeBounds] - case _ => TypeAlias(abstracted(rhsType), if (sym is Local) sym.variance else 0) + case bounds: TypeBounds => bounds + case alias => TypeAlias(alias, if (sym is Local) sym.variance else 0) } sym.info = NoCompleter checkNonCyclic(sym, unsafeInfo, reportErrors = true) From e2e71dcbb2343d28be0f5311c1cb0094db8cdb05 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 12 Jul 2015 14:22:10 +0200 Subject: [PATCH 02/44] Avoid cyclic references in containsRefinedThis Avoid forcing info if the reference goes to a class. This avoided a CyclicReference when reading Scala's standard library form pos/collections when fiddling with the hk logic. --- src/dotty/tools/dotc/core/TypeApplications.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 617e4eb71222..f4f14101d18d 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -416,9 +416,10 @@ class TypeApplications(val self: Type) extends AnyVal { case RefinedThis(tp) => tp eq target case tp: NamedType => - tp.info match { + if (tp.symbol.isClass) !tp.symbol.isStatic && recur(tp.prefix) + else tp.info match { case TypeAlias(alias) => recur(alias) - case _ => !tp.symbol.isStatic && recur(tp.prefix) + case _ => recur(tp.prefix) } case tp: RefinedType => recur(tp.refinedInfo) || recur(tp.parent) From 92fe081bcdf7f02cd65350463db2d3d4fa72f1eb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 11:37:24 +0200 Subject: [PATCH 03/44] Eta expand type arguments corresponding to lambdas In Namer, eta expand any type argument that corresponds to a higher-kinded type parameter. Also, check that all type parameter lists are fully applied. --- src/dotty/tools/dotc/typer/Namer.scala | 26 +++++++++++++++++++++++++- tests/pos/partialApplications.scala | 4 ++-- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 7349b3198fa4..9df928f12b44 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -803,6 +803,30 @@ class Namer { typer: Typer => case alias => TypeAlias(alias, if (sym is Local) sym.variance else 0) } sym.info = NoCompleter - checkNonCyclic(sym, unsafeInfo, reportErrors = true) + sym.info = checkNonCyclic(sym, unsafeInfo, reportErrors = true) + etaExpandArgs.apply(sym.info) + } + + /** Eta expand all class types C appearing as arguments to a higher-kinded + * type parameter to type lambdas, e.g. 
[HK0] => C[HK0] + */ + def etaExpandArgs(implicit ctx: Context) = new TypeMap { + def etaExpandArg(tp: Type, tparam: Symbol): Type = + if (tparam.info.isLambda && tp.typeSymbol.isClass && tp.isLambda) tp.EtaExpand + else tp + def apply(tp: Type) = tp match { + case tp: RefinedType => + val args = tp.argInfos(interpolate = false).mapconserve(this) + if (args.nonEmpty) { + val tycon = tp.withoutArgs(args) + val tparams = tycon.typeParams + assert(args.length == tparams.length, + i"lengths differ in $tp: args = $args%, %, type params of $tycon = $tparams%, %") + this(tycon).appliedTo(args.zipWithConserve(tparams)(etaExpandArg)) + } + else mapOver(tp) + case _ => + mapOver(tp) + } } } diff --git a/tests/pos/partialApplications.scala b/tests/pos/partialApplications.scala index b68c4b945152..c1df1dee2035 100644 --- a/tests/pos/partialApplications.scala +++ b/tests/pos/partialApplications.scala @@ -1,8 +1,8 @@ object Test { - type Histogram = Map[_, Int] + type Histogram[X] = Map[X, Int] - type StringlyHistogram = Histogram[_ >: String] + type StringlyHistogram[X >: String] = Histogram[X] val xs: Histogram[String] = Map[String, Int]() From 3eb114351520b16028f9fbcd7a2fcdd6c7e0f5c7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 12:35:16 +0200 Subject: [PATCH 04/44] Check argument lengths in typedAppliedTypeTree With the hk-types schem changed, we need to make sure that actual and formal argument lists of parameterized types have the same length. --- src/dotty/tools/dotc/typer/Typer.scala | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index a2c49cdd9751..58c01753486e 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -830,8 +830,18 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit val argPts = if (ctx.mode is Mode.Pattern) tpt1.tpe.typeParams.map(_.info) else tree.args.map(_ => WildcardType) - val args1 = tree.args.zipWithConserve(argPts)(typed(_, _)).asInstanceOf[List[Tree]] - // check that arguments conform to bounds is done in phase FirstTransform + val tparams = tpt1.tpe.typeParams + var args = tree.args + if (tparams.isEmpty) { + ctx.error(d"${tpt1.tpe} does not take type parameters") + tpt1 + } + else if (args.length != tparams.length) { + ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) + args = args.take(tparams.length) + } + val args1 = args.zipWithConserve(argPts)(typed(_, _)).asInstanceOf[List[Tree]] + // check that arguments conform to bounds is done in phase PostTyper assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) } From b10b93e6f8bdee650ccbe588db476bd0c8c8819c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 12:38:05 +0200 Subject: [PATCH 05/44] Better diagnostics for failed sigName calls. It used to be "assertion error: NoType" whenever sigName git a projection of a missing member. Now we find out about what type was projected. 
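Not part of the patch series: a source-level sketch of the situation that the eta expansion of type arguments (PATCH 03) and the argument-arity check (PATCH 04) above are about. The names HKDemo, Functor and listFunctor are invented for the illustration.

    import scala.language.higherKinds

    object HKDemo {
      trait Functor[F[_]] {                      // F ranges over type constructors of kind * -> *
        def map[A, B](fa: F[A])(f: A => B): F[B]
      }
      // `List` is a class type constructor; used as an argument for F[_] it is the
      // kind of argument the new logic eta expands to a type lambda internally.
      val listFunctor: Functor[List] = new Functor[List] {
        def map[A, B](fa: List[A])(f: A => B): List[B] = fa map f
      }
      // A mis-applied type such as Functor[List, Int] is now reported early as
      // "wrong number of type arguments" instead of surfacing later in the compiler.
    }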
--- src/dotty/tools/dotc/core/TypeErasure.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala index f27b2fd1ea5f..616e222e0b55 100644 --- a/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/src/dotty/tools/dotc/core/TypeErasure.scala @@ -445,7 +445,11 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean case tp: TypeRef => if (!tp.denot.exists) throw new MissingType(tp.prefix, tp.name) val sym = tp.symbol - if (!sym.isClass) sigName(tp.info) + if (!sym.isClass) { + val info = tp.info + if (!info.exists) assert(false, "undefined: $tp with symbol $sym") + sigName(info) + } else if (isDerivedValueClass(sym)) sigName(eraseDerivedValueClassRef(tp)) else normalizeClass(sym.asClass).fullName.asTypeName case defn.ArrayType(elem) => From 0bdee32e8b35b6c994248def87cb8f6f874572dd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 12:49:09 +0200 Subject: [PATCH 06/44] Black hole detection for LazyRefs Now catches attempts to recursively force a LazyRef type that's in train of being evaluated. --- src/dotty/tools/dotc/core/Types.scala | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 358720836787..6797836cfc16 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1815,7 +1815,16 @@ object Types { } case class LazyRef(refFn: () => Type) extends UncachedProxyType with ValueType { - lazy val ref = refFn() + private var myRef: Type = null + private var computed = false + lazy val ref = { + if (computed) assert(myRef != null) + else { + computed = true + myRef = refFn() + } + myRef + } override def underlying(implicit ctx: Context) = ref override def toString = s"LazyRef($ref)" override def equals(other: Any) = other match { From 0d95c76466012f9a7e6535ebba0620df4042f179 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 12:50:39 +0200 Subject: [PATCH 07/44] Turn assertion into a test in etaExpandArgs. Erreneous programs could have a difference in lengths between type parameters and type args, but this is tested anyway in Typer. 
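A standalone sketch (not compiler code) of the black-hole guard that the LazyRef change in PATCH 06 above introduces: re-entering the thunk while it is still being evaluated trips an assertion instead of failing in a less obvious way. Lazy and force are illustrative names.

    final class Lazy[A >: Null](thunk: () => A) {
      private var computed = false
      private var value: A = null
      def force: A = {
        if (computed) assert(value != null, "cyclic evaluation")  // black hole detected
        else {
          computed = true   // mark as under evaluation before running the thunk
          value = thunk()   // a recursive `force` from inside the thunk hits the assert
        }
        value
      }
    }
    // lazy val l: Lazy[String] = new Lazy(() => l.force)
    // l.force   // fails fast with "cyclic evaluation" instead of diverging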
--- src/dotty/tools/dotc/typer/Namer.scala | 28 ++++++++++++++------------ 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 9df928f12b44..8b204debb0ce 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -814,19 +814,21 @@ class Namer { typer: Typer => def etaExpandArg(tp: Type, tparam: Symbol): Type = if (tparam.info.isLambda && tp.typeSymbol.isClass && tp.isLambda) tp.EtaExpand else tp - def apply(tp: Type) = tp match { - case tp: RefinedType => - val args = tp.argInfos(interpolate = false).mapconserve(this) - if (args.nonEmpty) { - val tycon = tp.withoutArgs(args) - val tparams = tycon.typeParams - assert(args.length == tparams.length, - i"lengths differ in $tp: args = $args%, %, type params of $tycon = $tparams%, %") - this(tycon).appliedTo(args.zipWithConserve(tparams)(etaExpandArg)) - } - else mapOver(tp) - case _ => - mapOver(tp) + def apply(tp: Type): Type = { + tp match { + case tp: RefinedType => + val args = tp.argInfos(interpolate = false).mapconserve(this) + if (args.nonEmpty) { + val tycon = tp.withoutArgs(args) + val tparams = tycon.typeParams + if (args.length == tparams.length) { // if lengths differ, problem is caught in typedTypeApply + val args1 = args.zipWithConserve(tparams)(etaExpandArg) + if (args1 ne args) return this(tycon).appliedTo(args1) + } + } + case _ => + } + mapOver(tp) } } } From fdf8f867fa1a1d2493052b19bd50907f1b5d75e8 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 17:37:52 +0200 Subject: [PATCH 08/44] Fix argument eta expansion ... and move to TypeApplications. isLambda test was the wrong way before. --- src/dotty/tools/dotc/core/TypeApplications.scala | 3 +++ src/dotty/tools/dotc/typer/Namer.scala | 5 +---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index f4f14101d18d..6d3b7ac475b4 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -540,6 +540,9 @@ class TypeApplications(val self: Type) extends AnyVal { self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparams) } + def EtaExpandIfLambda(bound: Type)(implicit ctx: Context): Type = + if (bound.isLambda && self.typeSymbol.isClass && !isLambda) EtaExpand else self + /** Test whether this type has a base type of the form `B[T1, ..., Bn]` where * the type parameters of `B` match one-by-one the variances of `tparams`, * and where the lambda abstracted type diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 8b204debb0ce..eef0ce8b0319 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -811,9 +811,6 @@ class Namer { typer: Typer => * type parameter to type lambdas, e.g. 
[HK0] => C[HK0] */ def etaExpandArgs(implicit ctx: Context) = new TypeMap { - def etaExpandArg(tp: Type, tparam: Symbol): Type = - if (tparam.info.isLambda && tp.typeSymbol.isClass && tp.isLambda) tp.EtaExpand - else tp def apply(tp: Type): Type = { tp match { case tp: RefinedType => @@ -822,7 +819,7 @@ class Namer { typer: Typer => val tycon = tp.withoutArgs(args) val tparams = tycon.typeParams if (args.length == tparams.length) { // if lengths differ, problem is caught in typedTypeApply - val args1 = args.zipWithConserve(tparams)(etaExpandArg) + val args1 = args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfLambda(tparam.info)) if (args1 ne args) return this(tycon).appliedTo(args1) } } From 5f7eadf3f4d4798dec7c87c92a86c882948ac3de Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 19:53:05 +0200 Subject: [PATCH 09/44] Strenghten condition in EtaExpandIfLambda Without the additional `typeParams.nonEmpty` condition we got a crash in t1439.scala --- src/dotty/tools/dotc/core/TypeApplications.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 6d3b7ac475b4..a97441681123 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -540,8 +540,10 @@ class TypeApplications(val self: Type) extends AnyVal { self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparams) } + /** Eta expand if `bound` is a type lambda */ def EtaExpandIfLambda(bound: Type)(implicit ctx: Context): Type = - if (bound.isLambda && self.typeSymbol.isClass && !isLambda) EtaExpand else self + if (bound.isLambda && self.typeSymbol.isClass && typeParams.nonEmpty && !isLambda) EtaExpand + else self /** Test whether this type has a base type of the form `B[T1, ..., Bn]` where * the type parameters of `B` match one-by-one the variances of `tparams`, From f19220307f25a08269ab5098de784f023cb6b02b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 19:54:18 +0200 Subject: [PATCH 10/44] Adapt arguments in all type applications Previously, we did this only in applications in rhs of type definitions. Need to do it everywhere. --- src/dotty/tools/dotc/typer/Applications.scala | 7 +++---- src/dotty/tools/dotc/typer/Typer.scala | 9 +++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index 40029c42b805..8800c1a5598d 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -604,10 +604,6 @@ trait Applications extends Compatibility { self: Typer => val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt)) typedFn.tpe.widen match { case pt: PolyType => - def adaptTypeArg(tree: tpd.Tree, bound: Type): tpd.Tree = - if (bound.isLambda && !tree.tpe.isLambda && tree.tpe.typeParams.nonEmpty) - tree.withType(tree.tpe.EtaExpand) - else tree if (typedArgs.length <= pt.paramBounds.length) typedArgs = typedArgs.zipWithConserve(pt.paramBounds)(adaptTypeArg) checkBounds(typedArgs, pt) @@ -616,6 +612,9 @@ trait Applications extends Compatibility { self: Typer => assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } + def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = + tree.withType(tree.tpe.EtaExpandIfLambda(bound)) + /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. 
*/ def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { case Apply(TypeApply(tycon, targ :: Nil), args) if tycon.symbol == defn.ArrayConstructor => diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 58c01753486e..337737437202 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -827,9 +827,6 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): AppliedTypeTree = track("typedAppliedTypeTree") { val tpt1 = typed(tree.tpt) - val argPts = - if (ctx.mode is Mode.Pattern) tpt1.tpe.typeParams.map(_.info) - else tree.args.map(_ => WildcardType) val tparams = tpt1.tpe.typeParams var args = tree.args if (tparams.isEmpty) { @@ -840,7 +837,11 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) args = args.take(tparams.length) } - val args1 = args.zipWithConserve(argPts)(typed(_, _)).asInstanceOf[List[Tree]] + val argPts = + if (ctx.mode is Mode.Pattern) tpt1.tpe.typeParams.map(_.info) + else tree.args.map(_ => WildcardType) + def typedArg(arg: untpd.Tree, pt: Type) = adaptTypeArg(typed(arg, pt), pt) + val args1 = args.zipWithConserve(argPts)(typedArg(_, _)).asInstanceOf[List[Tree]] // check that arguments conform to bounds is done in phase PostTyper assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) } From c28f023c0eb35be14c0e649c14ad10b00603e615 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 15 Jul 2015 11:43:39 +0200 Subject: [PATCH 11/44] Avoid generating companion-methods for non-class types. This would lead to a crash. Example is in Predef: object Pair type Pair --- .../tools/dotc/core/unpickleScala2/Scala2Unpickler.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index e6eb89008bdc..457c8cfa681b 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -127,9 +127,11 @@ object Scala2Unpickler { val companionClassMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, claz, module) if (companionClassMethod.exists) companionClassMethod.entered - val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, module, claz) - if (companionModuleMethod.exists) - companionModuleMethod.entered + if (claz.isClass) { + val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, module, claz) + if (companionModuleMethod.exists) + companionModuleMethod.entered + } } if (denot.flagsUNSAFE is Module) { From 8c6b70912984e546f16064817446aea3f3863653 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 16 Jul 2015 12:14:33 +0200 Subject: [PATCH 12/44] Add EtaReduce method. EtaReduce will be used to keep applications on eta expanded methods small. 
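A source-level analogue of the EtaExpand/EtaReduce pair, written with the classic Scala 2 type-lambda encoding (a structural refinement plus a type projection). Hist and Expanded are invented names, and the snippet assumes Scala 2 syntax.

    object EtaDemo {
      type Hist[K] = Map[K, Int]
      // "Eta expansion": wrap the unapplied constructor in a lambda-like refinement ...
      type Expanded[X] = ({ type L[A] = Hist[A] })#L[X]
      // ... and "eta reduction" recognises that applying the wrapper gives back Hist:
      val ev = implicitly[Expanded[String] =:= Hist[String]]
    }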
--- .../tools/dotc/core/TypeApplications.scala | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index a97441681123..9837b39b128f 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -538,6 +538,7 @@ class TypeApplications(val self: Type) extends AnyVal { def EtaExpand(implicit ctx: Context): Type = { val tparams = typeParams self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparams) + //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") } /** Eta expand if `bound` is a type lambda */ @@ -545,6 +546,38 @@ class TypeApplications(val self: Type) extends AnyVal { if (bound.isLambda && self.typeSymbol.isClass && typeParams.nonEmpty && !isLambda) EtaExpand else self + /** If `self` is an eta expansion of type T, return T, otherwise NoType */ + def EtaReduce(implicit ctx: Context): Type = { + def etaCore(tp: Type, tparams: List[Symbol]): Type = tparams match { + case Nil => tp + case tparam :: otherParams => + tp match { + case tp: RefinedType => + tp.refinedInfo match { + case TypeAlias(TypeRef(RefinedThis(rt), rname)) // TODO: Drop once hk applications have been updated + if (rname == tparam.name) && (rt eq self) => + etaCore(tp.parent, otherParams) + case TypeRef(TypeAlias(TypeRef(RefinedThis(rt), rname)), tpnme.Apply) + if (rname == tparam.name) && (rt eq self) => + etaCore(tp.parent, otherParams) + case _ => + NoType + } + case _ => + NoType + } + } + self match { + case self @ RefinedType(parent, tpnme.Apply) => + val lc = parent.LambdaClass(forcing = false) + self.refinedInfo match { + case TypeAlias(alias) if lc.exists => etaCore(alias, lc.typeParams.reverse) + case _ => NoType + } + case _ => NoType + } + } + /** Test whether this type has a base type of the form `B[T1, ..., Bn]` where * the type parameters of `B` match one-by-one the variances of `tparams`, * and where the lambda abstracted type From a4d51ed8ad7eea8d126ddfb8395943a2e83950cd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 17 Jul 2015 12:20:54 +0200 Subject: [PATCH 13/44] Add comment. --- src/dotty/tools/dotc/typer/Namer.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index eef0ce8b0319..7e7d4f802da0 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -808,7 +808,10 @@ class Namer { typer: Typer => } /** Eta expand all class types C appearing as arguments to a higher-kinded - * type parameter to type lambdas, e.g. [HK0] => C[HK0] + * type parameter to type lambdas, e.g. [HK0] => C[HK0]. This is necessary + * because in `typedAppliedTypeTree` we might ahve missed some eta expansions + * of arguments in F-bounds, because the recursive type was initialized with + * TypeBounds.empty. */ def etaExpandArgs(implicit ctx: Context) = new TypeMap { def apply(tp: Type): Type = { From c8b22f5e8a9d404ba671682db687d6a1bc1067eb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 17 Jul 2015 12:21:18 +0200 Subject: [PATCH 14/44] Add a test that logs the classpath to pinpoint setup failures. 
--- test/dotc/tests.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 0838874c9553..86d82be769cd 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -56,6 +56,8 @@ class tests extends CompilerTest { //@Test def pickle_core = compileDir(dotcDir, "core", testPickling, xerrors = 2) // two spurious comparison errors in Types and TypeOps + @Test def pos_arraycopy = + compileFile(runDir, "arraycopy", List("-Ylog-classpath")) @Test def pos_t2168_pat = compileFile(posDir, "t2168", twice) @Test def pos_erasure = compileFile(posDir, "erasure", twice) @Test def pos_Coder() = compileFile(posDir, "Coder", twice) From 36ea47659d7cccbff6b011e86063660bdf391bc5 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jul 2015 14:28:44 +0200 Subject: [PATCH 15/44] Don't eta expand in appliedTo --- src/dotty/tools/dotc/core/TypeApplications.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 9837b39b128f..12eeeb674b54 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -145,7 +145,7 @@ class TypeApplications(val self: Type) extends AnyVal { println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") } val tparam = tparams.head - def needsEtaExpand = + val needsEtaExpand = try { (tparam is HigherKinded) && !arg.isLambda && arg.typeParams.nonEmpty } From 60b231ce30a6eb0e99d031bdbcee3032d3ae5f3b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 15 Jul 2015 10:03:14 +0200 Subject: [PATCH 16/44] Always lambda abstract TempPolyTypes representing types. --- .../core/unpickleScala2/Scala2Unpickler.scala | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 457c8cfa681b..ce03d7c6d0ed 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -50,18 +50,9 @@ object Scala2Unpickler { */ def depoly(tp: Type, denot: SymDenotation)(implicit ctx: Context): Type = tp match { case TempPolyType(tparams, restpe) => - if (denot.isAbstractType) - restpe.LambdaAbstract(tparams) // bounds needed? - else if (denot.isAliasType) { - var err: Option[(String, Position)] = None - val result = restpe.parameterizeWith(tparams) - for ((msg, pos) <- err) - ctx.warning( - sm"""$msg - |originally parsed type : ${tp.show} - |will be approximated by: ${result.show}. - |Proceed at own risk.""") - result + if (denot.isType) { + assert(!denot.isClass) + restpe.LambdaAbstract(tparams) } else PolyType.fromSymbols(tparams, restpe) From 525e0ac7bcb35eb78fedd4a943ef07106752f9fb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 15 Jul 2015 10:04:11 +0200 Subject: [PATCH 17/44] Lambda abstract all TypeRefs when unpickling. 
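For orientation, a sketch of the kinds of Scala 2 definitions whose unpickled infos arrive as TempPolyTypes in `depoly` above and are now lambda abstracted; the member names are invented for the illustration.

    import scala.language.higherKinds

    trait Container {
      type Elem                      // kind *      : no abstraction needed
      type Box[X] <: Iterable[X]     // kind * -> * : abstract type, its bounds are lambda abstracted
      type Pair[A, B] = (A, B)       // parameterized alias: its right-hand side is lambda abstracted
    }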
--- .../tools/dotc/core/unpickleScala2/Scala2Unpickler.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index ce03d7c6d0ed..1420237de11b 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -674,6 +674,11 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else TypeRef(pre, sym.name.asTypeName) val args = until(end, readTypeRef) if (sym == defn.ByNameParamClass2x) ExprType(args.head) + else if (args.isEmpty && sym.typeParams.nonEmpty) { + val res = tycon.LambdaAbstract(sym.typeParams) + println(s"lambda abstract $tycon") + res + } else tycon.appliedTo(args) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) From eb0b2886f642ec00b2e7f2db569c25f0e1e34757 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 15 Jul 2015 10:05:20 +0200 Subject: [PATCH 18/44] A non-class typeref has type parameters only if subtype of a lambda class. --- src/dotty/tools/dotc/core/TypeApplications.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 12eeeb674b54..ca5d93a02c9c 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -45,7 +45,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** The type parameters of this type are: * For a ClassInfo type, the type parameters of its class. * For a typeref referring to a class, the type parameters of the class. - * For a typeref referring to an alias or abstract type, the type parameters of + * For a typeref referring to a Lambda class, the type parameters of * its right hand side or upper bound. * For a refinement type, the type parameters of its parent, unless the refinement * re-binds the type parameter with a type-alias. @@ -59,7 +59,10 @@ class TypeApplications(val self: Type) extends AnyVal { case tp: TypeRef => val tsym = tp.typeSymbol if (tsym.isClass) tsym.typeParams - else tp.underlying.typeParams + else { + val lam = LambdaClass(forcing = false) + if (lam.exists) lam.typeParams else Nil//tp.underlying.typeParams + } case tp: RefinedType => val tparams = tp.parent.typeParams tp.refinedInfo match { From e2aa258c3781fa9ee62fa47dd3b1206b09588c17 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 17 Jul 2015 12:32:57 +0200 Subject: [PATCH 19/44] Drop parameterizeWith parameterizeWith picked between simple hk types and lambda abstraction. No longer needed because now we always lambda abstract. 
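Roughly, the dropped `parameterizeWith` distinguished aliases whose parameters line up one-to-one with the underlying class from aliases that genuinely rearrange them; under the new scheme both shapes are lambda abstracted uniformly. A small illustration with invented names:

    object AliasDemo {
      type Same[X]       = List[X]     // parameters match the underlying class one-to-one
      type Swapped[A, B] = Map[B, A]   // needed a real lambda abstraction even before
      // Applying either alias still dealiases to the underlying class type:
      val a = implicitly[Same[Int] =:= List[Int]]
      val b = implicitly[Swapped[Int, String] =:= Map[String, Int]]
    }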
--- src/dotty/tools/dotc/core/TypeApplications.scala | 4 ++-- src/dotty/tools/dotc/typer/Namer.scala | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index ca5d93a02c9c..893bedeba353 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -437,7 +437,7 @@ class TypeApplications(val self: Type) extends AnyVal { } recur(self) } - +/* /** Given a type alias * * type T[boundSyms] = p.C[targs] @@ -500,7 +500,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (cls.isClass) matchParams(boundSyms, cls.typeParams, argInfos, Map()) else LambdaAbstract(boundSyms) } - +*/ /** The typed lambda abstraction of this type `T` relative to `boundSyms`. * This is: * diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 7e7d4f802da0..224ced0ceed3 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -780,11 +780,11 @@ class Namer { typer: Typer => completeParams(tdef.tparams) val tparamSyms = tdef.tparams map symbolOfTree val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree] - val toParameterize = tparamSyms.nonEmpty && !isDerived - val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived + //val toParameterize = tparamSyms.nonEmpty && !isDerived + //val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived def abstracted(tp: Type): Type = - if (needsLambda) tp.LambdaAbstract(tparamSyms) - else if (toParameterize) tp.parameterizeWith(tparamSyms) + if (tparamSyms.nonEmpty) tp.LambdaAbstract(tparamSyms) + //else if (toParameterize) tp.parameterizeWith(tparamSyms) else tp sym.info = abstracted(TypeBounds.empty) // Temporarily set info of defined type T to ` >: Nothing <: Any. From b12edd1959450f5473cef095821b86400b959e8d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 12:41:32 +0200 Subject: [PATCH 20/44] Allow to use safe substitution in LambdaAbstract Needed to avoid cycles involving F-boundes hk-types when reading Scala2 collection classes with new hk-scheme. --- src/dotty/tools/dotc/core/Substituters.scala | 27 +++++++++++++++++++ .../tools/dotc/core/TypeApplications.scala | 12 +++++++-- src/dotty/tools/dotc/core/Types.scala | 2 +- .../core/unpickleScala2/Scala2Unpickler.scala | 2 +- 4 files changed, 39 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala index e4bbf2305805..0083ac626fae 100644 --- a/src/dotty/tools/dotc/core/Substituters.scala +++ b/src/dotty/tools/dotc/core/Substituters.scala @@ -277,4 +277,31 @@ trait Substituters { this: Context => final class SubstParamsMap(from: BindingType, to: List[Type]) extends DeepTypeMap { def apply(tp: Type) = substParams(tp, from, to, this) } + + /** A map for "cycle safe substitutions" which do not force the denotation + * of a TypeRef unless the name matches up with one of the substituted symbols. 
+ */ + final class SafeSubstMap(from: List[Symbol], to: List[Type]) extends TypeMap { + def apply(tp: Type): Type = tp match { + case tp: NamedType => + try { + var sym: Symbol = null + var fs = from + var ts = to + while (fs.nonEmpty) { + if (fs.head.name == tp.name) { + if (sym == null) sym = tp.symbol + if (fs.head eq sym) return ts.head + } + fs = fs.tail + ts = ts.tail + } + tp.newLikeThis(apply(tp.prefix)) + } + catch { + case ex: CyclicReference => tp.derivedSelect(apply(tp.prefix)) + } + case _ => mapOver(tp) + } + } } diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 893bedeba353..a40047c398ab 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -511,14 +511,22 @@ class TypeApplications(val self: Type) extends AnyVal { * `this.Arg$i`. * * TypeBounds are lambda abstracting by lambda abstracting their upper bound. + * + * @param cycleParanoid If `true` don't force denotation of a TypeRef unless + * its name matches one of `boundSyms`. Needed to avoid cycles + * involving F-boundes hk-types when reading Scala2 collection classes + * with new hk-scheme. */ - def LambdaAbstract(boundSyms: List[Symbol])(implicit ctx: Context): Type = { + def LambdaAbstract(boundSyms: List[Symbol], cycleParanoid: Boolean = false)(implicit ctx: Context): Type = { def expand(tp: Type) = { val lambda = defn.lambdaTrait(boundSyms.map(_.variance)) val substitutedRHS = (rt: RefinedType) => { val argRefs = boundSyms.indices.toList.map(i => RefinedThis(rt).select(tpnme.lambdaArgName(i))) - tp.subst(boundSyms, argRefs).bounds.withVariance(1) + val substituted = + if (cycleParanoid) new ctx.SafeSubstMap(boundSyms, argRefs).apply(tp) + else tp.subst(boundSyms, argRefs) + substituted.bounds.withVariance(1) } val res = RefinedType(lambda.typeRef, tpnme.Apply, substitutedRHS) //println(i"lambda abstract $self wrt $boundSyms%, % --> $res") diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 6797836cfc16..9f2cc0f34dbe 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1492,7 +1492,7 @@ object Types { /** Create a NamedType of the same kind as this type, but with a new prefix. */ - protected def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType = + def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType = NamedType(prefix, name) /** Create a NamedType of the same kind as this type, but with a "inherited name". diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 1420237de11b..b0f31d763d4a 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -52,7 +52,7 @@ object Scala2Unpickler { case TempPolyType(tparams, restpe) => if (denot.isType) { assert(!denot.isClass) - restpe.LambdaAbstract(tparams) + restpe.LambdaAbstract(tparams, cycleParanoid = true) } else PolyType.fromSymbols(tparams, restpe) From 3be3aa3bc8c8e37bdb1f1495e40cfe357f678f28 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 12:36:26 +0200 Subject: [PATCH 21/44] Use EtaExpand instead of LambdaAbstract when unpickling typerefs When unpickling from Scala2 TypeRefs with arguments which do not refer to classes, use EtaExpand instead of LambdaAbstract. Lambda Abstrct is wrong since it drops type arguments. 
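The cycle-safe substitution added in PATCH 20 above is motivated by F-bounded, higher-kinded definitions of the kind found in the Scala 2 collections (compare scala.collection.generic.Subtractable). A simplified sketch of that shape, with invented trait names:

    trait Like[A, +Repr <: Like[A, Repr]] {   // Repr is F-bounded: its bound mentions Repr itself
      def self: Repr
    }
    trait Coll[A] extends Like[A, Coll[A]]    // naively forcing infos here can go in circles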
--- .../tools/dotc/core/unpickleScala2/Scala2Unpickler.scala | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index b0f31d763d4a..c0b5ab64becf 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -674,11 +674,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else TypeRef(pre, sym.name.asTypeName) val args = until(end, readTypeRef) if (sym == defn.ByNameParamClass2x) ExprType(args.head) - else if (args.isEmpty && sym.typeParams.nonEmpty) { - val res = tycon.LambdaAbstract(sym.typeParams) - println(s"lambda abstract $tycon") - res - } + else if (args.isEmpty && sym.typeParams.nonEmpty) tycon.EtaExpand else tycon.appliedTo(args) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) From 47049965ce2b72e49b0d1ca29c10966f7d5012fb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 12:37:34 +0200 Subject: [PATCH 22/44] Avoid cycles when eliminating existentials in unpickling New hk-scheme caused cycles in elimExistentials which are fixed by this patch. --- .../core/unpickleScala2/Scala2Unpickler.scala | 27 ++++++++++++++++--- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index c0b5ab64becf..68439846d6d8 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -584,15 +584,35 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas * tp { name: T } */ def elimExistentials(boundSyms: List[Symbol], tp: Type)(implicit ctx: Context): Type = { + // Need to be careful not to run into cyclic references here (observed when + // comiling t247.scala). That's why we avoiud taking `symbol` of a TypeRef + // unless names match up. + val isBound = (tp: Type) => { + def refersTo(tp: Type, sym: Symbol): Boolean = tp match { + case tp @ TypeRef(_, name) => sym.name == name && sym == tp.symbol + case tp: TypeVar => refersTo(tp.underlying, sym) + case tp : LazyRef => refersTo(tp.ref, sym) + case _ => false + } + boundSyms.exists(refersTo(tp, _)) + } + // Cannot use standard `existsPart` method because it calls `lookupRefined` + // which can cause CyclicReference errors. 
+ val isBoundAccumulator = new ExistsAccumulator(isBound) { + override def foldOver(x: Boolean, tp: Type): Boolean = tp match { + case tp: TypeRef => applyToPrefix(x, tp) + case _ => super.foldOver(x, tp) + } + } def removeSingleton(tp: Type): Type = if (tp isRef defn.SingletonClass) defn.AnyType else tp def elim(tp: Type): Type = tp match { case tp @ RefinedType(parent, name) => val parent1 = elim(tp.parent) tp.refinedInfo match { - case TypeAlias(info: TypeRef) if boundSyms contains info.symbol => + case TypeAlias(info: TypeRef) if isBound(info) => RefinedType(parent1, name, info.symbol.info) - case info: TypeRef if boundSyms contains info.symbol => + case info: TypeRef if isBound(info) => val info1 = info.symbol.info assert(info1.derivesFrom(defn.SingletonClass)) RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _)) @@ -605,8 +625,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas tp } val tp1 = elim(tp) - val isBound = (tp: Type) => boundSyms contains tp.typeSymbol - if (tp1 existsPart isBound) { + if (isBoundAccumulator(false, tp1)) { val anyTypes = boundSyms map (_ => defn.AnyType) val boundBounds = boundSyms map (_.info.bounds.hi) val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes) From 094c3bdef0fcc725c6eada7cbf70a9c51ce1ed7e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 12:38:19 +0200 Subject: [PATCH 23/44] Don't lambda abstract derived types Derived types already contain the lambda abstractoion; lambda abstracting them again would cause a double lambda. --- src/dotty/tools/dotc/typer/Namer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 224ced0ceed3..22523582202f 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -783,7 +783,7 @@ class Namer { typer: Typer => //val toParameterize = tparamSyms.nonEmpty && !isDerived //val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived def abstracted(tp: Type): Type = - if (tparamSyms.nonEmpty) tp.LambdaAbstract(tparamSyms) + if (tparamSyms.nonEmpty && !isDerived) tp.LambdaAbstract(tparamSyms) //else if (toParameterize) tp.parameterizeWith(tparamSyms) else tp sym.info = abstracted(TypeBounds.empty) From 31f9da14e5f2357c8af3c106acccc89fdd1f222c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 12:57:06 +0200 Subject: [PATCH 24/44] Temporary fix to testLifted Discrepancies between numbers of formal and actual type arguments were observed when typing partialFunctions.scala under new scheme. Should come back to this when subtyping is rewrittem/simplified to work with new hk-scheme. Maybe testLifted is no longer needed at all. 
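For reference, the kind of Scala 2 existential that `elimExistentials` (PATCH 22 above) rewrites while unpickling; `firstKey` is an invented name and the resulting approximation is described only roughly.

    import scala.language.existentials

    object ExistentialDemo {
      // Scala 2 syntax; when a signature like this is unpickled, the bound
      // variable K is eliminated and the parameter type is approximated by
      // (roughly) Map[Any, Int].
      def firstKey(m: Map[K, Int] forSome { type K }): Any = m.keys.head
    }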
--- src/dotty/tools/dotc/core/TypeApplications.scala | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index a40047c398ab..c20d52d0d14a 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -15,6 +15,7 @@ import typer.Mode import util.Positions.Position import config.Printers._ import collection.mutable +import java.util.NoSuchElementException object TypeApplications { @@ -624,8 +625,13 @@ class TypeApplications(val self: Type) extends AnyVal { case nil => false } - if (tparams.isEmpty) false - else if (typeParams.nonEmpty) p(EtaExpand) || classBounds.nonEmpty && tryLift(self.baseClasses) - else classBounds.nonEmpty && tryLift(self.baseClasses) + try { // temporary, to avoid type mismatches in applications. Should come back to this + // when subtyping is rewritten to account for new hk-scheme. + if (tparams.isEmpty) false + else if (typeParams.nonEmpty) p(EtaExpand) || classBounds.nonEmpty && tryLift(self.baseClasses) + else classBounds.nonEmpty && tryLift(self.baseClasses) + } catch { + case ex: NoSuchElementException => false + } } } From 05a4660886276284f966896bc800c8862919c01f Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 12:59:34 +0200 Subject: [PATCH 25/44] Follow alias types when computing type params. This aligns typeParams and rawTypeParams. It's not strictly to necessary, though. --- src/dotty/tools/dotc/core/TypeApplications.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index c20d52d0d14a..d0909fed4c5b 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -60,9 +60,10 @@ class TypeApplications(val self: Type) extends AnyVal { case tp: TypeRef => val tsym = tp.typeSymbol if (tsym.isClass) tsym.typeParams + else if (tsym.isAliasType) tp.underlying.rawTypeParams else { val lam = LambdaClass(forcing = false) - if (lam.exists) lam.typeParams else Nil//tp.underlying.typeParams + if (lam.exists) lam.typeParams else Nil } case tp: RefinedType => val tparams = tp.parent.typeParams @@ -85,7 +86,7 @@ class TypeApplications(val self: Type) extends AnyVal { * do not remove corresponding type parameters. * Second, it will return Nil for BoundTypes because we might get a NullPointer exception * on PolyParam#underlying otherwise (demonstrated by showClass test). - * Third, it won't return higher-kinded type parameters, i.e. the type parameters of + * Third, it won't return abstract higher-kinded type parameters, i.e. the type parameters of * an abstract type are always empty. */ final def rawTypeParams(implicit ctx: Context): List[TypeSymbol] = { From 83ae926dad0caf945a8633b39b557567415b2bbe Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 13:02:36 +0200 Subject: [PATCH 26/44] Try to eta reduce type applications. Rewrite a type application like ([HK$0] => C[HK$0])(T) to C[T] Avoids application cahins to become unnecessarly large. 
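A sketch of the shape the commit message describes, at the source level; Elems is an invented alias name. Under the new scheme the alias by itself is represented as a type lambda, so its applications take the form that the rewrite collapses.

    object EtaReduceDemo {
      type Elems[X] = List[X]            // an alias that is exactly an eta expansion of List
      // An application such as Elems[Int] then has the shape
      //     ([HK$0] => List[HK$0])(Int)
      // which is rewritten back to List[Int] instead of stacking another refinement.
      val xs: Elems[Int] = List(1, 2, 3)
      val ev = implicitly[Elems[Int] =:= List[Int]]
    }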
--- src/dotty/tools/dotc/core/TypeApplications.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index d0909fed4c5b..dca56e812470 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -192,7 +192,9 @@ class TypeApplications(val self: Type) extends AnyVal { matchParams(tp, safeTypeParams, args) } case tp: RefinedType => - tp.derivedRefinedType( + val redux = tp.EtaReduce + if (redux.exists) redux.appliedTo(args) // Rewrite ([HK$0] => C[HK$0])(T) to C[T] + else tp.derivedRefinedType( instantiate(tp.parent, original), tp.refinedName, tp.refinedInfo) From 965e7e368bcf7346b28ef19f694be502e76a1ca3 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 13:04:39 +0200 Subject: [PATCH 27/44] Project all high-kinded types with #Apply Used to be just instantiated lambdas. With the new scheme every type with a kind higher than * needs to be projected with #Apply. --- src/dotty/tools/dotc/core/TypeApplications.scala | 15 ++++++++++++--- src/dotty/tools/dotc/core/TypeOps.scala | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index dca56e812470..12b540cf47aa 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -206,11 +206,20 @@ class TypeApplications(val self: Type) extends AnyVal { tp } + def isHK(tp: Type): Boolean = tp match { + case tp: TypeRef => + val sym = tp.symbol + if (sym.isClass) sym.isLambdaTrait + else !sym.isAliasType || isHK(tp.info) + case tp: TypeProxy => isHK(tp.underlying) + case _ => false + } + if (args.isEmpty || ctx.erasedTypes) self else { val res = instantiate(self, self) - if (res.isInstantiatedLambda) - // Note: isInstantiatedLambda needs to be conservative, using isSafeLambda + if (isHK(res)) + // Note: isHK needs to be conservative, using isSafeLambda // in order to avoid cyclic reference errors. But this means that some fully // instantiated types will remain unprojected, which essentially means // that they stay as higher-kinded types. checkNonCyclic checks the type again @@ -218,7 +227,7 @@ class TypeApplications(val self: Type) extends AnyVal { // that fall through the hole. Not adding an #Apply typically manifests itself // with a <:< failure of two types that "look the same". An example is #779, // where compiling scala.immutable.Map gives a bounds violation. - res.select(tpnme.Apply) + TypeRef(res, tpnme.Apply) else res } } diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 2a9dbd09c4e3..77c6805f01a9 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -21,7 +21,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. * class C { type T; def f(x: T): T } * * and an expression `e` of type `C`. Then computing the type of `e.f` leads - * to the query asSeenFrom(`C`, `(x: T)T`). What should it's result be? The + * to the query asSeenFrom(`C`, `(x: T)T`). What should its result be? The * naive answer `(x: C.T)C.T` is incorrect given that we treat `C.T` as the existential * `exists(c: C)c.T`. What we need to do instead is to skolemize the existential. So * the answer would be `(x: c.T)c.T` for some (unknown) value `c` of type `C`. 
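A user-level reminder of the kind distinction behind the change above: only types of kind * classify values, so anything of higher kind has to be applied (internally: projected with #Apply) before it can be the type of a term. Names below are invented.

    import scala.language.higherKinds

    object KindDemo {
      type HK[F[_]] = F[Int]          // F has kind * -> *
      type Applied  = HK[Option]      // Option applied, kind *, usable as a value's type
      val x: Applied = Some(1)
      // val y: Option = ???          // rejected: Option alone has kind * -> *
    }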
From c0918c69e578edad40320e0a5bac1603f5ce94fa Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 13:05:56 +0200 Subject: [PATCH 28/44] Better error reporting in TreePickler Turn a possible NPE into an AssertionError. The latter are caught in pickleTree, so an error leaves a trace about what was pickled. --- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 8847d2de3c40..a23d59339de4 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -200,7 +200,9 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)} case tpe: RefinedThis => writeByte(REFINEDthis) - writeRef(pickledTypes.get(tpe.binder).asInstanceOf[Addr]) + val binderAddr = pickledTypes.get(tpe.binder) + assert(binderAddr != null) + writeRef(binderAddr.asInstanceOf[Addr]) case tpe: SkolemType => pickleType(tpe.info) case tpe: RefinedType => From 09ebc0f2b643c2be090c1aa0343880f063edc5be Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 13:09:05 +0200 Subject: [PATCH 29/44] Adapt type arguments in typedAppliedTypeTree Previously, only pattern bound arguments were adapated. This was an oversight. Also, change logix so that we survive empty type parameter lists. This was also an oversight before. --- src/dotty/tools/dotc/typer/Typer.scala | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 337737437202..7caec5135273 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -825,7 +825,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit res } - def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): AppliedTypeTree = track("typedAppliedTypeTree") { + def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") { val tpt1 = typed(tree.tpt) val tparams = tpt1.tpe.typeParams var args = tree.args @@ -833,17 +833,19 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit ctx.error(d"${tpt1.tpe} does not take type parameters") tpt1 } - else if (args.length != tparams.length) { - ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) - args = args.take(tparams.length) + else { + if (args.length != tparams.length) { + ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) + args = args.take(tparams.length) + } + def typedArg(arg: untpd.Tree, tparam: Symbol) = { + val arg1 = typed(arg, if (ctx.mode is Mode.Pattern) tparam.info else WildcardType) + adaptTypeArg(arg1, if (tparam.isCompleted) tparam.info else WildcardType) + } + val args1 = args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] + // check that arguments conform to bounds is done in phase PostTyper + assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) } - val argPts = - if (ctx.mode is Mode.Pattern) tpt1.tpe.typeParams.map(_.info) - else tree.args.map(_ => WildcardType) - def typedArg(arg: untpd.Tree, pt: Type) = adaptTypeArg(typed(arg, pt), pt) - val args1 = args.zipWithConserve(argPts)(typedArg(_, _)).asInstanceOf[List[Tree]] - // check 
that arguments conform to bounds is done in phase PostTyper - assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") { From caae19ba37b7b05b2fd2e1edbad62ee9bf46c5e4 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 19 Jul 2015 13:20:06 +0200 Subject: [PATCH 30/44] Add missing position to error --- src/dotty/tools/dotc/typer/Typer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 7caec5135273..d35356a855ef 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -830,7 +830,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit val tparams = tpt1.tpe.typeParams var args = tree.args if (tparams.isEmpty) { - ctx.error(d"${tpt1.tpe} does not take type parameters") + ctx.error(d"${tpt1.tpe} does not take type parameters", tree.pos) tpt1 } else { From 2634498cede2525b07c1e40fbad0f5ae0cf96fda Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jul 2015 10:36:53 +0200 Subject: [PATCH 31/44] Rename Apply -> hkApply Want to have a unique name for Apply, so that tests for higher-kinded types become cheaper. --- src/dotty/tools/dotc/core/Definitions.scala | 2 +- src/dotty/tools/dotc/core/StdNames.scala | 2 +- src/dotty/tools/dotc/core/TypeApplications.scala | 12 ++++++------ src/dotty/tools/dotc/core/TypeComparer.scala | 8 ++++---- src/dotty/tools/dotc/core/Types.scala | 4 ++-- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 2 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 4 ++-- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 8 ++++---- src/dotty/tools/dotc/typer/Typer.scala | 7 ++++++- 9 files changed, 27 insertions(+), 22 deletions(-) diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index fcd9ef224f10..03be776a0085 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -528,7 +528,7 @@ class Definitions { val paramDecls = newScope for (i <- 0 until vcs.length) newTypeParam(cls, tpnme.lambdaArgName(i), varianceFlags(vcs(i)), paramDecls) - newTypeField(cls, tpnme.Apply, Covariant, paramDecls) + newTypeField(cls, tpnme.hkApply, Covariant, paramDecls) val parentTraitRefs = for (i <- 0 until vcs.length if vcs(i) != 0) yield lambdaTrait(vcs.updated(i, 0)).typeRef diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index 52318a386bd1..be2452428315 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -311,7 +311,7 @@ object StdNames { val AnnotatedType: N = "AnnotatedType" val AppliedTypeTree: N = "AppliedTypeTree" - val Apply: N = "Apply" + val hkApply: N = "$apply" val ArrayAnnotArg: N = "ArrayAnnotArg" val Constant: N = "Constant" val ConstantType: N = "ConstantType" diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 12b540cf47aa..c09d6a2e0da6 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -227,7 +227,7 @@ class TypeApplications(val self: Type) extends AnyVal { // that fall through the hole. Not adding an #Apply typically manifests itself // with a <:< failure of two types that "look the same". 
An example is #779, // where compiling scala.immutable.Map gives a bounds violation. - TypeRef(res, tpnme.Apply) + TypeRef(res, tpnme.hkApply) else res } } @@ -235,8 +235,8 @@ class TypeApplications(val self: Type) extends AnyVal { /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`. */ def simplifyApply(implicit ctx: Context): Type = self match { - case self @ TypeRef(prefix, tpnme.Apply) if prefix.isInstantiatedLambda => - prefix.member(tpnme.Apply).info match { + case self @ TypeRef(prefix, tpnme.hkApply) if prefix.isInstantiatedLambda => + prefix.member(tpnme.hkApply).info match { case TypeAlias(alias) => alias case _ => self } @@ -541,7 +541,7 @@ class TypeApplications(val self: Type) extends AnyVal { else tp.subst(boundSyms, argRefs) substituted.bounds.withVariance(1) } - val res = RefinedType(lambda.typeRef, tpnme.Apply, substitutedRHS) + val res = RefinedType(lambda.typeRef, tpnme.hkApply, substitutedRHS) //println(i"lambda abstract $self wrt $boundSyms%, % --> $res") res } @@ -581,7 +581,7 @@ class TypeApplications(val self: Type) extends AnyVal { case TypeAlias(TypeRef(RefinedThis(rt), rname)) // TODO: Drop once hk applications have been updated if (rname == tparam.name) && (rt eq self) => etaCore(tp.parent, otherParams) - case TypeRef(TypeAlias(TypeRef(RefinedThis(rt), rname)), tpnme.Apply) + case TypeRef(TypeAlias(TypeRef(RefinedThis(rt), rname)), tpnme.hkApply) if (rname == tparam.name) && (rt eq self) => etaCore(tp.parent, otherParams) case _ => @@ -592,7 +592,7 @@ class TypeApplications(val self: Type) extends AnyVal { } } self match { - case self @ RefinedType(parent, tpnme.Apply) => + case self @ RefinedType(parent, tpnme.hkApply) => val lc = parent.LambdaClass(forcing = false) self.refinedInfo match { case TypeAlias(alias) if lc.exists => etaCore(alias, lc.typeParams.reverse) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index ea815f6c0778..a8598ae443cf 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -471,7 +471,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * continue with `T <:< U` if `inOrder` is true and `U <:< T` otherwise. */ def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = - projection.name == tpnme.Apply && { + projection.name == tpnme.hkApply && { // @@@ rewrite val lambda = projection.prefix.LambdaClass(forcing = true) lambda.exists && !other.isLambda && other.testLifted(lambda.typeParams, @@ -480,7 +480,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } /** The class symbols bounding the type of the `Apply` member of `tp` */ - private def classBounds(tp: Type) = tp.member(tpnme.Apply).info.classSymbols + private def classBounds(tp: Type) = tp.member(tpnme.hkApply).info.classSymbols /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time * to keep the constraint as wide as possible. Specifically, if @@ -633,9 +633,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** Does `tp` need to be eta lifted to be comparable to `target`? 
*/ private def needsEtaLift(tp: Type, target: RefinedType): Boolean = { - //default.echo(i"needs eta $tp $target?", { + //default.echo(i"needs eta $tp $target?", { // @@@ rewrite val name = target.refinedName - (name.isLambdaArgName || (name eq tpnme.Apply)) && target.isLambda && + (name.isLambdaArgName || (name eq tpnme.hkApply)) && target.isLambda && tp.exists && !tp.isLambda //}) } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 9f2cc0f34dbe..20b259db46ce 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -833,7 +833,7 @@ object Types { * * P { type T = String, type R = P{...}.T } # R --> String * - * (2) The refinement is a fully instantiated type lambda, and the projected name is "Apply". + * (2) The refinement is a fully instantiated type lambda, and the projected name is "$apply". * In this case the rhs of the apply is returned with all references to lambda argument types * substituted by their definitions. * @@ -869,7 +869,7 @@ object Types { else if (!pre.refinementRefersToThis) alias else alias match { case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1) - case _ => if (name == tpnme.Apply) betaReduce(alias) else NoType // (2) + case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) } case _ => loop(pre.parent, resolved) } diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index a23d59339de4..d50817b60e19 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -176,7 +176,7 @@ class TreePickler(pickler: TastyPickler) { pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix) } case tpe: NamedType => - if (tpe.name == tpnme.Apply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) + if (tpe.name == tpnme.hkApply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda) // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will // be reconstituted when unpickling. pickleType(tpe.prefix) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 68439846d6d8..af33ce3c2723 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -584,7 +584,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas * tp { name: T } */ def elimExistentials(boundSyms: List[Symbol], tp: Type)(implicit ctx: Context): Type = { - // Need to be careful not to run into cyclic references here (observed when + // Need to be careful not to run into cyclic references here (observed when // comiling t247.scala). That's why we avoiud taking `symbol` of a TypeRef // unless names match up. 
val isBound = (tp: Type) => { @@ -619,7 +619,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case info => tp.derivedRefinedType(parent1, name, info) } - case tp @ TypeRef(pre, tpnme.Apply) if pre.isLambda => + case tp @ TypeRef(pre, tpnme.hkApply) => elim(pre) case _ => tp diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 70fab7e0fc5a..a46665ec02c8 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -122,7 +122,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close } if (tp.isSafeLambda) { - val (prefix, body, bindings) = extractApply(tp) + val (prefix, body, bindings) = decomposeHKApply(tp) prefix match { case prefix: TypeRef if prefix.symbol.isLambdaTrait && body.exists => return typeLambdaText(prefix.symbol, body, bindings) @@ -184,9 +184,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { * without a prefix, because the latter print nicer. * */ - def extractApply(tp: Type): (Type, Type, List[(Name, Type)]) = tp.stripTypeVar match { + def decomposeHKApply(tp: Type): (Type, Type, List[(Name, Type)]) = tp.stripTypeVar match { case tp @ RefinedType(parent, name) => - if (name == tpnme.Apply) { + if (name == tpnme.hkApply) { // simplify arguments so that parameters just print HK$i and not // LambdaI{...}.HK$i val simplifyArgs = new TypeMap { @@ -199,7 +199,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } (parent, simplifyArgs(tp.refinedInfo.followTypeAlias), Nil) } else if (name.isLambdaArgName) { - val (prefix, body, argBindings) = extractApply(parent) + val (prefix, body, argBindings) = decomposeHKApply(parent) (prefix, body, (name, tp.refinedInfo) :: argBindings) } else (tp, NoType, Nil) case _ => diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index d35356a855ef..3f7e0b81c9fb 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -1434,7 +1434,12 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case Select(New(tpt), nme.CONSTRUCTOR) => tpt.tpe.dealias.argTypesLo case _ => Nil } - if (typeArgs.isEmpty) typeArgs = constrained(poly, tree)._2 + if (typeArgs.isEmpty) { + //for ((pname, pbound) <- poly.paramNames.zip(poly.paramBounds)) + // if (pbound.hi.isSafeLambda) + // ctx.error(d"cannot infer argument for higher-kinded type parameter $pname", tree.pos) + typeArgs = constrained(poly, tree)._2 + } convertNewArray( adaptInterpolated(tree.appliedToTypes(typeArgs), pt, original)) } From 845a5d65796365454ceaaba48603afa07792f7e7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jul 2015 10:58:51 +0200 Subject: [PATCH 32/44] Simplify EtaReduce by removing dead case. 
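[Editor's sketch, not part of the patch] As a rough user-level picture of what EtaReduce works on (all names below are invented for illustration): under the new hk scheme a parameterized alias is represented as a type lambda, and EtaReduce collapses such a lambda back to a plain class type when it merely re-applies its own parameters.

    object EtaReduceSketch {
      type Idem[X]    = List[X]         // a lambda over X that reduces back to the class List
      type Partial[X] = Map[X, String]  // partially applied Map; reducible only after patch 38 below
      val xs: Idem[Int]        = List(1, 2, 3)
      val ys: Partial[Boolean] = Map(true -> "yes")
    }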
--- src/dotty/tools/dotc/core/TypeApplications.scala | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index c09d6a2e0da6..ca2e7601b5bc 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -578,10 +578,7 @@ class TypeApplications(val self: Type) extends AnyVal { tp match { case tp: RefinedType => tp.refinedInfo match { - case TypeAlias(TypeRef(RefinedThis(rt), rname)) // TODO: Drop once hk applications have been updated - if (rname == tparam.name) && (rt eq self) => - etaCore(tp.parent, otherParams) - case TypeRef(TypeAlias(TypeRef(RefinedThis(rt), rname)), tpnme.hkApply) + case TypeAlias(TypeRef(RefinedThis(rt), rname)) if (rname == tparam.name) && (rt eq self) => etaCore(tp.parent, otherParams) case _ => From 31048ad571b8c917a4c67f48899babae0703cae2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jul 2015 11:35:52 +0200 Subject: [PATCH 33/44] Rename of hk args HK$x -> hk$x These are not user-accessible types, so no need to follow type convention and write in upper case. Also, rename occurrences of lambda to Lambda, to make clear we mean a type lambda. --- src/dotty/tools/dotc/core/Definitions.scala | 12 +-- src/dotty/tools/dotc/core/NameOps.scala | 2 +- src/dotty/tools/dotc/core/StdNames.scala | 6 +- .../tools/dotc/core/TypeApplications.scala | 83 ++++--------------- src/dotty/tools/dotc/core/Types.scala | 6 +- 5 files changed, 27 insertions(+), 82 deletions(-) diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 03be776a0085..a503a2d23cb0 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -494,7 +494,7 @@ class Definitions { /** The set of HigherKindedXYZ traits encountered so far */ def lambdaTraits: Set[Symbol] = myLambdaTraits - private var lambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() + private var LambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() /** The HigherKinded trait corresponding to symbols `boundSyms` (which are assumed * to be the type parameters of a higher-kided type). This is a class symbol that @@ -513,7 +513,7 @@ class Definitions { * - for each positive or negative variance v_i there is a parent trait Pj which * is the same as LambdaXYZ except that it has `I` in i-th position. 
*/ - def lambdaTrait(vcs: List[Int]): ClassSymbol = { + def LambdaTrait(vcs: List[Int]): ClassSymbol = { assert(vcs.nonEmpty) def varianceFlags(v: Int) = v match { @@ -527,17 +527,17 @@ class Definitions { val cls = denot.asClass.classSymbol val paramDecls = newScope for (i <- 0 until vcs.length) - newTypeParam(cls, tpnme.lambdaArgName(i), varianceFlags(vcs(i)), paramDecls) + newTypeParam(cls, tpnme.LambdaArgName(i), varianceFlags(vcs(i)), paramDecls) newTypeField(cls, tpnme.hkApply, Covariant, paramDecls) val parentTraitRefs = for (i <- 0 until vcs.length if vcs(i) != 0) - yield lambdaTrait(vcs.updated(i, 0)).typeRef + yield LambdaTrait(vcs.updated(i, 0)).typeRef denot.info = ClassInfo( ScalaPackageClass.thisType, cls, ObjectClass.typeRef :: parentTraitRefs.toList, paramDecls) } } - val traitName = tpnme.lambdaTraitName(vcs) + val traitName = tpnme.LambdaTraitName(vcs) def createTrait = { val cls = newClassSymbol( @@ -549,7 +549,7 @@ class Definitions { cls } - lambdaTraitForVariances.getOrElseUpdate(vcs, createTrait) + LambdaTraitForVariances.getOrElseUpdate(vcs, createTrait) } // ----- primitive value class machinery ------------------------------------------ diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index bf5b68ce95fb..1a26463473ac 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -110,7 +110,7 @@ object NameOps { /** The index of the higher-kinded type parameter with this name. * Pre: isLambdaArgName. */ - def lambdaArgIndex: Int = + def LambdaArgIndex: Int = name.drop(tpnme.LAMBDA_ARG_PREFIX.length).toString.toInt /** If the name ends with $nn where nn are diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index be2452428315..7c73d95da242 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -173,7 +173,7 @@ object StdNames { final val WILDCARD_STAR: N = "_*" final val REIFY_TREECREATOR_PREFIX: N = "$treecreator" final val REIFY_TYPECREATOR_PREFIX: N = "$typecreator" - final val LAMBDA_ARG_PREFIX: N = "HK$" + final val LAMBDA_ARG_PREFIX: N = "hk$" final val LAMBDA_ARG_PREFIXhead: Char = LAMBDA_ARG_PREFIX.head final val Any: N = "Any" @@ -739,8 +739,8 @@ object StdNames { def syntheticTypeParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticTypeParamName)(breakOut) - def lambdaTraitName(vcs: List[Int]): TypeName = LambdaPrefix ++ vcs.map(varianceSuffix).mkString - def lambdaArgName(n: Int) = LAMBDA_ARG_PREFIX ++ n.toString + def LambdaTraitName(vcs: List[Int]): TypeName = LambdaPrefix ++ vcs.map(varianceSuffix).mkString + def LambdaArgName(n: Int) = LAMBDA_ARG_PREFIX ++ n.toString final val Conforms = encode("<:<") diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index ca2e7601b5bc..75a0b99571d0 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -141,6 +141,16 @@ class TypeApplications(val self: Type) extends AnyVal { def isInstantiatedLambda(implicit ctx: Context): Boolean = isSafeLambda && typeParams.isEmpty + def isHK(implicit ctx: Context): Boolean = self.dealias match { + case self: TypeRef => + self.info match { + case TypeBounds(_, hi) => hi.isHK + case _ => false + } + case RefinedType(_, name) => name == tpnme.hkApply || name.isLambdaArgName + case _ => false + } + /** Encode the type resulting from applying this type to given 
arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { @@ -187,7 +197,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (tsym.isClass || !tp.typeSymbol.isCompleting) original.typeParams else { ctx.warning(i"encountered F-bounded higher-kinded type parameters for $tsym; assuming they are invariant") - defn.lambdaTrait(args map alwaysZero).typeParams + defn.LambdaTrait(args map alwaysZero).typeParams // @@@ can we force? } matchParams(tp, safeTypeParams, args) } @@ -450,70 +460,7 @@ class TypeApplications(val self: Type) extends AnyVal { } recur(self) } -/* - /** Given a type alias - * - * type T[boundSyms] = p.C[targs] - * - * produce its equivalent right hand side RHS that makes no reference to the bound - * symbols on the left hand side. I.e. the type alias can be replaced by - * - * type T = RHS - * - * There are two strategies how this is achieved. - * 1st strategy: Applies if `C` is a class such that every bound symbol in `boundSyms` - * appears as an argument in `targs`, and in the same order. Then the rewriting replaces - * bound symbols by references to the parameters of class C. Example: - * - * Say we have: - * - * class Triple[type T1, type T2, type T3] - * type A[X] = Triple[(X, X), X, String] - * - * Then this is rewritable, as `X` appears as second type argument to `Triple`. - * Occurrences of `X` are rewritten to `this.T2` and the whole definition becomes: - * - * type A = Triple { type T1 = (this.T2, this.T2); type T3 = String } - * - * 2nd strategy: Used as a fallback if 1st strategy does not apply. It rewrites - * the RHS to a typed lambda abstraction. - */ - def parameterizeWith(boundSyms: List[Symbol])(implicit ctx: Context): Type = { - def matchParams(bsyms: List[Symbol], tparams: List[Symbol], targs: List[Type], - correspondingParamName: Map[Symbol, TypeName]): Type = { - if (bsyms.isEmpty) { - val correspondingNames = correspondingParamName.values.toSet - - def replacements(rt: RefinedType): List[Type] = - for (sym <- boundSyms) - yield TypeRef(RefinedThis(rt), correspondingParamName(sym)) - - def rewrite(tp: Type): Type = tp match { - case tp @ RefinedType(parent, name: TypeName) => - if (correspondingNames contains name) rewrite(parent) - else RefinedType( - rewrite(parent), name, - rt => tp.refinedInfo.subst(boundSyms, replacements(rt))) - case tp => - tp - } - - rewrite(self) - } - else if (tparams.isEmpty || targs.isEmpty) - LambdaAbstract(boundSyms) - else if (bsyms.head == targs.head.typeSymbol) - matchParams(bsyms.tail, tparams.tail, targs.tail, - correspondingParamName + (bsyms.head -> tparams.head.name.asTypeName)) - else - matchParams(bsyms, tparams.tail, targs.tail, correspondingParamName) - } - val cls = self.typeSymbol - if (cls.isClass) matchParams(boundSyms, cls.typeParams, argInfos, Map()) - else LambdaAbstract(boundSyms) - } -*/ /** The typed lambda abstraction of this type `T` relative to `boundSyms`. 
* This is: * @@ -532,18 +479,16 @@ class TypeApplications(val self: Type) extends AnyVal { */ def LambdaAbstract(boundSyms: List[Symbol], cycleParanoid: Boolean = false)(implicit ctx: Context): Type = { def expand(tp: Type) = { - val lambda = defn.lambdaTrait(boundSyms.map(_.variance)) + val lambda = defn.LambdaTrait(boundSyms.map(_.variance)) val substitutedRHS = (rt: RefinedType) => { val argRefs = boundSyms.indices.toList.map(i => - RefinedThis(rt).select(tpnme.lambdaArgName(i))) + RefinedThis(rt).select(tpnme.LambdaArgName(i))) val substituted = if (cycleParanoid) new ctx.SafeSubstMap(boundSyms, argRefs).apply(tp) else tp.subst(boundSyms, argRefs) substituted.bounds.withVariance(1) } - val res = RefinedType(lambda.typeRef, tpnme.hkApply, substitutedRHS) - //println(i"lambda abstract $self wrt $boundSyms%, % --> $res") - res + RefinedType(lambda.typeRef, tpnme.hkApply, substitutedRHS) } self match { case self @ TypeBounds(lo, hi) => diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 20b259db46ce..4656b06295c3 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1865,8 +1865,8 @@ object Types { case refinedInfo: TypeBounds if refinedInfo.variance != 0 && refinedName.isLambdaArgName => val cls = parent.LambdaClass(forcing = false) if (cls.exists) - assert(refinedInfo.variance == cls.typeParams.apply(refinedName.lambdaArgIndex).variance, - s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.lambdaArgIndex).variance}, ${refinedInfo.variance}") + assert(refinedInfo.variance == cls.typeParams.apply(refinedName.LambdaArgIndex).variance, + s"variance mismatch for $this, $cls, ${cls.typeParams}, ${cls.typeParams.apply(refinedName.LambdaArgIndex).variance}, ${refinedInfo.variance}") case _ => } this @@ -1882,7 +1882,7 @@ object Types { this else if ( refinedName.isLambdaArgName //&& { println(s"deriving $refinedName $parent $underlyingTypeParams"); true } - && refinedName.lambdaArgIndex < underlyingTypeParams.length + && refinedName.LambdaArgIndex < underlyingTypeParams.length && !parent.isLambda) derivedRefinedType(parent.EtaExpand, refinedName, refinedInfo) else From 5a9a48d21038b78545aba15d0d99238c3c395d47 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jul 2015 12:15:27 +0200 Subject: [PATCH 34/44] Remove magic from derivedRefinedType. It's no longer needed with new hk scheme. --- src/dotty/tools/dotc/core/Types.scala | 20 +++----------------- 1 file changed, 3 insertions(+), 17 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 4656b06295c3..312d6b290e19 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1872,23 +1872,9 @@ object Types { this } - /** Derived refined type, with a twist: A refinement with a higher-kinded type param placeholder - * is transformed to a refinement of the original type parameter if that one exists. 
*/ - def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = { - lazy val underlyingTypeParams = parent.rawTypeParams - if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) - this - else if ( refinedName.isLambdaArgName //&& { println(s"deriving $refinedName $parent $underlyingTypeParams"); true } - && refinedName.LambdaArgIndex < underlyingTypeParams.length - && !parent.isLambda) - derivedRefinedType(parent.EtaExpand, refinedName, refinedInfo) - else - if (false) RefinedType(parent, refinedName, refinedInfo) - else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) - } + def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = + if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this + else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) /** Add this refinement to `parent`, provided `refinedName` is a member of `parent`. */ def wrapIfMember(parent: Type)(implicit ctx: Context): Type =
From 38ac02916ec0beef0cb93f0fdd48ad1f9dfdcf48 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Mon, 20 Jul 2015 13:59:34 +0200
Subject: [PATCH 35/44] Replace isLambda with isHK

Replace occurrences of isLambda with isHK, because isHK is a bit faster and simpler.
--- .../tools/dotc/core/TypeApplications.scala | 55 +++++-------------- src/dotty/tools/dotc/core/TypeComparer.scala | 7 +-- src/dotty/tools/dotc/typer/Applications.scala | 2 +- src/dotty/tools/dotc/typer/Namer.scala | 2 +- src/dotty/tools/dotc/typer/Typer.scala | 7 +-- 5 files changed, 21 insertions(+), 52 deletions(-)
diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala
index 75a0b99571d0..66542d02d996 100644
--- a/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -141,13 +141,11 @@ class TypeApplications(val self: Type) extends AnyVal { def isInstantiatedLambda(implicit ctx: Context): Boolean = isSafeLambda && typeParams.isEmpty + /** Is receiver type higher-kinded (i.e. of kind != "*")? */ def isHK(implicit ctx: Context): Boolean = self.dealias match { - case self: TypeRef => - self.info match { - case TypeBounds(_, hi) => hi.isHK - case _ => false - } + case self: TypeRef => self.info.isHK case RefinedType(_, name) => name == tpnme.hkApply || name.isLambdaArgName + case TypeBounds(_, hi) => hi.isHK case _ => false }
@@ -160,24 +158,7 @@ class TypeApplications(val self: Type) extends AnyVal { println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") } val tparam = tparams.head - val needsEtaExpand = - try { - (tparam is HigherKinded) && !arg.isLambda && arg.typeParams.nonEmpty - } - catch { - case ex: CyclicReference => - if (ctx.mode.is(Mode.Scala2Unpickling)) - // When unpickling Scala2, we might run into cyclic references when - // checking whether eta expansion is needed or eta expanding. - // (e.g. try compile collection/generic/GenericTraversableTemplate.scala). - // In that case, back out gracefully. Ideally, we should not have - // underdefined pickling data that requires post-transformations like - // eta expansion, but we can't change Scala2's.
- false - else throw ex - } - val arg1 = if (needsEtaExpand) arg.EtaExpand else arg - val tp1 = RefinedType(tp, tparam.name, arg1.toBounds(tparam)) + val tp1 = RefinedType(tp, tparam.name, arg.toBounds(tparam)) matchParams(tp1, tparams.tail, args1) case nil => tp } @@ -216,29 +197,23 @@ class TypeApplications(val self: Type) extends AnyVal { tp } - def isHK(tp: Type): Boolean = tp match { + /** Same as isHK, except we classify all abstract types as HK, + * (they must be, because the are applied). This avoids some forcing and + * CyclicReference errors of the standard isHK. + */ + def isKnownHK(tp: Type): Boolean = tp match { case tp: TypeRef => val sym = tp.symbol if (sym.isClass) sym.isLambdaTrait - else !sym.isAliasType || isHK(tp.info) - case tp: TypeProxy => isHK(tp.underlying) + else !sym.isAliasType || isKnownHK(tp.info) + case tp: TypeProxy => isKnownHK(tp.underlying) case _ => false } if (args.isEmpty || ctx.erasedTypes) self else { val res = instantiate(self, self) - if (isHK(res)) - // Note: isHK needs to be conservative, using isSafeLambda - // in order to avoid cyclic reference errors. But this means that some fully - // instantiated types will remain unprojected, which essentially means - // that they stay as higher-kinded types. checkNonCyclic checks the type again - // and potentially inserts an #Apply then. Hopefully, this catches all types - // that fall through the hole. Not adding an #Apply typically manifests itself - // with a <:< failure of two types that "look the same". An example is #779, - // where compiling scala.immutable.Map gives a bounds violation. - TypeRef(res, tpnme.hkApply) - else res + if (isKnownHK(res)) TypeRef(res, tpnme.hkApply) else res } } @@ -510,9 +485,9 @@ class TypeApplications(val self: Type) extends AnyVal { //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") } - /** Eta expand if `bound` is a type lambda */ - def EtaExpandIfLambda(bound: Type)(implicit ctx: Context): Type = - if (bound.isLambda && self.typeSymbol.isClass && typeParams.nonEmpty && !isLambda) EtaExpand + /** Eta expand if `bound` is a higher-kinded type */ + def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = + if (bound.isHK && !isHK && self.typeSymbol.isClass && typeParams.nonEmpty) EtaExpand else self /** If `self` is an eta expansion of type T, return T, otherwise NoType */ diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index a8598ae443cf..35e4d804bb3a 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -633,11 +633,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** Does `tp` need to be eta lifted to be comparable to `target`? */ private def needsEtaLift(tp: Type, target: RefinedType): Boolean = { - //default.echo(i"needs eta $tp $target?", { // @@@ rewrite + // if (tp.isLambda != tp.isHK) println(i"discrepancy for $tp, isLambda = ${tp.isLambda}, isHK = ${tp.isHK}") val name = target.refinedName - (name.isLambdaArgName || (name eq tpnme.hkApply)) && target.isLambda && - tp.exists && !tp.isLambda - //}) + (name.isLambdaArgName || (name eq tpnme.hkApply)) && + tp.exists && !tp.isLambda // we do encounter Lambda classes without any arguments here @@@ check whether this makes sense. 
} /** Narrow gadt.bounds for the type parameter referenced by `tr` to include diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index 8800c1a5598d..c7d8acb371e7 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -613,7 +613,7 @@ trait Applications extends Compatibility { self: Typer => } def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = - tree.withType(tree.tpe.EtaExpandIfLambda(bound)) + tree.withType(tree.tpe.EtaExpandIfHK(bound)) /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. */ def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 22523582202f..c1341a9ae769 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -822,7 +822,7 @@ class Namer { typer: Typer => val tycon = tp.withoutArgs(args) val tparams = tycon.typeParams if (args.length == tparams.length) { // if lengths differ, problem is caught in typedTypeApply - val args1 = args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfLambda(tparam.info)) + val args1 = args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.info)) if (args1 ne args) return this(tycon).appliedTo(args1) } } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 3f7e0b81c9fb..d35356a855ef 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -1434,12 +1434,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case Select(New(tpt), nme.CONSTRUCTOR) => tpt.tpe.dealias.argTypesLo case _ => Nil } - if (typeArgs.isEmpty) { - //for ((pname, pbound) <- poly.paramNames.zip(poly.paramBounds)) - // if (pbound.hi.isSafeLambda) - // ctx.error(d"cannot infer argument for higher-kinded type parameter $pname", tree.pos) - typeArgs = constrained(poly, tree)._2 - } + if (typeArgs.isEmpty) typeArgs = constrained(poly, tree)._2 convertNewArray( adaptInterpolated(tree.appliedToTypes(typeArgs), pt, original)) } From ab6e5cebebe8c59901413f89281073d6eec7f8e1 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jul 2015 15:13:07 +0200 Subject: [PATCH 36/44] Rewrite compareHK to be kind-correct --- src/dotty/tools/dotc/core/TypeApplications.scala | 6 ++++++ src/dotty/tools/dotc/core/TypeComparer.scala | 16 +++++++--------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 66542d02d996..18256b9e4bba 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -149,6 +149,12 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => false } + /** is receiver of the form T#$apply? 
*/ + def isHKApply: Boolean = self match { + case TypeRef(_, name) => name == tpnme.hkApply + case _ => false + } + /** Encode the type resulting from applying this type to given arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 35e4d804bb3a..9e86a22a7876 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -466,18 +466,16 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } - /** If `projection` is of the form T # Apply where `T` is an instance of a Lambda class, - * and `other` is not a type lambda projection, then convert `other` to a type lambda `U`, and + /** If `projection` is a hk projection T#$apply + * and `other` is not a hk projection, then convert `other` to a hk projection `U`, and * continue with `T <:< U` if `inOrder` is true and `U <:< T` otherwise. */ def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = - projection.name == tpnme.hkApply && { // @@@ rewrite - val lambda = projection.prefix.LambdaClass(forcing = true) - lambda.exists && !other.isLambda && - other.testLifted(lambda.typeParams, - if (inOrder) isSubType(projection.prefix, _) else isSubType(_, projection.prefix), - if (inOrder) Nil else classBounds(projection.prefix)) - } + projection.name == tpnme.hkApply && + !other.isHKApply && + other.testLifted(projection.prefix.LambdaClass(forcing = true).typeParams, + if (inOrder) isSubType(projection.prefix, _) else isSubType(_, projection.prefix), + if (inOrder) Nil else classBounds(projection.prefix)) /** The class symbols bounding the type of the `Apply` member of `tp` */ private def classBounds(tp: Type) = tp.member(tpnme.hkApply).info.classSymbols From 4aa1d5e1d9e4edfc1df6161504861840b24687ae Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jul 2015 18:12:04 +0200 Subject: [PATCH 37/44] Fix problem in typeParams Accidentally forwarded to rawTypeParams. This solved the problem with mismatching type params in appliedTo that was caught in testLifted. 
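[Editor's sketch, not part of the patch] A minimal user-level illustration of why this matters (invented names): the type parameters reported for an alias come from its right-hand side, recursively through further aliases, and appliedTo has to see the same arity at every step when matching arguments against parameters.

    object AliasTypeParamsSketch {
      type L[X]  = List[X]       // one parameter, taken from the right-hand side
      type LL[X] = L[X]          // forwards through the intermediate alias
      val xs: LL[Int] = List(1)  // each application lines up one argument with one parameter
    }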
--- .../tools/dotc/core/TypeApplications.scala | 19 +++++++------------ src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 18256b9e4bba..06286735357b 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -60,7 +60,7 @@ class TypeApplications(val self: Type) extends AnyVal { case tp: TypeRef => val tsym = tp.typeSymbol if (tsym.isClass) tsym.typeParams - else if (tsym.isAliasType) tp.underlying.rawTypeParams + else if (tsym.isAliasType) tp.underlying.typeParams else { val lam = LambdaClass(forcing = false) if (lam.exists) lam.typeParams else Nil @@ -525,12 +525,12 @@ class TypeApplications(val self: Type) extends AnyVal { } } - /** Test whether this type has a base type of the form `B[T1, ..., Bn]` where + /** Test whether this type has a base type of the form `B[T1, ..., Tn]` where * the type parameters of `B` match one-by-one the variances of `tparams`, * and where the lambda abstracted type * - * LambdaXYZ { type Apply = B[$hkArg$0, ..., $hkArg$n] } - * { type $hkArg$0 = T1; ...; type $hkArg$n = Tn } + * LambdaXYZ { type Apply = B[$hkArg$0, ..., $hkArg${n-1}] } + * { type $hkArg$0 = T1; ...; type $hkArg${n-1} = Tn } * * satisfies predicate `p`. Try base types in the order of their occurrence in `baseClasses`. * A type parameter matches a variance V if it has V as its variance or if V == 0. @@ -560,13 +560,8 @@ class TypeApplications(val self: Type) extends AnyVal { case nil => false } - try { // temporary, to avoid type mismatches in applications. Should come back to this - // when subtyping is rewritten to account for new hk-scheme. - if (tparams.isEmpty) false - else if (typeParams.nonEmpty) p(EtaExpand) || classBounds.nonEmpty && tryLift(self.baseClasses) - else classBounds.nonEmpty && tryLift(self.baseClasses) - } catch { - case ex: NoSuchElementException => false - } + tparams.nonEmpty && + (typeParams.nonEmpty && p(EtaExpand) || + classBounds.nonEmpty && tryLift(self.baseClasses)) } } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 9e86a22a7876..eb1dcacdd7e5 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -634,7 +634,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // if (tp.isLambda != tp.isHK) println(i"discrepancy for $tp, isLambda = ${tp.isLambda}, isHK = ${tp.isHK}") val name = target.refinedName (name.isLambdaArgName || (name eq tpnme.hkApply)) && - tp.exists && !tp.isLambda // we do encounter Lambda classes without any arguments here @@@ check whether this makes sense. + tp.exists && !tp.isLambda // we do encounter Lambda classes without any arguments here } /** Narrow gadt.bounds for the type parameter referenced by `tr` to include From 4148970a984de945c69f345381f0f03e84d7d6c2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 21 Jul 2015 18:31:32 +0200 Subject: [PATCH 38/44] Generalize eta reduction to partially applied types. 
Now also allows to reduce something like ([T] => Map[T, String]) to Map[_, String] --- src/dotty/tools/dotc/core/TypeApplications.scala | 8 ++++++-- src/dotty/tools/dotc/core/TypeComparer.scala | 7 +++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 06286735357b..5d856495b8ea 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -506,9 +506,12 @@ class TypeApplications(val self: Type) extends AnyVal { tp.refinedInfo match { case TypeAlias(TypeRef(RefinedThis(rt), rname)) if (rname == tparam.name) && (rt eq self) => + // todo: add bounds etaCore(tp.parent, otherParams) case _ => - NoType + val pcore = etaCore(tp.parent, tparams) + if (pcore.exists) tp.derivedRefinedType(pcore, tp.refinedName, tp.refinedInfo) + else NoType } case _ => NoType @@ -518,7 +521,8 @@ class TypeApplications(val self: Type) extends AnyVal { case self @ RefinedType(parent, tpnme.hkApply) => val lc = parent.LambdaClass(forcing = false) self.refinedInfo match { - case TypeAlias(alias) if lc.exists => etaCore(alias, lc.typeParams.reverse) + case TypeAlias(alias) if lc.exists => + etaCore(alias, lc.typeParams.reverse) case _ => NoType } case _ => NoType diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index eb1dcacdd7e5..c9b1e87947ae 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1232,5 +1232,12 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) + override def compareHK(projection: NamedType, other: Type, inOrder: Boolean) = + if (projection.name == tpnme.hkApply) + traceIndented(i"compareHK $projection, $other, $inOrder") { + super.compareHK(projection, other, inOrder) + } + else super.compareHK(projection, other, inOrder) + override def toString = "Subtype trace:" + { try b.toString finally b.clear() } } From e8aecfa4d48383321549aa8f1ec8d1edb0ccaf06 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 21 Jul 2015 18:42:17 +0200 Subject: [PATCH 39/44] Disallow wildcard arguments to higher-kinded types... ...unless the HK type can be eta-reduced to a class type. 
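[Editor's sketch, not part of the patch] In user code the rule plays out as in the sketch below, which mirrors the neg/pos tests added by this patch (the alias names here are invented):

    object WildcardSketch {
      type Straight[X, Y] = Map[X, Y]   // eta-reduces to the class type Map, so wildcards are allowed
      type Swapped[X, Y]  = Map[Y, X]   // a genuine type lambda: wildcard arguments are rejected
      val ok: Straight[_, Int] = Map[String, Int]()
      // val bad: Swapped[_, Int] = Map[Int, String]()   // flagged as an error under this patch
    }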
--- src/dotty/tools/dotc/typer/Typer.scala | 29 ++++++++++++++++++++- test/dotc/tests.scala | 1 + tests/neg/partialApplications.scala | 11 ++++++++ tests/pos/partialApplications.scala | 35 +++++++++++++++++++++++++- 4 files changed, 74 insertions(+), 2 deletions(-) create mode 100644 tests/neg/partialApplications.scala
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index d35356a855ef..a3c64f526379 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -844,7 +844,34 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit } val args1 = args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] // check that arguments conform to bounds is done in phase PostTyper - assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) + val tree1 = assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) + if (tree1.tpe.isHKApply) + for (arg @ TypeBoundsTree(_, _) <- args1) + ctx.error("illegal wildcard type argument; does not correspond to type parameter of a class", arg.pos) // The reason for outlawing such arguments is illustrated by the following example. // Say we have // // type RMap[A, B] = Map[B, A] // // Then // // RMap[_, Int] // // translates to // // Lambda$I { type hk$0; type hk$1 = Int; type $apply = Map[hk$1, hk$0] } # $apply // // Let's call the last type T. You would expect that // // Map[Int, String] <: RMap[_, Int] // // But that's not the case given the standard subtyping rules. In fact, the rhs reduces to // // Map[Int, T # hk$0] // // That means the second argument to `Map` is unknown and String is certainly not a subtype of it. // To avoid the surprise we outlaw problematic wildcard arguments from the start.
+ tree1 } } diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 86d82be769cd..b8423db38969 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -155,6 +155,7 @@ class tests extends CompilerTest { @Test def neg_traitParamsMixin = compileFile(negDir, "traitParamsMixin", xerrors = 2) @Test def neg_firstError = compileFile(negDir, "firstError", xerrors = 3) @Test def neg_implicitLowerBound = compileFile(negDir, "implicit-lower-bound", xerrors = 1) + @Test def neg_partialApplications = compileFile(negDir, "partialApplications", xerrors = 8) @Test def run_all = runFiles(runDir) diff --git a/tests/neg/partialApplications.scala b/tests/neg/partialApplications.scala new file mode 100644 index 000000000000..67f6cf059ad0 --- /dev/null +++ b/tests/neg/partialApplications.scala @@ -0,0 +1,11 @@ +object Test { + + type RMap[X, Y] = Map[Y, X] + val m = Map[Int, String]() + val ts: RMap[_, Int] = m // erorr // error + val us: RMap[String, _] = m // error // error + val vs: RMap[_, _] = m // error // error // error + val zz: RMap = m // error + +} + diff --git a/tests/pos/partialApplications.scala b/tests/pos/partialApplications.scala index c1df1dee2035..285dc86613cc 100644 --- a/tests/pos/partialApplications.scala +++ b/tests/pos/partialApplications.scala @@ -8,6 +8,39 @@ object Test { val ys: StringlyHistogram[String] = xs - val zs: StringlyHistogram = xs + def e = xs + + val zs: StringlyHistogram[_] = e + + type IntMap[Y] = Map[Int, Y] + + val is = Map[Int, Boolean]() + + val js: IntMap[Boolean] = is + + val ks: IntMap[_] = is + + type RMap[X, Y] = Map[Y, X] + + val rs = Map[Int, Float]() + + val ss: RMap[Float, Int] = rs + +} + +object Test2 { + type Histogram = Map[_, Int] + + type StringlyHistogram = Histogram[_ >: String] // error + + val xs: Histogram[String] = Map[String, Int]() // error + + val ys: StringlyHistogram[String] = xs // error + + val zs: StringlyHistogram = xs // error + + val xs1 = xs + val ys1 = ys + val zs1 = zs } From d1cd3d738213c08242cae66623edba8e0102574c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 21 Jul 2015 19:53:28 +0200 Subject: [PATCH 40/44] Keep track of parameter bounds in LambdaAbstract and EtaReduce 1) Lambda abstract now records bounds of abstracted type parameters in TypeLambda 2) Eta-reduce likewise keeps the bounds it finds in the TypeLambda 3) Eta-reduce now also translates hk$i references to type parameters of the reduced type. --- .../tools/dotc/core/TypeApplications.scala | 77 ++++++++++++++++--- 1 file changed, 66 insertions(+), 11 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 5d856495b8ea..d6cb3dc15721 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -445,11 +445,14 @@ class TypeApplications(val self: Type) extends AnyVal { /** The typed lambda abstraction of this type `T` relative to `boundSyms`. * This is: * - * LambdaXYZ{ type Apply = subst(T) } + * LambdaXYZ{ bounds }{ type Apply = toHK(T) } * - * where XYZ reflets that variances of the bound symbols and - * `subst` is a substitution that replaces every bound symbol sym_i by - * `this.Arg$i`. + * where + * - XYZ reflects the variances of the bound symbols, + * - `bounds` consists of type declarations `type hk$i >: toHK(L) <: toHK(U), + * one for each type parameter in `T` with non-trivial bounds L,U. + * - `toHK` is a substitution that replaces every bound symbol sym_i by + * `this.Arg$i`. 
* * TypeBounds are lambda abstracting by lambda abstracting their upper bound. * @@ -459,9 +462,9 @@ class TypeApplications(val self: Type) extends AnyVal { * with new hk-scheme. */ def LambdaAbstract(boundSyms: List[Symbol], cycleParanoid: Boolean = false)(implicit ctx: Context): Type = { - def expand(tp: Type) = { + def expand(tp: Type): Type = { val lambda = defn.LambdaTrait(boundSyms.map(_.variance)) - val substitutedRHS = (rt: RefinedType) => { + def toHK(tp: Type) = (rt: RefinedType) => { val argRefs = boundSyms.indices.toList.map(i => RefinedThis(rt).select(tpnme.LambdaArgName(i))) val substituted = @@ -469,7 +472,18 @@ class TypeApplications(val self: Type) extends AnyVal { else tp.subst(boundSyms, argRefs) substituted.bounds.withVariance(1) } - RefinedType(lambda.typeRef, tpnme.hkApply, substitutedRHS) + val boundNames = new mutable.ListBuffer[Name] + val boundss = new mutable.ListBuffer[TypeBounds] + for (sym <- boundSyms) { + val bounds = sym.info.bounds + if (!(TypeBounds.empty <:< bounds)) { + boundNames += sym.name + boundss += bounds + } + } + val lambdaWithBounds = + RefinedType.make(lambda.typeRef, boundNames.toList, boundss.toList.map(toHK)) + RefinedType(lambdaWithBounds, tpnme.hkApply, toHK(tp)) } self match { case self @ TypeBounds(lo, hi) => @@ -496,7 +510,29 @@ class TypeApplications(val self: Type) extends AnyVal { if (bound.isHK && !isHK && self.typeSymbol.isClass && typeParams.nonEmpty) EtaExpand else self - /** If `self` is an eta expansion of type T, return T, otherwise NoType */ + /** If `self` is a (potentially partially instantiated) eta expansion of type T, return T, + * otherwise NoType. More precisely if `self` is of the form + * + * T { type $apply = U[T1, ..., Tn] } + * + * where + * + * - hk$0, ..., hk${m-1} are the type parameters of T + * - a sublist of the arguments Ti_k (k = 0,...,m_1) are of the form T{...}.this.hk$i_k + * + * rewrite `self` to + * + * U[T'1,...T'j] + * + * where + * + * T'j = _ >: Lj <: Uj if j is in the i_k list defined above + * where Lj and Uj are the bounds of hk$j mapped using `fromHK`. + * = fromHK(Tj) otherwise. + * + * `fromHK` is the function that replaces every occurrence of `.this.hk$i` by the + * corresponding parameter reference in `U[T'1,...T'j]` + */ def EtaReduce(implicit ctx: Context): Type = { def etaCore(tp: Type, tparams: List[Symbol]): Type = tparams match { case Nil => tp @@ -506,8 +542,11 @@ class TypeApplications(val self: Type) extends AnyVal { tp.refinedInfo match { case TypeAlias(TypeRef(RefinedThis(rt), rname)) if (rname == tparam.name) && (rt eq self) => - // todo: add bounds - etaCore(tp.parent, otherParams) + // we have a binding T = Lambda$XYZ{...}.this.hk$i where hk$i names the current `tparam`. + val pcore = etaCore(tp.parent, otherParams) + val hkBounds = self.member(rname).info.bounds + if (TypeBounds.empty <:< hkBounds) pcore + else tp.derivedRefinedType(pcore, tp.refinedName, hkBounds) case _ => val pcore = etaCore(tp.parent, tparams) if (pcore.exists) tp.derivedRefinedType(pcore, tp.refinedName, tp.refinedInfo) @@ -517,12 +556,28 @@ class TypeApplications(val self: Type) extends AnyVal { NoType } } + // Map references `Lambda$XYZ{...}.this.hk$i to corresponding parameter references of the reduced core. 
+ def fromHK(reduced: Type) = reduced match { + case reduced: RefinedType => + new TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(RefinedThis(binder), name) if binder eq self => + assert(name.isLambdaArgName) + RefinedThis(reduced).select(reduced.typeParams.apply(name.LambdaArgIndex)) + case _ => + mapOver(tp) + } + }.apply(reduced) + case _ => + reduced + } + self match { case self @ RefinedType(parent, tpnme.hkApply) => val lc = parent.LambdaClass(forcing = false) self.refinedInfo match { case TypeAlias(alias) if lc.exists => - etaCore(alias, lc.typeParams.reverse) + fromHK(etaCore(alias, lc.typeParams.reverse)) case _ => NoType } case _ => NoType From 5ee7b9ed189d77fbf864c5558841f53750db2c30 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Jul 2015 16:03:14 +0200 Subject: [PATCH 41/44] Avoid orphan RefinedThis types in glb/lub. --- src/dotty/tools/dotc/core/TypeComparer.scala | 4 ++-- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index c9b1e87947ae..fce803c56866 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -929,7 +929,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tp1.derivedRefinedType( tp1.parent & tp2.parent, tp1.refinedName, - tp1.refinedInfo & tp2.refinedInfo) + tp1.refinedInfo & tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1))) case _ => NoType } @@ -995,7 +995,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tp1.derivedRefinedType( tp1.parent | tp2.parent, tp1.refinedName, - tp1.refinedInfo | tp2.refinedInfo) + tp1.refinedInfo | tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1))) case _ => NoType } diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index d50817b60e19..58697c196b36 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -201,7 +201,7 @@ class TreePickler(pickler: TastyPickler) { case tpe: RefinedThis => writeByte(REFINEDthis) val binderAddr = pickledTypes.get(tpe.binder) - assert(binderAddr != null) + assert(binderAddr != null, tpe.binder) writeRef(binderAddr.asInstanceOf[Addr]) case tpe: SkolemType => pickleType(tpe.info) From 154f3511d52c6b748c03d97dd035f0ad79f9a355 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Jul 2015 16:05:11 +0200 Subject: [PATCH 42/44] Move failing test to pending. The original IterableSelfRec is not syntactically legal after the hk changes. I attempted to fix, but there's still a type error. Need to investigate whether this is a true error or a bug. 
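[Editor's sketch, not part of the patch] For reference, a small standalone example of the parameterized-bound syntax the test now has to use; all names are invented, and unlike the test itself this sketch also applies CC inside the refinement so that it compiles on its own.

    object SelfRecSketch {
      trait Box[T] { type This <: Box[T] }
      // The test changes `CC <: Iterable { type This <: CC }` to `CC[X] <: Iterable[X] { ... }`;
      // the same shape with a simple container trait:
      abstract class Companion[CC[X] <: Box[X] { type This <: CC[X] }] {
        def empty[T]: CC[T]
      }
    }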
--- tests/{ => pending}/pos/IterableSelfRec.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename tests/{ => pending}/pos/IterableSelfRec.scala (84%)
diff --git a/tests/pos/IterableSelfRec.scala b/tests/pending/pos/IterableSelfRec.scala
similarity index 84%
rename from tests/pos/IterableSelfRec.scala
rename to tests/pending/pos/IterableSelfRec.scala
index bba7a82d252a..a97833991785 100644
--- a/tests/pos/IterableSelfRec.scala
+++ b/tests/pending/pos/IterableSelfRec.scala
@@ -22,9 +22,9 @@ trait Seq[T] extends Iterable[T] { self => def apply(x: Int): T } -abstract class CollectionCompanion[+CC <: Collection { type This <: CC }] +abstract class CollectionCompanion[+CC[X] <: Collection[X] { type This <: CC }] -abstract class IterableCompanion[+CC <: Iterable { type This <: CC }] extends CollectionCompanion[CC] { +abstract class IterableCompanion[+CC[X] <: Iterable[X] { type This <: CC }] extends CollectionCompanion[CC] { def fromIterator[T](it: Iterator[T]): CC[T] def map[T, U](xs: Iterable[T], f: T => U): CC[U] = fromIterator(xs.iterator.map(f))
@@ -36,7 +36,7 @@ abstract class IterableCompanion[+CC <: Iterable { type This <: CC }] extends Co implicit def transformOps[T](xs: CC[T] @uncheckedVariance): TransformOps[CC, T] = ??? // new TransformOps[CC, T](xs) } -class TransformOps[+CC <: Iterable { type This <: CC }, T] (val xs: CC[T]) extends AnyVal { +class TransformOps[+CC[X] <: Iterable[X] { type This <: CC }, T] (val xs: CC[T]) extends AnyVal { def companion[T](xs: CC[T] @uncheckedVariance): IterableCompanion[CC] = xs.companion def map[U](f: T => U): CC[U] = companion(xs).map(xs, f) def filter(p: T => Boolean): CC[T] = companion(xs).filter(xs, p)
From 71e3133ef65b06a5bce605cd4f0ebf879cc05118 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Mon, 21 Sep 2015 12:46:35 +0200
Subject: [PATCH 43/44] Eta expand $apply projected types if needed

It turns out that asSeenFrom can produce types that get projected with $apply but that are not higher-kinded. An example failure is in Iter3, another in scala.collection.immutable.Map (which is now part of the test suite). We now detect that situation and eta expand the projected type in `derivedSelect`; this forces a subsequent `lookupRefined`, which gives the desired normalized type. Also added is a configurable test that checks that $apply projected types are in fact higher-kinded.
--- src/dotty/tools/dotc/config/Config.scala | 5 + .../tools/dotc/core/TypeApplications.scala | 29 +++ src/dotty/tools/dotc/core/Types.scala | 20 +- src/dotty/tools/dotc/typer/Checking.scala | 14 +- tests/pos/GenericTraversableTemplate.scala | 232 ++++++++++++++++++ tests/pos/Iter3.scala | 199 +++++++++++++++ tests/pos/Map.scala | 194 +++++++++++++++ 7 files changed, 677 insertions(+), 16 deletions(-) create mode 100644 tests/pos/GenericTraversableTemplate.scala create mode 100644 tests/pos/Iter3.scala create mode 100644 tests/pos/Map.scala
diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala
index 97893647c7fb..d66d1ecdb898 100644
--- a/src/dotty/tools/dotc/config/Config.scala
+++ b/src/dotty/tools/dotc/config/Config.scala
@@ -71,6 +71,11 @@ object Config { /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ final val fastPathForRefinedSubtype = true + /** If this flag is set, $apply projections are checked that they apply to a + * higher-kinded type. + */ + final val checkProjections = false + /** When set, use new signature-based matching.
* Advantage of doing so: It's supposed to be faster * Disadvantage: It might hide inconsistencies, so while debugging it's better to turn it off diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index d6cb3dc15721..d7d205be6a4e 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -155,6 +155,27 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => false } + /** True if it can be determined without forcing that the class symbol + * of this application exists and is not a lambda trait. + * Equivalent to + * + * self.classSymbol.exists && !self.classSymbol.isLambdaTrait + * + * but without forcing anything. + */ + def noHK(implicit ctx: Context): Boolean = self.stripTypeVar match { + case self: RefinedType => + self.parent.noHK + case self: TypeRef => + (self.denot.exists) && { + val sym = self.symbol + if (sym.isClass) !sym.isLambdaTrait + else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.noHK + } + case _ => + false + } + /** Encode the type resulting from applying this type to given arguments */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { def matchParams(tp: Type, tparams: List[TypeSymbol], args: List[Type]): Type = args match { @@ -510,6 +531,14 @@ class TypeApplications(val self: Type) extends AnyVal { if (bound.isHK && !isHK && self.typeSymbol.isClass && typeParams.nonEmpty) EtaExpand else self + /** Eta expand the prefix in front of any refinements. */ + def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match { + case self: RefinedType => + self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo) + case _ => + self.EtaExpand + } + /** If `self` is a (potentially partially instantiated) eta expansion of type T, return T, * otherwise NoType. More precisely if `self` is of the form * diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 312d6b290e19..e545066af221 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1487,7 +1487,9 @@ object Types { if (prefix eq this.prefix) this else { val res = prefix.lookupRefined(name) - if (res.exists) res else newLikeThis(prefix) + if (res.exists) res + else if (name == tpnme.hkApply && prefix.noHK) derivedSelect(prefix.EtaExpandCore) + else newLikeThis(prefix) } /** Create a NamedType of the same kind as this type, but with a new prefix. @@ -1725,9 +1727,15 @@ object Types { } object TypeRef { + def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) = + if (name == tpnme.hkApply && prefix.noHK) + assert(false, s"bad type : $prefix.$name should not be $$applied") + /** Create type ref with given prefix and name */ - def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = + def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = { + if (Config.checkProjections) checkProjection(prefix, name) ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TypeRef] + } /** Create type ref to given symbol */ def apply(prefix: Type, sym: TypeSymbol)(implicit ctx: Context): TypeRef = @@ -1736,8 +1744,10 @@ object Types { /** Create a non-member type ref (which cannot be reloaded using `member`), * with given prefix, name, and symbol. 
*/ - def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = + def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = { + if (Config.checkProjections) checkProjection(prefix, name) unique(new TypeRefWithFixedSym(prefix, name, sym)) + } /** Create a type ref referring to given symbol with given name. * This is very similar to TypeRef(Type, Symbol), @@ -3198,7 +3208,9 @@ object Types { class MissingType(pre: Type, name: Name)(implicit ctx: Context) extends TypeError( i"""cannot resolve reference to type $pre.$name - |the classfile defining the type might be missing from the classpath${otherReason(pre)}""".stripMargin) + |the classfile defining the type might be missing from the classpath${otherReason(pre)}""".stripMargin) { + printStackTrace() + } private def otherReason(pre: Type)(implicit ctx: Context): String = pre match { case pre: ThisType if pre.givenSelfType.exists => diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 3847cb5bef22..8376dd4e9619 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -115,18 +115,8 @@ object Checking { val parent1 = this(parent) val saved = cycleOK cycleOK = nestedCycleOK - - /** A derived refined type with two possible tweaks: - * (1) LazyRefs in parents are pulled out, - * (2) #Apply is added if the type is a fully applied type lambda. - */ - def derivedType(p: Type): Type = p match { - case p: LazyRef => LazyRef(() => derivedType(p.ref)) - case _ => - val res = tp.derivedRefinedType(p, name, this(tp.refinedInfo)) - if (res.isSafeLambda && res.typeParams.isEmpty) res.select(tpnme.Apply) else res - } - try derivedType(parent1) finally cycleOK = saved + try tp.derivedRefinedType(parent1, name, this(tp.refinedInfo)) + finally cycleOK = saved case tp @ TypeRef(pre, name) => try { // A prefix is interesting if it might contain (transitively) a reference diff --git a/tests/pos/GenericTraversableTemplate.scala b/tests/pos/GenericTraversableTemplate.scala new file mode 100644 index 000000000000..cd48cd23f4b3 --- /dev/null +++ b/tests/pos/GenericTraversableTemplate.scala @@ -0,0 +1,232 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala +package collection +package generic + +import mutable.Builder +import scala.annotation.migration +import scala.annotation.unchecked.uncheckedVariance +import scala.language.higherKinds + +/** A template class for companion objects of ``regular`` collection classes + * that represent an unconstrained higher-kinded type. + * + * @tparam A The type of the collection elements. + * @tparam CC The type constructor representing the collection class. + * @author Martin Odersky + * @since 2.8 + * @define coll collection + * @define Coll CC + */ +trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { + + /** Applies a function `f` to all elements of this $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. 
+ * + * @usecase def foreach(f: A => Unit): Unit + */ + def foreach[U](f: A => U): Unit + + /** Selects the first element of this $coll. + * + * @return the first element of this $coll. + * @throws `NoSuchElementException` if the $coll is empty. + */ + def head: A + + /** Tests whether this $coll is empty. + * + * @return `true` if the $coll contain no elements, `false` otherwise. + */ + def isEmpty: Boolean + + /** The factory companion object that builds instances of class $Coll. + * (or its `Iterable` superclass where class $Coll is not a `Seq`.) + */ + def companion: GenericCompanion[CC] + + /** The builder that builds instances of type $Coll[A] + */ + protected[this] def newBuilder: Builder[A, CC[A]] = companion.newBuilder[A] + + /** The generic builder that builds instances of $Coll + * at arbitrary element types. + */ + def genericBuilder[B]: Builder[B, CC[B]] = companion.newBuilder[B] + + private def sequential: TraversableOnce[A] = this.asInstanceOf[GenTraversableOnce[A]].seq + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. + * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + for (xy <- sequential) { + val (x, y) = asPair(xy) + b1 += x + b2 += y + } + (b1.result(), b2.result()) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val b1 = genericBuilder[A1] + val b2 = genericBuilder[A2] + val b3 = genericBuilder[A3] + + for (xyz <- sequential) { + val (x, y, z) = asTriple(xyz) + b1 += x + b2 += y + b3 += z + } + (b1.result(), b2.result(), b3.result()) + } + + /** Converts this $coll of traversable collections into + * a $coll formed by the elements of these traversable + * collections. + * + * @tparam B the type of the elements of each traversable collection. + * @param asTraversable an implicit conversion which asserts that the element + * type of this $coll is a `GenTraversable`. + * @return a new $coll resulting from concatenating all element ${coll}s. + * + * @usecase def flatten[B]: $Coll[B] + * + * @inheritdoc + * + * The resulting collection's type will be guided by the + * static type of $coll. 
For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(1, 2, 3) + * ).flatten + * // xs == List(1, 2, 3, 1, 2, 3) + * + * val ys = Set( + * List(1, 2, 3), + * List(3, 2, 1) + * ).flatten + * // ys == Set(1, 2, 3) + * }}} + */ + def flatten[B](implicit asTraversable: A => /*<: /*<:= headSize) fail + bs(i) += x + i += 1 + } + if (i != headSize) + fail + } + val bb = genericBuilder[CC[B]] + for (b <- bs) bb += b.result + bb.result() + } +} + diff --git a/tests/pos/Iter3.scala b/tests/pos/Iter3.scala new file mode 100644 index 000000000000..d0ae79f1f213 --- /dev/null +++ b/tests/pos/Iter3.scala @@ -0,0 +1,199 @@ +package dotty1.collections +package immutable + +import annotation.unchecked.uncheckedVariance + +// Like Iter2, but with non-variant types only. +object Iter2 { + + trait Iterator[A] extends IterableOnce[A] { + def hasNext: Boolean + def next: A + def iterator = this + def foreach(f: A => Unit): Unit = ??? + def map[B](f: A => B): Iterator[B] = ??? + def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = ??? + def ++[B >: A](xs: IterableOnce[B]): Iterator[B] = ??? + def drop(n: Int): Iterator[A] = ??? + def indexWhere(p: A => Boolean): Int = { + var i = 0 + while (hasNext) { + if (p(next)) return i + i += 1 + } + -1 + } + def zip[B](that: Iterator[B]): Iterator[(A, B)] = ??? + } + + trait IterableOnce[A] { + def iterator: Iterator[A] + def buildIterator: Iterator[A] = iterator + } + + trait FromIterator[C[X] <: Iterable[X]] { + def fromIterator[B](it: Iterator[B]): C[B] + } + + trait Iterable[IA] extends IterableOnce[IA] with FromIterator[Iterable] + + trait Seq[AA] extends Iterable[AA] with FromIterator[Seq] { + def apply(i: Int): AA + def length: Int + } + + sealed trait List[A] extends Seq[A] with FromIterator[List] { + def isEmpty: Boolean + def head: A + def tail: List[A] + def iterator = new ListIterator[A](this) + def fromIterator[B](it: Iterator[B]): List[B] = it match { + case ListIterator(xs) => xs + case _ => if (it.hasNext) Cons(it.next, fromIterator(it)) else Nil.asInstanceOf[List[B]] + } + def apply(i: Int): A = { + require(!isEmpty) + if (i == 0) head else tail.apply(i - 1) + } + def length: Int = + if (isEmpty) 0 else 1 + tail.length + } + + case class Cons[A](x: A, xs: List[A]) extends List[A] { + def isEmpty = false + def head = x + def tail = xs + } + + case object Nil extends List[Nothing] { + def isEmpty = true + def head = ??? + def tail = ??? 
+ } + + class ArrayBuffer[A] private (initElems: Array[AnyRef], initLen: Int) extends Seq[A] with FromIterator[ArrayBuffer] { + def this() = this(new Array[AnyRef](16), 0) + def this(it: ArrayIterator[A]) = this(it.elems, it.len) + private var elems: Array[AnyRef] = initElems + private var len = 0 + def iterator = + elems.iterator.take(len).asInstanceOf[Iterator[A]] + override def buildIterator = + new ArrayIterator(elems, len).asInstanceOf[Iterator[A]] + def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] = + new ArrayBuffer(ArrayIterator.fromIterator(it)) + def apply(i: Int) = elems(i).asInstanceOf[A] + def length = len + } + + implicit class IterableTransforms[A, C[X] <: Iterable[X]](val c: Iterable[A] & FromIterator[C]) extends AnyVal { + def map[B](f: A => B): C[B] = c.fromIterator(c.buildIterator.map(f)) + def flatMap[B](f: A => IterableOnce[B]): C[B] = c.fromIterator(c.buildIterator.flatMap(f(_).buildIterator)) + def ++[B >: A](xs: IterableOnce[B]): C[B] = c.fromIterator(c.buildIterator ++ xs.buildIterator) + def drop(n: Int): C[A] = c.fromIterator(c.buildIterator.drop(n)) + def head: A = c.iterator.next + def zip[B](xs: IterableOnce[B]): C[(A, B)] = c.fromIterator(c.iterator.zip(xs.iterator)) + } + + implicit class SeqTransforms[SA, C[X] <: Seq[X]](val c: Seq[SA] & FromIterator[C]) extends AnyVal { + def reverse: C[SA] = { + val elems = new Array[AnyRef](c.length) + var i = elems.length + val it = c.iterator + while (it.hasNext) { + i -= 1 + elems(i) = it.next.asInstanceOf[AnyRef] + } + val xzz = c.fromIterator(ArrayIterator[SA](elems, c.length)) + xzz + } + def indexWhere(p: SA => Boolean): Int = c.iterator.indexWhere(p) + } + + case class ListIterator[A](xs: List[A]) extends Iterator[A] { + private[this] var current: List[A] = xs + def hasNext = !current.isEmpty + def next = { val res = current.head; current = current.tail; res } + } + + case class ArrayIterator[A](elems: Array[AnyRef], len: Int) extends Iterator[A] { + import ArrayIterator._ + + private def elem(i: Int) = elems(i).asInstanceOf[A] + + private var cur = 0 + + def hasNext = cur < len + def next = { val res = elem(cur); cur += 1; res } + + override def foreach(f: A => Unit): Unit = + for (i <- 0 until len) f(elem(i)) + + override def map[B](f: A => B): ArrayIterator[B] = { + var mapped = elems + for (i <- 0 until len) { + val x = elem(i) + val y = widen(f(x)) + if (widen(x) ne y) { + if (mapped eq elems) mapped = new Array[AnyRef](len) + mapped(i) = y + } + } + if (mapped eq elems) this.asInstanceOf[ArrayIterator[B]] + else new ArrayIterator(mapped, len) + } + + override def flatMap[B](f: A => IterableOnce[B]): ArrayIterator[B] = + flatten(map(f(_).buildIterator)) + + override def ++[B >: A](that: IterableOnce[B]): ArrayIterator[B] = { + val thatIterator @ ArrayIterator(elems2, len2) = fromIterator(that.iterator) + if (len == 0) thatIterator + else if (len2 == 0) this.asInstanceOf[ArrayIterator[B]] + else { + val resLen = len + len2 + val resElems = new Array[AnyRef](resLen) + Array.copy(elems, 0, resElems, 0, len) + Array.copy(elems2, 0, resElems, len, len2) + new ArrayIterator(resElems, resLen) + } + } + } + + object ArrayIterator { + private def widen(x: Any): AnyRef = x.asInstanceOf[AnyRef] + + def fromIterator[A](it: Iterator[A]): ArrayIterator[A] = it match { + case it: ArrayIterator[A] => it + case _ => + var elems = new Array[AnyRef](32) + var len = 0 + def ensureCapacity() = { + while (len > elems.length) { + val newElems = new Array[AnyRef](elems.length * 2) + Array.copy(elems, 0, newElems, 0, 
elems.length) + elems = newElems + } + } + while (it.hasNext) { + len += 1 + ensureCapacity() + elems(len - 1) = widen(it.next) + } + ArrayIterator(elems, len) + } + + def flatten[A](its: ArrayIterator[Iterator[A]]): ArrayIterator[A] = { + var arrayIts = its.map(fromIterator) + var totalLen = 0 + arrayIts.foreach(totalLen += _.len) + val allElems = new Array[AnyRef](totalLen) + var j = 0 + arrayIts.foreach { it => + Array.copy(it.elems, 0, allElems, j, it.len) + j += it.len + } + new ArrayIterator(allElems, totalLen) + } + } +} diff --git a/tests/pos/Map.scala b/tests/pos/Map.scala new file mode 100644 index 000000000000..5178d5a862cf --- /dev/null +++ b/tests/pos/Map.scala @@ -0,0 +1,194 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + +package scala +package collection +package immutable + +import generic._ + +/** + * A generic trait for immutable maps. Concrete classes have to provide + * functionality for the abstract methods in `Map`: + * + * {{{ + * def get(key: A): Option[B] + * def iterator: Iterator[(A, B)] + * def + [B1 >: B](kv: (A, B1)): Map[A, B1] + * def -(key: A): Map[A, B] + * }}} + * + * @since 1 + */ +trait Map[A, +B] extends Iterable[(A, B)] +// with GenMap[A, B] + with scala.collection.Map[A, B] + with MapLike[A, B, Map[A, B]] { self => + + override def empty: Map[A, B] = Map.empty + + /** Returns this $coll as an immutable map. + * + * A new map will not be built; lazy collections will stay lazy. + */ + @deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.", "2.11.0") + override def toMap[T, U](implicit ev: (A, B) <:< (T, U)): immutable.Map[T, U] = + self.asInstanceOf[immutable.Map[T, U]] + + override def seq: Map[A, B] = this + + /** The same map with a given default function. + * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, d) + + /** The same map with a given default value. + * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, x => d) + + /** Add a key/value pair to this map. 
+ * @param key the key + * @param value the value + * @return A new map with the new binding added to this map + */ + override def updated [B1 >: B](key: A, value: B1): Map[A, B1] + def + [B1 >: B](kv: (A, B1)): Map[A, B1] +} + +/** $factoryInfo + * @define Coll `immutable.Map` + * @define coll immutable map + */ +object Map extends ImmutableMapFactory[Map] { + + /** $mapCanBuildFromInfo */ + implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B] + + def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]] + + class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] { + override def empty = new WithDefault(underlying.empty, d) + override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d) + override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2) + override def - (key: A): WithDefault[A, B] = new WithDefault(underlying - key, d) + override def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, d) + override def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, x => d) + } + + private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable { + override def size: Int = 0 + def get(key: Any): Option[Nothing] = None + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value) + def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2) + def - (key: Any): Map[Any, Nothing] = this + } + + class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + override def size = 1 + def get(key: A): Option[B] = + if (key == key1) Some(value1) else None + def iterator = Iterator((key1, value1)) + override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + if (key == key1) new Map1(key1, value) + else new Map2(key1, value1, key, value) + def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) + def - (key: A): Map[A, B] = + if (key == key1) Map.empty else this + override def foreach[U](f: ((A, B)) => U): Unit = { + f((key1, value1)) + } + } + + class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + override def size = 2 + def get(key: A): Option[B] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + def iterator = Iterator((key1, value1), (key2, value2)) + override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + if (key == key1) new Map2(key1, value, key2, value2) + else if (key == key2) new Map2(key1, value1, key2, value) + else new Map3(key1, value1, key2, value2, key, value) + def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) + def - (key: A): Map[A, B] = + if (key == key1) new Map1(key2, value2) + else if (key == key2) new Map1(key1, value1) + else this + override def foreach[U](f: ((A, B)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + } + + class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + override def size = 3 + def get(key: A): Option[B] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + def iterator = Iterator((key1, value1), 
(key2, value2), (key3, value3)) + override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + if (key == key1) new Map3(key1, value, key2, value2, key3, value3) + else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) + else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) + else new Map4(key1, value1, key2, value2, key3, value3, key, value) + def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) + def - (key: A): Map[A, B] = + if (key == key1) new Map2(key2, value2, key3, value3) + else if (key == key2) new Map2(key1, value1, key3, value3) + else if (key == key3) new Map2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((A, B)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + } + + class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + override def size = 4 + def get(key: A): Option[B] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + def iterator = Iterator((key1, value1), (key2, value2), (key3, value3), (key4, value4)) + override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) + else new HashMap + ((key1, value1), (key2, value2), (key3, value3), (key4, value4), (key, value)) + def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) + def - (key: A): Map[A, B] = + if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((A, B)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + } +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ +abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B] From 1ba3d612bce92b468fa3232a3157c2a49c48d741 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 21 Sep 2015 13:51:36 +0200 Subject: [PATCH 44/44] Fixes to comments suggested by reviewer Fixes suggested by @marter when reviewing previous hk PR. --- src/dotty/tools/dotc/core/TypeApplications.scala | 12 ++++++------ tests/pos/partialApplications.scala | 8 ++++---- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index d7d205be6a4e..927c4fcc5522 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -137,7 +137,7 @@ class TypeApplications(val self: Type) extends AnyVal { def isSafeLambda(implicit ctx: Context): Boolean = LambdaClass(forcing = false).exists - /** Is type `tp` a Lambda with all Arg$ fields fully instantiated? */ + /** Is type `tp` a Lambda with all hk$i fields fully instantiated? 
*/ def isInstantiatedLambda(implicit ctx: Context): Boolean = isSafeLambda && typeParams.isEmpty @@ -211,7 +211,7 @@ class TypeApplications(val self: Type) extends AnyVal { } case tp: RefinedType => val redux = tp.EtaReduce - if (redux.exists) redux.appliedTo(args) // Rewrite ([HK$0] => C[HK$0])(T) to C[T] + if (redux.exists) redux.appliedTo(args) // Rewrite ([hk$0] => C[hk$0])(T) to C[T] else tp.derivedRefinedType( instantiate(tp.parent, original), tp.refinedName, @@ -473,7 +473,7 @@ class TypeApplications(val self: Type) extends AnyVal { * - `bounds` consists of type declarations `type hk$i >: toHK(L) <: toHK(U), * one for each type parameter in `T` with non-trivial bounds L,U. * - `toHK` is a substitution that replaces every bound symbol sym_i by - * `this.Arg$i`. + * `this.hk$i`. * * TypeBounds are lambda abstracting by lambda abstracting their upper bound. * @@ -516,7 +516,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** Convert a type constructor `TC` with type parameters `T1, ..., Tn` to * - * LambdaXYZ { Apply = TC[$hkArg$0, ..., $hkArg$n] } + * LambdaXYZ { Apply = TC[hk$0, ..., hk$n] } * * where XYZ is a corresponds to the variances of the type parameters. */ @@ -617,8 +617,8 @@ class TypeApplications(val self: Type) extends AnyVal { * the type parameters of `B` match one-by-one the variances of `tparams`, * and where the lambda abstracted type * - * LambdaXYZ { type Apply = B[$hkArg$0, ..., $hkArg${n-1}] } - * { type $hkArg$0 = T1; ...; type $hkArg${n-1} = Tn } + * LambdaXYZ { type Apply = B[hk$0, ..., hk${n-1}] } + * { type hk$0 = T1; ...; type hk${n-1} = Tn } * * satisfies predicate `p`. Try base types in the order of their occurrence in `baseClasses`. * A type parameter matches a variance V if it has V as its variance or if V == 0. diff --git a/tests/pos/partialApplications.scala b/tests/pos/partialApplications.scala index 285dc86613cc..696c544e78eb 100644 --- a/tests/pos/partialApplications.scala +++ b/tests/pos/partialApplications.scala @@ -31,13 +31,13 @@ object Test { object Test2 { type Histogram = Map[_, Int] - type StringlyHistogram = Histogram[_ >: String] // error + type StringlyHistogram = Histogram[_ >: String] - val xs: Histogram[String] = Map[String, Int]() // error + val xs: Histogram[String] = Map[String, Int]() - val ys: StringlyHistogram[String] = xs // error + val ys: StringlyHistogram[String] = xs - val zs: StringlyHistogram = xs // error + val zs: StringlyHistogram = xs val xs1 = xs val ys1 = ys
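// A minimal, self-contained sketch of the kind of partial type application that
// tests/pos/partialApplications.scala and the hk$ encoding described above are
// exercising. All names below (IntValued, TakesUnary, UsesIntValued, Demo) are
// illustrative only; they come from neither the compiler sources nor this test
// suite, and the snippet is independent of the patches themselves.
object Demo {
  // A parameterized alias is itself a unary type constructor.
  type IntValued[K] = Map[K, Int]

  // A context that expects a unary constructor F[_]. Passing IntValued here is
  // the situation the eta-expansion logic has to handle: conceptually the
  // argument becomes a type lambda of the form ([hk$0] => Map[hk$0, Int]).
  trait TakesUnary[F[_]] {
    def pass[A](fa: F[A]): F[A] = fa
  }
  object UsesIntValued extends TakesUnary[IntValued]

  // Using the alias both applied (IntValued[String]) and unapplied (above).
  val m: IntValued[String] = Map("one" -> 1)
  val same: IntValued[String] = UsesIntValued.pass(m)
}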