From 6c08fce1fd635d804d7ab34af596e6e475b06b79 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 19 Nov 2022 15:16:57 +0100 Subject: [PATCH 1/2] Copy last version of dotc to test files --- tests/pos-with-compiler-cc/dotc/Run.scala | 3 +- .../dotc/ast/Desugar.scala | 7 +- .../dotc/ast/Positioned.scala | 2 +- .../dotc/ast/TreeInfo.scala | 5 +- .../dotc/ast/TreeTypeMap.scala | 9 +- .../pos-with-compiler-cc/dotc/ast/Trees.scala | 314 ++++++------ tests/pos-with-compiler-cc/dotc/ast/tpd.scala | 3 +- .../pos-with-compiler-cc/dotc/ast/untpd.scala | 11 +- .../dotc/cc/CaptureOps.scala | 23 +- .../dotc/cc/CaptureSet.scala | 20 +- .../dotc/cc/CheckCaptures.scala | 19 +- .../dotc/classpath/DirectoryClassPath.scala | 5 +- .../dotc/classpath/FileUtils.scala | 3 +- .../classpath/VirtualDirectoryClassPath.scala | 3 +- .../dotc/config/Config.scala | 8 + .../dotc/config/Feature.scala | 9 +- .../dotc/config/ScalaSettings.scala | 5 +- .../dotc/config/SourceVersion.scala | 3 +- .../dotc/core/Annotations.scala | 36 +- .../dotc/core/Constraint.scala | 26 +- .../dotc/core/ConstraintHandling.scala | 27 +- .../dotc/core/Contexts.scala | 19 +- .../dotc/core/Decorators.scala | 6 +- .../dotc/core/Definitions.scala | 84 ++- .../dotc/core/Denotations.scala | 12 +- .../dotc/core/Flags.scala | 9 +- .../dotc/core/NameKinds.scala | 4 +- .../dotc/core/Names.scala | 2 +- .../dotc/core/OrderingConstraint.scala | 477 +++++++++++++++--- .../dotc/core/Phases.scala | 2 +- .../dotc/core/Scopes.scala | 4 +- .../dotc/core/StdNames.scala | 1 - .../dotc/core/SymDenotations.scala | 23 +- .../dotc/core/SymbolLoaders.scala | 6 +- .../dotc/core/TypeComparer.scala | 13 +- .../dotc/core/TypeErrors.scala | 5 +- .../dotc/core/TypeOps.scala | 29 +- .../dotc/core/Types.scala | 128 +++-- .../core/classfile/ClassfileConstants.scala | 21 +- .../dotc/core/classfile/ClassfileParser.scala | 14 +- .../dotc/core/tasty/TastyUnpickler.scala | 3 +- .../dotc/core/tasty/TreeUnpickler.scala | 11 +- 
.../dotc/decompiler/IDEDecompilerDriver.scala | 3 +- .../dotc/inlines/InlineReducer.scala | 15 +- .../dotc/inlines/Inliner.scala | 11 +- .../dotc/inlines/Inlines.scala | 5 +- .../dotc/inlines/PrepareInlineable.scala | 3 +- .../dotc/parsing/JavaParsers.scala | 11 +- .../dotc/parsing/Parsers.scala | 34 +- .../dotc/parsing/Scanners.scala | 23 +- .../dotc/printing/Highlighting.scala | 2 +- .../dotc/printing/PlainPrinter.scala | 22 +- .../dotc/printing/Printer.scala | 9 +- .../dotc/printing/RefinedPrinter.scala | 34 +- .../dotc/printing/Texts.scala | 29 +- .../dotc/profile/AsyncHelper.scala | 2 +- .../dotc/profile/ExtendedThreadMxBean.java | 15 +- .../dotc/quoted/Interpreter.scala | 370 ++++++++++++++ tests/pos-with-compiler-cc/dotc/report.scala | 36 +- .../dotc/reporting/Diagnostic.scala | 5 +- .../dotc/reporting/Message.scala | 11 +- .../dotc/reporting/Reporter.scala | 13 +- .../dotc/reporting/messages.scala | 13 +- .../dotc/sbt/ExtractAPI.scala | 3 +- .../dotc/sbt/ExtractDependencies.scala | 6 +- .../dotc/sbt/ThunkHolder.scala | 3 +- .../dotc/semanticdb/ExtractSemanticDB.scala | 4 +- .../dotc/semanticdb/SyntheticsExtractor.scala | 2 +- .../internal/SemanticdbTypeMapper.scala | 5 +- .../dotc/transform/CountOuterAccesses.scala | 2 +- .../dotc/transform/Erasure.scala | 2 +- .../dotc/transform/ForwardDepChecks.scala | 2 +- .../dotc/transform/HoistSuperArgs.scala | 9 +- .../dotc/transform/InterceptedMethods.scala | 2 +- .../dotc/transform/LazyVals.scala | 308 +++++++++-- .../dotc/transform/MacroTransform.scala | 2 +- .../dotc/transform/MegaPhase.scala | 2 +- .../dotc/transform/Memoize.scala | 2 +- .../dotc/transform/Mixin.scala | 5 +- .../dotc/transform/MoveStatics.scala | 3 +- .../dotc/transform/PatternMatcher.scala | 9 +- .../dotc/transform/PostTyper.scala | 5 +- .../transform/RepeatableAnnotations.scala | 3 +- .../dotc/transform/Splicer.scala | 356 +------------ .../dotc/transform/Splicing.scala | 3 +- .../dotc/transform/TreeChecker.scala | 2 +- 
.../dotc/transform/TryCatchPatterns.scala | 2 +- .../dotc/transform/TypeTestsCasts.scala | 3 +- .../dotc/transform/init/Semantic.scala | 4 +- .../dotc/transform/patmat/Space.scala | 1 + .../transform/sjs/ExplicitJSClasses.scala | 7 +- .../dotc/transform/sjs/PrepJSInterop.scala | 2 + .../dotc/typer/Applications.scala | 21 +- .../dotc/typer/Checking.scala | 36 +- .../dotc/typer/CrossVersionChecks.scala | 15 - .../dotc/typer/ErrorReporting.scala | 15 +- .../dotc/typer/Implicits.scala | 9 +- .../dotc/typer/ImportInfo.scala | 5 +- .../dotc/typer/Inferencing.scala | 152 +++--- .../dotc/typer/Namer.scala | 5 +- .../dotc/typer/ProtoTypes.scala | 24 +- .../dotc/typer/ReTyper.scala | 6 +- .../dotc/typer/RefChecks.scala | 6 +- .../dotc/typer/Synthesizer.scala | 7 +- .../dotc/typer/Typer.scala | 110 ++-- .../dotc/util/ReadOnlyMap.scala | 2 +- .../dotc/util/ReadOnlySet.scala | 2 +- .../dotc/util/ReusableInstance.scala | 5 +- .../dotc/util/SimpleIdentityMap.scala | 2 +- .../dotc/util/SimpleIdentitySet.scala | 2 +- .../dotc/util/SourceFile.scala | 5 +- .../dotc/util/SourcePosition.scala | 2 +- .../dotc/util/common.scala | 9 +- 113 files changed, 1994 insertions(+), 1285 deletions(-) create mode 100644 tests/pos-with-compiler-cc/dotc/quoted/Interpreter.scala diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala index 705664177507..f7a08d1640ee 100644 --- a/tests/pos-with-compiler-cc/dotc/Run.scala +++ b/tests/pos-with-compiler-cc/dotc/Run.scala @@ -31,7 +31,6 @@ import java.nio.charset.StandardCharsets import scala.collection.mutable import scala.util.control.NonFatal import scala.io.Codec -import caps.unsafe.unsafeUnbox /** A compiler run. 
Exports various methods to compile source files */ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -271,7 +270,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint Rewrites.writeBack() suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) while (finalizeActions.nonEmpty) { - val action = finalizeActions.remove(0).unsafeUnbox + val action = finalizeActions.remove(0) action() } compiling = false diff --git a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala index ba2c8f5f43e6..1e1db19bcf25 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala @@ -673,7 +673,7 @@ object desugar { else (Nil, Nil) } - var parents1: List[untpd.Tree] = parents // !cc! need explicit type to make capture checking pass + var parents1 = parents if (isEnumCase && parents.isEmpty) parents1 = enumClassTypeRef :: Nil if (isNonEnumCase) @@ -1779,10 +1779,7 @@ object desugar { val elems = segments flatMap { case ts: Thicket => ts.trees.tail case t => Nil - } map { (t: Tree) => t match - // !cc! explicitly typed parameter (t: Tree) is needed since otherwise - // we get an error similar to #16268. (The explicit type constrains the type of `segments` - // which is otherwise List[{*} tree]) + } map { case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." 
case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala case t => t diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala index fd30d441a6ee..d14addb8c9c7 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala @@ -15,7 +15,7 @@ import annotation.internal.sharable /** A base class for things that have positions (currently: modifiers and trees) */ -abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, caps.Pure { +abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable { import Positioned.{ids, nextId, debugId} private var mySpan: Span = _ diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala index ff59a795d818..d17bfd0f7564 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala @@ -14,10 +14,7 @@ import scala.collection.mutable import scala.annotation.tailrec -trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => - - // Note: the <: Type constraint looks necessary (and is needed to make the file compile in dotc). - // But Scalac accepts the program happily without it. Need to find out why. 
+trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala index 5139a46d6352..71998aff9304 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala @@ -7,7 +7,6 @@ import Types._, Contexts._, Flags._ import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant import Decorators._ import dotty.tools.dotc.transform.SymUtils._ -import language.experimental.pureFunctions /** A map that applies three functions and a substitution together to a tree and * makes sure they are coordinated so that the result is well-typed. The functions are @@ -33,8 +32,8 @@ import language.experimental.pureFunctions * set, we would get a data race assertion error. */ class TreeTypeMap( - val typeMap: Type -> Type = IdentityTypeMap, - val treeMap: tpd.Tree -> tpd.Tree = identity[tpd.Tree](_), // !cc! 
need explicit instantiation of default argument + val typeMap: Type => Type = IdentityTypeMap, + val treeMap: tpd.Tree => tpd.Tree = identity _, val oldOwners: List[Symbol] = Nil, val newOwners: List[Symbol] = Nil, val substFrom: List[Symbol] = Nil, @@ -43,8 +42,8 @@ class TreeTypeMap( import tpd._ def copy( - typeMap: Type -> Type, - treeMap: tpd.Tree -> tpd.Tree, + typeMap: Type => Type, + treeMap: tpd.Tree => tpd.Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala index 82b027b0231a..253477c5382c 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala @@ -15,13 +15,12 @@ import config.Printers.overload import annotation.internal.sharable import annotation.unchecked.uncheckedVariance import annotation.constructorOnly +import compiletime.uninitialized import Decorators._ -import annotation.retains -import language.experimental.pureFunctions object Trees { - type Untyped = Nothing + type Untyped = Type | Null /** The total number of created tree nodes, maintained if Stats.enabled */ @sharable var ntrees: Int = 0 @@ -47,36 +46,34 @@ object Trees { * - Type checking an untyped tree should remove all embedded `TypedSplice` * nodes. */ - abstract class Tree[-T >: Untyped](implicit @constructorOnly src: SourceFile) - extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable, caps.Pure { + abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) + extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { if (Stats.enabled) ntrees += 1 /** The type constructor at the root of the tree */ - type ThisTree[T >: Untyped] <: Tree[T] + type ThisTree[T <: Untyped] <: Tree[T] - protected var myTpe: T @uncheckedVariance = _ + protected var myTpe: T @uncheckedVariance = uninitialized /** Destructively set the type of the tree. 
This should be called only when it is known that * it is safe under sharing to do so. One use-case is in the withType method below * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, * where we overwrite with a simplified version of the type itself. */ - private[dotc] def overwriteType(tpe: T): Unit = + private[dotc] def overwriteType(tpe: T @uncheckedVariance): Unit = myTpe = tpe /** The type of the tree. In case of an untyped tree, * an UnAssignedTypeException is thrown. (Overridden by empty trees) */ - final def tpe: T @uncheckedVariance = { - if (myTpe == null) - throw UnAssignedTypeException(this) - myTpe - } + final def tpe: T = + if myTpe == null then throw UnAssignedTypeException(this) + myTpe.uncheckedNN /** Copy `tpe` attribute from tree `from` into this tree, independently * whether it is null or not. - final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = { + final def copyAttr[U <: Untyped](from: Tree[U]): ThisTree[T] = { val t1 = this.withSpan(from.span) val t2 = if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) @@ -133,10 +130,9 @@ object Trees { */ final def hasType: Boolean = myTpe != null - final def typeOpt: Type = myTpe match { + final def typeOpt: Type = myTpe match case tp: Type => tp - case _ => NoType - } + case null => NoType /** The denotation referred to by this tree. 
* Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other @@ -168,7 +164,7 @@ object Trees { def toList: List[Tree[T]] = this :: Nil /** if this tree is the empty tree, the alternative, else this tree */ - inline def orElse[U >: Untyped <: T](inline that: Tree[U]): Tree[U] = + inline def orElse[U >: T <: Untyped](inline that: Tree[U]): Tree[U] = if (this eq genericEmptyTree) that else this /** The number of nodes in this tree */ @@ -219,42 +215,42 @@ object Trees { override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] } - class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException { + class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { override def getMessage: String = s"type of $tree is not assigned" } - type LazyTree[-T >: Untyped] = Tree[T] | Lazy[Tree[T]] - type LazyTreeList[-T >: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] + type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] + type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] // ------ Categories of trees ----------------------------------- /** Instances of this class are trees for which isType is definitely true. * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) */ - trait TypTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TypTree[T] + trait TypTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TypTree[T] override def isType: Boolean = true } /** Instances of this class are trees for which isTerm is definitely true. * Note that some trees have isTerm = true without being TermTrees (e.g. 
Ident, Annotated) */ - trait TermTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TermTree[T] + trait TermTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TermTree[T] override def isTerm: Boolean = true } /** Instances of this class are trees which are not terms but are legal * parts of patterns. */ - trait PatternTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: PatternTree[T] + trait PatternTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: PatternTree[T] override def isPattern: Boolean = true } /** Tree's denotation can be derived from its type */ - abstract class DenotingTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: DenotingTree[T] + abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: DenotingTree[T] override def denot(using Context): Denotation = typeOpt.stripped match case tpe: NamedType => tpe.denot case tpe: ThisType => tpe.cls.denot @@ -264,8 +260,8 @@ object Trees { /** Tree's denot/isType/isTerm properties come from a subtree * identified by `forwardTo`. 
*/ - abstract class ProxyTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: ProxyTree[T] + abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: ProxyTree[T] def forwardTo: Tree[T] override def denot(using Context): Denotation = forwardTo.denot override def isTerm: Boolean = forwardTo.isTerm @@ -273,24 +269,24 @@ object Trees { } /** Tree has a name */ - abstract class NameTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: NameTree[T] + abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: NameTree[T] def name: Name } /** Tree refers by name to a denotation */ - abstract class RefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[-T >: Untyped] <: RefTree[T] + abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + type ThisTree[+T <: Untyped] <: RefTree[T] def qualifier: Tree[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName } /** Tree defines a new symbol */ - trait DefTree[-T >: Untyped] extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: DefTree[T] + trait DefTree[+T <: Untyped] extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: DefTree[T] - private var myMods: untpd.Modifiers | Null = _ + private var myMods: untpd.Modifiers | Null = uninitialized private[dotc] def rawMods: untpd.Modifiers = if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN @@ -315,7 +311,7 @@ object Trees { extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods - sealed trait WithEndMarker[-T >: Untyped]: + sealed trait WithEndMarker[+T <: Untyped]: self: PackageDef[T] | NamedDefTree[T] => import 
WithEndMarker.* @@ -358,9 +354,9 @@ object Trees { end WithEndMarker - abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] with DefTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] <: NamedDefTree[T] + type ThisTree[+T <: Untyped] <: NamedDefTree[T] protected def srcName(using Context): Name = if name == nme.CONSTRUCTOR then nme.this_ @@ -397,8 +393,8 @@ object Trees { * The envelope of a MemberDef contains the whole definition and has its point * on the opening keyword (or the next token after that if keyword is missing). */ - abstract class MemberDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { - type ThisTree[-T >: Untyped] <: MemberDef[T] + abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + type ThisTree[+T <: Untyped] <: MemberDef[T] def rawComment: Option[Comment] = getAttachment(DocComment) @@ -411,40 +407,40 @@ object Trees { } /** A ValDef or DefDef tree */ - abstract class ValOrDefDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { - type ThisTree[-T >: Untyped] <: ValOrDefDef[T] + abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { + type ThisTree[+T <: Untyped] <: ValOrDefDef[T] def name: TermName def tpt: Tree[T] def unforcedRhs: LazyTree[T] = unforced def rhs(using Context): Tree[T] = forceIfLazy } - trait ValOrTypeDef[-T >: Untyped] extends MemberDef[T]: - type ThisTree[-T >: Untyped] <: ValOrTypeDef[T] + trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: + type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] - type ParamClause[T >: Untyped] = List[ValDef[T]] | List[TypeDef[T]] + type ParamClause[T <: Untyped] = List[ValDef[T]] | List[TypeDef[T]] // 
----------- Tree case classes ------------------------------------ /** name */ - case class Ident[-T >: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) + case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Ident[T] + type ThisTree[+T <: Untyped] = Ident[T] def qualifier: Tree[T] = genericEmptyTree def isBackquoted: Boolean = hasAttachment(Backquoted) } - class SearchFailureIdent[-T >: Untyped] private[ast] (name: Name, expl: -> String)(implicit @constructorOnly src: SourceFile) + class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) extends Ident[T](name) { def explanation = expl override def toString: String = s"SearchFailureIdent($explanation)" } /** qualifier.name, or qualifier#name, if qualifier is a type */ - case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) + case class Select[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Select[T] + type ThisTree[+T <: Untyped] = Select[T] override def denot(using Context): Denotation = typeOpt match case ConstantType(_) if ConstFold.foldedUnops.contains(name) => @@ -466,15 +462,15 @@ object Trees { else span } - class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) + class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) extends Select[T](qualifier, name) { override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" } /** qual.this */ - case class This[-T >: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) + case 
class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = This[T] + type ThisTree[+T <: Untyped] = This[T] // Denotation of a This tree is always the underlying class; needs correction for modules. override def denot(using Context): Denotation = typeOpt match { @@ -486,21 +482,21 @@ object Trees { } /** C.super[mix], where qual = C.this */ - case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) + case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Super[T] + type ThisTree[+T <: Untyped] = Super[T] def forwardTo: Tree[T] = qual } - abstract class GenericApply[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] <: GenericApply[T] + abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] <: GenericApply[T] val fun: Tree[T] val args: List[Tree[T]] def forwardTo: Tree[T] = fun } object GenericApply: - def unapply[T >: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match + def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match case tree: GenericApply[T] => Some((tree.fun, tree.args)) case _ => None @@ -511,9 +507,9 @@ object Trees { case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply /** fun(args) */ - case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends 
GenericApply[T] { - type ThisTree[-T >: Untyped] = Apply[T] + type ThisTree[+T <: Untyped] = Apply[T] def setApplyKind(kind: ApplyKind) = putAttachment(untpd.KindOfApply, kind) @@ -527,57 +523,57 @@ object Trees { } /** fun[args] */ - case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends GenericApply[T] { - type ThisTree[-T >: Untyped] = TypeApply[T] + type ThisTree[+T <: Untyped] = TypeApply[T] } /** const */ - case class Literal[-T >: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) + case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Literal[T] + type ThisTree[+T <: Untyped] = Literal[T] } /** new tpt, but no constructor call */ - case class New[-T >: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = New[T] + type ThisTree[+T <: Untyped] = New[T] } /** expr : tpt */ - case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Typed[T] + type ThisTree[+T <: Untyped] = Typed[T] def forwardTo: Tree[T] = expr } /** name = arg, in a parameter list */ - case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) + case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit 
@constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = NamedArg[T] + type ThisTree[+T <: Untyped] = NamedArg[T] } /** name = arg, outside a parameter list */ - case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Assign[T] + type ThisTree[+T <: Untyped] = Assign[T] } /** { stats; expr } */ - case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Block[T] + type ThisTree[+T <: Untyped] = Block[T] override def isType: Boolean = expr.isType override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary } /** if cond then thenp else elsep */ - case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + case class If[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = If[T] + type ThisTree[+T <: Untyped] = If[T] def isInline = false } - class InlineIf[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends If(cond, thenp, elsep) { override def isInline = true override def toString = s"InlineIf($cond, $thenp, $elsep)" @@ -592,33 +588,33 @@ object Trees { * of the closure is a function type, otherwise it is the type * given in 
`tpt`, which must be a SAM type. */ - case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Closure[T] + type ThisTree[+T <: Untyped] = Closure[T] } /** selector match { cases } */ - case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Match[T] + type ThisTree[+T <: Untyped] = Match[T] def isInline = false } - class InlineMatch[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends Match(selector, cases) { override def isInline = true override def toString = s"InlineMatch($selector, $cases)" } /** case pat if guard => body */ - case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = CaseDef[T] + type ThisTree[+T <: Untyped] = CaseDef[T] } /** label[tpt]: { expr } */ - case class Labeled[-T >: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Labeled[+T <: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[-T >: 
Untyped] = Labeled[T] + type ThisTree[+T <: Untyped] = Labeled[T] def name: Name = bind.name } @@ -627,33 +623,33 @@ object Trees { * After program transformations this is not necessarily the enclosing method, because * closures can intervene. */ - case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) + case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Return[T] + type ThisTree[+T <: Untyped] = Return[T] } /** while (cond) { body } */ - case class WhileDo[-T >: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = WhileDo[T] + type ThisTree[+T <: Untyped] = WhileDo[T] } /** try block catch cases finally finalizer */ - case class Try[-T >: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Try[T] + type ThisTree[+T <: Untyped] = Try[T] } /** Seq(elems) * @param tpt The element type of the sequence. 
*/ - case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = SeqLiteral[T] + type ThisTree[+T <: Untyped] = SeqLiteral[T] } /** Array(elems) */ - class JavaSeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends SeqLiteral(elems, elemtpt) { override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" } @@ -674,17 +670,17 @@ object Trees { * different context: `bindings` represent the arguments to the inlined * call, whereas `expansion` represents the body of the inlined function. */ - case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Inlined[T] + type ThisTree[+T <: Untyped] = Inlined[T] override def isTerm = expansion.isTerm override def isType = expansion.isType } /** A type tree that represents an existing or inferred type */ - case class TypeTree[-T >: Untyped]()(implicit @constructorOnly src: SourceFile) + case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = TypeTree[T] + type ThisTree[+T <: Untyped] = TypeTree[T] override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" @@ -695,25 +691,25 @@ object Trees { * - as 
a (result-)type of an inferred ValDef or DefDef. * Every TypeVar is created as the type of one InferredTypeTree. */ - class InferredTypeTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] /** ref.type */ - case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = SingletonTypeTree[T] + type ThisTree[+T <: Untyped] = SingletonTypeTree[T] } /** tpt { refinements } */ - case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = RefinedTypeTree[T] + type ThisTree[+T <: Untyped] = RefinedTypeTree[T] def forwardTo: Tree[T] = tpt } /** tpt[args] */ - case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class AppliedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = AppliedTypeTree[T] + type ThisTree[+T <: Untyped] = AppliedTypeTree[T] def forwardTo: Tree[T] = tpt } @@ -740,40 +736,40 @@ object Trees { * source code written by the user with the trees used by the compiler (for * example, to make "find all references" work in the IDE). 
*/ - case class LambdaTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = LambdaTypeTree[T] + type ThisTree[+T <: Untyped] = LambdaTypeTree[T] } - case class TermLambdaTypeTree[-T >: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TermLambdaTypeTree[T] + type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] } /** [bound] selector match { cases } */ - case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = MatchTypeTree[T] + type ThisTree[+T <: Untyped] = MatchTypeTree[T] } /** => T */ - case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) + case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = ByNameTypeTree[T] + type ThisTree[+T <: Untyped] = ByNameTypeTree[T] } /** >: lo <: hi * >: lo <: hi = alias for RHS of bounded opaque type */ - case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: 
Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TypeBoundsTree[T] + type ThisTree[+T <: Untyped] = TypeBoundsTree[T] } /** name @ body */ - case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = Bind[T] + type ThisTree[+T <: Untyped] = Bind[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName @@ -782,9 +778,9 @@ object Trees { } /** tree_1 | ... | tree_n */ - case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends PatternTree[T] { - type ThisTree[-T >: Untyped] = Alternative[T] + type ThisTree[+T <: Untyped] = Alternative[T] } /** The typed translation of `extractor(patterns)` in a pattern. 
The translation has the following @@ -801,26 +797,26 @@ object Trees { * val result = fun(sel)(implicits) * if (result.isDefined) "match patterns against result" */ - case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = UnApply[T] + type ThisTree[+T <: Untyped] = UnApply[T] def forwardTo = fun } /** mods val name: tpt = rhs */ - case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = ValDef[T] + type ThisTree[+T <: Untyped] = ValDef[T] assert(isEmpty || (tpt ne genericEmptyTree)) def unforced: LazyTree[T] = preRhs protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x } /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ - case class DefDef[-T >: Untyped] private[ast] (name: TermName, - paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class DefDef[+T <: Untyped] private[ast] (name: TermName, + paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T] { - type ThisTree[-T >: Untyped] = DefDef[T] + type ThisTree[+T <: Untyped] = DefDef[T] assert(tpt ne genericEmptyTree) def unforced: LazyTree[T] = preRhs protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x @@ -844,9 
+840,9 @@ object Trees { * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods */ - case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends MemberDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = TypeDef[T] + type ThisTree[+T <: Untyped] = TypeDef[T] /** Is this a definition of a class? */ def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] @@ -859,9 +855,9 @@ object Trees { * if this is of class untpd.DerivingTemplate. * Typed templates only have parents. */ - case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) extends DefTree[T] with WithLazyField[List[Tree[T]]] { - type ThisTree[-T >: Untyped] = Template[T] + type ThisTree[+T <: Untyped] = Template[T] def unforcedBody: LazyTreeList[T] = unforced def unforced: LazyTreeList[T] = preBody protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x @@ -872,9 +868,9 @@ object Trees { } - abstract class ImportOrExport[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: ImportOrExport[T] + type ThisTree[+T <: Untyped] <: ImportOrExport[T] val expr: Tree[T] val selectors: List[untpd.ImportSelector] } @@ -883,36 +879,36 @@ object Trees { * where a selector is either an untyped `Ident`, 
`name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Import[T] + type ThisTree[+T <: Untyped] = Import[T] } /** export expr.selectors * where a selector is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Export[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Export[T] + type ThisTree[+T <: Untyped] = Export[T] } /** package pid { stats } */ - case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] = PackageDef[T] + type ThisTree[+T <: Untyped] = PackageDef[T] def forwardTo: RefTree[T] = pid protected def srcName(using Context): Name = pid.name } /** arg @annot */ - case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] { - type ThisTree[-T >: Untyped] = Annotated[T] + type ThisTree[+T <: Untyped] = Annotated[T] def forwardTo: Tree[T] = arg } - trait WithoutTypeOrPos[-T >: 
Untyped] extends Tree[T] { + trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] override def span: Span = NoSpan override def span_=(span: Span): Unit = {} @@ -923,17 +919,17 @@ object Trees { * The contained trees will be integrated when transformed with * a `transform(List[Tree])` call. */ - case class Thicket[-T >: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends Tree[T] with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] - type ThisTree[-T >: Untyped] = Thicket[T] + type ThisTree[+T <: Untyped] = Thicket[T] - def mapElems(op: Tree[T] => Tree[T] @uncheckedVariance): Thicket[T] = { + def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { val newTrees = trees.mapConserve(op) if (trees eq newTrees) this else - Thicket[T](newTrees)(source).asInstanceOf[this.type] + Thicket[U](newTrees)(source).asInstanceOf[this.type] } override def foreachInThicket(op: Tree[T] => Unit): Unit = @@ -952,12 +948,12 @@ object Trees { mapElems(_.withSpan(span)).asInstanceOf[this.type] } - class EmptyTree[T >: Untyped] extends Thicket(Nil)(NoSource) { + class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { // assert(uniqueId != 1492) override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") } - class EmptyValDef[T >: Untyped] extends ValDef[T]( + class EmptyValDef[T <: Untyped] extends ValDef[T]( nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] setMods(untpd.Modifiers(PrivateLocal)) @@ -968,8 +964,8 @@ object Trees { @sharable val theEmptyTree = new EmptyTree[Type]() @sharable val theEmptyValDef = new EmptyValDef[Type]() - def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] - def 
genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] + def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] + def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] /** Tree that replaces a level 1 splices in pickled (level 0) quotes. * It is only used when picking quotes (will never be in a TASTy file). @@ -980,13 +976,13 @@ object Trees { * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. * @param tpt Type of the hole */ - case class Hole[-T >: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: Hole[T] + case class Hole[+T <: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: Hole[T] override def isTerm: Boolean = isTermHole override def isType: Boolean = !isTermHole } - def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { + def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = remaining match { case Thicket(elems) :: remaining1 => @@ -1039,7 +1035,7 @@ object Trees { // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. - abstract class Instance[T >: Untyped <: Type] { inst => + abstract class Instance[T <: Untyped] { inst => type Tree = Trees.Tree[T] type TypTree = Trees.TypTree[T] @@ -1374,7 +1370,7 @@ object Trees { * innermost enclosing call for which the inlined version is currently * processed. 
*/ - protected def inlineContext(call: Tree)(using Context): Context = ctx + protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx /** The context to use when mapping or accumulating over a tree */ def localCtx(tree: Tree)(using Context): Context @@ -1522,7 +1518,7 @@ object Trees { } } - abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.*) => + abstract class TreeAccumulator[X] { self => // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. def apply(x: X, tree: Tree)(using Context): X diff --git a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala index 1f43daec4d37..52325e36037d 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala @@ -18,7 +18,6 @@ import typer.ConstFold import scala.annotation.tailrec import scala.collection.mutable.ListBuffer -import language.experimental.pureFunctions /** Some creators for typed trees */ object tpd extends Trees.Instance[Type] with TypedTreeInfo { @@ -1455,7 +1454,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * @return The symbols imported. 
*/ def importedSymbols(imp: Import, - selectorPredicate: untpd.ImportSelector -> Boolean = util.common.alwaysTrue) + selectorPredicate: untpd.ImportSelector => Boolean = util.common.alwaysTrue) (using Context): List[Symbol] = imp.selectors.find(selectorPredicate) match case Some(sel) => importedSymbols(imp.expr, sel.name) diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala index eb729d33a091..f72cafd4205d 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala @@ -11,8 +11,6 @@ import util.Spans.Span import annotation.constructorOnly import annotation.internal.sharable import Decorators._ -import annotation.retains -import language.experimental.pureFunctions object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { @@ -44,7 +42,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** mods object name impl */ case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) extends MemberDef { - type ThisTree[-T >: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) } @@ -151,7 +149,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ - case class DependentTypeTree(tp: List[Symbol] -> Type)(implicit @constructorOnly src: SourceFile) extends Tree + case class DependentTypeTree(tp: List[Symbol] => Type)(implicit @constructorOnly src: SourceFile) extends Tree @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { override def 
isEmpty: Boolean = true @@ -371,7 +369,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { // ------ Creation methods for untyped only ----------------- def Ident(name: Name)(implicit src: SourceFile): Ident = new Ident(name) - def SearchFailureIdent(name: Name, explanation: -> String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) + def SearchFailureIdent(name: Name, explanation: => String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) def Select(qualifier: Tree, name: Name)(implicit src: SourceFile): Select = new Select(qualifier, name) def SelectWithSig(qualifier: Tree, name: Name, sig: Signature)(implicit src: SourceFile): Select = new SelectWithSig(qualifier, name, sig) def This(qual: Ident)(implicit src: SourceFile): This = new This(qual) @@ -733,8 +731,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { } } - abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { - self: UntypedTreeAccumulator[X] @retains(caps.*) => + abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { self => override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { case ModuleDef(name, impl) => this(x, impl) diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala index 3dfd1324ae1e..0ede1825e611 100644 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala @@ -185,24 +185,27 @@ extension (tp: Type) case _ => false +extension (cls: ClassSymbol) + + def pureBaseClass(using Context): Option[Symbol] = + cls.baseClasses.find(bc => + defn.pureBaseClasses.contains(bc) + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + extension (sym: Symbol) /** A class is pure if: * - one its base types has an explicitly declared self type with an empty capture 
set * - or it is a value class - * - or it is Nothing or Null + * - or it is an exception + * - or it is one of Nothing, Null, or String */ def isPureClass(using Context): Boolean = sym match case cls: ClassSymbol => - val AnyValClass = defn.AnyValClass - cls.baseClasses.exists(bc => - bc == AnyValClass - || { - val selfType = bc.givenSelfType - selfType.exists && selfType.captureSet.isAlwaysEmpty - }) - || cls == defn.NothingClass - || cls == defn.NullClass + cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) case _ => false diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala index 48ff614f2910..6bf6d7770d8b 100644 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala @@ -16,7 +16,6 @@ import util.{SimpleIdentitySet, Property} import util.common.alwaysTrue import scala.collection.mutable import config.Config.ccAllowUnsoundMaps -import language.experimental.pureFunctions /** A class for capture sets. Capture sets can be constants or variables. * Capture sets support inclusion constraints <:< where <:< is subcapturing. @@ -38,7 +37,7 @@ import language.experimental.pureFunctions * if the mapped function is either a bijection or if it is idempotent * on capture references (c.f. doc comment on `map` below). */ -sealed abstract class CaptureSet extends Showable, caps.Pure: +sealed abstract class CaptureSet extends Showable: import CaptureSet.* /** The elements of this capture set. For capture variables, @@ -223,7 +222,7 @@ sealed abstract class CaptureSet extends Showable, caps.Pure: /** The largest subset (via <:<) of this capture set that only contains elements * for which `p` is true. 
*/ - def filter(p: CaptureRef -> Boolean)(using Context): CaptureSet = + def filter(p: CaptureRef => Boolean)(using Context): CaptureSet = if this.isConst then val elems1 = elems.filter(p) if elems1 == elems then this @@ -327,11 +326,6 @@ object CaptureSet: /** Used as a recursion brake */ @sharable private[dotc] val Pending = Const(SimpleIdentitySet.empty) - /** The empty capture set with a description that says it's the elf type of an - * exception class. - */ - val emptyOfException: CaptureSet.Const = Const(emptySet, "of an exception class") - def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = if elems.isEmpty then empty else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) @@ -378,10 +372,8 @@ object CaptureSet: def isConst = isSolved def isAlwaysEmpty = false - /** A handler to be invoked if the root reference `*` is added to this set - * The handler is pure in the sense that it will only output diagnostics. - */ - var rootAddedHandler: () -> Context ?-> Unit = () => () + /** A handler to be invoked if the root reference `*` is added to this set */ + var rootAddedHandler: () => Context ?=> Unit = () => () var description: String = "" @@ -429,7 +421,7 @@ object CaptureSet: else CompareResult.fail(this) - override def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = + override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = rootAddedHandler = handler super.disallowRootCapability(handler) @@ -621,7 +613,7 @@ object CaptureSet: /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] - (val source: Var, p: CaptureRef -> Boolean)(using @constructorOnly ctx: Context) + (val source: Var, p: CaptureRef => Boolean)(using @constructorOnly ctx: Context) extends DerivedVar(source.elems.filter(p)): override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = diff --git 
a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala index 97f8e1eea405..899914e872c8 100644 --- a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala +++ b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala @@ -21,7 +21,6 @@ import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} import StdNames.nme import NameKinds.DefaultGetterName import reporting.trace -import language.experimental.pureFunctions /** The capture checker */ object CheckCaptures: @@ -517,8 +516,10 @@ class CheckCaptures extends Recheck, SymTransformer: for param <- cls.paramGetters do if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) - if cls.derivesFrom(defn.ThrowableClass) then - checkSubset(thisSet, CaptureSet.emptyOfException, tree.srcPos) + for pureBase <- cls.pureBaseClass do + checkSubset(thisSet, + CaptureSet.empty.withDescription(i"of pure base class $pureBase"), + tree.srcPos) super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -720,21 +721,20 @@ class CheckCaptures extends Recheck, SymTransformer: * the innermost capturing type. The outer capture annotations can be * reconstructed with the returned function. */ - def destructCapturingType(tp: Type, reconstruct: Type -> Type = (x: Type) => x) // !cc! 
need monomorphic default argument - : (Type, CaptureSet, Boolean, Type -> Type) = + def destructCapturingType(tp: Type, reconstruct: Type => Type = x => x): ((Type, CaptureSet, Boolean), Type => Type) = tp.dealias match case tp @ CapturingType(parent, cs) => if parent.dealias.isCapturingType then destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) else - (parent, cs, tp.isBoxed, reconstruct) + ((parent, cs, tp.isBoxed), reconstruct) case actual => - (actual, CaptureSet(), false, reconstruct) + ((actual, CaptureSet(), false), reconstruct) def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { if expected.isInstanceOf[WildcardType] then actual else - val (parent, cs, actualIsBoxed, recon: (Type -> Type)) = destructCapturingType(actual) + val ((parent, cs, actualIsBoxed), recon) = destructCapturingType(actual) val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing val insertBox = needsAdaptation && covariant != actualIsBoxed @@ -880,6 +880,7 @@ class CheckCaptures extends Recheck, SymTransformer: * - Check that externally visible `val`s or `def`s have empty capture sets. If not, * suggest an explicit type. This is so that separate compilation (where external * symbols have empty capture sets) gives the same results as joint compilation. + * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. */ def postCheck(unit: tpd.Tree)(using Context): Unit = unit.foreachSubTree { @@ -935,7 +936,7 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => } if !ctx.reporter.errorsReported then - // We dont report errors hre if previous errors were reported, because other + // We dont report errors here if previous errors were reported, because other // errors often result in bad applied types, but flagging these bad types gives // often worse error messages than the original errors. 
val checkApplied = new TreeTraverser: diff --git a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala index a5678970411b..7f20d7c7d9ea 100644 --- a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala +++ b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala @@ -17,7 +17,6 @@ import PlainFile.toPlainFile import scala.jdk.CollectionConverters._ import scala.collection.immutable.ArraySeq import scala.util.control.NonFatal -import language.experimental.pureFunctions /** * A trait allowing to look for classpath entries in directories. It provides common logic for @@ -33,7 +32,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientCla protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] protected def getSubDir(dirName: String): Option[F] - protected def listChildren(dir: F, filter: Option[F -> Boolean] = (None: Option[F -> Boolean])): Array[F] // !cc! 
need explicit typing of default argument + protected def listChildren(dir: F, filter: Option[F => Boolean] = None): Array[F] protected def getName(f: F): String protected def toAbstractFile(f: F): AbstractFile protected def isPackage(f: F): Boolean @@ -91,7 +90,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo if (packageDir.exists && packageDir.isDirectory) Some(packageDir) else None } - protected def listChildren(dir: JFile, filter: Option[JFile -> Boolean]): Array[JFile] = { + protected def listChildren(dir: JFile, filter: Option[JFile => Boolean]): Array[JFile] = { val listing = filter match { case Some(f) => dir.listFiles(mkFileFilter(f)) case None => dir.listFiles() diff --git a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala index 0f5ac16b40bf..d6fa6fb78d07 100644 --- a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala +++ b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala @@ -9,7 +9,6 @@ import scala.language.unsafeNulls import java.io.{File => JFile, FileFilter} import java.net.URL import dotty.tools.io.AbstractFile -import language.experimental.pureFunctions /** * Common methods related to Java files and abstract files used in the context of classpath @@ -79,7 +78,7 @@ object FileUtils { def mayBeValidPackage(dirName: String): Boolean = (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') - def mkFileFilter(f: JFile -> Boolean): FileFilter = new FileFilter { + def mkFileFilter(f: JFile => Boolean): FileFilter = new FileFilter { def accept(pathname: JFile): Boolean = f(pathname) } } diff --git a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala index ac80d543b539..0cb0ba59c52e 100644 --- a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala +++ 
b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala @@ -8,7 +8,6 @@ import FileUtils._ import java.net.URL import dotty.tools.io.ClassPath -import language.experimental.pureFunctions case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { type F = AbstractFile @@ -29,7 +28,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi protected def emptyFiles: Array[AbstractFile] = Array.empty protected def getSubDir(packageDirName: String): Option[AbstractFile] = Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) - protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile -> Boolean]): Array[F] = filter match { + protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match { case Some(f) => dir.iterator.filter(f).toArray case _ => dir.toArray } diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala index 17e3ec352e7c..cbd50429492e 100644 --- a/tests/pos-with-compiler-cc/dotc/config/Config.scala +++ b/tests/pos-with-compiler-cc/dotc/config/Config.scala @@ -22,6 +22,11 @@ object Config { */ inline val checkConstraintsNonCyclic = false + /** Check that reverse dependencies in constraints are correct and complete. + * Can also be enabled using -Ycheck-constraint-deps. + */ + inline val checkConstraintDeps = false + /** Check that each constraint resulting from a subtype test * is satisfiable. Also check that a type variable instantiation * satisfies its constraints. @@ -184,6 +189,9 @@ object Config { /** If set, prints a trace of all symbol completions */ inline val showCompletions = false + /** If set, show variable/variable reverse dependencies when printing constraints. 
*/ + inline val showConstraintDeps = true + /** If set, method results that are context functions are flattened by adding * the parameters of the context function results to the methods themselves. * This is an optimization that reduces closure allocations. diff --git a/tests/pos-with-compiler-cc/dotc/config/Feature.scala b/tests/pos-with-compiler-cc/dotc/config/Feature.scala index c482bbe0911f..e7117f542384 100644 --- a/tests/pos-with-compiler-cc/dotc/config/Feature.scala +++ b/tests/pos-with-compiler-cc/dotc/config/Feature.scala @@ -10,7 +10,6 @@ import util.{SrcPos, NoSourcePosition} import SourceVersion._ import reporting.Message import NameKinds.QualifiedName -import language.experimental.pureFunctions object Feature: @@ -110,7 +109,11 @@ object Feature: case Some(v) => v case none => sourceVersionSetting - def migrateTo3(using Context): Boolean = sourceVersion == `3.0-migration` + def migrateTo3(using Context): Boolean = + sourceVersion == `3.0-migration` + + def fewerBracesEnabled(using Context) = + sourceVersion.isAtLeast(`3.3`) || enabled(fewerBraces) /** If current source migrates to `version`, issue given warning message * and return `true`, otherwise return `false`. 
@@ -124,7 +127,7 @@ object Feature: else false - def checkExperimentalFeature(which: String, srcPos: SrcPos, note: -> String = "")(using Context) = + def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = if !isExperimentalEnabled then report.error(i"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala index a2dba94ad9fc..f7743dddda4e 100644 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala @@ -17,7 +17,7 @@ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` private val minTargetVersion = 8 - private val maxTargetVersion = 19 + private val maxTargetVersion = 20 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) @@ -64,7 +64,6 @@ trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSetti val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) - val YindentColons: Setting[Boolean] = BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)") /* Decompiler settings */ val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) @@ -309,6 +308,7 @@ private sealed trait YSettings: val YforceSbtPhases: Setting[Boolean] = 
BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") @@ -330,6 +330,7 @@ private sealed trait YSettings: val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") + val YlightweightLazyVals: Setting[Boolean] = BooleanSetting("-Ylightweight-lazy-vals", "Use experimental lightweight implementation of lazy vals") /** Area-specific debug output */ val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at 
a lower level.") diff --git a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala index 545e2f2d9b42..4b9b1b247856 100644 --- a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala +++ b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala @@ -8,6 +8,7 @@ import util.Property enum SourceVersion: case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. case `3.2-migration`, `3.2` + case `3.3-migration`, `3.3` case `future-migration`, `future` val isMigrating: Boolean = toString.endsWith("-migration") @@ -18,7 +19,7 @@ enum SourceVersion: def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.2` + def defaultSourceVersion = `3.3` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala index d33b1d39942e..aa8ead280bbf 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala @@ -8,7 +8,6 @@ import util.Spans.Span import printing.{Showable, Printer} import printing.Texts.Text import annotation.internal.sharable -import language.experimental.pureFunctions object Annotations { @@ -16,8 +15,7 @@ object Annotations { if (tree.symbol.isConstructor) tree.symbol.owner else tree.tpe.typeSymbol - abstract class Annotation extends Showable, caps.Pure { - + abstract class Annotation extends Showable { def tree(using Context): Tree def symbol(using Context): Symbol = annotClass(tree) @@ -98,11 +96,11 @@ object Annotations { def tree(using Context): Tree = t abstract class LazyAnnotation extends Annotation { - protected var 
mySym: Symbol | (Context ?-> Symbol) | Null + protected var mySym: Symbol | (Context ?=> Symbol) | Null override def symbol(using parentCtx: Context): Symbol = assert(mySym != null) mySym match { - case symFn: (Context ?-> Symbol) @unchecked => + case symFn: (Context ?=> Symbol) @unchecked => mySym = null mySym = atPhaseBeforeTransforms(symFn) // We should always produce the same annotation tree, no matter when the @@ -116,11 +114,11 @@ object Annotations { } mySym.asInstanceOf[Symbol] - protected var myTree: Tree | (Context ?-> Tree) | Null + protected var myTree: Tree | (Context ?=> Tree) | Null def tree(using Context): Tree = assert(myTree != null) myTree match { - case treeFn: (Context ?-> Tree) @unchecked => + case treeFn: (Context ?=> Tree) @unchecked => myTree = null myTree = atPhaseBeforeTransforms(treeFn) case _ => @@ -131,10 +129,10 @@ object Annotations { override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] } - class DeferredSymAndTree(symFn: Context ?-> Symbol, treeFn: Context ?-> Tree) + class DeferredSymAndTree(symFn: Context ?=> Symbol, treeFn: Context ?=> Tree) extends LazyAnnotation: - protected var mySym: Symbol | (Context ?-> Symbol) | Null = ctx ?=> symFn(using ctx) - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) + protected var mySym: Symbol | (Context ?=> Symbol) | Null = ctx ?=> symFn(using ctx) + protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) /** An annotation indicating the body of a right-hand side, * typically of an inline method. 
Treated specially in @@ -155,11 +153,11 @@ object Annotations { abstract class LazyBodyAnnotation extends BodyAnnotation { // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait - protected var myTree: Tree | (Context ?-> Tree) | Null + protected var myTree: Tree | (Context ?=> Tree) | Null def tree(using Context): Tree = assert(myTree != null) myTree match { - case treeFn: (Context ?-> Tree) @unchecked => + case treeFn: (Context ?=> Tree) @unchecked => myTree = null myTree = atPhaseBeforeTransforms(treeFn) case _ => @@ -171,9 +169,9 @@ object Annotations { } object LazyBodyAnnotation { - def apply(bodyFn: Context ?-> Tree): LazyBodyAnnotation = + def apply(bodyFn: Context ?=> Tree): LazyBodyAnnotation = new LazyBodyAnnotation: - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> bodyFn(using ctx) + protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> bodyFn(using ctx) } object Annotation { @@ -202,21 +200,21 @@ object Annotations { apply(New(atp, args)) /** Create an annotation where the tree is computed lazily. */ - def deferred(sym: Symbol)(treeFn: Context ?-> Tree): Annotation = + def deferred(sym: Symbol)(treeFn: Context ?=> Tree): Annotation = new LazyAnnotation { - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) - protected var mySym: Symbol | (Context ?-> Symbol) | Null = sym + protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) + protected var mySym: Symbol | (Context ?=> Symbol) | Null = sym } /** Create an annotation where the symbol and the tree are computed lazily. 
*/ - def deferredSymAndTree(symFn: Context ?-> Symbol)(treeFn: Context ?-> Tree): Annotation = + def deferredSymAndTree(symFn: Context ?=> Symbol)(treeFn: Context ?=> Tree): Annotation = DeferredSymAndTree(symFn, treeFn) /** Extractor for child annotations */ object Child { /** A deferred annotation to the result of a given child computation */ - def later(delayedSym: Context ?-> Symbol, span: Span)(using Context): Annotation = { + def later(delayedSym: Context ?=> Symbol, span: Span)(using Context): Annotation = { def makeChildLater(using Context) = { val sym = delayedSym New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) diff --git a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala index 07b6e71cdcc9..fb87aed77c41 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala @@ -4,6 +4,7 @@ package core import Types._, Contexts._ import printing.Showable +import util.{SimpleIdentitySet, SimpleIdentityMap} /** Constraint over undetermined type parameters. Constraints are built * over values of the following types: @@ -128,7 +129,7 @@ abstract class Constraint extends Showable { /** Is `tv` marked as hard in the constraint? */ def isHard(tv: TypeVar): Boolean - + /** The same as this constraint, but with `tv` marked as hard. */ def withHard(tv: TypeVar)(using Context): This @@ -165,15 +166,32 @@ abstract class Constraint extends Showable { */ def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean - /** Check that no constrained parameter contains itself as a bound */ - def checkNonCyclic()(using Context): this.type - /** Does `param` occur at the toplevel in `tp` ? * Toplevel means: the type itself or a factor in some * combination of `&` or `|` types. 
*/ def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean + /** A string that shows the reverse dependencies maintained by this constraint + * (coDeps and contraDeps for OrderingConstraints). + */ + def depsToString(using Context): String + + /** Does the constraint restricted to variables outside `except` depend on `tv` + * in the given direction `co`? + * @param `co` If true, test whether the constraint would change if the variable is made larger + * otherwise, test whether the constraint would change if the variable is made smaller. + */ + def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean + + /** Depending on Config settings: + * - Under `checkConstraintsNonCyclic`, check that no constrained + * parameter contains itself as a bound. + * - Under `checkConstraintDeps`, check that reverse dependencies in + * constraints are correct and complete. + */ + def checkWellFormed()(using Context): this.type + /** Check that constraint only refers to TypeParamRefs bound by itself */ def checkClosed()(using Context): Unit diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala index a3d8cabba971..4ed01a5fbe0d 100644 --- a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala +++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala @@ -556,6 +556,13 @@ trait ConstraintHandling { inst end approximation + private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match + case AndType(tp1, tp2) => + isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) + case _ => + val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol + cls.isTransparentClass && (!traitOnly || cls.is(Trait)) + /** If `tp` is an intersection such that some operands are transparent trait instances + * and others are not, replace as many transparent trait instances as possible with Any + * as long as the result is still a 
subtype of `bound`. But fall back to the @@ -568,18 +575,17 @@ trait ConstraintHandling { var dropped: List[Type] = List() // the types dropped so far, last one on top def dropOneTransparentTrait(tp: Type): Type = - val tpd = tp.dealias - if tpd.typeSymbol.isTransparentTrait && !tpd.isLambdaSub && !kept.contains(tpd) then - dropped = tpd :: dropped + if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then + dropped = tp :: dropped defn.AnyType - else tpd match + else tp match case AndType(tp1, tp2) => val tp1w = dropOneTransparentTrait(tp1) if tp1w ne tp1 then tp1w & tp2 else val tp2w = dropOneTransparentTrait(tp2) if tp2w ne tp2 then tp1 & tp2w - else tpd + else tp case _ => tp @@ -654,7 +660,16 @@ trait ConstraintHandling { val wideInst = if isSingleton(bound) then inst - else dropTransparentTraits(widenIrreducible(widenOr(widenSingle(inst))), bound) + else + val widenedFromSingle = widenSingle(inst) + val widenedFromUnion = widenOr(widenedFromSingle) + val widened = + if (widenedFromUnion ne widenedFromSingle) && isTransparent(widenedFromUnion, traitOnly = false) then + widenedFromSingle + else + dropTransparentTraits(widenedFromUnion, bound) + widenIrreducible(widened) + wideInst match case wideInst: TypeRef if wideInst.symbol.is(Module) => TermRef(wideInst.prefix, wideInst.symbol.sourceModule) diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala index 68620c6d3fe7..a6c1a24ebf96 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala @@ -39,13 +39,12 @@ import xsbti.AnalysisCallback import plugins._ import java.util.concurrent.atomic.AtomicInteger import java.nio.file.InvalidPathException -import language.experimental.pureFunctions object Contexts { private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() - 
private val (printerFnLoc, store3) = store2.newLocation[Context -> Printer](new RefinedPrinter(_)) + private val (printerFnLoc, store3) = store2.newLocation[Context => Printer](new RefinedPrinter(_)) private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() private val (runLoc, store6) = store5.newLocation[Run | Null]() @@ -157,9 +156,9 @@ object Contexts { final def owner: Symbol = _owner /** The current tree */ - private var _tree: Tree[? >: Untyped]= _ - protected def tree_=(tree: Tree[? >: Untyped]): Unit = _tree = tree - final def tree: Tree[? >: Untyped] = _tree + private var _tree: Tree[?]= _ + protected def tree_=(tree: Tree[?]): Unit = _tree = tree + final def tree: Tree[?] = _tree /** The current scope */ private var _scope: Scope = _ @@ -212,7 +211,7 @@ object Contexts { def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) /** The current plain printer */ - def printerFn: Context -> Printer = store(printerFnLoc) + def printerFn: Context => Printer = store(printerFnLoc) /** A function creating a printer */ def printer: Printer = @@ -276,7 +275,7 @@ object Contexts { def nestingLevel: Int = effectiveScope.nestingLevel /** Sourcefile corresponding to given abstract file, memoized */ - def getSource(file: AbstractFile, codec: -> Codec = Codec(settings.encoding.value)) = { + def getSource(file: AbstractFile, codec: => Codec = Codec(settings.encoding.value)) = { util.Stats.record("Context.getSource") base.sources.getOrElseUpdate(file, SourceFile(file, codec)) } @@ -470,7 +469,7 @@ object Contexts { } /** The context of expression `expr` seen as a member of a statement sequence */ - def exprContext(stat: Tree[? 
>: Untyped], exprOwner: Symbol): Context = + def exprContext(stat: Tree[?], exprOwner: Symbol): Context = if (exprOwner == this.owner) this else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext else fresh.setOwner(exprOwner) @@ -593,7 +592,7 @@ object Contexts { assert(owner != NoSymbol) this.owner = owner this - def setTree(tree: Tree[? >: Untyped]): this.type = + def setTree(tree: Tree[?]): this.type = util.Stats.record("Context.setTree") this.tree = tree this @@ -637,7 +636,7 @@ object Contexts { def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) - def setPrinterFn(printer: Context -> Printer): this.type = updateStore(printerFnLoc, printer) + def setPrinterFn(printer: Context => Printer): this.type = updateStore(printerFnLoc, printer) def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, settingsState) def setRun(run: Run | Null): this.type = updateStore(runLoc, run) def setProfiler(profiler: Profiler): this.type = updateStore(profilerLoc, profiler) diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala index 679e22b48c9e..54faf9a41177 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala @@ -10,7 +10,6 @@ import Contexts._, Names._, Phases._, Symbols._ import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ import transform.MegaPhase import reporting.{Message, NoExplanation} -import language.experimental.pureFunctions /** This object provides useful implicit decorators for types defined elsewhere */ object Decorators { @@ -59,7 +58,7 @@ object Decorators { padding + s.replace("\n", "\n" + padding) end extension - extension (str: -> String) + extension (str: => String) def toMessage: 
Message = reporting.NoExplanation(str) /** Implements a findSymbol method on iterators of Symbols that @@ -275,6 +274,9 @@ object Decorators { s"[cannot display due to $msg, raw string = $x]" case _ => String.valueOf(x).nn + /** Returns the simple class name of `x`. */ + def className: String = getClass.getSimpleName.nn + extension [T](x: T) def assertingErrorsReported(using Context): T = { assert(ctx.reporter.errorsReported) diff --git a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala index e68dc9102961..b43857b7d28c 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala @@ -19,7 +19,6 @@ import Symbols.requiredModuleRef import cc.{CapturingType, CaptureSet, EventuallyCapturingType} import scala.annotation.tailrec -import language.experimental.pureFunctions object Definitions { @@ -71,7 +70,7 @@ class Definitions { // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only // implemented in Dotty and not in Scala 2. // See . 
- private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: -> Seq[Type]): ClassSymbol = { + private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = { val completer = new LazyType { def complete(denot: SymDenotation)(using Context): Unit = { val cls = denot.asClass.classSymbol @@ -183,7 +182,7 @@ class Definitions { tl => op(tl.paramRefs(0), tl.paramRefs(1)))) private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, - resultTypeFn: PolyType -> Type, + resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags, bounds: TypeBounds = TypeBounds.empty, useCompleter: Boolean = false) = { @@ -200,7 +199,7 @@ class Definitions { enterMethod(cls, name, info, flags) } - private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType -> Type, flags: FlagSet) = + private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) = enterPolyMethod(cls, name, 1, resultTypeFn, flags) private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = { @@ -970,7 +969,6 @@ class Definitions { // Annotation base classes @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") - @tu lazy val ClassfileAnnotationClass: ClassSymbol = requiredClass("scala.annotation.ClassfileAnnotation") @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") @@ -1350,6 +1348,15 @@ class Definitions { @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + /** Base classes that are assumed to be pure for the purposes of capture checking. + * Every class inheriting from a pure baseclass is pure. 
+ */ + @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) + + /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking. + */ + @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) + + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) @@ -1830,20 +1837,53 @@ class Definitions { def isInfix(sym: Symbol)(using Context): Boolean = (sym eq Object_eq) || (sym eq Object_ne) - @tu lazy val assumedTransparentTraits = - Set[Symbol](ComparableClass, ProductClass, SerializableClass, - // add these for now, until we had a chance to retrofit 2.13 stdlib - // we should do a more through sweep through it then. - requiredClass("scala.collection.SortedOps"), - requiredClass("scala.collection.StrictOptimizedSortedSetOps"), - requiredClass("scala.collection.generic.DefaultSerializable"), - requiredClass("scala.collection.generic.IsIterable"), - requiredClass("scala.collection.generic.IsIterableOnce"), - requiredClass("scala.collection.generic.IsMap"), - requiredClass("scala.collection.generic.IsSeq"), - requiredClass("scala.collection.generic.Subtractable"), - requiredClass("scala.collection.immutable.StrictOptimizedSeqOps") - ) + @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = + // add these for now, until we had a chance to retrofit 2.13 stdlib + // we should do a more thorough sweep through it then.
+ val strs = Map( + "Any" -> Set("scala"), + "AnyVal" -> Set("scala"), + "Matchable" -> Set("scala"), + "Product" -> Set("scala"), + "Object" -> Set("java.lang"), + "Comparable" -> Set("java.lang"), + "Serializable" -> Set("java.io"), + "BitSetOps" -> Set("scala.collection"), + "IndexedSeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "IterableOnceOps" -> Set("scala.collection"), + "IterableOps" -> Set("scala.collection"), + "LinearSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "MapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedMapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedOps" -> Set("scala.collection"), + "SortedSetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "StrictOptimizedIterableOps" -> Set("scala.collection"), + "StrictOptimizedLinearSeqOps" -> Set("scala.collection"), + "StrictOptimizedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "ArrayDequeOps" -> Set("scala.collection.mutable"), + "DefaultSerializable" -> Set("scala.collection.generic"), + "IsIterable" -> Set("scala.collection.generic"), + "IsIterableLowPriority" -> Set("scala.collection.generic"), + "IsIterableOnce" -> Set("scala.collection.generic"), + "IsIterableOnceLowPriority" -> Set("scala.collection.generic"), + "IsMap" -> 
Set("scala.collection.generic"), + "IsSeq" -> Set("scala.collection.generic")) + strs.map { case (simple, pkgs) => ( + simple.toTypeName, + pkgs.map(pkg => staticRef(pkg.toTermName, isPackage = true).symbol.moduleClass) + ) + } + + def isAssumedTransparent(sym: Symbol): Boolean = + assumedTransparentNames.get(sym.name) match + case Some(pkgs) => pkgs.contains(sym.owner) + case none => false // ----- primitive value class machinery ------------------------------------------ @@ -1993,6 +2033,12 @@ class Definitions { addSyntheticSymbolsComments } + /** Definitions used in Lazy Vals implementation */ + val LazyValsModuleName = "scala.runtime.LazyVals" + @tu lazy val LazyValsModule = requiredModule(LazyValsModuleName) + @tu lazy val LazyValsWaitingState = requiredClass(s"$LazyValsModuleName.Waiting") + @tu lazy val LazyValsControlState = requiredClass(s"$LazyValsModuleName.LazyValControlState") + def addSyntheticSymbolsComments(using Context): Unit = def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala index 5d99118e56af..f267e6c85e03 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala @@ -23,7 +23,6 @@ import config.Printers.overload import util.common._ import typer.ProtoTypes.NoViewsAllowed import collection.mutable.ListBuffer -import language.experimental.pureFunctions /** Denotations represent the meaning of symbols and named types. * The following diagram shows how the principal types of denotations @@ -76,7 +75,7 @@ object Denotations { /** A PreDenotation represents a group of single denotations or a single multi-denotation * It is used as an optimization to avoid forming MultiDenotations too eagerly. 
*/ - abstract class PreDenotation extends caps.Pure { + abstract class PreDenotation { /** A denotation in the group exists */ def exists: Boolean @@ -1327,10 +1326,7 @@ object Denotations { } else owner } - def recur( - path: Name, - wrap: TermName -> Name = identity[Name] // !cc! default argument needs to be instantiated, error if [Name] is dropped - ): Denotation = path match { + def recur(path: Name, wrap: TermName => Name = identity): Denotation = path match { case path: TypeName => recur(path.toTermName, n => n.toTypeName) case ModuleClassName(underlying) => @@ -1340,7 +1336,7 @@ object Denotations { case qn @ AnyQualifiedName(prefix, _) => recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) case path: SimpleName => - def recurSimple(len: Int, wrap: TermName -> Name): Denotation = { + def recurSimple(len: Int, wrap: TermName => Name): Denotation = { val point = path.lastIndexOf('.', len - 1) val selector = wrap(path.slice(point + 1, len).asTermName) val prefix = @@ -1368,7 +1364,7 @@ object Denotations { NoSymbol /** An exception for accessing symbols that are no longer valid in current run */ - class StaleSymbol(msg: -> String) extends Exception { + class StaleSymbol(msg: => String) extends Exception { util.Stats.record("stale symbol") override def getMessage(): String = msg } diff --git a/tests/pos-with-compiler-cc/dotc/core/Flags.scala b/tests/pos-with-compiler-cc/dotc/core/Flags.scala index 8bf65ed8288f..f23dce020f10 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Flags.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Flags.scala @@ -350,14 +350,14 @@ object Flags { /** Symbol is a method which should be marked ACC_SYNCHRONIZED */ val (_, Synchronized @ _, _) = newFlags(36, "") - /** Symbol is a Java-style varargs method */ - val (_, JavaVarargs @ _, _) = newFlags(37, "") + /** Symbol is a Java-style varargs method / a Java annotation */ + val (_, JavaVarargs @ _, JavaAnnotation @ _) = newFlags(37, "", "") /** Symbol is a Java default method */ val 
(_, DefaultMethod @ _, _) = newFlags(38, "") /** Symbol is a transparent inline method or trait */ - val (Transparent @ _, _, _) = newFlags(39, "transparent") + val (Transparent @ _, _, TransparentType @ _) = newFlags(39, "transparent") /** Symbol is an enum class or enum case (if used with case) */ val (Enum @ _, EnumVal @ _, _) = newFlags(40, "enum") @@ -477,7 +477,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method @@ -609,5 +609,4 @@ object Flags { val SyntheticParam: FlagSet = Synthetic | Param val SyntheticTermParam: FlagSet = Synthetic | TermParam val SyntheticTypeParam: FlagSet = Synthetic | TypeParam - val TransparentTrait: FlagSet = Trait | Transparent } diff --git a/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala b/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala index 2ed9a17b9f7e..f71c16e82b70 100644 --- a/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala +++ b/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala @@ -23,14 +23,14 @@ object NameKinds { @sharable private val uniqueNameKinds = util.HashMap[String, UniqueNameKind]() /** A class for the info stored in a derived name */ - abstract class NameInfo extends caps.Pure { + abstract class NameInfo { def kind: NameKind def mkString(underlying: TermName): String def map(f: SimpleName => SimpleName): NameInfo = this } /** An abstract base class of classes that define the kind of a derived name info */ - abstract class NameKind(val tag: Int) extends caps.Pure { self => + abstract class NameKind(val tag: Int) { self => /** The info class defined by this kind */ type ThisInfo <: Info diff --git a/tests/pos-with-compiler-cc/dotc/core/Names.scala 
b/tests/pos-with-compiler-cc/dotc/core/Names.scala index 3c3c04147de6..f13c3a184bf9 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Names.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Names.scala @@ -30,7 +30,7 @@ object Names { * in a name table. A derived term name adds a tag, and possibly a number * or a further simple name to some other name. */ - abstract class Name extends Designator, Showable, caps.Pure derives CanEqual { + abstract class Name extends Designator, Showable derives CanEqual { /** A type for names of the same kind as this name */ type ThisName <: Name diff --git a/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala b/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala index 961d106a14c8..ac6cb78f9e91 100644 --- a/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala +++ b/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala @@ -13,31 +13,37 @@ import reflect.ClassTag import annotation.tailrec import annotation.internal.sharable import cc.{CapturingType, derivedCapturingType} -import caps.unsafe.unsafeUnbox object OrderingConstraint { - type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] + /** If true, use reverse dependencies in `replace` to avoid checking the bounds + * of all parameters in the constraint. This can speed things up, but there are some + * rare corner cases where reverse dependencies miss a parameter. Specifically, + * if a constraint contains a free reference to TypeParam P and afterwards the + * same P is added as a bound variable to the constraint, a backwards link would + * then become necessary at this point but is missing. This causes two CB projects + * to fail when reverse dependencies are checked (parboiled2 and perspective). + * In these rare cases `replace` could behave differently when optimized. However, + * no deviation was found in the two projects. It is not clear what the "right" + * behavior of `replace` should be in these cases. 
Normally, PolyTypes added + * to constraints are supposed to be fresh, so that would mean that the behavior + * with optimizeReplace = true would be correct. But the previous behavior without + * reverse dependency checking corresponds to `optimizeReplace = false`. This behavior + * makes sense if we assume that the added polytype was simply added too late, so we + * want to establish the link between newly bound variable and pre-existing reference. + */ + private final val optimizeReplace = true + + private type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] /** The type of `OrderingConstraint#boundsMap` */ - type ParamBounds = ArrayValuedMap[Type] + private type ParamBounds = ArrayValuedMap[Type] /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */ - type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] - - /** A new constraint with given maps and given set of hard typevars */ - private def newConstraint( - boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering, - hardVars: TypeVars)(using Context) : OrderingConstraint = - if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then - empty - else - val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) - if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) - result + private type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] /** A lens for updating a single entry array in one of the three constraint maps */ - abstract class ConstraintLens[T <: AnyRef: ClassTag] { + private abstract class ConstraintLens[T <: AnyRef: ClassTag] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[T] | Null def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[T])(using Context): OrderingConstraint def initial: T @@ -48,7 +54,7 @@ object OrderingConstraint { } /** The `current` constraint but with the entry for `param` updated to `entry`. - * `current` is used linearly. 
If it is different from `prev` it is + * `current` is used linearly. If it is different from `prev` then `current` is * known to be dead after the call. Hence it is OK to update destructively * parts of `current` which are not shared by `prev`. */ @@ -90,27 +96,27 @@ object OrderingConstraint { map(prev, current, param.binder, param.paramNum, f) } - val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { + private val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[Type] | Null = c.boundsMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[Type])(using Context): OrderingConstraint = - newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap, c.hardVars) + c.newConstraint(boundsMap = c.boundsMap.updated(poly, entries)) def initial = NoType } - val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.lowerMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap, c.hardVars) + c.newConstraint(lowerMap = c.lowerMap.updated(poly, entries)) def initial = Nil } - val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.upperMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, 
entries), c.hardVars) + c.newConstraint(upperMap = c.upperMap.updated(poly, entries)) def initial = Nil } @@ -144,11 +150,27 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : ParamOrdering, private val hardVars : TypeVars) extends Constraint { + thisConstraint => import UnificationDirection.* type This = OrderingConstraint + /** A new constraint with given maps and given set of hard typevars */ + def newConstraint( // !!! Dotty problem: Making newConstraint `private` causes -Ytest-pickler failure. + boundsMap: ParamBounds = this.boundsMap, + lowerMap: ParamOrdering = this.lowerMap, + upperMap: ParamOrdering = this.upperMap, + hardVars: TypeVars = this.hardVars)(using Context) : OrderingConstraint = + if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then + empty + else + val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) + if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) + result.coDeps = this.coDeps + result.contraDeps = this.contraDeps + result + // ----------- Basic indices -------------------------------------------------- /** The number of type parameters in the given entry array */ @@ -218,6 +240,189 @@ class OrderingConstraint(private val boundsMap: ParamBounds, if tvar == null then NoType else tvar +// ------------- Type parameter dependencies ---------------------------------------- + + private type ReverseDeps = SimpleIdentityMap[TypeParamRef, SimpleIdentitySet[TypeParamRef]] + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a larger type, the constraint would be narrowed + * (i.e. solution set changes other than simply being made larger). 
+ */ + private var coDeps: ReverseDeps = SimpleIdentityMap.empty + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a smaller type, the constraint would be narrowed. + * (i.e. solution set changes other than simply being made larger). + */ + private var contraDeps: ReverseDeps = SimpleIdentityMap.empty + + /** Null-safe indexing */ + extension (deps: ReverseDeps) def at(param: TypeParamRef): SimpleIdentitySet[TypeParamRef] = + val result = deps(param) + if null == result // swapped operand order important since `==` is overloaded in `SimpleIdentitySet` + then SimpleIdentitySet.empty + else result + + override def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean = + def origin(tv: TypeVar) = + assert(!instType(tv).exists) + tv.origin + val param = origin(tv) + val excluded = except.map(origin) + val qualifies: TypeParamRef => Boolean = !excluded.contains(_) + def test(deps: ReverseDeps, lens: ConstraintLens[List[TypeParamRef]]) = + deps.at(param).exists(qualifies) + || lens(this, tv.origin.binder, tv.origin.paramNum).exists(qualifies) + if co then test(coDeps, upperLens) else test(contraDeps, lowerLens) + + /** Modify traversals in two respects: + * - when encountering an application C[Ts], where C is a type variable or parameter + * that has an instantiation in this constraint, assume the type parameters of + * the instantiation instead of the type parameters of C when traversing the + * arguments Ts. That can make a difference for the variance in which an argument + * is traversed. Example constraint: + * + * constrained types: C[X], A + * A >: C[B] + * C := Option + * + * Here, B is traversed with variance +1 instead of 0. Test case: pos/t3152.scala + * + * - When typing a prefx, don't avoid negative variances. 
This matters only for the + * corner case where a parameter is instantiated to Nothing (see comment in + * TypeAccumulator#applyToPrefix). When determining instantiation directions in + * interpolations (which is what dependency variances are for), it can be ignored. + */ + private trait ConstraintAwareTraversal[T] extends TypeAccumulator[T]: + + override def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + def tparams(tycon: Type): List[ParamInfo] = tycon match + case tycon: TypeVar if !tycon.inst.exists => tparams(tycon.origin) + case tycon: TypeParamRef => + entry(tycon) match + case _: TypeBounds => tp.tyconTypeParams + case tycon1 if tycon1.typeParams.nonEmpty => tycon1.typeParams + case _ => tp.tyconTypeParams + case _ => tp.tyconTypeParams + tparams(tp.tycon) + + override def applyToPrefix(x: T, tp: NamedType): T = + this(x, tp.prefix) + end ConstraintAwareTraversal + + private class Adjuster(srcParam: TypeParamRef)(using Context) + extends TypeTraverser, ConstraintAwareTraversal[Unit]: + + var add: Boolean = compiletime.uninitialized + val seen = util.HashSet[LazyRef]() + + def update(deps: ReverseDeps, referenced: TypeParamRef): ReverseDeps = + val prev = deps.at(referenced) + val newSet = if add then prev + srcParam else prev - srcParam + if newSet.isEmpty then deps.remove(referenced) + else deps.updated(referenced, newSet) + + def traverse(t: Type) = t match + case param: TypeParamRef => + entry(param) match + case _: TypeBounds => + if variance >= 0 then coDeps = update(coDeps, param) + if variance <= 0 then contraDeps = update(contraDeps, param) + case tp => + traverse(tp) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + end Adjuster + + /** Adjust dependencies to account for the delta of previous entry `prevEntry` + * and the new bound `entry` for the type parameter `srcParam`. 
+ */ + def adjustDeps(entry: Type | Null, prevEntry: Type | Null, srcParam: TypeParamRef)(using Context): this.type = + val adjuster = new Adjuster(srcParam) + + /** Adjust reverse dependencies of all type parameters referenced by `bound` + * @param isLower `bound` is a lower bound + * @param add if true, add referenced variables to dependencoes, otherwise drop them. + */ + def adjustReferenced(bound: Type, isLower: Boolean, add: Boolean) = + adjuster.variance = if isLower then 1 else -1 + adjuster.add = add + adjuster.seen.clear() + adjuster.traverse(bound) + + /** Use an optimized strategy to adjust dependencies to account for the delta + * of previous bound `prevBound` and new bound `bound`: If `prevBound` is some + * and/or prefix of `bound`, and `baseCase` is true, just add the new parts of `bound`. + * @param isLower `bound` and `prevBound` are lower bounds + * @return true iff the delta strategy succeeded, false if it failed in which case + * the constraint is left unchanged. + */ + def adjustDelta(bound: Type, prevBound: Type, isLower: Boolean, baseCase: => Boolean): Boolean = + if bound eq prevBound then + baseCase + else bound match + case bound: AndOrType => + adjustDelta(bound.tp1, prevBound, isLower, baseCase) && { + adjustReferenced(bound.tp2, isLower, add = true) + true + } + case _ => false + + /** Add or remove depenencies referenced in `bounds`. 
+ * @param add if true, dependecies are added, otherwise they are removed + */ + def adjustBounds(bounds: TypeBounds, add: Boolean) = + adjustReferenced(bounds.lo, isLower = true, add) + adjustReferenced(bounds.hi, isLower = false, add) + + entry match + case entry @ TypeBounds(lo, hi) => + prevEntry match + case prevEntry @ TypeBounds(plo, phi) => + if !adjustDelta(lo, plo, isLower = true, + adjustDelta(hi, phi, isLower = false, true)) + then + adjustBounds(prevEntry, add = false) + adjustBounds(entry, add = true) + case _ => + adjustBounds(entry, add = true) + case _ => + prevEntry match + case prevEntry: TypeBounds => + adjustBounds(prevEntry, add = false) + case _ => + dropDeps(srcParam) // srcParam is instantiated, so its dependencies can be dropped + this + end adjustDeps + + /** Adjust dependencies to account for adding or dropping all `entries` associated + * with `poly`. + * @param add if true, entries is added, otherwise it is dropped + */ + def adjustDeps(poly: TypeLambda, entries: Array[Type], add: Boolean)(using Context): this.type = + for n <- 0 until paramCount(entries) do + if add + then adjustDeps(entries(n), NoType, poly.paramRefs(n)) + else adjustDeps(NoType, entries(n), poly.paramRefs(n)) + this + + /** Remove all reverse dependencies of `param` */ + def dropDeps(param: TypeParamRef)(using Context): Unit = + coDeps = coDeps.remove(param) + contraDeps = contraDeps.remove(param) + + /** A string representing the two dependency maps */ + def depsToString(using Context): String = + def depsStr(deps: ReverseDeps): String = + def depStr(param: TypeParamRef) = i"$param --> ${deps.at(param).toList}%, %" + if deps.isEmpty then "" else i"\n ${deps.toList.map((k, v) => depStr(k))}%\n %" + i" co-deps:${depsStr(coDeps)}\n contra-deps:${depsStr(contraDeps)}\n" + // ---------- Adding TypeLambdas -------------------------------------------------- /** The bound type `tp` without constrained parameters which are clearly @@ -283,7 +488,8 @@ class 
OrderingConstraint(private val boundsMap: ParamBounds, val entries1 = new Array[Type](nparams * 2) poly.paramInfos.copyToArray(entries1, 0) tvars.copyToArray(entries1, nparams) - newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap, hardVars).init(poly) + newConstraint(boundsMap = this.boundsMap.updated(poly, entries1)) + .init(poly) } /** Split dependent parameters off the bounds for parameters in `poly`. @@ -299,13 +505,14 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val param = poly.paramRefs(i) val bounds = dropWildcards(nonParamBounds(param)) val stripped = stripParams(bounds, todos, isUpper = true) - current = updateEntry(current, param, stripped) + current = boundsLens.update(this, current, param, stripped) while todos.nonEmpty do - current = todos.head.unsafeUnbox(current, param) + current = todos.head(current, param) todos.dropInPlace(1) i += 1 } - current.checkNonCyclic() + current.adjustDeps(poly, current.boundsMap(poly).nn, add = true) + .checkWellFormed() } // ---------- Updates ------------------------------------------------------------ @@ -427,10 +634,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case _ => Nil - private def updateEntry(current: This, param: TypeParamRef, tp: Type)(using Context): This = { - if Config.checkNoWildcardsInConstraint then assert(!tp.containsWildcardTypes) - var current1 = boundsLens.update(this, current, param, tp) - tp match { + private def updateEntry(current: This, param: TypeParamRef, newEntry: Type)(using Context): This = { + if Config.checkNoWildcardsInConstraint then assert(!newEntry.containsWildcardTypes) + val oldEntry = current.entry(param) + var current1 = boundsLens.update(this, current, param, newEntry) + .adjustDeps(newEntry, oldEntry, param) + newEntry match { case TypeBounds(lo, hi) => for p <- dependentParams(lo, isUpper = false) do current1 = order(current1, p, param) @@ -443,10 +652,10 @@ class OrderingConstraint(private val boundsMap: ParamBounds, 
/** The public version of `updateEntry`. Guarantees that there are no cycles */ def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = - updateEntry(this, param, ensureNonCyclic(param, tp)).checkNonCyclic() + updateEntry(this, param, ensureNonCyclic(param, tp)).checkWellFormed() def addLess(param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection)(using Context): This = - order(this, param1, param2, direction).checkNonCyclic() + order(this, param1, param2, direction).checkWellFormed() // ---------- Replacements and Removals ------------------------------------- @@ -456,24 +665,81 @@ class OrderingConstraint(private val boundsMap: ParamBounds, */ def replace(param: TypeParamRef, tp: Type)(using Context): OrderingConstraint = val replacement = tp.dealiasKeepAnnots.stripTypeVar - if param == replacement then this.checkNonCyclic() + if param == replacement then this.checkWellFormed() else assert(replacement.isValueTypeOrLambda) - var current = - if isRemovable(param.binder) then remove(param.binder) - else updateEntry(this, param, replacement) - - def removeParam(ps: List[TypeParamRef]) = ps.filterConserve(param ne _) - def replaceParam(tp: Type, atPoly: TypeLambda, atIdx: Int): Type = - current.ensureNonCyclic(atPoly.paramRefs(atIdx), tp.substParam(param, replacement)) - - current.foreachParam { (p, i) => - current = boundsLens.map(this, current, p, i, replaceParam(_, p, i)) - current = lowerLens.map(this, current, p, i, removeParam) - current = upperLens.map(this, current, p, i, removeParam) - } - current.checkNonCyclic() + val replacedTypeVar = typeVarOfParam(param) + //println(i"replace $param with $replacement in $this") + + def mapReplacedTypeVarTo(to: Type) = new TypeMap: + override def apply(t: Type): Type = + if (t eq replacedTypeVar) && t.exists then to else mapOver(t) + + var current = this + + def removeParamFrom(ps: List[TypeParamRef]) = + ps.filterConserve(param ne _) + + for lo <- lower(param) do + current = 
upperLens.map(this, current, lo, removeParamFrom) + for hi <- upper(param) do + current = lowerLens.map(this, current, hi, removeParamFrom) + + def replaceParamIn(other: TypeParamRef) = + val oldEntry = current.entry(other) + val newEntry = current.ensureNonCyclic(other, oldEntry.substParam(param, replacement)) + current = boundsLens.update(this, current, other, newEntry) + var oldDepEntry = oldEntry + var newDepEntry = newEntry + replacedTypeVar match + case tvar: TypeVar => + if tvar.inst.exists // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint + then + // If the type variable has been instantiated, we need to forget about + // the instantiation for old dependencies. + // I.e. to find out what the old entry was, we should not follow + // the newly instantiated type variable but assume the type variable's origin `param`. + // An example where this happens is if `replace` is called from TypeVar's `instantiateWith`. + oldDepEntry = mapReplacedTypeVarTo(param)(oldDepEntry) + else + // If the type variable has not been instantiated, we need to replace references to it + // in the new entry by `replacement`. Otherwise we would get stuck in an uninstantiated + // type variable. + // An example where this happens is if `replace` is called from unify. + newDepEntry = mapReplacedTypeVarTo(replacement)(newDepEntry) + case _ => + if oldDepEntry ne newDepEntry then + if current eq this then + // We can end up here if oldEntry eq newEntry, so posssibly no new constraint + // was created, but oldDepEntry ne newDepEntry. In that case we must make + // sure we have a new constraint before updating dependencies. 
+ current = newConstraint() + current.adjustDeps(newDepEntry, oldDepEntry, other) + end replaceParamIn + + if optimizeReplace then + val co = current.coDeps.at(param) + val contra = current.contraDeps.at(param) + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + entry(other) match + case _: TypeBounds => + if co.contains(other) || contra.contains(other) then + replaceParamIn(other) + case _ => replaceParamIn(other) + } + else + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + if other != param then replaceParamIn(other) + } + + current = + if isRemovable(param.binder) then current.remove(param.binder) + else updateEntry(current, param, replacement) + current.dropDeps(param) + current.checkWellFormed() end replace def remove(pt: TypeLambda)(using Context): This = { @@ -486,7 +752,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } val hardVars1 = pt.paramRefs.foldLeft(hardVars)((hvs, param) => hvs - typeVarOfParam(param)) newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap), hardVars1) - .checkNonCyclic() + .adjustDeps(pt, boundsMap(pt).nn, add = false) + .checkWellFormed() } def isRemovable(pt: TypeLambda): Boolean = { @@ -512,7 +779,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def swapKey[T](m: ArrayValuedMap[T]) = val info = m(from) if info == null then m else m.remove(from).updated(to, info) - var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap), hardVars) + var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap)) def subst[T <: Type](x: T): T = x.subst(from, to).asInstanceOf[T] current.foreachParam {(p, i) => current = boundsLens.map(this, current, p, i, subst) @@ -520,12 +787,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current = upperLens.map(this, current, p, i, _.map(subst)) } constr.println(i"renamed $this to $current") - current.checkNonCyclic() + 
current.checkWellFormed() def isHard(tv: TypeVar) = hardVars.contains(tv) def withHard(tv: TypeVar)(using Context) = - newConstraint(boundsMap, lowerMap, upperMap, hardVars + tv) + newConstraint(hardVars = this.hardVars + tv) def instType(tvar: TypeVar): Type = entry(tvar.origin) match case _: TypeBounds => NoType @@ -552,6 +819,26 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(tvar.origin == param, i"mismatch $tvar, $param") case _ => + def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = + def occurs(tp: Type)(using Context): Boolean = tp match + case tp: AndOrType => + occurs(tp.tp1) || occurs(tp.tp2) + case tp: TypeParamRef => + (tp eq param) || entry(tp).match + case NoType => false + case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) + case inst => occurs(inst) + case tp: TypeVar => + occurs(tp.underlying) + case TypeBounds(lo, hi) => + occurs(lo) || occurs(hi) + case _ => + val tp1 = tp.dealias + (tp1 ne tp) && occurs(tp1) + + occurs(inst) + end occursAtToplevel + // ---------- Exploration -------------------------------------------------------- def domainLambdas: List[TypeLambda] = boundsMap.keys @@ -604,7 +891,57 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // ---------- Checking ----------------------------------------------- - def checkNonCyclic()(using Context): this.type = + def checkWellFormed()(using Context): this.type = + + /** Check that each dependency A -> B in coDeps and contraDeps corresponds to + * a reference to A at the right variance in the entry of B. 
+ */ + def checkBackward(deps: ReverseDeps, depsName: String, v: Int)(using Context): Unit = + deps.foreachBinding { (param, params) => + for srcParam <- params do + assert(contains(srcParam) && occursAtVariance(param, v, in = entry(srcParam)), + i"wrong $depsName backwards reference $param -> $srcParam in $thisConstraint") + } + + /** A type traverser that checks that all references bound in the constraint + * are accounted for in coDeps and/or contraDeps. + */ + def checkForward(srcParam: TypeParamRef)(using Context) = + new TypeTraverser with ConstraintAwareTraversal[Unit]: + val seen = util.HashSet[LazyRef]() + def traverse(t: Type): Unit = t match + case param: TypeParamRef if param ne srcParam => + def check(deps: ReverseDeps, directDeps: List[TypeParamRef], depsName: String) = + assert(deps.at(param).contains(srcParam) || directDeps.contains(srcParam), + i"missing $depsName backwards reference $param -> $srcParam in $thisConstraint") + entry(param) match + case _: TypeBounds => + if variance >= 0 then check(contraDeps, upper(param), "contra") + if variance <= 0 then check(coDeps, lower(param), "co") + case tp => + traverse(tp) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + + /** Does `param` occur at variance `v` or else at variance 0 in entry `in`? 
*/ + def occursAtVariance(param: TypeParamRef, v: Int, in: Type)(using Context): Boolean = + val test = new TypeAccumulator[Boolean] with ConstraintAwareTraversal[Boolean]: + def apply(x: Boolean, t: Type): Boolean = + if x then true + else t match + case t: TypeParamRef => + entry(t) match + case _: TypeBounds => + t == param && (variance == 0 || variance == v) + case e => + apply(x, e) + case _ => + foldOver(x, t) + test(false, in) + if Config.checkConstraintsNonCyclic then domainParams.foreach { param => val inst = entry(param) @@ -613,28 +950,13 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(!occursAtToplevel(param, inst), s"cyclic bound for $param: ${inst.show} in ${this.show}") } - this - - def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = - - def occurs(tp: Type)(using Context): Boolean = tp match - case tp: AndOrType => - occurs(tp.tp1) || occurs(tp.tp2) - case tp: TypeParamRef => - (tp eq param) || entry(tp).match - case NoType => false - case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) - case inst => occurs(inst) - case tp: TypeVar => - occurs(tp.underlying) - case TypeBounds(lo, hi) => - occurs(lo) || occurs(hi) - case _ => - val tp1 = tp.dealias - (tp1 ne tp) && occurs(tp1) + if Config.checkConstraintDeps || ctx.settings.YcheckConstraintDeps.value then + checkBackward(coDeps, "co", -1) + checkBackward(contraDeps, "contra", +1) + domainParams.foreach(p => if contains(p) then checkForward(p).traverse(entry(p))) - occurs(inst) - end occursAtToplevel + this + end checkWellFormed override def checkClosed()(using Context): Unit = @@ -664,13 +986,16 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val constrainedText = " constrained types = " + domainLambdas.mkString("\n") val boundsText = - " bounds = " + { + "\n bounds = " + { val assocs = for (param <- domainParams) yield s"${param.binder.paramNames(param.paramNum)}: ${entryText(entry(param))}" assocs.mkString("\n") } - constrainedText 
+ "\n" + boundsText + val depsText = + "\n coDeps = " + coDeps + + "\n contraDeps = " + contraDeps + constrainedText + boundsText + depsText } } diff --git a/tests/pos-with-compiler-cc/dotc/core/Phases.scala b/tests/pos-with-compiler-cc/dotc/core/Phases.scala index 3744b1f21122..205554e418ed 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Phases.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Phases.scala @@ -285,7 +285,7 @@ object Phases { final def isTyper(phase: Phase): Boolean = phase.id == typerPhase.id } - abstract class Phase extends caps.Pure { + abstract class Phase { /** A name given to the `Phase` that can be used to debug the compiler. For * instance, it is possible to print trees after a given phase using: diff --git a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala index 5a2ad41c88df..99076b422358 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala @@ -64,7 +64,7 @@ object Scopes { * or to delete them. These methods are provided by subclass * MutableScope. 
*/ - abstract class Scope extends printing.Showable, caps.Pure { + abstract class Scope extends printing.Showable { /** The last scope-entry from which all others are reachable via `prev` */ private[dotc] def lastEntry: ScopeEntry | Null @@ -467,7 +467,7 @@ object Scopes { override def size: Int = 0 override def nestingLevel: Int = 0 override def toList(using Context): List[Symbol] = Nil - override def cloneScope(using Context): MutableScope = unsupported("cloneScope") + override def cloneScope(using Context): MutableScope = newScope(nestingLevel) override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = null override def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null = null } diff --git a/tests/pos-with-compiler-cc/dotc/core/StdNames.scala b/tests/pos-with-compiler-cc/dotc/core/StdNames.scala index c0aca9d8abf4..50c96191143c 100644 --- a/tests/pos-with-compiler-cc/dotc/core/StdNames.scala +++ b/tests/pos-with-compiler-cc/dotc/core/StdNames.scala @@ -243,7 +243,6 @@ object StdNames { final val ToString: N = "ToString" final val Xor: N = "^" - final val ClassfileAnnotation: N = "ClassfileAnnotation" final val ClassManifest: N = "ClassManifest" final val Enum: N = "Enum" final val Group: N = "Group" diff --git a/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala b/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala index 31c540955371..a4f1bf3c5e80 100644 --- a/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala +++ b/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala @@ -25,7 +25,6 @@ import reporting._ import collection.mutable import transform.TypeUtils._ import cc.{CapturingType, derivedCapturingType} -import language.experimental.pureFunctions import scala.annotation.internal.sharable @@ -809,7 +808,7 @@ object SymDenotations { /** Is this a Scala or Java annotation ? 
*/ def isAnnotation(using Context): Boolean = - isClass && derivesFrom(defn.AnnotationClass) + isClass && (derivesFrom(defn.AnnotationClass) || is(JavaAnnotation)) /** Is this symbol a class that extends `java.io.Serializable` ? */ def isSerializable(using Context): Boolean = @@ -1152,9 +1151,9 @@ object SymDenotations { final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) || isClass && !isOneOf(EffectivelyOpenFlags) - final def isTransparentTrait(using Context): Boolean = - isAllOf(TransparentTrait) - || defn.assumedTransparentTraits.contains(symbol) + final def isTransparentClass(using Context): Boolean = + is(TransparentType) + || defn.isAssumedTransparent(symbol) || isClass && hasAnnotation(defn.TransparentTraitAnnot) /** The class containing this denotation which has the given effective name. */ @@ -2430,8 +2429,6 @@ object SymDenotations { ) if compiledNow.exists then compiledNow else - //val union = (d1: Set[AbstractFile], d2: Set[AbstractFile]) => d1.union(d2) - // !cc! need to break `u` out into separate definition, writing `_ union _` below gives an error val assocFiles = multi.aggregate(d => Set(d.symbol.associatedFile.nn), _ union _) if assocFiles.size == 1 then multi // they are all overloaded variants from the same file @@ -2637,8 +2634,8 @@ object SymDenotations { * of these function types. 
*/ abstract class LazyType extends UncachedGroundType - with (Symbol -> LazyType) - with ((TermSymbol, ClassSymbol) -> LazyType) { self => + with (Symbol => LazyType) + with ((TermSymbol, ClassSymbol) => LazyType) { self => /** Sets all missing fields of given denotation */ def complete(denot: SymDenotation)(using Context): Unit @@ -2649,8 +2646,8 @@ object SymDenotations { private var myDecls: Scope = EmptyScope private var mySourceModule: Symbol | Null = null private var myModuleClass: Symbol | Null = null - private var mySourceModuleFn: Context ?-> Symbol = LazyType.NoSymbolFn - private var myModuleClassFn: Context ?-> Symbol = LazyType.NoSymbolFn + private var mySourceModuleFn: Context ?=> Symbol = LazyType.NoSymbolFn + private var myModuleClassFn: Context ?=> Symbol = LazyType.NoSymbolFn /** The type parameters computed by the completer before completion has finished */ def completerTypeParams(sym: Symbol)(using Context): List[TypeParamInfo] = @@ -2666,8 +2663,8 @@ object SymDenotations { myModuleClass.nn def withDecls(decls: Scope): this.type = { myDecls = decls; this } - def withSourceModule(sourceModuleFn: Context ?-> Symbol): this.type = { mySourceModuleFn = sourceModuleFn; this } - def withModuleClass(moduleClassFn: Context ?-> Symbol): this.type = { myModuleClassFn = moduleClassFn; this } + def withSourceModule(sourceModuleFn: Context ?=> Symbol): this.type = { mySourceModuleFn = sourceModuleFn; this } + def withModuleClass(moduleClassFn: Context ?=> Symbol): this.type = { myModuleClassFn = moduleClassFn; this } override def toString: String = getClass.toString diff --git a/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala b/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala index 5af45a016891..c5ae98853061 100644 --- a/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala +++ b/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala @@ -23,7 +23,6 @@ import ast.desugar import parsing.JavaParsers.OutlineJavaParser import 
parsing.Parsers.OutlineParser -import language.experimental.pureFunctions object SymbolLoaders { @@ -212,10 +211,7 @@ object SymbolLoaders { override def sourceModule(using Context): TermSymbol = _sourceModule def description(using Context): String = "package loader " + sourceModule.fullName - private var enterFlatClasses: Option[() -> Context ?-> Unit] = None - // Having a pure function type returning `Unit` does look weird. - // The point is that the function should not have any effect that matters for - // the compiler, in particular it should not capture a context. + private var enterFlatClasses: Option[() => Context ?=> Unit] = None Stats.record("package scopes") diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala b/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala index aa42785ecf78..658bf4122aa4 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala @@ -24,7 +24,6 @@ import typer.Applications.productSelectorTypes import reporting.trace import annotation.constructorOnly import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam, isAlwaysPure} -import language.experimental.pureFunctions /** Provides methods to compare types. */ @@ -830,7 +829,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling && (recur(tp1.widen.stripCapturing, parent2) || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) // this alternative is needed in case the right hand side is a - // capturing type that contains the lhs as an |-alternative. + // capturing type that contains the lhs as an alternative of a union type. 
) catch case ex: AssertionError => println(i"assertion failed while compare captured $tp1 <:< $tp2") @@ -2401,8 +2400,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling NoType } - private def andTypeGen(tp1: Type, tp2: Type, op: (Type, Type) -> Type, - original: (Type, Type) -> Type = _ & _, isErased: Boolean = ctx.erasedTypes): Type = trace(s"andTypeGen(${tp1.show}, ${tp2.show})", subtyping, show = true) { + private def andTypeGen(tp1: Type, tp2: Type, op: (Type, Type) => Type, + original: (Type, Type) => Type = _ & _, isErased: Boolean = ctx.erasedTypes): Type = trace(s"andTypeGen(${tp1.show}, ${tp2.show})", subtyping, show = true) { val t1 = distributeAnd(tp1, tp2) if (t1.exists) t1 else { @@ -2463,7 +2462,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * [X1, ..., Xn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn]) */ def liftIfHK(tp1: Type, tp2: Type, - op: (Type, Type) -> Type, original: (Type, Type) -> Type, combineVariance: (Variance, Variance) -> Variance) = { + op: (Type, Type) => Type, original: (Type, Type) => Type, combineVariance: (Variance, Variance) => Variance) = { val tparams1 = tp1.typeParams val tparams2 = tp2.typeParams def applied(tp: Type) = tp.appliedTo(tp.typeParams.map(_.paramInfoAsSeenFrom(tp))) @@ -2978,8 +2977,8 @@ object TypeComparer { comparing(_.provablyDisjoint(tp1, tp2)) def liftIfHK(tp1: Type, tp2: Type, - op: (Type, Type) -> Type, original: (Type, Type) -> Type, - combineVariance: (Variance, Variance) -> Variance)(using Context): Type = + op: (Type, Type) => Type, original: (Type, Type) => Type, + combineVariance: (Variance, Variance) => Variance)(using Context): Type = comparing(_.liftIfHK(tp1, tp2, op, original, combineVariance)) def constValue(tp: Type)(using Context): Option[Constant] = diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala b/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala index 4d8aae319d27..a3b594eb0f09 100644 --- 
a/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala @@ -13,7 +13,6 @@ import Decorators._ import reporting._ import ast.untpd import config.Printers.cyclicErrors -import language.experimental.pureFunctions class TypeError(msg: String) extends Exception(msg) { def this() = this("") @@ -44,7 +43,7 @@ class MissingType(pre: Type, name: Name) extends TypeError { } } -class RecursionOverflow(val op: String, details: -> String, val previous: Throwable, val weight: Int) +class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int) extends TypeError { def explanation: String = s"$op $details" @@ -92,7 +91,7 @@ extends TypeError { // Beware: Since this object is only used when handling a StackOverflow, this code // cannot consume significant amounts of stack. object handleRecursive { - def apply(op: String, details: -> String, exc: Throwable, weight: Int = 1)(using Context): Nothing = + def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(using Context): Nothing = if (ctx.settings.YnoDecodeStacktraces.value) throw exc else diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala b/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala index 920a8d645278..9363b27b4dde 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala @@ -23,7 +23,6 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe -import language.experimental.pureFunctions object TypeOps: @@ -226,16 +225,18 @@ object TypeOps: */ def orDominator(tp: Type)(using Context): Type = { - /** a faster version of cs1 intersect cs2 that treats bottom types correctly */ + /** a faster version of cs1 intersect cs2 */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = - if cs1.head == defn.NothingClass 
then cs2 - else if cs2.head == defn.NothingClass then cs1 - else if cs1.head == defn.NullClass && !ctx.explicitNulls && cs2.head.derivesFrom(defn.ObjectClass) then cs2 - else if cs2.head == defn.NullClass && !ctx.explicitNulls && cs1.head.derivesFrom(defn.ObjectClass) then cs1 - else - val cs2AsSet = new util.HashSet[ClassSymbol](128) - cs2.foreach(cs2AsSet += _) - cs1.filter(cs2AsSet.contains) + val cs2AsSet = BaseClassSet(cs2) + cs1.filter(cs2AsSet.contains) + + /** a version of Type#baseClasses that treats bottom types correctly */ + def orBaseClasses(tp: Type): List[ClassSymbol] = tp.stripTypeVar match + case OrType(tp1, tp2) => + if tp1.isBottomType && (tp1 frozen_<:< tp2) then orBaseClasses(tp2) + else if tp2.isBottomType && (tp2 frozen_<:< tp1) then orBaseClasses(tp1) + else intersect(orBaseClasses(tp1), orBaseClasses(tp2)) + case _ => tp.baseClasses /** The minimal set of classes in `cs` which derive all other classes in `cs` */ def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match { @@ -370,7 +371,7 @@ object TypeOps: } // Step 3: Intersect base classes of both sides - val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect) + val commonBaseClasses = orBaseClasses(tp) val doms = dominators(commonBaseClasses, Nil) def baseTp(cls: ClassSymbol): Type = tp.baseType(cls).mapReduceOr(identity)(mergeRefinedOrApplied) @@ -525,7 +526,7 @@ object TypeOps: * does not update `ctx.nestingLevel` when entering a block so I'm leaving * this as Future Work™. 
*/ - def avoid(tp: Type, symsToAvoid: -> List[Symbol])(using Context): Type = { + def avoid(tp: Type, symsToAvoid: => List[Symbol])(using Context): Type = { val widenMap = new AvoidMap { @threadUnsafe lazy val forbidden = symsToAvoid.toSet def toAvoid(tp: NamedType) = @@ -874,6 +875,10 @@ object TypeOps: } def instantiate(): Type = { + // if there's a change in variance in type parameters (between subtype tp1 and supertype tp2) + // then we don't want to maximise the type variables in the wrong direction. + // For instance 15967, A[-Z] and B[Y] extends A[Y], we don't want to maximise Y to Any + maximizeType(protoTp1.baseType(tp2.classSymbol), NoSpan) maximizeType(protoTp1, NoSpan) wildApprox(protoTp1) } diff --git a/tests/pos-with-compiler-cc/dotc/core/Types.scala b/tests/pos-with-compiler-cc/dotc/core/Types.scala index 2e806492ad67..29a2496ab2a7 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Types.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Types.scala @@ -43,8 +43,6 @@ import scala.annotation.internal.sharable import scala.annotation.threadUnsafe import dotty.tools.dotc.transform.SymUtils._ -import language.experimental.pureFunctions -import annotation.retains object Types { @@ -92,7 +90,7 @@ object Types { * * Note: please keep in sync with copy in `docs/docs/internals/type-system.md`. */ - abstract class Type extends Hashable, printing.Showable, caps.Pure { + abstract class Type extends Hashable with printing.Showable { // ----- Tests ----------------------------------------------------- @@ -747,16 +745,6 @@ object Types { // which means that we always defensively copy the type in the future. This second // measure is necessary because findMember calls might be cached, so do not // necessarily appear in nested order. 
- // Without the defensive copy, Typer.scala fails to compile at the line - // - // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType) - // - // because the subtype check - // - // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed] - // - // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.) - // // Without the `openedTwice` trick, Typer.scala fails to Ycheck // at phase resolveSuper. val rt = @@ -1289,15 +1277,14 @@ object Types { * then the top-level union isn't widened. This is needed so that type inference can infer nullable types. */ def widenUnion(using Context): Type = widen match - case tp @ OrNull(tp1) => - tp match - case tp: OrType => - // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. - val tp1Widen = tp1.widenUnionWithoutNull - if (tp1Widen.isRef(defn.AnyClass)) tp1Widen - else tp.derivedOrType(tp1Widen, defn.NullType) - case _ => - tp.widenUnionWithoutNull + case tp: OrType => tp match + case OrNull(tp1) => + // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. 
+ val tp1Widen = tp1.widenUnionWithoutNull + if (tp1Widen.isRef(defn.AnyClass)) tp1Widen + else tp.derivedOrType(tp1Widen, defn.NullType) + case _ => + tp.widenUnionWithoutNull case tp => tp.widenUnionWithoutNull @@ -2161,7 +2148,7 @@ object Types { /** A trait for proto-types, used as expected types in typer */ trait ProtoType extends Type { def isMatchedBy(tp: Type, keepConstraint: Boolean = false)(using Context): Boolean - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T + def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T def map(tm: TypeMap)(using Context): ProtoType /** If this prototype captures a context, the same prototype except that the result @@ -2460,6 +2447,8 @@ object Types { } private def checkDenot()(using Context) = {} + //if name.toString == "getConstructor" then + // println(i"set denot of $this to ${denot.info}, ${denot.getClass}, ${Phases.phaseOf(denot.validFor.lastPhaseId)} at ${ctx.phase}") private def checkSymAssign(sym: Symbol)(using Context) = { def selfTypeOf(sym: Symbol) = @@ -3023,7 +3012,7 @@ object Types { } // `refFn` can be null only if `computed` is true. - case class LazyRef(private var refFn: (Context -> (Type | Null)) | Null) extends UncachedProxyType with ValueType { + case class LazyRef(private var refFn: (Context => (Type | Null)) | Null) extends UncachedProxyType with ValueType { private var myRef: Type | Null = null private var computed = false @@ -3063,7 +3052,7 @@ object Types { override def hashCode: Int = System.identityHashCode(this) } object LazyRef: - def of(refFn: Context ?-> (Type | Null)): LazyRef = LazyRef(refFn(using _)) + def of(refFn: Context ?=> (Type | Null)): LazyRef = LazyRef(refFn(using _)) // --- Refined Type and RecType ------------------------------------------------ @@ -3159,7 +3148,7 @@ object Types { * * Where `RecThis(...)` points back to the enclosing `RecType`. 
*/ - class RecType(@constructorOnly parentExp: RecType => Type) extends RefinedOrRecType with BindingType { + class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType { // See discussion in findMember#goRec why these vars are needed private[Types] var opened: Boolean = false @@ -3430,25 +3419,29 @@ object Types { private var myAtoms: Atoms = _ private var myWidened: Type = _ + private def computeAtoms()(using Context): Atoms = + if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms + else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms + else tp1.atoms | tp2.atoms + + private def computeWidenSingletons()(using Context): Type = + val tp1w = tp1.widenSingletons + val tp2w = tp2.widenSingletons + if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) + private def ensureAtomsComputed()(using Context): Unit = - if atomsRunId != ctx.runId then - myAtoms = - if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms - else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms - else tp1.atoms | tp2.atoms - val tp1w = tp1.widenSingletons - val tp2w = tp2.widenSingletons - myWidened = if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) + if atomsRunId != ctx.runId && !isProvisional then + myAtoms = computeAtoms() + myWidened = computeWidenSingletons() atomsRunId = ctx.runId override def atoms(using Context): Atoms = ensureAtomsComputed() - myAtoms + if isProvisional then computeAtoms() else myAtoms - override def widenSingletons(using Context): Type = { + override def widenSingletons(using Context): Type = ensureAtomsComputed() - myWidened - } + if isProvisional then computeWidenSingletons() else myWidened def derivedOrType(tp1: Type, tp2: Type, soft: Boolean = isSoft)(using Context): Type = if ((tp1 eq this.tp1) && (tp2 eq this.tp2) && soft == isSoft) this @@ -3879,8 +3872,8 @@ object Types { } abstract case class MethodType(paramNames: List[TermName])( - 
@constructorOnly paramInfosExp: MethodType => List[Type], - @constructorOnly resultTypeExp: MethodType => Type) + paramInfosExp: MethodType => List[Type], + resultTypeExp: MethodType => Type) extends MethodOrPoly with TermLambda with NarrowCached { thisMethodType => type This = MethodType @@ -3906,10 +3899,7 @@ object Types { protected def prefixString: String = companion.prefixString } - final class CachedMethodType(paramNames: List[TermName])( - @constructorOnly paramInfosExp: MethodType => List[Type], - @constructorOnly resultTypeExp: MethodType => Type, - val companion: MethodTypeCompanion) + final class CachedMethodType(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type, val companion: MethodTypeCompanion) extends MethodType(paramNames)(paramInfosExp, resultTypeExp) abstract class LambdaTypeCompanion[N <: Name, PInfo <: Type, LT <: LambdaType] { @@ -4057,8 +4047,7 @@ object Types { * Variances are stored in the `typeParams` list of the lambda. */ class HKTypeLambda(val paramNames: List[TypeName], @constructorOnly variances: List[Variance])( - @constructorOnly paramInfosExp: HKTypeLambda => List[TypeBounds], - @constructorOnly resultTypeExp: HKTypeLambda => Type) + paramInfosExp: HKTypeLambda => List[TypeBounds], resultTypeExp: HKTypeLambda => Type) extends HKLambda with TypeLambda { type This = HKTypeLambda def companion: HKTypeLambda.type = HKTypeLambda @@ -4126,8 +4115,7 @@ object Types { * except it applies to terms and parameters do not have variances. 
*/ class PolyType(val paramNames: List[TypeName])( - @constructorOnly paramInfosExp: PolyType => List[TypeBounds], - @constructorOnly resultTypeExp: PolyType => Type) + paramInfosExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) extends MethodOrPoly with TypeLambda { type This = PolyType @@ -5311,7 +5299,7 @@ object Types { val et = new PreviousErrorType ctx.base.errorTypeMsg(et) = m et - def apply(s: -> String)(using Context): ErrorType = + def apply(s: => String)(using Context): ErrorType = apply(s.toMessage) end ErrorType @@ -5509,6 +5497,14 @@ object Types { stop == StopAt.Static && tp.currentSymbol.isStatic && isStaticPrefix(tp.prefix) || stop == StopAt.Package && tp.currentSymbol.is(Package) } + + /** The type parameters of the constructor of this applied type. + * Overridden in OrderingConstraint's ConstraintAwareTraversal to take account + * of instantiations in the constraint that are not yet propagated to the + * instance types of type variables. + */ + protected def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + tp.tyconTypeParams end VariantTraversal /** A supertrait for some typemaps that are bijections. Used for capture checking. @@ -5537,7 +5533,7 @@ object Types { end BiTypeMap abstract class TypeMap(implicit protected var mapCtx: Context) - extends VariantTraversal with (Type -> Type) { thisMap: TypeMap => + extends VariantTraversal with (Type => Type) { thisMap => def apply(tp: Type): Type @@ -5616,17 +5612,11 @@ object Types { case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else - val prefix1 = atVariance(variance max 0)(this(tp.prefix)) - // A prefix is never contravariant. Even if say `p.A` is used in a contravariant - // context, we cannot assume contravariance for `p` because `p`'s lower - // bound might not have a binding for `A` (e.g. the lower bound could be `Nothing`). 
- // By contrast, covariance does translate to the prefix, since we have that - // if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member - // of `p`'s upper bound. + val prefix1 = atVariance(variance max 0)(this(tp.prefix)) // see comment of TypeAccumulator's applyToPrefix derivedSelect(tp, prefix1) case tp: AppliedType => - derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tp.tyconTypeParams)) + derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tyconTypeParams(tp))) case tp: LambdaType => mapOverLambda(tp) @@ -5732,7 +5722,7 @@ object Types { protected def mapClassInfo(tp: ClassInfo): Type = derivedClassInfo(tp, this(tp.prefix)) - def andThen(f: Type -> Type): TypeMap = new TypeMap { + def andThen(f: Type => Type): TypeMap = new TypeMap { override def stopAt = thisMap.stopAt def apply(tp: Type) = f(thisMap(tp)) } @@ -5953,7 +5943,7 @@ object Types { case nil => true } - if (distributeArgs(args, tp.tyconTypeParams)) + if (distributeArgs(args, tyconTypeParams(tp))) range(tp.derivedAppliedType(tycon, loBuf.toList), tp.derivedAppliedType(tycon, hiBuf.toList)) else if tycon.isLambdaSub || args.exists(isRangeOfNonTermTypes) then @@ -6069,14 +6059,22 @@ object Types { abstract class TypeAccumulator[T](implicit protected val accCtx: Context) extends VariantTraversal with ((T, Type) => T) { - this: TypeAccumulator[T] @annotation.retains(caps.*) => def apply(x: T, tp: Type): T protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations - protected final def applyToPrefix(x: T, tp: NamedType): T = - atVariance(variance max 0)(this(x, tp.prefix)) // see remark on NamedType case in TypeMap + /** A prefix is never contravariant. Even if say `p.A` is used in a contravariant + * context, we cannot assume contravariance for `p` because `p`'s lower + * bound might not have a binding for `A`, since the lower bound could be `Nothing`. 
+ * By contrast, covariance does translate to the prefix, since we have that + * if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member + * of `p`'s upper bound. + * Overridden in OrderingConstraint's ConstraintAwareTraversal, where a + * more relaxed scheme is used. + */ + protected def applyToPrefix(x: T, tp: NamedType): T = + atVariance(variance max 0)(this(x, tp.prefix)) def foldOver(x: T, tp: Type): T = { record(s"foldOver $getClass") @@ -6099,7 +6097,7 @@ object Types { } foldArgs(acc, tparams.tail, args.tail) } - foldArgs(this(x, tycon), tp.tyconTypeParams, args) + foldArgs(this(x, tycon), tyconTypeParams(tp), args) case _: BoundType | _: ThisType => x @@ -6141,7 +6139,7 @@ object Types { foldOver(x2, tp.cases) case CapturingType(parent, refs) => - (this(x, parent) /: refs.elems)(apply) // !cc! does not work under apply := this + (this(x, parent) /: refs.elems)(this) case AnnotatedType(underlying, annot) => this(applyToAnnot(x, annot), underlying) diff --git a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileConstants.scala b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileConstants.scala index 3b05ee351b86..4aa60d973264 100644 --- a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileConstants.scala +++ b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileConstants.scala @@ -346,6 +346,7 @@ object ClassfileConstants { case JAVA_ACC_ENUM => Enum case JAVA_ACC_ABSTRACT => if (isClass) Abstract else Deferred case JAVA_ACC_INTERFACE => PureInterfaceCreationFlags | JavaDefined + case JAVA_ACC_ANNOTATION => JavaAnnotation case _ => EmptyFlags } @@ -353,18 +354,16 @@ object ClassfileConstants { if (jflag == 0) base else base | translateFlag(jflag) private def translateFlags(jflags: Int, baseFlags: FlagSet): FlagSet = { - val nflags = - if ((jflags & JAVA_ACC_ANNOTATION) == 0) jflags - else jflags & ~(JAVA_ACC_ABSTRACT | JAVA_ACC_INTERFACE) // annotations are neither abstract nor interfaces var res: FlagSet = 
baseFlags | JavaDefined - res = addFlag(res, nflags & JAVA_ACC_PRIVATE) - res = addFlag(res, nflags & JAVA_ACC_PROTECTED) - res = addFlag(res, nflags & JAVA_ACC_FINAL) - res = addFlag(res, nflags & JAVA_ACC_SYNTHETIC) - res = addFlag(res, nflags & JAVA_ACC_STATIC) - res = addFlag(res, nflags & JAVA_ACC_ENUM) - res = addFlag(res, nflags & JAVA_ACC_ABSTRACT) - res = addFlag(res, nflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_PRIVATE) + res = addFlag(res, jflags & JAVA_ACC_PROTECTED) + res = addFlag(res, jflags & JAVA_ACC_FINAL) + res = addFlag(res, jflags & JAVA_ACC_SYNTHETIC) + res = addFlag(res, jflags & JAVA_ACC_STATIC) + res = addFlag(res, jflags & JAVA_ACC_ENUM) + res = addFlag(res, jflags & JAVA_ACC_ABSTRACT) + res = addFlag(res, jflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_ANNOTATION) res } diff --git a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala index 6c3fd2cf7336..33a1e1dd6e73 100644 --- a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala +++ b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala @@ -23,7 +23,6 @@ import scala.annotation.switch import typer.Checking.checkNonCyclic import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal -import language.experimental.pureFunctions object ClassfileParser { /** Marker trait for unpicklers that can be embedded in classfiles. */ @@ -166,11 +165,7 @@ class ClassfileParser( * Updates the read pointer of 'in'. 
*/ def parseParents: List[Type] = { val superType = - if (isAnnotation) { - in.nextChar - defn.AnnotationClass.typeRef - } - else if (classRoot.symbol == defn.ComparableClass || + if (classRoot.symbol == defn.ComparableClass || classRoot.symbol == defn.JavaCloneableClass || classRoot.symbol == defn.JavaSerializableClass) { // Treat these interfaces as universal traits @@ -187,7 +182,6 @@ class ClassfileParser( // Consequently, no best implicit for the "Integral" evidence parameter of "range" // is found. Previously, this worked because of weak conformance, which has been dropped. - if (isAnnotation) ifaces = defn.ClassfileAnnotationClass.typeRef :: ifaces superType :: ifaces } @@ -630,10 +624,10 @@ class ClassfileParser( case (name, tag: EnumTag) => untpd.NamedArg(name.name, tag.toTree).withSpan(NoSpan) } - protected var mySym: Symbol | (Context ?-> Symbol) = + protected var mySym: Symbol | (Context ?=> Symbol) = (ctx: Context) ?=> annotType.classSymbol - protected var myTree: Tree | (Context ?-> Tree) = + protected var myTree: Tree | (Context ?=> Tree) = (ctx: Context) ?=> untpd.resolveConstructor(annotType, args) def untpdTree(using Context): untpd.Tree = @@ -846,7 +840,7 @@ class ClassfileParser( class AnnotConstructorCompleter(classInfo: TempClassInfoType) extends LazyType { def complete(denot: SymDenotation)(using Context): Unit = { - val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol) + val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol && sym.name != nme.CONSTRUCTOR) val paramNames = attrs.map(_.name.asTermName) val paramTypes = attrs.map(_.info.resultType) denot.info = MethodType(paramNames, paramTypes, classRoot.typeRef) diff --git a/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala b/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala index 5cc172c65439..70bdec7780e2 100644 --- a/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala +++ 
b/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala @@ -11,7 +11,6 @@ import TastyBuffer.NameRef import scala.collection.mutable import Names.{TermName, termName, EmptyTermName} import NameKinds._ -import language.experimental.pureFunctions object TastyUnpickler { @@ -19,7 +18,7 @@ object TastyUnpickler { def unpickle(reader: TastyReader, nameAtRef: NameTable): R } - class NameTable extends (NameRef -> TermName) { + class NameTable extends (NameRef => TermName) { private val names = new mutable.ArrayBuffer[TermName] def add(name: TermName): mutable.ArrayBuffer[TermName] = names += name def apply(ref: NameRef): TermName = names(ref.index) diff --git a/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala b/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala index 69bd0d95ba3a..617a2c55a7ad 100644 --- a/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala +++ b/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala @@ -46,7 +46,6 @@ import dotty.tools.tasty.TastyFormat._ import scala.annotation.constructorOnly import scala.annotation.internal.sharable -import language.experimental.pureFunctions /** Unpickler for typed trees * @param reader the reader from which to unpickle @@ -664,9 +663,9 @@ class TreeUnpickler(reader: TastyReader, /** Read modifier list into triplet of flags, annotations and a privateWithin * boundary symbol. 
*/ - def readModifiers(end: Addr)(using Context): (FlagSet, List[Symbol -> Annotation], Symbol) = { + def readModifiers(end: Addr)(using Context): (FlagSet, List[Symbol => Annotation], Symbol) = { var flags: FlagSet = EmptyFlags - var annotFns: List[Symbol -> Annotation] = Nil + var annotFns: List[Symbol => Annotation] = Nil var privateWithin: Symbol = NoSymbol while (currentAddr.index != end.index) { def addFlag(flag: FlagSet) = { @@ -733,7 +732,7 @@ class TreeUnpickler(reader: TastyReader, private def readWithin(using Context): Symbol = readType().typeSymbol - private def readAnnot(using Context): Symbol -> Annotation = + private def readAnnot(using Context): Symbol => Annotation = readByte() val end = readEnd() val tp = readType() @@ -1451,10 +1450,10 @@ class TreeUnpickler(reader: TastyReader, setSpan(start, CaseDef(pat, guard, rhs)) } - def readLater[T <: AnyRef](end: Addr, op: TreeReader -> Context ?-> T)(using Context): Trees.Lazy[T] = + def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Trees.Lazy[T] = readLaterWithOwner(end, op)(ctx.owner) - def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader -> Context ?-> T)(using Context): Symbol -> Trees.Lazy[T] = { + def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Symbol => Trees.Lazy[T] = { val localReader = fork goto(end) val mode = ctx.mode diff --git a/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala b/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala index 5bf526bd4bdd..c148ff5f9bca 100644 --- a/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala +++ b/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala @@ -11,7 +11,6 @@ import dotty.tools.dotc.reporting._ import dotty.tools.io.AbstractFile import scala.quoted.runtime.impl.QuotesImpl -import caps.unsafe.unsafeUnbox /** * Decompiler to be used with IDEs @@ -41,7 +40,7 @@ class IDEDecompilerDriver(val 
settings: List[String]) extends dotc.Driver { val unit = ctx.run.nn.units.head val decompiled = QuotesImpl.showDecompiledTree(unit.tpdTree) - val tree = new TastyHTMLPrinter(unit.pickled.head._2.unsafeUnbox()).showContents() + val tree = new TastyHTMLPrinter(unit.pickled.head._2()).showContents() reporter.removeBufferedMessages.foreach(message => System.err.println(message)) (tree, decompiled) diff --git a/tests/pos-with-compiler-cc/dotc/inlines/InlineReducer.scala b/tests/pos-with-compiler-cc/dotc/inlines/InlineReducer.scala index debf51872d5a..460d0a61c252 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/InlineReducer.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/InlineReducer.scala @@ -269,12 +269,21 @@ class InlineReducer(inliner: Inliner)(using Context): } } - // Extractors contain Bind nodes in type parameter lists, the tree looks like this: + // Extractors can contain Bind nodes in type parameter lists, + // for that case tree looks like this: // UnApply[t @ t](pats)(implicits): T[t] // Test case is pos/inline-caseclass.scala. 
+ // Alternatively, for explicitly specified type binds in type annotations like in + // case A(B): A[t] + // the tree will look like this: + // Unapply[t](pats)(implicits) : T[t @ t] + // and the binds will be found in the type tree instead + // Test case is pos-macros/i15971 + val tptBinds = getBinds(Set.empty[TypeSymbol], tpt) val binds: Set[TypeSymbol] = pat match { - case UnApply(TypeApply(_, tpts), _, _) => getBinds(Set.empty[TypeSymbol], tpts) - case _ => getBinds(Set.empty[TypeSymbol], tpt) + case UnApply(TypeApply(_, tpts), _, _) => + getBinds(Set.empty[TypeSymbol], tpts) ++ tptBinds + case _ => tptBinds } val extractBindVariance = new TypeAccumulator[TypeBindsMap] { diff --git a/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala b/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala index a23af052ca24..bea42e82ce6f 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala @@ -23,7 +23,6 @@ import util.Spans.Span import dotty.tools.dotc.transform.Splicer import quoted.QuoteUtils import scala.annotation.constructorOnly -import language.experimental.pureFunctions /** General support for inlining */ object Inliner: @@ -109,8 +108,8 @@ object Inliner: // They are generally left alone (not mapped further, and if they wrap a type // the type Inlined wrapper gets dropped private class InlinerMap( - typeMap: Type -> Type, - treeMap: Tree -> Tree, + typeMap: Type => Type, + treeMap: Tree => Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], @@ -119,8 +118,8 @@ object Inliner: typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, InlineCopier()): override def copy( - typeMap: Type -> Type, - treeMap: Tree -> Tree, + typeMap: Type => Type, + treeMap: Tree => Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], @@ -171,7 +170,7 @@ class Inliner(val call: tpd.Tree)(using Context): /** A map from references to (type and value) 
parameters of the inlineable method * to their corresponding argument or proxy references, as given by `paramBinding`. */ - private[inlines] val paramProxy: mutable.HashMap[Type, Type] = new mutable.HashMap + private[inlines] val paramProxy = new mutable.HashMap[Type, Type] /** A map from the classes of (direct and outer) this references in `rhsToInline` * to references of their proxies. diff --git a/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala b/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala index 8be23b932e98..1806cdfc909b 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala @@ -85,7 +85,10 @@ object Inlines: if (tree.symbol == defn.CompiletimeTesting_typeChecks) return Intrinsics.typeChecks(tree) if (tree.symbol == defn.CompiletimeTesting_typeCheckErrors) return Intrinsics.typeCheckErrors(tree) - CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) + if ctx.isAfterTyper then + // During typer we wait with cross version checks until PostTyper, in order + // not to provoke cyclic references. See i16116 for a test case. + CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) if tree.symbol.isConstructor then return tree // error already reported for the inline constructor definition diff --git a/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala b/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala index db52712c39e2..7e47bbfdfa8a 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala @@ -22,7 +22,6 @@ import transform.SymUtils.* import config.Printers.inlining import util.Property import dotty.tools.dotc.transform.TreeMapWithStages._ -import language.experimental.pureFunctions object PrepareInlineable { import tpd._ @@ -263,7 +262,7 @@ object PrepareInlineable { * to have the inline method as owner. 
*/ def registerInlineInfo( - inlined: Symbol, treeExpr: Context ?-> Tree)(using Context): Unit = + inlined: Symbol, treeExpr: Context ?=> Tree)(using Context): Unit = inlined.unforcedAnnotation(defn.BodyAnnot) match { case Some(ann: ConcreteBodyAnnotation) => case Some(ann: LazyBodyAnnotation) if ann.isEvaluated || ann.isEvaluating => diff --git a/tests/pos-with-compiler-cc/dotc/parsing/JavaParsers.scala b/tests/pos-with-compiler-cc/dotc/parsing/JavaParsers.scala index 4611554a01a3..183845fcf3ec 100644 --- a/tests/pos-with-compiler-cc/dotc/parsing/JavaParsers.scala +++ b/tests/pos-with-compiler-cc/dotc/parsing/JavaParsers.scala @@ -822,7 +822,7 @@ object JavaParsers { val iface = atSpan(start, nameOffset) { TypeDef( name, - makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.Trait | Flags.JavaInterface | Flags.Abstract) + makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.JavaInterface) } addCompanionObject(statics, iface) } @@ -858,10 +858,9 @@ object JavaParsers { } (statics.toList, members.toList) } - def annotationParents: List[Select] = List( - scalaAnnotationDot(tpnme.Annotation), - Select(javaLangDot(nme.annotation), tpnme.Annotation), - scalaAnnotationDot(tpnme.ClassfileAnnotation) + def annotationParents: List[Tree] = List( + javaLangObject(), + Select(javaLangDot(nme.annotation), tpnme.Annotation) ) def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(AT) @@ -877,7 +876,7 @@ object JavaParsers { List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined)) val templ = makeTemplate(annotationParents, constr :: body, List(), true) val annot = atSpan(start, nameOffset) { - TypeDef(name, templ).withMods(mods | Flags.Abstract) + TypeDef(name, templ).withMods(mods | Flags.JavaInterface | Flags.JavaAnnotation) } addCompanionObject(statics, annot) } diff --git a/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala b/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala index 
359de5427a04..a198cccc85cc 100644 --- a/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala +++ b/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala @@ -33,7 +33,6 @@ import config.Feature import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} import config.SourceVersion._ import config.SourceVersion -import language.experimental.pureFunctions object Parsers { @@ -144,10 +143,10 @@ object Parsers { syntaxError(msg, Span(offset, offset + length)) lastErrorOffset = in.offset - def syntaxError(msg: -> String, offset: Int): Unit = + def syntaxError(msg: => String, offset: Int): Unit = syntaxError(msg.toMessage, offset) - def syntaxError(msg: -> String): Unit = + def syntaxError(msg: => String): Unit = syntaxError(msg, in.offset) /** Unconditionally issue an error at given span, without @@ -156,7 +155,7 @@ object Parsers { def syntaxError(msg: Message, span: Span): Unit = report.error(msg, source.atSpan(span)) - def syntaxError(msg: -> String, span: Span): Unit = + def syntaxError(msg: => String, span: Span): Unit = syntaxError(msg.toMessage, span) def unimplementedExpr(using Context): Select = @@ -289,7 +288,7 @@ object Parsers { syntaxError(msg, offset) skip() - def syntaxErrorOrIncomplete(msg: -> String): Unit = + def syntaxErrorOrIncomplete(msg: => String): Unit = syntaxErrorOrIncomplete(msg.toMessage, in.offset) def syntaxErrorOrIncomplete(msg: Message, span: Span): Unit = @@ -779,7 +778,7 @@ object Parsers { } }) canRewrite &= (in.isAfterLineEnd || statCtdTokens.contains(in.token)) // test (5) - if (canRewrite && (!underColonSyntax || in.fewerBracesEnabled)) { + if canRewrite && (!underColonSyntax || Feature.fewerBracesEnabled) then val openingPatchStr = if !colonRequired then "" else if testChar(startOpening - 1, Chars.isOperatorPart(_)) then " :" @@ -787,7 +786,6 @@ object Parsers { val (startClosing, endClosing) = closingElimRegion() patch(source, Span(startOpening, endOpening), openingPatchStr) patch(source, Span(startClosing, endClosing), 
"") - } t } @@ -1026,7 +1024,7 @@ object Parsers { * body */ def isColonLambda = - in.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() + Feature.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() /** operand { infixop operand | MatchClause } [postfixop], * @@ -2371,7 +2369,7 @@ object Parsers { /** PostfixExpr ::= InfixExpr [id [nl]] * InfixExpr ::= PrefixExpr * | InfixExpr id [nl] InfixExpr - * | InfixExpr id `:` IndentedExpr + * | InfixExpr id ColonArgument * | InfixExpr MatchClause */ def postfixExpr(location: Location = Location.ElseWhere): Tree = @@ -2415,10 +2413,11 @@ object Parsers { * | SimpleExpr `.` MatchClause * | SimpleExpr (TypeArgs | NamedTypeArgs) * | SimpleExpr1 ArgumentExprs - * | SimpleExpr1 `:` ColonArgument -- under language.experimental.fewerBraces - * ColonArgument ::= indent (CaseClauses | Block) outdent - * | FunParams (‘=>’ | ‘?=>’) ColonArgBody - * | HkTypeParamClause ‘=>’ ColonArgBody + * | SimpleExpr1 ColonArgument + * ColonArgument ::= colon [LambdaStart] + * indent (CaseClauses | Block) outdent + * LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + * | HkTypeParamClause ‘=>’ * ColonArgBody ::= indent (CaseClauses | Block) outdent * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ @@ -2823,11 +2822,14 @@ object Parsers { if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1(location) :: patternAlts(location) } else Nil - /** Pattern1 ::= Pattern2 [Ascription] + /** Pattern1 ::= PatVar Ascription + * | [‘-’] integerLiteral Ascription + * | [‘-’] floatingPointLiteral Ascription + * | Pattern2 */ def pattern1(location: Location = Location.InPattern): Tree = val p = pattern2() - if in.isColon then + if (isVarPattern(p) || p.isInstanceOf[Number]) && in.isColon then in.nextToken() ascription(p, location) else p @@ -3808,7 +3810,7 @@ object Parsers { if !(name.isEmpty && noParams) then acceptColon() val parents = if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else constrApp() 
:: withConstrApps() + else refinedTypeRest(constrApp()) :: withConstrApps() val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) diff --git a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala index c4fe8dc44ddc..0540ef27a4d3 100644 --- a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala +++ b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala @@ -17,7 +17,7 @@ import scala.collection.mutable import scala.collection.immutable.SortedMap import rewrites.Rewrites.patch import config.Feature -import config.Feature.migrateTo3 +import config.Feature.{migrateTo3, fewerBracesEnabled} import config.SourceVersion.`3.0` import reporting.{NoProfile, Profile} @@ -202,25 +202,6 @@ object Scanners { def featureEnabled(name: TermName) = Feature.enabled(name)(using languageImportContext) def erasedEnabled = featureEnabled(Feature.erasedDefinitions) - private inline val fewerBracesByDefault = false - // turn on to study impact on codebase if `fewerBraces` was the default - - private var fewerBracesEnabledCache = false - private var fewerBracesEnabledCtx: Context = NoContext - - def fewerBracesEnabled = - if fewerBracesEnabledCtx ne myLanguageImportContext then - fewerBracesEnabledCache = - featureEnabled(Feature.fewerBraces) - || fewerBracesByDefault && indentSyntax && !migrateTo3 - // ensure that fewer braces is not the default for 3.0-migration since - // { x: T => - // expr - // } - // would be ambiguous - fewerBracesEnabledCtx = myLanguageImportContext - fewerBracesEnabledCache - private var postfixOpsEnabledCache = false private var postfixOpsEnabledCtx: Context = NoContext @@ -1569,7 +1550,7 @@ object Scanners { * InBraces a pair of braces { ... } * Indented a pair of ... 
tokens */ - abstract class Region(val closedBy: Token) extends caps.Pure: + abstract class Region(val closedBy: Token): /** The region enclosing this one, or `null` for the outermost region */ def outer: Region | Null diff --git a/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala b/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala index 091f8bfb5c16..ceb5afdea750 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala @@ -7,7 +7,7 @@ import core.Contexts._ object Highlighting { - sealed abstract class Highlight(private val highlight: String) { + abstract class Highlight(private val highlight: String) { def text: String def show(using Context): String = if ctx.useColors then highlight + text + Console.RESET else text diff --git a/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala b/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala index 95b268f0b07a..f0479f818c9f 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala @@ -111,8 +111,14 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Closed = - (refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close + protected def toTextRefinement(rt: RefinedType): Text = + val keyword = rt.refinedInfo match { + case _: ExprType | _: MethodOrPoly => "def " + case _: TypeBounds => "type " + case _: TypeProxy => "val " + case _ => "" + } + (keyword ~ refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close protected def argText(arg: Type): Text = homogenizeArg(arg) match { case arg: TypeBounds => "?" 
~ toText(arg) @@ -258,8 +264,9 @@ class PlainPrinter(_ctx: Context) extends Printer { if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot then toText(tpe) else toTextLocal(tpe) ~ " " ~ toText(annot) case tp: TypeVar => + def toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) if (tp.isInstantiated) - toTextLocal(tp.instanceOpt) ~ (Str("^") provided printDebug) + toTextCaret(tp.instanceOpt) else { val constr = ctx.typerState.constraint val bounds = @@ -267,7 +274,7 @@ class PlainPrinter(_ctx: Context) extends Printer { withMode(Mode.Printing)(TypeComparer.fullBounds(tp.origin)) else TypeBounds.empty - if (bounds.isTypeAlias) toText(bounds.lo) ~ (Str("^") provided printDebug) + if (bounds.isTypeAlias) toTextCaret(bounds.lo) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")" else toText(tp.origin) } @@ -279,7 +286,7 @@ class PlainPrinter(_ctx: Context) extends Printer { } "LazyRef(" ~ refTxt ~ ")" case Range(lo, hi) => - toText(lo) ~ " .. " ~ toText(hi) + toText(lo) ~ ".." ~ toText(hi) case _ => tp.fallbackToText(this) } @@ -609,7 +616,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toText(sc: Scope): Text = ("Scope{" ~ dclsText(sc.toList) ~ "}").close - def toText[T >: Untyped](tree: Tree[T]): Text = { + def toText[T <: Untyped](tree: Tree[T]): Text = { def toTextElem(elem: Any): Text = elem match { case elem: Showable => elem.toText(this) case elem: List[?] 
=> "List(" ~ Text(elem map toTextElem, ",") ~ ")" @@ -691,8 +698,9 @@ class PlainPrinter(_ctx: Context) extends Printer { Text(ups.map(toText), ", ") Text(deps, "\n") } + val depsText = if Config.showConstraintDeps then c.depsToString else "" //Printer.debugPrintUnique = false - Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText)) + Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText, depsText)) finally ctx.typerState.constraint = savedConstraint diff --git a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala index 812351e8cec0..326630844dde 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala @@ -15,7 +15,7 @@ import scala.annotation.internal.sharable /** The base class of all printers */ -abstract class Printer extends caps.Pure { +abstract class Printer { private var prec: Precedence = GlobalPrec @@ -31,7 +31,7 @@ abstract class Printer extends caps.Pure { * ### `atPrec` vs `changePrec` * * This is to be used when changing precedence inside some sort of parentheses: - * for instance, to print T[A]` use + * for instance, to print `T[A]` use * `toText(T) ~ '[' ~ atPrec(GlobalPrec) { toText(A) } ~ ']'`. * * If the presence of the parentheses depends on precedence, inserting them manually is most certainly a bug. @@ -60,8 +60,7 @@ abstract class Printer extends caps.Pure { * A op B op' C parses as (A op B) op' C if op and op' are left-associative, and as * A op (B op' C) if they're right-associative, so we need respectively * ```scala - * val isType = ??? // is this a term or type operator? 
- * val prec = parsing.precedence(op, isType) + * val prec = parsing.precedence(op) * // either: * changePrec(prec) { toText(a) ~ op ~ atPrec(prec + 1) { toText(b) } } // for left-associative op and op' * // or: @@ -149,7 +148,7 @@ abstract class Printer extends caps.Pure { def toText(sc: Scope): Text /** Textual representation of tree */ - def toText[T >: Untyped](tree: Tree[T]): Text + def toText[T <: Untyped](tree: Tree[T]): Text /** Textual representation of source position */ def toText(pos: SourcePosition): Text diff --git a/tests/pos-with-compiler-cc/dotc/printing/RefinedPrinter.scala b/tests/pos-with-compiler-cc/dotc/printing/RefinedPrinter.scala index 2a87ec9b4bbe..62e1cd5baec8 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/RefinedPrinter.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/RefinedPrinter.scala @@ -40,7 +40,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override def printerContext: Context = myCtx - def withEnclosingDef(enclDef: Tree[? 
>: Untyped])(op: => Text): Text = { + def withEnclosingDef(enclDef: Tree[?])(op: => Text): Text = { val savedCtx = myCtx if (enclDef.hasType && enclDef.symbol.exists) myCtx = ctx.withOwner(enclDef.symbol) @@ -308,15 +308,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def exprToText(tp: ExprType): Text = "=> " ~ toText(tp.resType) - protected def blockToText[T >: Untyped](block: Block[T]): Text = + protected def blockToText[T <: Untyped](block: Block[T]): Text = blockText(block.stats :+ block.expr) - protected def blockText[T >: Untyped](trees: List[Tree[T]]): Text = + protected def blockText[T <: Untyped](trees: List[Tree[T]]): Text = inContextBracket { ("{" ~ toText(trees, "\n") ~ "}").close } - protected def typeApplyText[T >: Untyped](tree: TypeApply[T]): Text = { + protected def typeApplyText[T <: Untyped](tree: TypeApply[T]): Text = { val funText = toTextLocal(tree.fun) tree.fun match { case Select(New(tpt), nme.CONSTRUCTOR) if tpt.typeOpt.dealias.isInstanceOf[AppliedType] => @@ -326,7 +326,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def toTextCore[T >: Untyped](tree: Tree[T]): Text = { + protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = { import untpd._ def isLocalThis(tree: Tree) = tree.typeOpt match { @@ -739,7 +739,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - override def toText[T >: Untyped](tree: Tree[T]): Text = controlled { + override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { import untpd._ var txt = toTextCore(tree) @@ -826,7 +826,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def dropAnnotForModText(sym: Symbol): Boolean = sym == defn.BodyAnnot - protected def optAscription[T >: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) + protected def optAscription[T <: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) private def idText(tree: untpd.Tree): Text = (if showUniqueIds 
&& tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else "") ~ @@ -842,7 +842,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def useSymbol(tree: untpd.Tree) = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value - protected def nameIdText[T >: Untyped](tree: NameTree[T]): Text = + protected def nameIdText[T <: Untyped](tree: NameTree[T]): Text = if (tree.hasType && tree.symbol.exists) { val str = nameString(tree.symbol) tree match { @@ -856,13 +856,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def toTextOwner(tree: Tree[?]) = "[owner = " ~ tree.symbol.maybeOwner.show ~ "]" provided ctx.settings.YprintDebugOwners.value - protected def dclTextOr[T >: Untyped](tree: Tree[T])(treeText: => Text): Text = + protected def dclTextOr[T <: Untyped](tree: Tree[T])(treeText: => Text): Text = toTextOwner(tree) ~ { if (useSymbol(tree)) annotsText(tree.symbol) ~~ dclText(tree.symbol) else treeText } - def paramsText[T>: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match + def paramsText[T <: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match case Nil => "()" case untpd.ValDefs(vparams @ (vparam :: _)) => @@ -872,10 +872,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case untpd.TypeDefs(tparams) => "[" ~ toText(tparams, ", ") ~ "]" - def addParamssText[T >: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = + def addParamssText[T <: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = paramss.foldLeft(leading)((txt, params) => txt ~ paramsText(params)) - protected def valDefToText[T >: Untyped](tree: ValDef[T]): Text = { + protected def valDefToText[T <: Untyped](tree: ValDef[T]): Text = { dclTextOr(tree) { modText(tree.mods, tree.symbol, keywordStr(if (tree.mods.is(Mutable)) "var" else "val"), isType = false) ~~ valDefText(nameIdText(tree)) ~ optAscription(tree.tpt) ~ @@ -883,7 +883,7 @@ class 
RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def defDefToText[T >: Untyped](tree: DefDef[T]): Text = { + protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = { import untpd._ dclTextOr(tree) { val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false) @@ -989,8 +989,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ) } - protected def toTextPackageId[T >: Untyped](pid: Tree[T]): Text = - if (homogenizedView && pid.hasType) toTextLocal(pid.tpe.asInstanceOf[Showable]) + protected def toTextPackageId[T <: Untyped](pid: Tree[T]): Text = + if (homogenizedView && pid.hasType) toTextLocal(pid.typeOpt) else toTextLocal(pid) protected def packageDefText(tree: PackageDef): Text = { @@ -1044,10 +1044,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def optText(name: Name)(encl: Text => Text): Text = if (name.isEmpty) "" else encl(toText(name)) - def optText[T >: Untyped](tree: Tree[T])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: Tree[T])(encl: Text => Text): Text = if (tree.isEmpty) "" else encl(toText(tree)) - def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else "" override protected def treatAsTypeParam(sym: Symbol): Boolean = sym.is(TypeParam) diff --git a/tests/pos-with-compiler-cc/dotc/printing/Texts.scala b/tests/pos-with-compiler-cc/dotc/printing/Texts.scala index 17f86e766869..411fa74ebffa 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Texts.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Texts.scala @@ -15,12 +15,17 @@ object Texts { case Vertical(relems) => relems.isEmpty } + // Str Ver Clo Flu + // isVertical F T F F + // isClosed F T T F + // isFluid F F T T + // isSplittable F F F T def isVertical: Boolean = isInstanceOf[Vertical] def isClosed: Boolean = isVertical || 
isInstanceOf[Closed] def isFluid: Boolean = isInstanceOf[Fluid] def isSplittable: Boolean = isFluid && !isClosed - def close: Closed = new Closed(relems) + def close: Text = if isSplittable then Closed(relems) else this def remaining(width: Int): Int = this match { case Str(s, _) => @@ -53,7 +58,7 @@ object Texts { } private def appendIndented(that: Text)(width: Int): Text = - Vertical(that.layout(width - indentMargin).indented :: this.relems) + Fluid(that.layout(width - indentMargin).indented :: this.relems) private def append(width: Int)(that: Text): Text = if (this.isEmpty) that.layout(width) @@ -113,7 +118,7 @@ object Texts { sb.append("|") } } - sb.append(s) + sb.append(s.replaceAll("[ ]+$", "")) case _ => var follow = false for (elem <- relems.reverse) { @@ -138,7 +143,13 @@ object Texts { def ~ (that: Text): Text = if (this.isEmpty) that else if (that.isEmpty) this - else Fluid(that :: this :: Nil) + else this match + case Fluid(relems1) if !isClosed => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 ++ relems1) + case _ => Fluid(that +: relems1) + case _ => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 :+ this) + case _ => Fluid(that :: this :: Nil) def ~~ (that: Text): Text = if (this.isEmpty) that @@ -161,9 +172,9 @@ object Texts { def apply(xs: Traversable[Text], sep: String = " "): Text = if (sep == "\n") lines(xs) else { - val ys = xs filterNot (_.isEmpty) + val ys = xs.filterNot(_.isEmpty) if (ys.isEmpty) Str("") - else ys reduce (_ ~ sep ~ _) + else ys.reduceRight((a, b) => (a ~ sep).close ~ b) } /** The given texts `xs`, each on a separate line */ @@ -176,12 +187,16 @@ object Texts { case class Str(s: String, lineRange: LineRange = EmptyLineRange) extends Text { override def relems: List[Text] = List(this) + override def toString = this match + case Str(s, EmptyLineRange) => s"Str($s)" + case Str(s, lineRange) => s"Str($s, $lineRange)" } case class Vertical(relems: List[Text]) extends Text case class 
Fluid(relems: List[Text]) extends Text - class Closed(relems: List[Text]) extends Fluid(relems) + class Closed(relems: List[Text]) extends Fluid(relems): + override def productPrefix = "Closed" implicit def stringToText(s: String): Text = Str(s) diff --git a/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala b/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala index 2c4537b238a5..61bee4d9f32a 100644 --- a/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala +++ b/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala @@ -107,7 +107,7 @@ object AsyncHelper { var lastEndNs = 0L } - val localData: ThreadLocal[ThreadProfileData] = new ThreadLocal[ThreadProfileData] + val localData = new ThreadLocal[ThreadProfileData] private class SinglePhaseInstrumentedThreadPoolExecutor ( corePoolSize: Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, diff --git a/tests/pos-with-compiler-cc/dotc/profile/ExtendedThreadMxBean.java b/tests/pos-with-compiler-cc/dotc/profile/ExtendedThreadMxBean.java index 68ae4f148cfd..60f44db16add 100644 --- a/tests/pos-with-compiler-cc/dotc/profile/ExtendedThreadMxBean.java +++ b/tests/pos-with-compiler-cc/dotc/profile/ExtendedThreadMxBean.java @@ -248,13 +248,14 @@ public SunThreadMxBean(ThreadMXBean underlying) { super(underlying); this.real = underlying; try { - getThreadUserTimeMethod = real.getClass().getMethod("getThreadUserTime", long[].class); - isThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("isThreadAllocatedMemoryEnabled"); - setThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); - getThreadAllocatedBytesMethod1 = real.getClass().getMethod("getThreadAllocatedBytes", Long.TYPE); - getThreadAllocatedBytesMethod2 = real.getClass().getMethod("getThreadAllocatedBytes", long[].class); - isThreadAllocatedMemorySupportedMethod = real.getClass().getMethod("isThreadAllocatedMemorySupported"); - getThreadCpuTimeMethod = 
real.getClass().getMethod("getThreadCpuTime", long[].class); + Class cls = Class.forName("com.sun.management.ThreadMXBean"); + getThreadUserTimeMethod = cls.getMethod("getThreadUserTime", long[].class); + isThreadAllocatedMemoryEnabledMethod = cls.getMethod("isThreadAllocatedMemoryEnabled"); + setThreadAllocatedMemoryEnabledMethod = cls.getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); + getThreadAllocatedBytesMethod1 = cls.getMethod("getThreadAllocatedBytes", Long.TYPE); + getThreadAllocatedBytesMethod2 = cls.getMethod("getThreadAllocatedBytes", long[].class); + isThreadAllocatedMemorySupportedMethod = cls.getMethod("isThreadAllocatedMemorySupported"); + getThreadCpuTimeMethod = cls.getMethod("getThreadCpuTime", long[].class); getThreadUserTimeMethod.setAccessible(true); isThreadAllocatedMemoryEnabledMethod.setAccessible(true); diff --git a/tests/pos-with-compiler-cc/dotc/quoted/Interpreter.scala b/tests/pos-with-compiler-cc/dotc/quoted/Interpreter.scala new file mode 100644 index 000000000000..5a9490c3723e --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/quoted/Interpreter.scala @@ -0,0 +1,370 @@ +package dotty.tools.dotc +package quoted + +import scala.language.unsafeNulls + +import scala.collection.mutable +import scala.reflect.ClassTag + +import java.io.{PrintWriter, StringWriter} +import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.TreeMapWithImplicits +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Denotations.staticRef +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds.FlatName +import dotty.tools.dotc.core.Names._ +import dotty.tools.dotc.core.StagingContext._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.TypeErasure 
+import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.transform.TreeMapWithStages._ +import dotty.tools.dotc.typer.ImportInfo.withRootImports +import dotty.tools.dotc.util.SrcPos +import dotty.tools.repl.AbstractFileClassLoader + +/** Tree interpreter for metaprogramming constructs */ +abstract class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context): + import Interpreter._ + import tpd._ + + type Env = Map[Symbol, Object] + + /** Returns the result of interpreting the code in the tree. + * Return Some of the result or None if the result type is not consistent with the expected type. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + */ + final def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = + interpretTree(tree)(Map.empty) match { + case obj: T => Some(obj) + case obj => + // TODO upgrade to a full type tag check or something similar + report.error(s"Interpreted tree returned a result of an unexpected type. Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) + None + } + + /** Returns the result of interpreting the code in the tree. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. 
+ */ + protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + case Literal(Constant(value)) => + interpretLiteral(value) + + case tree: Ident if tree.symbol.is(Inline, butNot = Method) => + tree.tpe.widenTermRefExpr match + case ConstantType(c) => c.value.asInstanceOf[Object] + case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) + + // TODO disallow interpreted method calls as arguments + case Call(fn, args) => + if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) + interpretNew(fn.symbol, args.flatten.map(interpretTree)) + else if (fn.symbol.is(Module)) + interpretModuleAccess(fn.symbol) + else if (fn.symbol.is(Method) && fn.symbol.isStatic) { + val staticMethodCall = interpretedStaticMethodCall(fn.symbol.owner, fn.symbol) + staticMethodCall(interpretArgs(args, fn.symbol.info)) + } + else if fn.symbol.isStatic then + assert(args.isEmpty) + interpretedStaticFieldAccess(fn.symbol) + else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) + if (fn.name == nme.asInstanceOfPM) + interpretModuleAccess(fn.qualifier.symbol) + else { + val staticMethodCall = interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol) + staticMethodCall(interpretArgs(args, fn.symbol.info)) + } + else if (env.contains(fn.symbol)) + env(fn.symbol) + else if (tree.symbol.is(InlineProxy)) + interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) + else + unexpectedTree(tree) + + case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => + (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) + + // Interpret `foo(j = x, i = y)` which it is expanded to + // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` + case Block(stats, expr) => interpretBlock(stats, expr) + case NamedArg(_, arg) => interpretTree(arg) + + case Inlined(_, bindings, expansion) => interpretBlock(bindings, expansion) + + case Typed(expr, _) => + interpretTree(expr) + + 
case SeqLiteral(elems, _) => + interpretVarargs(elems.map(e => interpretTree(e))) + + case _ => + unexpectedTree(tree) + } + + private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { + def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = + assert(args.size == argTypes.size) + val view = + for (arg, info) <- args.lazyZip(argTypes) yield + info match + case _: ExprType => () => interpretTree(arg) // by-name argument + case _ => interpretTree(arg) // by-value argument + view.toList + + fnType.dealias match + case fnType: MethodType if fnType.isErasedMethod => interpretArgs(argss, fnType.resType) + case fnType: MethodType => + val argTypes = fnType.paramInfos + assert(argss.head.size == argTypes.size) + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) + case fnType: AppliedType if defn.isContextFunctionType(fnType) => + val argTypes :+ resType = fnType.args: @unchecked + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) + case fnType: PolyType => interpretArgs(argss, fnType.resType) + case fnType: ExprType => interpretArgs(argss, fnType.resType) + case _ => + assert(argss.isEmpty) + Nil + } + + private def interpretBlock(stats: List[Tree], expr: Tree)(implicit env: Env) = { + var unexpected: Option[Object] = None + val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match { + case stat: ValDef => + accEnv.updated(stat.symbol, interpretTree(stat.rhs)(accEnv)) + case stat => + if (unexpected.isEmpty) + unexpected = Some(unexpectedTree(stat)) + accEnv + }) + unexpected.getOrElse(interpretTree(expr)(newEnv)) + } + + private def interpretLiteral(value: Any)(implicit env: Env): Object = + value.asInstanceOf[Object] + + private def interpretVarargs(args: List[Object])(implicit env: Env): Object = + args.toSeq + + private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = { + val 
(inst, clazz) = + try + if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) + (null, loadReplLineClass(moduleClass)) + else { + val inst = loadModule(moduleClass) + (inst, inst.getClass) + } + catch + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + + val name = fn.name.asTermName + val method = getMethod(clazz, name, paramsSig(fn)) + (args: List[Object]) => stopIfRuntimeException(method.invoke(inst, args: _*), method) + } + + private def interpretedStaticFieldAccess(sym: Symbol)(implicit env: Env): Object = { + val clazz = loadClass(sym.owner.fullName.toString) + val field = clazz.getField(sym.name.toString) + field.get(null) + } + + private def interpretModuleAccess(fn: Symbol)(implicit env: Env): Object = + loadModule(fn.moduleClass) + + private def interpretNew(fn: Symbol, args: => List[Object])(implicit env: Env): Object = { + val clazz = loadClass(fn.owner.fullName.toString) + val constr = clazz.getConstructor(paramsSig(fn): _*) + constr.newInstance(args: _*).asInstanceOf[Object] + } + + private def unexpectedTree(tree: Tree)(implicit env: Env): Object = + throw new StopInterpretation("Unexpected tree could not be interpreted: " + tree, tree.srcPos) + + private def loadModule(sym: Symbol): Object = + if (sym.owner.is(Package)) { + // is top level object + val moduleClass = loadClass(sym.fullName.toString) + moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) + } + else { + // nested object in an object + val className = { + val pack = sym.topLevelClass.owner + if (pack == defn.RootPackage || pack == defn.EmptyPackageClass) sym.flatName.toString + else pack.showFullName + "." 
+ sym.flatName + } + val clazz = loadClass(className) + clazz.getConstructor().newInstance().asInstanceOf[Object] + } + + private def loadReplLineClass(moduleClass: Symbol)(implicit env: Env): Class[?] = { + val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) + lineClassloader.loadClass(moduleClass.name.firstPart.toString) + } + + private def loadClass(name: String): Class[?] = + try classLoader.loadClass(name) + catch { + case _: ClassNotFoundException if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $name", pos) + ctx.compilationUnit.suspend() + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + } + + private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = + try clazz.getMethod(name.toString, paramClasses: _*) + catch { + case _: NoSuchMethodException => + val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" + throw new StopInterpretation(msg, pos) + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + } + + private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = + try thunk + catch { + case ex: RuntimeException => + val sw = new StringWriter() + sw.write("A runtime exception occurred while executing macro expansion\n") + sw.write(ex.getMessage) + sw.write("\n") + ex.printStackTrace(new PrintWriter(sw)) + sw.write("\n") + throw new StopInterpretation(sw.toString, pos) + case ex: InvocationTargetException => + 
ex.getTargetException match { + case ex: scala.quoted.runtime.StopMacroExpansion => + throw ex + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + case targetException => + val sw = new StringWriter() + sw.write("Exception occurred while executing macro expansion.\n") + if (!ctx.settings.Ydebug.value) { + val end = targetException.getStackTrace.lastIndexWhere { x => + x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName + } + val shortStackTrace = targetException.getStackTrace.take(end + 1) + targetException.setStackTrace(shortStackTrace) + } + targetException.printStackTrace(new PrintWriter(sw)) + sw.write("\n") + throw new StopInterpretation(sw.toString, pos) + } + } + + private object MissingClassDefinedInCurrentRun { + def unapply(targetException: NoClassDefFoundError)(using Context): Option[Symbol] = { + val className = targetException.getMessage + if (className eq null) None + else { + val sym = staticRef(className.toTypeName).symbol + if (sym.isDefinedInCurrentRun) Some(sym) else None + } + } + } + + /** List of classes of the parameters of the signature of `sym` */ + private def paramsSig(sym: Symbol): List[Class[?]] = { + def paramClass(param: Type): Class[?] 
= { + def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { + case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) + case _ => (tpe, depth) + } + def javaArraySig(tpe: Type): String = { + val (elemType, depth) = arrayDepth(tpe, 0) + val sym = elemType.classSymbol + val suffix = + if (sym == defn.BooleanClass) "Z" + else if (sym == defn.ByteClass) "B" + else if (sym == defn.ShortClass) "S" + else if (sym == defn.IntClass) "I" + else if (sym == defn.LongClass) "J" + else if (sym == defn.FloatClass) "F" + else if (sym == defn.DoubleClass) "D" + else if (sym == defn.CharClass) "C" + else "L" + javaSig(elemType) + ";" + ("[" * depth) + suffix + } + def javaSig(tpe: Type): String = tpe match { + case tpe: JavaArrayType => javaArraySig(tpe) + case _ => + // Take the flatten name of the class and the full package name + val pack = tpe.classSymbol.topLevelClass.owner + val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." + packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString + } + + val sym = param.classSymbol + if (sym == defn.BooleanClass) classOf[Boolean] + else if (sym == defn.ByteClass) classOf[Byte] + else if (sym == defn.CharClass) classOf[Char] + else if (sym == defn.ShortClass) classOf[Short] + else if (sym == defn.IntClass) classOf[Int] + else if (sym == defn.LongClass) classOf[Long] + else if (sym == defn.FloatClass) classOf[Float] + else if (sym == defn.DoubleClass) classOf[Double] + else java.lang.Class.forName(javaSig(param), false, classLoader) + } + def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { + case tp: AppliedType if defn.isContextFunctionType(tp) => + // Call context function type direct method + tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) + case _ => Nil + } + val extraParams = getExtraParams(sym.info.finalResultType) + val allParams = TypeErasure.erasure(sym.info) match { + case meth: MethodType => meth.paramInfos ::: 
extraParams + case _ => extraParams + } + allParams.map(paramClass) + } +end Interpreter + +object Interpreter: + /** Exception that stops interpretation if some issue is found */ + class StopInterpretation(val msg: String, val pos: SrcPos) extends Exception + + object Call: + import tpd._ + /** Matches an expression that is either a field access or an application + * It returns a TermRef containing field accessed or a method reference and the arguments passed to it. + */ + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = + Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) + + private object Call0 { + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { + case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => + Some((fn, args)) + case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) + case fn: Select => Some((fn, Nil)) + case Apply(f @ Call0(fn, args1), args2) => + if (f.tpe.widenDealias.isErasedMethod) Some((fn, args1)) + else Some((fn, args2 :: args1)) + case TypeApply(Call0(fn, args), _) => Some((fn, args)) + case _ => None + } + } + end Call diff --git a/tests/pos-with-compiler-cc/dotc/report.scala b/tests/pos-with-compiler-cc/dotc/report.scala index 636da444696c..00399ecbfd0a 100644 --- a/tests/pos-with-compiler-cc/dotc/report.scala +++ b/tests/pos-with-compiler-cc/dotc/report.scala @@ -9,15 +9,15 @@ import config.SourceVersion import ast._ import config.Feature.sourceVersion import java.lang.System.currentTimeMillis -import language.experimental.pureFunctions + object report: /** For sending messages that are printed only if -verbose is set */ - def inform(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def inform(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = if ctx.settings.verbose.value then echo(msg, pos) - def echo(msg: -> String, pos: SrcPos = 
NoSourcePosition)(using Context): Unit = + def echo(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = ctx.reporter.report(new Info(msg.toMessage, pos.sourcePos)) private def issueWarning(warning: Warning)(using Context): Unit = @@ -26,28 +26,28 @@ object report: def deprecationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new DeprecationWarning(msg, pos.sourcePos)) - def deprecationWarning(msg: -> String, pos: SrcPos)(using Context): Unit = + def deprecationWarning(msg: => String, pos: SrcPos)(using Context): Unit = deprecationWarning(msg.toMessage, pos) def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) - def migrationWarning(msg: -> String, pos: SrcPos)(using Context): Unit = + def migrationWarning(msg: => String, pos: SrcPos)(using Context): Unit = migrationWarning(msg.toMessage, pos) def uncheckedWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new UncheckedWarning(msg, pos.sourcePos)) - def uncheckedWarning(msg: -> String, pos: SrcPos)(using Context): Unit = + def uncheckedWarning(msg: => String, pos: SrcPos)(using Context): Unit = uncheckedWarning(msg.toMessage, pos) def featureWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new FeatureWarning(msg, pos.sourcePos)) - def featureWarning(msg: -> String, pos: SrcPos)(using Context): Unit = + def featureWarning(msg: => String, pos: SrcPos)(using Context): Unit = featureWarning(msg.toMessage, pos) - def featureWarning(feature: String, featureDescription: -> String, + def featureWarning(feature: String, featureDescription: => String, featureUseSite: Symbol, required: Boolean, pos: SrcPos)(using Context): Unit = { val req = if (required) "needs to" else "should" val fqname = s"scala.language.$feature" @@ -70,7 +70,7 @@ object report: def warning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new Warning(msg, addInlineds(pos))) - def 
warning(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def warning(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = warning(msg.toMessage, pos) def error(msg: Message, pos: SrcPos)(using Context): Unit = @@ -78,7 +78,7 @@ object report: ctx.reporter.report(new Error(msg, fullPos)) if ctx.settings.YdebugError.value then Thread.dumpStack() - def error(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def error(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = error(msg.toMessage, pos) def error(ex: TypeError, pos: SrcPos)(using Context): Unit = @@ -91,14 +91,14 @@ object report: if sourceVersion.isMigrating && sourceVersion.ordinal <= from.ordinal then migrationWarning(msg, pos) else error(msg, pos) - def errorOrMigrationWarning(msg: -> String, pos: SrcPos, from: SourceVersion)(using Context): Unit = + def errorOrMigrationWarning(msg: => String, pos: SrcPos, from: SourceVersion)(using Context): Unit = errorOrMigrationWarning(msg.toMessage, pos, from) def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(errorFrom) then errorOrMigrationWarning(msg, pos, errorFrom) else if sourceVersion.isAtLeast(warnFrom) then warning(msg, pos) - def gradualErrorOrMigrationWarning(msg: -> String, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = + def gradualErrorOrMigrationWarning(msg: => String, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = gradualErrorOrMigrationWarning(msg.toMessage, pos, warnFrom, errorFrom) def restrictionError(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = @@ -111,27 +111,27 @@ object report: * See [[config.CompilerCommand#explainAdvanced]] for the exact meaning of * "contains" here. 
*/ - def log(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def log(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = if (ctx.settings.Ylog.value.containsPhase(ctx.phase)) echo(s"[log ${ctx.phase}] $msg", pos) - def debuglog(msg: -> String)(using Context): Unit = + def debuglog(msg: => String)(using Context): Unit = if (ctx.debug) log(msg) - def informTime(msg: -> String, start: Long)(using Context): Unit = { + def informTime(msg: => String, start: Long)(using Context): Unit = { def elapsed = s" in ${currentTimeMillis - start}ms" informProgress(msg + elapsed) } - def informProgress(msg: -> String)(using Context): Unit = + def informProgress(msg: => String)(using Context): Unit = inform("[" + msg + "]") - def logWith[T](msg: -> String)(value: T)(using Context): T = { + def logWith[T](msg: => String)(value: T)(using Context): T = { log(msg + " " + value) value } - def debugwarn(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def debugwarn(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = if (ctx.settings.Ydebug.value) warning(msg, pos) private def addInlineds(pos: SrcPos)(using Context): SourcePosition = diff --git a/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala b/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala index b792aed4264e..a92da7821fab 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala @@ -12,7 +12,6 @@ import dotty.tools.dotc.util.SourcePosition import java.util.Optional import scala.util.chaining._ import core.Decorators.toMessage -import language.experimental.pureFunctions object Diagnostic: @@ -26,7 +25,7 @@ object Diagnostic: msg: Message, pos: SourcePosition ) extends Diagnostic(msg, pos, ERROR): - def this(str: -> String, pos: SourcePosition) = this(str.toMessage, pos) + def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) /** A 
sticky error is an error that should not be hidden by backtracking and * trying some alternative path. Typically, errors issued after catching @@ -50,7 +49,7 @@ object Diagnostic: msg: Message, pos: SourcePosition ) extends Diagnostic(msg, pos, INFO): - def this(str: -> String, pos: SourcePosition) = this(str.toMessage, pos) + def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) abstract class ConditionalWarning( msg: Message, diff --git a/tests/pos-with-compiler-cc/dotc/reporting/Message.scala b/tests/pos-with-compiler-cc/dotc/reporting/Message.scala index 62ee4c54c354..9e397d606491 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/Message.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/Message.scala @@ -4,9 +4,10 @@ package reporting import core.Contexts.*, core.Decorators.*, core.Mode import config.SourceVersion + import scala.language.unsafeNulls + import scala.annotation.threadUnsafe -import language.experimental.pureFunctions object Message { val nonSensicalStartTag: String = "" @@ -118,15 +119,15 @@ abstract class Message(val errorId: ErrorMessageID) { self => override val canExplain = self.canExplain } - def append(suffix: -> String): Message = mapMsg(_ ++ suffix) + def append(suffix: => String): Message = mapMsg(_ ++ suffix) - def mapMsg(f: String -> String): Message = new Message(errorId): + def mapMsg(f: String => String): Message = new Message(errorId): val kind = self.kind def msg = f(self.msg) def explain = self.explain override def canExplain = self.canExplain - def appendExplanation(suffix: -> String): Message = new Message(errorId): + def appendExplanation(suffix: => String): Message = new Message(errorId): val kind = self.kind def msg = self.msg def explain = self.explain ++ suffix @@ -143,7 +144,7 @@ abstract class Message(val errorId: ErrorMessageID) { self => } /** The fallback `Message` containing no explanation and having no `kind` */ -class NoExplanation(msgFn: -> String) extends 
Message(ErrorMessageID.NoExplanationID) { +class NoExplanation(msgFn: => String) extends Message(ErrorMessageID.NoExplanationID) { def msg: String = msgFn def explain: String = "" val kind: MessageKind = MessageKind.NoKind diff --git a/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala b/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala index 2cb9ce50cbbe..497e77ae4a7c 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala @@ -10,13 +10,11 @@ import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol} import dotty.tools.dotc.reporting.Diagnostic._ import dotty.tools.dotc.reporting.Message._ import dotty.tools.dotc.util.NoSourcePosition -import core.Decorators.toMessage import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable -import scala.caps.unsafe.unsafeUnbox -import language.experimental.pureFunctions +import core.Decorators.toMessage object Reporter { /** Convert a SimpleReporter into a real Reporter */ @@ -33,7 +31,7 @@ object Reporter { type ErrorHandler = (Diagnostic, Context) => Unit - private val defaultIncompleteHandler: (Diagnostic, Context) -> Unit = + private val defaultIncompleteHandler: ErrorHandler = (mc, ctx) => ctx.reporter.report(mc)(using ctx) /** Show prompt if `-Xprompt` is passed as a flag to the compiler */ @@ -86,14 +84,13 @@ abstract class Reporter extends interfaces.ReporterResult { private var incompleteHandler: ErrorHandler = defaultIncompleteHandler def withIncompleteHandler[T](handler: ErrorHandler)(op: => T): T = { - val saved = incompleteHandler.unsafeUnbox + val saved = incompleteHandler incompleteHandler = handler try op finally incompleteHandler = saved } - private def isIncompleteChecking = - incompleteHandler.unsafeUnbox ne defaultIncompleteHandler + private def isIncompleteChecking = incompleteHandler ne defaultIncompleteHandler private var _errorCount = 0 private var 
_warningCount = 0 @@ -206,7 +203,7 @@ abstract class Reporter extends interfaces.ReporterResult { def report(dia: Diagnostic)(using Context): Unit = issueIfNotSuppressed(dia) def incomplete(dia: Diagnostic)(using Context): Unit = - incompleteHandler.unsafeUnbox(dia, ctx) + incompleteHandler(dia, ctx) /** Summary of warnings and errors */ def summary: String = { diff --git a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala index 06bd76dfceda..2197ea63a1c2 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala @@ -26,7 +26,6 @@ import ast.untpd import ast.tpd import transform.SymUtils._ import cc.CaptureSet.IdentityCaptRefMap -import language.experimental.pureFunctions /** Messages * ======== @@ -243,7 +242,7 @@ import language.experimental.pureFunctions } } - class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: -> String*)(using Context) + class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) extends TypeMismatchMsg(found, expected)(TypeMismatchID): // replace constrained TypeParamRefs and their typevars by their bounds where possible @@ -299,7 +298,7 @@ import language.experimental.pureFunctions end TypeMismatch - class NotAMember(site: Type, val name: Name, selected: String, addendum: -> String = "")(using Context) + class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG @@ -823,7 +822,7 @@ import language.experimental.pureFunctions |Write `.to$targetType` instead.""".stripMargin def explain = "" - class PatternMatchExhaustivity(uncoveredFn: -> String, hasMore: Boolean)(using Context) + class PatternMatchExhaustivity(uncoveredFn: => 
String, hasMore: Boolean)(using Context) extends Message(PatternMatchExhaustivityID) { def kind = MessageKind.PatternMatchExhaustivity lazy val uncovered = uncoveredFn @@ -843,7 +842,7 @@ import language.experimental.pureFunctions |""" } - class UncheckedTypePattern(msgFn: -> String)(using Context) + class UncheckedTypePattern(msgFn: => String)(using Context) extends PatternMatchMsg(UncheckedTypePatternID) { def msg = msgFn def explain = @@ -1391,7 +1390,7 @@ import language.experimental.pureFunctions |""".stripMargin } - class TypeDoesNotTakeParameters(tpe: Type, params: List[Trees.Tree[Trees.Untyped]])(using Context) + class TypeDoesNotTakeParameters(tpe: Type, params: List[untpd.Tree])(using Context) extends TypeMsg(TypeDoesNotTakeParametersID) { private def fboundsAddendum = if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then @@ -1973,7 +1972,7 @@ import language.experimental.pureFunctions } } - class CyclicInheritance(symbol: Symbol, addendum: -> String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { + class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { def msg = em"Cyclic inheritance: $symbol extends itself$addendum" def explain = { val codeExample = "class A extends A" diff --git a/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala b/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala index e75133c78759..e561b26abf6d 100644 --- a/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala +++ b/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala @@ -26,7 +26,6 @@ import java.io.PrintWriter import scala.collection.mutable import scala.util.hashing.MurmurHash3 import scala.util.chaining.* -import language.experimental.pureFunctions /** This phase sends a representation of the API of classes to sbt via callbacks. 
* @@ -595,7 +594,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { } } - def apiLazy(tp: -> Type): api.Type = { + def apiLazy(tp: => Type): api.Type = { // TODO: The sbt api needs a convenient way to make a lazy type. // For now, we repurpose Structure for this. val apiTp = lzy(Array(apiType(tp))) diff --git a/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala b/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala index a554c8e5066f..f7b15dc21eb0 100644 --- a/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala +++ b/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala @@ -25,7 +25,7 @@ import xsbti.api.DependencyContext import xsbti.api.DependencyContext._ import scala.collection.{Set, mutable} -import language.experimental.pureFunctions + /** This phase sends information on classes' dependencies to sbt via callbacks. * @@ -189,7 +189,7 @@ object ExtractDependencies { sym.fullName.stripModuleClassSuffix.toString /** Report an internal error in incremental compilation. */ - def internalError(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = report.error(s"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) } @@ -461,7 +461,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT // Avoid cycles by remembering both the types (testcase: // tests/run/enum-values.scala) and the symbols of named types (testcase: // tests/pos-java-interop/i13575) we've seen before. 
- private val seen = new mutable.HashSet[Symbol | Type] + val seen = new mutable.HashSet[Symbol | Type] def traverse(tp: Type): Unit = if (!seen.contains(tp)) { seen += tp tp match { diff --git a/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala b/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala index d4ee3dc9a68f..60aa76c91ed4 100644 --- a/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala +++ b/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala @@ -5,7 +5,6 @@ package sbt import scala.annotation.tailrec import scala.collection.mutable.ListBuffer import xsbti.api -import language.experimental.pureFunctions /** Create and hold thunks. A thunk is a (potentially) unevaluated value * that may be evaluated once. @@ -25,7 +24,7 @@ private[sbt] trait ThunkHolder { /** Store the by-name parameter `s` in a `Lazy` container without evaluating it. * It will be forced by the next call to `forceThunks()` */ - def lzy[T <: AnyRef](t: -> T): api.Lazy[T] = { + def lzy[T <: AnyRef](t: => T): api.Lazy[T] = { val l = api.SafeLazy.apply(() => t).nn thunks += l l diff --git a/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala b/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala index 916503e94203..071efb1fb91c 100644 --- a/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala +++ b/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala @@ -59,7 +59,7 @@ class ExtractSemanticDB extends Phase: private val localBodies = mutable.HashMap[Symbol, Tree]() /** The extracted symbol occurrences */ - val occurrences: mutable.ListBuffer[SymbolOccurrence] = new mutable.ListBuffer() + val occurrences = new mutable.ListBuffer[SymbolOccurrence]() /** The extracted symbol infos */ val symbolInfos = new mutable.ListBuffer[SymbolInformation]() @@ -67,7 +67,7 @@ class ExtractSemanticDB extends Phase: val synthetics = new mutable.ListBuffer[s.Synthetic]() /** A cache of localN names */ - val localNames: mutable.HashSet[String] = new 
mutable.HashSet() + val localNames = new mutable.HashSet[String]() /** The symbol occurrences generated so far, as a set */ private val generated = new mutable.HashSet[SymbolOccurrence] diff --git a/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala b/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala index 50765a172ffd..b2f26e3e992f 100644 --- a/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala +++ b/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala @@ -11,7 +11,7 @@ import dotty.tools.dotc.{semanticdb => s} class SyntheticsExtractor: import Scala3.{_, given} - val visited: collection.mutable.HashSet[Tree] = collection.mutable.HashSet() + val visited = collection.mutable.HashSet[Tree]() def tryFindSynthetic(tree: Tree)(using Context, SemanticSymbolBuilder, TypeOps): Option[s.Synthetic] = extension (synth: s.Synthetic) diff --git a/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala b/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala index 71cb30fbd5fb..2310bcdbc97c 100644 --- a/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala +++ b/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala @@ -1,5 +1,4 @@ package dotty.tools.dotc.semanticdb.internal -import language.experimental.pureFunctions abstract class SemanticdbTypeMapper[BaseType, CustomType] { def toCustom(base: BaseType): CustomType @@ -7,8 +6,8 @@ abstract class SemanticdbTypeMapper[BaseType, CustomType] { } object SemanticdbTypeMapper { - def apply[BaseType, CustomType](baseToCustom: BaseType -> CustomType)( - customToBase: CustomType -> BaseType + def apply[BaseType, CustomType](baseToCustom: BaseType => CustomType)( + customToBase: CustomType => BaseType ): SemanticdbTypeMapper[BaseType, CustomType] = new SemanticdbTypeMapper[BaseType, CustomType] { def toCustom(base: BaseType): CustomType = baseToCustom(base) diff --git 
a/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala b/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala index 2342170d79b8..91b5bc6a3de4 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala @@ -43,7 +43,7 @@ class CountOuterAccesses extends MiniPhase: // LambdaLift can create outer paths. These need to be known in this phase. /** The number of times an outer accessor that might be dropped is accessed */ - val outerAccessCount: mutable.HashMap[Symbol, Int] = new { + val outerAccessCount = new mutable.HashMap[Symbol, Int] { override def default(s: Symbol): Int = 0 } diff --git a/tests/pos-with-compiler-cc/dotc/transform/Erasure.scala b/tests/pos-with-compiler-cc/dotc/transform/Erasure.scala index 84005424e3ec..c797c9fd92c1 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Erasure.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Erasure.scala @@ -614,7 +614,7 @@ object Erasure { * are handled separately by [[typedDefDef]], [[typedValDef]] and [[typedTyped]]. 
*/ override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): TypeTree = - checkNotErasedClass(tree.withType(erasure(tree.tpe))) + checkNotErasedClass(tree.withType(erasure(tree.typeOpt))) /** This override is only needed to semi-erase type ascriptions */ override def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = diff --git a/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala b/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala index 25e8b49cc1ba..bf8a6fa6c7bf 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala @@ -26,7 +26,7 @@ object ForwardDepChecks: /** A class to help in forward reference checking */ class LevelInfo(val outer: OptLevelInfo, val owner: Symbol, stats: List[Tree])(using Context) - extends OptLevelInfo, caps.Pure { + extends OptLevelInfo { override val levelAndIndex: LevelAndIndex = stats.foldLeft(outer.levelAndIndex, 0) {(mi, stat) => val (m, idx) = mi diff --git a/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala b/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala index 84dbf3a0dcd1..9a36d65babe8 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala @@ -13,6 +13,7 @@ import collection.mutable import ast.Trees._ import core.NameKinds.SuperArgName import SymUtils._ +import core.Decorators.* object HoistSuperArgs { val name: String = "hoistSuperArgs" @@ -41,7 +42,7 @@ object HoistSuperArgs { * as method parameters. The definition is installed in the scope enclosing the class, * or, if that is a package, it is made a static method of the class itself. 
*/ -class HoistSuperArgs extends MiniPhase, IdentityDenotTransformer { thisPhase => +class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase => import ast.tpd._ override def phaseName: String = HoistSuperArgs.name @@ -181,9 +182,11 @@ class HoistSuperArgs extends MiniPhase, IdentityDenotTransformer { thisPhase => /** Hoist complex arguments in super call out of the class. */ def hoistSuperArgsFromCall(superCall: Tree, cdef: DefDef, lifted: mutable.ListBuffer[Symbol]): Tree = superCall match - case Block(defs, expr) => + case Block(defs, expr) if !expr.symbol.owner.is(Scala2x) => + // MO: The guard avoids the crash for #16351. + // It would be good to dig deeper, but I won't have the time myself to do it. cpy.Block(superCall)( - stats = defs.mapconserve { (t: Tree) => t match // !cc! explicity typed scrutinee is needed + stats = defs.mapconserve { case vdef: ValDef => try cpy.ValDef(vdef)(rhs = hoistSuperArg(vdef.rhs, cdef, lifted.toList)) finally lifted += vdef.symbol diff --git a/tests/pos-with-compiler-cc/dotc/transform/InterceptedMethods.scala b/tests/pos-with-compiler-cc/dotc/transform/InterceptedMethods.scala index ad068b84c041..c95500d856be 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/InterceptedMethods.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/InterceptedMethods.scala @@ -65,7 +65,7 @@ class InterceptedMethods extends MiniPhase { override def transformApply(tree: Apply)(using Context): Tree = { lazy val qual = tree.fun match { case Select(qual, _) => qual - case ident @ Ident(_) => + case ident: Ident => ident.tpe match { case TermRef(prefix: TermRef, _) => tpd.ref(prefix) diff --git a/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala b/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala index c32ea61cff2b..3b37ef130231 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala @@ -2,30 +2,33 @@ package dotty.tools.dotc package 
transform import java.util.IdentityHashMap - import ast.tpd import core.Annotations.Annotation import core.Constants.Constant -import core.Contexts._ -import core.Decorators._ +import core.Contexts.* +import core.Decorators.* import core.DenotTransformers.IdentityDenotTransformer -import core.Flags._ -import core.NameKinds.{LazyBitMapName, LazyLocalInitName, LazyLocalName, ExpandedName} +import core.Flags.* +import core.NameKinds.{ExpandedName, LazyBitMapName, LazyLocalInitName, LazyLocalName} import core.StdNames.nme -import core.Symbols._ -import core.Types._ +import core.Symbols.* +import core.Types.* import core.{Names, StdNames} +import dotty.tools.dotc.config.Feature import transform.MegaPhase.MiniPhase -import transform.SymUtils._ +import transform.SymUtils.* + import scala.collection.mutable class LazyVals extends MiniPhase with IdentityDenotTransformer { import LazyVals._ import tpd._ - /** this map contains mutable state of transformation: OffsetDefs to be appended to companion object definitions, - * and number of bits currently used */ - class OffsetInfo(var defs: List[Tree], var ord:Int) + /** + * The map contains the list of the offset trees. 
+ */ + class OffsetInfo(var defs: List[Tree], var ord: Int = 0) + private val appendOffsetDefs = mutable.Map.empty[Symbol, OffsetInfo] override def phaseName: String = LazyVals.name @@ -52,6 +55,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { else nullables.toList } + private def needsBoxing(tp: Type)(using Context): Boolean = tp.classSymbol.isPrimitiveValueClass override def prepareForUnit(tree: Tree)(using Context): Context = { if (lazyValNullables == null) @@ -62,7 +66,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { override def transformDefDef(tree: DefDef)(using Context): Tree = transformLazyVal(tree) - override def transformValDef(tree: ValDef)(using Context): Tree = transformLazyVal(tree) @@ -103,10 +106,9 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { /** Append offset fields to companion objects - */ + */ override def transformTemplate(template: Template)(using Context): Tree = { val cls = ctx.owner.asClass - appendOffsetDefs.get(cls) match { case None => template case Some(data) => @@ -115,7 +117,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } } - private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match { case first :: rest if isSuperConstrCall(first) => first :: prefix ::: rest case _ => prefix ::: stats @@ -186,7 +187,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { Thicket(holderTree, initTree, accessor) } - override def transformStats(trees: List[tpd.Tree])(using Context): List[Tree] = { // backend requires field usage to be after field definition // need to bring containers to start of method @@ -274,6 +274,231 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } } + /** + * Create a threadsafe lazy accessor and function that computes the field's value. `Evaluating` and + * `NullValue` are represented by `object`s and `Waiting` by a class that allows awaiting the completion + * of the evaluation. 
Note that since tail-recursive functions are transformed *before* lazy-vals, + * this implementation does involve explicit while loop. `PatternMatcher` is coming before `LazyVals`, + * therefore the pattern matching is implemented using if-s. + * + * ``` + * private @volatile var _x: AnyRef = null + * + * def x: A = + * val result = _x + * if result.isInstanceOf[A] then + * result // possible unboxing applied here + * else if result.eq(NullValue) then + * null // possible unboxing applied here + * else + * x_compute() // possible unboxing applied here + * + * private def x_compute(): AnyRef = + * while do + * val current: AnyRef = _x + * if current.eq(null) then + * if CAS(_x, null, Evaluating) then + * var resultNullable: AnyRef = null + * var result: AnyRef = null + * try + * resultNullable = rhs + * nullable = null // nulls out the nullable fields used only in initialization + * if resultNullable.eq(null) then + * result = NullValue + * else + * result = resultNullable + * finally + * if !CAS(_x, Evaluating, result) then + * val lock = _x.asInstanceOf[Waiting] + * CAS(_x, lock, result) + * lock.release() + * return resultNullable + * else + * if current.isInstanceOf[LazyValControlState] then + * if current.eq(Evaluating) then // To avoid creating Waiting instance + * CAS(current, current, new Waiting) + * else if current.isInstanceOf[Waiting] then + * current.asInstanceOf[Waiting].await() + * else return null + * else + * return current + * end while + * * ``` + * + * @param memberDef the transformed lazy field member definition + * @param claz the class containing this lazy val field + * @param target the target synthetic field + * @param offset the offset of the field in the storage allocation of the class + * @param thiz a reference to the transformed class + */ + def mkThreadSafeDef(memberDef: ValOrDefDef, + claz: ClassSymbol, + target: Symbol, + offset: Tree, + thiz: Tree)(using Context): (DefDef, DefDef) = { + val tp = 
memberDef.tpe.widenDealias.resultType.widenDealias + val waiting = ref(defn.LazyValsWaitingState) + val controlState = ref(defn.LazyValsControlState) + val evaluating = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.evaluating) + val nullValue = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.nullValue) + val objCasFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.objCas) + val accessorMethodSymbol = memberDef.symbol.asTerm + val lazyInitMethodName = LazyLocalInitName.fresh(memberDef.name.asTermName) + val lazyInitMethodSymbol = newSymbol(claz, lazyInitMethodName, Synthetic | Method | Private, MethodType(Nil)(_ => Nil, _ => defn.ObjectType)) + + val rhs = memberDef.rhs + val rhsMappedOwner = rhs.changeOwnerAfter(memberDef.symbol, lazyInitMethodSymbol, this) + val valueSymbol = newSymbol(accessorMethodSymbol, lazyNme.result, Synthetic, defn.ObjectType) + + val immediateValueCondition = + if (defn.LazyValsControlState.isSubClass(tp.classSymbol)) then + ref(valueSymbol).select(defn.Any_!=).appliedTo(nullLiteral).select(nme.And).appliedTo(ref(valueSymbol) + .select(defn.Any_isInstanceOf).appliedToType(defn.LazyValsControlState.typeRef) + .select(nme.UNARY_!).appliedToNone) + else + ref(valueSymbol).select(defn.Any_isInstanceOf).appliedToType(tp) + + val accessorBody = + Block( + ValDef(valueSymbol, ref(target)) :: Nil, + If( // if _x != null && !_x.isInstanceOf[LazyValControlState] then + immediateValueCondition, + ref(valueSymbol).ensureConforms(tp), // then return _x.asInstanceOf[A] + If( + ref(valueSymbol).select(defn.Object_eq).appliedTo(nullValue), + nullLiteral.ensureConforms(tp), + ref(lazyInitMethodSymbol).ensureApplied.ensureConforms(tp) // else return x_compute() + ) + ) + ) + + val accessorDef = DefDef(accessorMethodSymbol, accessorBody) + + // if observed a null (uninitialized) value + val initialize = { + // var result: AnyRef + val resSymbNullable = newSymbol(lazyInitMethodSymbol, lazyNme.resultNullable, Synthetic | Mutable, 
defn.ObjectType) + val resSymb = newSymbol(lazyInitMethodSymbol, lazyNme.result, Synthetic | Mutable, defn.ObjectType) + // releasing block in finally + val lockRel = { + val lockSymb = newSymbol(lazyInitMethodSymbol, lazyNme.lock, Synthetic, waiting.typeOpt) + Block(ValDef(lockSymb, ref(target).cast(waiting.typeOpt)) + :: objCasFlag.appliedTo(thiz, offset, ref(lockSymb), ref(resSymb)) :: Nil, + ref(lockSymb).select(lazyNme.RLazyVals.waitingRelease).ensureApplied) + } + // finally block + val fin = If( + objCasFlag.appliedTo(thiz, offset, evaluating, ref(resSymb)).select(nme.UNARY_!).appliedToNone, + lockRel, + unitLiteral + ) + // entire try block + val evaluate = Try( + + Block( + (Assign(ref(resSymbNullable), if needsBoxing(tp) && rhsMappedOwner != EmptyTree then rhsMappedOwner.ensureConforms(defn.boxedType(tp)) else rhsMappedOwner) // try result = rhs + :: If( + ref(resSymbNullable).select(defn.Object_eq).appliedTo(nullLiteral), + Assign(ref(resSymb), nullValue), + Assign(ref(resSymb), ref(resSymbNullable)) + ) :: Nil) + ::: nullOut(nullableFor(accessorMethodSymbol)), + unitLiteral), + Nil, + fin + ) + // if CAS(_, null, Evaluating) + If( + objCasFlag.appliedTo(thiz, offset, nullLiteral, evaluating), + Block(ValDef(resSymb, nullLiteral) :: ValDef(resSymbNullable, nullLiteral) :: evaluate :: Nil, // var result: AnyRef = null + Return(ref(resSymbNullable), lazyInitMethodSymbol)), + unitLiteral + ).withType(defn.UnitType) + } + + val current = newSymbol(lazyInitMethodSymbol, lazyNme.current, Synthetic, defn.ObjectType) + val ifNotUninitialized = + If( + ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(controlState), + // if a control state + If( + ref(current).select(defn.Object_eq).appliedTo(evaluating), + // if is Evaluating then CAS(_, Evaluating, new Waiting) + Block( + objCasFlag.appliedTo(thiz, offset, ref(current), Select(New(waiting), StdNames.nme.CONSTRUCTOR).ensureApplied) :: Nil, + unitLiteral + ), + // if not Evaluating + If( + 
ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(waiting), + // if is waiting + ref(current).select(defn.Any_asInstanceOf).appliedToTypeTree(waiting).select(lazyNme.RLazyVals.waitingAwaitRelease, _.info.paramInfoss.exists(_.size == 0)).ensureApplied, + Return(nullLiteral, lazyInitMethodSymbol) + ) + ), + // if not a control state + Return(ref(current), lazyInitMethodSymbol) + ) + + val initBody = Block(ValDef(current, ref(target)) :: Nil, If(ref(current).select(defn.Object_eq).appliedTo(nullLiteral), initialize, ifNotUninitialized).withType(defn.UnitType)) + val initMainLoop = WhileDo(EmptyTree, initBody) // becomes: while (true) do { body } + val initMethodDef = DefDef(lazyInitMethodSymbol, initMainLoop) + (accessorDef, initMethodDef) + } + + def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { + assert(!(x.symbol is Mutable)) + if ctx.settings.YlightweightLazyVals.value then + transformMemberDefThreadSafeNew(x) + else + transformMemberDefThreadSafeLegacy(x) + } + + def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { + import dotty.tools.dotc.core.Types._ + import dotty.tools.dotc.core.Flags._ + + val claz = x.symbol.owner.asClass + val thizClass = Literal(Constant(claz.info)) + + def offsetName(id: Int) = s"${StdNames.nme.LAZY_FIELD_OFFSET}${if (x.symbol.owner.is(Module)) "_m_" else ""}$id".toTermName + val containerName = LazyLocalName.fresh(x.name.asTermName) + val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags | Private, defn.ObjectType, coord = x.symbol.coord).enteredAfter(this) + containerSymbol.addAnnotation(Annotation(defn.VolatileAnnot)) // private @volatile var _x: AnyRef + containerSymbol.addAnnotations(x.symbol.annotations) // pass annotations from original definition + val stat = x.symbol.isStatic + if stat then + containerSymbol.setFlag(JavaStatic) + val getOffset = + if stat then + Select(ref(defn.LazyValsModule), 
lazyNme.RLazyVals.getStaticFieldOffset) + else + Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) + val containerTree = ValDef(containerSymbol, nullLiteral) + + // create an offset for this lazy val + val offsetSymbol: TermSymbol = appendOffsetDefs.get(claz) match + case Some(info) => + newSymbol(claz, offsetName(info.defs.size), Synthetic, defn.LongType).enteredAfter(this) + case None => + newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + val fieldTree = thizClass.select(lazyNme.RLazyVals.getDeclaredField).appliedTo(Literal(Constant(containerName.mangledString))) + val offsetTree = ValDef(offsetSymbol.nn, getOffset.appliedTo(fieldTree)) + val offsetInfo = appendOffsetDefs.getOrElseUpdate(claz, new OffsetInfo(Nil)) + offsetInfo.defs = offsetTree :: offsetInfo.defs + val offset = ref(offsetSymbol.nn) + + val swapOver = + if stat then + tpd.clsOf(x.symbol.owner.typeRef) + else + This(claz) + + val (accessorDef, initMethodDef) = mkThreadSafeDef(x, claz, containerSymbol, offset, swapOver) + Thicket(containerTree, accessorDef, initMethodDef) + } + /** Create a threadsafe lazy accessor equivalent to such code * ``` * def methodSymbol(): Int = { @@ -305,7 +530,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * } * ``` */ - def mkThreadSafeDef(methodSymbol: TermSymbol, + def mkThreadSafeDefLegacy(methodSymbol: TermSymbol, claz: ClassSymbol, ord: Int, target: Symbol, @@ -374,15 +599,12 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { DefDef(methodSymbol, loop) } - def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { - assert(!(x.symbol is Mutable)) - + def transformMemberDefThreadSafeLegacy(x: ValOrDefDef)(using Context): Thicket = { val tpe = x.tpe.widen.resultType.widen val claz = x.symbol.owner.asClass val thizClass = Literal(Constant(claz.info)) - val helperModule = 
requiredModule("scala.runtime.LazyVals") - val getOffset = Select(ref(helperModule), lazyNme.RLazyVals.getOffset) - val getOffsetStatic = Select(ref(helperModule), lazyNme.RLazyVals.getOffsetStatic) + val getOffset = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffset) + val getOffsetStatic = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) var offsetSymbol: TermSymbol | Null = null var flag: Tree = EmptyTree var ord = 0 @@ -425,17 +647,16 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val containerName = LazyLocalName.fresh(x.name.asTermName) val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags, tpe, coord = x.symbol.coord).enteredAfter(this) - val containerTree = ValDef(containerSymbol, defaultValue(tpe)) val offset = ref(offsetSymbol.nn) - val getFlag = Select(ref(helperModule), lazyNme.RLazyVals.get) - val setFlag = Select(ref(helperModule), lazyNme.RLazyVals.setFlag) - val wait = Select(ref(helperModule), lazyNme.RLazyVals.wait4Notification) - val state = Select(ref(helperModule), lazyNme.RLazyVals.state) - val cas = Select(ref(helperModule), lazyNme.RLazyVals.cas) + val getFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.get) + val setFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.setFlag) + val wait = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.wait4Notification) + val state = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.state) + val cas = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.cas) - val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) + val accessor = mkThreadSafeDefLegacy(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) if (flag eq EmptyTree) Thicket(containerTree, accessor) else Thicket(containerTree, flag, accessor) @@ -445,26 +666,35 @@ class LazyVals extends MiniPhase with 
IdentityDenotTransformer { object LazyVals { val name: String = "lazyVals" val description: String = "expand lazy vals" - object lazyNme { import Names.TermName object RLazyVals { import scala.runtime.LazyVals.{Names => N} - val get: TermName = N.get.toTermName - val setFlag: TermName = N.setFlag.toTermName - val wait4Notification: TermName = N.wait4Notification.toTermName - val state: TermName = N.state.toTermName - val cas: TermName = N.cas.toTermName - val getOffset: TermName = N.getOffset.toTermName - val getOffsetStatic: TermName = "getOffsetStatic".toTermName - val getDeclaredField: TermName = "getDeclaredField".toTermName + val waitingAwaitRelease: TermName = "await".toTermName + val waitingRelease: TermName = "countDown".toTermName + val evaluating: TermName = "Evaluating".toTermName + val nullValue: TermName = "NullValue".toTermName + val objCas: TermName = "objCAS".toTermName + val get: TermName = N.get.toTermName + val setFlag: TermName = N.setFlag.toTermName + val wait4Notification: TermName = N.wait4Notification.toTermName + val state: TermName = N.state.toTermName + val cas: TermName = N.cas.toTermName + val getOffset: TermName = N.getOffset.toTermName + val getOffsetStatic: TermName = "getOffsetStatic".toTermName + val getStaticFieldOffset: TermName = "getStaticFieldOffset".toTermName + val getDeclaredField: TermName = "getDeclaredField".toTermName } val flag: TermName = "flag".toTermName val state: TermName = "state".toTermName val result: TermName = "result".toTermName + val resultNullable: TermName = "resultNullable".toTermName val value: TermName = "value".toTermName val initialized: TermName = "initialized".toTermName val initialize: TermName = "initialize".toTermName val retry: TermName = "retry".toTermName + val current: TermName = "current".toTermName + val lock: TermName = "lock".toTermName + val discard: TermName = "discard".toTermName } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala 
b/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala index bff0e8340c0b..27ccd622bc65 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala @@ -9,7 +9,7 @@ import Contexts._ /** A base class for transforms. * A transform contains a compiler phase which applies a tree transformer. */ -abstract class MacroTransform extends Phase, caps.Pure { +abstract class MacroTransform extends Phase { import ast.tpd._ diff --git a/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala b/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala index 2543a89af4d7..9d241216bdaa 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala @@ -28,7 +28,7 @@ object MegaPhase { * - Other: to prepape/transform a tree that does not have a specific prepare/transform * method pair. */ - abstract class MiniPhase extends Phase, caps.Pure { + abstract class MiniPhase extends Phase { private[MegaPhase] var superPhase: MegaPhase = _ private[MegaPhase] var idxInGroup: Int = _ diff --git a/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala b/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala index 3552d08e81f2..6456066bfdb0 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala @@ -27,7 +27,7 @@ object Memoize { val description: String = "add private fields to getters and setters" private final class MyState { - val classesThatNeedReleaseFence: util.HashSet[Symbol] = new util.HashSet() + val classesThatNeedReleaseFence = new util.HashSet[Symbol] } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala b/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala index 9a220d9c4f8c..5ca09dd6188f 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala @@ 
-228,10 +228,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => } val superCallsAndArgs: Map[Symbol, (Tree, List[Tree], List[Tree])] = ( - for - p: Tree <- impl.parents // !cc! explicit type on `p` is needed - constr = stripBlock(p).symbol - if constr.isConstructor + for (p <- impl.parents; constr = stripBlock(p).symbol if constr.isConstructor) yield constr.owner -> transformConstructor(p) ).toMap diff --git a/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala b/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala index c50a96dc8b81..99702686edf8 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala @@ -38,8 +38,7 @@ class MoveStatics extends MiniPhase with SymTransformer { override def transformStats(trees: List[Tree])(using Context): List[Tree] = if (ctx.owner.is(Flags.Package)) { val (classes, others) = trees.partition(x => x.isInstanceOf[TypeDef] && x.symbol.isClass) - val pairs = classes.groupBy(cls => cls.symbol.name.stripModuleClassSuffix: Name).asInstanceOf[Map[Name, List[TypeDef]]] - // !cc! 
type ascription `: Name` needed to make it compile under captureChecking + val pairs = classes.groupBy(_.symbol.name.stripModuleClassSuffix).asInstanceOf[Map[Name, List[TypeDef]]] def rebuild(orig: TypeDef, newBody: List[Tree]): Tree = { val staticFields = newBody.filter(x => x.isInstanceOf[ValDef] && x.symbol.hasAnnotation(defn.ScalaStaticAnnot)).asInstanceOf[List[ValDef]] diff --git a/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala b/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala index 6004f376b7b4..70fa0e5cc513 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala @@ -18,7 +18,6 @@ import config.Printers.patmatch import reporting._ import dotty.tools.dotc.ast._ import util.Property._ -import language.experimental.pureFunctions /** The pattern matching transform. * After this phase, the only Match nodes remaining in the code are simple switches @@ -106,7 +105,7 @@ object PatternMatcher { // TODO: Drop Case once we use everywhere else `isPatmatGenerated`. 
/** The plan `let x = rhs in body(x)` where `x` is a fresh variable */ - private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol -> Plan): Plan = { + private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = { val declTpe = if tpe.exists then tpe else rhs.tpe val vble = newVar(rhs, EmptyFlags, declTpe) initializer(vble) = rhs @@ -114,7 +113,7 @@ object PatternMatcher { } /** The plan `l: { expr(l) }` where `l` is a fresh label */ - private def altsLabeledAbstract(expr: (-> Plan) -> Plan): Plan = { + private def altsLabeledAbstract(expr: (=> Plan) => Plan): Plan = { val label = newSymbol(ctx.owner, PatMatAltsName.fresh(), Synthetic | Label, defn.UnitType) LabeledPlan(label, expr(ReturnPlan(label))) @@ -468,7 +467,7 @@ object PatternMatcher { // ----- Optimizing plans --------------- /** A superclass for plan transforms */ - class PlanTransform extends (Plan -> Plan) { + class PlanTransform extends (Plan => Plan) { protected val treeMap: TreeMap = new TreeMap { override def transform(tree: Tree)(using Context) = tree } @@ -1033,7 +1032,7 @@ object PatternMatcher { case _ => end checkSwitch - val optimizations: List[(String, Plan -> Plan)] = List( + val optimizations: List[(String, Plan => Plan)] = List( "mergeTests" -> mergeTests, "inlineVars" -> inlineVars ) diff --git a/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala b/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala index 0424b48751bc..05aaa745bb18 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala @@ -302,12 +302,14 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase checkNoConstructorProxy(tree) transformSelect(tree, Nil) case tree: Apply => - val methType = tree.fun.tpe.widen + val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = if (methType.isErasedMethod) tpd.cpy.Apply(tree)( tree.fun, tree.args.mapConserve(arg => + if 
methType.isResultDependent then + Checking.checkRealizable(arg.tpe, arg.srcPos, "erased argument") if (methType.isImplicitMethod && arg.span.isSynthetic) arg match case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => @@ -360,6 +362,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase } case Inlined(call, bindings, expansion) if !call.isEmpty => val pos = call.sourcePos + CrossVersionChecks.checkExperimentalRef(call.symbol, pos) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(call))) case templ: Template => diff --git a/tests/pos-with-compiler-cc/dotc/transform/RepeatableAnnotations.scala b/tests/pos-with-compiler-cc/dotc/transform/RepeatableAnnotations.scala index e8f8a80e1a0d..1cf687187eeb 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/RepeatableAnnotations.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/RepeatableAnnotations.scala @@ -10,6 +10,7 @@ import Symbols.defn import Constants._ import Types._ import Decorators._ +import Flags._ import scala.collection.mutable @@ -33,7 +34,7 @@ class RepeatableAnnotations extends MiniPhase: val annsByType = stableGroupBy(annotations, _.symbol) annsByType.flatMap { case (_, a :: Nil) => a :: Nil - case (sym, anns) if sym.derivesFrom(defn.ClassfileAnnotationClass) => + case (sym, anns) if sym.is(JavaDefined) => sym.getAnnotation(defn.JavaRepeatableAnnot).flatMap(_.argumentConstant(0)) match case Some(Constant(containerTpe: Type)) => val clashingAnns = annsByType.getOrElse(containerTpe.classSymbol, Nil) diff --git a/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala b/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala index 5d031d100e53..ea83f276a59c 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala @@ -19,6 +19,8 @@ import 
dotty.tools.dotc.core.Denotations.staticRef import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.quoted.Interpreter + import scala.util.control.NonFatal import dotty.tools.dotc.util.SrcPos import dotty.tools.repl.AbstractFileClassLoader @@ -29,11 +31,11 @@ import dotty.tools.dotc.quoted.{PickledQuotes, QuoteUtils} import scala.quoted.Quotes import scala.quoted.runtime.impl._ -import language.experimental.pureFunctions /** Utility class to splice quoted expressions */ object Splicer { - import tpd._ + import tpd.* + import Interpreter.* /** Splice the Tree for a Quoted expression. `${'{xyz}}` becomes `xyz` * and for `$xyz` the tree of `xyz` is interpreted for which the @@ -51,14 +53,11 @@ object Splicer { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) try { - val interpreter = new Interpreter(splicePos, classLoader) + val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree - val interpretedExpr: Option[Quotes -> scala.quoted.Expr[Any]] = // !cc! 
explicit type ascription needed here - interpreter.interpret(tree) - val interpretedTree: Tree = interpretedExpr match - case Some(macroClosure) => PickledQuotes.quotedExprToTree(macroClosure(QuotesImpl())) - case None => tree + val interpretedExpr = interpreter.interpret[Quotes => scala.quoted.Expr[Any]](tree) + val interpretedTree = interpretedExpr.fold(tree)(macroClosure => PickledQuotes.quotedExprToTree(macroClosure(QuotesImpl()))) checkEscapedVariables(interpretedTree, macroOwner) } finally { @@ -223,24 +222,13 @@ object Splicer { checkIfValidStaticCall(tree)(using Set.empty) } - /** Tree interpreter that evaluates the tree */ - private class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) { - - type Env = Map[Symbol, Object] - - /** Returns the interpreted result of interpreting the code a call to the symbol with default arguments. - * Return Some of the result or None if some error happen during the interpretation. - */ - def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = - interpretTree(tree)(Map.empty) match { - case obj: T => Some(obj) - case obj => - // TODO upgrade to a full type tag check or something similar - report.error(s"Interpreted tree returned a result of an unexpected type. Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) - None - } + /** Tree interpreter that evaluates the tree. + * Interpreter is assumed to start at quotation level -1. 
+ */ + private class SpliceInterpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) extends Interpreter(pos, classLoader) { - def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + override protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + // Interpret level -1 quoted code `'{...}` (assumed without level 0 splices) case Apply(Select(Apply(TypeApply(fn, _), quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => val quoted1 = quoted match { case quoted: Ident if quoted.symbol.isAllOf(InlineByNameProxy) => @@ -249,324 +237,14 @@ object Splicer { case Inlined(EmptyTree, _, quoted) => quoted case _ => quoted } - interpretQuote(quoted1) + new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(quoted1, ctx.owner)).withSpan(quoted1.span), SpliceScope.getCurrent) + // Interpret level -1 `Type.of[T]` case Apply(TypeApply(fn, quoted :: Nil), _) if fn.symbol == defn.QuotedTypeModule_of => - interpretTypeQuote(quoted) - - case Literal(Constant(value)) => - interpretLiteral(value) - - case tree: Ident if tree.symbol.is(Inline, butNot = Method) => - tree.tpe.widenTermRefExpr match - case ConstantType(c) => c.value.asInstanceOf[Object] - case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) - - // TODO disallow interpreted method calls as arguments - case Call(fn, args) => - if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) - interpretNew(fn.symbol, args.flatten.map(interpretTree)) - else if (fn.symbol.is(Module)) - interpretModuleAccess(fn.symbol) - else if (fn.symbol.is(Method) && fn.symbol.isStatic) { - val staticMethodCall = interpretedStaticMethodCall(fn.symbol.owner, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if fn.symbol.isStatic then - assert(args.isEmpty) - interpretedStaticFieldAccess(fn.symbol) - else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) - if (fn.name == 
nme.asInstanceOfPM) - interpretModuleAccess(fn.qualifier.symbol) - else { - val staticMethodCall = interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if (env.contains(fn.symbol)) - env(fn.symbol) - else if (tree.symbol.is(InlineProxy)) - interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) - else - unexpectedTree(tree) - - case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => - (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) - - // Interpret `foo(j = x, i = y)` which it is expanded to - // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` - case Block(stats, expr) => interpretBlock(stats, expr) - case NamedArg(_, arg) => interpretTree(arg) - - case Inlined(_, bindings, expansion) => interpretBlock(bindings, expansion) - - case Typed(expr, _) => - interpretTree(expr) - - case SeqLiteral(elems, _) => - interpretVarargs(elems.map(e => interpretTree(e))) + new TypeImpl(QuoteUtils.changeOwnerOfTree(quoted, ctx.owner), SpliceScope.getCurrent) case _ => - unexpectedTree(tree) - } - - private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { - def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = - assert(args.size == argTypes.size) - val view = - for (arg, info) <- args.lazyZip(argTypes) yield - info match - case _: ExprType => () => interpretTree(arg) // by-name argument - case _ => interpretTree(arg) // by-value argument - view.toList - - fnType.dealias match - case fnType: MethodType if fnType.isErasedMethod => interpretArgs(argss, fnType.resType) - case fnType: MethodType => - val argTypes = fnType.paramInfos - assert(argss.head.size == argTypes.size) - interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) - case fnType: AppliedType if defn.isContextFunctionType(fnType) => - val argTypes :+ resType = fnType.args: @unchecked - 
interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) - case fnType: PolyType => interpretArgs(argss, fnType.resType) - case fnType: ExprType => interpretArgs(argss, fnType.resType) - case _ => - assert(argss.isEmpty) - Nil - } - - private def interpretBlock(stats: List[Tree], expr: Tree)(implicit env: Env) = { - var unexpected: Option[Object] = None - val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match { - case stat: ValDef => - accEnv.updated(stat.symbol, interpretTree(stat.rhs)(accEnv)) - case stat => - if (unexpected.isEmpty) - unexpected = Some(unexpectedTree(stat)) - accEnv - }) - unexpected.getOrElse(interpretTree(expr)(newEnv)) - } - - private def interpretQuote(tree: Tree)(implicit env: Env): Object = - new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(tree, ctx.owner)).withSpan(tree.span), SpliceScope.getCurrent) - - private def interpretTypeQuote(tree: Tree)(implicit env: Env): Object = - new TypeImpl(QuoteUtils.changeOwnerOfTree(tree, ctx.owner), SpliceScope.getCurrent) - - private def interpretLiteral(value: Any)(implicit env: Env): Object = - value.asInstanceOf[Object] - - private def interpretVarargs(args: List[Object])(implicit env: Env): Object = - args.toSeq - - private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = { - val (inst, clazz) = - try - if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) - (null, loadReplLineClass(moduleClass)) - else { - val inst = loadModule(moduleClass) - (inst, inst.getClass) - } - catch - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - - val name = fn.name.asTermName - val method = getMethod(clazz, name, paramsSig(fn)) - (args: List[Object]) => stopIfRuntimeException(method.invoke(inst, 
args: _*), method) - } - - private def interpretedStaticFieldAccess(sym: Symbol)(implicit env: Env): Object = { - val clazz = loadClass(sym.owner.fullName.toString) - val field = clazz.getField(sym.name.toString) - field.get(null) - } - - private def interpretModuleAccess(fn: Symbol)(implicit env: Env): Object = - loadModule(fn.moduleClass) - - private def interpretNew(fn: Symbol, args: => List[Object])(implicit env: Env): Object = { - val clazz = loadClass(fn.owner.fullName.toString) - val constr = clazz.getConstructor(paramsSig(fn): _*) - constr.newInstance(args: _*).asInstanceOf[Object] - } - - private def unexpectedTree(tree: Tree)(implicit env: Env): Object = - throw new StopInterpretation("Unexpected tree could not be interpreted: " + tree, tree.srcPos) - - private def loadModule(sym: Symbol): Object = - if (sym.owner.is(Package)) { - // is top level object - val moduleClass = loadClass(sym.fullName.toString) - moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) - } - else { - // nested object in an object - val className = { - val pack = sym.topLevelClass.owner - if (pack == defn.RootPackage || pack == defn.EmptyPackageClass) sym.flatName.toString - else pack.showFullName + "." + sym.flatName - } - val clazz = loadClass(className) - clazz.getConstructor().newInstance().asInstanceOf[Object] - } - - private def loadReplLineClass(moduleClass: Symbol)(implicit env: Env): Class[?] = { - val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) - lineClassloader.loadClass(moduleClass.name.firstPart.toString) - } - - private def loadClass(name: String): Class[?] 
= - try classLoader.loadClass(name) - catch { - case _: ClassNotFoundException => - val msg = s"Could not find class $name in classpath" - throw new StopInterpretation(msg, pos) - } - - private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = - try clazz.getMethod(name.toString, paramClasses: _*) - catch { - case _: NoSuchMethodException => - val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" - throw new StopInterpretation(msg, pos) - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - } - - private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = - try thunk - catch { - case ex: RuntimeException => - val sw = new StringWriter() - sw.write("A runtime exception occurred while executing macro expansion\n") - sw.write(ex.getMessage) - sw.write("\n") - ex.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - case ex: InvocationTargetException => - ex.getTargetException match { - case ex: scala.quoted.runtime.StopMacroExpansion => - throw ex - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - case targetException => - val sw = new StringWriter() - sw.write("Exception occurred while executing macro expansion.\n") - if (!ctx.settings.Ydebug.value) { - val end = targetException.getStackTrace.lastIndexWhere { x => - x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName - } - val shortStackTrace = targetException.getStackTrace.take(end + 1) - 
targetException.setStackTrace(shortStackTrace) - } - targetException.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - } - } - - private object MissingClassDefinedInCurrentRun { - def unapply(targetException: NoClassDefFoundError)(using Context): Option[Symbol] = { - val className = targetException.getMessage - if (className == null) None - else { - val sym = staticRef(className.toTypeName).symbol - if (sym.isDefinedInCurrentRun) Some(sym) else None - } - } - } - - /** List of classes of the parameters of the signature of `sym` */ - private def paramsSig(sym: Symbol): List[Class[?]] = { - def paramClass(param: Type): Class[?] = { - def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { - case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) - case _ => (tpe, depth) - } - def javaArraySig(tpe: Type): String = { - val (elemType, depth) = arrayDepth(tpe, 0) - val sym = elemType.classSymbol - val suffix = - if (sym == defn.BooleanClass) "Z" - else if (sym == defn.ByteClass) "B" - else if (sym == defn.ShortClass) "S" - else if (sym == defn.IntClass) "I" - else if (sym == defn.LongClass) "J" - else if (sym == defn.FloatClass) "F" - else if (sym == defn.DoubleClass) "D" - else if (sym == defn.CharClass) "C" - else "L" + javaSig(elemType) + ";" - ("[" * depth) + suffix - } - def javaSig(tpe: Type): String = tpe match { - case tpe: JavaArrayType => javaArraySig(tpe) - case _ => - // Take the flatten name of the class and the full package name - val pack = tpe.classSymbol.topLevelClass.owner - val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." 
- packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString - } - - val sym = param.classSymbol - if (sym == defn.BooleanClass) classOf[Boolean] - else if (sym == defn.ByteClass) classOf[Byte] - else if (sym == defn.CharClass) classOf[Char] - else if (sym == defn.ShortClass) classOf[Short] - else if (sym == defn.IntClass) classOf[Int] - else if (sym == defn.LongClass) classOf[Long] - else if (sym == defn.FloatClass) classOf[Float] - else if (sym == defn.DoubleClass) classOf[Double] - else java.lang.Class.forName(javaSig(param), false, classLoader) - } - def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { - case tp: AppliedType if defn.isContextFunctionType(tp) => - // Call context function type direct method - tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) - case _ => Nil - } - val extraParams = getExtraParams(sym.info.finalResultType) - val allParams = TypeErasure.erasure(sym.info) match { - case meth: MethodType => meth.paramInfos ::: extraParams - case _ => extraParams - } - allParams.map(paramClass) - } - } - - - - /** Exception that stops interpretation if some issue is found */ - private class StopInterpretation(val msg: String, val pos: SrcPos) extends Exception - - object Call { - /** Matches an expression that is either a field access or an application - * It retruns a TermRef containing field accessed or a method reference and the arguments passed to it. 
- */ - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = - Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) - - private object Call0 { - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { - case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => - Some((fn, args)) - case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) - case fn: Select => Some((fn, Nil)) - case Apply(f @ Call0(fn, args1), args2) => - if (f.tpe.widenDealias.isErasedMethod) Some((fn, args1)) - else Some((fn, args2 :: args1)) - case TypeApply(Call0(fn, args), _) => Some((fn, args)) - case _ => None - } + super.interpretTree(tree) } } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala b/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala index df6128d249d2..ad3f0322130d 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala @@ -25,7 +25,6 @@ import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.config.ScalaRelease.* import scala.annotation.constructorOnly -import language.experimental.pureFunctions object Splicing: val name: String = "splicing" @@ -187,7 +186,7 @@ class Splicing extends MacroTransform: * {{{ | T2 | x, X | (x$1: Expr[T1], X$1: Type[X]) => (using Quotes) ?=> {... ${x$1} ... 
X$1.Underlying ...} }}} * ``` */ - private class SpliceTransformer(spliceOwner: Symbol, isCaptured: Symbol -> Boolean) extends Transformer: + private class SpliceTransformer(spliceOwner: Symbol, isCaptured: Symbol => Boolean) extends Transformer: private var refBindingMap = mutable.Map.empty[Symbol, (Tree, Symbol)] /** Reference to the `Quotes` instance of the current level 1 splice */ private var quotes: Tree | Null = null // TODO: add to the context diff --git a/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala b/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala index 6b4b0bc4545a..dc8defa90eef 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala @@ -376,7 +376,7 @@ class TreeChecker extends Phase with SymTransformer { override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = { assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase) - assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.tpe), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") + assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.typeOpt), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") assertDefined(tree) checkNotRepeated(super.typedIdent(tree, pt)) diff --git a/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala b/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala index f9779cbbfee4..92d22b1cc57e 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala @@ -49,7 +49,7 @@ class TryCatchPatterns extends MiniPhase { override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { case Try(_, cases, _) => - cases.foreach { (t: CaseDef) => t match // !cc! 
explicity typed scrutinee is needed + cases.foreach { case CaseDef(Typed(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.") case CaseDef(Bind(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.") case c => diff --git a/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala b/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala index 7db89300e710..b2a101649457 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala @@ -15,7 +15,6 @@ import core.Flags._ import util.Spans._ import reporting._ import config.Printers.{ transforms => debug } -import language.experimental.pureFunctions /** This transform normalizes type tests and type casts, * also replacing type tests with singleton argument type with reference equality check @@ -196,7 +195,7 @@ object TypeTestsCasts { def testCls = effectiveClass(testType.widen) def unboxedTestCls = effectiveClass(unboxedTestType.widen) - def unreachable(why: -> String)(using Context): Boolean = { + def unreachable(why: => String)(using Context): Boolean = { if (flagUnrelated) if (inMatch) report.error(em"this case is unreachable since $why", expr.srcPos) else report.warning(em"this will always yield false since $why", expr.srcPos) diff --git a/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala b/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala index 541cf50c43e1..a48aa77fe79f 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala @@ -18,7 +18,6 @@ import Errors.* import scala.collection.mutable import scala.annotation.tailrec -import caps.unsafe.unsafeBoxFunArg object Semantic: @@ -1670,8 +1669,7 @@ object Semantic: } // initialize super classes after outers are set - tasks.foreach(((task: () => Unit) => task()).unsafeBoxFunArg) - // !cc! 
.asInstanceOf needed to convert from `(() => Unit) -> Unit` to `(box () => Unit) -> Unit`. + tasks.foreach(task => task()) end if var fieldsChanged = true diff --git a/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala b/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala index ca0e149f881f..8e891f822255 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala @@ -306,6 +306,7 @@ object SpaceEngine { val isEmptyTp = extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition) isEmptyTp <:< ConstantType(Constant(false)) } + || unappResult.derivesFrom(defn.NonEmptyTupleClass) } /** Is the unapply or unapplySeq irrefutable? diff --git a/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala b/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala index a7f6d3e7dea7..3c87621413b7 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala @@ -722,9 +722,8 @@ object ExplicitJSClasses { val LocalJSClassValueName: UniqueNameKind = new UniqueNameKind("$jsclass") private final class MyState { - val nestedObject2superTypeConstructor: MutableSymbolMap[Type] = new MutableSymbolMap[Type] - val localClass2jsclassVal: MutableSymbolMap[TermSymbol] = new MutableSymbolMap[TermSymbol] - val notYetReferencedLocalClasses: util.HashSet[Symbol] = new util.HashSet[Symbol] - // !cc! 
type ascriptions needed for 3 vals above, otherwise they get strange inferred types + val nestedObject2superTypeConstructor = new MutableSymbolMap[Type] + val localClass2jsclassVal = new MutableSymbolMap[TermSymbol] + val notYetReferencedLocalClasses = new util.HashSet[Symbol] } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala b/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala index e75769147f80..48e6802e0f6c 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala @@ -974,6 +974,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree.rhs match { case sel: Select if sel.symbol == jsdefn.JSPackage_native => // ok + case rhs: Ident if rhs.symbol == jsdefn.JSPackage_native => + // ok case _ => val pos = if (tree.rhs != EmptyTree) tree.rhs.srcPos else tree.srcPos report.error(s"$longKindStr may only call js.native.", pos) diff --git a/tests/pos-with-compiler-cc/dotc/typer/Applications.scala b/tests/pos-with-compiler-cc/dotc/typer/Applications.scala index c62efa71fceb..ec72c48b2422 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Applications.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Applications.scala @@ -6,7 +6,6 @@ import core._ import ast.{Trees, tpd, untpd, desugar} import util.Stats.record import util.{SrcPos, NoSourcePosition} -import Trees.Untyped import Contexts._ import Flags._ import Symbols._ @@ -491,7 +490,7 @@ trait Applications extends Compatibility { i"${err.refStr(methRef)}$infoStr" /** Re-order arguments to correctly align named arguments */ - def reorder[T >: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { + def reorder[T <: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { /** @param pnames The list of parameter names that are missing arguments * @param args The list of arguments that are not yet passed, or that are waiting to be dropped @@ 
-548,7 +547,7 @@ trait Applications extends Compatibility { /** Is `sym` a constructor of a Java-defined annotation? */ def isJavaAnnotConstr(sym: Symbol): Boolean = - sym.is(JavaDefined) && sym.isConstructor && sym.owner.derivesFrom(defn.AnnotationClass) + sym.is(JavaDefined) && sym.isConstructor && sym.owner.is(JavaAnnotation) /** Match re-ordered arguments against formal parameters * @param n The position of the first parameter in formals in `methType`. @@ -754,7 +753,7 @@ trait Applications extends Compatibility { /** Subclass of Application for type checking an Apply node, where * types of arguments are either known or unknown. */ - abstract class TypedApply[T >: Untyped]( + abstract class TypedApply[T <: Untyped]( app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Trees.Tree[T]], resultType: Type, override val applyKind: ApplyKind)(using Context) extends Application(methRef, fun.tpe, args, resultType) { @@ -1005,7 +1004,7 @@ trait Applications extends Compatibility { // applications of inline functions. tree.args match { case (arg @ Match(EmptyTree, cases)) :: Nil => - cases.foreach { (t: untpd.CaseDef) => t match // !cc! 
explicity typed scrutinee is needed + cases.foreach { case CaseDef(Typed(_: untpd.Ident, _), _, _) => // OK case CaseDef(Bind(_, Typed(_: untpd.Ident, _)), _, _) => // OK case CaseDef(Ident(name), _, _) if name == nme.WILDCARD => // Ok @@ -1502,11 +1501,17 @@ trait Applications extends Compatibility { } /** Drop any leading implicit parameter sections */ - def stripImplicit(tp: Type)(using Context): Type = tp match { + def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { case mt: MethodType if mt.isImplicitMethod => - stripImplicit(resultTypeApprox(mt)) + stripImplicit(resultTypeApprox(mt, wildcardOnly)) case pt: PolyType => - pt.derivedLambdaType(pt.paramNames, pt.paramInfos, stripImplicit(pt.resultType)).asInstanceOf[PolyType].flatten + pt.derivedLambdaType(pt.paramNames, pt.paramInfos, + stripImplicit(pt.resultType, wildcardOnly = true)) + // can't use TypeParamRefs for parameter references in `resultTypeApprox` + // since their bounds can refer to type parameters in `pt` that are not + // bound by the constraint. This can lead to hygiene violations if subsequently + // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. 
+ .asInstanceOf[PolyType].flatten case _ => tp } diff --git a/tests/pos-with-compiler-cc/dotc/typer/Checking.scala b/tests/pos-with-compiler-cc/dotc/typer/Checking.scala index 2e83a32ae8e4..99399832085f 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Checking.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Checking.scala @@ -42,7 +42,6 @@ import transform.TypeUtils.* import collection.mutable import reporting._ -import language.experimental.pureFunctions object Checking { import tpd._ @@ -473,7 +472,7 @@ object Checking { def checkWithDeferred(flag: FlagSet) = if (sym.isOneOf(flag)) fail(AbstractMemberMayNotHaveModifier(sym, flag)) - def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: -> String) = + def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: => String) = if (sym.isAllOf(flag1 | flag2)) fail(msg.toMessage) def checkCombination(flag1: FlagSet, flag2: FlagSet) = if sym.isAllOf(flag1 | flag2) then @@ -497,7 +496,7 @@ object Checking { } if sym.is(Transparent) then if sym.isType then - if !sym.is(Trait) then fail(em"`transparent` can only be used for traits".toMessage) + if !sym.isExtensibleClass then fail(em"`transparent` can only be used for extensible classes and traits".toMessage) else if !sym.isInlineMethod then fail(em"`transparent` can only be used for inline methods".toMessage) if (!sym.isClass && sym.is(Abstract)) @@ -601,7 +600,7 @@ object Checking { */ def checkNoPrivateLeaks(sym: Symbol)(using Context): Type = { class NotPrivate extends TypeMap { - var errors: List[() -> String] = Nil + var errors: List[() => String] = Nil private var inCaptureSet: Boolean = false def accessBoundary(sym: Symbol): Symbol = @@ -783,7 +782,7 @@ object Checking { languageImport(qual) match case Some(nme.experimental) if !ctx.owner.isInExperimentalScope && !selectors.forall(isAllowedImport) => - def check(stable: -> String) = + def check(stable: => String) = Feature.checkExperimentalFeature("features", imp.srcPos, s"\n\nNote: the scope enclosing the import 
is not considered experimental because it contains the\nnon-experimental $stable") if ctx.owner.is(Package) then @@ -1037,7 +1036,7 @@ trait Checking { /** Issue a feature warning if feature is not enabled */ def checkFeature(name: TermName, - description: -> String, + description: => String, featureUseSite: Symbol, pos: SrcPos)(using Context): Unit = if !Feature.enabled(name) then @@ -1047,7 +1046,7 @@ trait Checking { * are feasible, i.e. that their lower bound conforms to their upper bound. If a type * argument is infeasible, issue and error and continue with upper bound. */ - def checkFeasibleParent(tp: Type, pos: SrcPos, where: -> String = "")(using Context): Type = { + def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = { def checkGoodBounds(tp: Type) = tp match { case tp @ TypeBounds(lo, hi) if !(lo <:< hi) => report.error(ex"no type exists between low bound $lo and high bound $hi$where", pos) @@ -1111,6 +1110,8 @@ trait Checking { def checkParentCall(call: Tree, caller: ClassSymbol)(using Context): Unit = if (!ctx.isAfterTyper) { val called = call.tpe.classSymbol + if (called.is(JavaAnnotation)) + report.error(i"${called.name} must appear without any argument to be a valid class parent because it is a Java annotation", call.srcPos) if (caller.is(Trait)) report.error(i"$caller may not call constructor of $called", call.srcPos) else if (called.is(Trait) && !caller.mixins.contains(called)) @@ -1264,6 +1265,23 @@ trait Checking { if !Inlines.inInlineMethod && !ctx.isInlineContext then report.error(em"$what can only be used in an inline method", pos) + /** Check that the class corresponding to this tree is either a Scala or Java annotation. + * + * @return The original tree or an error tree in case `tree` isn't a valid + * annotation or already an error tree. 
+ */ + def checkAnnotClass(tree: Tree)(using Context): Tree = + if tree.tpe.isError then + return tree + val cls = Annotations.annotClass(tree) + if cls.is(JavaDefined) then + if !cls.is(JavaAnnotation) then + errorTree(tree, em"$cls is not a valid Java annotation: it was not declared with `@interface`") + else tree + else if !cls.derivesFrom(defn.AnnotationClass) then + errorTree(tree, em"$cls is not a valid Scala annotation: it does not extend `scala.annotation.Annotation`") + else tree + /** Check arguments of compiler-defined annotations */ def checkAnnotArgs(tree: Tree)(using Context): tree.type = val cls = Annotations.annotClass(tree) @@ -1508,7 +1526,7 @@ trait ReChecking extends Checking { override def checkCanThrow(tp: Type, span: Span)(using Context): Tree = EmptyTree override def checkCatch(pat: Tree, guard: Tree)(using Context): Unit = () override def checkNoContextFunctionType(tree: Tree)(using Context): Unit = () - override def checkFeature(name: TermName, description: -> String, featureUseSite: Symbol, pos: SrcPos)(using Context): Unit = () + override def checkFeature(name: TermName, description: => String, featureUseSite: Symbol, pos: SrcPos)(using Context): Unit = () } trait NoChecking extends ReChecking { @@ -1519,7 +1537,7 @@ trait NoChecking extends ReChecking { override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () override def checkImplicitConversionUseOK(tree: Tree)(using Context): Unit = () - override def checkFeasibleParent(tp: Type, pos: SrcPos, where: -> String = "")(using Context): Type = tp + override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp override def checkAnnotArgs(tree: Tree)(using Context): tree.type = tree override def checkNoTargetNameConflict(stats: List[Tree])(using Context): Unit = () override def checkParentCall(call: Tree, 
caller: ClassSymbol)(using Context): Unit = () diff --git a/tests/pos-with-compiler-cc/dotc/typer/CrossVersionChecks.scala b/tests/pos-with-compiler-cc/dotc/typer/CrossVersionChecks.scala index 00b037a4e259..746b01c934a3 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/CrossVersionChecks.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/CrossVersionChecks.scala @@ -115,15 +115,6 @@ class CrossVersionChecks extends MiniPhase: } } - /** Check that classes extending experimental classes or nested in experimental classes have the @experimental annotation. */ - private def checkExperimentalInheritance(cls: ClassSymbol)(using Context): Unit = - if !cls.isAnonymousClass && !cls.isInExperimentalScope then - cls.info.parents.find(_.typeSymbol.isExperimental) match - case Some(parent) => - report.error(em"extension of experimental ${parent.typeSymbol} must have @experimental annotation", cls.srcPos) - case _ => - end checkExperimentalInheritance - override def transformValDef(tree: ValDef)(using Context): ValDef = checkDeprecatedOvers(tree) checkExperimentalAnnots(tree.symbol) @@ -136,12 +127,6 @@ class CrossVersionChecks extends MiniPhase: checkExperimentalSignature(tree.symbol, tree) tree - override def transformTemplate(tree: Template)(using Context): Tree = - val cls = ctx.owner.asClass - checkExperimentalInheritance(cls) - checkExperimentalAnnots(cls) - tree - override def transformIdent(tree: Ident)(using Context): Ident = { checkUndesiredProperties(tree.symbol, tree.srcPos) tree diff --git a/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala b/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala index 06ca1a3072f4..b2e0a4481297 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala @@ -15,7 +15,6 @@ import reporting._ import collection.mutable import scala.util.matching.Regex -import language.experimental.pureFunctions object ErrorReporting { @@ -27,7 +26,7 @@ object 
ErrorReporting { def errorTree(tree: untpd.Tree, msg: Message)(using Context): tpd.Tree = errorTree(tree, msg, tree.srcPos) - def errorTree(tree: untpd.Tree, msg: -> String)(using Context): tpd.Tree = + def errorTree(tree: untpd.Tree, msg: => String)(using Context): tpd.Tree = errorTree(tree, msg.toMessage) def errorTree(tree: untpd.Tree, msg: TypeError, pos: SrcPos)(using Context): tpd.Tree = @@ -38,7 +37,7 @@ object ErrorReporting { ErrorType(msg) } - def errorType(msg: -> String, pos: SrcPos)(using Context): ErrorType = + def errorType(msg: => String, pos: SrcPos)(using Context): ErrorType = errorType(msg.toMessage, pos) def errorType(ex: TypeError, pos: SrcPos)(using Context): ErrorType = { @@ -65,7 +64,7 @@ object ErrorReporting { case tp: AppliedType if tp.isMatchAlias => MatchTypeTrace.record(tp.tryNormalize) case tp: MatchType => MatchTypeTrace.record(tp.tryNormalize) case _ => foldOver(s, tp) - tps.foldLeft("")(collectMatchTrace.apply) // !cc! .apply needed since otherwise box conversion gets confused + tps.foldLeft("")(collectMatchTrace) class Errors(using Context) { @@ -188,7 +187,9 @@ object ErrorReporting { |The tests were made under $constraintText""" def whyFailedStr(fail: FailedExtension) = - i""" failed with + i""" + | + | failed with: | |${fail.whyFailed.message.indented(8)}""" @@ -268,8 +269,8 @@ class ImplicitSearchError( pt: Type, where: String, paramSymWithMethodCallTree: Option[(Symbol, tpd.Tree)] = None, - ignoredInstanceNormalImport: -> Option[SearchSuccess], - importSuggestionAddendum: -> String + ignoredInstanceNormalImport: => Option[SearchSuccess], + importSuggestionAddendum: => String )(using ctx: Context) { def missingArgMsg = arg.tpe match { diff --git a/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala b/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala index fcf75a14f9d8..0400d241e367 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala @@ -568,9 
+568,9 @@ object Implicits: if reasons.length > 1 then reasons.mkString("\n\t* ", "\n\t* ", "") else - reasons.mkString + reasons.mkString(" ", "", "") - def explanation(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}: ${formatReasons}" + def explanation(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}:${formatReasons}" end Implicits @@ -1651,7 +1651,7 @@ end Implicits * recursive references and emit a complete implicit dictionary when the outermost search * is complete. */ -abstract class SearchHistory extends caps.Pure: +abstract class SearchHistory: val root: SearchRoot /** Does this search history contain any by name implicit arguments. */ val byname: Boolean @@ -1896,8 +1896,7 @@ sealed class TermRefSet(using Context): prefixes0 match case prefix: Type => f(TermRef(prefix, sym.uncheckedNN)) case prefixes: List[Type] => prefixes.foreach(pre => f(TermRef(pre, sym.uncheckedNN))) - elems.forEach(handle.asInstanceOf) - // !cc! cast is needed to circumvent problematic interaction of box and Java wildcards + elems.forEach(handle) // used only for debugging def showAsList: List[TermRef] = { diff --git a/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala b/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala index 3cc88fa323b9..b5be2daf873b 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala @@ -11,7 +11,6 @@ import Implicits.RenamedImplicitRef import StdNames.nme import printing.Texts.Text import NameKinds.QualifiedName -import language.experimental.pureFunctions object ImportInfo { @@ -50,10 +49,10 @@ object ImportInfo { * @param isRootImport true if this is one of the implicit imports of scala, java.lang, * scala.Predef in the start context, false otherwise. 
*/ -class ImportInfo(symf: Context ?-> Symbol, +class ImportInfo(symf: Context ?=> Symbol, val selectors: List[untpd.ImportSelector], val qualifier: untpd.Tree, - val isRootImport: Boolean = false) extends Showable, caps.Pure { + val isRootImport: Boolean = false) extends Showable { private def symNameOpt = qualifier match { case ref: untpd.RefTree => Some(ref.name.asTermName) diff --git a/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala b/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala index 27b83e025cf9..9d2db773c4d4 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala @@ -6,15 +6,14 @@ import core._ import ast._ import Contexts._, Types._, Flags._, Symbols._ import ProtoTypes._ -import NameKinds.{AvoidNameKind, UniqueName} +import NameKinds.UniqueName import util.Spans._ -import util.{Stats, SimpleIdentityMap, SrcPos} +import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos} import Decorators._ import config.Printers.{gadts, typr} import annotation.tailrec import reporting._ import collection.mutable - import scala.annotation.internal.sharable object Inferencing { @@ -574,7 +573,7 @@ trait Inferencing { this: Typer => * Then `Y` also occurs co-variantly in `T` because it needs to be minimized in order to constrain * `T` the least. See `variances` for more detail. */ - def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = { + def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = val state = ctx.typerState // Note that some variables in `locked` might not be in `state.ownedVars` @@ -583,7 +582,7 @@ trait Inferencing { this: Typer => // `qualifying`. 
val ownedVars = state.ownedVars - if ((ownedVars ne locked) && !ownedVars.isEmpty) { + if (ownedVars ne locked) && !ownedVars.isEmpty then val qualifying = ownedVars -- locked if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") @@ -619,44 +618,67 @@ trait Inferencing { this: Typer => if state.reporter.hasUnreportedErrors then return tree def constraint = state.constraint - type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)] - val toInstantiate = new InstantiateQueue - for tvar <- qualifying do - if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then - constrainIfDependentParamRef(tvar, tree) - // Needs to be checked again, since previous interpolations could already have - // instantiated `tvar` through unification. - val v = vs(tvar) - if v == null then - // Even though `tvar` is non-occurring in `v`, the specific - // instantiation we pick still matters because `tvar` might appear - // in the bounds of a non-`qualifying` type variable in the - // constraint. - // In particular, if `tvar` was created as the upper or lower - // bound of an existing variable by `LevelAvoidMap`, we - // instantiate it in the direction corresponding to the - // original variable which might be further constrained later. - // Otherwise, we simply rely on `hasLowerBound`. 
- val name = tvar.origin.paramName - val fromBelow = - name.is(AvoidNameKind.UpperBound) || - !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound - typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") - toInstantiate += ((tvar, fromBelow)) - else if v.intValue != 0 then - typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint") - toInstantiate += ((tvar, v.intValue == 1)) - else comparing(cmp => - if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then - // Invariant: The type of a tree whose enclosing scope is level - // N only contains type variables of level <= N. - typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") - cmp.atLevel(ctx.nestingLevel, tvar.origin) - else - typr.println(i"no interpolation for nonvariant $tvar in $state") - ) - /** Instantiate all type variables in `buf` in the indicated directions. + /** Values of this type report type variables to instantiate with variance indication: + * +1 variable appears covariantly, can be instantiated from lower bound + * -1 variable appears contravariantly, can be instantiated from upper bound + * 0 variable does not appear at all, can be instantiated from either bound + */ + type ToInstantiate = List[(TypeVar, Int)] + + val toInstantiate: ToInstantiate = + val buf = new mutable.ListBuffer[(TypeVar, Int)] + for tvar <- qualifying do + if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then + constrainIfDependentParamRef(tvar, tree) + if !tvar.isInstantiated then + // isInstantiated needs to be checked again, since previous interpolations could already have + // instantiated `tvar` through unification. 
+ val v = vs(tvar) + if v == null then buf += ((tvar, 0)) + else if v.intValue != 0 then buf += ((tvar, v.intValue)) + else comparing(cmp => + if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then + // Invariant: The type of a tree whose enclosing scope is level + // N only contains type variables of level <= N. + typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") + cmp.atLevel(ctx.nestingLevel, tvar.origin) + else + typr.println(i"no interpolation for nonvariant $tvar in $state") + ) + buf.toList + + def typeVarsIn(xs: ToInstantiate): TypeVars = + xs.foldLeft(SimpleIdentitySet.empty: TypeVars)((tvs, tvi) => tvs + tvi._1) + + /** Filter list of proposed instantiations so that they don't constrain further + * the current constraint. + */ + def filterByDeps(tvs0: ToInstantiate): ToInstantiate = + val excluded = // ignore dependencies from other variables that are being instantiated + typeVarsIn(tvs0) + def step(tvs: ToInstantiate): ToInstantiate = tvs match + case tvs @ (hd @ (tvar, v)) :: tvs1 => + def aboveOK = !constraint.dependsOn(tvar, excluded, co = true) + def belowOK = !constraint.dependsOn(tvar, excluded, co = false) + if v == 0 && !aboveOK then + step((tvar, 1) :: tvs1) + else if v == 0 && !belowOK then + step((tvar, -1) :: tvs1) + else if v == -1 && !aboveOK || v == 1 && !belowOK then + typr.println(i"drop $tvar, $v in $tp, $pt, qualifying = ${qualifying.toList}, tvs0 = ${tvs0.toList}%, %, excluded = ${excluded.toList}, $constraint") + step(tvs1) + else // no conflict, keep the instantiation proposal + tvs.derivedCons(hd, step(tvs1)) + case Nil => + Nil + val tvs1 = step(tvs0) + if tvs1 eq tvs0 then tvs1 + else filterByDeps(tvs1) // filter again with smaller excluded set + end filterByDeps + + /** Instantiate all type variables in `tvs` in the indicated directions, + * as described in the doc comment of `ToInstantiate`. 
* If a type variable A is instantiated from below, and there is another * type variable B in `buf` that is known to be smaller than A, wait and * instantiate all other type variables before trying to instantiate A again. @@ -685,29 +707,37 @@ trait Inferencing { this: Typer => * * V2 := V3, O2 := O3 */ - def doInstantiate(buf: InstantiateQueue): Unit = - if buf.nonEmpty then - val suspended = new InstantiateQueue - while buf.nonEmpty do - val first @ (tvar, fromBelow) = buf.head - buf.dropInPlace(1) - if !tvar.isInstantiated then - val suspend = buf.exists{ (following, _) => - if fromBelow then - constraint.isLess(following.origin, tvar.origin) - else - constraint.isLess(tvar.origin, following.origin) + def doInstantiate(tvs: ToInstantiate): Unit = + + /** Try to instantiate `tvs`, return any suspended type variables */ + def tryInstantiate(tvs: ToInstantiate): ToInstantiate = tvs match + case (hd @ (tvar, v)) :: tvs1 => + val fromBelow = v == 1 || (v == 0 && tvar.hasLowerBound) + typr.println( + i"interpolate${if v == 0 then " non-occurring" else ""} $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") + if tvar.isInstantiated then + tryInstantiate(tvs1) + else + val suspend = tvs1.exists{ (following, _) => + if fromBelow + then constraint.isLess(following.origin, tvar.origin) + else constraint.isLess(tvar.origin, following.origin) } - if suspend then suspended += first else tvar.instantiate(fromBelow) - end if - end while - doInstantiate(suspended) + if suspend then + typr.println(i"suspended: $hd") + hd :: tryInstantiate(tvs1) + else + tvar.instantiate(fromBelow) + tryInstantiate(tvs1) + case Nil => Nil + if tvs.nonEmpty then doInstantiate(tryInstantiate(tvs)) end doInstantiate - doInstantiate(toInstantiate) + + doInstantiate(filterByDeps(toInstantiate)) } - } + end if tree - } + end interpolateTypeVars /** If `tvar` represents a parameter of a dependent method type in the current `call` * approximate it from below with the type of the actual 
argument. Skolemize that diff --git a/tests/pos-with-compiler-cc/dotc/typer/Namer.scala b/tests/pos-with-compiler-cc/dotc/typer/Namer.scala index ad8d0e50d348..6aab561c44b7 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Namer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Namer.scala @@ -833,7 +833,7 @@ class Namer { typer: Typer => if (cls eq sym) report.error("An annotation class cannot be annotated with iself", annotTree.srcPos) else { - val ann = Annotation.deferred(cls)(typedAheadAnnotation(annotTree)(using annotCtx)) + val ann = Annotation.deferred(cls)(typedAheadExpr(annotTree)(using annotCtx)) sym.addAnnotation(ann) } } @@ -1618,9 +1618,6 @@ class Namer { typer: Typer => def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(using Context): tpd.Tree = typedAhead(tree, typer.typedExpr(_, pt)) - def typedAheadAnnotation(tree: Tree)(using Context): tpd.Tree = - typedAheadExpr(tree, defn.AnnotationClass.typeRef) - def typedAheadAnnotationClass(tree: Tree)(using Context): Symbol = tree match { case Apply(fn, _) => typedAheadAnnotationClass(fn) case TypeApply(fn, _) => typedAheadAnnotationClass(fn) diff --git a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala index 8775206ace7b..6fb019ee057c 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala @@ -17,7 +17,6 @@ import util.SourceFile import TypeComparer.necessarySubType import scala.annotation.internal.sharable -import scala.annotation.retains object ProtoTypes { @@ -123,15 +122,15 @@ object ProtoTypes { } /** A trait for prototypes that match all types */ - trait MatchAlways extends ProtoType, caps.Pure { + trait MatchAlways extends ProtoType { def isMatchedBy(tp1: Type, keepConstraint: Boolean)(using Context): Boolean = true def map(tm: TypeMap)(using Context): ProtoType = this - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = x + def 
fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = x override def toString: String = getClass.toString } /** A class marking ignored prototypes that can be revealed by `deepenProto` */ - abstract case class IgnoredProto(ignored: Type) extends CachedGroundType, MatchAlways, caps.Pure: + abstract case class IgnoredProto(ignored: Type) extends CachedGroundType with MatchAlways: private var myWasDeepened = false override def revealIgnored = ignored override def deepenProto(using Context): Type = @@ -165,7 +164,7 @@ object ProtoTypes { * [ ].name: proto */ abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) - extends CachedProxyType, ProtoType, ValueTypeOrProto, caps.Pure { + extends CachedProxyType with ProtoType with ValueTypeOrProto { /** Is the set of members of this type unknown, in the sense that we * cannot compute a non-trivial upper approximation? This is the case if: @@ -240,7 +239,7 @@ object ProtoTypes { memberProto.unusableForInference def map(tm: TypeMap)(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = ta(x, memberProto) + def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = ta(x, memberProto) override def deepenProto(using Context): SelectionProto = derivedSelectionProto(name, memberProto.deepenProto, compat) @@ -545,7 +544,7 @@ object ProtoTypes { def map(tm: TypeMap)(using Context): FunProto = derivedFunProto(args, tm(resultType), typer) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = ta(ta.foldOver(x, typedArgs().tpes), resultType) override def deepenProto(using Context): FunProto = @@ -575,7 +574,7 @@ object ProtoTypes { * []: argType => resultType */ abstract case class ViewProto(argType: Type, resType: Type) - extends CachedGroundType, ApplyingProto, caps.Pure { + 
extends CachedGroundType with ApplyingProto { override def resultType(using Context): Type = resType @@ -602,7 +601,7 @@ object ProtoTypes { def map(tm: TypeMap)(using Context): ViewProto = derivedViewProto(tm(argType), tm(resultType)) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = ta(ta(x, argType), resultType) override def deepenProto(using Context): ViewProto = @@ -656,7 +655,7 @@ object ProtoTypes { def map(tm: TypeMap)(using Context): PolyProto = derivedPolyProto(targs, tm(resultType)) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = ta(ta.foldOver(x, targs.tpes), resultType) override def deepenProto(using Context): PolyProto = @@ -704,10 +703,7 @@ object ProtoTypes { def newTypeVars(tl: TypeLambda): List[TypeTree] = for (paramRef <- tl.paramRefs) yield { - val tt = InferredTypeTree[Type]().withSpan(owningTree.span) - // !cc! explicit type argument [Type] needed since otherwise it is - // inferred to be `{*} Type`, which violates the upper bound. The - // inference works like this because of the contravariance of Tree. 
+ val tt = InferredTypeTree().withSpan(owningTree.span) val tvar = TypeVar(paramRef, state, nestingLevel) state.ownedVars += tvar tt.withType(tvar) diff --git a/tests/pos-with-compiler-cc/dotc/typer/ReTyper.scala b/tests/pos-with-compiler-cc/dotc/typer/ReTyper.scala index 7099234c80e1..b53b2f9ec57a 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ReTyper.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ReTyper.scala @@ -71,7 +71,7 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking promote(tree) override def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = - promote(TypeTree(tree.tpe).withSpan(tree.span)) + promote(TypeTree(tree.typeOpt).withSpan(tree.span)) override def typedExport(exp: untpd.Export)(using Context): Export = promote(exp) @@ -87,8 +87,8 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking // retract PatternOrTypeBits like in typedExpr withoutMode(Mode.PatternOrTypeBits)(typedUnadapted(tree.fun, AnyFunctionProto)) val implicits1 = tree.implicits.map(typedExpr(_)) - val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.tpe)) - untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.tpe) + val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.typeOpt)) + untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.typeOpt) } override def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = diff --git a/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala b/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala index 4d2c7ffdfe7d..8afcec4dee63 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala @@ -20,7 +20,6 @@ import config.SourceVersion.{`3.0`, `future`} import config.Printers.refcheck import reporting._ import Constants.Constant -import language.experimental.pureFunctions object RefChecks { import tpd._ @@ -620,8 +619,7 @@ object 
RefChecks { val missing = missingTermSymbols // Group missing members by the name of the underlying symbol, // to consolidate getters and setters. - val grouped = missing.groupBy(sym => sym.underlyingSymbol.name: Name) - // !cc! type ascription needed + val grouped = missing.groupBy(_.underlyingSymbol.name) val missingMethods = grouped.toList flatMap { case (name, syms) => @@ -863,7 +861,7 @@ object RefChecks { * Return an optional by name error message if this test fails. */ def variantInheritanceProblems( - baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[() -> String] = { + baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[() => String] = { val superBT = self.baseType(middle) val thisBT = self.baseType(baseCls) val combinedBT = superBT.baseType(baseCls) diff --git a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala index b96d80345bc3..e3f5382ecad7 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala @@ -19,13 +19,12 @@ import ast.Trees.genericEmptyTree import annotation.{tailrec, constructorOnly} import ast.tpd._ import Synthesizer._ -import language.experimental.pureFunctions /** Synthesize terms for special classes */ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): /** Handlers to synthesize implicits for special types */ - type SpecialHandler = (Type, Span) -> Context ?-> TreeWithErrors + type SpecialHandler = (Type, Span) => Context ?=> TreeWithErrors private type SpecialHandlers = List[(ClassSymbol, SpecialHandler)] val synthesizedClassTag: SpecialHandler = (formal, span) => @@ -476,7 +475,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): if acceptableMsg.isEmpty && clsIsGenericSum then val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString))) - def internalError(msg: -> String)(using Context): Unit 
= + def internalError(msg: => String)(using Context): Unit = report.error(i"""Internal error when synthesizing sum mirror for $cls: |$msg""".stripMargin, ctx.source.atSpan(span)) @@ -596,7 +595,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case JavaArrayType(elemTp) => defn.ArrayOf(escapeJavaArray(elemTp)) case _ => tp - private enum ManifestKind extends caps.Pure: // !cc! should all enums be Pure? + private enum ManifestKind: case Full, Opt, Clss /** The kind that should be used for an array element, if we are `OptManifest` then this diff --git a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala index 9e0f8edebe4e..032bed38482c 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala @@ -51,7 +51,6 @@ import Nullables._ import NullOpsDecorator._ import cc.CheckCaptures import config.Config -import language.experimental.pureFunctions import scala.annotation.constructorOnly @@ -477,17 +476,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * (x: T | Null) => x.$asInstanceOf$[x.type & T] */ def toNotNullTermRef(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match - case tp @ OrNull(tpnn) => - tp match - case ref: TermRef - if pt != AssignProto && // Ensure it is not the lhs of Assign - ctx.notNullInfos.impliesNotNull(ref) && - // If a reference is in the context, it is already trackable at the point we add it. - // Hence, we don't use isTracked in the next line, because checking use out of order is enough. - !ref.usedOutOfOrder => - tree.cast(AndType(ref, tpnn)) - case _ => - tree + case ref: TermRef + if pt != AssignProto && // Ensure it is not the lhs of Assign + ctx.notNullInfos.impliesNotNull(ref) && + // If a reference is in the context, it is already trackable at the point we add it. + // Hence, we don't use isTracked in the next line, because checking use out of order is enough. 
+ !ref.usedOutOfOrder => + ref match + case OrNull(tpnn) => tree.cast(AndType(ref, tpnn)) + case _ => tree case _ => tree @@ -681,7 +678,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer errorTree(tree, "cannot convert to type selection") // will never be printed due to fallback } - def selectWithFallback(fallBack: Context ?-> Tree) = + def selectWithFallback(fallBack: Context ?=> Tree) = tryAlternatively(typeSelectOnTerm)(fallBack) if (tree.qualifier.isType) { @@ -1105,7 +1102,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * expected type of a block is the anonymous class defined inside it. In that * case there's technically a leak which is not removed by the ascription. */ - protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: -> List[Symbol])(using Context): Tree = { + protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol])(using Context): Tree = { def ascribeType(tree: Tree, pt: Type): Tree = tree match { case block @ Block(stats, expr) if !expr.isInstanceOf[Closure] => val expr1 = ascribeType(expr, pt) @@ -2264,7 +2261,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedAnnotation(annot: untpd.Tree)(using Context): Tree = - checkAnnotArgs(typed(annot, defn.AnnotationClass.typeRef)) + checkAnnotClass(checkAnnotArgs(typed(annot))) def registerNowarn(tree: Tree, mdef: untpd.Tree)(using Context): Unit = val annot = Annotations.Annotation(tree) @@ -2600,6 +2597,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer */ def ensureConstrCall(cls: ClassSymbol, parent: Tree, psym: Symbol)(using Context): Tree = if parent.isType && !cls.is(Trait) && !cls.is(JavaDefined) && psym.isClass + // Annotations are represented as traits with constructors, but should + // never be called as such outside of annotation trees. 
+ && !psym.is(JavaAnnotation) && (!psym.is(Trait) || psym.primaryConstructor.info.takesParams && !cls.superClass.isSubClass(psym)) then typed(untpd.New(untpd.TypedSplice(parent), Nil)) @@ -2674,7 +2674,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer end typedPackageDef def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = { - val annot1 = typedExpr(tree.annot, defn.AnnotationClass.typeRef) + val annot1 = checkAnnotClass(typedExpr(tree.annot)) val annotCls = Annotations.annotClass(annot1) if annotCls == defn.NowarnAnnot then registerNowarn(annot1, tree) @@ -2823,7 +2823,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tupleXXLobj = untpd.ref(defn.TupleXXLModule.termRef) val app = untpd.cpy.Apply(tree)(tupleXXLobj, elems.map(untpd.TypedSplice(_))) .withSpan(tree.span) - val app1 = typed(app, defn.TupleXXLClass.typeRef) + val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) if (ctx.mode.is(Mode.Pattern)) app1 else { val elemTpes = elems.lazyZip(pts).map((elem, pt) => @@ -3169,7 +3169,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPattern(tree: untpd.Tree, selType: Type = WildcardType)(using Context): Tree = withMode(Mode.Pattern)(typed(tree, selType)) - def tryEither[T](op: Context ?-> T)(fallBack: (T, TyperState) => T)(using Context): T = { + def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = { val nestedCtx = ctx.fresh.setNewTyperState() val result = op(using nestedCtx) if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) { @@ -3186,7 +3186,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back * to errors and result of `op1`. 
*/ - def tryAlternatively[T](op1: Context ?-> T)(op2: Context ?-> T)(using Context): T = + def tryAlternatively[T](op1: Context ?=> T)(op2: Context ?=> T)(using Context): T = tryEither(op1) { (failedVal, failedState) => tryEither(op2) { (_, _) => failedState.commit() @@ -3426,42 +3426,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ErrorReporting.missingArgs(tree, mt) tree.withType(mt.resultType) - def adaptOverloaded(ref: TermRef) = { + def adaptOverloaded(ref: TermRef) = + // get all the alternatives val altDenots = val allDenots = ref.denot.alternatives if pt.isExtensionApplyProto then allDenots.filter(_.symbol.is(ExtensionMethod)) else allDenots + typr.println(i"adapt overloaded $ref with alternatives ${altDenots map (_.info)}%\n\n %") + + /** Search for an alternative that does not take parameters. + * If there is one, return it, otherwise emit an error. + */ + def tryParameterless(alts: List[TermRef])(error: => tpd.Tree): Tree = + alts.filter(_.info.isParameterless) match + case alt :: Nil => readaptSimplified(tree.withType(alt)) + case _ => + if altDenots.exists(_.info.paramInfoss == ListOfNil) then + typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) + else + error + def altRef(alt: SingleDenotation) = TermRef(ref.prefix, ref.name, alt) val alts = altDenots.map(altRef) - resolveOverloaded(alts, pt) match { + + resolveOverloaded(alts, pt) match case alt :: Nil => readaptSimplified(tree.withType(alt)) case Nil => - // If alternative matches, there are still two ways to recover: + // If no alternative matches, there are still two ways to recover: // 1. If context is an application, try to insert an apply or implicit // 2. If context is not an application, pick a alternative that does // not take parameters. 
- def noMatches = - errorTree(tree, NoMatchingOverload(altDenots, pt)) - def hasEmptyParams(denot: SingleDenotation) = denot.info.paramInfoss == ListOfNil - pt match { + + def errorNoMatch = errorTree(tree, NoMatchingOverload(altDenots, pt)) + + pt match case pt: FunOrPolyProto if pt.applyKind != ApplyKind.Using => // insert apply or convert qualifier, but only for a regular application - tryInsertApplyOrImplicit(tree, pt, locked)(noMatches) + tryInsertApplyOrImplicit(tree, pt, locked)(errorNoMatch) case _ => - alts.filter(_.info.isParameterless) match { - case alt :: Nil => readaptSimplified(tree.withType(alt)) - case _ => - if (altDenots exists (_.info.paramInfoss == ListOfNil)) - typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) - else - noMatches - } - } + tryParameterless(alts)(errorNoMatch) + case ambiAlts => - if tree.tpe.isErroneous || pt.isErroneous then tree.withType(UnspecifiedErrorType) - else + // If there are ambiguous alternatives, and: + // 1. the types aren't erroneous + // 2. the expected type is not a function type + // 3. there exist a parameterless alternative + // + // Then, pick the parameterless alternative. + // See tests/pos/i10715-scala and tests/pos/i10715-java. 
+ + /** Constructs an "ambiguous overload" error */ + def errorAmbiguous = val remainingDenots = altDenots.filter(denot => ambiAlts.contains(altRef(denot))) val addendum = if ambiAlts.exists(!_.symbol.exists) then @@ -3470,8 +3487,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |Note: Overloaded definitions introduced by refinements cannot be resolved""" else "" errorTree(tree, AmbiguousOverload(tree, remainingDenots, pt, addendum)) - } - } + end errorAmbiguous + + if tree.tpe.isErroneous || pt.isErroneous then + tree.withType(UnspecifiedErrorType) + else + pt match + case _: FunProto => + errorAmbiguous + case _ => + tryParameterless(alts)(errorAmbiguous) + + end match + end adaptOverloaded def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match { case wtp: MethodOrPoly => @@ -4180,7 +4208,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Types the body Scala 2 macro declaration `def f = macro ` */ protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = // TODO check that call is to a method with valid signature - def typedPrefix(tree: untpd.RefTree)(splice: Context ?-> Tree -> Tree)(using Context): Tree = { + def typedPrefix(tree: untpd.RefTree)(splice: Context ?=> Tree => Tree)(using Context): Tree = { tryAlternatively { splice(typedExpr(tree, defn.AnyType)) } { diff --git a/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala b/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala index d24a9ab3ddb2..020303c18bc2 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala @@ -3,7 +3,7 @@ package dotc.util /** A class for the reading part of mutable or immutable maps. 
*/ -abstract class ReadOnlyMap[Key, Value] extends caps.Pure: +abstract class ReadOnlyMap[Key, Value]: def lookup(x: Key): Value | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala b/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala index 318a04e846fe..4826d02743a9 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc.util /** A class for the readonly part of mutable sets. */ -abstract class ReadOnlySet[T] extends caps.Pure: +abstract class ReadOnlySet[T]: /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */ def lookup(x: T): T | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala b/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala index 75addc916b78..4dd897dd082a 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala @@ -2,7 +2,6 @@ package dotty.tools.dotc.util import scala.collection.mutable.ArrayBuffer import scala.util.chaining._ -import language.experimental.pureFunctions /** A wrapper for a list of cached instances of a type `T`. 
* The wrapper is recursion-reentrant: several instances are kept, so @@ -15,7 +14,7 @@ import language.experimental.pureFunctions * * Ported from scala.reflect.internal.util.ReusableInstance */ -final class ReusableInstance[T <: AnyRef] private (make: -> T) { +final class ReusableInstance[T <: AnyRef] private (make: => T) { private[this] val cache = new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) private[this] var taken = 0 @@ -30,5 +29,5 @@ final class ReusableInstance[T <: AnyRef] private (make: -> T) { object ReusableInstance { private inline val InitialSize = 4 - def apply[T <: AnyRef](make: -> T): ReusableInstance[T] = new ReusableInstance[T](make) + def apply[T <: AnyRef](make: => T): ReusableInstance[T] = new ReusableInstance[T](make) } diff --git a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala index 2b4aa6eda48e..2f202bc05921 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala @@ -5,7 +5,7 @@ import collection.mutable.ListBuffer /** A simple linked map with `eq` as the key comparison, optimized for small maps. * It has linear complexity for `apply`, `updated`, and `remove`. 
*/ -sealed abstract class SimpleIdentityMap[K <: AnyRef, +V <: AnyRef] extends (K => V | Null) { +abstract class SimpleIdentityMap[K <: AnyRef, +V <: AnyRef] extends (K => V | Null) { final def isEmpty: Boolean = this eq SimpleIdentityMap.myEmpty def size: Int def apply(k: K): V | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala index 32851fd823d5..dd766dc99c7e 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala @@ -7,7 +7,7 @@ import collection.mutable /** A simple linked set with `eq` as the comparison, optimized for small sets. * It has linear complexity for `contains`, `+`, and `-`. */ -sealed abstract class SimpleIdentitySet[+Elem <: AnyRef] { +abstract class SimpleIdentitySet[+Elem <: AnyRef] { def size: Int def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem] diff --git a/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala b/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala index 8a5a4828adfd..42d07869f74e 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala @@ -21,7 +21,6 @@ import java.nio.file.{FileSystemException, NoSuchFileException} import java.util.Optional import java.util.concurrent.atomic.AtomicInteger import java.util.regex.Pattern -import language.experimental.pureFunctions object ScriptSourceFile { @sharable private val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE) @@ -60,7 +59,7 @@ object ScriptSourceFile { } } -class SourceFile(val file: AbstractFile, computeContent: -> Array[Char]) extends interfaces.SourceFile, caps.Pure { +class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends interfaces.SourceFile { import SourceFile._ private var myContent: Array[Char] | Null = 
null @@ -279,7 +278,7 @@ object SourceFile { else SourceFile(file, chars) - def apply(file: AbstractFile | Null, computeContent: -> Array[Char]): SourceFile = new SourceFile(file, computeContent) + def apply(file: AbstractFile | Null, computeContent: => Array[Char]): SourceFile = new SourceFile(file, computeContent) } @sharable object NoSource extends SourceFile(NoAbstractFile, Array[Char]()) { diff --git a/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala b/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala index ef4350741036..29f9a34d2292 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala @@ -12,7 +12,7 @@ import scala.annotation.internal.sharable /** A source position is comprised of a span and a source file */ case class SourcePosition(source: SourceFile, span: Span, outer: SourcePosition = NoSourcePosition) -extends SrcPos, interfaces.SourcePosition, Showable, caps.Pure { +extends SrcPos, interfaces.SourcePosition, Showable { def sourcePos(using Context) = this diff --git a/tests/pos-with-compiler-cc/dotc/util/common.scala b/tests/pos-with-compiler-cc/dotc/util/common.scala index 70e0e82a7d50..85ce9a29f2df 100644 --- a/tests/pos-with-compiler-cc/dotc/util/common.scala +++ b/tests/pos-with-compiler-cc/dotc/util/common.scala @@ -2,13 +2,12 @@ package dotty.tools.dotc package util import core.Types.WildcardType -import language.experimental.pureFunctions /** Common values hoisted out for performance */ object common { - val alwaysTrue: Any -> Boolean = Function.const(true) - val alwaysFalse: Any -> Boolean = Function.const(false) - val alwaysZero: Any -> Int = Function.const(0) - val alwaysWildcardType: Any -> WildcardType.type = Function.const(WildcardType) + val alwaysTrue: Any => Boolean = Function.const(true) + val alwaysFalse: Any => Boolean = Function.const(false) + val alwaysZero: Any => Int = Function.const(0) + val alwaysWildcardType: Any => 
WildcardType.type = Function.const(WildcardType) } From 3002842993c92041dd6f603b59dab712154918cf Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 7 Nov 2022 10:23:04 +0100 Subject: [PATCH 2/2] Changes to compiler codebase so that it passes capture checking # Conflicts: # tests/pos-with-compiler-cc/dotc/ast/Trees.scala --- tests/pos-with-compiler-cc/dotc/Run.scala | 3 +- .../dotc/ast/Desugar.scala | 7 +- .../dotc/ast/Positioned.scala | 2 +- .../dotc/ast/TreeTypeMap.scala | 9 +- .../pos-with-compiler-cc/dotc/ast/Trees.scala | 8 +- tests/pos-with-compiler-cc/dotc/ast/tpd.scala | 3 +- .../pos-with-compiler-cc/dotc/ast/untpd.scala | 9 +- .../dotc/cc/CaptureSet.scala | 15 +- .../dotc/cc/CheckCaptures.scala | 10 +- .../dotc/classpath/DirectoryClassPath.scala | 5 +- .../dotc/classpath/FileUtils.scala | 3 +- .../classpath/VirtualDirectoryClassPath.scala | 3 +- .../dotc/config/Config.scala | 8 - .../dotc/config/Feature.scala | 3 +- .../dotc/config/ScalaSettings.scala | 4 +- .../dotc/core/Annotations.scala | 36 +- .../dotc/core/Constraint.scala | 26 +- .../dotc/core/Contexts.scala | 9 +- .../dotc/core/Decorators.scala | 3 +- .../dotc/core/Definitions.scala | 13 +- .../dotc/core/Denotations.scala | 12 +- .../dotc/core/NameKinds.scala | 4 +- .../dotc/core/Names.scala | 2 +- .../dotc/core/OrderingConstraint.scala | 477 +++--------------- .../dotc/core/Phases.scala | 2 +- .../dotc/core/Scopes.scala | 2 +- .../dotc/core/SymDenotations.scala | 15 +- .../dotc/core/SymbolLoaders.scala | 6 +- .../dotc/core/TypeComparer.scala | 11 +- .../dotc/core/TypeErrors.scala | 5 +- .../dotc/core/TypeOps.scala | 25 +- .../dotc/core/Types.scala | 73 ++- .../dotc/core/classfile/ClassfileParser.scala | 5 +- .../dotc/core/tasty/TastyUnpickler.scala | 3 +- .../dotc/core/tasty/TreeUnpickler.scala | 11 +- .../dotc/decompiler/IDEDecompilerDriver.scala | 3 +- .../dotc/inlines/Inliner.scala | 11 +- .../dotc/inlines/Inlines.scala | 5 +- .../dotc/inlines/PrepareInlineable.scala | 3 +- 
.../dotc/parsing/Parsers.scala | 9 +- .../dotc/parsing/Scanners.scala | 2 +- .../dotc/printing/Highlighting.scala | 2 +- .../dotc/printing/PlainPrinter.scala | 5 +- .../dotc/printing/Printer.scala | 2 +- .../dotc/profile/AsyncHelper.scala | 2 +- tests/pos-with-compiler-cc/dotc/report.scala | 36 +- .../dotc/reporting/Diagnostic.scala | 5 +- .../dotc/reporting/Message.scala | 11 +- .../dotc/reporting/Reporter.scala | 13 +- .../dotc/reporting/messages.scala | 11 +- .../dotc/sbt/ExtractAPI.scala | 3 +- .../dotc/sbt/ExtractDependencies.scala | 6 +- .../dotc/sbt/ThunkHolder.scala | 3 +- .../dotc/semanticdb/ExtractSemanticDB.scala | 4 +- .../dotc/semanticdb/SyntheticsExtractor.scala | 2 +- .../internal/SemanticdbTypeMapper.scala | 5 +- .../dotc/transform/CountOuterAccesses.scala | 2 +- .../dotc/transform/ForwardDepChecks.scala | 2 +- .../dotc/transform/HoistSuperArgs.scala | 4 +- .../dotc/transform/LazyVals.scala | 308 ++--------- .../dotc/transform/MacroTransform.scala | 2 +- .../dotc/transform/MegaPhase.scala | 2 +- .../dotc/transform/Memoize.scala | 2 +- .../dotc/transform/Mixin.scala | 5 +- .../dotc/transform/MoveStatics.scala | 3 +- .../dotc/transform/PatternMatcher.scala | 9 +- .../dotc/transform/PostTyper.scala | 1 - .../dotc/transform/Splicer.scala | 8 +- .../dotc/transform/Splicing.scala | 3 +- .../dotc/transform/TryCatchPatterns.scala | 2 +- .../dotc/transform/TypeTestsCasts.scala | 3 +- .../dotc/transform/init/Semantic.scala | 4 +- .../dotc/transform/patmat/Space.scala | 1 - .../transform/sjs/ExplicitJSClasses.scala | 7 +- .../dotc/transform/sjs/PrepJSInterop.scala | 2 - .../dotc/typer/Applications.scala | 14 +- .../dotc/typer/Checking.scala | 15 +- .../dotc/typer/ErrorReporting.scala | 11 +- .../dotc/typer/Implicits.scala | 5 +- .../dotc/typer/ImportInfo.scala | 5 +- .../dotc/typer/Inferencing.scala | 152 +++--- .../dotc/typer/ProtoTypes.scala | 24 +- .../dotc/typer/RefChecks.scala | 6 +- .../dotc/typer/Synthesizer.scala | 7 +- .../dotc/typer/Typer.scala | 13 
+- .../dotc/util/ReadOnlyMap.scala | 2 +- .../dotc/util/ReadOnlySet.scala | 2 +- .../dotc/util/ReusableInstance.scala | 5 +- .../dotc/util/SimpleIdentityMap.scala | 2 +- .../dotc/util/SimpleIdentitySet.scala | 2 +- .../dotc/util/SourceFile.scala | 5 +- .../dotc/util/SourcePosition.scala | 2 +- .../dotc/util/common.scala | 9 +- 93 files changed, 534 insertions(+), 1092 deletions(-) diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala index f7a08d1640ee..705664177507 100644 --- a/tests/pos-with-compiler-cc/dotc/Run.scala +++ b/tests/pos-with-compiler-cc/dotc/Run.scala @@ -31,6 +31,7 @@ import java.nio.charset.StandardCharsets import scala.collection.mutable import scala.util.control.NonFatal import scala.io.Codec +import caps.unsafe.unsafeUnbox /** A compiler run. Exports various methods to compile source files */ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -270,7 +271,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint Rewrites.writeBack() suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) while (finalizeActions.nonEmpty) { - val action = finalizeActions.remove(0) + val action = finalizeActions.remove(0).unsafeUnbox action() } compiling = false diff --git a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala index 1e1db19bcf25..ba2c8f5f43e6 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala @@ -673,7 +673,7 @@ object desugar { else (Nil, Nil) } - var parents1 = parents + var parents1: List[untpd.Tree] = parents // !cc! 
need explicit type to make capture checking pass if (isEnumCase && parents.isEmpty) parents1 = enumClassTypeRef :: Nil if (isNonEnumCase) @@ -1779,7 +1779,10 @@ object desugar { val elems = segments flatMap { case ts: Thicket => ts.trees.tail case t => Nil - } map { + } map { (t: Tree) => t match + // !cc! explicitly typed parameter (t: Tree) is needed since otherwise + // we get an error similar to #16268. (The explicit type constrains the type of `segments` + // which is otherwise List[{*} tree]) case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala case t => t diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala index d14addb8c9c7..fd30d441a6ee 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala @@ -15,7 +15,7 @@ import annotation.internal.sharable /** A base class for things that have positions (currently: modifiers and trees) */ -abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable { +abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, caps.Pure { import Positioned.{ids, nextId, debugId} private var mySpan: Span = _ diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala index 71998aff9304..5139a46d6352 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala @@ -7,6 +7,7 @@ import Types._, Contexts._, Flags._ import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant import Decorators._ import dotty.tools.dotc.transform.SymUtils._ +import language.experimental.pureFunctions /** A map that applies three functions and a substitution together to a tree and * 
makes sure they are coordinated so that the result is well-typed. The functions are @@ -32,8 +33,8 @@ import dotty.tools.dotc.transform.SymUtils._ * set, we would get a data race assertion error. */ class TreeTypeMap( - val typeMap: Type => Type = IdentityTypeMap, - val treeMap: tpd.Tree => tpd.Tree = identity _, + val typeMap: Type -> Type = IdentityTypeMap, + val treeMap: tpd.Tree -> tpd.Tree = identity[tpd.Tree](_), // !cc! need explicit instantiation of default argument val oldOwners: List[Symbol] = Nil, val newOwners: List[Symbol] = Nil, val substFrom: List[Symbol] = Nil, @@ -42,8 +43,8 @@ class TreeTypeMap( import tpd._ def copy( - typeMap: Type => Type, - treeMap: tpd.Tree => tpd.Tree, + typeMap: Type -> Type, + treeMap: tpd.Tree -> tpd.Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala index 253477c5382c..7a6a8df45db6 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala @@ -17,6 +17,8 @@ import annotation.unchecked.uncheckedVariance import annotation.constructorOnly import compiletime.uninitialized import Decorators._ +import annotation.retains +import language.experimental.pureFunctions object Trees { @@ -47,7 +49,7 @@ object Trees { * nodes. 
*/ abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { + extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable, caps.Pure { if (Stats.enabled) ntrees += 1 @@ -431,7 +433,7 @@ object Trees { def isBackquoted: Boolean = hasAttachment(Backquoted) } - class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) + class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: -> String)(implicit @constructorOnly src: SourceFile) extends Ident[T](name) { def explanation = expl override def toString: String = s"SearchFailureIdent($explanation)" @@ -1518,7 +1520,7 @@ object Trees { } } - abstract class TreeAccumulator[X] { self => + abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.*) => // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. def apply(x: X, tree: Tree)(using Context): X diff --git a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala index 52325e36037d..1f43daec4d37 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala @@ -18,6 +18,7 @@ import typer.ConstFold import scala.annotation.tailrec import scala.collection.mutable.ListBuffer +import language.experimental.pureFunctions /** Some creators for typed trees */ object tpd extends Trees.Instance[Type] with TypedTreeInfo { @@ -1454,7 +1455,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * @return The symbols imported. 
*/ def importedSymbols(imp: Import, - selectorPredicate: untpd.ImportSelector => Boolean = util.common.alwaysTrue) + selectorPredicate: untpd.ImportSelector -> Boolean = util.common.alwaysTrue) (using Context): List[Symbol] = imp.selectors.find(selectorPredicate) match case Some(sel) => importedSymbols(imp.expr, sel.name) diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala index f72cafd4205d..79145551382f 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala @@ -11,6 +11,8 @@ import util.Spans.Span import annotation.constructorOnly import annotation.internal.sharable import Decorators._ +import annotation.retains +import language.experimental.pureFunctions object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { @@ -149,7 +151,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ - case class DependentTypeTree(tp: List[Symbol] => Type)(implicit @constructorOnly src: SourceFile) extends Tree + case class DependentTypeTree(tp: List[Symbol] -> Type)(implicit @constructorOnly src: SourceFile) extends Tree @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { override def isEmpty: Boolean = true @@ -369,7 +371,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { // ------ Creation methods for untyped only ----------------- def Ident(name: Name)(implicit src: SourceFile): Ident = new Ident(name) - def SearchFailureIdent(name: Name, explanation: => String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) + def SearchFailureIdent(name: Name, explanation: -> String)(implicit src: SourceFile): SearchFailureIdent = new 
SearchFailureIdent(name, explanation) def Select(qualifier: Tree, name: Name)(implicit src: SourceFile): Select = new Select(qualifier, name) def SelectWithSig(qualifier: Tree, name: Name, sig: Signature)(implicit src: SourceFile): Select = new SelectWithSig(qualifier, name, sig) def This(qual: Ident)(implicit src: SourceFile): This = new This(qual) @@ -731,7 +733,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { } } - abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { self => + abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { + self: UntypedTreeAccumulator[X] @retains(caps.*) => override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { case ModuleDef(name, impl) => this(x, impl) diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala index 6bf6d7770d8b..30d25979f87e 100644 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala @@ -16,6 +16,7 @@ import util.{SimpleIdentitySet, Property} import util.common.alwaysTrue import scala.collection.mutable import config.Config.ccAllowUnsoundMaps +import language.experimental.pureFunctions /** A class for capture sets. Capture sets can be constants or variables. * Capture sets support inclusion constraints <:< where <:< is subcapturing. @@ -37,7 +38,7 @@ import config.Config.ccAllowUnsoundMaps * if the mapped function is either a bijection or if it is idempotent * on capture references (c.f. doc comment on `map` below). */ -sealed abstract class CaptureSet extends Showable: +sealed abstract class CaptureSet extends Showable, caps.Pure: import CaptureSet.* /** The elements of this capture set. For capture variables, @@ -222,7 +223,7 @@ sealed abstract class CaptureSet extends Showable: /** The largest subset (via <:<) of this capture set that only contains elements * for which `p` is true. 
*/ - def filter(p: CaptureRef => Boolean)(using Context): CaptureSet = + def filter(p: CaptureRef -> Boolean)(using Context): CaptureSet = if this.isConst then val elems1 = elems.filter(p) if elems1 == elems then this @@ -372,8 +373,10 @@ object CaptureSet: def isConst = isSolved def isAlwaysEmpty = false - /** A handler to be invoked if the root reference `*` is added to this set */ - var rootAddedHandler: () => Context ?=> Unit = () => () + /** A handler to be invoked if the root reference `*` is added to this set + * The handler is pure in the sense that it will only output diagnostics. + */ + var rootAddedHandler: () -> Context ?-> Unit = () => () var description: String = "" @@ -421,7 +424,7 @@ object CaptureSet: else CompareResult.fail(this) - override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = + override def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = rootAddedHandler = handler super.disallowRootCapability(handler) @@ -613,7 +616,7 @@ object CaptureSet: /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] - (val source: Var, p: CaptureRef => Boolean)(using @constructorOnly ctx: Context) + (val source: Var, p: CaptureRef -> Boolean)(using @constructorOnly ctx: Context) extends DerivedVar(source.elems.filter(p)): override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = diff --git a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala index 899914e872c8..9e3a3e348824 100644 --- a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala +++ b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala @@ -21,6 +21,7 @@ import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} import StdNames.nme import NameKinds.DefaultGetterName import reporting.trace +import language.experimental.pureFunctions /** The capture 
checker */ object CheckCaptures: @@ -721,20 +722,21 @@ class CheckCaptures extends Recheck, SymTransformer: * the innermost capturing type. The outer capture annotations can be * reconstructed with the returned function. */ - def destructCapturingType(tp: Type, reconstruct: Type => Type = x => x): ((Type, CaptureSet, Boolean), Type => Type) = + def destructCapturingType(tp: Type, reconstruct: Type -> Type = (x: Type) => x) // !cc! need monomorphic default argument + : (Type, CaptureSet, Boolean, Type -> Type) = tp.dealias match case tp @ CapturingType(parent, cs) => if parent.dealias.isCapturingType then destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) else - ((parent, cs, tp.isBoxed), reconstruct) + (parent, cs, tp.isBoxed, reconstruct) case actual => - ((actual, CaptureSet(), false), reconstruct) + (actual, CaptureSet(), false, reconstruct) def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { if expected.isInstanceOf[WildcardType] then actual else - val ((parent, cs, actualIsBoxed), recon) = destructCapturingType(actual) + val (parent, cs, actualIsBoxed, recon: (Type -> Type)) = destructCapturingType(actual) val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing val insertBox = needsAdaptation && covariant != actualIsBoxed diff --git a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala index 7f20d7c7d9ea..a5678970411b 100644 --- a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala +++ b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala @@ -17,6 +17,7 @@ import PlainFile.toPlainFile import scala.jdk.CollectionConverters._ import scala.collection.immutable.ArraySeq import scala.util.control.NonFatal +import language.experimental.pureFunctions /** * A trait allowing to look for classpath entries in directories. 
It provides common logic for @@ -32,7 +33,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientCla protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] protected def getSubDir(dirName: String): Option[F] - protected def listChildren(dir: F, filter: Option[F => Boolean] = None): Array[F] + protected def listChildren(dir: F, filter: Option[F -> Boolean] = (None: Option[F -> Boolean])): Array[F] // !cc! need explicit typing of default argument protected def getName(f: F): String protected def toAbstractFile(f: F): AbstractFile protected def isPackage(f: F): Boolean @@ -90,7 +91,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo if (packageDir.exists && packageDir.isDirectory) Some(packageDir) else None } - protected def listChildren(dir: JFile, filter: Option[JFile => Boolean]): Array[JFile] = { + protected def listChildren(dir: JFile, filter: Option[JFile -> Boolean]): Array[JFile] = { val listing = filter match { case Some(f) => dir.listFiles(mkFileFilter(f)) case None => dir.listFiles() diff --git a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala index d6fa6fb78d07..0f5ac16b40bf 100644 --- a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala +++ b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala @@ -9,6 +9,7 @@ import scala.language.unsafeNulls import java.io.{File => JFile, FileFilter} import java.net.URL import dotty.tools.io.AbstractFile +import language.experimental.pureFunctions /** * Common methods related to Java files and abstract files used in the context of classpath @@ -78,7 +79,7 @@ object FileUtils { def mayBeValidPackage(dirName: String): Boolean = (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') - def mkFileFilter(f: JFile => Boolean): FileFilter = new FileFilter { + def mkFileFilter(f: JFile -> Boolean): FileFilter = new FileFilter { def accept(pathname: JFile): 
Boolean = f(pathname) } } diff --git a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala index 0cb0ba59c52e..ac80d543b539 100644 --- a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala @@ -8,6 +8,7 @@ import FileUtils._ import java.net.URL import dotty.tools.io.ClassPath +import language.experimental.pureFunctions case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { type F = AbstractFile @@ -28,7 +29,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi protected def emptyFiles: Array[AbstractFile] = Array.empty protected def getSubDir(packageDirName: String): Option[AbstractFile] = Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) - protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match { + protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile -> Boolean]): Array[F] = filter match { case Some(f) => dir.iterator.filter(f).toArray case _ => dir.toArray } diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala index cbd50429492e..17e3ec352e7c 100644 --- a/tests/pos-with-compiler-cc/dotc/config/Config.scala +++ b/tests/pos-with-compiler-cc/dotc/config/Config.scala @@ -22,11 +22,6 @@ object Config { */ inline val checkConstraintsNonCyclic = false - /** Check that reverse dependencies in constraints are correct and complete. - * Can also be enabled using -Ycheck-constraint-deps. - */ - inline val checkConstraintDeps = false - /** Check that each constraint resulting from a subtype test * is satisfiable. 
Also check that a type variable instantiation * satisfies its constraints. @@ -189,9 +184,6 @@ object Config { /** If set, prints a trace of all symbol completions */ inline val showCompletions = false - /** If set, show variable/variable reverse dependencies when printing constraints. */ - inline val showConstraintDeps = true - /** If set, method results that are context functions are flattened by adding * the parameters of the context function results to the methods themselves. * This is an optimization that reduces closure allocations. diff --git a/tests/pos-with-compiler-cc/dotc/config/Feature.scala b/tests/pos-with-compiler-cc/dotc/config/Feature.scala index e7117f542384..0b3a344900df 100644 --- a/tests/pos-with-compiler-cc/dotc/config/Feature.scala +++ b/tests/pos-with-compiler-cc/dotc/config/Feature.scala @@ -10,6 +10,7 @@ import util.{SrcPos, NoSourcePosition} import SourceVersion._ import reporting.Message import NameKinds.QualifiedName +import language.experimental.pureFunctions object Feature: @@ -127,7 +128,7 @@ object Feature: else false - def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = + def checkExperimentalFeature(which: String, srcPos: SrcPos, note: -> String = "")(using Context) = if !isExperimentalEnabled then report.error(i"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala index f7743dddda4e..f27a10c9af3d 100644 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala @@ -17,7 +17,7 @@ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` private val minTargetVersion = 8 - private val maxTargetVersion = 20 + private val maxTargetVersion = 19 def 
supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) @@ -308,7 +308,6 @@ private sealed trait YSettings: val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") - val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") @@ -330,7 +329,6 @@ private sealed trait YSettings: val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") - val YlightweightLazyVals: Setting[Boolean] = BooleanSetting("-Ylightweight-lazy-vals", "Use experimental 
lightweight implementation of lazy vals") /** Area-specific debug output */ val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala index aa8ead280bbf..d33b1d39942e 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala @@ -8,6 +8,7 @@ import util.Spans.Span import printing.{Showable, Printer} import printing.Texts.Text import annotation.internal.sharable +import language.experimental.pureFunctions object Annotations { @@ -15,7 +16,8 @@ object Annotations { if (tree.symbol.isConstructor) tree.symbol.owner else tree.tpe.typeSymbol - abstract class Annotation extends Showable { + abstract class Annotation extends Showable, caps.Pure { + def tree(using Context): Tree def symbol(using Context): Symbol = annotClass(tree) @@ -96,11 +98,11 @@ object Annotations { def tree(using Context): Tree = t abstract class LazyAnnotation extends Annotation { - protected var mySym: Symbol | (Context ?=> Symbol) | Null + protected var mySym: Symbol | (Context ?-> Symbol) | Null override def symbol(using parentCtx: Context): Symbol = assert(mySym != null) mySym match { - case symFn: (Context ?=> Symbol) @unchecked => + case symFn: (Context ?-> Symbol) @unchecked => mySym = null mySym = atPhaseBeforeTransforms(symFn) // We should always produce the same annotation tree, no matter when the @@ -114,11 +116,11 @@ object Annotations { } mySym.asInstanceOf[Symbol] - protected var myTree: Tree | (Context ?=> Tree) | Null + protected var myTree: Tree | (Context ?-> Tree) | Null def tree(using Context): Tree = assert(myTree != null) myTree match { - case treeFn: (Context ?=> Tree) @unchecked => + case treeFn: (Context ?-> Tree) @unchecked => myTree = null myTree = atPhaseBeforeTransforms(treeFn) case _ => @@ -129,10 
+131,10 @@ object Annotations { override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] } - class DeferredSymAndTree(symFn: Context ?=> Symbol, treeFn: Context ?=> Tree) + class DeferredSymAndTree(symFn: Context ?-> Symbol, treeFn: Context ?-> Tree) extends LazyAnnotation: - protected var mySym: Symbol | (Context ?=> Symbol) | Null = ctx ?=> symFn(using ctx) - protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) + protected var mySym: Symbol | (Context ?-> Symbol) | Null = ctx ?=> symFn(using ctx) + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) /** An annotation indicating the body of a right-hand side, * typically of an inline method. Treated specially in @@ -153,11 +155,11 @@ object Annotations { abstract class LazyBodyAnnotation extends BodyAnnotation { // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait - protected var myTree: Tree | (Context ?=> Tree) | Null + protected var myTree: Tree | (Context ?-> Tree) | Null def tree(using Context): Tree = assert(myTree != null) myTree match { - case treeFn: (Context ?=> Tree) @unchecked => + case treeFn: (Context ?-> Tree) @unchecked => myTree = null myTree = atPhaseBeforeTransforms(treeFn) case _ => @@ -169,9 +171,9 @@ object Annotations { } object LazyBodyAnnotation { - def apply(bodyFn: Context ?=> Tree): LazyBodyAnnotation = + def apply(bodyFn: Context ?-> Tree): LazyBodyAnnotation = new LazyBodyAnnotation: - protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> bodyFn(using ctx) + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> bodyFn(using ctx) } object Annotation { @@ -200,21 +202,21 @@ object Annotations { apply(New(atp, args)) /** Create an annotation where the tree is computed lazily. 
*/ - def deferred(sym: Symbol)(treeFn: Context ?=> Tree): Annotation = + def deferred(sym: Symbol)(treeFn: Context ?-> Tree): Annotation = new LazyAnnotation { - protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) - protected var mySym: Symbol | (Context ?=> Symbol) | Null = sym + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) + protected var mySym: Symbol | (Context ?-> Symbol) | Null = sym } /** Create an annotation where the symbol and the tree are computed lazily. */ - def deferredSymAndTree(symFn: Context ?=> Symbol)(treeFn: Context ?=> Tree): Annotation = + def deferredSymAndTree(symFn: Context ?-> Symbol)(treeFn: Context ?-> Tree): Annotation = DeferredSymAndTree(symFn, treeFn) /** Extractor for child annotations */ object Child { /** A deferred annotation to the result of a given child computation */ - def later(delayedSym: Context ?=> Symbol, span: Span)(using Context): Annotation = { + def later(delayedSym: Context ?-> Symbol, span: Span)(using Context): Annotation = { def makeChildLater(using Context) = { val sym = delayedSym New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) diff --git a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala index fb87aed77c41..07b6e71cdcc9 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala @@ -4,7 +4,6 @@ package core import Types._, Contexts._ import printing.Showable -import util.{SimpleIdentitySet, SimpleIdentityMap} /** Constraint over undetermined type parameters. Constraints are built * over values of the following types: @@ -129,7 +128,7 @@ abstract class Constraint extends Showable { /** Is `tv` marked as hard in the constraint? */ def isHard(tv: TypeVar): Boolean - + /** The same as this constraint, but with `tv` marked as hard. 
*/ def withHard(tv: TypeVar)(using Context): This @@ -166,32 +165,15 @@ abstract class Constraint extends Showable { */ def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean + /** Check that no constrained parameter contains itself as a bound */ + def checkNonCyclic()(using Context): this.type + /** Does `param` occur at the toplevel in `tp` ? * Toplevel means: the type itself or a factor in some * combination of `&` or `|` types. */ def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean - /** A string that shows the reverse dependencies maintained by this constraint - * (coDeps and contraDeps for OrderingConstraints). - */ - def depsToString(using Context): String - - /** Does the constraint restricted to variables outside `except` depend on `tv` - * in the given direction `co`? - * @param `co` If true, test whether the constraint would change if the variable is made larger - * otherwise, test whether the constraint would change if the variable is made smaller. - */ - def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean - - /** Depending on Config settngs: - * - Under `checkConstraintsNonCyclic`, check that no constrained - * parameter contains itself as a bound. - * - Under `checkConstraintDeps`, check hat reverse dependencies in - * constraints are correct and complete. 
- */ - def checkWellFormed()(using Context): this.type - /** Check that constraint only refers to TypeParamRefs bound by itself */ def checkClosed()(using Context): Unit diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala index a6c1a24ebf96..4f8157b5ea3c 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala @@ -39,12 +39,13 @@ import xsbti.AnalysisCallback import plugins._ import java.util.concurrent.atomic.AtomicInteger import java.nio.file.InvalidPathException +import language.experimental.pureFunctions object Contexts { private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() - private val (printerFnLoc, store3) = store2.newLocation[Context => Printer](new RefinedPrinter(_)) + private val (printerFnLoc, store3) = store2.newLocation[Context -> Printer](new RefinedPrinter(_)) private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() private val (runLoc, store6) = store5.newLocation[Run | Null]() @@ -211,7 +212,7 @@ object Contexts { def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) /** The current plain printer */ - def printerFn: Context => Printer = store(printerFnLoc) + def printerFn: Context -> Printer = store(printerFnLoc) /** A function creating a printer */ def printer: Printer = @@ -275,7 +276,7 @@ object Contexts { def nestingLevel: Int = effectiveScope.nestingLevel /** Sourcefile corresponding to given abstract file, memoized */ - def getSource(file: AbstractFile, codec: => Codec = Codec(settings.encoding.value)) = { + def getSource(file: AbstractFile, codec: -> Codec = Codec(settings.encoding.value)) = { util.Stats.record("Context.getSource") base.sources.getOrElseUpdate(file, SourceFile(file, codec)) } @@ 
-636,7 +637,7 @@ object Contexts { def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) - def setPrinterFn(printer: Context => Printer): this.type = updateStore(printerFnLoc, printer) + def setPrinterFn(printer: Context -> Printer): this.type = updateStore(printerFnLoc, printer) def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, settingsState) def setRun(run: Run | Null): this.type = updateStore(runLoc, run) def setProfiler(profiler: Profiler): this.type = updateStore(profilerLoc, profiler) diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala index 54faf9a41177..444b1b110e12 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala @@ -10,6 +10,7 @@ import Contexts._, Names._, Phases._, Symbols._ import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ import transform.MegaPhase import reporting.{Message, NoExplanation} +import language.experimental.pureFunctions /** This object provides useful implicit decorators for types defined elsewhere */ object Decorators { @@ -58,7 +59,7 @@ object Decorators { padding + s.replace("\n", "\n" + padding) end extension - extension (str: => String) + extension (str: -> String) def toMessage: Message = reporting.NoExplanation(str) /** Implements a findSymbol method on iterators of Symbols that diff --git a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala index b43857b7d28c..9c1ccf531ea4 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala @@ -19,6 +19,7 @@ import Symbols.requiredModuleRef import cc.{CapturingType, CaptureSet, 
EventuallyCapturingType} import scala.annotation.tailrec +import language.experimental.pureFunctions object Definitions { @@ -70,7 +71,7 @@ class Definitions { // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only // implemented in Dotty and not in Scala 2. // See . - private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = { + private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: -> Seq[Type]): ClassSymbol = { val completer = new LazyType { def complete(denot: SymDenotation)(using Context): Unit = { val cls = denot.asClass.classSymbol @@ -182,7 +183,7 @@ class Definitions { tl => op(tl.paramRefs(0), tl.paramRefs(1)))) private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, - resultTypeFn: PolyType => Type, + resultTypeFn: PolyType -> Type, flags: FlagSet = EmptyFlags, bounds: TypeBounds = TypeBounds.empty, useCompleter: Boolean = false) = { @@ -199,7 +200,7 @@ class Definitions { enterMethod(cls, name, info, flags) } - private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) = + private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType -> Type, flags: FlagSet) = enterPolyMethod(cls, name, 1, resultTypeFn, flags) private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = { @@ -2033,12 +2034,6 @@ class Definitions { addSyntheticSymbolsComments } - /** Definitions used in Lazy Vals implementation */ - val LazyValsModuleName = "scala.runtime.LazyVals" - @tu lazy val LazyValsModule = requiredModule(LazyValsModuleName) - @tu lazy val LazyValsWaitingState = requiredClass(s"$LazyValsModuleName.Waiting") - @tu lazy val LazyValsControlState = requiredClass(s"$LazyValsModuleName.LazyValControlState") - def addSyntheticSymbolsComments(using Context): Unit = def add(sym: Symbol, doc: String) = 
ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala index f267e6c85e03..5d99118e56af 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala @@ -23,6 +23,7 @@ import config.Printers.overload import util.common._ import typer.ProtoTypes.NoViewsAllowed import collection.mutable.ListBuffer +import language.experimental.pureFunctions /** Denotations represent the meaning of symbols and named types. * The following diagram shows how the principal types of denotations @@ -75,7 +76,7 @@ object Denotations { /** A PreDenotation represents a group of single denotations or a single multi-denotation * It is used as an optimization to avoid forming MultiDenotations too eagerly. */ - abstract class PreDenotation { + abstract class PreDenotation extends caps.Pure { /** A denotation in the group exists */ def exists: Boolean @@ -1326,7 +1327,10 @@ object Denotations { } else owner } - def recur(path: Name, wrap: TermName => Name = identity): Denotation = path match { + def recur( + path: Name, + wrap: TermName -> Name = identity[Name] // !cc! 
default argument needs to be instantiated, error if [Name] is dropped + ): Denotation = path match { case path: TypeName => recur(path.toTermName, n => n.toTypeName) case ModuleClassName(underlying) => @@ -1336,7 +1340,7 @@ object Denotations { case qn @ AnyQualifiedName(prefix, _) => recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) case path: SimpleName => - def recurSimple(len: Int, wrap: TermName => Name): Denotation = { + def recurSimple(len: Int, wrap: TermName -> Name): Denotation = { val point = path.lastIndexOf('.', len - 1) val selector = wrap(path.slice(point + 1, len).asTermName) val prefix = @@ -1364,7 +1368,7 @@ object Denotations { NoSymbol /** An exception for accessing symbols that are no longer valid in current run */ - class StaleSymbol(msg: => String) extends Exception { + class StaleSymbol(msg: -> String) extends Exception { util.Stats.record("stale symbol") override def getMessage(): String = msg } diff --git a/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala b/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala index f71c16e82b70..2ed9a17b9f7e 100644 --- a/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala +++ b/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala @@ -23,14 +23,14 @@ object NameKinds { @sharable private val uniqueNameKinds = util.HashMap[String, UniqueNameKind]() /** A class for the info stored in a derived name */ - abstract class NameInfo { + abstract class NameInfo extends caps.Pure { def kind: NameKind def mkString(underlying: TermName): String def map(f: SimpleName => SimpleName): NameInfo = this } /** An abstract base class of classes that define the kind of a derived name info */ - abstract class NameKind(val tag: Int) { self => + abstract class NameKind(val tag: Int) extends caps.Pure { self => /** The info class defined by this kind */ type ThisInfo <: Info diff --git a/tests/pos-with-compiler-cc/dotc/core/Names.scala b/tests/pos-with-compiler-cc/dotc/core/Names.scala index f13c3a184bf9..3c3c04147de6 
100644 --- a/tests/pos-with-compiler-cc/dotc/core/Names.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Names.scala @@ -30,7 +30,7 @@ object Names { * in a name table. A derived term name adds a tag, and possibly a number * or a further simple name to some other name. */ - abstract class Name extends Designator, Showable derives CanEqual { + abstract class Name extends Designator, Showable, caps.Pure derives CanEqual { /** A type for names of the same kind as this name */ type ThisName <: Name diff --git a/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala b/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala index ac6cb78f9e91..961d106a14c8 100644 --- a/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala +++ b/tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala @@ -13,37 +13,31 @@ import reflect.ClassTag import annotation.tailrec import annotation.internal.sharable import cc.{CapturingType, derivedCapturingType} +import caps.unsafe.unsafeUnbox object OrderingConstraint { - /** If true, use reverse dependencies in `replace` to avoid checking the bounds - * of all parameters in the constraint. This can speed things up, but there are some - * rare corner cases where reverse dependencies miss a parameter. Specifically, - * if a constraint contains a free reference to TypeParam P and afterwards the - * same P is added as a bound variable to the constraint, a backwards link would - * then become necessary at this point but is missing. This causes two CB projects - * to fail when reverse dependencies are checked (parboiled2 and perspective). - * In these rare cases `replace` could behave differently when optimized. However, - * no deviation was found in the two projects. It is not clear what the "right" - * behavior of `replace` should be in these cases. Normally, PolyTypes added - * to constraints are supposed to be fresh, so that would mean that the behavior - * with optimizeReplace = true would be correct. 
But the previous behavior without - * reverse dependency checking corresponds to `optimizeReplace = false`. This behavior - * makes sense if we assume that the added polytype was simply added too late, so we - * want to establish the link between newly bound variable and pre-existing reference. - */ - private final val optimizeReplace = true - - private type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] + type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] /** The type of `OrderingConstraint#boundsMap` */ - private type ParamBounds = ArrayValuedMap[Type] + type ParamBounds = ArrayValuedMap[Type] /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */ - private type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] + type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] + + /** A new constraint with given maps and given set of hard typevars */ + private def newConstraint( + boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering, + hardVars: TypeVars)(using Context) : OrderingConstraint = + if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then + empty + else + val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) + if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) + result /** A lens for updating a single entry array in one of the three constraint maps */ - private abstract class ConstraintLens[T <: AnyRef: ClassTag] { + abstract class ConstraintLens[T <: AnyRef: ClassTag] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[T] | Null def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[T])(using Context): OrderingConstraint def initial: T @@ -54,7 +48,7 @@ object OrderingConstraint { } /** The `current` constraint but with the entry for `param` updated to `entry`. - * `current` is used linearly. If it is different from `prev` then `current` is + * `current` is used linearly. 
If it is different from `prev` it is * known to be dead after the call. Hence it is OK to update destructively * parts of `current` which are not shared by `prev`. */ @@ -96,27 +90,27 @@ object OrderingConstraint { map(prev, current, param.binder, param.paramNum, f) } - private val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { + val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[Type] | Null = c.boundsMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[Type])(using Context): OrderingConstraint = - c.newConstraint(boundsMap = c.boundsMap.updated(poly, entries)) + newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap, c.hardVars) def initial = NoType } - private val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.lowerMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - c.newConstraint(lowerMap = c.lowerMap.updated(poly, entries)) + newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap, c.hardVars) def initial = Nil } - private val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.upperMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - c.newConstraint(upperMap = c.upperMap.updated(poly, entries)) + newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries), c.hardVars) 
def initial = Nil } @@ -150,27 +144,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : ParamOrdering, private val hardVars : TypeVars) extends Constraint { - thisConstraint => import UnificationDirection.* type This = OrderingConstraint - /** A new constraint with given maps and given set of hard typevars */ - def newConstraint( // !!! Dotty problem: Making newConstraint `private` causes -Ytest-pickler failure. - boundsMap: ParamBounds = this.boundsMap, - lowerMap: ParamOrdering = this.lowerMap, - upperMap: ParamOrdering = this.upperMap, - hardVars: TypeVars = this.hardVars)(using Context) : OrderingConstraint = - if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then - empty - else - val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) - if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) - result.coDeps = this.coDeps - result.contraDeps = this.contraDeps - result - // ----------- Basic indices -------------------------------------------------- /** The number of type parameters in the given entry array */ @@ -240,189 +218,6 @@ class OrderingConstraint(private val boundsMap: ParamBounds, if tvar == null then NoType else tvar -// ------------- Type parameter dependencies ---------------------------------------- - - private type ReverseDeps = SimpleIdentityMap[TypeParamRef, SimpleIdentitySet[TypeParamRef]] - - /** A map that associates type parameters of this constraint with all other type - * parameters that refer to them in their bounds covariantly, such that, if the - * type parameter is instantiated to a larger type, the constraint would be narrowed - * (i.e. solution set changes other than simply being made larger). 
- */ - private var coDeps: ReverseDeps = SimpleIdentityMap.empty - - /** A map that associates type parameters of this constraint with all other type - * parameters that refer to them in their bounds covariantly, such that, if the - * type parameter is instantiated to a smaller type, the constraint would be narrowed. - * (i.e. solution set changes other than simply being made larger). - */ - private var contraDeps: ReverseDeps = SimpleIdentityMap.empty - - /** Null-safe indexing */ - extension (deps: ReverseDeps) def at(param: TypeParamRef): SimpleIdentitySet[TypeParamRef] = - val result = deps(param) - if null == result // swapped operand order important since `==` is overloaded in `SimpleIdentitySet` - then SimpleIdentitySet.empty - else result - - override def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean = - def origin(tv: TypeVar) = - assert(!instType(tv).exists) - tv.origin - val param = origin(tv) - val excluded = except.map(origin) - val qualifies: TypeParamRef => Boolean = !excluded.contains(_) - def test(deps: ReverseDeps, lens: ConstraintLens[List[TypeParamRef]]) = - deps.at(param).exists(qualifies) - || lens(this, tv.origin.binder, tv.origin.paramNum).exists(qualifies) - if co then test(coDeps, upperLens) else test(contraDeps, lowerLens) - - /** Modify traversals in two respects: - * - when encountering an application C[Ts], where C is a type variable or parameter - * that has an instantiation in this constraint, assume the type parameters of - * the instantiation instead of the type parameters of C when traversing the - * arguments Ts. That can make a difference for the variance in which an argument - * is traversed. Example constraint: - * - * constrained types: C[X], A - * A >: C[B] - * C := Option - * - * Here, B is traversed with variance +1 instead of 0. Test case: pos/t3152.scala - * - * - When typing a prefx, don't avoid negative variances. 
This matters only for the - * corner case where a parameter is instantiated to Nothing (see comment in - * TypeAccumulator#applyToPrefix). When determining instantiation directions in - * interpolations (which is what dependency variances are for), it can be ignored. - */ - private trait ConstraintAwareTraversal[T] extends TypeAccumulator[T]: - - override def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = - def tparams(tycon: Type): List[ParamInfo] = tycon match - case tycon: TypeVar if !tycon.inst.exists => tparams(tycon.origin) - case tycon: TypeParamRef => - entry(tycon) match - case _: TypeBounds => tp.tyconTypeParams - case tycon1 if tycon1.typeParams.nonEmpty => tycon1.typeParams - case _ => tp.tyconTypeParams - case _ => tp.tyconTypeParams - tparams(tp.tycon) - - override def applyToPrefix(x: T, tp: NamedType): T = - this(x, tp.prefix) - end ConstraintAwareTraversal - - private class Adjuster(srcParam: TypeParamRef)(using Context) - extends TypeTraverser, ConstraintAwareTraversal[Unit]: - - var add: Boolean = compiletime.uninitialized - val seen = util.HashSet[LazyRef]() - - def update(deps: ReverseDeps, referenced: TypeParamRef): ReverseDeps = - val prev = deps.at(referenced) - val newSet = if add then prev + srcParam else prev - srcParam - if newSet.isEmpty then deps.remove(referenced) - else deps.updated(referenced, newSet) - - def traverse(t: Type) = t match - case param: TypeParamRef => - entry(param) match - case _: TypeBounds => - if variance >= 0 then coDeps = update(coDeps, param) - if variance <= 0 then contraDeps = update(contraDeps, param) - case tp => - traverse(tp) - case tp: LazyRef => - if !seen.contains(tp) then - seen += tp - traverse(tp.ref) - case _ => traverseChildren(t) - end Adjuster - - /** Adjust dependencies to account for the delta of previous entry `prevEntry` - * and the new bound `entry` for the type parameter `srcParam`. 
- */ - def adjustDeps(entry: Type | Null, prevEntry: Type | Null, srcParam: TypeParamRef)(using Context): this.type = - val adjuster = new Adjuster(srcParam) - - /** Adjust reverse dependencies of all type parameters referenced by `bound` - * @param isLower `bound` is a lower bound - * @param add if true, add referenced variables to dependencoes, otherwise drop them. - */ - def adjustReferenced(bound: Type, isLower: Boolean, add: Boolean) = - adjuster.variance = if isLower then 1 else -1 - adjuster.add = add - adjuster.seen.clear() - adjuster.traverse(bound) - - /** Use an optimized strategy to adjust dependencies to account for the delta - * of previous bound `prevBound` and new bound `bound`: If `prevBound` is some - * and/or prefix of `bound`, and `baseCase` is true, just add the new parts of `bound`. - * @param isLower `bound` and `prevBound` are lower bounds - * @return true iff the delta strategy succeeded, false if it failed in which case - * the constraint is left unchanged. - */ - def adjustDelta(bound: Type, prevBound: Type, isLower: Boolean, baseCase: => Boolean): Boolean = - if bound eq prevBound then - baseCase - else bound match - case bound: AndOrType => - adjustDelta(bound.tp1, prevBound, isLower, baseCase) && { - adjustReferenced(bound.tp2, isLower, add = true) - true - } - case _ => false - - /** Add or remove depenencies referenced in `bounds`. 
- * @param add if true, dependecies are added, otherwise they are removed - */ - def adjustBounds(bounds: TypeBounds, add: Boolean) = - adjustReferenced(bounds.lo, isLower = true, add) - adjustReferenced(bounds.hi, isLower = false, add) - - entry match - case entry @ TypeBounds(lo, hi) => - prevEntry match - case prevEntry @ TypeBounds(plo, phi) => - if !adjustDelta(lo, plo, isLower = true, - adjustDelta(hi, phi, isLower = false, true)) - then - adjustBounds(prevEntry, add = false) - adjustBounds(entry, add = true) - case _ => - adjustBounds(entry, add = true) - case _ => - prevEntry match - case prevEntry: TypeBounds => - adjustBounds(prevEntry, add = false) - case _ => - dropDeps(srcParam) // srcParam is instantiated, so its dependencies can be dropped - this - end adjustDeps - - /** Adjust dependencies to account for adding or dropping all `entries` associated - * with `poly`. - * @param add if true, entries is added, otherwise it is dropped - */ - def adjustDeps(poly: TypeLambda, entries: Array[Type], add: Boolean)(using Context): this.type = - for n <- 0 until paramCount(entries) do - if add - then adjustDeps(entries(n), NoType, poly.paramRefs(n)) - else adjustDeps(NoType, entries(n), poly.paramRefs(n)) - this - - /** Remove all reverse dependencies of `param` */ - def dropDeps(param: TypeParamRef)(using Context): Unit = - coDeps = coDeps.remove(param) - contraDeps = contraDeps.remove(param) - - /** A string representing the two dependency maps */ - def depsToString(using Context): String = - def depsStr(deps: ReverseDeps): String = - def depStr(param: TypeParamRef) = i"$param --> ${deps.at(param).toList}%, %" - if deps.isEmpty then "" else i"\n ${deps.toList.map((k, v) => depStr(k))}%\n %" - i" co-deps:${depsStr(coDeps)}\n contra-deps:${depsStr(contraDeps)}\n" - // ---------- Adding TypeLambdas -------------------------------------------------- /** The bound type `tp` without constrained parameters which are clearly @@ -488,8 +283,7 @@ class 
OrderingConstraint(private val boundsMap: ParamBounds, val entries1 = new Array[Type](nparams * 2) poly.paramInfos.copyToArray(entries1, 0) tvars.copyToArray(entries1, nparams) - newConstraint(boundsMap = this.boundsMap.updated(poly, entries1)) - .init(poly) + newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap, hardVars).init(poly) } /** Split dependent parameters off the bounds for parameters in `poly`. @@ -505,14 +299,13 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val param = poly.paramRefs(i) val bounds = dropWildcards(nonParamBounds(param)) val stripped = stripParams(bounds, todos, isUpper = true) - current = boundsLens.update(this, current, param, stripped) + current = updateEntry(current, param, stripped) while todos.nonEmpty do - current = todos.head(current, param) + current = todos.head.unsafeUnbox(current, param) todos.dropInPlace(1) i += 1 } - current.adjustDeps(poly, current.boundsMap(poly).nn, add = true) - .checkWellFormed() + current.checkNonCyclic() } // ---------- Updates ------------------------------------------------------------ @@ -634,12 +427,10 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case _ => Nil - private def updateEntry(current: This, param: TypeParamRef, newEntry: Type)(using Context): This = { - if Config.checkNoWildcardsInConstraint then assert(!newEntry.containsWildcardTypes) - val oldEntry = current.entry(param) - var current1 = boundsLens.update(this, current, param, newEntry) - .adjustDeps(newEntry, oldEntry, param) - newEntry match { + private def updateEntry(current: This, param: TypeParamRef, tp: Type)(using Context): This = { + if Config.checkNoWildcardsInConstraint then assert(!tp.containsWildcardTypes) + var current1 = boundsLens.update(this, current, param, tp) + tp match { case TypeBounds(lo, hi) => for p <- dependentParams(lo, isUpper = false) do current1 = order(current1, p, param) @@ -652,10 +443,10 @@ class OrderingConstraint(private val boundsMap: ParamBounds, 
/** The public version of `updateEntry`. Guarantees that there are no cycles */ def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = - updateEntry(this, param, ensureNonCyclic(param, tp)).checkWellFormed() + updateEntry(this, param, ensureNonCyclic(param, tp)).checkNonCyclic() def addLess(param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection)(using Context): This = - order(this, param1, param2, direction).checkWellFormed() + order(this, param1, param2, direction).checkNonCyclic() // ---------- Replacements and Removals ------------------------------------- @@ -665,81 +456,24 @@ class OrderingConstraint(private val boundsMap: ParamBounds, */ def replace(param: TypeParamRef, tp: Type)(using Context): OrderingConstraint = val replacement = tp.dealiasKeepAnnots.stripTypeVar - if param == replacement then this.checkWellFormed() + if param == replacement then this.checkNonCyclic() else assert(replacement.isValueTypeOrLambda) + var current = + if isRemovable(param.binder) then remove(param.binder) + else updateEntry(this, param, replacement) - val replacedTypeVar = typeVarOfParam(param) - //println(i"replace $param with $replacement in $this") - - def mapReplacedTypeVarTo(to: Type) = new TypeMap: - override def apply(t: Type): Type = - if (t eq replacedTypeVar) && t.exists then to else mapOver(t) - - var current = this - - def removeParamFrom(ps: List[TypeParamRef]) = - ps.filterConserve(param ne _) - - for lo <- lower(param) do - current = upperLens.map(this, current, lo, removeParamFrom) - for hi <- upper(param) do - current = lowerLens.map(this, current, hi, removeParamFrom) - - def replaceParamIn(other: TypeParamRef) = - val oldEntry = current.entry(other) - val newEntry = current.ensureNonCyclic(other, oldEntry.substParam(param, replacement)) - current = boundsLens.update(this, current, other, newEntry) - var oldDepEntry = oldEntry - var newDepEntry = newEntry - replacedTypeVar match - case tvar: TypeVar => - if tvar.inst.exists 
// `isInstantiated` would use ctx.typerState.constraint rather than the current constraint - then - // If the type variable has been instantiated, we need to forget about - // the instantiation for old dependencies. - // I.e. to find out what the old entry was, we should not follow - // the newly instantiated type variable but assume the type variable's origin `param`. - // An example where this happens is if `replace` is called from TypeVar's `instantiateWith`. - oldDepEntry = mapReplacedTypeVarTo(param)(oldDepEntry) - else - // If the type variable has not been instantiated, we need to replace references to it - // in the new entry by `replacement`. Otherwise we would get stuck in an uninstantiated - // type variable. - // An example where this happens is if `replace` is called from unify. - newDepEntry = mapReplacedTypeVarTo(replacement)(newDepEntry) - case _ => - if oldDepEntry ne newDepEntry then - if current eq this then - // We can end up here if oldEntry eq newEntry, so posssibly no new constraint - // was created, but oldDepEntry ne newDepEntry. In that case we must make - // sure we have a new constraint before updating dependencies. 
- current = newConstraint() - current.adjustDeps(newDepEntry, oldDepEntry, other) - end replaceParamIn - - if optimizeReplace then - val co = current.coDeps.at(param) - val contra = current.contraDeps.at(param) - current.foreachParam { (p, i) => - val other = p.paramRefs(i) - entry(other) match - case _: TypeBounds => - if co.contains(other) || contra.contains(other) then - replaceParamIn(other) - case _ => replaceParamIn(other) - } - else - current.foreachParam { (p, i) => - val other = p.paramRefs(i) - if other != param then replaceParamIn(other) - } - - current = - if isRemovable(param.binder) then current.remove(param.binder) - else updateEntry(current, param, replacement) - current.dropDeps(param) - current.checkWellFormed() + def removeParam(ps: List[TypeParamRef]) = ps.filterConserve(param ne _) + + def replaceParam(tp: Type, atPoly: TypeLambda, atIdx: Int): Type = + current.ensureNonCyclic(atPoly.paramRefs(atIdx), tp.substParam(param, replacement)) + + current.foreachParam { (p, i) => + current = boundsLens.map(this, current, p, i, replaceParam(_, p, i)) + current = lowerLens.map(this, current, p, i, removeParam) + current = upperLens.map(this, current, p, i, removeParam) + } + current.checkNonCyclic() end replace def remove(pt: TypeLambda)(using Context): This = { @@ -752,8 +486,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } val hardVars1 = pt.paramRefs.foldLeft(hardVars)((hvs, param) => hvs - typeVarOfParam(param)) newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap), hardVars1) - .adjustDeps(pt, boundsMap(pt).nn, add = false) - .checkWellFormed() + .checkNonCyclic() } def isRemovable(pt: TypeLambda): Boolean = { @@ -779,7 +512,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def swapKey[T](m: ArrayValuedMap[T]) = val info = m(from) if info == null then m else m.remove(from).updated(to, info) - var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), 
swapKey(upperMap)) + var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap), hardVars) def subst[T <: Type](x: T): T = x.subst(from, to).asInstanceOf[T] current.foreachParam {(p, i) => current = boundsLens.map(this, current, p, i, subst) @@ -787,12 +520,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current = upperLens.map(this, current, p, i, _.map(subst)) } constr.println(i"renamed $this to $current") - current.checkWellFormed() + current.checkNonCyclic() def isHard(tv: TypeVar) = hardVars.contains(tv) def withHard(tv: TypeVar)(using Context) = - newConstraint(hardVars = this.hardVars + tv) + newConstraint(boundsMap, lowerMap, upperMap, hardVars + tv) def instType(tvar: TypeVar): Type = entry(tvar.origin) match case _: TypeBounds => NoType @@ -819,26 +552,6 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(tvar.origin == param, i"mismatch $tvar, $param") case _ => - def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = - def occurs(tp: Type)(using Context): Boolean = tp match - case tp: AndOrType => - occurs(tp.tp1) || occurs(tp.tp2) - case tp: TypeParamRef => - (tp eq param) || entry(tp).match - case NoType => false - case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) - case inst => occurs(inst) - case tp: TypeVar => - occurs(tp.underlying) - case TypeBounds(lo, hi) => - occurs(lo) || occurs(hi) - case _ => - val tp1 = tp.dealias - (tp1 ne tp) && occurs(tp1) - - occurs(inst) - end occursAtToplevel - // ---------- Exploration -------------------------------------------------------- def domainLambdas: List[TypeLambda] = boundsMap.keys @@ -891,57 +604,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // ---------- Checking ----------------------------------------------- - def checkWellFormed()(using Context): this.type = - - /** Check that each dependency A -> B in coDeps and contraDeps corresponds to - * a reference to A at the right variance in the 
entry of B. - */ - def checkBackward(deps: ReverseDeps, depsName: String, v: Int)(using Context): Unit = - deps.foreachBinding { (param, params) => - for srcParam <- params do - assert(contains(srcParam) && occursAtVariance(param, v, in = entry(srcParam)), - i"wrong $depsName backwards reference $param -> $srcParam in $thisConstraint") - } - - /** A type traverser that checks that all references bound in the constraint - * are accounted for in coDeps and/or contraDeps. - */ - def checkForward(srcParam: TypeParamRef)(using Context) = - new TypeTraverser with ConstraintAwareTraversal[Unit]: - val seen = util.HashSet[LazyRef]() - def traverse(t: Type): Unit = t match - case param: TypeParamRef if param ne srcParam => - def check(deps: ReverseDeps, directDeps: List[TypeParamRef], depsName: String) = - assert(deps.at(param).contains(srcParam) || directDeps.contains(srcParam), - i"missing $depsName backwards reference $param -> $srcParam in $thisConstraint") - entry(param) match - case _: TypeBounds => - if variance >= 0 then check(contraDeps, upper(param), "contra") - if variance <= 0 then check(coDeps, lower(param), "co") - case tp => - traverse(tp) - case tp: LazyRef => - if !seen.contains(tp) then - seen += tp - traverse(tp.ref) - case _ => traverseChildren(t) - - /** Does `param` occur at variance `v` or else at variance 0 in entry `in`? 
*/ - def occursAtVariance(param: TypeParamRef, v: Int, in: Type)(using Context): Boolean = - val test = new TypeAccumulator[Boolean] with ConstraintAwareTraversal[Boolean]: - def apply(x: Boolean, t: Type): Boolean = - if x then true - else t match - case t: TypeParamRef => - entry(t) match - case _: TypeBounds => - t == param && (variance == 0 || variance == v) - case e => - apply(x, e) - case _ => - foldOver(x, t) - test(false, in) - + def checkNonCyclic()(using Context): this.type = if Config.checkConstraintsNonCyclic then domainParams.foreach { param => val inst = entry(param) @@ -950,13 +613,28 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(!occursAtToplevel(param, inst), s"cyclic bound for $param: ${inst.show} in ${this.show}") } - if Config.checkConstraintDeps || ctx.settings.YcheckConstraintDeps.value then - checkBackward(coDeps, "co", -1) - checkBackward(contraDeps, "contra", +1) - domainParams.foreach(p => if contains(p) then checkForward(p).traverse(entry(p))) - this - end checkWellFormed + + def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = + + def occurs(tp: Type)(using Context): Boolean = tp match + case tp: AndOrType => + occurs(tp.tp1) || occurs(tp.tp2) + case tp: TypeParamRef => + (tp eq param) || entry(tp).match + case NoType => false + case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) + case inst => occurs(inst) + case tp: TypeVar => + occurs(tp.underlying) + case TypeBounds(lo, hi) => + occurs(lo) || occurs(hi) + case _ => + val tp1 = tp.dealias + (tp1 ne tp) && occurs(tp1) + + occurs(inst) + end occursAtToplevel override def checkClosed()(using Context): Unit = @@ -986,16 +664,13 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val constrainedText = " constrained types = " + domainLambdas.mkString("\n") val boundsText = - "\n bounds = " + { + " bounds = " + { val assocs = for (param <- domainParams) yield s"${param.binder.paramNames(param.paramNum)}: 
${entryText(entry(param))}" assocs.mkString("\n") } - val depsText = - "\n coDeps = " + coDeps + - "\n contraDeps = " + contraDeps - constrainedText + boundsText + depsText + constrainedText + "\n" + boundsText } } diff --git a/tests/pos-with-compiler-cc/dotc/core/Phases.scala b/tests/pos-with-compiler-cc/dotc/core/Phases.scala index 205554e418ed..3744b1f21122 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Phases.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Phases.scala @@ -285,7 +285,7 @@ object Phases { final def isTyper(phase: Phase): Boolean = phase.id == typerPhase.id } - abstract class Phase { + abstract class Phase extends caps.Pure { /** A name given to the `Phase` that can be used to debug the compiler. For * instance, it is possible to print trees after a given phase using: diff --git a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala index 99076b422358..6139f1a12656 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala @@ -64,7 +64,7 @@ object Scopes { * or to delete them. These methods are provided by subclass * MutableScope. 
*/ - abstract class Scope extends printing.Showable { + abstract class Scope extends printing.Showable, caps.Pure { /** The last scope-entry from which all others are reachable via `prev` */ private[dotc] def lastEntry: ScopeEntry | Null diff --git a/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala b/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala index a4f1bf3c5e80..ad100962dca6 100644 --- a/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala +++ b/tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala @@ -25,6 +25,7 @@ import reporting._ import collection.mutable import transform.TypeUtils._ import cc.{CapturingType, derivedCapturingType} +import language.experimental.pureFunctions import scala.annotation.internal.sharable @@ -2429,6 +2430,8 @@ object SymDenotations { ) if compiledNow.exists then compiledNow else + //val union = (d1: Set[AbstractFile], d2: Set[AbstractFile]) => d1.union(d2) + // !cc! need to break `u` out into separate definition, writing `_ union _` below gives an error val assocFiles = multi.aggregate(d => Set(d.symbol.associatedFile.nn), _ union _) if assocFiles.size == 1 then multi // they are all overloaded variants from the same file @@ -2634,8 +2637,8 @@ object SymDenotations { * of these function types. 
*/ abstract class LazyType extends UncachedGroundType - with (Symbol => LazyType) - with ((TermSymbol, ClassSymbol) => LazyType) { self => + with (Symbol -> LazyType) + with ((TermSymbol, ClassSymbol) -> LazyType) { self => /** Sets all missing fields of given denotation */ def complete(denot: SymDenotation)(using Context): Unit @@ -2646,8 +2649,8 @@ object SymDenotations { private var myDecls: Scope = EmptyScope private var mySourceModule: Symbol | Null = null private var myModuleClass: Symbol | Null = null - private var mySourceModuleFn: Context ?=> Symbol = LazyType.NoSymbolFn - private var myModuleClassFn: Context ?=> Symbol = LazyType.NoSymbolFn + private var mySourceModuleFn: Context ?-> Symbol = LazyType.NoSymbolFn + private var myModuleClassFn: Context ?-> Symbol = LazyType.NoSymbolFn /** The type parameters computed by the completer before completion has finished */ def completerTypeParams(sym: Symbol)(using Context): List[TypeParamInfo] = @@ -2663,8 +2666,8 @@ object SymDenotations { myModuleClass.nn def withDecls(decls: Scope): this.type = { myDecls = decls; this } - def withSourceModule(sourceModuleFn: Context ?=> Symbol): this.type = { mySourceModuleFn = sourceModuleFn; this } - def withModuleClass(moduleClassFn: Context ?=> Symbol): this.type = { myModuleClassFn = moduleClassFn; this } + def withSourceModule(sourceModuleFn: Context ?-> Symbol): this.type = { mySourceModuleFn = sourceModuleFn; this } + def withModuleClass(moduleClassFn: Context ?-> Symbol): this.type = { myModuleClassFn = moduleClassFn; this } override def toString: String = getClass.toString diff --git a/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala b/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala index c5ae98853061..5af45a016891 100644 --- a/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala +++ b/tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala @@ -23,6 +23,7 @@ import ast.desugar import parsing.JavaParsers.OutlineJavaParser import 
parsing.Parsers.OutlineParser +import language.experimental.pureFunctions object SymbolLoaders { @@ -211,7 +212,10 @@ object SymbolLoaders { override def sourceModule(using Context): TermSymbol = _sourceModule def description(using Context): String = "package loader " + sourceModule.fullName - private var enterFlatClasses: Option[() => Context ?=> Unit] = None + private var enterFlatClasses: Option[() -> Context ?-> Unit] = None + // Having a pure function type returning `Unit` does look weird. + // The point is that the function should not have any effect that matters for + // the compiler, in particular it should not capture a context. Stats.record("package scopes") diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala b/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala index 658bf4122aa4..95830fcf4b00 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala @@ -24,6 +24,7 @@ import typer.Applications.productSelectorTypes import reporting.trace import annotation.constructorOnly import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam, isAlwaysPure} +import language.experimental.pureFunctions /** Provides methods to compare types. 
*/ @@ -2400,8 +2401,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling NoType } - private def andTypeGen(tp1: Type, tp2: Type, op: (Type, Type) => Type, - original: (Type, Type) => Type = _ & _, isErased: Boolean = ctx.erasedTypes): Type = trace(s"andTypeGen(${tp1.show}, ${tp2.show})", subtyping, show = true) { + private def andTypeGen(tp1: Type, tp2: Type, op: (Type, Type) -> Type, + original: (Type, Type) -> Type = _ & _, isErased: Boolean = ctx.erasedTypes): Type = trace(s"andTypeGen(${tp1.show}, ${tp2.show})", subtyping, show = true) { val t1 = distributeAnd(tp1, tp2) if (t1.exists) t1 else { @@ -2462,7 +2463,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * [X1, ..., Xn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn]) */ def liftIfHK(tp1: Type, tp2: Type, - op: (Type, Type) => Type, original: (Type, Type) => Type, combineVariance: (Variance, Variance) => Variance) = { + op: (Type, Type) -> Type, original: (Type, Type) -> Type, combineVariance: (Variance, Variance) -> Variance) = { val tparams1 = tp1.typeParams val tparams2 = tp2.typeParams def applied(tp: Type) = tp.appliedTo(tp.typeParams.map(_.paramInfoAsSeenFrom(tp))) @@ -2977,8 +2978,8 @@ object TypeComparer { comparing(_.provablyDisjoint(tp1, tp2)) def liftIfHK(tp1: Type, tp2: Type, - op: (Type, Type) => Type, original: (Type, Type) => Type, - combineVariance: (Variance, Variance) => Variance)(using Context): Type = + op: (Type, Type) -> Type, original: (Type, Type) -> Type, + combineVariance: (Variance, Variance) -> Variance)(using Context): Type = comparing(_.liftIfHK(tp1, tp2, op, original, combineVariance)) def constValue(tp: Type)(using Context): Option[Constant] = diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala b/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala index a3b594eb0f09..4d8aae319d27 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala +++ 
b/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala @@ -13,6 +13,7 @@ import Decorators._ import reporting._ import ast.untpd import config.Printers.cyclicErrors +import language.experimental.pureFunctions class TypeError(msg: String) extends Exception(msg) { def this() = this("") @@ -43,7 +44,7 @@ class MissingType(pre: Type, name: Name) extends TypeError { } } -class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int) +class RecursionOverflow(val op: String, details: -> String, val previous: Throwable, val weight: Int) extends TypeError { def explanation: String = s"$op $details" @@ -91,7 +92,7 @@ extends TypeError { // Beware: Since this object is only used when handling a StackOverflow, this code // cannot consume significant amounts of stack. object handleRecursive { - def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(using Context): Nothing = + def apply(op: String, details: -> String, exc: Throwable, weight: Int = 1)(using Context): Nothing = if (ctx.settings.YnoDecodeStacktraces.value) throw exc else diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala b/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala index 9363b27b4dde..05ce8cefc285 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala @@ -23,6 +23,7 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe +import language.experimental.pureFunctions object TypeOps: @@ -225,18 +226,16 @@ object TypeOps: */ def orDominator(tp: Type)(using Context): Type = { - /** a faster version of cs1 intersect cs2 */ + /** a faster version of cs1 intersect cs2 that treats bottom types correctly */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = - val cs2AsSet = BaseClassSet(cs2) - cs1.filter(cs2AsSet.contains) - - /** a version of 
Type#baseClasses that treats bottom types correctly */ - def orBaseClasses(tp: Type): List[ClassSymbol] = tp.stripTypeVar match - case OrType(tp1, tp2) => - if tp1.isBottomType && (tp1 frozen_<:< tp2) then orBaseClasses(tp2) - else if tp2.isBottomType && (tp2 frozen_<:< tp1) then orBaseClasses(tp1) - else intersect(orBaseClasses(tp1), orBaseClasses(tp2)) - case _ => tp.baseClasses + if cs1.head == defn.NothingClass then cs2 + else if cs2.head == defn.NothingClass then cs1 + else if cs1.head == defn.NullClass && !ctx.explicitNulls && cs2.head.derivesFrom(defn.ObjectClass) then cs2 + else if cs2.head == defn.NullClass && !ctx.explicitNulls && cs1.head.derivesFrom(defn.ObjectClass) then cs1 + else + val cs2AsSet = new util.HashSet[ClassSymbol](128) + cs2.foreach(cs2AsSet += _) + cs1.filter(cs2AsSet.contains) /** The minimal set of classes in `cs` which derive all other classes in `cs` */ def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match { @@ -371,7 +370,7 @@ object TypeOps: } // Step 3: Intersect base classes of both sides - val commonBaseClasses = orBaseClasses(tp) + val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect) val doms = dominators(commonBaseClasses, Nil) def baseTp(cls: ClassSymbol): Type = tp.baseType(cls).mapReduceOr(identity)(mergeRefinedOrApplied) @@ -526,7 +525,7 @@ object TypeOps: * does not update `ctx.nestingLevel` when entering a block so I'm leaving * this as Future Work™. 
*/ - def avoid(tp: Type, symsToAvoid: => List[Symbol])(using Context): Type = { + def avoid(tp: Type, symsToAvoid: -> List[Symbol])(using Context): Type = { val widenMap = new AvoidMap { @threadUnsafe lazy val forbidden = symsToAvoid.toSet def toAvoid(tp: NamedType) = diff --git a/tests/pos-with-compiler-cc/dotc/core/Types.scala b/tests/pos-with-compiler-cc/dotc/core/Types.scala index 29a2496ab2a7..e34130a052ba 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Types.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Types.scala @@ -43,6 +43,8 @@ import scala.annotation.internal.sharable import scala.annotation.threadUnsafe import dotty.tools.dotc.transform.SymUtils._ +import language.experimental.pureFunctions +import annotation.retains object Types { @@ -90,7 +92,7 @@ object Types { * * Note: please keep in sync with copy in `docs/docs/internals/type-system.md`. */ - abstract class Type extends Hashable with printing.Showable { + abstract class Type extends Hashable, printing.Showable, caps.Pure { // ----- Tests ----------------------------------------------------- @@ -2148,7 +2150,7 @@ object Types { /** A trait for proto-types, used as expected types in typer */ trait ProtoType extends Type { def isMatchedBy(tp: Type, keepConstraint: Boolean = false)(using Context): Boolean - def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T def map(tm: TypeMap)(using Context): ProtoType /** If this prototype captures a context, the same prototype except that the result @@ -2447,8 +2449,6 @@ object Types { } private def checkDenot()(using Context) = {} - //if name.toString == "getConstructor" then - // println(i"set denot of $this to ${denot.info}, ${denot.getClass}, ${Phases.phaseOf(denot.validFor.lastPhaseId)} at ${ctx.phase}") private def checkSymAssign(sym: Symbol)(using Context) = { def selfTypeOf(sym: Symbol) = @@ -3012,7 +3012,7 @@ object Types { } // `refFn` can be null only if 
`computed` is true. - case class LazyRef(private var refFn: (Context => (Type | Null)) | Null) extends UncachedProxyType with ValueType { + case class LazyRef(private var refFn: (Context -> (Type | Null)) | Null) extends UncachedProxyType with ValueType { private var myRef: Type | Null = null private var computed = false @@ -3052,7 +3052,7 @@ object Types { override def hashCode: Int = System.identityHashCode(this) } object LazyRef: - def of(refFn: Context ?=> (Type | Null)): LazyRef = LazyRef(refFn(using _)) + def of(refFn: Context ?-> (Type | Null)): LazyRef = LazyRef(refFn(using _)) // --- Refined Type and RecType ------------------------------------------------ @@ -3148,7 +3148,7 @@ object Types { * * Where `RecThis(...)` points back to the enclosing `RecType`. */ - class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType { + class RecType(@constructorOnly parentExp: RecType => Type) extends RefinedOrRecType with BindingType { // See discussion in findMember#goRec why these vars are needed private[Types] var opened: Boolean = false @@ -3872,8 +3872,8 @@ object Types { } abstract case class MethodType(paramNames: List[TermName])( - paramInfosExp: MethodType => List[Type], - resultTypeExp: MethodType => Type) + @constructorOnly paramInfosExp: MethodType => List[Type], + @constructorOnly resultTypeExp: MethodType => Type) extends MethodOrPoly with TermLambda with NarrowCached { thisMethodType => type This = MethodType @@ -3899,7 +3899,10 @@ object Types { protected def prefixString: String = companion.prefixString } - final class CachedMethodType(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type, val companion: MethodTypeCompanion) + final class CachedMethodType(paramNames: List[TermName])( + @constructorOnly paramInfosExp: MethodType => List[Type], + @constructorOnly resultTypeExp: MethodType => Type, + val companion: MethodTypeCompanion) extends MethodType(paramNames)(paramInfosExp, 
resultTypeExp) abstract class LambdaTypeCompanion[N <: Name, PInfo <: Type, LT <: LambdaType] { @@ -4047,7 +4050,8 @@ object Types { * Variances are stored in the `typeParams` list of the lambda. */ class HKTypeLambda(val paramNames: List[TypeName], @constructorOnly variances: List[Variance])( - paramInfosExp: HKTypeLambda => List[TypeBounds], resultTypeExp: HKTypeLambda => Type) + @constructorOnly paramInfosExp: HKTypeLambda => List[TypeBounds], + @constructorOnly resultTypeExp: HKTypeLambda => Type) extends HKLambda with TypeLambda { type This = HKTypeLambda def companion: HKTypeLambda.type = HKTypeLambda @@ -4115,7 +4119,8 @@ object Types { * except it applies to terms and parameters do not have variances. */ class PolyType(val paramNames: List[TypeName])( - paramInfosExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) + @constructorOnly paramInfosExp: PolyType => List[TypeBounds], + @constructorOnly resultTypeExp: PolyType => Type) extends MethodOrPoly with TypeLambda { type This = PolyType @@ -5299,7 +5304,7 @@ object Types { val et = new PreviousErrorType ctx.base.errorTypeMsg(et) = m et - def apply(s: => String)(using Context): ErrorType = + def apply(s: -> String)(using Context): ErrorType = apply(s.toMessage) end ErrorType @@ -5497,14 +5502,6 @@ object Types { stop == StopAt.Static && tp.currentSymbol.isStatic && isStaticPrefix(tp.prefix) || stop == StopAt.Package && tp.currentSymbol.is(Package) } - - /** The type parameters of the constructor of this applied type. - * Overridden in OrderingConstraint's ConstraintAwareTraversal to take account - * of instantiations in the constraint that are not yet propagated to the - * instance types of type variables. - */ - protected def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = - tp.tyconTypeParams end VariantTraversal /** A supertrait for some typemaps that are bijections. Used for capture checking. 
@@ -5533,7 +5530,7 @@ object Types { end BiTypeMap abstract class TypeMap(implicit protected var mapCtx: Context) - extends VariantTraversal with (Type => Type) { thisMap => + extends VariantTraversal with (Type -> Type) { thisMap: TypeMap => def apply(tp: Type): Type @@ -5612,11 +5609,17 @@ object Types { case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else - val prefix1 = atVariance(variance max 0)(this(tp.prefix)) // see comment of TypeAccumulator's applyToPrefix + val prefix1 = atVariance(variance max 0)(this(tp.prefix)) + // A prefix is never contravariant. Even if say `p.A` is used in a contravariant + // context, we cannot assume contravariance for `p` because `p`'s lower + // bound might not have a binding for `A` (e.g. the lower bound could be `Nothing`). + // By contrast, covariance does translate to the prefix, since we have that + // if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member + // of `p`'s upper bound. derivedSelect(tp, prefix1) case tp: AppliedType => - derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tyconTypeParams(tp))) + derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tp.tyconTypeParams)) case tp: LambdaType => mapOverLambda(tp) @@ -5722,7 +5725,7 @@ object Types { protected def mapClassInfo(tp: ClassInfo): Type = derivedClassInfo(tp, this(tp.prefix)) - def andThen(f: Type => Type): TypeMap = new TypeMap { + def andThen(f: Type -> Type): TypeMap = new TypeMap { override def stopAt = thisMap.stopAt def apply(tp: Type) = f(thisMap(tp)) } @@ -5943,7 +5946,7 @@ object Types { case nil => true } - if (distributeArgs(args, tyconTypeParams(tp))) + if (distributeArgs(args, tp.tyconTypeParams)) range(tp.derivedAppliedType(tycon, loBuf.toList), tp.derivedAppliedType(tycon, hiBuf.toList)) else if tycon.isLambdaSub || args.exists(isRangeOfNonTermTypes) then @@ -6059,22 +6062,14 @@ object Types { abstract class TypeAccumulator[T](implicit protected val accCtx: Context) extends VariantTraversal 
with ((T, Type) => T) { + this: TypeAccumulator[T] @annotation.retains(caps.*) => def apply(x: T, tp: Type): T protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations - /** A prefix is never contravariant. Even if say `p.A` is used in a contravariant - * context, we cannot assume contravariance for `p` because `p`'s lower - * bound might not have a binding for `A`, since the lower bound could be `Nothing`. - * By contrast, covariance does translate to the prefix, since we have that - * if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member - * of `p`'s upper bound. - * Overridden in OrderingConstraint's ConstraintAwareTraversal, where a - * more relaxed scheme is used. - */ - protected def applyToPrefix(x: T, tp: NamedType): T = - atVariance(variance max 0)(this(x, tp.prefix)) + protected final def applyToPrefix(x: T, tp: NamedType): T = + atVariance(variance max 0)(this(x, tp.prefix)) // see remark on NamedType case in TypeMap def foldOver(x: T, tp: Type): T = { record(s"foldOver $getClass") @@ -6097,7 +6092,7 @@ object Types { } foldArgs(acc, tparams.tail, args.tail) } - foldArgs(this(x, tycon), tyconTypeParams(tp), args) + foldArgs(this(x, tycon), tp.tyconTypeParams, args) case _: BoundType | _: ThisType => x @@ -6139,7 +6134,7 @@ object Types { foldOver(x2, tp.cases) case CapturingType(parent, refs) => - (this(x, parent) /: refs.elems)(this) + (this(x, parent) /: refs.elems)(apply) // !cc! 
does not work under apply := this case AnnotatedType(underlying, annot) => this(applyToAnnot(x, annot), underlying) diff --git a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala index 33a1e1dd6e73..1cb4970265a7 100644 --- a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala +++ b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala @@ -23,6 +23,7 @@ import scala.annotation.switch import typer.Checking.checkNonCyclic import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal +import language.experimental.pureFunctions object ClassfileParser { /** Marker trait for unpicklers that can be embedded in classfiles. */ @@ -624,10 +625,10 @@ class ClassfileParser( case (name, tag: EnumTag) => untpd.NamedArg(name.name, tag.toTree).withSpan(NoSpan) } - protected var mySym: Symbol | (Context ?=> Symbol) = + protected var mySym: Symbol | (Context ?-> Symbol) = (ctx: Context) ?=> annotType.classSymbol - protected var myTree: Tree | (Context ?=> Tree) = + protected var myTree: Tree | (Context ?-> Tree) = (ctx: Context) ?=> untpd.resolveConstructor(annotType, args) def untpdTree(using Context): untpd.Tree = diff --git a/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala b/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala index 70bdec7780e2..5cc172c65439 100644 --- a/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala +++ b/tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala @@ -11,6 +11,7 @@ import TastyBuffer.NameRef import scala.collection.mutable import Names.{TermName, termName, EmptyTermName} import NameKinds._ +import language.experimental.pureFunctions object TastyUnpickler { @@ -18,7 +19,7 @@ object TastyUnpickler { def unpickle(reader: TastyReader, nameAtRef: NameTable): R } - class NameTable extends (NameRef => TermName) { + class NameTable extends (NameRef -> TermName) { private val 
names = new mutable.ArrayBuffer[TermName] def add(name: TermName): mutable.ArrayBuffer[TermName] = names += name def apply(ref: NameRef): TermName = names(ref.index) diff --git a/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala b/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala index 617a2c55a7ad..69bd0d95ba3a 100644 --- a/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala +++ b/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala @@ -46,6 +46,7 @@ import dotty.tools.tasty.TastyFormat._ import scala.annotation.constructorOnly import scala.annotation.internal.sharable +import language.experimental.pureFunctions /** Unpickler for typed trees * @param reader the reader from which to unpickle @@ -663,9 +664,9 @@ class TreeUnpickler(reader: TastyReader, /** Read modifier list into triplet of flags, annotations and a privateWithin * boundary symbol. */ - def readModifiers(end: Addr)(using Context): (FlagSet, List[Symbol => Annotation], Symbol) = { + def readModifiers(end: Addr)(using Context): (FlagSet, List[Symbol -> Annotation], Symbol) = { var flags: FlagSet = EmptyFlags - var annotFns: List[Symbol => Annotation] = Nil + var annotFns: List[Symbol -> Annotation] = Nil var privateWithin: Symbol = NoSymbol while (currentAddr.index != end.index) { def addFlag(flag: FlagSet) = { @@ -732,7 +733,7 @@ class TreeUnpickler(reader: TastyReader, private def readWithin(using Context): Symbol = readType().typeSymbol - private def readAnnot(using Context): Symbol => Annotation = + private def readAnnot(using Context): Symbol -> Annotation = readByte() val end = readEnd() val tp = readType() @@ -1450,10 +1451,10 @@ class TreeUnpickler(reader: TastyReader, setSpan(start, CaseDef(pat, guard, rhs)) } - def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Trees.Lazy[T] = + def readLater[T <: AnyRef](end: Addr, op: TreeReader -> Context ?-> T)(using Context): Trees.Lazy[T] = readLaterWithOwner(end, 
op)(ctx.owner) - def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Symbol => Trees.Lazy[T] = { + def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader -> Context ?-> T)(using Context): Symbol -> Trees.Lazy[T] = { val localReader = fork goto(end) val mode = ctx.mode diff --git a/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala b/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala index c148ff5f9bca..5bf526bd4bdd 100644 --- a/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala +++ b/tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala @@ -11,6 +11,7 @@ import dotty.tools.dotc.reporting._ import dotty.tools.io.AbstractFile import scala.quoted.runtime.impl.QuotesImpl +import caps.unsafe.unsafeUnbox /** * Decompiler to be used with IDEs @@ -40,7 +41,7 @@ class IDEDecompilerDriver(val settings: List[String]) extends dotc.Driver { val unit = ctx.run.nn.units.head val decompiled = QuotesImpl.showDecompiledTree(unit.tpdTree) - val tree = new TastyHTMLPrinter(unit.pickled.head._2()).showContents() + val tree = new TastyHTMLPrinter(unit.pickled.head._2.unsafeUnbox()).showContents() reporter.removeBufferedMessages.foreach(message => System.err.println(message)) (tree, decompiled) diff --git a/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala b/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala index bea42e82ce6f..a23af052ca24 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala @@ -23,6 +23,7 @@ import util.Spans.Span import dotty.tools.dotc.transform.Splicer import quoted.QuoteUtils import scala.annotation.constructorOnly +import language.experimental.pureFunctions /** General support for inlining */ object Inliner: @@ -108,8 +109,8 @@ object Inliner: // They are generally left alone (not mapped further, and if they wrap a type // the type Inlined wrapper gets dropped 
private class InlinerMap( - typeMap: Type => Type, - treeMap: Tree => Tree, + typeMap: Type -> Type, + treeMap: Tree -> Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], @@ -118,8 +119,8 @@ object Inliner: typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, InlineCopier()): override def copy( - typeMap: Type => Type, - treeMap: Tree => Tree, + typeMap: Type -> Type, + treeMap: Tree -> Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], @@ -170,7 +171,7 @@ class Inliner(val call: tpd.Tree)(using Context): /** A map from references to (type and value) parameters of the inlineable method * to their corresponding argument or proxy references, as given by `paramBinding`. */ - private[inlines] val paramProxy = new mutable.HashMap[Type, Type] + private[inlines] val paramProxy: mutable.HashMap[Type, Type] = new mutable.HashMap /** A map from the classes of (direct and outer) this references in `rhsToInline` * to references of their proxies. diff --git a/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala b/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala index 1806cdfc909b..8be23b932e98 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala @@ -85,10 +85,7 @@ object Inlines: if (tree.symbol == defn.CompiletimeTesting_typeChecks) return Intrinsics.typeChecks(tree) if (tree.symbol == defn.CompiletimeTesting_typeCheckErrors) return Intrinsics.typeCheckErrors(tree) - if ctx.isAfterTyper then - // During typer we wait with cross version checks until PostTyper, in order - // not to provoke cyclic references. See i16116 for a test case. 
- CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) + CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) if tree.symbol.isConstructor then return tree // error already reported for the inline constructor definition diff --git a/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala b/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala index 7e47bbfdfa8a..db52712c39e2 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala @@ -22,6 +22,7 @@ import transform.SymUtils.* import config.Printers.inlining import util.Property import dotty.tools.dotc.transform.TreeMapWithStages._ +import language.experimental.pureFunctions object PrepareInlineable { import tpd._ @@ -262,7 +263,7 @@ object PrepareInlineable { * to have the inline method as owner. */ def registerInlineInfo( - inlined: Symbol, treeExpr: Context ?=> Tree)(using Context): Unit = + inlined: Symbol, treeExpr: Context ?-> Tree)(using Context): Unit = inlined.unforcedAnnotation(defn.BodyAnnot) match { case Some(ann: ConcreteBodyAnnotation) => case Some(ann: LazyBodyAnnotation) if ann.isEvaluated || ann.isEvaluating => diff --git a/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala b/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala index a198cccc85cc..0b1a2bdcd679 100644 --- a/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala +++ b/tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala @@ -33,6 +33,7 @@ import config.Feature import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} import config.SourceVersion._ import config.SourceVersion +import language.experimental.pureFunctions object Parsers { @@ -143,10 +144,10 @@ object Parsers { syntaxError(msg, Span(offset, offset + length)) lastErrorOffset = in.offset - def syntaxError(msg: => String, offset: Int): Unit = + def syntaxError(msg: -> String, offset: Int): Unit = syntaxError(msg.toMessage, offset) 
- def syntaxError(msg: => String): Unit = + def syntaxError(msg: -> String): Unit = syntaxError(msg, in.offset) /** Unconditionally issue an error at given span, without @@ -155,7 +156,7 @@ object Parsers { def syntaxError(msg: Message, span: Span): Unit = report.error(msg, source.atSpan(span)) - def syntaxError(msg: => String, span: Span): Unit = + def syntaxError(msg: -> String, span: Span): Unit = syntaxError(msg.toMessage, span) def unimplementedExpr(using Context): Select = @@ -288,7 +289,7 @@ object Parsers { syntaxError(msg, offset) skip() - def syntaxErrorOrIncomplete(msg: => String): Unit = + def syntaxErrorOrIncomplete(msg: -> String): Unit = syntaxErrorOrIncomplete(msg.toMessage, in.offset) def syntaxErrorOrIncomplete(msg: Message, span: Span): Unit = diff --git a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala index 0540ef27a4d3..a1165c44c09e 100644 --- a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala +++ b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala @@ -1550,7 +1550,7 @@ object Scanners { * InBraces a pair of braces { ... } * Indented a pair of ... 
tokens */ - abstract class Region(val closedBy: Token): + abstract class Region(val closedBy: Token) extends caps.Pure: /** The region enclosing this one, or `null` for the outermost region */ def outer: Region | Null diff --git a/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala b/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala index ceb5afdea750..091f8bfb5c16 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala @@ -7,7 +7,7 @@ import core.Contexts._ object Highlighting { - abstract class Highlight(private val highlight: String) { + sealed abstract class Highlight(private val highlight: String) { def text: String def show(using Context): String = if ctx.useColors then highlight + text + Console.RESET else text diff --git a/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala b/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala index f0479f818c9f..1a65b48ded41 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala @@ -286,7 +286,7 @@ class PlainPrinter(_ctx: Context) extends Printer { } "LazyRef(" ~ refTxt ~ ")" case Range(lo, hi) => - toText(lo) ~ ".." ~ toText(hi) + toText(lo) ~ " .. 
" ~ toText(hi) case _ => tp.fallbackToText(this) } @@ -698,9 +698,8 @@ class PlainPrinter(_ctx: Context) extends Printer { Text(ups.map(toText), ", ") Text(deps, "\n") } - val depsText = if Config.showConstraintDeps then c.depsToString else "" //Printer.debugPrintUnique = false - Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText, depsText)) + Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText)) finally ctx.typerState.constraint = savedConstraint diff --git a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala index 326630844dde..25429c8fc01b 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala @@ -15,7 +15,7 @@ import scala.annotation.internal.sharable /** The base class of all printers */ -abstract class Printer { +abstract class Printer extends caps.Pure { private var prec: Precedence = GlobalPrec diff --git a/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala b/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala index 61bee4d9f32a..2c4537b238a5 100644 --- a/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala +++ b/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala @@ -107,7 +107,7 @@ object AsyncHelper { var lastEndNs = 0L } - val localData = new ThreadLocal[ThreadProfileData] + val localData: ThreadLocal[ThreadProfileData] = new ThreadLocal[ThreadProfileData] private class SinglePhaseInstrumentedThreadPoolExecutor ( corePoolSize: Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, diff --git a/tests/pos-with-compiler-cc/dotc/report.scala b/tests/pos-with-compiler-cc/dotc/report.scala index 00399ecbfd0a..636da444696c 100644 --- a/tests/pos-with-compiler-cc/dotc/report.scala +++ b/tests/pos-with-compiler-cc/dotc/report.scala @@ -9,15 +9,15 @@ import config.SourceVersion import ast._ import config.Feature.sourceVersion import 
java.lang.System.currentTimeMillis - +import language.experimental.pureFunctions object report: /** For sending messages that are printed only if -verbose is set */ - def inform(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def inform(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = if ctx.settings.verbose.value then echo(msg, pos) - def echo(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def echo(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = ctx.reporter.report(new Info(msg.toMessage, pos.sourcePos)) private def issueWarning(warning: Warning)(using Context): Unit = @@ -26,28 +26,28 @@ object report: def deprecationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new DeprecationWarning(msg, pos.sourcePos)) - def deprecationWarning(msg: => String, pos: SrcPos)(using Context): Unit = + def deprecationWarning(msg: -> String, pos: SrcPos)(using Context): Unit = deprecationWarning(msg.toMessage, pos) def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) - def migrationWarning(msg: => String, pos: SrcPos)(using Context): Unit = + def migrationWarning(msg: -> String, pos: SrcPos)(using Context): Unit = migrationWarning(msg.toMessage, pos) def uncheckedWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new UncheckedWarning(msg, pos.sourcePos)) - def uncheckedWarning(msg: => String, pos: SrcPos)(using Context): Unit = + def uncheckedWarning(msg: -> String, pos: SrcPos)(using Context): Unit = uncheckedWarning(msg.toMessage, pos) def featureWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new FeatureWarning(msg, pos.sourcePos)) - def featureWarning(msg: => String, pos: SrcPos)(using Context): Unit = + def featureWarning(msg: -> String, pos: SrcPos)(using Context): Unit = featureWarning(msg.toMessage, pos) - def featureWarning(feature: String, 
featureDescription: => String, + def featureWarning(feature: String, featureDescription: -> String, featureUseSite: Symbol, required: Boolean, pos: SrcPos)(using Context): Unit = { val req = if (required) "needs to" else "should" val fqname = s"scala.language.$feature" @@ -70,7 +70,7 @@ object report: def warning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new Warning(msg, addInlineds(pos))) - def warning(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def warning(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = warning(msg.toMessage, pos) def error(msg: Message, pos: SrcPos)(using Context): Unit = @@ -78,7 +78,7 @@ object report: ctx.reporter.report(new Error(msg, fullPos)) if ctx.settings.YdebugError.value then Thread.dumpStack() - def error(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def error(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = error(msg.toMessage, pos) def error(ex: TypeError, pos: SrcPos)(using Context): Unit = @@ -91,14 +91,14 @@ object report: if sourceVersion.isMigrating && sourceVersion.ordinal <= from.ordinal then migrationWarning(msg, pos) else error(msg, pos) - def errorOrMigrationWarning(msg: => String, pos: SrcPos, from: SourceVersion)(using Context): Unit = + def errorOrMigrationWarning(msg: -> String, pos: SrcPos, from: SourceVersion)(using Context): Unit = errorOrMigrationWarning(msg.toMessage, pos, from) def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(errorFrom) then errorOrMigrationWarning(msg, pos, errorFrom) else if sourceVersion.isAtLeast(warnFrom) then warning(msg, pos) - def gradualErrorOrMigrationWarning(msg: => String, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = + def gradualErrorOrMigrationWarning(msg: -> String, pos: SrcPos, warnFrom: SourceVersion, 
errorFrom: SourceVersion)(using Context): Unit = gradualErrorOrMigrationWarning(msg.toMessage, pos, warnFrom, errorFrom) def restrictionError(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = @@ -111,27 +111,27 @@ object report: * See [[config.CompilerCommand#explainAdvanced]] for the exact meaning of * "contains" here. */ - def log(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def log(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = if (ctx.settings.Ylog.value.containsPhase(ctx.phase)) echo(s"[log ${ctx.phase}] $msg", pos) - def debuglog(msg: => String)(using Context): Unit = + def debuglog(msg: -> String)(using Context): Unit = if (ctx.debug) log(msg) - def informTime(msg: => String, start: Long)(using Context): Unit = { + def informTime(msg: -> String, start: Long)(using Context): Unit = { def elapsed = s" in ${currentTimeMillis - start}ms" informProgress(msg + elapsed) } - def informProgress(msg: => String)(using Context): Unit = + def informProgress(msg: -> String)(using Context): Unit = inform("[" + msg + "]") - def logWith[T](msg: => String)(value: T)(using Context): T = { + def logWith[T](msg: -> String)(value: T)(using Context): T = { log(msg + " " + value) value } - def debugwarn(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def debugwarn(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = if (ctx.settings.Ydebug.value) warning(msg, pos) private def addInlineds(pos: SrcPos)(using Context): SourcePosition = diff --git a/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala b/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala index a92da7821fab..b792aed4264e 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala @@ -12,6 +12,7 @@ import dotty.tools.dotc.util.SourcePosition import java.util.Optional import scala.util.chaining._ import 
core.Decorators.toMessage +import language.experimental.pureFunctions object Diagnostic: @@ -25,7 +26,7 @@ object Diagnostic: msg: Message, pos: SourcePosition ) extends Diagnostic(msg, pos, ERROR): - def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) + def this(str: -> String, pos: SourcePosition) = this(str.toMessage, pos) /** A sticky error is an error that should not be hidden by backtracking and * trying some alternative path. Typically, errors issued after catching @@ -49,7 +50,7 @@ object Diagnostic: msg: Message, pos: SourcePosition ) extends Diagnostic(msg, pos, INFO): - def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) + def this(str: -> String, pos: SourcePosition) = this(str.toMessage, pos) abstract class ConditionalWarning( msg: Message, diff --git a/tests/pos-with-compiler-cc/dotc/reporting/Message.scala b/tests/pos-with-compiler-cc/dotc/reporting/Message.scala index 9e397d606491..62ee4c54c354 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/Message.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/Message.scala @@ -4,10 +4,9 @@ package reporting import core.Contexts.*, core.Decorators.*, core.Mode import config.SourceVersion - import scala.language.unsafeNulls - import scala.annotation.threadUnsafe +import language.experimental.pureFunctions object Message { val nonSensicalStartTag: String = "" @@ -119,15 +118,15 @@ abstract class Message(val errorId: ErrorMessageID) { self => override val canExplain = self.canExplain } - def append(suffix: => String): Message = mapMsg(_ ++ suffix) + def append(suffix: -> String): Message = mapMsg(_ ++ suffix) - def mapMsg(f: String => String): Message = new Message(errorId): + def mapMsg(f: String -> String): Message = new Message(errorId): val kind = self.kind def msg = f(self.msg) def explain = self.explain override def canExplain = self.canExplain - def appendExplanation(suffix: => String): Message = new Message(errorId): + def appendExplanation(suffix: -> 
String): Message = new Message(errorId): val kind = self.kind def msg = self.msg def explain = self.explain ++ suffix @@ -144,7 +143,7 @@ abstract class Message(val errorId: ErrorMessageID) { self => } /** The fallback `Message` containing no explanation and having no `kind` */ -class NoExplanation(msgFn: => String) extends Message(ErrorMessageID.NoExplanationID) { +class NoExplanation(msgFn: -> String) extends Message(ErrorMessageID.NoExplanationID) { def msg: String = msgFn def explain: String = "" val kind: MessageKind = MessageKind.NoKind diff --git a/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala b/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala index 497e77ae4a7c..2cb9ce50cbbe 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala @@ -10,11 +10,13 @@ import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol} import dotty.tools.dotc.reporting.Diagnostic._ import dotty.tools.dotc.reporting.Message._ import dotty.tools.dotc.util.NoSourcePosition +import core.Decorators.toMessage import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable -import core.Decorators.toMessage +import scala.caps.unsafe.unsafeUnbox +import language.experimental.pureFunctions object Reporter { /** Convert a SimpleReporter into a real Reporter */ @@ -31,7 +33,7 @@ object Reporter { type ErrorHandler = (Diagnostic, Context) => Unit - private val defaultIncompleteHandler: ErrorHandler = + private val defaultIncompleteHandler: (Diagnostic, Context) -> Unit = (mc, ctx) => ctx.reporter.report(mc)(using ctx) /** Show prompt if `-Xprompt` is passed as a flag to the compiler */ @@ -84,13 +86,14 @@ abstract class Reporter extends interfaces.ReporterResult { private var incompleteHandler: ErrorHandler = defaultIncompleteHandler def withIncompleteHandler[T](handler: ErrorHandler)(op: => T): T = { - val saved = incompleteHandler + val saved = 
incompleteHandler.unsafeUnbox incompleteHandler = handler try op finally incompleteHandler = saved } - private def isIncompleteChecking = incompleteHandler ne defaultIncompleteHandler + private def isIncompleteChecking = + incompleteHandler.unsafeUnbox ne defaultIncompleteHandler private var _errorCount = 0 private var _warningCount = 0 @@ -203,7 +206,7 @@ abstract class Reporter extends interfaces.ReporterResult { def report(dia: Diagnostic)(using Context): Unit = issueIfNotSuppressed(dia) def incomplete(dia: Diagnostic)(using Context): Unit = - incompleteHandler(dia, ctx) + incompleteHandler.unsafeUnbox(dia, ctx) /** Summary of warnings and errors */ def summary: String = { diff --git a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala index 2197ea63a1c2..1e85a1c917b1 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala @@ -26,6 +26,7 @@ import ast.untpd import ast.tpd import transform.SymUtils._ import cc.CaptureSet.IdentityCaptRefMap +import language.experimental.pureFunctions /** Messages * ======== @@ -242,7 +243,7 @@ import cc.CaptureSet.IdentityCaptRefMap } } - class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) + class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: -> String*)(using Context) extends TypeMismatchMsg(found, expected)(TypeMismatchID): // replace constrained TypeParamRefs and their typevars by their bounds where possible @@ -298,7 +299,7 @@ import cc.CaptureSet.IdentityCaptRefMap end TypeMismatch - class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) + class NotAMember(site: Type, val name: Name, selected: String, addendum: -> String = "")(using Context) extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { //println(i"site = $site, decls = ${site.decls}, 
source = ${site.typeSymbol.sourceFile}") //DEBUG @@ -822,7 +823,7 @@ import cc.CaptureSet.IdentityCaptRefMap |Write `.to$targetType` instead.""".stripMargin def explain = "" - class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context) + class PatternMatchExhaustivity(uncoveredFn: -> String, hasMore: Boolean)(using Context) extends Message(PatternMatchExhaustivityID) { def kind = MessageKind.PatternMatchExhaustivity lazy val uncovered = uncoveredFn @@ -842,7 +843,7 @@ import cc.CaptureSet.IdentityCaptRefMap |""" } - class UncheckedTypePattern(msgFn: => String)(using Context) + class UncheckedTypePattern(msgFn: -> String)(using Context) extends PatternMatchMsg(UncheckedTypePatternID) { def msg = msgFn def explain = @@ -1972,7 +1973,7 @@ import cc.CaptureSet.IdentityCaptRefMap } } - class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { + class CyclicInheritance(symbol: Symbol, addendum: -> String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { def msg = em"Cyclic inheritance: $symbol extends itself$addendum" def explain = { val codeExample = "class A extends A" diff --git a/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala b/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala index e561b26abf6d..e75133c78759 100644 --- a/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala +++ b/tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala @@ -26,6 +26,7 @@ import java.io.PrintWriter import scala.collection.mutable import scala.util.hashing.MurmurHash3 import scala.util.chaining.* +import language.experimental.pureFunctions /** This phase sends a representation of the API of classes to sbt via callbacks. * @@ -594,7 +595,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { } } - def apiLazy(tp: => Type): api.Type = { + def apiLazy(tp: -> Type): api.Type = { // TODO: The sbt api needs a convenient way to make a lazy type. 
// For now, we repurpose Structure for this. val apiTp = lzy(Array(apiType(tp))) diff --git a/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala b/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala index f7b15dc21eb0..a554c8e5066f 100644 --- a/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala +++ b/tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala @@ -25,7 +25,7 @@ import xsbti.api.DependencyContext import xsbti.api.DependencyContext._ import scala.collection.{Set, mutable} - +import language.experimental.pureFunctions /** This phase sends information on classes' dependencies to sbt via callbacks. * @@ -189,7 +189,7 @@ object ExtractDependencies { sym.fullName.stripModuleClassSuffix.toString /** Report an internal error in incremental compilation. */ - def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def internalError(msg: -> String, pos: SrcPos = NoSourcePosition)(using Context): Unit = report.error(s"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) } @@ -461,7 +461,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT // Avoid cycles by remembering both the types (testcase: // tests/run/enum-values.scala) and the symbols of named types (testcase: // tests/pos-java-interop/i13575) we've seen before. 
- val seen = new mutable.HashSet[Symbol | Type] + private val seen = new mutable.HashSet[Symbol | Type] def traverse(tp: Type): Unit = if (!seen.contains(tp)) { seen += tp tp match { diff --git a/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala b/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala index 60aa76c91ed4..d4ee3dc9a68f 100644 --- a/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala +++ b/tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala @@ -5,6 +5,7 @@ package sbt import scala.annotation.tailrec import scala.collection.mutable.ListBuffer import xsbti.api +import language.experimental.pureFunctions /** Create and hold thunks. A thunk is a (potentially) unevaluated value * that may be evaluated once. @@ -24,7 +25,7 @@ private[sbt] trait ThunkHolder { /** Store the by-name parameter `s` in a `Lazy` container without evaluating it. * It will be forced by the next call to `forceThunks()` */ - def lzy[T <: AnyRef](t: => T): api.Lazy[T] = { + def lzy[T <: AnyRef](t: -> T): api.Lazy[T] = { val l = api.SafeLazy.apply(() => t).nn thunks += l l diff --git a/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala b/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala index 071efb1fb91c..916503e94203 100644 --- a/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala +++ b/tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala @@ -59,7 +59,7 @@ class ExtractSemanticDB extends Phase: private val localBodies = mutable.HashMap[Symbol, Tree]() /** The extracted symbol occurrences */ - val occurrences = new mutable.ListBuffer[SymbolOccurrence]() + val occurrences: mutable.ListBuffer[SymbolOccurrence] = new mutable.ListBuffer() /** The extracted symbol infos */ val symbolInfos = new mutable.ListBuffer[SymbolInformation]() @@ -67,7 +67,7 @@ class ExtractSemanticDB extends Phase: val synthetics = new mutable.ListBuffer[s.Synthetic]() /** A cache of localN names */ - val localNames = new mutable.HashSet[String]() 
+ val localNames: mutable.HashSet[String] = new mutable.HashSet() /** The symbol occurrences generated so far, as a set */ private val generated = new mutable.HashSet[SymbolOccurrence] diff --git a/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala b/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala index b2f26e3e992f..50765a172ffd 100644 --- a/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala +++ b/tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala @@ -11,7 +11,7 @@ import dotty.tools.dotc.{semanticdb => s} class SyntheticsExtractor: import Scala3.{_, given} - val visited = collection.mutable.HashSet[Tree]() + val visited: collection.mutable.HashSet[Tree] = collection.mutable.HashSet() def tryFindSynthetic(tree: Tree)(using Context, SemanticSymbolBuilder, TypeOps): Option[s.Synthetic] = extension (synth: s.Synthetic) diff --git a/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala b/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala index 2310bcdbc97c..71cb30fbd5fb 100644 --- a/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala +++ b/tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala @@ -1,4 +1,5 @@ package dotty.tools.dotc.semanticdb.internal +import language.experimental.pureFunctions abstract class SemanticdbTypeMapper[BaseType, CustomType] { def toCustom(base: BaseType): CustomType @@ -6,8 +7,8 @@ abstract class SemanticdbTypeMapper[BaseType, CustomType] { } object SemanticdbTypeMapper { - def apply[BaseType, CustomType](baseToCustom: BaseType => CustomType)( - customToBase: CustomType => BaseType + def apply[BaseType, CustomType](baseToCustom: BaseType -> CustomType)( + customToBase: CustomType -> BaseType ): SemanticdbTypeMapper[BaseType, CustomType] = new SemanticdbTypeMapper[BaseType, CustomType] { def toCustom(base: BaseType): CustomType = baseToCustom(base) diff --git 
a/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala b/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala index 91b5bc6a3de4..2342170d79b8 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala @@ -43,7 +43,7 @@ class CountOuterAccesses extends MiniPhase: // LambdaLift can create outer paths. These need to be known in this phase. /** The number of times an outer accessor that might be dropped is accessed */ - val outerAccessCount = new mutable.HashMap[Symbol, Int] { + val outerAccessCount: mutable.HashMap[Symbol, Int] = new { override def default(s: Symbol): Int = 0 } diff --git a/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala b/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala index bf8a6fa6c7bf..25e8b49cc1ba 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala @@ -26,7 +26,7 @@ object ForwardDepChecks: /** A class to help in forward reference checking */ class LevelInfo(val outer: OptLevelInfo, val owner: Symbol, stats: List[Tree])(using Context) - extends OptLevelInfo { + extends OptLevelInfo, caps.Pure { override val levelAndIndex: LevelAndIndex = stats.foldLeft(outer.levelAndIndex, 0) {(mi, stat) => val (m, idx) = mi diff --git a/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala b/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala index 9a36d65babe8..2aae0a4b66cb 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala @@ -42,7 +42,7 @@ object HoistSuperArgs { * as method parameters. The definition is installed in the scope enclosing the class, * or, if that is a package, it is made a static method of the class itself. 
*/ -class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase => +class HoistSuperArgs extends MiniPhase, IdentityDenotTransformer { thisPhase => import ast.tpd._ override def phaseName: String = HoistSuperArgs.name @@ -186,7 +186,7 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase // MO: The guard avoids the crash for #16351. // It would be good to dig deeper, but I won't have the time myself to do it. cpy.Block(superCall)( - stats = defs.mapconserve { + stats = defs.mapconserve { (t: Tree) => t match // !cc! explicity typed scrutinee is needed case vdef: ValDef => try cpy.ValDef(vdef)(rhs = hoistSuperArg(vdef.rhs, cdef, lifted.toList)) finally lifted += vdef.symbol diff --git a/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala b/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala index 3b37ef130231..c32ea61cff2b 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala @@ -2,33 +2,30 @@ package dotty.tools.dotc package transform import java.util.IdentityHashMap + import ast.tpd import core.Annotations.Annotation import core.Constants.Constant -import core.Contexts.* -import core.Decorators.* +import core.Contexts._ +import core.Decorators._ import core.DenotTransformers.IdentityDenotTransformer -import core.Flags.* -import core.NameKinds.{ExpandedName, LazyBitMapName, LazyLocalInitName, LazyLocalName} +import core.Flags._ +import core.NameKinds.{LazyBitMapName, LazyLocalInitName, LazyLocalName, ExpandedName} import core.StdNames.nme -import core.Symbols.* -import core.Types.* +import core.Symbols._ +import core.Types._ import core.{Names, StdNames} -import dotty.tools.dotc.config.Feature import transform.MegaPhase.MiniPhase -import transform.SymUtils.* - +import transform.SymUtils._ import scala.collection.mutable class LazyVals extends MiniPhase with IdentityDenotTransformer { import LazyVals._ import tpd._ - /** - * The 
map contains the list of the offset trees. - */ - class OffsetInfo(var defs: List[Tree], var ord: Int = 0) - + /** this map contains mutable state of transformation: OffsetDefs to be appended to companion object definitions, + * and number of bits currently used */ + class OffsetInfo(var defs: List[Tree], var ord:Int) private val appendOffsetDefs = mutable.Map.empty[Symbol, OffsetInfo] override def phaseName: String = LazyVals.name @@ -55,7 +52,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { else nullables.toList } - private def needsBoxing(tp: Type)(using Context): Boolean = tp.classSymbol.isPrimitiveValueClass override def prepareForUnit(tree: Tree)(using Context): Context = { if (lazyValNullables == null) @@ -66,6 +62,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { override def transformDefDef(tree: DefDef)(using Context): Tree = transformLazyVal(tree) + override def transformValDef(tree: ValDef)(using Context): Tree = transformLazyVal(tree) @@ -106,9 +103,10 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { /** Append offset fields to companion objects - */ + */ override def transformTemplate(template: Template)(using Context): Tree = { val cls = ctx.owner.asClass + appendOffsetDefs.get(cls) match { case None => template case Some(data) => @@ -117,6 +115,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } } + private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match { case first :: rest if isSuperConstrCall(first) => first :: prefix ::: rest case _ => prefix ::: stats @@ -187,6 +186,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { Thicket(holderTree, initTree, accessor) } + override def transformStats(trees: List[tpd.Tree])(using Context): List[Tree] = { // backend requires field usage to be after field definition // need to bring containers to start of method @@ -274,231 +274,6 @@ class LazyVals extends MiniPhase with 
IdentityDenotTransformer { } } - /** - * Create a threadsafe lazy accessor and function that computes the field's value. `Evaluating` and - * `NullValue` are represented by `object`s and `Waiting` by a class that allows awaiting the completion - * of the evaluation. Note that since tail-recursive functions are transformed *before* lazy-vals, - * this implementation does involve explicit while loop. `PatternMatcher` is coming before `LazyVals`, - * therefore the pattern matching is implemented using if-s. - * - * ``` - * private @volatile var _x: AnyRef = null - * - * def x: A = - * val result = _x - * if result.isInstanceOf[A] then - * result // possible unboxing applied here - * else if result.eq(NullValue) then - * null // possible unboxing applied here - * else - * x_compute() // possible unboxing applied here - * - * private def x_compute(): AnyRef = - * while do - * val current: AnyRef = _x - * if current.eq(null) then - * if CAS(_x, null, Evaluating) then - * var resultNullable: AnyRef = null - * var result: AnyRef = null - * try - * resultNullable = rhs - * nullable = null // nulls out the nullable fields used only in initialization - * if resultNullable.eq(null) then - * result = NullValue - * else - * result = resultNullable - * finally - * if !CAS(_x, Evaluating, result) then - * val lock = _x.asInstanceOf[Waiting] - * CAS(_x, lock, result) - * lock.release() - * return resultNullable - * else - * if current.isInstanceOf[LazyValControlState] then - * if current.eq(Evaluating) then // To avoid creating Waiting instance - * CAS(current, current, new Waiting) - * else if current.isInstanceOf[Waiting] then - * current.asInstanceOf[Waiting].await() - * else return null - * else - * return current - * end while - * * ``` - * - * @param memberDef the transformed lazy field member definition - * @param claz the class containing this lazy val field - * @param target the target synthetic field - * @param offset the offset of the field in the storage allocation of 
the class - * @param thiz a reference to the transformed class - */ - def mkThreadSafeDef(memberDef: ValOrDefDef, - claz: ClassSymbol, - target: Symbol, - offset: Tree, - thiz: Tree)(using Context): (DefDef, DefDef) = { - val tp = memberDef.tpe.widenDealias.resultType.widenDealias - val waiting = ref(defn.LazyValsWaitingState) - val controlState = ref(defn.LazyValsControlState) - val evaluating = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.evaluating) - val nullValue = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.nullValue) - val objCasFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.objCas) - val accessorMethodSymbol = memberDef.symbol.asTerm - val lazyInitMethodName = LazyLocalInitName.fresh(memberDef.name.asTermName) - val lazyInitMethodSymbol = newSymbol(claz, lazyInitMethodName, Synthetic | Method | Private, MethodType(Nil)(_ => Nil, _ => defn.ObjectType)) - - val rhs = memberDef.rhs - val rhsMappedOwner = rhs.changeOwnerAfter(memberDef.symbol, lazyInitMethodSymbol, this) - val valueSymbol = newSymbol(accessorMethodSymbol, lazyNme.result, Synthetic, defn.ObjectType) - - val immediateValueCondition = - if (defn.LazyValsControlState.isSubClass(tp.classSymbol)) then - ref(valueSymbol).select(defn.Any_!=).appliedTo(nullLiteral).select(nme.And).appliedTo(ref(valueSymbol) - .select(defn.Any_isInstanceOf).appliedToType(defn.LazyValsControlState.typeRef) - .select(nme.UNARY_!).appliedToNone) - else - ref(valueSymbol).select(defn.Any_isInstanceOf).appliedToType(tp) - - val accessorBody = - Block( - ValDef(valueSymbol, ref(target)) :: Nil, - If( // if _x != null && !_x.isInstanceOf[LazyValControlState] then - immediateValueCondition, - ref(valueSymbol).ensureConforms(tp), // then return _x.asInstanceOf[A] - If( - ref(valueSymbol).select(defn.Object_eq).appliedTo(nullValue), - nullLiteral.ensureConforms(tp), - ref(lazyInitMethodSymbol).ensureApplied.ensureConforms(tp) // else return x_compute() - ) - ) - ) - - val accessorDef = 
DefDef(accessorMethodSymbol, accessorBody) - - // if observed a null (uninitialized) value - val initialize = { - // var result: AnyRef - val resSymbNullable = newSymbol(lazyInitMethodSymbol, lazyNme.resultNullable, Synthetic | Mutable, defn.ObjectType) - val resSymb = newSymbol(lazyInitMethodSymbol, lazyNme.result, Synthetic | Mutable, defn.ObjectType) - // releasing block in finally - val lockRel = { - val lockSymb = newSymbol(lazyInitMethodSymbol, lazyNme.lock, Synthetic, waiting.typeOpt) - Block(ValDef(lockSymb, ref(target).cast(waiting.typeOpt)) - :: objCasFlag.appliedTo(thiz, offset, ref(lockSymb), ref(resSymb)) :: Nil, - ref(lockSymb).select(lazyNme.RLazyVals.waitingRelease).ensureApplied) - } - // finally block - val fin = If( - objCasFlag.appliedTo(thiz, offset, evaluating, ref(resSymb)).select(nme.UNARY_!).appliedToNone, - lockRel, - unitLiteral - ) - // entire try block - val evaluate = Try( - - Block( - (Assign(ref(resSymbNullable), if needsBoxing(tp) && rhsMappedOwner != EmptyTree then rhsMappedOwner.ensureConforms(defn.boxedType(tp)) else rhsMappedOwner) // try result = rhs - :: If( - ref(resSymbNullable).select(defn.Object_eq).appliedTo(nullLiteral), - Assign(ref(resSymb), nullValue), - Assign(ref(resSymb), ref(resSymbNullable)) - ) :: Nil) - ::: nullOut(nullableFor(accessorMethodSymbol)), - unitLiteral), - Nil, - fin - ) - // if CAS(_, null, Evaluating) - If( - objCasFlag.appliedTo(thiz, offset, nullLiteral, evaluating), - Block(ValDef(resSymb, nullLiteral) :: ValDef(resSymbNullable, nullLiteral) :: evaluate :: Nil, // var result: AnyRef = null - Return(ref(resSymbNullable), lazyInitMethodSymbol)), - unitLiteral - ).withType(defn.UnitType) - } - - val current = newSymbol(lazyInitMethodSymbol, lazyNme.current, Synthetic, defn.ObjectType) - val ifNotUninitialized = - If( - ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(controlState), - // if a control state - If( - ref(current).select(defn.Object_eq).appliedTo(evaluating), - // if is 
Evaluating then CAS(_, Evaluating, new Waiting) - Block( - objCasFlag.appliedTo(thiz, offset, ref(current), Select(New(waiting), StdNames.nme.CONSTRUCTOR).ensureApplied) :: Nil, - unitLiteral - ), - // if not Evaluating - If( - ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(waiting), - // if is waiting - ref(current).select(defn.Any_asInstanceOf).appliedToTypeTree(waiting).select(lazyNme.RLazyVals.waitingAwaitRelease, _.info.paramInfoss.exists(_.size == 0)).ensureApplied, - Return(nullLiteral, lazyInitMethodSymbol) - ) - ), - // if not a control state - Return(ref(current), lazyInitMethodSymbol) - ) - - val initBody = Block(ValDef(current, ref(target)) :: Nil, If(ref(current).select(defn.Object_eq).appliedTo(nullLiteral), initialize, ifNotUninitialized).withType(defn.UnitType)) - val initMainLoop = WhileDo(EmptyTree, initBody) // becomes: while (true) do { body } - val initMethodDef = DefDef(lazyInitMethodSymbol, initMainLoop) - (accessorDef, initMethodDef) - } - - def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { - assert(!(x.symbol is Mutable)) - if ctx.settings.YlightweightLazyVals.value then - transformMemberDefThreadSafeNew(x) - else - transformMemberDefThreadSafeLegacy(x) - } - - def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { - import dotty.tools.dotc.core.Types._ - import dotty.tools.dotc.core.Flags._ - - val claz = x.symbol.owner.asClass - val thizClass = Literal(Constant(claz.info)) - - def offsetName(id: Int) = s"${StdNames.nme.LAZY_FIELD_OFFSET}${if (x.symbol.owner.is(Module)) "_m_" else ""}$id".toTermName - val containerName = LazyLocalName.fresh(x.name.asTermName) - val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags | Private, defn.ObjectType, coord = x.symbol.coord).enteredAfter(this) - containerSymbol.addAnnotation(Annotation(defn.VolatileAnnot)) // private @volatile var _x: AnyRef - 
containerSymbol.addAnnotations(x.symbol.annotations) // pass annotations from original definition - val stat = x.symbol.isStatic - if stat then - containerSymbol.setFlag(JavaStatic) - val getOffset = - if stat then - Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getStaticFieldOffset) - else - Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) - val containerTree = ValDef(containerSymbol, nullLiteral) - - // create an offset for this lazy val - val offsetSymbol: TermSymbol = appendOffsetDefs.get(claz) match - case Some(info) => - newSymbol(claz, offsetName(info.defs.size), Synthetic, defn.LongType).enteredAfter(this) - case None => - newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) - offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) - val fieldTree = thizClass.select(lazyNme.RLazyVals.getDeclaredField).appliedTo(Literal(Constant(containerName.mangledString))) - val offsetTree = ValDef(offsetSymbol.nn, getOffset.appliedTo(fieldTree)) - val offsetInfo = appendOffsetDefs.getOrElseUpdate(claz, new OffsetInfo(Nil)) - offsetInfo.defs = offsetTree :: offsetInfo.defs - val offset = ref(offsetSymbol.nn) - - val swapOver = - if stat then - tpd.clsOf(x.symbol.owner.typeRef) - else - This(claz) - - val (accessorDef, initMethodDef) = mkThreadSafeDef(x, claz, containerSymbol, offset, swapOver) - Thicket(containerTree, accessorDef, initMethodDef) - } - /** Create a threadsafe lazy accessor equivalent to such code * ``` * def methodSymbol(): Int = { @@ -530,7 +305,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * } * ``` */ - def mkThreadSafeDefLegacy(methodSymbol: TermSymbol, + def mkThreadSafeDef(methodSymbol: TermSymbol, claz: ClassSymbol, ord: Int, target: Symbol, @@ -599,12 +374,15 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { DefDef(methodSymbol, loop) } - def transformMemberDefThreadSafeLegacy(x: ValOrDefDef)(using Context): Thicket = { + def 
transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { + assert(!(x.symbol is Mutable)) + val tpe = x.tpe.widen.resultType.widen val claz = x.symbol.owner.asClass val thizClass = Literal(Constant(claz.info)) - val getOffset = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffset) - val getOffsetStatic = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) + val helperModule = requiredModule("scala.runtime.LazyVals") + val getOffset = Select(ref(helperModule), lazyNme.RLazyVals.getOffset) + val getOffsetStatic = Select(ref(helperModule), lazyNme.RLazyVals.getOffsetStatic) var offsetSymbol: TermSymbol | Null = null var flag: Tree = EmptyTree var ord = 0 @@ -647,16 +425,17 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val containerName = LazyLocalName.fresh(x.name.asTermName) val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags, tpe, coord = x.symbol.coord).enteredAfter(this) + val containerTree = ValDef(containerSymbol, defaultValue(tpe)) val offset = ref(offsetSymbol.nn) - val getFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.get) - val setFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.setFlag) - val wait = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.wait4Notification) - val state = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.state) - val cas = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.cas) + val getFlag = Select(ref(helperModule), lazyNme.RLazyVals.get) + val setFlag = Select(ref(helperModule), lazyNme.RLazyVals.setFlag) + val wait = Select(ref(helperModule), lazyNme.RLazyVals.wait4Notification) + val state = Select(ref(helperModule), lazyNme.RLazyVals.state) + val cas = Select(ref(helperModule), lazyNme.RLazyVals.cas) - val accessor = mkThreadSafeDefLegacy(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) + val accessor = mkThreadSafeDef(x.symbol.asTerm, 
claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) if (flag eq EmptyTree) Thicket(containerTree, accessor) else Thicket(containerTree, flag, accessor) @@ -666,35 +445,26 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { object LazyVals { val name: String = "lazyVals" val description: String = "expand lazy vals" + object lazyNme { import Names.TermName object RLazyVals { import scala.runtime.LazyVals.{Names => N} - val waitingAwaitRelease: TermName = "await".toTermName - val waitingRelease: TermName = "countDown".toTermName - val evaluating: TermName = "Evaluating".toTermName - val nullValue: TermName = "NullValue".toTermName - val objCas: TermName = "objCAS".toTermName - val get: TermName = N.get.toTermName - val setFlag: TermName = N.setFlag.toTermName - val wait4Notification: TermName = N.wait4Notification.toTermName - val state: TermName = N.state.toTermName - val cas: TermName = N.cas.toTermName - val getOffset: TermName = N.getOffset.toTermName - val getOffsetStatic: TermName = "getOffsetStatic".toTermName - val getStaticFieldOffset: TermName = "getStaticFieldOffset".toTermName - val getDeclaredField: TermName = "getDeclaredField".toTermName + val get: TermName = N.get.toTermName + val setFlag: TermName = N.setFlag.toTermName + val wait4Notification: TermName = N.wait4Notification.toTermName + val state: TermName = N.state.toTermName + val cas: TermName = N.cas.toTermName + val getOffset: TermName = N.getOffset.toTermName + val getOffsetStatic: TermName = "getOffsetStatic".toTermName + val getDeclaredField: TermName = "getDeclaredField".toTermName } val flag: TermName = "flag".toTermName val state: TermName = "state".toTermName val result: TermName = "result".toTermName - val resultNullable: TermName = "resultNullable".toTermName val value: TermName = "value".toTermName val initialized: TermName = "initialized".toTermName val initialize: TermName = "initialize".toTermName val retry: TermName = 
"retry".toTermName - val current: TermName = "current".toTermName - val lock: TermName = "lock".toTermName - val discard: TermName = "discard".toTermName } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala b/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala index 27ccd622bc65..bff0e8340c0b 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala @@ -9,7 +9,7 @@ import Contexts._ /** A base class for transforms. * A transform contains a compiler phase which applies a tree transformer. */ -abstract class MacroTransform extends Phase { +abstract class MacroTransform extends Phase, caps.Pure { import ast.tpd._ diff --git a/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala b/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala index 9d241216bdaa..2543a89af4d7 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala @@ -28,7 +28,7 @@ object MegaPhase { * - Other: to prepape/transform a tree that does not have a specific prepare/transform * method pair. 
*/ - abstract class MiniPhase extends Phase { + abstract class MiniPhase extends Phase, caps.Pure { private[MegaPhase] var superPhase: MegaPhase = _ private[MegaPhase] var idxInGroup: Int = _ diff --git a/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala b/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala index 6456066bfdb0..3552d08e81f2 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Memoize.scala @@ -27,7 +27,7 @@ object Memoize { val description: String = "add private fields to getters and setters" private final class MyState { - val classesThatNeedReleaseFence = new util.HashSet[Symbol] + val classesThatNeedReleaseFence: util.HashSet[Symbol] = new util.HashSet() } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala b/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala index 5ca09dd6188f..9a220d9c4f8c 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Mixin.scala @@ -228,7 +228,10 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => } val superCallsAndArgs: Map[Symbol, (Tree, List[Tree], List[Tree])] = ( - for (p <- impl.parents; constr = stripBlock(p).symbol if constr.isConstructor) + for + p: Tree <- impl.parents // !cc! 
explicit type on `p` is needed + constr = stripBlock(p).symbol + if constr.isConstructor yield constr.owner -> transformConstructor(p) ).toMap diff --git a/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala b/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala index 99702686edf8..c50a96dc8b81 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala @@ -38,7 +38,8 @@ class MoveStatics extends MiniPhase with SymTransformer { override def transformStats(trees: List[Tree])(using Context): List[Tree] = if (ctx.owner.is(Flags.Package)) { val (classes, others) = trees.partition(x => x.isInstanceOf[TypeDef] && x.symbol.isClass) - val pairs = classes.groupBy(_.symbol.name.stripModuleClassSuffix).asInstanceOf[Map[Name, List[TypeDef]]] + val pairs = classes.groupBy(cls => cls.symbol.name.stripModuleClassSuffix: Name).asInstanceOf[Map[Name, List[TypeDef]]] + // !cc! type ascription `: Name` needed to make it compile under captureChecking def rebuild(orig: TypeDef, newBody: List[Tree]): Tree = { val staticFields = newBody.filter(x => x.isInstanceOf[ValDef] && x.symbol.hasAnnotation(defn.ScalaStaticAnnot)).asInstanceOf[List[ValDef]] diff --git a/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala b/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala index 70fa0e5cc513..6004f376b7b4 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala @@ -18,6 +18,7 @@ import config.Printers.patmatch import reporting._ import dotty.tools.dotc.ast._ import util.Property._ +import language.experimental.pureFunctions /** The pattern matching transform. * After this phase, the only Match nodes remaining in the code are simple switches @@ -105,7 +106,7 @@ object PatternMatcher { // TODO: Drop Case once we use everywhere else `isPatmatGenerated`. 
/** The plan `let x = rhs in body(x)` where `x` is a fresh variable */ - private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = { + private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol -> Plan): Plan = { val declTpe = if tpe.exists then tpe else rhs.tpe val vble = newVar(rhs, EmptyFlags, declTpe) initializer(vble) = rhs @@ -113,7 +114,7 @@ object PatternMatcher { } /** The plan `l: { expr(l) }` where `l` is a fresh label */ - private def altsLabeledAbstract(expr: (=> Plan) => Plan): Plan = { + private def altsLabeledAbstract(expr: (-> Plan) -> Plan): Plan = { val label = newSymbol(ctx.owner, PatMatAltsName.fresh(), Synthetic | Label, defn.UnitType) LabeledPlan(label, expr(ReturnPlan(label))) @@ -467,7 +468,7 @@ object PatternMatcher { // ----- Optimizing plans --------------- /** A superclass for plan transforms */ - class PlanTransform extends (Plan => Plan) { + class PlanTransform extends (Plan -> Plan) { protected val treeMap: TreeMap = new TreeMap { override def transform(tree: Tree)(using Context) = tree } @@ -1032,7 +1033,7 @@ object PatternMatcher { case _ => end checkSwitch - val optimizations: List[(String, Plan => Plan)] = List( + val optimizations: List[(String, Plan -> Plan)] = List( "mergeTests" -> mergeTests, "inlineVars" -> inlineVars ) diff --git a/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala b/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala index 05aaa745bb18..2648e19e23a8 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala @@ -362,7 +362,6 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase } case Inlined(call, bindings, expansion) if !call.isEmpty => val pos = call.sourcePos - CrossVersionChecks.checkExperimentalRef(call.symbol, pos) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, 
transformSub(bindings), transform(expansion)(using inlineContext(call))) case templ: Template => diff --git a/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala b/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala index ea83f276a59c..da1cf4e9a44e 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Splicer.scala @@ -31,6 +31,7 @@ import dotty.tools.dotc.quoted.{PickledQuotes, QuoteUtils} import scala.quoted.Quotes import scala.quoted.runtime.impl._ +import language.experimental.pureFunctions /** Utility class to splice quoted expressions */ object Splicer { @@ -56,8 +57,11 @@ object Splicer { val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree - val interpretedExpr = interpreter.interpret[Quotes => scala.quoted.Expr[Any]](tree) - val interpretedTree = interpretedExpr.fold(tree)(macroClosure => PickledQuotes.quotedExprToTree(macroClosure(QuotesImpl()))) + val interpretedExpr: Option[Quotes -> scala.quoted.Expr[Any]] = // !cc! 
explicit type ascription needed here + interpreter.interpret(tree) + val interpretedTree: Tree = interpretedExpr match + case Some(macroClosure) => PickledQuotes.quotedExprToTree(macroClosure(QuotesImpl())) + case None => tree checkEscapedVariables(interpretedTree, macroOwner) } finally { diff --git a/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala b/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala index ad3f0322130d..df6128d249d2 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala @@ -25,6 +25,7 @@ import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.config.ScalaRelease.* import scala.annotation.constructorOnly +import language.experimental.pureFunctions object Splicing: val name: String = "splicing" @@ -186,7 +187,7 @@ class Splicing extends MacroTransform: * {{{ | T2 | x, X | (x$1: Expr[T1], X$1: Type[X]) => (using Quotes) ?=> {... ${x$1} ... X$1.Underlying ...} }}} * ``` */ - private class SpliceTransformer(spliceOwner: Symbol, isCaptured: Symbol => Boolean) extends Transformer: + private class SpliceTransformer(spliceOwner: Symbol, isCaptured: Symbol -> Boolean) extends Transformer: private var refBindingMap = mutable.Map.empty[Symbol, (Tree, Symbol)] /** Reference to the `Quotes` instance of the current level 1 splice */ private var quotes: Tree | Null = null // TODO: add to the context diff --git a/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala b/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala index 92d22b1cc57e..f9779cbbfee4 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala @@ -49,7 +49,7 @@ class TryCatchPatterns extends MiniPhase { override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { case Try(_, cases, _) => - cases.foreach { + cases.foreach { (t: CaseDef) => t match // !cc! 
explicity typed scrutinee is needed case CaseDef(Typed(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.") case CaseDef(Bind(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.") case c => diff --git a/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala b/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala index b2a101649457..7db89300e710 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala @@ -15,6 +15,7 @@ import core.Flags._ import util.Spans._ import reporting._ import config.Printers.{ transforms => debug } +import language.experimental.pureFunctions /** This transform normalizes type tests and type casts, * also replacing type tests with singleton argument type with reference equality check @@ -195,7 +196,7 @@ object TypeTestsCasts { def testCls = effectiveClass(testType.widen) def unboxedTestCls = effectiveClass(unboxedTestType.widen) - def unreachable(why: => String)(using Context): Boolean = { + def unreachable(why: -> String)(using Context): Boolean = { if (flagUnrelated) if (inMatch) report.error(em"this case is unreachable since $why", expr.srcPos) else report.warning(em"this will always yield false since $why", expr.srcPos) diff --git a/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala b/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala index a48aa77fe79f..541cf50c43e1 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala @@ -18,6 +18,7 @@ import Errors.* import scala.collection.mutable import scala.annotation.tailrec +import caps.unsafe.unsafeBoxFunArg object Semantic: @@ -1669,7 +1670,8 @@ object Semantic: } // initialize super classes after outers are set - tasks.foreach(task => task()) + tasks.foreach(((task: () => Unit) => task()).unsafeBoxFunArg) + // !cc! 
.asInstanceOf needed to convert from `(() => Unit) -> Unit` to `(box () => Unit) -> Unit`. end if var fieldsChanged = true diff --git a/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala b/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala index 8e891f822255..ca0e149f881f 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala @@ -306,7 +306,6 @@ object SpaceEngine { val isEmptyTp = extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition) isEmptyTp <:< ConstantType(Constant(false)) } - || unappResult.derivesFrom(defn.NonEmptyTupleClass) } /** Is the unapply or unapplySeq irrefutable? diff --git a/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala b/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala index 3c87621413b7..a7f6d3e7dea7 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala @@ -722,8 +722,9 @@ object ExplicitJSClasses { val LocalJSClassValueName: UniqueNameKind = new UniqueNameKind("$jsclass") private final class MyState { - val nestedObject2superTypeConstructor = new MutableSymbolMap[Type] - val localClass2jsclassVal = new MutableSymbolMap[TermSymbol] - val notYetReferencedLocalClasses = new util.HashSet[Symbol] + val nestedObject2superTypeConstructor: MutableSymbolMap[Type] = new MutableSymbolMap[Type] + val localClass2jsclassVal: MutableSymbolMap[TermSymbol] = new MutableSymbolMap[TermSymbol] + val notYetReferencedLocalClasses: util.HashSet[Symbol] = new util.HashSet[Symbol] + // !cc! 
type ascriptions needed for 3 vals above, otherwise they get strange inferred types } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala b/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala index 48e6802e0f6c..e75769147f80 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala @@ -974,8 +974,6 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree.rhs match { case sel: Select if sel.symbol == jsdefn.JSPackage_native => // ok - case rhs: Ident if rhs.symbol == jsdefn.JSPackage_native => - // ok case _ => val pos = if (tree.rhs != EmptyTree) tree.rhs.srcPos else tree.srcPos report.error(s"$longKindStr may only call js.native.", pos) diff --git a/tests/pos-with-compiler-cc/dotc/typer/Applications.scala b/tests/pos-with-compiler-cc/dotc/typer/Applications.scala index ec72c48b2422..fa237e316bd5 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Applications.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Applications.scala @@ -1004,7 +1004,7 @@ trait Applications extends Compatibility { // applications of inline functions. tree.args match { case (arg @ Match(EmptyTree, cases)) :: Nil => - cases.foreach { + cases.foreach { (t: untpd.CaseDef) => t match // !cc! 
explicity typed scrutinee is needed case CaseDef(Typed(_: untpd.Ident, _), _, _) => // OK case CaseDef(Bind(_, Typed(_: untpd.Ident, _)), _, _) => // OK case CaseDef(Ident(name), _, _) if name == nme.WILDCARD => // Ok @@ -1501,17 +1501,11 @@ trait Applications extends Compatibility { } /** Drop any leading implicit parameter sections */ - def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { + def stripImplicit(tp: Type)(using Context): Type = tp match { case mt: MethodType if mt.isImplicitMethod => - stripImplicit(resultTypeApprox(mt, wildcardOnly)) + stripImplicit(resultTypeApprox(mt)) case pt: PolyType => - pt.derivedLambdaType(pt.paramNames, pt.paramInfos, - stripImplicit(pt.resultType, wildcardOnly = true)) - // can't use TypeParamRefs for parameter references in `resultTypeApprox` - // since their bounds can refer to type parameters in `pt` that are not - // bound by the constraint. This can lead to hygiene violations if subsequently - // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. 
- .asInstanceOf[PolyType].flatten + pt.derivedLambdaType(pt.paramNames, pt.paramInfos, stripImplicit(pt.resultType)).asInstanceOf[PolyType].flatten case _ => tp } diff --git a/tests/pos-with-compiler-cc/dotc/typer/Checking.scala b/tests/pos-with-compiler-cc/dotc/typer/Checking.scala index 99399832085f..8d9687cbb21d 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Checking.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Checking.scala @@ -42,6 +42,7 @@ import transform.TypeUtils.* import collection.mutable import reporting._ +import language.experimental.pureFunctions object Checking { import tpd._ @@ -472,7 +473,7 @@ object Checking { def checkWithDeferred(flag: FlagSet) = if (sym.isOneOf(flag)) fail(AbstractMemberMayNotHaveModifier(sym, flag)) - def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: => String) = + def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: -> String) = if (sym.isAllOf(flag1 | flag2)) fail(msg.toMessage) def checkCombination(flag1: FlagSet, flag2: FlagSet) = if sym.isAllOf(flag1 | flag2) then @@ -600,7 +601,7 @@ object Checking { */ def checkNoPrivateLeaks(sym: Symbol)(using Context): Type = { class NotPrivate extends TypeMap { - var errors: List[() => String] = Nil + var errors: List[() -> String] = Nil private var inCaptureSet: Boolean = false def accessBoundary(sym: Symbol): Symbol = @@ -782,7 +783,7 @@ object Checking { languageImport(qual) match case Some(nme.experimental) if !ctx.owner.isInExperimentalScope && !selectors.forall(isAllowedImport) => - def check(stable: => String) = + def check(stable: -> String) = Feature.checkExperimentalFeature("features", imp.srcPos, s"\n\nNote: the scope enclosing the import is not considered experimental because it contains the\nnon-experimental $stable") if ctx.owner.is(Package) then @@ -1036,7 +1037,7 @@ trait Checking { /** Issue a feature warning if feature is not enabled */ def checkFeature(name: TermName, - description: => String, + description: -> String, featureUseSite: Symbol, 
pos: SrcPos)(using Context): Unit = if !Feature.enabled(name) then @@ -1046,7 +1047,7 @@ trait Checking { * are feasible, i.e. that their lower bound conforms to their upper bound. If a type * argument is infeasible, issue and error and continue with upper bound. */ - def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = { + def checkFeasibleParent(tp: Type, pos: SrcPos, where: -> String = "")(using Context): Type = { def checkGoodBounds(tp: Type) = tp match { case tp @ TypeBounds(lo, hi) if !(lo <:< hi) => report.error(ex"no type exists between low bound $lo and high bound $hi$where", pos) @@ -1526,7 +1527,7 @@ trait ReChecking extends Checking { override def checkCanThrow(tp: Type, span: Span)(using Context): Tree = EmptyTree override def checkCatch(pat: Tree, guard: Tree)(using Context): Unit = () override def checkNoContextFunctionType(tree: Tree)(using Context): Unit = () - override def checkFeature(name: TermName, description: => String, featureUseSite: Symbol, pos: SrcPos)(using Context): Unit = () + override def checkFeature(name: TermName, description: -> String, featureUseSite: Symbol, pos: SrcPos)(using Context): Unit = () } trait NoChecking extends ReChecking { @@ -1537,7 +1538,7 @@ trait NoChecking extends ReChecking { override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () override def checkImplicitConversionUseOK(tree: Tree)(using Context): Unit = () - override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp + override def checkFeasibleParent(tp: Type, pos: SrcPos, where: -> String = "")(using Context): Type = tp override def checkAnnotArgs(tree: Tree)(using Context): tree.type = tree override def checkNoTargetNameConflict(stats: List[Tree])(using Context): Unit = () override def checkParentCall(call: Tree, caller: 
ClassSymbol)(using Context): Unit = () diff --git a/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala b/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala index b2e0a4481297..c63e8e900f67 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala @@ -15,6 +15,7 @@ import reporting._ import collection.mutable import scala.util.matching.Regex +import language.experimental.pureFunctions object ErrorReporting { @@ -26,7 +27,7 @@ object ErrorReporting { def errorTree(tree: untpd.Tree, msg: Message)(using Context): tpd.Tree = errorTree(tree, msg, tree.srcPos) - def errorTree(tree: untpd.Tree, msg: => String)(using Context): tpd.Tree = + def errorTree(tree: untpd.Tree, msg: -> String)(using Context): tpd.Tree = errorTree(tree, msg.toMessage) def errorTree(tree: untpd.Tree, msg: TypeError, pos: SrcPos)(using Context): tpd.Tree = @@ -37,7 +38,7 @@ object ErrorReporting { ErrorType(msg) } - def errorType(msg: => String, pos: SrcPos)(using Context): ErrorType = + def errorType(msg: -> String, pos: SrcPos)(using Context): ErrorType = errorType(msg.toMessage, pos) def errorType(ex: TypeError, pos: SrcPos)(using Context): ErrorType = { @@ -64,7 +65,7 @@ object ErrorReporting { case tp: AppliedType if tp.isMatchAlias => MatchTypeTrace.record(tp.tryNormalize) case tp: MatchType => MatchTypeTrace.record(tp.tryNormalize) case _ => foldOver(s, tp) - tps.foldLeft("")(collectMatchTrace) + tps.foldLeft("")(collectMatchTrace.apply) // !cc! 
.apply needed since otherwise box conversion gets confused class Errors(using Context) { @@ -269,8 +270,8 @@ class ImplicitSearchError( pt: Type, where: String, paramSymWithMethodCallTree: Option[(Symbol, tpd.Tree)] = None, - ignoredInstanceNormalImport: => Option[SearchSuccess], - importSuggestionAddendum: => String + ignoredInstanceNormalImport: -> Option[SearchSuccess], + importSuggestionAddendum: -> String )(using ctx: Context) { def missingArgMsg = arg.tpe match { diff --git a/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala b/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala index 0400d241e367..5200dd73a313 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala @@ -1651,7 +1651,7 @@ end Implicits * recursive references and emit a complete implicit dictionary when the outermost search * is complete. */ -abstract class SearchHistory: +abstract class SearchHistory extends caps.Pure: val root: SearchRoot /** Does this search history contain any by name implicit arguments. */ val byname: Boolean @@ -1896,7 +1896,8 @@ sealed class TermRefSet(using Context): prefixes0 match case prefix: Type => f(TermRef(prefix, sym.uncheckedNN)) case prefixes: List[Type] => prefixes.foreach(pre => f(TermRef(pre, sym.uncheckedNN))) - elems.forEach(handle) + elems.forEach(handle.asInstanceOf) + // !cc! 
cast is needed to circumvent problematic interaction of box and Java wildcards // used only for debugging def showAsList: List[TermRef] = { diff --git a/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala b/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala index b5be2daf873b..3cc88fa323b9 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala @@ -11,6 +11,7 @@ import Implicits.RenamedImplicitRef import StdNames.nme import printing.Texts.Text import NameKinds.QualifiedName +import language.experimental.pureFunctions object ImportInfo { @@ -49,10 +50,10 @@ object ImportInfo { * @param isRootImport true if this is one of the implicit imports of scala, java.lang, * scala.Predef in the start context, false otherwise. */ -class ImportInfo(symf: Context ?=> Symbol, +class ImportInfo(symf: Context ?-> Symbol, val selectors: List[untpd.ImportSelector], val qualifier: untpd.Tree, - val isRootImport: Boolean = false) extends Showable { + val isRootImport: Boolean = false) extends Showable, caps.Pure { private def symNameOpt = qualifier match { case ref: untpd.RefTree => Some(ref.name.asTermName) diff --git a/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala b/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala index 9d2db773c4d4..27b83e025cf9 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala @@ -6,14 +6,15 @@ import core._ import ast._ import Contexts._, Types._, Flags._, Symbols._ import ProtoTypes._ -import NameKinds.UniqueName +import NameKinds.{AvoidNameKind, UniqueName} import util.Spans._ -import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos} +import util.{Stats, SimpleIdentityMap, SrcPos} import Decorators._ import config.Printers.{gadts, typr} import annotation.tailrec import reporting._ import collection.mutable + import scala.annotation.internal.sharable object Inferencing { @@ -573,7 
+574,7 @@ trait Inferencing { this: Typer => * Then `Y` also occurs co-variantly in `T` because it needs to be minimized in order to constrain * `T` the least. See `variances` for more detail. */ - def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = + def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = { val state = ctx.typerState // Note that some variables in `locked` might not be in `state.ownedVars` @@ -582,7 +583,7 @@ trait Inferencing { this: Typer => // `qualifying`. val ownedVars = state.ownedVars - if (ownedVars ne locked) && !ownedVars.isEmpty then + if ((ownedVars ne locked) && !ownedVars.isEmpty) { val qualifying = ownedVars -- locked if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") @@ -618,67 +619,44 @@ trait Inferencing { this: Typer => if state.reporter.hasUnreportedErrors then return tree def constraint = state.constraint + type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)] + val toInstantiate = new InstantiateQueue + for tvar <- qualifying do + if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then + constrainIfDependentParamRef(tvar, tree) + // Needs to be checked again, since previous interpolations could already have + // instantiated `tvar` through unification. + val v = vs(tvar) + if v == null then + // Even though `tvar` is non-occurring in `v`, the specific + // instantiation we pick still matters because `tvar` might appear + // in the bounds of a non-`qualifying` type variable in the + // constraint. 
+ // In particular, if `tvar` was created as the upper or lower + // bound of an existing variable by `LevelAvoidMap`, we + // instantiate it in the direction corresponding to the + // original variable which might be further constrained later. + // Otherwise, we simply rely on `hasLowerBound`. + val name = tvar.origin.paramName + val fromBelow = + name.is(AvoidNameKind.UpperBound) || + !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound + typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") + toInstantiate += ((tvar, fromBelow)) + else if v.intValue != 0 then + typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint") + toInstantiate += ((tvar, v.intValue == 1)) + else comparing(cmp => + if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then + // Invariant: The type of a tree whose enclosing scope is level + // N only contains type variables of level <= N. + typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") + cmp.atLevel(ctx.nestingLevel, tvar.origin) + else + typr.println(i"no interpolation for nonvariant $tvar in $state") + ) - /** Values of this type report type variables to instantiate with variance indication: - * +1 variable appears covariantly, can be instantiated from lower bound - * -1 variable appears contravariantly, can be instantiated from upper bound - * 0 variable does not appear at all, can be instantiated from either bound - */ - type ToInstantiate = List[(TypeVar, Int)] - - val toInstantiate: ToInstantiate = - val buf = new mutable.ListBuffer[(TypeVar, Int)] - for tvar <- qualifying do - if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then - constrainIfDependentParamRef(tvar, tree) - if !tvar.isInstantiated then - // isInstantiated needs to be checked again, since previous interpolations could already 
have - // instantiated `tvar` through unification. - val v = vs(tvar) - if v == null then buf += ((tvar, 0)) - else if v.intValue != 0 then buf += ((tvar, v.intValue)) - else comparing(cmp => - if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then - // Invariant: The type of a tree whose enclosing scope is level - // N only contains type variables of level <= N. - typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") - cmp.atLevel(ctx.nestingLevel, tvar.origin) - else - typr.println(i"no interpolation for nonvariant $tvar in $state") - ) - buf.toList - - def typeVarsIn(xs: ToInstantiate): TypeVars = - xs.foldLeft(SimpleIdentitySet.empty: TypeVars)((tvs, tvi) => tvs + tvi._1) - - /** Filter list of proposed instantiations so that they don't constrain further - * the current constraint. - */ - def filterByDeps(tvs0: ToInstantiate): ToInstantiate = - val excluded = // ignore dependencies from other variables that are being instantiated - typeVarsIn(tvs0) - def step(tvs: ToInstantiate): ToInstantiate = tvs match - case tvs @ (hd @ (tvar, v)) :: tvs1 => - def aboveOK = !constraint.dependsOn(tvar, excluded, co = true) - def belowOK = !constraint.dependsOn(tvar, excluded, co = false) - if v == 0 && !aboveOK then - step((tvar, 1) :: tvs1) - else if v == 0 && !belowOK then - step((tvar, -1) :: tvs1) - else if v == -1 && !aboveOK || v == 1 && !belowOK then - typr.println(i"drop $tvar, $v in $tp, $pt, qualifying = ${qualifying.toList}, tvs0 = ${tvs0.toList}%, %, excluded = ${excluded.toList}, $constraint") - step(tvs1) - else // no conflict, keep the instantiation proposal - tvs.derivedCons(hd, step(tvs1)) - case Nil => - Nil - val tvs1 = step(tvs0) - if tvs1 eq tvs0 then tvs1 - else filterByDeps(tvs1) // filter again with smaller excluded set - end filterByDeps - - /** Instantiate all type variables in `tvs` in the indicated directions, - * as described in the doc comment of 
`ToInstantiate`. + /** Instantiate all type variables in `buf` in the indicated directions. * If a type variable A is instantiated from below, and there is another * type variable B in `buf` that is known to be smaller than A, wait and * instantiate all other type variables before trying to instantiate A again. @@ -707,37 +685,29 @@ trait Inferencing { this: Typer => * * V2 := V3, O2 := O3 */ - def doInstantiate(tvs: ToInstantiate): Unit = - - /** Try to instantiate `tvs`, return any suspended type variables */ - def tryInstantiate(tvs: ToInstantiate): ToInstantiate = tvs match - case (hd @ (tvar, v)) :: tvs1 => - val fromBelow = v == 1 || (v == 0 && tvar.hasLowerBound) - typr.println( - i"interpolate${if v == 0 then " non-occurring" else ""} $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") - if tvar.isInstantiated then - tryInstantiate(tvs1) - else - val suspend = tvs1.exists{ (following, _) => - if fromBelow - then constraint.isLess(following.origin, tvar.origin) - else constraint.isLess(tvar.origin, following.origin) + def doInstantiate(buf: InstantiateQueue): Unit = + if buf.nonEmpty then + val suspended = new InstantiateQueue + while buf.nonEmpty do + val first @ (tvar, fromBelow) = buf.head + buf.dropInPlace(1) + if !tvar.isInstantiated then + val suspend = buf.exists{ (following, _) => + if fromBelow then + constraint.isLess(following.origin, tvar.origin) + else + constraint.isLess(tvar.origin, following.origin) } - if suspend then - typr.println(i"suspended: $hd") - hd :: tryInstantiate(tvs1) - else - tvar.instantiate(fromBelow) - tryInstantiate(tvs1) - case Nil => Nil - if tvs.nonEmpty then doInstantiate(tryInstantiate(tvs)) + if suspend then suspended += first else tvar.instantiate(fromBelow) + end if + end while + doInstantiate(suspended) end doInstantiate - - doInstantiate(filterByDeps(toInstantiate)) + doInstantiate(toInstantiate) } - end if + } tree - end interpolateTypeVars + } /** If `tvar` represents a parameter of a dependent 
method type in the current `call` * approximate it from below with the type of the actual argument. Skolemize that diff --git a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala index 6fb019ee057c..8775206ace7b 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala @@ -17,6 +17,7 @@ import util.SourceFile import TypeComparer.necessarySubType import scala.annotation.internal.sharable +import scala.annotation.retains object ProtoTypes { @@ -122,15 +123,15 @@ object ProtoTypes { } /** A trait for prototypes that match all types */ - trait MatchAlways extends ProtoType { + trait MatchAlways extends ProtoType, caps.Pure { def isMatchedBy(tp1: Type, keepConstraint: Boolean)(using Context): Boolean = true def map(tm: TypeMap)(using Context): ProtoType = this - def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = x + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = x override def toString: String = getClass.toString } /** A class marking ignored prototypes that can be revealed by `deepenProto` */ - abstract case class IgnoredProto(ignored: Type) extends CachedGroundType with MatchAlways: + abstract case class IgnoredProto(ignored: Type) extends CachedGroundType, MatchAlways, caps.Pure: private var myWasDeepened = false override def revealIgnored = ignored override def deepenProto(using Context): Type = @@ -164,7 +165,7 @@ object ProtoTypes { * [ ].name: proto */ abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) - extends CachedProxyType with ProtoType with ValueTypeOrProto { + extends CachedProxyType, ProtoType, ValueTypeOrProto, caps.Pure { /** Is the set of members of this type unknown, in the sense that we * cannot compute a non-trivial upper approximation? 
This is the case if: @@ -239,7 +240,7 @@ object ProtoTypes { memberProto.unusableForInference def map(tm: TypeMap)(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat) - def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = ta(x, memberProto) + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = ta(x, memberProto) override def deepenProto(using Context): SelectionProto = derivedSelectionProto(name, memberProto.deepenProto, compat) @@ -544,7 +545,7 @@ object ProtoTypes { def map(tm: TypeMap)(using Context): FunProto = derivedFunProto(args, tm(resultType), typer) - def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = ta(ta.foldOver(x, typedArgs().tpes), resultType) override def deepenProto(using Context): FunProto = @@ -574,7 +575,7 @@ object ProtoTypes { * []: argType => resultType */ abstract case class ViewProto(argType: Type, resType: Type) - extends CachedGroundType with ApplyingProto { + extends CachedGroundType, ApplyingProto, caps.Pure { override def resultType(using Context): Type = resType @@ -601,7 +602,7 @@ object ProtoTypes { def map(tm: TypeMap)(using Context): ViewProto = derivedViewProto(tm(argType), tm(resultType)) - def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = ta(ta(x, argType), resultType) override def deepenProto(using Context): ViewProto = @@ -655,7 +656,7 @@ object ProtoTypes { def map(tm: TypeMap)(using Context): PolyProto = derivedPolyProto(targs, tm(resultType)) - def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = ta(ta.foldOver(x, targs.tpes), resultType) override def deepenProto(using Context): PolyProto = @@ -703,7 +704,10 @@ object ProtoTypes { def newTypeVars(tl: TypeLambda): List[TypeTree] = for 
(paramRef <- tl.paramRefs) yield { - val tt = InferredTypeTree().withSpan(owningTree.span) + val tt = InferredTypeTree[Type]().withSpan(owningTree.span) + // !cc! explicit type argument [Type] needed since otherwise it is + // inferred to be `{*} Type`, which violates the upper bound. The + // inference works like this because of the contravariance of Tree. val tvar = TypeVar(paramRef, state, nestingLevel) state.ownedVars += tvar tt.withType(tvar) diff --git a/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala b/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala index 8afcec4dee63..4d2c7ffdfe7d 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala @@ -20,6 +20,7 @@ import config.SourceVersion.{`3.0`, `future`} import config.Printers.refcheck import reporting._ import Constants.Constant +import language.experimental.pureFunctions object RefChecks { import tpd._ @@ -619,7 +620,8 @@ object RefChecks { val missing = missingTermSymbols // Group missing members by the name of the underlying symbol, // to consolidate getters and setters. - val grouped = missing.groupBy(_.underlyingSymbol.name) + val grouped = missing.groupBy(sym => sym.underlyingSymbol.name: Name) + // !cc! type ascription needed val missingMethods = grouped.toList flatMap { case (name, syms) => @@ -861,7 +863,7 @@ object RefChecks { * Return an optional by name error message if this test fails. 
*/ def variantInheritanceProblems( - baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[() => String] = { + baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[() -> String] = { val superBT = self.baseType(middle) val thisBT = self.baseType(baseCls) val combinedBT = superBT.baseType(baseCls) diff --git a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala index e3f5382ecad7..b96d80345bc3 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala @@ -19,12 +19,13 @@ import ast.Trees.genericEmptyTree import annotation.{tailrec, constructorOnly} import ast.tpd._ import Synthesizer._ +import language.experimental.pureFunctions /** Synthesize terms for special classes */ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): /** Handlers to synthesize implicits for special types */ - type SpecialHandler = (Type, Span) => Context ?=> TreeWithErrors + type SpecialHandler = (Type, Span) -> Context ?-> TreeWithErrors private type SpecialHandlers = List[(ClassSymbol, SpecialHandler)] val synthesizedClassTag: SpecialHandler = (formal, span) => @@ -475,7 +476,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): if acceptableMsg.isEmpty && clsIsGenericSum then val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString))) - def internalError(msg: => String)(using Context): Unit = + def internalError(msg: -> String)(using Context): Unit = report.error(i"""Internal error when synthesizing sum mirror for $cls: |$msg""".stripMargin, ctx.source.atSpan(span)) @@ -595,7 +596,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case JavaArrayType(elemTp) => defn.ArrayOf(escapeJavaArray(elemTp)) case _ => tp - private enum ManifestKind: + private enum ManifestKind extends caps.Pure: // !cc! should all enums be Pure? 
case Full, Opt, Clss /** The kind that should be used for an array element, if we are `OptManifest` then this diff --git a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala index 032bed38482c..5d708281e6fa 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala @@ -51,6 +51,7 @@ import Nullables._ import NullOpsDecorator._ import cc.CheckCaptures import config.Config +import language.experimental.pureFunctions import scala.annotation.constructorOnly @@ -678,7 +679,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer errorTree(tree, "cannot convert to type selection") // will never be printed due to fallback } - def selectWithFallback(fallBack: Context ?=> Tree) = + def selectWithFallback(fallBack: Context ?-> Tree) = tryAlternatively(typeSelectOnTerm)(fallBack) if (tree.qualifier.isType) { @@ -1102,7 +1103,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * expected type of a block is the anonymous class defined inside it. In that * case there's technically a leak which is not removed by the ascription. 
*/ - protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol])(using Context): Tree = { + protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: -> List[Symbol])(using Context): Tree = { def ascribeType(tree: Tree, pt: Type): Tree = tree match { case block @ Block(stats, expr) if !expr.isInstanceOf[Closure] => val expr1 = ascribeType(expr, pt) @@ -2823,7 +2824,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tupleXXLobj = untpd.ref(defn.TupleXXLModule.termRef) val app = untpd.cpy.Apply(tree)(tupleXXLobj, elems.map(untpd.TypedSplice(_))) .withSpan(tree.span) - val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) + val app1 = typed(app, defn.TupleXXLClass.typeRef) if (ctx.mode.is(Mode.Pattern)) app1 else { val elemTpes = elems.lazyZip(pts).map((elem, pt) => @@ -3169,7 +3170,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPattern(tree: untpd.Tree, selType: Type = WildcardType)(using Context): Tree = withMode(Mode.Pattern)(typed(tree, selType)) - def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = { + def tryEither[T](op: Context ?-> T)(fallBack: (T, TyperState) => T)(using Context): T = { val nestedCtx = ctx.fresh.setNewTyperState() val result = op(using nestedCtx) if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) { @@ -3186,7 +3187,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back * to errors and result of `op1`. 
*/ - def tryAlternatively[T](op1: Context ?=> T)(op2: Context ?=> T)(using Context): T = + def tryAlternatively[T](op1: Context ?-> T)(op2: Context ?-> T)(using Context): T = tryEither(op1) { (failedVal, failedState) => tryEither(op2) { (_, _) => failedState.commit() @@ -4208,7 +4209,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Types the body Scala 2 macro declaration `def f = macro ` */ protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = // TODO check that call is to a method with valid signature - def typedPrefix(tree: untpd.RefTree)(splice: Context ?=> Tree => Tree)(using Context): Tree = { + def typedPrefix(tree: untpd.RefTree)(splice: Context ?-> Tree -> Tree)(using Context): Tree = { tryAlternatively { splice(typedExpr(tree, defn.AnyType)) } { diff --git a/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala b/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala index 020303c18bc2..d24a9ab3ddb2 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala @@ -3,7 +3,7 @@ package dotc.util /** A class for the reading part of mutable or immutable maps. */ -abstract class ReadOnlyMap[Key, Value]: +abstract class ReadOnlyMap[Key, Value] extends caps.Pure: def lookup(x: Key): Value | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala b/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala index 4826d02743a9..318a04e846fe 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc.util /** A class for the readonly part of mutable sets. */ -abstract class ReadOnlySet[T]: +abstract class ReadOnlySet[T] extends caps.Pure: /** The entry in the set such that `isEqual(x, entry)`, or else `null`. 
*/ def lookup(x: T): T | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala b/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala index 4dd897dd082a..75addc916b78 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala @@ -2,6 +2,7 @@ package dotty.tools.dotc.util import scala.collection.mutable.ArrayBuffer import scala.util.chaining._ +import language.experimental.pureFunctions /** A wrapper for a list of cached instances of a type `T`. * The wrapper is recursion-reentrant: several instances are kept, so @@ -14,7 +15,7 @@ import scala.util.chaining._ * * Ported from scala.reflect.internal.util.ReusableInstance */ -final class ReusableInstance[T <: AnyRef] private (make: => T) { +final class ReusableInstance[T <: AnyRef] private (make: -> T) { private[this] val cache = new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) private[this] var taken = 0 @@ -29,5 +30,5 @@ final class ReusableInstance[T <: AnyRef] private (make: => T) { object ReusableInstance { private inline val InitialSize = 4 - def apply[T <: AnyRef](make: => T): ReusableInstance[T] = new ReusableInstance[T](make) + def apply[T <: AnyRef](make: -> T): ReusableInstance[T] = new ReusableInstance[T](make) } diff --git a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala index 2f202bc05921..2b4aa6eda48e 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala @@ -5,7 +5,7 @@ import collection.mutable.ListBuffer /** A simple linked map with `eq` as the key comparison, optimized for small maps. * It has linear complexity for `apply`, `updated`, and `remove`. 
*/ -abstract class SimpleIdentityMap[K <: AnyRef, +V <: AnyRef] extends (K => V | Null) { +sealed abstract class SimpleIdentityMap[K <: AnyRef, +V <: AnyRef] extends (K => V | Null) { final def isEmpty: Boolean = this eq SimpleIdentityMap.myEmpty def size: Int def apply(k: K): V | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala index dd766dc99c7e..32851fd823d5 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala @@ -7,7 +7,7 @@ import collection.mutable /** A simple linked set with `eq` as the comparison, optimized for small sets. * It has linear complexity for `contains`, `+`, and `-`. */ -abstract class SimpleIdentitySet[+Elem <: AnyRef] { +sealed abstract class SimpleIdentitySet[+Elem <: AnyRef] { def size: Int def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem] diff --git a/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala b/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala index 42d07869f74e..8a5a4828adfd 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala @@ -21,6 +21,7 @@ import java.nio.file.{FileSystemException, NoSuchFileException} import java.util.Optional import java.util.concurrent.atomic.AtomicInteger import java.util.regex.Pattern +import language.experimental.pureFunctions object ScriptSourceFile { @sharable private val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE) @@ -59,7 +60,7 @@ object ScriptSourceFile { } } -class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends interfaces.SourceFile { +class SourceFile(val file: AbstractFile, computeContent: -> Array[Char]) extends interfaces.SourceFile, caps.Pure { import SourceFile._ private var myContent: Array[Char] | Null = 
null @@ -278,7 +279,7 @@ object SourceFile { else SourceFile(file, chars) - def apply(file: AbstractFile | Null, computeContent: => Array[Char]): SourceFile = new SourceFile(file, computeContent) + def apply(file: AbstractFile | Null, computeContent: -> Array[Char]): SourceFile = new SourceFile(file, computeContent) } @sharable object NoSource extends SourceFile(NoAbstractFile, Array[Char]()) { diff --git a/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala b/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala index 29f9a34d2292..ef4350741036 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala @@ -12,7 +12,7 @@ import scala.annotation.internal.sharable /** A source position is comprised of a span and a source file */ case class SourcePosition(source: SourceFile, span: Span, outer: SourcePosition = NoSourcePosition) -extends SrcPos, interfaces.SourcePosition, Showable { +extends SrcPos, interfaces.SourcePosition, Showable, caps.Pure { def sourcePos(using Context) = this diff --git a/tests/pos-with-compiler-cc/dotc/util/common.scala b/tests/pos-with-compiler-cc/dotc/util/common.scala index 85ce9a29f2df..70e0e82a7d50 100644 --- a/tests/pos-with-compiler-cc/dotc/util/common.scala +++ b/tests/pos-with-compiler-cc/dotc/util/common.scala @@ -2,12 +2,13 @@ package dotty.tools.dotc package util import core.Types.WildcardType +import language.experimental.pureFunctions /** Common values hoisted out for performance */ object common { - val alwaysTrue: Any => Boolean = Function.const(true) - val alwaysFalse: Any => Boolean = Function.const(false) - val alwaysZero: Any => Int = Function.const(0) - val alwaysWildcardType: Any => WildcardType.type = Function.const(WildcardType) + val alwaysTrue: Any -> Boolean = Function.const(true) + val alwaysFalse: Any -> Boolean = Function.const(false) + val alwaysZero: Any -> Int = Function.const(0) + val alwaysWildcardType: Any -> 
WildcardType.type = Function.const(WildcardType) }