diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 022ffbed5408..f7a08d1640ee 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -164,10 +164,15 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint private var finalizeActions = mutable.ListBuffer[() => Unit]() /** Will be set to true if any of the compiled compilation units contains - * a pureFunctions or captureChecking language import. + * a pureFunctions language import. */ var pureFunsImportEncountered = false + /** Will be set to true if any of the compiled compilation units contains + * a captureChecking language import. + */ + var ccImportEncountered = false + def compile(files: List[AbstractFile]): Unit = try val sources = files.map(runContext.getSource(_)) @@ -229,6 +234,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint def runPhases(using Context) = { var lastPrintedTree: PrintedTree = NoPrintedTree val profiler = ctx.profiler + var phasesWereAdjusted = false for (phase <- ctx.base.allPhases) if (phase.isRunnable) @@ -247,6 +253,11 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) ctx.typerState.gc() } + if !phasesWereAdjusted then + phasesWereAdjusted = true + if !Feature.ccEnabledSomewhere then + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) profiler.finished() } diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 3261cb1d90f8..0ede1825e611 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -166,8 +166,49 @@ extension (tp: Type) case CapturingType(_, _) => true case _ => false + /** Is type known to 
be always pure by its class structure, + * so that adding a capture set to it would not make sense? + */ + def isAlwaysPure(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.isPureClass + else tp.superType.isAlwaysPure + case CapturingType(parent, refs) => + parent.isAlwaysPure || refs.isAlwaysEmpty + case tp: TypeProxy => + tp.superType.isAlwaysPure + case tp: AndType => + tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure + case tp: OrType => + tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure + case _ => + false + +extension (cls: ClassSymbol) + + def pureBaseClass(using Context): Option[Symbol] = + cls.baseClasses.find(bc => + defn.pureBaseClasses.contains(bc) + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + extension (sym: Symbol) + /** A class is pure if: + * - one of its base classes has an explicitly declared self type with an empty capture set + * - or it is a value class + * - or it is an exception + * - or it is one of Nothing, Null, or String + */ + def isPureClass(using Context): Boolean = sym match + case cls: ClassSymbol => + cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) + case _ => + false + /** Does this symbol allow results carrying the universal capability? 
* Currently this is true only for function type applies (since their * results are unboxed) and `erasedValue` since this function is magic in diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index d3e32ac538a4..6bf6d7770d8b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -271,7 +271,7 @@ sealed abstract class CaptureSet extends Showable: map(Substituters.SubstParamsMap(tl, to)) /** Invoke handler if this set has (or later aquires) the root capability `*` */ - def disallowRootCapability(handler: () => Unit)(using Context): this.type = + def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = if isUniversal then handler() this @@ -373,7 +373,7 @@ object CaptureSet: def isAlwaysEmpty = false /** A handler to be invoked if the root reference `*` is added to this set */ - var addRootHandler: () => Unit = () => () + var rootAddedHandler: () => Context ?=> Unit = () => () var description: String = "" @@ -404,7 +404,7 @@ object CaptureSet: def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = if !isConst && recordElemsState() then elems ++= newElems - if isUniversal then addRootHandler() + if isUniversal then rootAddedHandler() // assert(id != 2 || elems.size != 2, this) (CompareResult.OK /: deps) { (r, dep) => r.andAlso(dep.tryInclude(newElems, this)) @@ -421,8 +421,8 @@ object CaptureSet: else CompareResult.fail(this) - override def disallowRootCapability(handler: () => Unit)(using Context): this.type = - addRootHandler = handler + override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = + rootAddedHandler = handler super.disallowRootCapability(handler) private var computingApprox = false diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 
cf1d4266e89b..37a16a870f31 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -10,7 +10,8 @@ import config.Printers.{capt, recheckr} import config.{Config, Feature} import ast.{tpd, untpd, Trees} import Trees.* -import typer.RefChecks.{checkAllOverrides, checkParents} +import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents} +import typer.Checking.{checkBounds, checkAppliedTypesIn} import util.{SimpleIdentitySet, EqHashMap, SrcPos} import transform.SymUtils.* import transform.{Recheck, PreRecheck} @@ -18,6 +19,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} import StdNames.nme +import NameKinds.DefaultGetterName import reporting.trace /** The capture checker */ @@ -335,12 +337,21 @@ class CheckCaptures extends Recheck, SymTransformer: override def recheckApply(tree: Apply, pt: Type)(using Context): Type = val meth = tree.fun.symbol includeCallCaptures(meth, tree.srcPos) - if meth == defn.Caps_unsafeBox || meth == defn.Caps_unsafeUnbox then + def mapArgUsing(f: Type => Type) = val arg :: Nil = tree.args: @unchecked - val argType0 = recheckStart(arg, pt) - .forceBoxStatus(boxed = meth == defn.Caps_unsafeBox) + val argType0 = f(recheckStart(arg, pt)) val argType = super.recheckFinish(argType0, arg, pt) super.recheckFinish(argType, tree, pt) + + if meth == defn.Caps_unsafeBox then + mapArgUsing(_.forceBoxStatus(true)) + else if meth == defn.Caps_unsafeUnbox then + mapArgUsing(_.forceBoxStatus(false)) + else if meth == defn.Caps_unsafeBoxFunArg then + mapArgUsing { + case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual, isErased) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual, isErased) + } else super.recheckApply(tree, pt) match case appType @ CapturingType(appType1, refs) => @@ -432,7 +443,8 @@ class CheckCaptures extends Recheck, SymTransformer: block match case 
closureDef(mdef) => pt.dealias match - case defn.FunctionOf(ptformals, _, _, _) if ptformals.forall(_.captureSet.isAlwaysEmpty) => + case defn.FunctionOf(ptformals, _, _, _) + if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => // Redo setup of the anonymous function so that formal parameters don't // get capture sets. This is important to avoid false widenings to `*` // when taking the base type of the actual closures's dependent function @@ -442,9 +454,10 @@ class CheckCaptures extends Recheck, SymTransformer: // First, undo the previous setup which installed a completer for `meth`. atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) .installAfter(preRecheckPhase) + // Next, update all parameter symbols to match expected formals meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => - psym.copySymDenotation(info = pformal).installAfter(preRecheckPhase) + psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) } // Next, update types of parameter ValDefs mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => @@ -452,36 +465,21 @@ class CheckCaptures extends Recheck, SymTransformer: tpt.rememberTypeAlways(pformal) } // Next, install a new completer reflecting the new parameters for the anonymous method + val mt = meth.info.asInstanceOf[MethodType] val completer = new LazyType: def complete(denot: SymDenotation)(using Context) = - denot.info = MethodType(ptformals, mdef.tpt.knownType) + denot.info = mt.companion(ptformals, mdef.tpt.knownType) .showing(i"simplify info of $meth to $result", capt) recheckDef(mdef, meth) - meth.copySymDenotation(info = completer, initFlags = meth.flags &~ Touched) - .installAfter(preRecheckPhase) + meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) case _ => case _ => super.recheckBlock(block, pt) - /** If `rhsProto` has `*` as its capture set, wrap `rhs` in a `unsafeBox`. 
- * Used to infer `unsafeBox` for expressions that get assigned to variables - * that have universal capture set. - */ - def maybeBox(rhs: Tree, rhsProto: Type)(using Context): Tree = - if rhsProto.captureSet.isUniversal then - ref(defn.Caps_unsafeBox).appliedToType(rhsProto).appliedTo(rhs) - else rhs - - override def recheckAssign(tree: Assign)(using Context): Type = - val rhsProto = recheck(tree.lhs).widen - recheck(maybeBox(tree.rhs, rhsProto), rhsProto) - defn.UnitType - override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = try if !sym.is(Module) then // Modules are checked by checking the module class - if sym.is(Mutable) then recheck(maybeBox(tree.rhs, sym.info), sym.info) - else super.recheckValDef(tree, sym) + super.recheckValDef(tree, sym) finally if !sym.is(Param) then // Parameters with inferred types belong to anonymous methods. We need to wait @@ -503,7 +501,8 @@ class CheckCaptures extends Recheck, SymTransformer: /** Class-specific capture set relations: * 1. The capture set of a class includes the capture sets of its parents. * 2. The capture set of the self type of a class includes the capture set of the class. - * 3. The capture set of the self type of a class includes the capture set of every class parameter. + * 3. The capture set of the self type of a class includes the capture set of every class parameter, + * unless the parameter is marked @constructorOnly. 
*/ override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = val saved = curEnv @@ -515,7 +514,12 @@ class CheckCaptures extends Recheck, SymTransformer: val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do - checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + for pureBase <- cls.pureBaseClass do + checkSubset(thisSet, + CaptureSet.empty.withDescription(i"of pure base class $pureBase"), + tree.srcPos) super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -772,7 +776,8 @@ class CheckCaptures extends Recheck, SymTransformer: // We can't box/unbox the universal capability. Leave `actual` as it is // so we get an error in checkConforms. This tends to give better error // messages than disallowing the root capability in `criticalSet`. - capt.println(i"cannot box/unbox $actual vs $expected") + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") actual else // Disallow future addition of `*` to `criticalSet`. 
@@ -845,13 +850,21 @@ class CheckCaptures extends Recheck, SymTransformer: cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) } assert(roots.nonEmpty) - for root <- roots do - checkParents(root, parentTrees(root)) + for case root: ClassSymbol <- roots do + checkSelfAgainstParents(root, root.baseClasses) val selfType = root.asClass.classInfo.selfType interpolator(startingVariance = -1).traverse(selfType) if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } selfType match - case CapturingType(_, refs: CaptureSet.Var) if !refs.isUniversal => + case CapturingType(_, refs: CaptureSet.Var) + if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. report.error( i"""$root needs an explicitly declared self type since its |inferred self type $selfType @@ -885,15 +898,23 @@ class CheckCaptures extends Recheck, SymTransformer: val isLocal = sym.owner.ownersIterator.exists(_.isTerm) || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) - - // The following classes of definitions need explicit capture types ... - if !isLocal // ... since external capture types are not inferred - || sym.owner.is(Trait) // ... since we do OverridingPairs checking before capture inference - || sym.allOverriddenSymbols.nonEmpty // ... since we do override checking before capture inference - then + def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly + sym.is(Private) // private symbols can always have inferred types + || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be + // too annoying. 
This is a hole since a default getter's result type + // might leak into a type variable. + || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + def isNotPureThis(ref: CaptureRef) = ref match { + case ref: ThisType => !ref.cls.isPureClass + case _ => true + } + if !canUseInferred then val inferred = t.tpt.knownType def checkPure(tp: Type) = tp match - case CapturingType(_, refs) if !refs.elems.isEmpty => + case CapturingType(_, refs) + if !refs.elems.filter(isNotPureThis).isEmpty => val resultStr = if t.isInstanceOf[DefDef] then " result" else "" report.error( em"""Non-local $sym cannot have an inferred$resultStr type |since $inferredStr. |The type needs to be declared explicitly.""", t.srcPos) case _ => inferred.foreachPart(checkPure, StopAt.Static) + case t @ TypeApply(fun, args) => + fun.knownType.widen match + case tl: PolyType => + val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + arg.withType(arg.knownType.forceBoxStatus( + bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) + } + checkBounds(normArgs, tl) + case _ => case _ => } - + if !ctx.reporter.errorsReported then + // We don't report errors here if previous errors were reported, because other + // errors often result in bad applied types, but flagging these bad types gives + // often worse error messages than the original errors. 
+ val checkApplied = new TreeTraverser: + def traverse(t: Tree)(using Context) = t match + case tree: InferredTypeTree => + case tree: New => + case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) + case _ => traverseChildren(t) + checkApplied.traverse(unit) end CaptureChecker end CheckCaptures diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 42c80e524a6e..95f2e71437a8 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -98,7 +98,10 @@ extends tpd.TreeTraverser: def addCaptureRefinements(tp: Type): Type = tp match case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => tp.typeSymbol match - case cls: ClassSymbol if !defn.isFunctionClass(cls) => + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => + // We assume that Java classes can refer to capturing Scala types only indirectly, + // using type parameters. Hence, no need to refine them. cls.paramGetters.foldLeft(tp) { (core, getter) => if getter.termRef.isTracked then val getterType = tp.memberInfo(getter).strippedDealias @@ -117,14 +120,14 @@ extends tpd.TreeTraverser: case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol if sym.isClass then - tp.typeSymbol == defn.AnyClass + sym == defn.AnyClass // we assume Any is a shorthand of {*} Any, so if Any is an upper // bound, the type is taken to be impure. else superTypeIsImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => superTypeIsImpure(tp.underlying) case tp: AndType => - superTypeIsImpure(tp.tp1) || canHaveInferredCapture(tp.tp2) + superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) case tp: OrType => superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) case _ => @@ -132,23 +135,26 @@ extends tpd.TreeTraverser: }.showing(i"super type is impure $tp = $result", capt) /** Should a capture set variable be added on type `tp`? 
*/ - def canHaveInferredCapture(tp: Type): Boolean = { + def needsVariable(tp: Type): Boolean = { tp.typeParams.isEmpty && tp.match case tp: (TypeRef | AppliedType) => val tp1 = tp.dealias - if tp1 ne tp then canHaveInferredCapture(tp1) + if tp1 ne tp then needsVariable(tp1) else val sym = tp1.typeSymbol - if sym.isClass then !sym.isValueClass && sym != defn.AnyClass + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass else superTypeIsImpure(tp1) case tp: (RefinedOrRecType | MatchType) => - canHaveInferredCapture(tp.underlying) + needsVariable(tp.underlying) case tp: AndType => - canHaveInferredCapture(tp.tp1) && canHaveInferredCapture(tp.tp2) + needsVariable(tp.tp1) && needsVariable(tp.tp2) case tp: OrType => - canHaveInferredCapture(tp.tp1) || canHaveInferredCapture(tp.tp2) - case CapturingType(_, refs) => - refs.isConst && !refs.isUniversal + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {*}, an added variable would not change anything case _ => false }.showing(i"can have inferred capture $tp = $result", capt) @@ -181,7 +187,7 @@ extends tpd.TreeTraverser: CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case _ if canHaveInferredCapture(tp) => + case _ if needsVariable(tp) => val cs = tp.dealias match case CapturingType(_, refs) => CaptureSet.Var(refs.elems) case _ => CaptureSet.Var() @@ -382,20 +388,18 @@ extends tpd.TreeTraverser: return tree.tpt match case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => + tree.paramss.foreach(traverse) transformTT(tpt, boxed = false, exact = true) + traverse(tree.rhs) //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") case _ => - traverseChildren(tree) + 
traverseChildren(tree) case tree @ ValDef(_, tpt: TypeTree, _) => - val isVar = tree.symbol.is(Mutable) - val overrides = tree.symbol.allOverriddenSymbols.hasNext - //if overrides then println(i"transforming overriding ${tree.symbol}") - if isVar || overrides then - transformTT(tpt, - boxed = isVar, // types of mutable variables are boxed - exact = overrides // types of symbols that override a parent don't get a capture set - ) - traverseChildren(tree) + transformTT(tpt, + boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed + exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set + ) + traverse(tree.rhs) case tree @ TypeApply(fn, args) => traverse(fn) for case arg: TypeTree <- args do diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index e8f7fd502baa..dacbd27e0f35 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -31,10 +31,12 @@ object Synthetics: * The types of these symbols are transformed in a special way without * looking at the definitions's RHS */ - def needsTransform(sym: SymDenotation)(using Context): Boolean = - isSyntheticCopyMethod(sym) - || isSyntheticCompanionMethod(sym, nme.apply, nme.unapply) - || isSyntheticCopyDefaultGetterMethod(sym) + def needsTransform(symd: SymDenotation)(using Context): Boolean = + isSyntheticCopyMethod(symd) + || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) + || isSyntheticCopyDefaultGetterMethod(symd) + || (symd.symbol eq defn.Object_eq) + || (symd.symbol eq defn.Object_ne) /** Method is excluded from regular capture checking. 
* Excluded are synthetic class members @@ -141,13 +143,16 @@ object Synthetics: /** Drop added capture information from the type of an `unapply` */ private def dropUnapplyCaptures(info: Type)(using Context): Type = info match case info: MethodType => - val CapturingType(oldParamInfo, _) :: Nil = info.paramInfos: @unchecked - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + info.paramInfos match + case CapturingType(oldParamInfo, _) :: Nil => + def oldResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = oldResult(tp.resType)) + case CapturingType(tp, _) => + tp + info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + case _ => + info case info: PolyType => info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) @@ -163,7 +168,9 @@ object Synthetics: sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = addCaptureDeps(sym.info)) - + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method * of a case class, transform it back to what it was before the CC phase. 
@@ -176,5 +183,7 @@ object Synthetics: sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = dropCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) end Synthetics \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index d836716c3901..e7117f542384 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -79,19 +79,27 @@ object Feature: def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + /** Is pureFunctions enabled for this compilation unit? */ def pureFunsEnabled(using Context) = enabledBySetting(pureFunctions) || ctx.compilationUnit.knowsPureFuns || ccEnabled + /** Is captureChecking enabled for this compilation unit? */ def ccEnabled(using Context) = enabledBySetting(captureChecking) || ctx.compilationUnit.needsCaptureChecking + /** Is pureFunctions enabled for any of the currently compiled compilation units? */ def pureFunsEnabledSomewhere(using Context) = enabledBySetting(pureFunctions) - || enabledBySetting(captureChecking) || ctx.run != null && ctx.run.nn.pureFunsImportEncountered + || ccEnabledSomewhere + + /** Is captureChecking enabled for any of the currently compiled compilation units? */ + def ccEnabledSomewhere(using Context) = + enabledBySetting(captureChecking) + || ctx.run != null && ctx.run.nn.ccImportEncountered def sourceVersionSetting(using Context): SourceVersion = SourceVersion.valueOf(ctx.settings.source.value) @@ -143,6 +151,11 @@ object Feature: def isExperimentalEnabled(using Context): Boolean = Properties.experimental && !ctx.settings.YnoExperimental.value + /** Handle language import `import language..` if it is one + * of the global imports `pureFunctions` or `captureChecking`. 
In this case + * make the compilation unit's and current run's fields accordingly. + * @return true iff import that was handled + */ def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = val fullFeatureName = QualifiedName(prefix, imported.asTermName) if fullFeatureName == pureFunctions then @@ -151,7 +164,7 @@ object Feature: true else if fullFeatureName == captureChecking then ctx.compilationUnit.needsCaptureChecking = true - if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true + if ctx.run != null then ctx.run.nn.ccImportEncountered = true true else false diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 4b441d512dec..a61701eee2d7 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -149,7 +149,7 @@ class CheckRealizable(using Context) { */ private def boundsRealizability(tp: Type) = { - val memberProblems = withMode(Mode.CheckBounds) { + val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { for { mbr <- tp.nonClassTypeMembers if !(mbr.info.loBound <:< mbr.info.hiBound) @@ -157,7 +157,7 @@ class CheckRealizable(using Context) { yield new HasProblemBounds(mbr.name, mbr.info) } - val refinementProblems = withMode(Mode.CheckBounds) { + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { for { name <- refinedNames(tp) if (name.isTypeName) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1dfa04822766..a3d8cabba971 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -58,6 +58,12 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + /** Used for match type reduction: If false, we don't recognize an abstract type + 
* to be a subtype of any of its base classes. This is in place only at the * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. + */ + protected var canWidenAbstract: Boolean = true + protected var myNecessaryConstraintsOnly = false /** When collecting the constraints needed for a particular subtyping * judgment to be true, we sometimes need to approximate the constraint @@ -839,13 +845,17 @@ trait ConstraintHandling { //checkPropagated(s"adding $description")(true) // DEBUG in case following fails checkPropagated(s"added $description") { addConstraintInvocations += 1 + val saved = canWidenAbstract + canWidenAbstract = true try bound match case bound: TypeParamRef if constraint contains bound => addParamBound(bound) case _ => val pbound = avoidLambdaParams(bound) kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) - finally addConstraintInvocations -= 1 + finally + canWidenAbstract = saved + addConstraintInvocations -= 1 } end addConstraint diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 174244b4a456..3373fee9fce6 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -961,9 +961,11 @@ class Definitions { def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") - @tu lazy val Caps_unsafeBox: Symbol = CapsModule.requiredMethod("unsafeBox") - @tu lazy val Caps_unsafeUnbox: Symbol = CapsModule.requiredMethod("unsafeUnbox") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") + @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") + 
@tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") // Annotation base classes @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") @@ -1347,6 +1349,15 @@ class Definitions { @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + /** Base classes that are assumed to be pure for the purposes of capture checking. + * Every class inheriting from a pure baseclass is pure. + */ + @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) + + /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking. + */ + @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 33ac3de70767..40a45b9f4678 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -70,10 +70,14 @@ object Mode { /** We are currently unpickling Scala2 info */ val Scala2Unpickling: Mode = newMode(13, "Scala2Unpickling") - /** We are currently checking bounds to be non-empty, so we should not - * do any widening when computing members of refined types. + /** Signifies one of two possible situations: + * 1. We are currently checking bounds to be non-empty, so we should not + * do any widening when computing members of refined types. + * 2. 
We are currently checking self type conformance, so we should not + * ignore capture sets added to otherwise pure classes (only needed + * for capture checking). */ - val CheckBounds: Mode = newMode(14, "CheckBounds") + val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") /** Use Scala2 scheme for overloading and implicit resolution */ val OldOverloadingResolution: Mode = newMode(15, "OldOverloadingResolution") diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index b4a2dcac1b85..205554e418ed 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -197,6 +197,14 @@ object Phases { config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.toList}") } + /** Unlink `phase` from Denot transformer chain. This means that + * any denotation transformer defined by the phase will not be executed. + */ + def unlinkPhaseAsDenotTransformer(phase: Phase)(using Context) = + for i <- 0 until nextDenotTransformerId.length do + if nextDenotTransformerId(i) == phase.id then + nextDenotTransformerId(i) = nextDenotTransformerId(phase.id + 1) + private var myParserPhase: Phase = _ private var myTyperPhase: Phase = _ private var myPostTyperPhase: Phase = _ diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 283a7e3a474e..e823e3c9a72f 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -23,7 +23,7 @@ import typer.ProtoTypes.constrained import typer.Applications.productSelectorTypes import reporting.trace import annotation.constructorOnly -import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam} +import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, 
boxedIfTypeParam, isAlwaysPure} /** Provides methods to compare types. */ @@ -60,8 +60,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Indicates whether the subtype check used GADT bounds */ private var GADTused: Boolean = false - protected var canWidenAbstract: Boolean = true - private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -522,7 +520,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling res case CapturingType(parent1, refs1) => - if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + if tp2.isAny then true + else if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + || !ctx.mode.is(Mode.CheckBoundsOrSelfType) && tp1.isAlwaysPure then recur(parent1, tp2) else thirdTry case tp1: AnnotatedType if !tp1.isRefining => @@ -826,7 +826,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if refs1.isAlwaysEmpty then recur(tp1, parent2) else subCaptures(refs1, refs2, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) - && recur(tp1.widen.stripCapturing, parent2) + && (recur(tp1.widen.stripCapturing, parent2) + || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) + // this alternative is needed in case the right hand side is a + // capturing type that contains the lhs as an |-alternative. 
+ ) catch case ex: AssertionError => println(i"assertion failed while compare captured $tp1 <:< $tp2") throw ex diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 3e3490b1f01b..9363b27b4dde 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -186,7 +186,7 @@ object TypeOps: if (normed.exists) normed else mapOver case tp: MethodicType => // See documentation of `Types#simplified` - val addTypeVars = new TypeMap: + val addTypeVars = new TypeMap with IdempotentCaptRefMap: val constraint = ctx.typerState.constraint def apply(t: Type): Type = t match case t: TypeParamRef => constraint.typeVarOfParam(t).orElse(t) @@ -615,7 +615,7 @@ object TypeOps: boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type)( - using Context): List[BoundsViolation] = withMode(Mode.CheckBounds) { + using Context): List[BoundsViolation] = withMode(Mode.CheckBoundsOrSelfType) { val argTypes = args.tpes /** Replace all wildcards in `tps` with `#` where `` is the diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 4bcd6f0d8de7..2c59be445010 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -765,11 +765,11 @@ object Types { val rinfo = tp.refinedInfo if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) { // simplified case that runs more efficiently val jointInfo = - if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBounds) then + if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBoundsOrSelfType) then // In normal situations, the only way to "improve" on rinfo is to return an empty type bounds // So, we do not lose anything essential in "widening" to rinfo. // We need to compute the precise info only when checking for empty bounds - // which is communicated by the CheckBounds mode. 
+ // which is communicated by the CheckBoundsOrSelfType mode. rinfo else if ctx.base.pendingMemberSearches.contains(name) then pinfo safe_& rinfo @@ -2338,7 +2338,8 @@ object Types { lastDenotation match { case lastd0: SingleDenotation => val lastd = lastd0.skipRemoved - if (lastd.validFor.runId == ctx.runId && (checkedPeriod != Nowhere)) finish(lastd.current) + if lastd.validFor.runId == ctx.runId && checkedPeriod != Nowhere then + finish(lastd.current) else lastd match { case lastd: SymDenotation => if (stillValid(lastd) && (checkedPeriod != Nowhere)) finish(lastd.current) @@ -2443,6 +2444,8 @@ object Types { } private def checkDenot()(using Context) = {} + //if name.toString == "getConstructor" then + // println(i"set denot of $this to ${denot.info}, ${denot.getClass}, ${Phases.phaseOf(denot.validFor.lastPhaseId)} at ${ctx.phase}") private def checkSymAssign(sym: Symbol)(using Context) = { def selfTypeOf(sym: Symbol) = @@ -6032,14 +6035,10 @@ object Types { /** A range of possible types between lower bound `lo` and upper bound `hi`. * Only used internally in `ApproximatingTypeMap`. */ - case class Range(lo: Type, hi: Type) extends UncachedGroundType { + case class Range(lo: Type, hi: Type) extends UncachedGroundType: assert(!lo.isInstanceOf[Range]) assert(!hi.isInstanceOf[Range]) - override def toText(printer: Printer): Text = - lo.toText(printer) ~ ".." 
~ hi.toText(printer) - } - /** Approximate wildcards by their bounds */ class AvoidWildcardsMap(using Context) extends ApproximatingTypeMap: protected def mapWild(t: WildcardType) = diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 60a5c74eedbc..e0c940fd81aa 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -278,6 +278,8 @@ class PlainPrinter(_ctx: Context) extends Printer { case ex: Throwable => Str("...") } "LazyRef(" ~ refTxt ~ ")" + case Range(lo, hi) => + toText(lo) ~ ".." ~ toText(hi) case _ => tp.fallbackToText(this) } diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 6d783854ae35..0ac9087a08c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -22,6 +22,7 @@ import StdNames.nme import reporting.trace import annotation.constructorOnly import cc.CaptureSet.IdempotentCaptRefMap +import dotty.tools.dotc.core.Denotations.SingleDenotation object Recheck: import tpd.* @@ -71,7 +72,7 @@ object Recheck: val symd = sym.denot symd.validFor.firstPhaseId == phase.id + 1 && (sym.originDenotation ne symd) - extension (tree: Tree) + extension [T <: Tree](tree: T) /** Remember `tpe` as the type of `tree`, which might be different from the * type stored in the tree itself, unless a type was already remembered for `tree`. 
@@ -86,11 +87,27 @@ object Recheck: if tpe ne tree.tpe then tree.putAttachment(RecheckedType, tpe) /** The remembered type of the tree, or if none was installed, the original type */ - def knownType = + def knownType: Type = tree.attachmentOrElse(RecheckedType, tree.tpe) def hasRememberedType: Boolean = tree.hasAttachment(RecheckedType) + def withKnownType(using Context): T = tree.getAttachment(RecheckedType) match + case Some(tpe) => tree.withType(tpe).asInstanceOf[T] + case None => tree + + extension (tpe: Type) + + /** Map ExprType => T to () ?=> T (and analogously for pure versions). + * Even though this phase runs after ElimByName, ExprTypes can still occur + * as by-name arguments of applied types. See note in doc comment for + * ElimByName phase. Test case is bynamefun.scala. + */ + def mapExprType(using Context): Type = tpe match + case ExprType(rt) => defn.ByNameFunction(rt) + case _ => tpe + + /** A base class that runs a simplified typer pass over an already re-typed program. The pass * does not transform trees but returns instead the re-typed type of each tree as it is * traversed. The Recheck phase must be directly preceded by a phase of type PreRecheck. 
@@ -116,7 +133,9 @@ abstract class Recheck extends Phase, SymTransformer: else sym def run(using Context): Unit = - newRechecker().checkUnit(ctx.compilationUnit) + val rechecker = newRechecker() + rechecker.checkUnit(ctx.compilationUnit) + rechecker.reset() def newRechecker()(using Context): Rechecker @@ -136,6 +155,12 @@ abstract class Recheck extends Phase, SymTransformer: */ def keepType(tree: Tree): Boolean = keepAllTypes + private val prevSelDenots = util.HashMap[NamedType, Denotation]() + + def reset()(using Context): Unit = + for (ref, mbr) <- prevSelDenots.iterator do + ref.withDenot(mbr) + /** Constant-folded rechecked type `tp` of tree `tree` */ protected def constFold(tree: Tree, tp: Type)(using Context): Type = val tree1 = tree.withType(tp) @@ -147,18 +172,42 @@ abstract class Recheck extends Phase, SymTransformer: def recheckSelect(tree: Select, pt: Type)(using Context): Type = val Select(qual, name) = tree - recheckSelection(tree, recheck(qual, AnySelectionProto).widenIfUnstable, name, pt) + val proto = + if tree.symbol == defn.Any_asInstanceOf then WildcardType + else AnySelectionProto + recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) + + /** When we select the `apply` of a function with type such as `(=> A) => B`, + * we need to convert the parameter type `=> A` to `() ?=> A`. See doc comment + * of `mapExprType`. 
+ */ + def normalizeByName(mbr: SingleDenotation)(using Context): SingleDenotation = mbr.info match + case mt: MethodType if mt.paramInfos.exists(_.isInstanceOf[ExprType]) => + mbr.derivedSingleDenotation(mbr.symbol, + mt.derivedLambdaType(paramInfos = mt.paramInfos.map(_.mapExprType))) + case _ => + mbr def recheckSelection(tree: Select, qualType: Type, name: Name, sharpen: Denotation => Denotation)(using Context): Type = if name.is(OuterSelectName) then tree.tpe else //val pre = ta.maybeSkolemizePrefix(qualType, name) - val mbr = sharpen( + val mbr = normalizeByName( + sharpen( qualType.findMember(name, qualType, excluded = if tree.symbol.is(Private) then EmptyFlags else Private - )).suchThat(tree.symbol == _) - constFold(tree, qualType.select(name, mbr)) + )).suchThat(tree.symbol == _)) + val newType = tree.tpe match + case prevType: NamedType => + val prevDenot = prevType.denot + val newType = qualType.select(name, mbr) + if (newType eq prevType) && (mbr.info ne prevDenot.info) && !prevSelDenots.contains(prevType) then + prevSelDenots(prevType) = prevDenot + newType + case _ => + qualType.select(name, mbr) + constFold(tree, newType) //.showing(i"recheck select $qualType . 
$name : ${mbr.info} = $result") @@ -212,7 +261,8 @@ abstract class Recheck extends Phase, SymTransformer: mt.instantiate(argTypes) def recheckApply(tree: Apply, pt: Type)(using Context): Type = - recheck(tree.fun).widen match + val funtpe = recheck(tree.fun) + funtpe.widen match case fntpe: MethodType => assert(fntpe.paramInfos.hasSameLengthAs(tree.args)) val formals = @@ -220,7 +270,7 @@ abstract class Recheck extends Phase, SymTransformer: else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheck(arg, formals.head) + val argType = recheck(arg, formals.head.mapExprType) val formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) @@ -232,6 +282,8 @@ abstract class Recheck extends Phase, SymTransformer: val argTypes = recheckArgs(tree.args, formals, fntpe.paramRefs) constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) //.showing(i"typed app $tree : $fntpe with ${tree.args}%, % : $argTypes%, % = $result") + case tp => + assert(false, i"unexpected type of ${tree.fun}: $funtpe") def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = recheck(tree.fun).widen match @@ -262,7 +314,7 @@ abstract class Recheck extends Phase, SymTransformer: recheckBlock(tree.stats, tree.expr, pt) def recheckInlined(tree: Inlined, pt: Type)(using Context): Type = - recheckBlock(tree.bindings, tree.expansion, pt) + recheckBlock(tree.bindings, tree.expansion, pt)(using inlineContext(tree.call)) def recheckIf(tree: If, pt: Type)(using Context): Type = recheck(tree.cond, defn.BooleanType) @@ -297,7 +349,20 @@ abstract class Recheck extends Phase, SymTransformer: val rawType = recheck(tree.expr) val ownType = avoidMap(rawType) - checkConforms(ownType, tree.from.symbol.returnProto, tree) + + // The pattern matching translation, which runs before this phase + // sometimes instantiates return types with singleton type 
alternatives + * but the returned expression is widened. We compensate by widening the expected + * type as well. See also `widenSkolems` in `checkConformsExpr` which fixes + * a more general problem. It turns out that pattern matching returns + * are not checked by Ycheck, that's why these problems were allowed to slip + * through. + def widened(tp: Type): Type = tp match + case tp: SingletonType => tp.widen + case tp: AndOrType => tp.derivedAndOrType(widened(tp.tp1), widened(tp.tp2)) + case tp @ AnnotatedType(tp1, ann) => tp.derivedAnnotatedType(widened(tp1), ann) + case _ => tp + checkConforms(ownType, widened(tree.from.symbol.returnProto), tree) defn.NothingType end recheckReturn @@ -423,6 +488,27 @@ abstract class Recheck extends Phase, SymTransformer: throw ex } + /** Typing and previous transforms sometimes leave skolem types in prefixes of + * NamedTypes in `expected` that do not match the `actual` Type. -Ycheck does + * not complain (need to find out why), but a full recheck does. We compensate + * by de-skolemizing everywhere in `expected` except when variance is negative. + * @return If `tp` contains SkolemTypes in covariant or invariant positions, + * the type where these SkolemTypes are mapped to their underlying type. + * Otherwise, `tp` itself + */ + def widenSkolems(tp: Type)(using Context): Type = + object widenSkolems extends TypeMap, IdempotentCaptRefMap: + var didWiden: Boolean = false + def apply(t: Type): Type = t match + case t: SkolemType if variance >= 0 => + didWiden = true + apply(t.underlying) + case t: LazyRef => t + case t @ AnnotatedType(t1, ann) => t.derivedAnnotatedType(apply(t1), ann) + case _ => mapOver(t) + val tp1 = widenSkolems(tp) + if widenSkolems.didWiden then tp1 else tp + /** If true, print info for some successful checkConforms operations (failing ones give + * an error message in any case). 
*/ @@ -438,11 +524,16 @@ abstract class Recheck extends Phase, SymTransformer: def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = //println(i"check conforms $actual <:< $expected") - val isCompatible = + + def isCompatible(expected: Type): Boolean = actual <:< expected || expected.isRepeatedParam - && actual <:< expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass)) - if !isCompatible then + && isCompatible(expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass))) + || { + val widened = widenSkolems(expected) + (widened ne expected) && isCompatible(widened) + } + if !isCompatible(expected) then recheckr.println(i"conforms failed for ${tree}: $actual vs $expected") err.typeMismatch(tree.withType(actual), expected) else if debugSuccesses then @@ -450,6 +541,7 @@ abstract class Recheck extends Phase, SymTransformer: case _: Ident => println(i"SUCCESS $tree:\n${TypeComparer.explained(_.isSubType(actual, expected))}") case _ => + end checkConformsExpr def checkUnit(unit: CompilationUnit)(using Context): Unit = recheck(unit.tpdTree) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 6ab0e7fc499f..dc8defa90eef 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -91,7 +91,7 @@ class TreeChecker extends Phase with SymTransformer { if (ctx.phaseId <= erasurePhase.id) { val initial = symd.initial assert(symd == initial || symd.signature == initial.signature, - i"""Signature of ${sym.showLocated} changed at phase ${ctx.phase.prevMega} + i"""Signature of ${sym} in ${sym.ownersIterator.toList}%, % changed at phase ${ctx.phase.prevMega} |Initial info: ${initial.info} |Initial sig : ${initial.signature} |Current info: ${symd.info} diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala 
b/compiler/src/dotty/tools/dotc/typer/Checking.scala index c53213d7bd37..16499f6c1f3e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -67,11 +67,12 @@ object Checking { */ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type = NoType, tpt: Tree = EmptyTree)(using Context): Unit = - args.lazyZip(boundss).foreach { (arg, bound) => - if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then - errorTree(arg, - showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) - } + if ctx.phase != Phases.checkCapturesPhase then + args.lazyZip(boundss).foreach { (arg, bound) => + if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then + errorTree(arg, + showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) + } for (arg, which, bound) <- TypeOps.boundsViolations(args, boundss, instantiate, app) do report.error( showInferred(DoesNotConformToBound(arg.tpe, which, bound), app, tpt), diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 044dd7bb8528..00b037a4e259 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -117,7 +117,7 @@ class CrossVersionChecks extends MiniPhase: /** Check that classes extending experimental classes or nested in experimental classes have the @experimental annotation. 
*/ private def checkExperimentalInheritance(cls: ClassSymbol)(using Context): Unit = - if !cls.isAnonymousClass && !cls.hasAnnotation(defn.ExperimentalAnnot) then + if !cls.isAnonymousClass && !cls.isInExperimentalScope then cls.info.parents.find(_.typeSymbol.isExperimental) match case Some(parent) => report.error(em"extension of experimental ${parent.typeSymbol} must have @experimental annotation", cls.srcPos) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 1aa53d866b5e..8afcec4dee63 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -91,24 +91,42 @@ object RefChecks { cls.thisType } + /** - Check that self type of `cls` conforms to self types of all `parents` as seen from + * `cls.thisType` + * - If self type of `cls` is explicit, check that it conforms to the self types + * of all its class symbols. + * @param deep If true and a self type of a parent is not given explicitly, recurse to + * check against the parents of the parent. 
This is needed when capture checking, + * since we assume (& check) that the capture set of an inferred self type + * is the intersection of the capture sets of all its parents + */ + def checkSelfAgainstParents(cls: ClassSymbol, parents: List[Symbol])(using Context): Unit = + withMode(Mode.CheckBoundsOrSelfType) { + val cinfo = cls.classInfo + + def checkSelfConforms(other: ClassSymbol, category: String, relation: String) = + val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) + if otherSelf.exists then + if !(cinfo.selfType <:< otherSelf) then + report.error(DoesNotConformToSelfType(category, cinfo.selfType, cls, otherSelf, relation, other), + cls.srcPos) + + for psym <- parents do + checkSelfConforms(psym.asClass, "illegal inheritance", "parent") + for reqd <- cls.asClass.givenSelfType.classSymbols do + if reqd != cls then + checkSelfConforms(reqd, "missing requirement", "required") + } + end checkSelfAgainstParents + /** Check that self type of this class conforms to self types of parents * and required classes. Also check that only `enum` constructs extend * `java.lang.Enum` and no user-written class extends ContextFunctionN. 
*/ def checkParents(cls: Symbol, parentTrees: List[Tree])(using Context): Unit = cls.info match { case cinfo: ClassInfo => - def checkSelfConforms(other: ClassSymbol, category: String, relation: String) = { - val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) - if otherSelf.exists && !(cinfo.selfType <:< otherSelf) then - report.error(DoesNotConformToSelfType(category, cinfo.selfType, cls, otherSelf, relation, other), - cls.srcPos) - } val psyms = cls.asClass.parentSyms - for (psym <- psyms) - checkSelfConforms(psym.asClass, "illegal inheritance", "parent") - for reqd <- cinfo.cls.givenSelfType.classSymbols do - if reqd != cls then - checkSelfConforms(reqd, "missing requirement", "required") + checkSelfAgainstParents(cls.asClass, psyms) def isClassExtendingJavaEnum = !cls.isOneOf(Enum | Trait) && psyms.contains(defn.JavaEnumClass) @@ -774,17 +792,19 @@ object RefChecks { // For each member, check that the type of its symbol, as seen from `self` // can override the info of this member - for (name <- membersToCheck) - for (mbrd <- self.member(name).alternatives) { - val mbr = mbrd.symbol - val mbrType = mbr.info.asSeenFrom(self, mbr.owner) - if (!mbrType.overrides(mbrd.info, relaxedCheck = false, matchLoosely = true)) - report.errorOrMigrationWarning( - em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz - | its type $mbrType - | does not conform to ${mbrd.info}""", - (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) + withMode(Mode.IgnoreCaptures) { + for (name <- membersToCheck) + for (mbrd <- self.member(name).alternatives) { + val mbr = mbrd.symbol + val mbrType = mbr.info.asSeenFrom(self, mbr.owner) + if (!mbrType.overrides(mbrd.info, relaxedCheck = false, matchLoosely = true)) + report.errorOrMigrationWarning( + em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz + | its type $mbrType + | does not conform to ${mbrd.info}""", + (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) } 
+ } } /** Check that inheriting a case class does not constitute a variant refinement diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala index 57a58151acc7..4ec9ae40c421 100644 --- a/compiler/src/dotty/tools/package.scala +++ b/compiler/src/dotty/tools/package.scala @@ -18,7 +18,7 @@ package object tools { * Flow-typing under explicit nulls will automatically insert many necessary * occurrences of uncheckedNN. */ - inline def uncheckedNN: T = x.asInstanceOf[T] + transparent inline def uncheckedNN: T = x.asInstanceOf[T] inline def toOption: Option[T] = if x == null then None else Some(x.asInstanceOf[T]) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index a7d8778d4c61..30126f07b49a 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -84,7 +84,6 @@ boxmap-paper.scala # Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled caps-universal.scala - # GADT cast applied to singleton type difference i4176-gadt.scala diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index 7ea05c615e0d..e9d0e26f33b0 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -36,6 +36,12 @@ class BootstrappedOnlyCompilationTests { ).checkCompile() } + @Test def posWithCompilerCC: Unit = + implicit val testGroup: TestGroup = TestGroup("compilePosWithCompilerCC") + aggregateTests( + compileDir("tests/pos-with-compiler-cc/dotc", withCompilerOptions.and("-language:experimental.captureChecking")) + ).checkCompile() + @Test def posWithCompiler: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePosWithCompiler") aggregateTests( diff --git 
a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 932acb90371c..c1b465ad4a88 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -242,7 +242,8 @@ class CompilationTests { ).checkCompile() } - @Test def recheck: Unit = + //@Test disabled in favor of posWithCompilerCC to save time. + def recheck: Unit = given TestGroup = TestGroup("recheck") aggregateTests( compileFilesInDir("tests/new", recheckOptions), diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 21b2f7a4dece..fb1721f98b35 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -7,12 +7,26 @@ import annotation.experimental /** The universal capture reference */ val `*`: Any = () - /** If argument is of type `cs T`, converts to type `box cs T`. This - * avoids the error that would be raised when boxing `*`. - */ - extension [T](x: T) def unsafeBox: T = x + object unsafe: + + /** If argument is of type `cs T`, converts to type `box cs T`. This + * avoids the error that would be raised when boxing `*`. + */ + extension [T](x: T) def unsafeBox: T = x + + /** If argument is of type `box cs T`, converts to type `cs T`. This + * avoids the error that would be raised when unboxing `*`. + */ + extension [T](x: T) def unsafeUnbox: T = x + + /** If argument is of type `box cs T`, converts to type `cs T`. This + * avoids the error that would be raised when unboxing `*`. + */ + extension [T, U](f: T => U) def unsafeBoxFunArg: T => U = f + end unsafe - /** If argument is of type `box cs T`, converts to type `cs T`. This - * avoids the error that would be raised when unboxing `*`. + /** Mixing in this trait forces a trait or class to be pure, i.e. + * have no capabilities retained in its self type. 
*/ - extension [T](x: T) def unsafeUnbox: T = x + trait Pure: + this: Pure => diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 81510d22d2c2..408930bbeee9 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -19,5 +19,7 @@ object MiMaFilters { ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$pureFunctions$"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$captureChecking$"), ProblemFilters.exclude[MissingClassProblem]("scala.caps"), + ProblemFilters.exclude[MissingClassProblem]("scala.caps$Pure"), + ProblemFilters.exclude[MissingClassProblem]("scala.caps$unsafe$"), ) } diff --git a/tests/neg-custom-args/captures/boundschecks.scala b/tests/neg-custom-args/captures/boundschecks.scala new file mode 100644 index 000000000000..cf4eab28f19d --- /dev/null +++ b/tests/neg-custom-args/captures/boundschecks.scala @@ -0,0 +1,18 @@ +object test { + + class Tree + + def f[X <: Tree](x: X): Unit = () + + class C[X <: Tree](x: X) + + def foo(t: {*} Tree) = + f(t) // error + f[{*} Tree](t) // error + f[Tree](t) // error + val c1 = C(t) // error + val c2 = C[{*} Tree](t) // error + val c3 = C[Tree](t) // error + + val foo: C[{*} Tree] = ??? +} diff --git a/tests/neg-custom-args/captures/boundschecks2.scala b/tests/neg-custom-args/captures/boundschecks2.scala new file mode 100644 index 000000000000..f6927b04931b --- /dev/null +++ b/tests/neg-custom-args/captures/boundschecks2.scala @@ -0,0 +1,13 @@ +object test { + + class Tree + + def f[X <: Tree](x: X): Unit = () + + class C[X <: Tree](x: X) + + val foo: C[{*} Tree] = ??? // error + type T = C[{*} Tree] // error + val bar: T -> T = ??? + val baz: C[{*} Tree] -> Unit = ??? 
// error +} diff --git a/tests/neg-custom-args/captures/capt-depfun.scala b/tests/neg-custom-args/captures/capt-depfun.scala index a74764f432c7..c01eed7c4b25 100644 --- a/tests/neg-custom-args/captures/capt-depfun.scala +++ b/tests/neg-custom-args/captures/capt-depfun.scala @@ -1,8 +1,9 @@ import annotation.retains class C type Cap = C @retains(caps.*) +class Str def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? - val ac: ((x: Cap) => String @retains(x) => String @retains(x)) = ??? - val dc: (({y, z} String) => {y, z} String) = ac(g()) // error + val ac: ((x: Cap) => Str @retains(x) => Str @retains(x)) = ??? + val dc: (({y, z} Str) => {y, z} Str) = ac(g()) // error diff --git a/tests/neg-custom-args/captures/capt-depfun2.scala b/tests/neg-custom-args/captures/capt-depfun2.scala index 74b9441593c1..52dd74aabf9f 100644 --- a/tests/neg-custom-args/captures/capt-depfun2.scala +++ b/tests/neg-custom-args/captures/capt-depfun2.scala @@ -1,11 +1,12 @@ import annotation.retains class C type Cap = C @retains(caps.*) +class Str def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? - val ac: ((x: Cap) => Array[String @retains(x)]) = ??? - val dc = ac(g()) // error: Needs explicit type Array[? >: String <: {y, z} String] + val ac: ((x: Cap) => Array[Str @retains(x)]) = ??? + val dc = ac(g()) // error: Needs explicit type Array[? 
>: Str <: {y, z} Str] // This is a shortcoming of rechecking since the originally inferred - // type is `Array[String]` and the actual type after rechecking - // cannot be expressed as `Array[C String]` for any capture set C \ No newline at end of file + // type is `Array[Str]` and the actual type after rechecking + // cannot be expressed as `Array[C Str]` for any capture set C \ No newline at end of file diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index c492df15078f..0049f42a5db5 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -9,10 +9,7 @@ 10 | class C2(val x: () => Int): // error | ^ | reference (C2.this.x : () => Int) is not included in allowed capture set {} of the self type of class C2 --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this.scala:17:8 --------------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error - | ^ - | illegal inheritance: self type {C4.this.f} C4 of class C4 does not conform to self type C3 - | of parent class C3 - | - | longer explanation available when compiling with `-explain` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (C4.this.f : () => Int) is not included in allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index d10519636ca8..086524d307a2 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,8 +1,6 @@ --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- 2 |class D extends C: // 
error - | ^ - | illegal inheritance: self type {*} D of class D does not conform to self type C - | of parent class C - | - | longer explanation available when compiling with `-explain` + |^ + |reference (scala.caps.* : Any) is not included in allowed capture set {} of pure base class class C +3 | this: {*} D => diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check new file mode 100644 index 000000000000..aca5d9217d64 --- /dev/null +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -0,0 +1,17 @@ +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- +2 |class Err extends Exception: // error + |^ + |reference (scala.caps.* : Any) is not included in allowed capture set {} of pure base class class Throwable +3 | self: {*} Err => +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:10:6 ---------------------------------------------- +10 |class Err4(c: {*} Any) extends AnyVal // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |reference (Err4.this.c : {*} Any) is not included in allowed capture set {} of pure base class class AnyVal +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- +7 | val x = c // error + | ^ + |(c : {*} Any) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- +8 | class Err3(c: {*} Any) extends Exception // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (Err3.this.c : {*} Any) is not included in allowed capture set {} of pure base class class Throwable diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala new file mode 100644 
index 000000000000..9f3539b7febf --- /dev/null +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -0,0 +1,12 @@ + +class Err extends Exception: // error + self: {*} Err => + +def test(c: {*} Any) = + class Err2 extends Exception: + val x = c // error + class Err3(c: {*} Any) extends Exception // error + +class Err4(c: {*} Any) extends AnyVal // error + + diff --git a/tests/neg-custom-args/captures/i15116.check b/tests/neg-custom-args/captures/i15116.check index 83c552087646..8b8d2d2e091b 100644 --- a/tests/neg-custom-args/captures/i15116.check +++ b/tests/neg-custom-args/captures/i15116.check @@ -9,7 +9,7 @@ 5 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Baz.this} Foo{m: {Baz.this} String} + | {Baz.this} Foo{m: {*} String} | with non-empty capture set {Baz.this}. | The type needs to be declared explicitly. -- Error: tests/neg-custom-args/captures/i15116.scala:7:6 -------------------------------------------------------------- @@ -23,6 +23,6 @@ 9 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Baz2.this} Foo{m: {Baz2.this} String} + | {Baz2.this} Foo{m: {*} String} | with non-empty capture set {Baz2.this}. | The type needs to be declared explicitly. 
diff --git a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index e43538ad97f7..1ad705556b88 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ b/tests/neg-custom-args/captures/lazylist.check @@ -5,6 +5,13 @@ | method tail of type -> {*} lazylists.LazyList[Nothing] has incompatible type | | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:17:15 ------------------------------------- +17 | def tail = xs() // error + | ^^^^ + | Found: {LazyCons.this.xs} lazylists.LazyList[T] + | Required: lazylists.LazyList[T] + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:35:29 ------------------------------------- 35 | val ref1c: LazyList[Int] = ref1 // error | ^^^^ @@ -33,10 +40,3 @@ | Required: {cap1, ref3, cap3} lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/lazylist.scala:17:6 ----------------------------------------------------------- -17 | def tail = xs() // error: cannot have an inferred type - | ^^^^^^^^^^^^^^^ - | Non-local method tail cannot have an inferred result type - | {LazyCons.this.xs} lazylists.LazyList[? T] - | with non-empty capture set {LazyCons.this.xs}. - | The type needs to be declared explicitly. 
diff --git a/tests/neg-custom-args/captures/lazylist.scala b/tests/neg-custom-args/captures/lazylist.scala index 56bfc3ea6da2..2674f15a0ee3 100644 --- a/tests/neg-custom-args/captures/lazylist.scala +++ b/tests/neg-custom-args/captures/lazylist.scala @@ -14,7 +14,7 @@ abstract class LazyList[+T]: class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: def isEmpty = false def head = x - def tail = xs() // error: cannot have an inferred type + def tail = xs() // error object LazyNil extends LazyList[Nothing]: def isEmpty = true diff --git a/tests/neg-custom-args/captures/selftype.scala b/tests/neg-custom-args/captures/selftype.scala new file mode 100644 index 000000000000..21148f625a7a --- /dev/null +++ b/tests/neg-custom-args/captures/selftype.scala @@ -0,0 +1,4 @@ +@annotation.experimental class C(x: () => Unit) extends caps.Pure // error + +@annotation.experimental class D(@annotation.constructorOnly x: () => Unit) extends caps.Pure // ok + diff --git a/tests/neg-custom-args/captures/try.check b/tests/neg-custom-args/captures/try.check index 13aff2661b85..c9cc7f7c1b56 100644 --- a/tests/neg-custom-args/captures/try.check +++ b/tests/neg-custom-args/captures/try.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:23:49 ------------------------------------------ 23 | val a = handle[Exception, CanThrow[Exception]] { // error | ^ - | Found: ? ({*} CT[Exception]) -> CanThrow[? Exception] + | Found: ? ({*} CT[Exception]) -> CanThrow[Exception] | Required: CanThrow[Exception] => box {*} CT[Exception] 24 | (x: CanThrow[Exception]) => x 25 | }{ @@ -10,7 +10,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:29:43 ------------------------------------------ 29 | val b = handle[Exception, () -> Nothing] { // error | ^ - | Found: ? (x: {*} CT[Exception]) -> {x} () -> ? Nothing + | Found: ? 
(x: {*} CT[Exception]) -> {x} () -> Nothing | Required: CanThrow[Exception] => () -> Nothing 30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) 31 | } { diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index e4f28fd45e93..8fe72a76493a 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -9,7 +9,7 @@ 15 | val u = a // error | ^ | Found: (a : box {*} String -> String) - | Required: {*} (x$0: ? String) -> ? String + | Required: {*} (x$0: String) -> String | | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/captures/vars.scala:16:2 --------------------------------------------------------------- @@ -25,7 +25,7 @@ -- Error: tests/neg-custom-args/captures/vars.scala:32:8 --------------------------------------------------------------- 32 | local { cap3 => // error | ^ - | The expression's type box {*} (x$0: ? String) -> ? String is not allowed to capture the root capability `*`. + | The expression's type box {*} (x$0: String) -> String is not allowed to capture the root capability `*`. | This usually means that a capability persists longer than its allowed lifetime. 
33 | def g(x: String): String = if cap3 == cap3 then "" else "a" 34 | g diff --git a/tests/pending/pos/i16268.scala b/tests/pending/pos/i16268.scala new file mode 100644 index 000000000000..6b44e71a2247 --- /dev/null +++ b/tests/pending/pos/i16268.scala @@ -0,0 +1,25 @@ +import language.experimental.captureChecking +class Tree +case class Thicket(trees: List[Tree]) extends Tree + +def test1(segments: List[{*} Tree]) = + val elems = segments flatMap { (t: {*} Tree) => t match // error + case ts: Thicket => ts.trees.tail + case t => Nil + } + elems + +def test2(segments: List[{*} Tree]) = + val f = (t: {*} Tree) => t match + case ts: Thicket => ts.trees.tail + case t => Nil + val elems = segments.flatMap(f) // error + elems + +def test3(c: {*} Any)(segments: List[{c} Tree]) = + val elems = segments flatMap { (t: {c} Tree) => t match + case ts: Thicket => ts.trees.tail + case t => Nil + } + elems + diff --git a/tests/pos-custom-args/captures/bynamefun.scala b/tests/pos-custom-args/captures/bynamefun.scala new file mode 100644 index 000000000000..86bad201ffc3 --- /dev/null +++ b/tests/pos-custom-args/captures/bynamefun.scala @@ -0,0 +1,11 @@ +object test: + class Plan(elem: Plan) + object SomePlan extends Plan(???) + def f1(expr: (-> Plan) -> Plan): Plan = expr(SomePlan) + f1 { onf => Plan(onf) } + def f2(expr: (=> Plan) -> Plan): Plan = ??? + f2 { onf => Plan(onf) } + def f3(expr: (-> Plan) => Plan): Plan = ??? + f1 { onf => Plan(onf) } + def f4(expr: (=> Plan) => Plan): Plan = ??? 
+ f2 { onf => Plan(onf) } diff --git a/tests/pos-custom-args/captures/capt-test.scala b/tests/pos-custom-args/captures/capt-test.scala index 6ee0d2a4d9f4..c61577e96eb1 100644 --- a/tests/pos-custom-args/captures/capt-test.scala +++ b/tests/pos-custom-args/captures/capt-test.scala @@ -21,6 +21,9 @@ def map[A, B](f: A => B)(xs: LIST[A]): LIST[B] = class C type Cap = {*} C +class Foo(x: Cap): + this: {x} Foo => + def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () def g(x: Cap): Unit = if d == x then () diff --git a/tests/pos-custom-args/captures/cmp-singleton-2.scala b/tests/pos-custom-args/captures/cmp-singleton-2.scala new file mode 100644 index 000000000000..daaa4add3858 --- /dev/null +++ b/tests/pos-custom-args/captures/cmp-singleton-2.scala @@ -0,0 +1,11 @@ +class T +class A extends T +class B extends T + +def test(tp: T) = + val mapping: Map[A, String] = ??? + + tp match + case a: A => mapping(a) match + case s: String => B() + case null => a diff --git a/tests/pos-custom-args/captures/cmp-singleton.scala b/tests/pos-custom-args/captures/cmp-singleton.scala new file mode 100644 index 000000000000..45b4009f5e89 --- /dev/null +++ b/tests/pos-custom-args/captures/cmp-singleton.scala @@ -0,0 +1,10 @@ +class Denotation +abstract class SingleDenotation extends Denotation +def goRefined: Denotation = + val foo: Denotation = ??? 
+ val joint = foo + joint match + case joint: SingleDenotation => + joint + case _ => + joint \ No newline at end of file diff --git a/tests/pos-custom-args/captures/foreach.scala b/tests/pos-custom-args/captures/foreach.scala new file mode 100644 index 000000000000..b7dfc49272a9 --- /dev/null +++ b/tests/pos-custom-args/captures/foreach.scala @@ -0,0 +1,4 @@ +import caps.unsafe.* +def test = + val tasks = new collection.mutable.ArrayBuffer[() => Unit] + val _: Unit = tasks.foreach(((task: () => Unit) => task()).unsafeBoxFunArg) diff --git a/tests/pos-custom-args/captures/gadt-ycheck.scala b/tests/pos-custom-args/captures/gadt-ycheck.scala new file mode 100644 index 000000000000..946763b53e7e --- /dev/null +++ b/tests/pos-custom-args/captures/gadt-ycheck.scala @@ -0,0 +1,14 @@ +package test + +import reflect.ClassTag +import language.experimental.pureFunctions + +object Settings: + val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) + + class Setting[T: ClassTag](propertyClass: Option[Class[?]]): + def tryToSet() = + def update(value: Any): String = ??? + implicitly[ClassTag[T]] match + case OptionTag => + update(Some(propertyClass.get.getConstructor().newInstance())) diff --git a/tests/pos-custom-args/captures/matchtypes.scala b/tests/pos-custom-args/captures/matchtypes.scala new file mode 100644 index 000000000000..b2442277f1f7 --- /dev/null +++ b/tests/pos-custom-args/captures/matchtypes.scala @@ -0,0 +1,10 @@ +type HEAD[X <: NonEmptyTuple] = X match { + case x *: (_ <: NonEmptyTuple) => x +} + +inline def head[A <: NonEmptyTuple](x: A): HEAD[A] = null.asInstanceOf[HEAD[A]] + +def show[A, T <: Tuple](x: A *: T) = + show1(head(x)) + show1(x.head) +def show1[A](x: A): String = ??? 
\ No newline at end of file diff --git a/tests/pos-custom-args/captures/overrides.scala b/tests/pos-custom-args/captures/overrides.scala index 66f19726ffa7..7e70afe7a327 100644 --- a/tests/pos-custom-args/captures/overrides.scala +++ b/tests/pos-custom-args/captures/overrides.scala @@ -12,15 +12,3 @@ class Bar extends Foo: class Baz extends Bar: override def foo = () => println("baz") override def bar = "baz" - //override def toString = bar - -abstract class Message: - protected def msg: String - override def toString = msg - -abstract class SyntaxMsg extends Message - -class CyclicInheritance extends SyntaxMsg: - def msg = "cyclic" - - diff --git a/tests/pos-custom-args/captures/overrides/A.scala b/tests/pos-custom-args/captures/overrides/A.scala new file mode 100644 index 000000000000..6a81f8562164 --- /dev/null +++ b/tests/pos-custom-args/captures/overrides/A.scala @@ -0,0 +1,4 @@ +abstract class Message: + lazy val message: String = ??? + def rawMessage = message + diff --git a/tests/pos-custom-args/captures/overrides/B.scala b/tests/pos-custom-args/captures/overrides/B.scala new file mode 100644 index 000000000000..ce4a3f20f1d2 --- /dev/null +++ b/tests/pos-custom-args/captures/overrides/B.scala @@ -0,0 +1,6 @@ + +abstract class SyntaxMsg extends Message + +class CyclicInheritance extends SyntaxMsg + + diff --git a/tests/pos-custom-args/captures/selftypes.scala b/tests/pos-custom-args/captures/selftypes.scala new file mode 100644 index 000000000000..c1b8eefce506 --- /dev/null +++ b/tests/pos-custom-args/captures/selftypes.scala @@ -0,0 +1,15 @@ + import annotation.constructorOnly + trait A: + self: A => + def foo: Int + + abstract class B extends A: + def foo: Int + + class C extends B: + def foo = 1 + def derived = this + + class D(@constructorOnly op: Int => Int) extends C: + val x = 1//op(1) + diff --git a/tests/pos-custom-args/captures/unsafe-unbox.scala b/tests/pos-custom-args/captures/unsafe-unbox.scala index e846a7db1b69..b228d8c07925 100644 --- 
a/tests/pos-custom-args/captures/unsafe-unbox.scala +++ b/tests/pos-custom-args/captures/unsafe-unbox.scala @@ -1,4 +1,4 @@ -import caps.* +import caps.unsafe.* def test = var finalizeActions = collection.mutable.ListBuffer[() => Unit]() val action = finalizeActions.remove(0).unsafeUnbox diff --git a/tests/pos-custom-args/captures/vars1.scala b/tests/pos-custom-args/captures/vars1.scala index 8c2f2cb8b5d5..c008bac2e72f 100644 --- a/tests/pos-custom-args/captures/vars1.scala +++ b/tests/pos-custom-args/captures/vars1.scala @@ -1,4 +1,4 @@ -import caps.* +import caps.unsafe.* object Test: type ErrorHandler = (Int, String) => Unit @@ -11,15 +11,11 @@ object Test: def defaultIncompleteHandler1(): ErrorHandler = ??? val defaultIncompleteHandler2: ErrorHandler = ??? - var incompleteHandler1: ErrorHandler = defaultIncompleteHandler1() - var incompleteHandler2: ErrorHandler = defaultIncompleteHandler2 - var incompleteHandler3: ErrorHandler = defaultIncompleteHandler1().unsafeBox - var incompleteHandler4: ErrorHandler = defaultIncompleteHandler2.unsafeBox - private var incompleteHandler5 = defaultIncompleteHandler1() - private var incompleteHandler6 = defaultIncompleteHandler2 + var incompleteHandler1: ErrorHandler = defaultIncompleteHandler1().unsafeBox + var incompleteHandler2: ErrorHandler = defaultIncompleteHandler2.unsafeBox private var incompleteHandler7 = defaultIncompleteHandler1().unsafeBox private var incompleteHandler8 = defaultIncompleteHandler2.unsafeBox - incompleteHandler1 = defaultIncompleteHandler2 + incompleteHandler1 = defaultIncompleteHandler2.unsafeBox incompleteHandler1 = defaultIncompleteHandler2.unsafeBox val saved = incompleteHandler1.unsafeUnbox diff --git a/tests/pos-custom-args/no-experimental/dotty-experimental.scala b/tests/pos-custom-args/no-experimental/dotty-experimental.scala index 74e79c85eaaa..320c68dbea50 100644 --- a/tests/pos-custom-args/no-experimental/dotty-experimental.scala +++ 
b/tests/pos-custom-args/no-experimental/dotty-experimental.scala @@ -1,6 +1,6 @@ package dotty.tools object test { - val x = caps.unsafeBox + val x = caps.unsafe.unsafeBox } diff --git a/tests/pos-with-compiler-cc/dotc/Bench.scala b/tests/pos-with-compiler-cc/dotc/Bench.scala new file mode 100644 index 000000000000..c9c032b0ae7d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Bench.scala @@ -0,0 +1,64 @@ +package dotty.tools +package dotc + +import core.Contexts._ +import reporting.Reporter +import io.AbstractFile + +import scala.annotation.internal.sharable + +/** A main class for running compiler benchmarks. Can instantiate a given + * number of compilers and run each (sequentially) a given number of times + * on the same sources. + */ +object Bench extends Driver: + + @sharable private var numRuns = 1 + + private def ntimes(n: Int)(op: => Reporter): Reporter = + (0 until n).foldLeft(emptyReporter)((_, _) => op) + + @sharable private var times: Array[Int] = _ + + override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = + times = new Array[Int](numRuns) + var reporter: Reporter = emptyReporter + for i <- 0 until numRuns do + val start = System.nanoTime() + reporter = super.doCompile(compiler, files) + times(i) = ((System.nanoTime - start) / 1000000).toInt + println(s"time elapsed: ${times(i)}ms") + if ctx.settings.Xprompt.value then + print("hit to continue >") + System.in.nn.read() + println() + reporter + + def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { + val pos = args indexOf name + if (pos < 0) (default, args) + else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2))) + } + + def reportTimes() = + val best = times.sorted + val measured = numRuns / 3 + val avgBest = best.take(measured).sum / measured + val avgLast = times.reverse.take(measured).sum / measured + println(s"best out of $numRuns runs: ${best(0)}") + println(s"average out of best $measured: 
$avgBest") + println(s"average out of last $measured: $avgLast") + + override def process(args: Array[String], rootCtx: Context): Reporter = + val (numCompilers, args1) = extractNumArg(args, "#compilers") + val (numRuns, args2) = extractNumArg(args1, "#runs") + this.numRuns = numRuns + var reporter: Reporter = emptyReporter + for i <- 0 until numCompilers do + reporter = super.process(args2, rootCtx) + reportTimes() + reporter + +end Bench + + diff --git a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala new file mode 100644 index 000000000000..44ca582c3c61 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala @@ -0,0 +1,166 @@ +package dotty.tools +package dotc + +import core._ +import Contexts._ +import SymDenotations.ClassDenotation +import Symbols._ +import util.{FreshNameCreator, SourceFile, NoSource} +import util.Spans.Span +import ast.{tpd, untpd} +import tpd.{Tree, TreeTraverser} +import ast.Trees.{Import, Ident} +import typer.Nullables +import transform.SymUtils._ +import core.Decorators._ +import config.{SourceVersion, Feature} +import StdNames.nme +import scala.annotation.internal.sharable + +class CompilationUnit protected (val source: SourceFile) { + + override def toString: String = source.toString + + var untpdTree: untpd.Tree = untpd.EmptyTree + + var tpdTree: tpd.Tree = tpd.EmptyTree + + /** Is this the compilation unit of a Java file */ + def isJava: Boolean = source.file.name.endsWith(".java") + + /** The source version for this unit, as determined by a language import */ + var sourceVersion: Option[SourceVersion] = None + + /** Pickled TASTY binaries, indexed by class. */ + var pickled: Map[ClassSymbol, () => Array[Byte]] = Map() + + /** The fresh name creator for the current unit. 
+ * FIXME(#7661): This is not fine-grained enough to enable reproducible builds, + * see https://github.com/scala/scala/commit/f50ec3c866263448d803139e119b33afb04ec2bc + */ + val freshNames: FreshNameCreator = new FreshNameCreator.Default + + /** Will be set to `true` if there are inline call that must be inlined after typer. + * The information is used in phase `Inlining` in order to avoid traversing trees that need no transformations. + */ + var needsInlining: Boolean = false + + /** Set to `true` if inliner added anonymous mirrors that need to be completed */ + var needsMirrorSupport: Boolean = false + + /** Will be set to `true` if contains `Quote`. + * The information is used in phase `Staging`/`Splicing`/`PickleQuotes` in order to avoid traversing trees that need no transformations. + */ + var needsStaging: Boolean = false + + /** Will be set to true if the unit contains a captureChecking language import */ + var needsCaptureChecking: Boolean = false + + /** Will be set to true if the unit contains a pureFunctions language import */ + var knowsPureFuns: Boolean = false + + var suspended: Boolean = false + var suspendedAtInliningPhase: Boolean = false + + /** Can this compilation unit be suspended */ + def isSuspendable: Boolean = true + + /** Suspends the compilation unit by thowing a SuspendException + * and recording the suspended compilation unit + */ + def suspend()(using Context): Nothing = + assert(isSuspendable) + if !suspended then + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspended: $this") + suspended = true + ctx.run.nn.suspendedUnits += this + if ctx.phase == Phases.inliningPhase then + suspendedAtInliningPhase = true + throw CompilationUnit.SuspendException() + + private var myAssignmentSpans: Map[Int, List[Span]] | Null = null + + /** A map from (name-) offsets of all local variables in this compilation unit + * that can be tracked for being not null to the list of spans of assignments + * to these variables. 
+ */ + def assignmentSpans(using Context): Map[Int, List[Span]] = + if myAssignmentSpans == null then myAssignmentSpans = Nullables.assignmentSpans + myAssignmentSpans.nn +} + +@sharable object NoCompilationUnit extends CompilationUnit(NoSource) { + + override def isJava: Boolean = false + + override def suspend()(using Context): Nothing = + throw CompilationUnit.SuspendException() + + override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty +} + +object CompilationUnit { + + class SuspendException extends Exception + + /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ + def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = + val file = clsd.symbol.associatedFile.nn + apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees) + + /** Make a compilation unit, given picked bytes and unpickled tree */ + def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = { + assert(!unpickled.isEmpty, unpickled) + val unit1 = new CompilationUnit(source) + unit1.tpdTree = unpickled + if (forceTrees) { + val force = new Force + force.traverse(unit1.tpdTree) + unit1.needsStaging = force.containsQuote + unit1.needsInlining = force.containsInline + } + unit1 + } + + /** Create a compilation unit corresponding to `source`. + * If `mustExist` is true, this will fail if `source` does not exist. 
+ */ + def apply(source: SourceFile, mustExist: Boolean = true)(using Context): CompilationUnit = { + val src = + if (!mustExist) + source + else if (source.file.isDirectory) { + report.error(s"expected file, received directory '${source.file.path}'") + NoSource + } + else if (!source.file.exists) { + report.error(s"source file not found: ${source.file.path}") + NoSource + } + else source + new CompilationUnit(src) + } + + /** Force the tree to be loaded */ + private class Force extends TreeTraverser { + var containsQuote = false + var containsInline = false + var containsCaptureChecking = false + def traverse(tree: Tree)(using Context): Unit = { + if (tree.symbol.isQuote) + containsQuote = true + if tree.symbol.is(Flags.Inline) then + containsInline = true + tree match + case Import(qual, selectors) => + tpd.languageImport(qual) match + case Some(prefix) => + for case untpd.ImportSelector(untpd.Ident(imported), untpd.EmptyTree, _) <- selectors do + Feature.handleGlobalLanguageImport(prefix, imported) + case _ => + case _ => + traverseChildren(tree) + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Compiler.scala b/tests/pos-with-compiler-cc/dotc/Compiler.scala new file mode 100644 index 000000000000..b121a47781e1 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Compiler.scala @@ -0,0 +1,171 @@ +package dotty.tools +package dotc + +import core._ +import Contexts._ +import typer.{TyperPhase, RefChecks} +import cc.CheckCaptures +import parsing.Parser +import Phases.Phase +import transform._ +import dotty.tools.backend +import backend.jvm.{CollectSuperCalls, GenBCode} +import localopt.StringInterpolatorOpt + +/** The central class of the dotc compiler. The job of a compiler is to create + * runs, which process given `phases` in a given `rootContext`. + */ +class Compiler { + + /** Meta-ordering constraint: + * + * DenotTransformers that change the signature of their denotation's info must go + * after erasure. 
The reason is that denotations are permanently referred to by + * TermRefs which contain a signature. If the signature of a symbol would change, + * all refs to it would become outdated - they could not be dereferenced in the + * new phase. + * + * After erasure, signature changing denot-transformers are OK because signatures + * are never recomputed later than erasure. + */ + def phases: List[List[Phase]] = + frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases + + /** Phases dealing with the frontend up to trees ready for TASTY pickling */ + protected def frontendPhases: List[List[Phase]] = + List(new Parser) :: // Compiler frontend: scanner, parser + List(new TyperPhase) :: // Compiler frontend: namer, typer + List(new YCheckPositions) :: // YCheck positions + List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks + List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files + List(new PostTyper) :: // Additional checks and cleanups after type checking + List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) + List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks + List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols + Nil + + /** Phases dealing with TASTY tree pickling and unpickling */ + protected def picklerPhases: List[List[Phase]] = + List(new Pickler) :: // Generate TASTY info + List(new Inlining) :: // Inline and execute macros + List(new PostInlining) :: // Add mirror support for inlined code + List(new Staging) :: // Check staging levels and heal staged types + List(new Splicing) :: // Replace level 1 splices with holes + List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures + Nil + + /** Phases dealing with the transformation from pickled trees to backend trees */ + protected def transformPhases: List[List[Phase]] = + List(new 
InstrumentCoverage) :: // Perform instrumentation for code coverage (if -coverage-out is set) + List(new FirstTransform, // Some transformations to put trees into a canonical form + new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars + new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes + new CookComments, // Cook the comments: expand variables, doc, etc. + new CheckStatic, // Check restrictions that apply to @static members + new CheckLoopingImplicits, // Check that implicit defs do not call themselves in an infinite loop + new BetaReduce, // Reduce closure applications + new InlineVals, // Check right hand-sides of an `inline val`s + new ExpandSAMs, // Expand single abstract method closures to anonymous classes + new ElimRepeated, // Rewrite vararg parameters and arguments + new RefChecks) :: // Various checks mostly related to abstract members and overriding + List(new init.Checker) :: // Check initialization of objects + List(new CrossVersionChecks, // Check issues related to deprecated and experimental + new ProtectedAccessors, // Add accessors for protected members + new ExtensionMethods, // Expand methods of value classes with extension methods + new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases + new ElimByName, // Map by-name parameters to functions + new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing scope + new ForwardDepChecks, // Check that there are no forward references to local vals + new SpecializeApplyMethods, // Adds specialized methods to FunctionN + new TryCatchPatterns, // Compile cases in try/catch + new PatternMatcher) :: // Compile pattern matches + List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test + List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test + List(new CheckCaptures.Pre) :: // Preparations for check captures phase, 
enabled under captureChecking + List(new CheckCaptures) :: // Check captures, enabled under captureChecking + List(new ElimOpaque, // Turn opaque into normal aliases + new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) + new ExplicitOuter, // Add accessors to outer classes from nested ones. + new ExplicitSelf, // Make references to non-trivial self types explicit as casts + new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats + List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions + new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` + new InlinePatterns, // Remove placeholders of inlined patterns + new VCInlineMethods, // Inlines calls to value class methods + new SeqLiterals, // Express vararg arguments as arrays + new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods + new Getters, // Replace non-private vals and vars with getter defs (fields are added later) + new SpecializeFunctions, // Specialized Function{0,1,2} by replacing super with specialized super + new SpecializeTuples, // Specializes Tuples by replacing tuple construction and selection trees + new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods + new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization + new ElimOuterSelect, // Expand outer selections + new ResolveSuper, // Implement super accessors + new FunctionXXLForwarders, // Add forwarders for FunctionXXL apply method + new ParamForwarding, // Add forwarders for aliases of superclass parameters + new TupleOptimizations, // Optimize generic operations on tuples + new LetOverApply, // Lift blocks from receivers of applications + new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. 
+ List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. + List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types + new PureStats, // Remove pure stats from blocks + new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations + new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference + new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` + new sjs.AddLocalJSFakeNews, // Adds fake new invocations to local JS classes in calls to `createLocalJSClass` + new ElimPolyFunction, // Rewrite PolyFunction subclasses to FunctionN subclasses + new TailRec, // Rewrite tail recursion to loops + new CompleteJavaEnums, // Fill in constructors for Java enums + new Mixin, // Expand trait fields and trait initializers + new LazyVals, // Expand lazy vals + new Memoize, // Add private fields to getters and setters + new NonLocalReturns, // Expand non-local returns + new CapturedVars) :: // Represent vars captured by closures as heap objects + List(new Constructors, // Collect initialization code in primary constructors + // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it + new Instrumentation) :: // Count calls and allocations under -Yinstrument + List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments + // Note: in this mini-phase block scopes are incorrect. 
No phases that rely on scopes should be here + new ElimStaticThis, // Replace `this` references to static objects by global identifiers + new CountOuterAccesses) :: // Identify outer accessors that can be dropped + List(new DropOuterAccessors, // Drop unused outer accessors + new CheckNoSuperThis, // Check that supercalls don't contain references to `this` + new Flatten, // Lift all inner classes to package scope + new TransformWildcards, // Replace wildcards with default values + new MoveStatics, // Move static methods from companion to the class itself + new ExpandPrivate, // Widen private definitions accessed from nested classes + new RestoreScopes, // Repair scopes rendered invalid by moving definitions in prior phases of the group + new SelectStatic, // get rid of selects that would be compiled into GetStatic + new sjs.JUnitBootstrappers, // Generate JUnit-specific bootstrapper classes for Scala.js (not enabled by default) + new CollectEntryPoints, // Collect all entry points and save them in the context + new CollectSuperCalls, // Find classes that are called with super + new RepeatableAnnotations) :: // Aggregate repeatable annotations + Nil + + /** Generate the output of the compilation */ + protected def backendPhases: List[List[Phase]] = + List(new backend.sjs.GenSJSIR) :: // Generate .sjsir files for Scala.js (not enabled by default) + List(new GenBCode) :: // Generate JVM bytecode + Nil + + var runId: Int = 1 + def nextRunId: Int = { + runId += 1; runId + } + + def reset()(using Context): Unit = { + ctx.base.reset() + val run = ctx.run + if (run != null) run.reset() + } + + def newRun(using Context): Run = { + reset() + val rctx = + if ctx.settings.Xsemanticdb.value then + ctx.addMode(Mode.ReadPositions) + else + ctx + new Run(this, rctx) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Driver.scala b/tests/pos-with-compiler-cc/dotc/Driver.scala new file mode 100644 index 000000000000..14a71463c66d --- /dev/null +++ 
b/tests/pos-with-compiler-cc/dotc/Driver.scala @@ -0,0 +1,207 @@ +package dotty.tools.dotc + +import dotty.tools.FatalError +import config.CompilerCommand +import core.Comments.{ContextDoc, ContextDocstrings} +import core.Contexts._ +import core.{MacroClassLoader, TypeError} +import dotty.tools.dotc.ast.Positioned +import dotty.tools.io.AbstractFile +import reporting._ +import core.Decorators._ +import config.Feature + +import scala.util.control.NonFatal +import fromtasty.{TASTYCompiler, TastyFileUtil} + +/** Run the Dotty compiler. + * + * Extending this class lets you customize many aspects of the compilation + * process, but in most cases you only need to call [[process]] on the + * existing object [[Main]]. + */ +class Driver { + + protected def newCompiler(using Context): Compiler = + if (ctx.settings.fromTasty.value) new TASTYCompiler + else new Compiler + + protected def emptyReporter: Reporter = new StoreReporter(null) + + protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = + if files.nonEmpty then + try + val run = compiler.newRun + run.compile(files) + finish(compiler, run) + catch + case ex: FatalError => + report.error(ex.getMessage.nn) // signals that we should fail compilation. 
+ case ex: TypeError => + println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") + throw ex + case ex: Throwable => + println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") + throw ex + ctx.reporter + + protected def finish(compiler: Compiler, run: Run)(using Context): Unit = + run.printSummary() + if !ctx.reporter.errorsReported && run.suspendedUnits.nonEmpty then + val suspendedUnits = run.suspendedUnits.toList + if (ctx.settings.XprintSuspension.value) + report.echo(i"compiling suspended $suspendedUnits%, %") + val run1 = compiler.newRun + for unit <- suspendedUnits do unit.suspended = false + run1.compileUnits(suspendedUnits) + finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) + + protected def initCtx: Context = (new ContextBase).initialCtx + + protected def sourcesRequired: Boolean = true + + protected def command: CompilerCommand = ScalacCommand + + /** Setup context with initialized settings from CLI arguments, then check if there are any settings that + * would change the default behaviour of the compiler. + * + * @return If there is no setting like `-help` preventing us from continuing compilation, + * this method returns a list of files to compile and an updated Context. + * If compilation should be interrupted, this method returns None. 
+ */ + def setup(args: Array[String], rootCtx: Context): Option[(List[AbstractFile], Context)] = { + val ictx = rootCtx.fresh + val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) + ictx.setSettings(summary.sstate) + Feature.checkExperimentalSettings(using ictx) + MacroClassLoader.init(ictx) + Positioned.init(using ictx) + + inContext(ictx) { + if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then + ictx.setProperty(ContextDoc, new ContextDocstrings) + val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) + fileNamesOrNone.map { fileNames => + val files = fileNames.map(ctx.getFile) + (files, fromTastySetup(files)) + } + } + } + + /** Setup extra classpath of tasty and jar files */ + protected def fromTastySetup(files: List[AbstractFile])(using Context): Context = + if ctx.settings.fromTasty.value then + val newEntries: List[String] = files + .flatMap { file => + if !file.exists then + report.error(s"File does not exist: ${file.path}") + None + else file.extension match + case "jar" => Some(file.path) + case "tasty" => + TastyFileUtil.getClassPath(file) match + case Some(classpath) => Some(classpath) + case _ => + report.error(s"Could not load classname from: ${file.path}") + None + case _ => + report.error(s"File extension is not `tasty` or `jar`: ${file.path}") + None + } + .distinct + val ctx1 = ctx.fresh + val fullClassPath = + (newEntries :+ ctx.settings.classpath.value).mkString(java.io.File.pathSeparator.nn) + ctx1.setSetting(ctx1.settings.classpath, fullClassPath) + else ctx + + /** Entry point to the compiler that can be conveniently used with Java reflection. + * + * This entry point can easily be used without depending on the `dotty` package, + * you only need to depend on `dotty-interfaces` and call this method using + * reflection. This allows you to write code that will work against multiple + * versions of dotty without recompilation. 
+ * + * The trade-off is that you can only pass a SimpleReporter to this method + * and not a normal Reporter which is more powerful. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala]] + * + * @param args Arguments to pass to the compiler. + * @param simple Used to log errors, warnings, and info messages. + * The default reporter is used if this is `null`. + * @param callback Used to execute custom code during the compilation + * process. No callbacks will be executed if this is `null`. + * @return + */ + final def process(args: Array[String], simple: interfaces.SimpleReporter | Null, + callback: interfaces.CompilerCallback | Null): interfaces.ReporterResult = { + val reporter = if (simple == null) null else Reporter.fromSimpleReporter(simple) + process(args, reporter, callback) + } + + /** Principal entry point to the compiler. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] + * in method `runCompiler` + * + * @param args Arguments to pass to the compiler. + * @param reporter Used to log errors, warnings, and info messages. + * The default reporter is used if this is `null`. + * @param callback Used to execute custom code during the compilation + * process. No callbacks will be executed if this is `null`. + * @return The `Reporter` used. Use `Reporter#hasErrors` to check + * if compilation succeeded. + */ + final def process(args: Array[String], reporter: Reporter | Null = null, + callback: interfaces.CompilerCallback | Null = null): Reporter = { + val compileCtx = initCtx.fresh + if (reporter != null) + compileCtx.setReporter(reporter) + if (callback != null) + compileCtx.setCompilerCallback(callback) + process(args, compileCtx) + } + + /** Entry point to the compiler with no optional arguments. 
+ * + * This overload is provided for compatibility reasons: the + * `RawCompiler` of sbt expects this method to exist and calls + * it using reflection. Keeping it means that we can change + * the other overloads without worrying about breaking compatibility + * with sbt. + */ + final def process(args: Array[String]): Reporter = + process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null) + + /** Entry point to the compiler using a custom `Context`. + * + * In most cases, you do not need a custom `Context` and should + * instead use one of the other overloads of `process`. However, + * the other overloads cannot be overridden, instead you + * should override this one which they call internally. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] + * in method `runCompilerWithContext` + * + * @param args Arguments to pass to the compiler. + * @param rootCtx The root Context to use. + * @return The `Reporter` used. Use `Reporter#hasErrors` to check + * if compilation succeeded. + */ + def process(args: Array[String], rootCtx: Context): Reporter = { + setup(args, rootCtx) match + case Some((files, compileCtx)) => + doCompile(newCompiler(using compileCtx), files)(using compileCtx) + case None => + rootCtx.reporter + } + + def main(args: Array[String]): Unit = { + // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError, + // we may try to load it but fail with another StackOverflowError and lose the original exception, + // see . + val _ = NonFatal + sys.exit(if (process(args).hasErrors) 1 else 0) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Main.scala b/tests/pos-with-compiler-cc/dotc/Main.scala new file mode 100644 index 000000000000..3288fded52a2 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Main.scala @@ -0,0 +1,5 @@ +package dotty.tools +package dotc + +/** Main class of the `dotc` batch compiler. 
*/ +object Main extends Driver diff --git a/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala new file mode 100644 index 000000000000..ae20d81226c9 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala @@ -0,0 +1,9 @@ +package dotty.tools.dotc + +import dotty.tools.FatalError + +class MissingCoreLibraryException(rootPackage: String) extends FatalError( + s"""Could not find package $rootPackage from compiler core libraries. + |Make sure the compiler core libraries are on the classpath. + """.stripMargin +) diff --git a/tests/pos-with-compiler-cc/dotc/Resident.scala b/tests/pos-with-compiler-cc/dotc/Resident.scala new file mode 100644 index 000000000000..0b9bca0dc75b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Resident.scala @@ -0,0 +1,61 @@ +package dotty.tools +package dotc + +import core.Contexts._ +import reporting.Reporter +import java.io.EOFException +import scala.annotation.tailrec + +/** A compiler which stays resident between runs. This is more of a PoC than + * something that's expected to be used often + * + * Usage: + * + * > scala dotty.tools.dotc.Resident + * + * dotc> "more options and files to compile" + * + * ... + * + * dotc> :reset // reset all options to the ones passed on the command line + * + * ... 
+ * + * dotc> :q // quit + */ +class Resident extends Driver { + + object residentCompiler extends Compiler + + override def sourcesRequired: Boolean = false + + private val quit = ":q" + private val reset = ":reset" + private val prompt = "dotc> " + + private def getLine() = { + Console.print(prompt) + try scala.io.StdIn.readLine() catch { case _: EOFException => quit } + } + + final override def process(args: Array[String], rootCtx: Context): Reporter = { + @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = { + setup(args, prevCtx) match + case Some((files, ctx)) => + inContext(ctx) { + doCompile(residentCompiler, files) + } + var nextCtx = ctx + var line = getLine() + while (line == reset) { + nextCtx = rootCtx + line = getLine() + } + if line.startsWith(quit) then ctx.reporter + else loop((line split "\\s+").asInstanceOf[Array[String]], nextCtx) + case None => + prevCtx.reporter + } + loop(args, rootCtx) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala new file mode 100644 index 000000000000..705664177507 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Run.scala @@ -0,0 +1,398 @@ +package dotty.tools +package dotc + +import core._ +import Contexts._ +import Periods._ +import Symbols._ +import Scopes._ +import Names.Name +import Denotations.Denotation +import typer.Typer +import typer.ImportInfo.withRootImports +import Decorators._ +import io.AbstractFile +import Phases.unfusedPhases + +import util._ +import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile} +import reporting.Diagnostic +import reporting.Diagnostic.Warning +import rewrites.Rewrites +import profile.Profiler +import printing.XprintMode +import typer.ImplicitRunInfo +import config.Feature +import StdNames.nme + +import java.io.{BufferedWriter, OutputStreamWriter} +import java.nio.charset.StandardCharsets + +import scala.collection.mutable +import scala.util.control.NonFatal +import scala.io.Codec +import 
caps.unsafe.unsafeUnbox + +/** A compiler run. Exports various methods to compile source files */ +class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { + + /** Default timeout to stop looking for further implicit suggestions, in ms. + * This is usually for the first import suggestion; subsequent suggestions + * may get smaller timeouts. @see ImportSuggestions.reduceTimeBudget + */ + private var myImportSuggestionBudget: Int = + Int.MinValue // sentinel value; means whatever is set in command line option + + def importSuggestionBudget = + if myImportSuggestionBudget == Int.MinValue then ictx.settings.XimportSuggestionTimeout.value + else myImportSuggestionBudget + + def importSuggestionBudget_=(x: Int) = + myImportSuggestionBudget = x + + /** If this variable is set to `true`, some core typer operations will + * return immediately. Currently these early abort operations are + * `Typer.typed` and `Implicits.typedImplicit`. + */ + @volatile var isCancelled = false + + private var compiling = false + + private var myUnits: List[CompilationUnit] = Nil + private var myUnitsCached: List[CompilationUnit] = Nil + private var myFiles: Set[AbstractFile] = _ + + // `@nowarn` annotations by source file, populated during typer + private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty + // source files whose `@nowarn` annotations are processed + private val mySuppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty + // warnings issued before a source file's `@nowarn` annotations are processed, suspended so that `@nowarn` can filter them + private val mySuspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Warning]] = mutable.LinkedHashMap.empty + + object suppressions: + // When the REPL creates a new run (ReplDriver.compile), parsing is already done in the old context, with the + // previous Run. 
Parser warnings were suspended in the old run and need to be copied over so they are not lost. + // Same as scala/scala/commit/79ca1408c7. + def initSuspendedMessages(oldRun: Run | Null) = if oldRun != null then + mySuspendedMessages.clear() + mySuspendedMessages ++= oldRun.mySuspendedMessages + + def suppressionsComplete(source: SourceFile) = source == NoSource || mySuppressionsComplete(source) + + def addSuspendedMessage(warning: Warning) = + mySuspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning + + def nowarnAction(dia: Diagnostic): Action.Warning.type | Action.Verbose.type | Action.Silent.type = + mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match { + case Some(s) => + s.markUsed() + if (s.verbose) Action.Verbose + else Action.Silent + case _ => + Action.Warning + } + + def addSuppression(sup: Suppression): Unit = + val source = sup.annotPos.source + mySuppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup + + def reportSuspendedMessages(source: SourceFile)(using Context): Unit = { + // sort suppressions. 
they are not added in any particular order because of lazy type completion + for (sups <- mySuppressions.get(source)) + mySuppressions(source) = sups.sortBy(sup => 0 - sup.start) + mySuppressionsComplete += source + mySuspendedMessages.remove(source).foreach(_.foreach(ctx.reporter.issueIfNotSuppressed)) + } + + def runFinished(hasErrors: Boolean): Unit = + // report suspended messages (in case the run finished before typer) + mySuspendedMessages.keysIterator.toList.foreach(reportSuspendedMessages) + // report unused nowarns only if all phases are done + if !hasErrors && ctx.settings.WunusedHas.nowarn then + for { + source <- mySuppressions.keysIterator.toList + sups <- mySuppressions.remove(source) + sup <- sups.reverse + } if (!sup.used) + report.warning("@nowarn annotation does not suppress any warnings", sup.annotPos) + + /** The compilation units currently being compiled, this may return different + * results over time. + */ + def units: List[CompilationUnit] = myUnits + + private def units_=(us: List[CompilationUnit]): Unit = + myUnits = us + + var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() + + def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = + if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then + val where = + if suspendedUnits.size == 1 then i"in ${suspendedUnits.head}." + else i"""among + | + | ${suspendedUnits.toList}%, % + |""" + val enableXprintSuspensionHint = + if ctx.settings.XprintSuspension.value then "" + else "\n\nCompiling with -Xprint-suspension gives more information." + report.error(em"""Cyclic macro dependencies $where + |Compilation stopped since no further progress can be made. + | + |To fix this, place macros in one set of files and their callers in another.$enableXprintSuspensionHint""") + + /** The files currently being compiled (active or suspended). + * This may return different results over time. 
+ * These files do not have to be source files since it's possible to compile + * from TASTY. + */ + def files: Set[AbstractFile] = { + if (myUnits ne myUnitsCached) { + myUnitsCached = myUnits + myFiles = (myUnits ++ suspendedUnits).map(_.source.file).toSet + } + myFiles + } + + /** The source files of all late entered symbols, as a set */ + private var lateFiles = mutable.Set[AbstractFile]() + + /** A cache for static references to packages and classes */ + val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) + + /** Actions that need to be performed at the end of the current compilation run */ + private var finalizeActions = mutable.ListBuffer[() => Unit]() + + /** Will be set to true if any of the compiled compilation units contains + * a pureFunctions language import. + */ + var pureFunsImportEncountered = false + + /** Will be set to true if any of the compiled compilation units contains + * a captureChecking language import. + */ + var ccImportEncountered = false + + def compile(files: List[AbstractFile]): Unit = + try + val sources = files.map(runContext.getSource(_)) + compileSources(sources) + catch + case NonFatal(ex) => + if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") + else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") + throw ex + + /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` + * when we first build the compiler. But we modify them with -Yskip, -Ystop + * on each run. That modification needs to either transform the tree structure, + * or we need to assemble phases on each run, and take -Yskip, -Ystop into + * account. I think the latter would be preferable. 
+ */ + def compileSources(sources: List[SourceFile]): Unit = + if (sources forall (_.exists)) { + units = sources.map(CompilationUnit(_)) + compileUnits() + } + + + def compileUnits(us: List[CompilationUnit]): Unit = { + units = us + compileUnits() + } + + def compileUnits(us: List[CompilationUnit], ctx: Context): Unit = { + units = us + compileUnits()(using ctx) + } + + var profile: Profile = NoProfile + + private def compileUnits()(using Context) = Stats.maybeMonitored { + if (!ctx.mode.is(Mode.Interactive)) // IDEs might have multi-threaded access, accesses are synchronized + ctx.base.checkSingleThreaded() + + compiling = true + + profile = + if ctx.settings.Vprofile.value + || !ctx.settings.VprofileSortedBy.value.isEmpty + || ctx.settings.VprofileDetails.value != 0 + then ActiveProfile(ctx.settings.VprofileDetails.value.max(0).min(1000)) + else NoProfile + + // If testing pickler, make sure to stop after pickling phase: + val stopAfter = + if (ctx.settings.YtestPickler.value) List("pickler") + else ctx.settings.YstopAfter.value + + val pluginPlan = ctx.base.addPluginPhases(ctx.base.phasePlan) + val phases = ctx.base.fusePhases(pluginPlan, + ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) + ctx.base.usePhases(phases) + + def runPhases(using Context) = { + var lastPrintedTree: PrintedTree = NoPrintedTree + val profiler = ctx.profiler + var phasesWereAdjusted = false + + for (phase <- ctx.base.allPhases) + if (phase.isRunnable) + Stats.trackTime(s"$phase ms ") { + val start = System.currentTimeMillis + val profileBefore = profiler.beforePhase(phase) + units = phase.runOn(units) + profiler.afterPhase(phase, profileBefore) + if (ctx.settings.Xprint.value.containsPhase(phase)) + for (unit <- units) + lastPrintedTree = + printTree(lastPrintedTree)(using ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) + report.informTime(s"$phase ", start) + Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) + for 
(unit <- units) + Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) + ctx.typerState.gc() + } + if !phasesWereAdjusted then + phasesWereAdjusted = true + if !Feature.ccEnabledSomewhere then + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) + + profiler.finished() + } + + val runCtx = ctx.fresh + runCtx.setProfiler(Profiler()) + unfusedPhases.foreach(_.initContext(runCtx)) + runPhases(using runCtx) + if (!ctx.reporter.hasErrors) + Rewrites.writeBack() + suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) + while (finalizeActions.nonEmpty) { + val action = finalizeActions.remove(0).unsafeUnbox + action() + } + compiling = false + } + + /** Enter top-level definitions of classes and objects contained in source file `file`. + * The newly added symbols replace any previously entered symbols. + * If `typeCheck = true`, also run typer on the compilation unit, and set + * `rootTreeOrProvider`. 
+ */ + def lateCompile(file: AbstractFile, typeCheck: Boolean)(using Context): Unit = + if (!files.contains(file) && !lateFiles.contains(file)) { + lateFiles += file + + val unit = CompilationUnit(ctx.getSource(file)) + val unitCtx = runContext.fresh + .setCompilationUnit(unit) + .withRootImports + + def process()(using Context) = + ctx.typer.lateEnterUnit(doTypeCheck => + if typeCheck then + if compiling then finalizeActions += doTypeCheck + else doTypeCheck() + ) + + process()(using unitCtx) + } + + private sealed trait PrintedTree + private /*final*/ case class SomePrintedTree(phase: String, tree: String) extends PrintedTree + private object NoPrintedTree extends PrintedTree + + private def printTree(last: PrintedTree)(using Context): PrintedTree = { + val unit = ctx.compilationUnit + val fusedPhase = ctx.phase.prevMega + val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" + val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree + val treeString = fusedPhase.show(tree) + + last match { + case SomePrintedTree(phase, lastTreeString) if lastTreeString == treeString => + report.echo(s"$echoHeader: unchanged since $phase") + last + + case SomePrintedTree(phase, lastTreeString) if ctx.settings.XprintDiff.value || ctx.settings.XprintDiffDel.value => + val diff = DiffUtil.mkColoredCodeDiff(treeString, lastTreeString, ctx.settings.XprintDiffDel.value) + report.echo(s"$echoHeader\n$diff\n") + SomePrintedTree(fusedPhase.phaseName, treeString) + + case _ => + report.echo(s"$echoHeader\n$treeString\n") + SomePrintedTree(fusedPhase.phaseName, treeString) + } + } + + def compileFromStrings(scalaSources: List[String], javaSources: List[String] = Nil): Unit = { + def sourceFile(source: String, isJava: Boolean): SourceFile = { + val uuid = java.util.UUID.randomUUID().toString + val ext = if (isJava) "java" else "scala" + val name = s"compileFromString-$uuid.$ext" + SourceFile.virtual(name, source) + } + val sources = + 
scalaSources.map(sourceFile(_, isJava = false)) ++ + javaSources.map(sourceFile(_, isJava = true)) + + compileSources(sources) + } + + /** Print summary of warnings and errors encountered */ + def printSummary(): Unit = { + printMaxConstraint() + val r = runContext.reporter + if !r.errorsReported then + profile.printSummary() + r.summarizeUnreportedWarnings() + r.printSummary() + } + + override def reset(): Unit = { + super[ImplicitRunInfo].reset() + super[ConstraintRunInfo].reset() + myCtx = null + myUnits = Nil + myUnitsCached = Nil + } + + /** Produces the following contexts, from outermost to innermost + * + * bootStrap: A context with next available runId and a scope consisting of + * the RootPackage _root_ + * start A context with RootClass as owner and the necessary initializations + * for type checking. + * imports For each element of RootImports, an import context + */ + protected def rootContext(using Context): Context = { + ctx.initialize() + ctx.base.setPhasePlan(comp.phases) + val rootScope = new MutableScope(0) + val bootstrap = ctx.fresh + .setPeriod(Period(comp.nextRunId, FirstPhaseId)) + .setScope(rootScope) + rootScope.enter(ctx.definitions.RootPackage)(using bootstrap) + var start = bootstrap.fresh + .setOwner(defn.RootClass) + .setTyper(new Typer) + .addMode(Mode.ImplicitsEnabled) + .setTyperState(ctx.typerState.fresh(ctx.reporter)) + if ctx.settings.YexplicitNulls.value && !Feature.enabledBySetting(nme.unsafeNulls) then + start = start.addMode(Mode.SafeNulls) + ctx.initialize()(using start) // re-initialize the base context with start + + // `this` must be unchecked for safe initialization because by being passed to setRun during + // initialization, it is not yet considered fully initialized by the initialization checker + start.setRun(this: @unchecked) + } + + private var myCtx: Context | Null = rootContext(using ictx) + + /** The context created for this run */ + given runContext[Dummy_so_its_a_def]: Context = myCtx.nn + 
assert(runContext.runId <= Periods.MaxPossibleRunId) +} diff --git a/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala new file mode 100644 index 000000000000..2e0d9a08f25d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala @@ -0,0 +1,9 @@ +package dotty.tools.dotc + +import config.Properties._ +import config.CompilerCommand + +object ScalacCommand extends CompilerCommand: + override def cmdName: String = "scalac" + override def versionMsg: String = s"Scala compiler $versionString -- $copyrightString" + override def ifErrorsMsg: String = " scalac -help gives more information" diff --git a/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled new file mode 100644 index 000000000000..6bf7530faf24 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled @@ -0,0 +1,258 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ +import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._ + +// TODO: revise, integrate in a checking phase. 
+object CheckTrees { + + import tpd._ + + def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg) + + def checkTypeArg(arg: Tree, bounds: TypeBounds)(using Context): Unit = { + check(arg.isValueType) + check(bounds contains arg.tpe) + } + + def escapingRefs(block: Block)(using Context): collection.Set[NamedType] = { + var hoisted: Set[Symbol] = Set() + lazy val locals = ctx.typeAssigner.localSyms(block.stats).toSet + def isLocal(sym: Symbol): Boolean = + (locals contains sym) && !isHoistableClass(sym) + def isHoistableClass(sym: Symbol) = + sym.isClass && { + (hoisted contains sym) || { + hoisted += sym + !classLeaks(sym.asClass) + } + } + def leakingTypes(tp: Type): collection.Set[NamedType] = + tp namedPartsWith (tp => isLocal(tp.symbol)) + def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty + def classLeaks(sym: ClassSymbol): Boolean = + (ctx.owner is Method) || // can't hoist classes out of method bodies + (sym.info.parents exists typeLeaks) || + (sym.decls.toList exists (t => typeLeaks(t.info))) + leakingTypes(block.tpe) + } + + def checkType(tree: Tree)(using Context): Unit = tree match { + case Ident(name) => + case Select(qualifier, name) => + check(qualifier.isValue) + check(qualifier.tpe =:= tree.tpe.normalizedPrefix) + val denot = qualifier.tpe.member(name) + check(denot.exists) + check(denot.hasAltWith(_.symbol == tree.symbol)) + case This(cls) => + case Super(qual, mixin) => + check(qual.isValue) + val cls = qual.tpe.typeSymbol + check(cls.isClass) + case Apply(fn, args) => + def checkArg(arg: Tree, name: Name, formal: Type): Unit = { + arg match { + case NamedArg(argName, _) => + check(argName == name) + case _ => + check(arg.isValue) + } + check(arg.tpe <:< formal) + } + val MethodType(paramNames, paramTypes) = fn.tpe.widen // checked already at construction + args.lazyZip(paramNames).lazyZip(paramTypes) foreach checkArg + case TypeApply(fn, args) => + val pt @ PolyType(_) = fn.tpe.widen // checked already at 
construction + args.lazyZip(pt.instantiateBounds(args map (_.tpe))) foreach checkTypeArg + case Literal(const: Constant) => + case New(tpt) => + check(tpt.isValueType) + val cls = tpt.tpe.typeSymbol + check(cls.isClass) + check(!(cls is AbstractOrTrait)) + case Pair(left, right) => + check(left.isValue) + check(right.isValue) + case Typed(expr, tpt) => + check(tpt.isValueType) + expr.tpe.widen match { + case tp: MethodType => + val cls = tpt.tpe.typeSymbol + check(cls.isClass) + check((cls is Trait) || + cls.primaryConstructor.info.paramTypess.flatten.isEmpty) + val absMembers = tpt.tpe.abstractTermMembers + check(absMembers.size == 1) + check(tp <:< absMembers.head.info) + case _ => + check(expr.isValueOrPattern) + check(expr.tpe <:< tpt.tpe.translateParameterized(defn.RepeatedParamClass, defn.SeqClass)) + } + case NamedArg(name, arg) => + case Assign(lhs, rhs) => + check(lhs.isValue); check(rhs.isValue) + lhs.tpe match { + case ltpe: TermRef => + check(ltpe.symbol is Mutable) + case _ => + check(false) + } + check(rhs.tpe <:< lhs.tpe.widen) + case tree @ Block(stats, expr) => + check(expr.isValue) + check(escapingRefs(tree).isEmpty) + case If(cond, thenp, elsep) => + check(cond.isValue); check(thenp.isValue); check(elsep.isValue) + check(cond.tpe isRef defn.BooleanClass) + case Closure(env, meth, target) => + meth.tpe.widen match { + case mt @ MethodType(_, paramTypes) => + if (target.isEmpty) { + check(env.length < paramTypes.length) + for ((arg, formal) <- env zip paramTypes) + check(arg.tpe <:< formal) + } + else + // env is stored in class, not method + target.tpe match { + case SAMType(targetMeth) => + check(mt <:< targetMeth.info) + } + } + case Match(selector, cases) => + check(selector.isValue) + // are any checks that relate selector and patterns desirable? 
+ case CaseDef(pat, guard, body) => + check(pat.isValueOrPattern); check(guard.isValue); check(body.isValue) + check(guard.tpe.derivesFrom(defn.BooleanClass)) + case Return(expr, from) => + check(expr.isValue); check(from.isTerm) + check(from.tpe.termSymbol.isRealMethod) + case Try(block, handler, finalizer) => + check(block.isTerm) + check(finalizer.isTerm) + check(handler.isTerm) + check(handler.tpe derivesFrom defn.FunctionClass(1)) + check(handler.tpe.baseArgInfos(defn.FunctionClass(1)).head <:< defn.ThrowableType) + case Throw(expr) => + check(expr.isValue) + check(expr.tpe.derivesFrom(defn.ThrowableClass)) + case SeqLiteral(elems) => + val elemtp = tree.tpe.elemType + for (elem <- elems) { + check(elem.isValue) + check(elem.tpe <:< elemtp) + } + case TypeTree(original) => + if (!original.isEmpty) { + check(original.isValueType) + check(original.tpe == tree.tpe) + } + case SingletonTypeTree(ref) => + check(ref.isValue) + check(ref.symbol.isStable) + case SelectFromTypeTree(qualifier, name) => + check(qualifier.isValueType) + check(qualifier.tpe =:= tree.tpe.normalizedPrefix) + val denot = qualifier.tpe.member(name) + check(denot.exists) + check(denot.symbol == tree.symbol) + case AndTypeTree(left, right) => + check(left.isValueType); check(right.isValueType) + case OrTypeTree(left, right) => + check(left.isValueType); check(right.isValueType) + case RefinedTypeTree(tpt, refinements) => + check(tpt.isValueType) + def checkRefinements(forbidden: Set[Symbol], rs: List[Tree]): Unit = rs match { + case r :: rs1 => + val rsym = r.symbol + check(rsym.isTerm || rsym.isAbstractOrAliasType) + if (rsym.isAbstractType) check(tpt.tpe.member(rsym.name).exists) + check(rsym.info forallParts { + case nt: NamedType => !(forbidden contains nt.symbol) + case _ => true + }) + checkRefinements(forbidden - rsym, rs1) + case nil => + } + checkRefinements(ctx.typeAssigner.localSyms(refinements).toSet, refinements) + case AppliedTypeTree(tpt, args) => + check(tpt.isValueType) + val 
tparams = tpt.tpe.typeParams + check(sameLength(tparams, args)) + args.lazyZip(tparams map (_.info.bounds)) foreach checkTypeArg + case TypeBoundsTree(lo, hi) => + check(lo.isValueType); check(hi.isValueType) + check(lo.tpe <:< hi.tpe) + case Bind(sym, body) => + check(body.isValueOrPattern) + check(!(tree.symbol is Method)) + body match { + case Ident(nme.WILDCARD) => + case _ => check(body.tpe.widen =:= tree.symbol.info) + } + case Alternative(alts) => + for (alt <- alts) check(alt.isValueOrPattern) + case UnApply(fun, implicits, args) => // todo: review + check(fun.isTerm) + for (arg <- args) check(arg.isValueOrPattern) + val funtpe @ MethodType(_, _) = fun.tpe.widen + fun.symbol.name match { // check arg arity + case nme.unapplySeq => + // args need to be wrapped in (...: _*) + check(args.length == 1) + check(args.head.isInstanceOf[SeqLiteral]) + case nme.unapply => + val rtp = funtpe.resultType + if (rtp isRef defn.BooleanClass) + check(args.isEmpty) + else { + check(rtp isRef defn.OptionClass) + val normArgs = rtp.argTypesHi match { + case optionArg :: Nil => + optionArg.argTypesHi match { + case Nil => + optionArg :: Nil + case tupleArgs if defn.isTupleNType(optionArg) => + tupleArgs + } + case _ => + check(false) + Nil + } + check(sameLength(normArgs, args)) + } + } + case ValDef(mods, name, tpt, rhs) => + check(!(tree.symbol is Method)) + if (!rhs.isEmpty) { + check(rhs.isValue) + check(rhs.tpe <:< tpt.tpe) + } + case DefDef(mods, name, tparams, vparamss, tpt, rhs) => + check(tree.symbol is Method) + if (!rhs.isEmpty) { + check(rhs.isValue) + check(rhs.tpe <:< tpt.tpe) + } + case TypeDef(mods, name, tpt) => + check(tpt.isInstanceOf[Template] || tpt.tpe.isInstanceOf[TypeBounds]) + case Template(constr, parents, selfType, body) => + case Import(expr, selectors) => + check(expr.isValue) + check(expr.tpe.termSymbol.isStable) + case PackageDef(pid, stats) => + check(pid.isTerm) + check(pid.symbol is Package) + case Annotated(annot, arg) => + 
check(annot.isInstantiation) + check(annot.symbol.owner.isSubClass(defn.AnnotationClass)) + check(arg.isValueType || arg.isValue) + case EmptyTree => + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala new file mode 100644 index 000000000000..ba2c8f5f43e6 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala @@ -0,0 +1,1968 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._ +import Symbols._, StdNames._, Trees._, ContextOps._ +import Decorators._, transform.SymUtils._ +import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} +import typer.{Namer, Checking} +import util.{Property, SourceFile, SourcePosition, Chars} +import config.Feature.{sourceVersion, migrateTo3, enabled} +import config.SourceVersion._ +import collection.mutable.ListBuffer +import reporting._ +import annotation.constructorOnly +import printing.Formatting.hl +import config.Printers + +import scala.annotation.internal.sharable + +object desugar { + import untpd._ + import DesugarEnums._ + + /** An attachment for companion modules of classes that have a `derives` clause. + * The position value indicates the start position of the template of the + * deriving class. + */ + val DerivingCompanion: Property.Key[SourcePosition] = Property.Key() + + /** An attachment for match expressions generated from a PatDef or GenFrom. + * Value of key == one of IrrefutablePatDef, IrrefutableGenFrom + */ + val CheckIrrefutable: Property.Key[MatchCheck] = Property.StickyKey() + + /** A multi-line infix operation with the infix operator starting a new line. + * Used for explaining potential errors. + */ + val MultiLineInfix: Property.Key[Unit] = Property.StickyKey() + + /** An attachment key to indicate that a ValDef originated from parameter untupling. 
+ */ + val UntupledParam: Property.Key[Unit] = Property.StickyKey() + + /** What static check should be applied to a Match? */ + enum MatchCheck { + case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom + } + + /** Is `name` the name of a method that can be invalidated as a compiler-generated + * case class method if it clashes with a user-defined method? + */ + def isRetractableCaseClassMethodName(name: Name)(using Context): Boolean = name match { + case nme.apply | nme.unapply | nme.unapplySeq | nme.copy => true + case DefaultGetterName(nme.copy, _) => true + case _ => false + } + + /** Is `name` the name of a method that is added unconditionally to case classes? */ + def isDesugaredCaseClassMethodName(name: Name)(using Context): Boolean = + isRetractableCaseClassMethodName(name) || name.isSelectorName + +// ----- DerivedTypeTrees ----------------------------------- + + class SetterParamTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.info.resultType) + } + + class TypeRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.typeRef) + } + + class TermRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.Tree = tpd.ref(sym) + } + + /** A type tree that computes its type from an existing parameter. */ + class DerivedFromParamTree()(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + + /** Complete the appropriate constructors so that OriginalSymbol attachments are + * pushed to DerivedTypeTrees. 
+ */ + override def ensureCompletions(using Context): Unit = { + def completeConstructor(sym: Symbol) = + sym.infoOrCompleter match { + case completer: Namer#ClassCompleter => + completer.completeConstructor(sym) + case _ => + } + + if (!ctx.owner.is(Package)) + if (ctx.owner.isClass) { + completeConstructor(ctx.owner) + if (ctx.owner.is(ModuleClass)) + completeConstructor(ctx.owner.linkedClass) + } + else ensureCompletions(using ctx.outer) + } + + /** Return info of original symbol, where all references to siblings of the + * original symbol (i.e. sibling and original symbol have the same owner) + * are rewired to same-named parameters or accessors in the scope enclosing + * the current scope. The current scope is the scope owned by the defined symbol + * itself, that's why we have to look one scope further out. If the resulting + * type is an alias type, dealias it. This is necessary because the + * accessor of a type parameter is a private type alias that cannot be accessed + * from subclasses. 
+ */ + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = { + val relocate = new TypeMap { + val originalOwner = sym.owner + def apply(tp: Type) = tp match { + case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) => + val defctx = mapCtx.outersIterator.dropWhile(_.scope eq mapCtx.scope).next() + var local = defctx.denotNamed(tp.name).suchThat(_.isParamOrAccessor).symbol + if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots + else { + def msg = + s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" + ErrorType(msg).assertingErrorsReported(msg) + } + case _ => + mapOver(tp) + } + } + tpd.TypeTree(relocate(sym.info)) + } + } + + /** A type definition copied from `tdef` with a rhs typetree derived from it */ + def derivedTypeParam(tdef: TypeDef)(using Context): TypeDef = + cpy.TypeDef(tdef)( + rhs = DerivedFromParamTree().withSpan(tdef.rhs.span).watching(tdef) + ) + + /** A derived type definition watching `sym` */ + def derivedTypeParamWithVariance(sym: TypeSymbol)(using Context): TypeDef = + val variance = VarianceFlags & sym.flags + TypeDef(sym.name, DerivedFromParamTree().watching(sym)).withFlags(TypeParam | Synthetic | variance) + + /** A value definition copied from `vdef` with a tpt typetree derived from it */ + def derivedTermParam(vdef: ValDef)(using Context): ValDef = + cpy.ValDef(vdef)( + tpt = DerivedFromParamTree().withSpan(vdef.tpt.span).watching(vdef)) + +// ----- Desugar methods ------------------------------------------------- + + /** Setter generation is needed for: + * - non-private class members + * - all trait members + * - all package object members + */ + def isSetterNeeded(valDef: ValDef)(using Context): Boolean = { + val mods = valDef.mods + mods.is(Mutable) + && ctx.owner.isClass + && (!mods.is(Private) || ctx.owner.is(Trait) || ctx.owner.isPackageObject) + } + + /** var x: Int = expr + * ==> + * def x: Int = expr + * def x_=($1: ): Unit = 
() + * + * Generate setter where needed + */ + def valDef(vdef0: ValDef)(using Context): Tree = { + val vdef @ ValDef(_, tpt, rhs) = vdef0 + val mods = vdef.mods + + val valName = normalizeName(vdef, tpt).asTermName + val vdef1 = cpy.ValDef(vdef)(name = valName) + + if (isSetterNeeded(vdef)) { + // TODO: copy of vdef as getter needed? + // val getter = ValDef(mods, name, tpt, rhs) withPos vdef.pos? + // right now vdef maps via expandedTree to a thicket which concerns itself. + // I don't see a problem with that but if there is one we can avoid it by making a copy here. + val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) + // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) + val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral + val setter = cpy.DefDef(vdef)( + name = valName.setterName, + paramss = (setterParam :: Nil) :: Nil, + tpt = TypeTree(defn.UnitType), + rhs = setterRhs + ).withMods((mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) + .dropEndMarker() // the end marker should only appear on the getter definition + Thicket(vdef1, setter) + } + else vdef1 + } + + def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = + for (tpt <- tpts) yield { + val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param + val epname = EvidenceParamName.fresh() + ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) + } + + def mapParamss(paramss: List[ParamClause]) + (mapTypeParam: TypeDef => TypeDef) + (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = + paramss.mapConserve { + case TypeDefs(tparams) => tparams.mapConserve(mapTypeParam) + case ValDefs(vparams) => vparams.mapConserve(mapTermParam) + case _ => unreachable() + } + + /** 1. Expand context bounds to evidence params. 
E.g., + * + * def f[T >: L <: H : B](params) + * ==> + * def f[T >: L <: H](params)(implicit evidence$0: B[T]) + * + * 2. Expand default arguments to default getters. E.g, + * + * def f[T: B](x: Int = 1)(y: String = x + "m") = ... + * ==> + * def f[T](x: Int)(y: String)(implicit evidence$0: B[T]) = ... + * def f$default$1[T] = 1 + * def f$default$2[T](x: Int) = x + "m" + */ + private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = + addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = + val DefDef(_, paramss, tpt, rhs) = meth + val evidenceParamBuf = ListBuffer[ValDef]() + + def desugarContextBounds(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, cxbounds) => + val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit + evidenceParamBuf ++= makeImplicitParameters( + cxbounds, iflag, forPrimaryConstructor = isPrimaryConstructor) + tbounds + case LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) + case _ => + rhs + + val paramssNoContextBounds = + mapParamss(paramss) { + tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + }(identity) + + rhs match + case MacroTree(call) => + cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) + case _ => + addEvidenceParams( + cpy.DefDef(meth)( + name = normalizeName(meth, tpt).asTermName, + paramss = paramssNoContextBounds), + evidenceParamBuf.toList) + end elimContextBounds + + def addDefaultGetters(meth: DefDef)(using Context): Tree = + + /** The longest prefix of parameter lists in paramss whose total number of + * ValDefs does not exceed `n` + */ + def takeUpTo(paramss: List[ParamClause], n: Int): List[ParamClause] = paramss match + case ValDefs(vparams) :: paramss1 => + val len = vparams.length + if len <= n then vparams :: takeUpTo(paramss1, n - len) else Nil + case 
TypeDefs(tparams) :: paramss1 => + tparams :: takeUpTo(paramss1, n) + case _ => + Nil + + def dropContextBounds(tparam: TypeDef): TypeDef = + def dropInRhs(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, _) => + tbounds + case rhs @ LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(rhs)(tparams, dropInRhs(body)) + case _ => + rhs + cpy.TypeDef(tparam)(rhs = dropInRhs(tparam.rhs)) + + def paramssNoRHS = mapParamss(meth.paramss)(identity) { + vparam => + if vparam.rhs.isEmpty then vparam + else cpy.ValDef(vparam)(rhs = EmptyTree).withMods(vparam.mods | HasDefault) + } + + def getterParamss(n: Int): List[ParamClause] = + mapParamss(takeUpTo(paramssNoRHS, n)) { + tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + } { + vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + } + + def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match + case ValDefs(vparam :: vparams) :: paramss1 => + def defaultGetter: DefDef = + DefDef( + name = DefaultGetterName(meth.name, n), + paramss = getterParamss(n), + tpt = TypeTree(), + rhs = vparam.rhs + ) + .withMods(Modifiers( + meth.mods.flags & (AccessFlags | Synthetic) | (vparam.mods.flags & Inline), + meth.mods.privateWithin)) + val rest = defaultGetters(vparams :: paramss1, n + 1) + if vparam.rhs.isEmpty then rest else defaultGetter :: rest + case _ :: paramss1 => // skip empty parameter lists and type parameters + defaultGetters(paramss1, n) + case Nil => + Nil + + val defGetters = defaultGetters(meth.paramss, 0) + if defGetters.isEmpty then meth + else Thicket(cpy.DefDef(meth)(paramss = paramssNoRHS) :: defGetters) + end addDefaultGetters + + /** Add an explicit ascription to the `expectedTpt` to every tail splice. + * + * - `'{ x }` -> `'{ x }` + * - `'{ $x }` -> `'{ $x: T }` + * - `'{ if (...) $x else $y }` -> `'{ if (...) 
($x: T) else ($y: T) }` + * + * Note that the splice `$t: T` will be typed as `${t: Expr[T]}` + */ + def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { + def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { + // Add the expected type as an ascription + case _: untpd.Splice => + untpd.Typed(tree, expectedTpt).withSpan(tree.span) + case Typed(expr: untpd.Splice, tpt) => + cpy.Typed(tree)(expr, untpd.makeAndType(tpt, expectedTpt).withSpan(tpt.span)) + + // Propagate down the expected type to the leafs of the expression + case Block(stats, expr) => + cpy.Block(tree)(stats, adaptToExpectedTpt(expr)) + case If(cond, thenp, elsep) => + cpy.If(tree)(cond, adaptToExpectedTpt(thenp), adaptToExpectedTpt(elsep)) + case untpd.Parens(expr) => + cpy.Parens(tree)(adaptToExpectedTpt(expr)) + case Match(selector, cases) => + val newCases = cases.map(cdef => cpy.CaseDef(cdef)(body = adaptToExpectedTpt(cdef.body))) + cpy.Match(tree)(selector, newCases) + case untpd.ParsedTry(expr, handler, finalizer) => + cpy.ParsedTry(tree)(adaptToExpectedTpt(expr), adaptToExpectedTpt(handler), finalizer) + + // Tree does not need to be ascribed + case _ => + tree + } + adaptToExpectedTpt(tree) + } + + /** Add all evidence parameters in `params` as implicit parameters to `meth`. + * If the parameters of `meth` end in an implicit parameter list or using clause, + * evidence parameters are added in front of that list. Otherwise they are added + * as a separate parameter clause. 
+ */ + private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = + params match + case Nil => + meth + case evidenceParams => + val paramss1 = meth.paramss.reverse match + case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => + ((evidenceParams ++ vparams) :: rparamss).reverse + case _ => + meth.paramss :+ evidenceParams + cpy.DefDef(meth)(paramss = paramss1) + + /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */ + private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = + meth.paramss.reverse match { + case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => + vparams.takeWhile(_.name.is(EvidenceParamName)) + case _ => + Nil + } + + @sharable private val synthetic = Modifiers(Synthetic) + + private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { + var mods = tparam.rawMods + if (!keepAnnotations) mods = mods.withAnnotations(Nil) + tparam.withMods(mods & EmptyFlags | Param) + } + private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { + var mods = vparam.rawMods + if (!keepAnnotations) mods = mods.withAnnotations(Nil) + val hasDefault = if keepDefault then HasDefault else EmptyFlags + vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) + } + + def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = + paramss.foldLeft(fn) { (fn, params) => params match + case TypeDefs(params) => + TypeApply(fn, params.map(refOfDef)) + case (vparam: ValDef) :: _ if vparam.mods.is(Given) => + Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) + case _ => + Apply(fn, params.map(refOfDef)) + } + + /** The expansion of a class definition. 
See inline comments for what is involved */ + def classDef(cdef: TypeDef)(using Context): Tree = { + val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked + val className = normalizeName(cdef, impl).asTypeName + val parents = impl.parents + val mods = cdef.mods + val companionMods = mods + .withFlags((mods.flags & (AccessFlags | Final)).toCommonFlags) + .withMods(Nil) + .withAnnotations(Nil) + + var defaultGetters: List[Tree] = Nil + + def decompose(ddef: Tree): DefDef = ddef match { + case meth: DefDef => meth + case Thicket((meth: DefDef) :: defaults) => + defaultGetters = defaults + meth + } + + val constr1 = decompose(defDef(impl.constr, isPrimaryConstructor = true)) + + // The original type and value parameters in the constructor already have the flags + // needed to be type members (i.e. param, and possibly also private and local unless + // prefixed by type or val). `tparams` and `vparamss` are the type parameters that + // go in `constr`, the constructor after desugaring. + + /** Does `tree' look like a reference to AnyVal? Temporary test before we have inline classes */ + def isAnyVal(tree: Tree): Boolean = tree match { + case Ident(tpnme.AnyVal) => true + case Select(qual, tpnme.AnyVal) => isScala(qual) + case _ => false + } + def isScala(tree: Tree): Boolean = tree match { + case Ident(nme.scala) => true + case Select(Ident(nme.ROOTPKG), nme.scala) => true + case _ => false + } + + def namePos = cdef.sourcePos.withSpan(cdef.nameSpan) + + val isObject = mods.is(Module) + val isCaseClass = mods.is(Case) && !isObject + val isCaseObject = mods.is(Case) && isObject + val isEnum = mods.isEnumClass && !mods.is(Module) + def isEnumCase = mods.isEnumCase + def isNonEnumCase = !isEnumCase && (isCaseClass || isCaseObject) + val isValueClass = parents.nonEmpty && isAnyVal(parents.head) + // This is not watertight, but `extends AnyVal` will be replaced by `inline` later. 
+ + val originalTparams = constr1.leadingTypeParams + val originalVparamss = asTermOnly(constr1.trailingParamss) + lazy val derivedEnumParams = enumClass.typeParams.map(derivedTypeParamWithVariance) + val impliedTparams = + if (isEnumCase) { + val tparamReferenced = typeParamIsReferenced( + enumClass.typeParams, originalTparams, originalVparamss, parents) + if (originalTparams.isEmpty && (parents.isEmpty || tparamReferenced)) + derivedEnumParams.map(tdef => tdef.withFlags(tdef.mods.flags | PrivateLocal)) + else originalTparams + } + else originalTparams + + if mods.is(Trait) then + for vparams <- originalVparamss; vparam <- vparams do + if isByNameType(vparam.tpt) then + report.error(em"implementation restriction: traits cannot have by name parameters", vparam.srcPos) + + // Annotations on class _type_ parameters are set on the derived parameters + // but not on the constructor parameters. The reverse is true for + // annotations on class _value_ parameters. + val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrVparamss = + if (originalVparamss.isEmpty) { // ensure parameter list is non-empty + if (isCaseClass) + report.error(CaseClassMissingParamList(cdef), namePos) + ListOfNil + } + else if (isCaseClass && originalVparamss.head.exists(_.mods.isOneOf(GivenOrImplicit))) { + report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) + ListOfNil + } + else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + val derivedTparams = + constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => + derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) + val derivedVparamss = + constrVparamss.nestedMap(vparam => + derivedTermParam(vparam).withAnnotations(Nil)) + + val constr = cpy.DefDef(constr1)(paramss = joinParams(constrTparams, constrVparamss)) + + val (normalizedBody, enumCases, enumCompanionRef) = { + // Add constructor type parameters and evidence implicit 
parameters + // to auxiliary constructors; set defaultGetters as a side effect. + def expandConstructor(tree: Tree) = tree match { + case ddef: DefDef if ddef.name.isConstructorName => + decompose( + defDef( + addEvidenceParams( + cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), + evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + case stat => + stat + } + // The Identifiers defined by a case + def caseIds(tree: Tree): List[Ident] = tree match { + case tree: MemberDef => Ident(tree.name.toTermName) :: Nil + case PatDef(_, ids: List[Ident] @ unchecked, _, _) => ids + } + + val stats0 = impl.body.map(expandConstructor) + val stats = + if (ctx.owner eq defn.ScalaPackageClass) && defn.hasProblematicGetClass(className) then + stats0.filterConserve { + case ddef: DefDef => + ddef.name ne nme.getClass_ + case _ => + true + } + else + stats0 + + if (isEnum) { + val (enumCases, enumStats) = stats.partition(DesugarEnums.isEnumCase) + if (enumCases.isEmpty) + report.error(EnumerationsShouldNotBeEmpty(cdef), namePos) + else + enumCases.last.pushAttachment(DesugarEnums.DefinesEnumLookupMethods, ()) + val enumCompanionRef = TermRefTree() + val enumImport = + Import(enumCompanionRef, enumCases.flatMap(caseIds).map( + enumCase => + ImportSelector(enumCase.withSpan(enumCase.span.startPos)) + ) + ) + (enumImport :: enumStats, enumCases, enumCompanionRef) + } + else (stats, Nil, EmptyTree) + } + + def anyRef = ref(defn.AnyRefAlias.typeRef) + + val arity = constrVparamss.head.length + + val classTycon: Tree = TypeRefTree() // watching is set at end of method + + def appliedTypeTree(tycon: Tree, args: List[Tree]) = + (if (args.isEmpty) tycon else AppliedTypeTree(tycon, args)) + .withSpan(cdef.span.startPos) + + def isHK(tparam: Tree): Boolean = tparam match { + case TypeDef(_, LambdaTypeTree(tparams, body)) => true + case TypeDef(_, rhs: DerivedTypeTree) => isHK(rhs.watched) + case _ => false + } + + def appliedRef(tycon: 
Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { + val targs = for (tparam <- tparams) yield { + val targ = refOfDef(tparam) + def fullyApplied(tparam: Tree): Tree = tparam match { + case TypeDef(_, LambdaTypeTree(tparams, body)) => + AppliedTypeTree(targ, tparams.map(_ => WildcardTypeBoundsTree())) + case TypeDef(_, rhs: DerivedTypeTree) => + fullyApplied(rhs.watched) + case _ => + targ + } + if (widenHK) fullyApplied(tparam) else targ + } + appliedTypeTree(tycon, targs) + } + + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + } + + // a reference to the class type bound by `cdef`, with type parameters coming from the constructor + val classTypeRef = appliedRef(classTycon) + + // a reference to `enumClass`, with type parameters coming from the case constructor + lazy val enumClassTypeRef = + if (enumClass.typeParams.isEmpty) + enumClassRef + else if (originalTparams.isEmpty) + appliedRef(enumClassRef) + else { + report.error(TypedCaseDoesNotExplicitlyExtendTypedEnum(enumClass, cdef) + , cdef.srcPos.startPos) + appliedTypeTree(enumClassRef, constrTparams map (_ => anyRef)) + } + + // new C[Ts](paramss) + lazy val creatorExpr = + val vparamss = constrVparamss match + case (vparam :: _) :: _ if vparam.mods.is(Implicit) => // add a leading () to match class parameters + Nil :: constrVparamss + case _ => + if constrVparamss.nonEmpty && constrVparamss.forall { + case vparam :: _ => vparam.mods.is(Given) + case _ => false + } + then constrVparamss :+ Nil // add a trailing () to match class parameters + else constrVparamss + val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => + val app = Apply(nu, vparams.map(refOfDef)) + vparams match { + case vparam :: _ if vparam.mods.is(Given) => app.setApplyKind(ApplyKind.Using) + case _ => app + } + } + ensureApplied(nu) + + val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags + + // 
Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) + // def _1: T1 = this.p1 + // ... + // def _N: TN = this.pN (unless already given as valdef or parameterless defdef) + // def copy(p1: T1 = p1..., pN: TN = pN)(moreParams) = + // new C[...](p1, ..., pN)(moreParams) + val (caseClassMeths, enumScaffolding) = { + def syntheticProperty(name: TermName, tpt: Tree, rhs: Tree) = + DefDef(name, Nil, tpt, rhs).withMods(synthetic) + + def productElemMeths = + val caseParams = derivedVparamss.head.toArray + val selectorNamesInBody = normalizedBody.collect { + case vdef: ValDef if vdef.name.isSelectorName => + vdef.name + case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => + ddef.name + } + for i <- List.range(0, arity) + selName = nme.selectorName(i) + if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) + yield syntheticProperty(selName, caseParams(i).tpt, + Select(This(EmptyTypeIdent), caseParams(i).name)) + + def enumCaseMeths = + if isEnumCase then + val (ordinal, scaffolding) = nextOrdinal(className, CaseKind.Class, definesEnumLookupMethods(cdef)) + (ordinalMethLit(ordinal) :: Nil, scaffolding) + else (Nil, Nil) + def copyMeths = { + val hasRepeatedParam = constrVparamss.nestedExists { + case ValDef(_, tpt, _) => isRepeated(tpt) + } + if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued + else { + val copyFirstParams = derivedVparamss.head.map(vparam => + cpy.ValDef(vparam)(rhs = refOfDef(vparam))) + val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => + cpy.ValDef(vparam)(rhs = EmptyTree)) + DefDef( + nme.copy, + joinParams(derivedTparams, copyFirstParams :: copyRestParamss), + TypeTree(), + creatorExpr + ).withMods(Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags, constr1.mods.privateWithin)) :: Nil + } + } + + if isCaseClass then + val (enumMeths, enumScaffolding) = enumCaseMeths + (copyMeths ::: 
enumMeths ::: productElemMeths, enumScaffolding) + else (Nil, Nil) + } + + var parents1: List[untpd.Tree] = parents // !cc! need explicit type to make capture checking pass + if (isEnumCase && parents.isEmpty) + parents1 = enumClassTypeRef :: Nil + if (isNonEnumCase) + parents1 = parents1 :+ scalaDot(str.Product.toTypeName) :+ scalaDot(nme.Serializable.toTypeName) + if (isEnum) + parents1 = parents1 :+ ref(defn.EnumClass) + + // derived type classes of non-module classes go to their companions + val (clsDerived, companionDerived) = + if (mods.is(Module)) (impl.derived, Nil) else (Nil, impl.derived) + + // The thicket which is the desugared version of the companion object + // synthetic object C extends parentTpt derives class-derived { defs } + def companionDefs(parentTpt: Tree, defs: List[Tree]) = { + val mdefs = moduleDef( + ModuleDef( + className.toTermName, Template(emptyConstructor, parentTpt :: Nil, companionDerived, EmptyValDef, defs)) + .withMods(companionMods | Synthetic)) + .withSpan(cdef.span).toList + if (companionDerived.nonEmpty) + for (case modClsDef @ TypeDef(_, _) <- mdefs) + modClsDef.putAttachment(DerivingCompanion, impl.srcPos.startPos) + mdefs + } + + val companionMembers = defaultGetters ::: enumCases + + // The companion object definitions, if a companion is needed, Nil otherwise. + // companion definitions include: + // 1. If class is a case class case class C[Ts](p1: T1, ..., pN: TN)(moreParams): + // def apply[Ts](p1: T1, ..., pN: TN)(moreParams) = new C[Ts](p1, ..., pN)(moreParams) (unless C is abstract) + // def unapply[Ts]($1: C[Ts]) = $1 // if not repeated + // def unapplySeq[Ts]($1: C[Ts]) = $1 // if repeated + // 2. The default getters of the constructor + // The parent of the companion object of a non-parameterized case class + // (T11, ..., T1N) => ... => (TM1, ..., TMN) => C + // For all other classes, the parent is AnyRef. 
+ val companions = + if (isCaseClass) { + val applyMeths = + if (mods.is(Abstract)) Nil + else { + val appMods = + Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags).withPrivateWithin(constr1.mods.privateWithin) + val appParamss = + derivedVparamss.nestedZipWithConserve(constrVparamss)((ap, cp) => + ap.withMods(ap.mods | (cp.mods.flags & HasDefault))) + DefDef(nme.apply, joinParams(derivedTparams, appParamss), TypeTree(), creatorExpr) + .withMods(appMods) :: Nil + } + val unapplyMeth = { + val hasRepeatedParam = constrVparamss.head.exists { + case ValDef(_, tpt, _) => isRepeated(tpt) + } + val methName = if (hasRepeatedParam) nme.unapplySeq else nme.unapply + val unapplyParam = makeSyntheticParameter(tpt = classTypeRef) + val unapplyRHS = if (arity == 0) Literal(Constant(true)) else Ident(unapplyParam.name) + val unapplyResTp = if (arity == 0) Literal(Constant(true)) else TypeTree() + DefDef( + methName, + joinParams(derivedTparams, (unapplyParam :: Nil) :: Nil), + unapplyResTp, + unapplyRHS + ).withMods(synthetic) + } + val toStringMeth = + DefDef(nme.toString_, Nil, TypeTree(), Literal(Constant(className.toString))).withMods(Modifiers(Override | Synthetic)) + + companionDefs(anyRef, applyMeths ::: unapplyMeth :: toStringMeth :: companionMembers) + } + else if (companionMembers.nonEmpty || companionDerived.nonEmpty || isEnum) + companionDefs(anyRef, companionMembers) + else if (isValueClass) + companionDefs(anyRef, Nil) + else Nil + + enumCompanionRef match { + case ref: TermRefTree => // have the enum import watch the companion object + val (modVal: ValDef) :: _ = companions: @unchecked + ref.watching(modVal) + case _ => + } + + // For an implicit class C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, .., pMN: TMN), the method + // synthetic implicit C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, ..., pMN: TMN): C[Ts] = + // new C[Ts](p11, ..., p1N) ... 
(pM1, ..., pMN) =
+  val implicitWrappers =
+    if (!mods.isOneOf(GivenOrImplicit))
+      Nil
+    else if (ctx.owner.is(Package)) {
+      report.error(TopLevelImplicitClass(cdef), cdef.srcPos)
+      Nil
+    }
+    else if (mods.is(Trait)) {
+      report.error(TypesAndTraitsCantBeImplicit(), cdef.srcPos)
+      Nil
+    }
+    else if (isCaseClass) {
+      report.error(ImplicitCaseClass(cdef), cdef.srcPos)
+      Nil
+    }
+    else if (arity != 1 && !mods.is(Given)) {
+      report.error(ImplicitClassPrimaryConstructorArity(), cdef.srcPos)
+      Nil
+    }
+    else {
+      val defParamss = constrVparamss match {
+        case Nil :: paramss =>
+          paramss // drop leading () that got inserted by class
+                  // TODO: drop this once we do not silently insert empty class parameters anymore
+        case paramss => paramss
+      }
+      // implicit wrapper is typechecked in same scope as constructor, so
+      // we can reuse the constructor parameters; no derived params are needed.
+      DefDef(
+        className.toTermName, joinParams(constrTparams, defParamss),
+        classTypeRef, creatorExpr)
+        .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final)
+        .withSpan(cdef.span) :: Nil
+    }
+
+  val self1 = {
+    val selfType = if (self.tpt.isEmpty) classTypeRef else self.tpt
+    if (self.isEmpty) self
+    else cpy.ValDef(self)(tpt = selfType).withMods(self.mods | SelfName)
+  }
+
+  val cdef1 = addEnumFlags {
+    val tparamAccessors = {
+      val impliedTparamsIt = impliedTparams.iterator
+      derivedTparams.map(_.withMods(impliedTparamsIt.next().mods))
+    }
+    val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags
+    val vparamAccessors = {
+      val originalVparamsIt = originalVparamss.iterator.flatten
+      derivedVparamss match {
+        case first :: rest =>
+          first.map(_.withMods(originalVparamsIt.next().mods | caseAccessor)) ++
+          rest.flatten.map(_.withMods(originalVparamsIt.next().mods))
+        case _ =>
+          Nil
+      }
+    }
+    if mods.isAllOf(Given | Inline | Transparent) then
+      report.error("inline given instances cannot be transparent", cdef)
+    val classMods = if mods.is(Given) then
mods &~ (Inline | Transparent) | Synthetic else mods + cpy.TypeDef(cdef: TypeDef)( + name = className, + rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, + tparamAccessors ::: vparamAccessors ::: normalizedBody ::: caseClassMeths) + ).withMods(classMods) + } + + // install the watch on classTycon + classTycon match { + case tycon: DerivedTypeTree => tycon.watching(cdef1) + case _ => + } + + flatTree(cdef1 :: companions ::: implicitWrappers ::: enumScaffolding) + }.showing(i"desugared: $cdef --> $result", Printers.desugar) + + /** Expand + * + * package object name { body } + * + * to: + * + * package name { + * object `package` { body } + * } + */ + def packageModuleDef(mdef: ModuleDef)(using Context): Tree = + val impl = mdef.impl + val mods = mdef.mods + val moduleName = normalizeName(mdef, impl).asTermName + if mods.is(Package) then + checkPackageName(mdef) + PackageDef(Ident(moduleName), + cpy.ModuleDef(mdef)(nme.PACKAGE, impl).withMods(mods &~ Package) :: Nil) + else + mdef + + /** Expand + * + * object name extends parents { self => body } + * + * to: + * + * val name: name$ = New(name$) + * final class name$ extends parents { self: name.type => body } + */ + def moduleDef(mdef: ModuleDef)(using Context): Tree = { + val impl = mdef.impl + val mods = mdef.mods + val moduleName = normalizeName(mdef, impl).asTermName + def isEnumCase = mods.isEnumCase + Checking.checkWellFormedModule(mdef) + + if (mods.is(Package)) + packageModuleDef(mdef) + else if (isEnumCase) { + typeParamIsReferenced(enumClass.typeParams, Nil, Nil, impl.parents) + // used to check there are no illegal references to enum's type parameters in parents + expandEnumModule(moduleName, impl, mods, definesEnumLookupMethods(mdef), mdef.span) + } + else { + val clsName = moduleName.moduleClassName + val clsRef = Ident(clsName) + val modul = ValDef(moduleName, clsRef, New(clsRef, Nil)) + .withMods(mods.toTermFlags & RetainedModuleValFlags | ModuleValCreationFlags) + 
.withSpan(mdef.span.startPos) + val ValDef(selfName, selfTpt, _) = impl.self + val selfMods = impl.self.mods + if (!selfTpt.isEmpty) report.error(ObjectMayNotHaveSelfType(mdef), impl.self.srcPos) + val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(moduleName)), impl.self.rhs) + .withMods(selfMods) + .withSpan(impl.self.span.orElse(impl.span.startPos)) + val clsTmpl = cpy.Template(impl)(self = clsSelf, body = impl.body) + val cls = TypeDef(clsName, clsTmpl) + .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags) + .withEndMarker(copyFrom = mdef) // copy over the end marker position to the module class def + Thicket(modul, classDef(cls).withSpan(mdef.span)) + } + } + + def extMethod(mdef: DefDef, extParamss: List[ParamClause])(using Context): DefDef = + cpy.DefDef(mdef)( + name = normalizeName(mdef, mdef.tpt).asTermName, + paramss = + if mdef.name.isRightAssocOperatorName then + val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters + + paramss match + case params :: paramss1 => // `params` must have a single parameter and without `given` flag + + def badRightAssoc(problem: String) = + report.error(i"right-associative extension method $problem", mdef.srcPos) + extParamss ++ mdef.paramss + + params match + case ValDefs(vparam :: Nil) => + if !vparam.mods.is(Given) then + // we merge the extension parameters with the method parameters, + // swapping the operator arguments: + // e.g. + // extension [A](using B)(c: C)(using D) + // def %:[E](f: F)(g: G)(using H): Res = ??? + // will be encoded as + // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? 
+ val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) + leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 + else + badRightAssoc("cannot start with using clause") + case _ => + badRightAssoc("must start with a single parameter") + case _ => + // no value parameters, so not an infix operator. + extParamss ++ mdef.paramss + else + extParamss ++ mdef.paramss + ).withMods(mdef.mods | ExtensionMethod) + + /** Transform extension construct to list of extension methods */ + def extMethods(ext: ExtMethods)(using Context): Tree = flatTree { + ext.methods map { + case exp: Export => exp + case mdef: DefDef => defDef(extMethod(mdef, ext.paramss)) + } + } + /** Transforms + * + * type t >: Low <: Hi + * to + * + * @patternType type $T >: Low <: Hi + * + * if the type has a pattern variable name + */ + def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = { + assert(ctx.mode.is(Mode.QuotedPattern)) + if tree.name.isVarPattern && !tree.isBackquoted then + val patternTypeAnnot = New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span) + val mods = tree.mods.withAddedAnnotation(patternTypeAnnot) + tree.withMods(mods) + else if tree.name.startsWith("$") && !tree.isBackquoted then + report.error( + """Quoted pattern variable names starting with $ are not supported anymore. + |Use lower cases type pattern name instead. 
+            |""".stripMargin,
+        tree.srcPos)
+      tree
+    else tree
+  }
+
+  def checkPackageName(mdef: ModuleDef | PackageDef)(using Context): Unit =
+
+    def check(name: Name, errSpan: Span): Unit = name match
+      case name: SimpleName if !errSpan.isSynthetic && name.exists(Chars.willBeEncoded) =>
+        report.warning(em"The package name `$name` will be encoded on the classpath, and can lead to undefined behaviour.", mdef.source.atSpan(errSpan))
+      case _ =>
+
+    def loop(part: RefTree): Unit = part match
+      case part @ Ident(name) => check(name, part.span)
+      case part @ Select(qual: RefTree, name) =>
+        check(name, part.nameSpan)
+        loop(qual)
+      case _ =>
+
+    mdef match
+      case pdef: PackageDef => loop(pdef.pid)
+      case mdef: ModuleDef if mdef.mods.is(Package) => check(mdef.name, mdef.nameSpan)
+      case _ =>
+  end checkPackageName
+
+  /** The normalized name of `mdef`. This means
+   *   1. Check that the name does not redefine a Scala core class.
+   *      If it does redefine, issue an error and return a mangled name instead
+   *      of the original one.
+   *   2. If the name is missing (this can be the case for instance definitions),
+   *      invent one instead.
+   */
+  def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = {
+    var name = mdef.name
+    if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl))
+    def errPos = mdef.source.atSpan(mdef.nameSpan)
+    if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) {
+      val kind = if (name.isTypeName) "class" else "object"
+      report.error(IllegalRedefinitionOfStandardKind(kind, name), errPos)
+      name = name.errorName
+    }
+    name
+  }
+
+  /** Invent a name for an anonymous given of type or template `impl`.
*/ + def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = + val str = impl match + case impl: Template => + if impl.parents.isEmpty then + report.error(AnonymousInstanceCannotBeEmpty(impl), impl.srcPos) + nme.ERROR.toString + else + impl.parents.map(inventTypeName(_)).mkString("given_", "_", "") + case impl: Tree => + "given_" ++ inventTypeName(impl) + str.toTermName.asSimpleName + + private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { + private def extractArgs(args: List[Tree])(using Context): String = + args.map(argNameExtractor.apply("", _)).mkString("_") + override def apply(x: String, tree: Tree)(using Context): String = + if (x.isEmpty) + tree match { + case Select(pre, nme.CONSTRUCTOR) => foldOver(x, pre) + case tree: RefTree => + if tree.name.isTypeName then tree.name.toString + else s"${tree.name}_type" + case tree: TypeDef => tree.name.toString + case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => + s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case InfixOp(left, op, right) => + if followArgs then s"${op.name}_${extractArgs(List(left, right))}" + else op.name.toString + case tree: LambdaTypeTree => + apply(x, tree.body) + case tree: Tuple => + extractArgs(tree.trees) + case tree: Function if tree.args.nonEmpty => + if followArgs then s"${extractArgs(tree.args)}_to_${apply("", tree.body)}" + else "Function" + case _ => foldOver(x, tree) + } + else x + } + private val typeNameExtractor = NameExtractor(followArgs = true) + private val argNameExtractor = NameExtractor(followArgs = false) + + private def inventTypeName(tree: Tree)(using Context): String = typeNameExtractor("", tree) + + /**This will check if this def tree is marked to define enum lookup methods, + * this is not recommended to call more than once per tree + */ + private def definesEnumLookupMethods(ddef: DefTree): Boolean = + ddef.removeAttachment(DefinesEnumLookupMethods).isDefined + + /** val p1, ..., pN: T = E + * 
==> + * makePatDef[[val p1: T1 = E]]; ...; makePatDef[[val pN: TN = E]] + * + * case e1, ..., eN + * ==> + * expandSimpleEnumCase([case e1]); ...; expandSimpleEnumCase([case eN]) + */ + def patDef(pdef: PatDef)(using Context): Tree = flatTree { + val PatDef(mods, pats, tpt, rhs) = pdef + if mods.isEnumCase then + def expand(id: Ident, definesLookups: Boolean) = + expandSimpleEnumCase(id.name.asTermName, mods, definesLookups, + Span(id.span.start, id.span.end, id.span.start)) + + val ids = pats.asInstanceOf[List[Ident]] + if definesEnumLookupMethods(pdef) then + ids.init.map(expand(_, false)) ::: expand(ids.last, true) :: Nil + else + ids.map(expand(_, false)) + else { + val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) + pats1 map (makePatDef(pdef, mods, _, rhs)) + } + } + + /** The selector of a match, which depends of the given `checkMode`. + * @param sel the original selector + * @return if `checkMode` is + * - None : sel @unchecked + * - Exhaustive : sel + * - IrrefutablePatDef, + * IrrefutableGenFrom: sel with attachment `CheckIrrefutable -> checkMode` + */ + def makeSelector(sel: Tree, checkMode: MatchCheck)(using Context): Tree = + checkMode match + case MatchCheck.None => + Annotated(sel, New(ref(defn.UncheckedAnnot.typeRef))) + + case MatchCheck.Exhaustive => + sel + + case MatchCheck.IrrefutablePatDef | MatchCheck.IrrefutableGenFrom => + // TODO: use `pushAttachment` and investigate duplicate attachment + sel.withAttachment(CheckIrrefutable, checkMode) + sel + end match + + /** If `pat` is a variable pattern, + * + * val/var/lazy val p = e + * + * Otherwise, in case there is exactly one variable x_1 in pattern + * val/var/lazy val p = e ==> val/var/lazy val x_1 = (e: @unchecked) match (case p => (x_1)) + * + * in case there are zero or more than one variables in pattern + * val/var/lazy p = e ==> private[this] synthetic [lazy] val t$ = (e: @unchecked) match (case p => (x_1, ..., x_N)) + * val/var/def x_1 = t$._1 + * ... 
+ * val/var/def x_N = t$._N + * If the original pattern variable carries a type annotation, so does the corresponding + * ValDef or DefDef. + */ + def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(using Context): Tree = pat match { + case IdPattern(id, tpt) => + val id1 = + if id.name == nme.WILDCARD + then cpy.Ident(id)(WildcardParamName.fresh()) + else id + derivedValDef(original, id1, tpt, rhs, mods) + case _ => + + def filterWildcardGivenBinding(givenPat: Bind): Boolean = + givenPat.name != nme.WILDCARD + + def errorOnGivenBinding(bind: Bind)(using Context): Boolean = + report.error( + em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, + |please bind to an identifier and use an alias given.""".stripMargin, bind) + false + + def isTuplePattern(arity: Int): Boolean = pat match { + case Tuple(pats) if pats.size == arity => + pats.forall(isVarPattern) + case _ => false + } + val isMatchingTuple: Tree => Boolean = { + case Tuple(es) => isTuplePattern(es.length) + case _ => false + } + + // We can only optimize `val pat = if (...) 
e1 else e2` if: + // - `e1` and `e2` are both tuples of arity N + // - `pat` is a tuple of N variables or wildcard patterns like `(x1, x2, ..., xN)` + val tupleOptimizable = forallResults(rhs, isMatchingTuple) + + val inAliasGenerator = original match + case _: GenAlias => true + case _ => false + + val vars = + if (tupleOptimizable) // include `_` + pat match + case Tuple(pats) => pats.map { case id: Ident => id -> TypeTree() } + else + getVariables( + tree = pat, + shouldAddGiven = + if inAliasGenerator then + filterWildcardGivenBinding + else + errorOnGivenBinding + ) // no `_` + + val ids = for ((named, _) <- vars) yield Ident(named.name) + val matchExpr = + if (tupleOptimizable) rhs + else + val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids)) + Match(makeSelector(rhs, MatchCheck.IrrefutablePatDef), caseDef :: Nil) + vars match { + case Nil if !mods.is(Lazy) => + matchExpr + case (named, tpt) :: Nil => + derivedValDef(original, named, tpt, matchExpr, mods) + case _ => + val tmpName = UniqueName.fresh() + val patMods = + mods & Lazy | Synthetic | (if (ctx.owner.isClass) PrivateLocal else EmptyFlags) + val firstDef = + ValDef(tmpName, TypeTree(), matchExpr) + .withSpan(pat.span.union(rhs.span)).withMods(patMods) + val useSelectors = vars.length <= 22 + def selector(n: Int) = + if useSelectors then Select(Ident(tmpName), nme.selectorName(n)) + else Apply(Select(Ident(tmpName), nme.apply), Literal(Constant(n)) :: Nil) + val restDefs = + for (((named, tpt), n) <- vars.zipWithIndex if named.name != nme.WILDCARD) + yield + if mods.is(Lazy) then + DefDef(named.name.asTermName, Nil, tpt, selector(n)) + .withMods(mods &~ Lazy) + .withSpan(named.span) + else + valDef( + ValDef(named.name.asTermName, tpt, selector(n)) + .withMods(mods) + .withSpan(named.span) + ) + flatTree(firstDef :: restDefs) + } + } + + /** Expand variable identifier x to x @ _ */ + def patternVar(tree: Tree)(using Context): Bind = { + val Ident(name) = unsplice(tree): @unchecked + Bind(name, 
Ident(nme.WILDCARD)).withSpan(tree.span) + } + + /** The type of tests that check whether a MemberDef is OK for some flag. + * The test succeeds if the partial function is defined and returns true. + */ + type MemberDefTest = PartialFunction[MemberDef, Boolean] + + val legalOpaque: MemberDefTest = { + case TypeDef(_, rhs) => + def rhsOK(tree: Tree): Boolean = tree match { + case bounds: TypeBoundsTree => !bounds.alias.isEmpty + case _: Template | _: MatchTypeTree => false + case LambdaTypeTree(_, body) => rhsOK(body) + case _ => true + } + rhsOK(rhs) + } + + def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = + def check(rhs: Tree): MemberDef = rhs match + case bounds: TypeBoundsTree if bounds.alias.isEmpty => + report.error(i"opaque type must have a right-hand side", tree.srcPos) + tree.withMods(tree.mods.withoutFlags(Opaque)) + case LambdaTypeTree(_, body) => check(body) + case _ => tree + if !tree.mods.is(Opaque) then tree + else tree match + case TypeDef(_, rhs) => check(rhs) + case _ => tree + + /** Check that modifiers are legal for the definition `tree`. + * Right now, we only check for `opaque`. TODO: Move other modifier checks here. 
+ */ + def checkModifiers(tree: Tree)(using Context): Tree = tree match { + case tree: MemberDef => + var tested: MemberDef = tree + def checkApplicable(flag: Flag, test: MemberDefTest): MemberDef = + if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) { + report.error(ModifierNotAllowedForDefinition(flag), tree.srcPos) + tested.withMods(tested.mods.withoutFlags(flag)) + } else tested + tested = checkOpaqueAlias(tested) + tested = checkApplicable(Opaque, legalOpaque) + tested + case _ => + tree + } + + def defTree(tree: Tree)(using Context): Tree = + checkModifiers(tree) match { + case tree: ValDef => valDef(tree) + case tree: TypeDef => + if (tree.isClassDef) classDef(tree) + else if (ctx.mode.is(Mode.QuotedPattern)) quotedPatternTypeDef(tree) + else tree + case tree: DefDef => + if (tree.name.isConstructorName) tree // was already handled by enclosing classDef + else defDef(tree) + case tree: ModuleDef => moduleDef(tree) + case tree: PatDef => patDef(tree) + } + + /** { stats; } + * ==> + * { stats; () } + */ + def block(tree: Block)(using Context): Block = tree.expr match { + case EmptyTree => + cpy.Block(tree)(tree.stats, + unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) + case _ => + tree + } + + /** Translate infix operation expression + * + * l op r ==> l.op(r) if op is left-associative + * ==> r.op(l) if op is right-associative + */ + def binop(left: Tree, op: Ident, right: Tree)(using Context): Apply = { + def assignToNamedArg(arg: Tree) = arg match { + case Assign(Ident(name), rhs) => cpy.NamedArg(arg)(name, rhs) + case _ => arg + } + def makeOp(fn: Tree, arg: Tree, selectPos: Span) = + val sel = Select(fn, op.name).withSpan(selectPos) + if (left.sourcePos.endLine < op.sourcePos.startLine) + sel.pushAttachment(MultiLineInfix, ()) + arg match + case Parens(arg) => + Apply(sel, assignToNamedArg(arg) :: Nil) + case Tuple(args) if args.exists(_.isInstanceOf[Assign]) => + Apply(sel, 
args.mapConserve(assignToNamedArg)) + case Tuple(args) => + Apply(sel, arg :: Nil).setApplyKind(ApplyKind.InfixTuple) + case _ => + Apply(sel, arg :: Nil) + + if op.name.isRightAssocOperatorName then + makeOp(right, left, Span(op.span.start, right.span.end)) + else + makeOp(left, right, Span(left.span.start, op.span.end, op.span.start)) + } + + /** Translate throws type `A throws E1 | ... | En` to + * $throws[... $throws[A, E1] ... , En]. + */ + def throws(tpt: Tree, op: Ident, excepts: Tree)(using Context): AppliedTypeTree = excepts match + case Parens(excepts1) => + throws(tpt, op, excepts1) + case InfixOp(l, bar @ Ident(tpnme.raw.BAR), r) => + throws(throws(tpt, op, l), bar, r) + case e => + AppliedTypeTree( + TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) + + /** Translate tuple expressions of arity <= 22 + * + * () ==> () + * (t) ==> t + * (t1, ..., tN) ==> TupleN(t1, ..., tN) + */ + def smallTuple(tree: Tuple)(using Context): Tree = { + val ts = tree.trees + val arity = ts.length + assert(arity <= Definitions.MaxTupleArity) + def tupleTypeRef = defn.TupleType(arity).nn + if (arity == 0) + if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral + else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) + else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) + } + + private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match + case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true + case stat: ModuleDef => + stat.mods.isOneOf(GivenOrImplicit) + case stat: TypeDef => + !stat.isClassDef || stat.mods.isOneOf(GivenOrImplicit) + case _ => + false + + /** Assuming `src` contains top-level definition, returns the name that should + * be using for the package object that will wrap them. 
+   */
+  def packageObjectName(src: SourceFile): TermName =
+    val fileName = src.file.name
+    val sourceName = fileName.take(fileName.lastIndexOf('.'))
+    (sourceName ++ str.TOPLEVEL_SUFFIX).toTermName
+
+  /** Group all definitions that can't be at the toplevel in
+   *  an object named `<source>$package` where `<source>` is the name of the source file.
+   *  Definitions that can't be at the toplevel are:
+   *
+   *   - all pattern, value and method definitions
+   *   - non-class type definitions
+   *   - implicit classes and objects
+   *   - "companion objects" of wrapped type definitions
+   *     (i.e. objects having the same name as a wrapped type)
+   */
+  def packageDef(pdef: PackageDef)(using Context): PackageDef = {
+    checkPackageName(pdef)
+    val wrappedTypeNames = pdef.stats.collect {
+      case stat: TypeDef if isTopLevelDef(stat) => stat.name
+    }
+    def inPackageObject(stat: Tree) =
+      isTopLevelDef(stat) || {
+        stat match
+          case stat: ModuleDef =>
+            wrappedTypeNames.contains(stat.name.stripModuleClassSuffix.toTypeName)
+          case _ =>
+            false
+      }
+    val (nestedStats, topStats) = pdef.stats.partition(inPackageObject)
+    if (nestedStats.isEmpty) pdef
+    else {
+      val name = packageObjectName(ctx.source)
+      val grouped =
+        ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats))
+          .withMods(Modifiers(Synthetic))
+      cpy.PackageDef(pdef)(pdef.pid, topStats :+ grouped)
+    }
+  }
+
+  /** Make closure corresponding to function.
+   *      params => body
+   *  ==>
+   *      def $anonfun(params) = body
+   *      Closure($anonfun)
+   */
+  def makeClosure(params: List[ValDef], body: Tree, tpt: Tree | Null = null, isContextual: Boolean, span: Span)(using Context): Block =
+    Block(
+      DefDef(nme.ANON_FUN, params :: Nil, if (tpt == null) TypeTree() else tpt, body)
+        .withSpan(span)
+        .withMods(synthetic | Artifact),
+      Closure(Nil, Ident(nme.ANON_FUN), if (isContextual) ContextualEmptyTree else EmptyTree))
+
+  /** If `nparams` == 1, expand partial function
+   *
+   *       { cases }
+   *  ==>
+   *       x$1 => (x$1 @unchecked?)
match { cases } + * + * If `nparams` != 1, expand instead to + * + * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked?) match { cases } + */ + def makeCaseLambda(cases: List[CaseDef], checkMode: MatchCheck, nparams: Int = 1)(using Context): Function = { + val params = (1 to nparams).toList.map(makeSyntheticParameter(_)) + val selector = makeTuple(params.map(p => Ident(p.name))) + Function(params, Match(makeSelector(selector, checkMode), cases)) + } + + /** Map n-ary function `(x1: T1, ..., xn: Tn) => body` where n != 1 to unary function as follows: + * + * (x$1: (T1, ..., Tn)) => { + * def x1: T1 = x$1._1 + * ... + * def xn: Tn = x$1._n + * body + * } + * + * or if `isGenericTuple` + * + * (x$1: (T1, ... Tn) => { + * def x1: T1 = x$1.apply(0) + * ... + * def xn: Tn = x$1.apply(n-1) + * body + * } + * + * If some of the Ti's are absent, omit the : (T1, ..., Tn) type ascription + * in the selector. + */ + def makeTupledFunction(params: List[ValDef], body: Tree, isGenericTuple: Boolean)(using Context): Tree = { + val param = makeSyntheticParameter( + tpt = + if params.exists(_.tpt.isEmpty) then TypeTree() + else Tuple(params.map(_.tpt))) + def selector(n: Int) = + if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n))) + else Select(refOfDef(param), nme.selectorName(n)) + val vdefs = + params.zipWithIndex.map { + case (param, idx) => + ValDef(param.name, param.tpt, selector(idx)) + .withSpan(param.span) + .withAttachment(UntupledParam, ()) + .withFlags(Synthetic) + } + Function(param :: Nil, Block(vdefs, body)) + } + + /** Convert a tuple pattern with given `elems` to a sequence of `ValDefs`, + * skipping elements that are not convertible. 
+   */
+  def patternsToParams(elems: List[Tree])(using Context): List[ValDef] =
+    def toParam(elem: Tree, tpt: Tree): Tree =
+      elem match
+        case Annotated(elem1, _) => toParam(elem1, tpt)
+        case Typed(elem1, tpt1) => toParam(elem1, tpt1)
+        case Ident(id: TermName) => ValDef(id, tpt, EmptyTree).withFlags(Param)
+        case _ => EmptyTree
+    elems.map(param => toParam(param, TypeTree()).withSpan(param.span)).collect {
+      case vd: ValDef => vd
+    }
+
+  def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = {
+    val mods = if (isErased) Given | Erased else Given
+    val params = makeImplicitParameters(formals, mods)
+    FunctionWithMods(params, body, Modifiers(mods))
+  }
+
+  private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = {
+    val vdef = ValDef(named.name.asTermName, tpt, rhs)
+      .withMods(mods)
+      .withSpan(original.span.withPoint(named.span.start))
+    val mayNeedSetter = valDef(vdef)
+    mayNeedSetter
+  }
+
+  private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit src: SourceFile) =
+    DefDef(named.name.asTermName, Nil, tpt, rhs)
+      .withMods(mods)
+      .withSpan(original.span.withPoint(named.span.start))
+
+  /** Main desugaring method */
+  def apply(tree: Tree, pt: Type = NoType)(using Context): Tree = {
+
+    /** Create tree for for-comprehension `<for (enums) do body>` or
+     *   `<for (enums) yield body>` where mapName and flatMapName are chosen
+     *  corresponding to whether this is a for-do or a for-yield.
+     *  The creation performs the following rewrite rules:
+     *
+     *  1.
+     *
+     *    for (P <- G) E   ==>   G.foreach (P => E)
+     *
+     *  Here and in the following (P => E) is interpreted as the function (P => E)
+     *  if P is a variable pattern and as the partial function { case P => E } otherwise.
+     *
+     *  2.
+     *
+     *    for (P <- G) yield E  ==>  G.map (P => E)
+     *
+     *  3.
+     *
+     *    for (P_1 <- G_1; P_2 <- G_2; ...) ...
+     *      ==>
+     *    G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
+     *
+     *  4.
+ * + * for (P <- G; E; ...) ... + * => + * for (P <- G.filter (P => E); ...) ... + * + * 5. For any N: + * + * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * ==> + * for (TupleN(P_1, P_2, ... P_N) <- + * for (x_1 @ P_1 <- G) yield { + * val x_2 @ P_2 = E_2 + * ... + * val x_N & P_N = E_N + * TupleN(x_1, ..., x_N) + * } ...) + * + * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated + * and the variable constituting P_i is used instead of x_i + * + * @param mapName The name to be used for maps (either map or foreach) + * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) + * @param enums The enumerators in the for expression + * @param body The body of the for expression + */ + def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Tree], body: Tree): Tree = trace(i"make for ${ForYield(enums, body)}", show = true) { + + /** Let `pat` be `gen`'s pattern. Make a function value `pat => body`. + * If `pat` is a var pattern `id: T` then this gives `(id: T) => body`. + * Otherwise this gives `{ case pat => body }`, where `pat` is checked to be + * irrefutable if `gen`'s checkMode is GenCheckMode.Check. + */ + def makeLambda(gen: GenFrom, body: Tree): Tree = gen.pat match { + case IdPattern(named, tpt) if gen.checkMode != GenCheckMode.FilterAlways => + Function(derivedValDef(gen.pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body) + case _ => + val matchCheckMode = + if (gen.checkMode == GenCheckMode.Check || gen.checkMode == GenCheckMode.CheckAndFilter) MatchCheck.IrrefutableGenFrom + else MatchCheck.None + makeCaseLambda(CaseDef(gen.pat, EmptyTree, body) :: Nil, matchCheckMode) + } + + /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap + * it in a Bind with a fresh name. Return the transformed pattern, and the identifier + * that refers to the bound variable for the pattern. Wildcard Binds are + * also replaced by Binds with fresh names. 
+ */ + def makeIdPat(pat: Tree): (Tree, Ident) = pat match { + case bind @ Bind(name, pat1) => + if name == nme.WILDCARD then + val name = UniqueName.fresh() + (cpy.Bind(pat)(name, pat1).withMods(bind.mods), Ident(name)) + else (pat, Ident(name)) + case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => (id, id) + case Typed(id: Ident, _) if isVarPattern(id) && id.name != nme.WILDCARD => (pat, id) + case _ => + val name = UniqueName.fresh() + (Bind(name, pat), Ident(name)) + } + + /** Make a pattern filter: + * rhs.withFilter { case pat => true case _ => false } + * + * On handling irrefutable patterns: + * The idea is to wait until the pattern matcher sees a call + * + * xs withFilter { cases } + * + * where cases can be proven to be refutable i.e. cases would be + * equivalent to { case _ => true } + * + * In that case, compile to + * + * xs withFilter alwaysTrue + * + * where `alwaysTrue` is a predefined function value: + * + * val alwaysTrue: Any => Boolean = true + * + * In the libraries operations can take advantage of alwaysTrue to shortcircuit the + * withFilter call. + * + * def withFilter(f: Elem => Boolean) = + * if (f eq alwaysTrue) this // or rather identity filter monadic applied to this + * else real withFilter + */ + def makePatFilter(rhs: Tree, pat: Tree): Tree = { + val cases = List( + CaseDef(pat, EmptyTree, Literal(Constant(true))), + CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))) + Apply(Select(rhs, nme.withFilter), makeCaseLambda(cases, MatchCheck.None)) + } + + /** Is pattern `pat` irrefutable when matched against `rhs`? + * We only can do a simple syntactic check here; a more refined check + * is done later in the pattern matcher (see discussion in @makePatFilter). 
+ */ + def isIrrefutable(pat: Tree, rhs: Tree): Boolean = { + def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match { + case Tuple(trees) => (pats corresponds trees)(isIrrefutable) + case Parens(rhs1) => matchesTuple(pats, rhs1) + case Block(_, rhs1) => matchesTuple(pats, rhs1) + case If(_, thenp, elsep) => matchesTuple(pats, thenp) && matchesTuple(pats, elsep) + case Match(_, cases) => cases forall (matchesTuple(pats, _)) + case CaseDef(_, _, rhs1) => matchesTuple(pats, rhs1) + case Throw(_) => true + case _ => false + } + pat match { + case Bind(_, pat1) => isIrrefutable(pat1, rhs) + case Parens(pat1) => isIrrefutable(pat1, rhs) + case Tuple(pats) => matchesTuple(pats, rhs) + case _ => isVarPattern(pat) + } + } + + /** Is `pat` of the form `x`, `x T`, or `given T`? when used as the lhs of a generator, + * these are all considered irrefutable. + */ + def isVarBinding(pat: Tree): Boolean = pat match + case pat @ Bind(_, pat1) if pat.mods.is(Given) => isVarBinding(pat1) + case IdPattern(_) => true + case _ => false + + def needsNoFilter(gen: GenFrom): Boolean = gen.checkMode match + case GenCheckMode.FilterAlways => false // pattern was prefixed by `case` + case GenCheckMode.FilterNow | GenCheckMode.CheckAndFilter => isVarBinding(gen.pat) || isIrrefutable(gen.pat, gen.expr) + case GenCheckMode.Check => true + case GenCheckMode.Ignore => true + + /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when + * matched against `rhs`. 
+ */ + def rhsSelect(gen: GenFrom, name: TermName) = { + val rhs = if (needsNoFilter(gen)) gen.expr else makePatFilter(gen.expr, gen.pat) + Select(rhs, name) + } + + enums match { + case (gen: GenFrom) :: Nil => + Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => + val cont = makeFor(mapName, flatMapName, rest, body) + Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) + case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => + val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) + val pats = valeqs map { case GenAlias(pat, _) => pat } + val rhss = valeqs map { case GenAlias(_, rhs) => rhs } + val (defpat0, id0) = makeIdPat(gen.pat) + val (defpats, ids) = (pats map makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) + val allpats = gen.pat :: pats + val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, vfrom1 :: rest1, body) + case (gen: GenFrom) :: test :: rest => + val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) + val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, genFrom :: rest, body) + case _ => + EmptyTree //may happen for erroneous input + } + } + + def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match { + case Parens(body1) => + makePolyFunction(targs, body1, pt) + case Block(Nil, body1) => + makePolyFunction(targs, body1, pt) + case Function(vargs, res) => + assert(targs.nonEmpty) + // TODO: Figure out if we need a `PolyFunctionWithMods` instead. 
+ val mods = body match { + case body: FunctionWithMods => body.mods + case _ => untpd.EmptyModifiers + } + val polyFunctionTpt = ref(defn.PolyFunctionType) + val applyTParams = targs.asInstanceOf[List[TypeDef]] + if (ctx.mode.is(Mode.Type)) { + // Desugar [T_1, ..., T_M] -> (P_1, ..., P_N) => R + // Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } + + val applyVParams = vargs.zipWithIndex.map { + case (p: ValDef, _) => p.withAddedFlags(mods.flags) + case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) + } + RefinedTypeTree(polyFunctionTpt, List( + DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) + )) + } + else { + // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body + // with pt [S_1, ..., S_M] -> (O_1, ..., O_N) => R + // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R2 = body } + // where R2 is R, with all references to S_1..S_M replaced with T1..T_M. + + def typeTree(tp: Type) = tp match + case RefinedType(parent, nme.apply, PolyType(_, mt)) if parent.typeSymbol eq defn.PolyFunctionClass => + var bail = false + def mapper(tp: Type, topLevel: Boolean = false): Tree = tp match + case tp: TypeRef => ref(tp) + case tp: TypeParamRef => Ident(applyTParams(tp.paramNum).name) + case AppliedType(tycon, args) => AppliedTypeTree(mapper(tycon), args.map(mapper(_))) + case _ => if topLevel then TypeTree() else { bail = true; genericEmptyTree } + val mapped = mapper(mt.resultType, topLevel = true) + if bail then TypeTree() else mapped + case _ => TypeTree() + + val applyVParams = vargs.asInstanceOf[List[ValDef]] + .map(varg => varg.withAddedFlags(mods.flags | Param)) + New(Template(emptyConstructor, List(polyFunctionTpt), Nil, EmptyValDef, + List(DefDef(nme.apply, applyTParams :: applyVParams :: Nil, typeTree(pt), res)) + )) + } + case _ => + // may happen for erroneous input. An error will already have been reported. 
+ assert(ctx.reporter.errorsReported) + EmptyTree + } + + // begin desugar + + // Special case for `Parens` desugaring: unlike all the desugarings below, + // its output is not a new tree but an existing one whose position should + // be preserved, so we shouldn't call `withPos` on it. + tree match { + case Parens(t) => + return t + case _ => + } + + val desugared = tree match { + case PolyFunction(targs, body) => + makePolyFunction(targs, body, pt) orElse tree + case SymbolLit(str) => + Apply( + ref(defn.ScalaSymbolClass.companionModule.termRef), + Literal(Constant(str)) :: Nil) + case InterpolatedString(id, segments) => + val strs = segments map { + case ts: Thicket => ts.trees.head + case t => t + } + val elems = segments flatMap { + case ts: Thicket => ts.trees.tail + case t => Nil + } map { (t: Tree) => t match + // !cc! explicitly typed parameter (t: Tree) is needed since otherwise + // we get an error similar to #16268. (The explicit type constrains the type of `segments` + // which is otherwise List[{*} tree]) + case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." 
+ case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala
+ case t => t
+ }
+ // This is a deliberate departure from scalac, where StringContext is not rooted (See #4732)
+ Apply(Select(Apply(scalaDot(nme.StringContext), strs), id).withSpan(tree.span), elems)
+ case PostfixOp(t, op) =>
+ if (ctx.mode is Mode.Type) && !isBackquoted(op) && op.name == tpnme.raw.STAR then
+ if ctx.isJava then
+ AppliedTypeTree(ref(defn.RepeatedParamType), t)
+ else
+ Annotated(
+ AppliedTypeTree(ref(defn.SeqType), t),
+ New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil))
+ else
+ assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode)
+ Select(t, op.name)
+ case PrefixOp(op, t) =>
+ val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme
+ Select(t, nspace.UNARY_PREFIX ++ op.name)
+ case ForDo(enums, body) =>
+ makeFor(nme.foreach, nme.foreach, enums, body) orElse tree
+ case ForYield(enums, body) =>
+ makeFor(nme.map, nme.flatMap, enums, body) orElse tree
+ case PatDef(mods, pats, tpt, rhs) =>
+ val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt))
+ flatTree(pats1 map (makePatDef(tree, mods, _, rhs)))
+ case ext: ExtMethods =>
+ Block(List(ext), Literal(Constant(())).withSpan(ext.span))
+ case CapturingTypeTree(refs, parent) =>
+ // convert `{refs} T` to `T @retains refs`
+ // `{refs}-> T` to `-> (T @retainsByName refs)`
+ def annotate(annotName: TypeName, tp: Tree) =
+ Annotated(tp, New(scalaAnnotationDot(annotName), List(refs)))
+ parent match
+ case ByNameTypeTree(restpt) =>
+ cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt))
+ case _ =>
+ annotate(tpnme.retains, parent)
+ }
+ desugared.withSpan(tree.span)
+ }
+
+ /** Turn a function value `handlerFun` into a catch case for a try.
+ * If `handlerFun` is a partial function, translate to
+ *
+ * case ex =>
+ * val ev$1 = handlerFun
+ * if ev$1.isDefinedAt(ex) then ev$1.apply(ex) else throw ex
+ *
+ * Otherwise translate to
+ *
+ * case ex => handlerFun.apply(ex)
+ */
+ def makeTryCase(handlerFun: tpd.Tree)(using Context): CaseDef =
+ val handler = TypedSplice(handlerFun)
+ val excId = Ident(nme.DEFAULT_EXCEPTION_NAME)
+ val rhs =
+ if handlerFun.tpe.widen.isRef(defn.PartialFunctionClass) then
+ val tmpName = UniqueName.fresh()
+ val tmpId = Ident(tmpName)
+ val init = ValDef(tmpName, TypeTree(), handler)
+ val test = If(
+ Apply(Select(tmpId, nme.isDefinedAt), excId),
+ Apply(Select(tmpId, nme.apply), excId),
+ Throw(excId))
+ Block(init :: Nil, test)
+ else
+ Apply(Select(handler, nme.apply), excId)
+ CaseDef(excId, EmptyTree, rhs)
+
+ /** Create a class definition with the same info as the refined type given by `parent`
+ * and `refinements`.
+ *
+ * parent { refinements }
+ * ==>
+ * trait <refinement> extends core { this: self => refinements }
+ *
+ * Here, `core` is the (possibly parameterized) class part of `parent`.
+ * If `parent` is the same as `core`, self is empty. Otherwise `self` is `parent`.
+ *
+ * Example: Given
+ *
+ * class C
+ * type T1 = C { type T <: A }
+ *
+ * the refined type
+ *
+ * T1 { type T <: B }
+ *
+ * is expanded to
+ *
+ * trait <refinement> extends C { this: T1 => type T <: B }
+ *
+ * The result of this method is used for validity checking, is thrown away afterwards.
+ * @param parent The type of `parent` + */ + def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(using Context): TypeDef = { + def stripToCore(tp: Type): List[Type] = tp match { + case tp: AppliedType => tp :: Nil + case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type + case tp: TypeProxy => stripToCore(tp.underlying) + case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) + case _ => defn.AnyType :: Nil + } + val parentCores = stripToCore(parent.tpe) + val untpdParent = TypedSplice(parent) + val (classParents, self) = + if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) + else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) + val impl = Template(emptyConstructor, classParents, Nil, self, refinements) + TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) + } + + /** Returns list of all pattern variables, possibly with their types, + * without duplicates + */ + private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { + val buf = ListBuffer[VarInfo]() + def seenName(name: Name) = buf exists (_._1.name == name) + def add(named: NameTree, t: Tree): Unit = + if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) + def collect(tree: Tree): Unit = tree match { + case tree @ Bind(nme.WILDCARD, tree1) => + if tree.mods.is(Given) then + val Typed(_, tpt) = tree1: @unchecked + if shouldAddGiven(tree) then + add(tree, tpt) + collect(tree1) + case tree @ Bind(_, Typed(tree1, tpt)) => + if !(tree.mods.is(Given) && !shouldAddGiven(tree)) then + add(tree, tpt) + collect(tree1) + case tree @ Bind(_, tree1) => + add(tree, TypeTree()) + collect(tree1) + case Typed(id: Ident, t) if isVarPattern(id) && id.name != nme.WILDCARD && !isWildcardStarArg(tree) => + add(id, t) + case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => + add(id, TypeTree()) + case Apply(_, args) => + args 
foreach collect + case Typed(expr, _) => + collect(expr) + case NamedArg(_, arg) => + collect(arg) + case SeqLiteral(elems, _) => + elems foreach collect + case Alternative(trees) => + for (tree <- trees; (vble, _) <- getVariables(tree, shouldAddGiven)) + report.error(IllegalVariableInPatternAlternative(vble.symbol.name), vble.srcPos) + case Annotated(arg, _) => + collect(arg) + case InterpolatedString(_, segments) => + segments foreach collect + case InfixOp(left, _, right) => + collect(left) + collect(right) + case PrefixOp(_, od) => + collect(od) + case Parens(tree) => + collect(tree) + case Tuple(trees) => + trees foreach collect + case Thicket(trees) => + trees foreach collect + case Block(Nil, expr) => + collect(expr) + case Quote(expr) => + new UntypedTreeTraverser { + def traverse(tree: untpd.Tree)(using Context): Unit = tree match { + case Splice(expr) => collect(expr) + case _ => traverseChildren(tree) + } + }.traverse(expr) + case CapturingTypeTree(refs, parent) => + collect(parent) + case _ => + } + collect(tree) + buf.toList + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala new file mode 100644 index 000000000000..096a885dcf32 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala @@ -0,0 +1,310 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ +import Symbols._, StdNames._, Trees._ +import Decorators._ +import util.{Property, SourceFile} +import typer.ErrorReporting._ +import transform.SyntheticMembers.ExtendsSingletonMirror + +import scala.annotation.internal.sharable + +/** Helper methods to desugar enums */ +object DesugarEnums { + import untpd._ + + enum CaseKind: + case Simple, Object, Class + + final case class EnumConstraints(minKind: CaseKind, maxKind: CaseKind, enumCases: List[(Int, RefTree)]): + require(minKind.ordinal <= maxKind.ordinal && !(cached && 
enumCases.isEmpty))
+ def requiresCreator = minKind == CaseKind.Simple
+ def isEnumeration = maxKind.ordinal < CaseKind.Class.ordinal
+ def cached = minKind.ordinal < CaseKind.Class.ordinal
+ end EnumConstraints
+
+ /** Attachment containing the number of enum cases, the smallest kind that was seen so far,
+ * and a list of all the value cases with their ordinals.
+ */
+ val EnumCaseCount: Property.Key[(Int, CaseKind, CaseKind, List[(Int, TermName)])] = Property.Key()
+
+ /** Attachment signalling that when this definition is desugared, it should add any additional
+ * lookup methods for enums.
+ */
+ val DefinesEnumLookupMethods: Property.Key[Unit] = Property.Key()
+
+ /** The enumeration class that belongs to an enum case. This works no matter
+ * whether the case is still in the enum class or it has been transferred to the
+ * companion object.
+ */
+ def enumClass(using Context): Symbol = {
+ val cls = ctx.owner
+ if (cls.is(Module)) cls.linkedClass else cls
+ }
+
+ def enumCompanion(using Context): Symbol = {
+ val cls = ctx.owner
+ if (cls.is(Module)) cls.sourceModule else cls.linkedClass.sourceModule
+ }
+
+ /** Is `tree` an (untyped) enum case? */
+ def isEnumCase(tree: Tree)(using Context): Boolean = tree match {
+ case tree: MemberDef => tree.mods.isEnumCase
+ case PatDef(mods, _, _, _) => mods.isEnumCase
+ case _ => false
+ }
+
+ /** A reference to the enum class `E`, possibly followed by type arguments.
+ * Each covariant type parameter is approximated by its lower bound.
+ * Each contravariant type parameter is approximated by its upper bound.
+ * It is an error if a type parameter is non-variant, or if its approximation
+ * refers to other type parameters.
+ */ + def interpolatedEnumParent(span: Span)(using Context): Tree = { + val tparams = enumClass.typeParams + def isGround(tp: Type) = tp.subst(tparams, tparams.map(_ => NoType)) eq tp + val targs = tparams map { tparam => + if (tparam.is(Covariant) && isGround(tparam.info.bounds.lo)) + tparam.info.bounds.lo + else if (tparam.is(Contravariant) && isGround(tparam.info.bounds.hi)) + tparam.info.bounds.hi + else { + def problem = + if (!tparam.isOneOf(VarianceFlags)) "is invariant" + else "has bounds that depend on a type parameter in the same parameter list" + errorType(i"""cannot determine type argument for enum parent $enumClass, + |type parameter $tparam $problem""", ctx.source.atSpan(span)) + } + } + TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) + } + + /** A type tree referring to `enumClass` */ + def enumClassRef(using Context): Tree = + if (enumClass.exists) TypeTree(enumClass.typeRef) else TypeTree() + + /** Add implied flags to an enum class or an enum case */ + def addEnumFlags(cdef: TypeDef)(using Context): TypeDef = + if (cdef.mods.isEnumClass) cdef.withMods(cdef.mods.withAddedFlags(Abstract | Sealed, cdef.span)) + else if (isEnumCase(cdef)) cdef.withMods(cdef.mods.withAddedFlags(Final, cdef.span)) + else cdef + + private def valuesDot(name: PreName)(implicit src: SourceFile) = + Select(Ident(nme.DOLLAR_VALUES), name.toTermName) + + private def ArrayLiteral(values: List[Tree], tpt: Tree)(using Context): Tree = + val clazzOf = TypeApply(ref(defn.Predef_classOf.termRef), tpt :: Nil) + val ctag = Apply(TypeApply(ref(defn.ClassTagModule_apply.termRef), tpt :: Nil), clazzOf :: Nil) + val apply = Select(ref(defn.ArrayModule.termRef), nme.apply) + Apply(Apply(TypeApply(apply, tpt :: Nil), values), ctag :: Nil) + + /** The following lists of definitions for an enum type E and known value cases e_0, ..., e_n: + * + * private val $values = Array[E](this.e_0,...,this.e_n)(ClassTag[E](classOf[E])) + * def values = $values.clone + * def valueOf($name: 
String) = $name match { + * case "e_0" => this.e_0 + * ... + * case "e_n" => this.e_n + * case _ => throw new IllegalArgumentException("case not found: " + $name) + * } + */ + private def enumScaffolding(enumValues: List[RefTree])(using Context): List[Tree] = { + val rawEnumClassRef = rawRef(enumClass.typeRef) + extension (tpe: NamedType) def ofRawEnum = AppliedTypeTree(ref(tpe), rawEnumClassRef) + + val privateValuesDef = + ValDef(nme.DOLLAR_VALUES, TypeTree(), ArrayLiteral(enumValues, rawEnumClassRef)) + .withFlags(Private | Synthetic) + + val valuesDef = + DefDef(nme.values, Nil, defn.ArrayType.ofRawEnum, valuesDot(nme.clone_)) + .withFlags(Synthetic) + + val valuesOfBody: Tree = + val defaultCase = + val msg = Apply(Select(Literal(Constant("enum case not found: ")), nme.PLUS), Ident(nme.nameDollar)) + CaseDef(Ident(nme.WILDCARD), EmptyTree, + Throw(New(TypeTree(defn.IllegalArgumentExceptionType), List(msg :: Nil)))) + val stringCases = enumValues.map(enumValue => + CaseDef(Literal(Constant(enumValue.name.toString)), EmptyTree, enumValue) + ) ::: defaultCase :: Nil + Match(Ident(nme.nameDollar), stringCases) + val valueOfDef = DefDef(nme.valueOf, List(param(nme.nameDollar, defn.StringType) :: Nil), + TypeTree(), valuesOfBody) + .withFlags(Synthetic) + + privateValuesDef :: + valuesDef :: + valueOfDef :: Nil + } + + private def enumLookupMethods(constraints: EnumConstraints)(using Context): List[Tree] = + def scaffolding: List[Tree] = + if constraints.isEnumeration then enumScaffolding(constraints.enumCases.map(_._2)) else Nil + def valueCtor: List[Tree] = if constraints.requiresCreator then enumValueCreator :: Nil else Nil + def fromOrdinal: Tree = + def throwArg(ordinal: Tree) = + Throw(New(TypeTree(defn.NoSuchElementExceptionType), List(Select(ordinal, nme.toString_) :: Nil))) + if !constraints.cached then + fromOrdinalMeth(throwArg) + else + def default(ordinal: Tree) = + CaseDef(Ident(nme.WILDCARD), EmptyTree, throwArg(ordinal)) + if 
constraints.isEnumeration then + fromOrdinalMeth(ordinal => + Try(Apply(valuesDot(nme.apply), ordinal), default(ordinal) :: Nil, EmptyTree)) + else + fromOrdinalMeth(ordinal => + Match(ordinal, + constraints.enumCases.map((i, enumValue) => CaseDef(Literal(Constant(i)), EmptyTree, enumValue)) + :+ default(ordinal))) + + if !enumClass.exists then + // in the case of a double definition of an enum that only defines class cases (see tests/neg/i4470c.scala) + // it seems `enumClass` might be `NoSymbol`; in this case we provide no scaffolding. + Nil + else + scaffolding ::: valueCtor ::: fromOrdinal :: Nil + end enumLookupMethods + + /** A creation method for a value of enum type `E`, which is defined as follows: + * + * private def $new(_$ordinal: Int, $name: String) = new E with scala.runtime.EnumValue { + * def ordinal = _$ordinal // if `E` does not derive from `java.lang.Enum` + * } + */ + private def enumValueCreator(using Context) = { + val creator = New(Template( + constr = emptyConstructor, + parents = enumClassRef :: scalaRuntimeDot(tpnme.EnumValue) :: Nil, + derived = Nil, + self = EmptyValDef, + body = Nil + ).withAttachment(ExtendsSingletonMirror, ())) + DefDef(nme.DOLLAR_NEW, + List(List(param(nme.ordinalDollar_, defn.IntType), param(nme.nameDollar, defn.StringType))), + TypeTree(), creator).withFlags(Private | Synthetic) + } + + /** Is a type parameter in `enumTypeParams` referenced from an enum class case that has + * given type parameters `caseTypeParams`, value parameters `vparamss` and parents `parents`? + * Issues an error if that is the case but the reference is illegal. + * The reference could be illegal for two reasons: + * - explicit type parameters are given + * - it's a value case, i.e. 
no value parameters are given + */ + def typeParamIsReferenced( + enumTypeParams: List[TypeSymbol], + caseTypeParams: List[TypeDef], + vparamss: List[List[ValDef]], + parents: List[Tree])(using Context): Boolean = { + + object searchRef extends UntypedTreeAccumulator[Boolean] { + var tparamNames = enumTypeParams.map(_.name).toSet[Name] + def underBinders(binders: List[MemberDef], op: => Boolean): Boolean = { + val saved = tparamNames + tparamNames = tparamNames -- binders.map(_.name) + try op + finally tparamNames = saved + } + def apply(x: Boolean, tree: Tree)(using Context): Boolean = x || { + tree match { + case Ident(name) => + val matches = tparamNames.contains(name) + if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) + report.error(i"illegal reference to type parameter $name from enum case", tree.srcPos) + matches + case LambdaTypeTree(lambdaParams, body) => + underBinders(lambdaParams, foldOver(x, tree)) + case RefinedTypeTree(parent, refinements) => + val refinementDefs = refinements collect { case r: MemberDef => r } + underBinders(refinementDefs, foldOver(x, tree)) + case _ => foldOver(x, tree) + } + } + def apply(tree: Tree)(using Context): Boolean = + underBinders(caseTypeParams, apply(false, tree)) + } + + def typeHasRef(tpt: Tree) = searchRef(tpt) + def valDefHasRef(vd: ValDef) = typeHasRef(vd.tpt) + def parentHasRef(parent: Tree): Boolean = parent match { + case Apply(fn, _) => parentHasRef(fn) + case TypeApply(_, targs) => targs.exists(typeHasRef) + case Select(nu, nme.CONSTRUCTOR) => parentHasRef(nu) + case New(tpt) => typeHasRef(tpt) + case parent => parent.isType && typeHasRef(parent) + } + + vparamss.nestedExists(valDefHasRef) || parents.exists(parentHasRef) + } + + /** A pair consisting of + * - the next enum tag + * - scaffolding containing the necessary definitions for singleton enum cases + * unless that scaffolding was already generated by a previous call to `nextEnumKind`. 
+ */ + def nextOrdinal(name: Name, kind: CaseKind, definesLookups: Boolean)(using Context): (Int, List[Tree]) = { + val (ordinal, seenMinKind, seenMaxKind, seenCases) = + ctx.tree.removeAttachment(EnumCaseCount).getOrElse((0, CaseKind.Class, CaseKind.Simple, Nil)) + val minKind = if kind.ordinal < seenMinKind.ordinal then kind else seenMinKind + val maxKind = if kind.ordinal > seenMaxKind.ordinal then kind else seenMaxKind + val cases = name match + case name: TermName => (ordinal, name) :: seenCases + case _ => seenCases + if definesLookups then + val thisRef = This(EmptyTypeIdent) + val cachedValues = cases.reverse.map((i, name) => (i, Select(thisRef, name))) + (ordinal, enumLookupMethods(EnumConstraints(minKind, maxKind, cachedValues))) + else + ctx.tree.pushAttachment(EnumCaseCount, (ordinal + 1, minKind, maxKind, cases)) + (ordinal, Nil) + } + + def param(name: TermName, typ: Type)(using Context): ValDef = param(name, TypeTree(typ)) + def param(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(Param) + + def ordinalMeth(body: Tree)(using Context): DefDef = + DefDef(nme.ordinal, Nil, TypeTree(defn.IntType), body).withAddedFlags(Synthetic) + + def ordinalMethLit(ord: Int)(using Context): DefDef = + ordinalMeth(Literal(Constant(ord))) + + def fromOrdinalMeth(body: Tree => Tree)(using Context): DefDef = + DefDef(nme.fromOrdinal, (param(nme.ordinal, defn.IntType) :: Nil) :: Nil, + rawRef(enumClass.typeRef), body(Ident(nme.ordinal))).withFlags(Synthetic) + + /** Expand a module definition representing a parameterless enum case */ + def expandEnumModule(name: TermName, impl: Template, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = { + assert(impl.body.isEmpty) + if (!enumClass.exists) EmptyTree + else if (impl.parents.isEmpty) + expandSimpleEnumCase(name, mods, definesLookups, span) + else { + val (tag, scaffolding) = nextOrdinal(name, CaseKind.Object, definesLookups) + val impl1 = 
cpy.Template(impl)(parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), body = Nil) + .withAttachment(ExtendsSingletonMirror, ()) + val vdef = ValDef(name, TypeTree(), New(impl1)).withMods(mods.withAddedFlags(EnumValue, span)) + flatTree(vdef :: scaffolding).withSpan(span) + } + } + + /** Expand a simple enum case */ + def expandSimpleEnumCase(name: TermName, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = + if (!enumClass.exists) EmptyTree + else if (enumClass.typeParams.nonEmpty) { + val parent = interpolatedEnumParent(span) + val impl = Template(emptyConstructor, parent :: Nil, Nil, EmptyValDef, Nil) + expandEnumModule(name, impl, mods, definesLookups, span) + } + else { + val (tag, scaffolding) = nextOrdinal(name, CaseKind.Simple, definesLookups) + val creator = Apply(Ident(nme.DOLLAR_NEW), List(Literal(Constant(tag)), Literal(Constant(name.toString)))) + val vdef = ValDef(name, enumClassRef, creator).withMods(mods.withAddedFlags(EnumValue, span)) + flatTree(vdef :: scaffolding).withSpan(span) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala new file mode 100644 index 000000000000..040582476e96 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala @@ -0,0 +1,449 @@ +package dotty.tools.dotc +package ast + +import core._ +import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._ +import StdNames.{nme, tpnme} +import ast.Trees._ +import Names.Name +import Comments.Comment +import NameKinds.DefaultGetterName +import Annotations.Annotation + +object MainProxies { + + /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ + def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + mainAnnotationProxies(stats) ++ mainProxies(stats) + } + + /** Generate proxy classes for @main functions. 
+ * A function like + * + * @main def f(x: S, ys: T*) = ... + * + * would be translated to something like + * + * import CommandLineParser._ + * class f { + * @static def main(args: Array[String]): Unit = + * try + * f( + * parseArgument[S](args, 0), + * parseRemainingArguments[T](args, 1): _* + * ) + * catch case err: ParseError => showError(err) + * } + */ + private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + import tpd._ + def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { + case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => + stat.symbol :: Nil + case stat @ TypeDef(name, impl: Template) if stat.symbol.is(Module) => + mainMethods(impl.body) + case _ => + Nil + } + mainMethods(stats).flatMap(mainProxy) + } + + import untpd._ + private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = { + val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span + def pos = mainFun.sourcePos + val argsRef = Ident(nme.args) + + def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = + if (mt.isImplicitMethod) { + report.error(s"@main method cannot have implicit parameters", pos) + call + } + else { + val args = mt.paramInfos.zipWithIndex map { + (formal, n) => + val (parserSym, formalElem) = + if (formal.isRepeatedParam) (defn.CLP_parseRemainingArguments, formal.argTypes.head) + else (defn.CLP_parseArgument, formal) + val arg = Apply( + TypeApply(ref(parserSym.termRef), TypeTree(formalElem) :: Nil), + argsRef :: Literal(Constant(idx + n)) :: Nil) + if (formal.isRepeatedParam) repeated(arg) else arg + } + val call1 = Apply(call, args) + mt.resType match { + case restpe: MethodType => + if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) + report.error(s"varargs parameter of @main method must come last", pos) + addArgs(call1, restpe, idx + args.length) + case _ => + call1 + } + } + + var result: List[TypeDef] = Nil + if (!mainFun.owner.isStaticOwner) + 
report.error(s"@main method is not statically accessible", pos) + else { + var call = ref(mainFun.termRef) + mainFun.info match { + case _: ExprType => + case mt: MethodType => + call = addArgs(call, mt, 0) + case _: PolyType => + report.error(s"@main method cannot have type parameters", pos) + case _ => + report.error(s"@main can only annotate a method", pos) + } + val errVar = Ident(nme.error) + val handler = CaseDef( + Typed(errVar, TypeTree(defn.CLP_ParseError.typeRef)), + EmptyTree, + Apply(ref(defn.CLP_showError.termRef), errVar :: Nil)) + val body = Try(call, handler :: Nil, EmptyTree) + val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) + .withFlags(Param) + /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. + * The annotations will be retype-checked in another scope that may not have the same imports. + */ + def insertTypeSplices = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match + case tree: tpd.Ident @unchecked => TypedSplice(tree) + case tree => super.transform(tree) + } + val annots = mainFun.annotations + .filterNot(_.matches(defn.MainAnnot)) + .map(annot => insertTypeSplices.transform(annot.tree)) + val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) + .withFlags(JavaStatic | Synthetic) + .withAnnotations(annots) + val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) + val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) + .withFlags(Final | Invisible) + + if (!ctx.reporter.hasErrors) + result = mainCls.withSpan(mainAnnotSpan.toSynthetic) :: Nil + } + result + } + + private type DefaultValueSymbols = Map[Int, Symbol] + private type ParameterAnnotationss = Seq[Seq[Annotation]] + + /** + * Generate proxy classes for main functions. + * A function like + * + * /** + * * Lorem ipsum dolor sit amet + * * consectetur adipiscing elit. 
+ * * + * * @param x my param x + * * @param ys all my params y + * */ + * @myMain(80) def f( + * @myMain.Alias("myX") x: S, + * y: S, + * ys: T* + * ) = ... + * + * would be translated to something like + * + * final class f { + * static def main(args: Array[String]): Unit = { + * val annotation = new myMain(80) + * val info = new Info( + * name = "f", + * documentation = "Lorem ipsum dolor sit amet consectetur adipiscing elit.", + * parameters = Seq( + * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))), + * new scala.annotation.MainAnnotation.Parameter("y", "S", true, false, "", Seq()), + * new scala.annotation.MainAnnotation.Parameter("ys", "T", false, true, "all my params y", Seq()) + * ) + * ), + * val command = annotation.command(info, args) + * if command.isDefined then + * val cmd = command.get + * val args0: () => S = annotation.argGetter[S](info.parameters(0), cmd(0), None) + * val args1: () => S = annotation.argGetter[S](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) + * val args2: () => Seq[T] = annotation.varargGetter[T](info.parameters(2), cmd.drop(2)) + * annotation.run(() => f(args0(), args1(), args2()*)) + * } + * } + */ + private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + import tpd._ + + /** + * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this + * point of the compilation, they must be explicitly passed by [[mainProxy]]. 
+ */ + def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = + scope match { + case TypeDef(_, template: Template) => + template.body.flatMap((_: Tree) match { + case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => + val DefaultGetterName.NumberedInfo(index) = dd.name.info: @unchecked + List(index -> dd.symbol) + case _ => Nil + }).toMap + case _ => Map.empty + } + + /** Computes the list of main methods present in the code. */ + def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { + case stat: DefDef => + val sym = stat.symbol + sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { + case Nil => + Nil + case _ :: Nil => + val paramAnnotations = stat.paramss.flatMap(_.map( + valdef => valdef.symbol.annotations.filter(_.matches(defn.MainAnnotationParameterAnnotation)) + )) + (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil + case mainAnnot :: others => + report.error(s"method cannot have multiple main annotations", mainAnnot.tree) + Nil + } + case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => + mainMethods(stat, impl.body) + case _ => + Nil + } + + // Assuming that the top-level object was already generated, all main methods will have a scope + mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) + } + + private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { + val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get + def pos = mainFun.sourcePos + + val documentation = new Documentation(docComment) + + /** () => value */ + def unitToValue(value: Tree): Tree = + val defDef = DefDef(nme.ANON_FUN, List(Nil), TypeTree(), value) + Block(defDef, Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) + + 
/** Generate a list of trees containing the ParamInfo instantiations. + * + * A ParamInfo has the following shape + * ``` + * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))) + * ``` + */ + def parameterInfos(mt: MethodType): List[Tree] = + extension (tree: Tree) def withProperty(sym: Symbol, args: List[Tree]) = + Apply(Select(tree, sym.name), args) + + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val param = paramName.toString + val paramType0 = if formal.isRepeatedParam then formal.argTypes.head.dealias else formal.dealias + val paramType = paramType0.dealias + val paramTypeOwner = paramType.typeSymbol.owner + val paramTypeStr = + if paramTypeOwner == defn.EmptyPackageClass then paramType.show + else paramTypeOwner.showFullName + "." + paramType.show + val hasDefault = defaultValueSymbols.contains(idx) + val isRepeated = formal.isRepeatedParam + val paramDoc = documentation.argDocs.getOrElse(param, "") + val paramAnnots = + val annotationTrees = paramAnnotations(idx).map(instantiateAnnotation).toList + Apply(ref(defn.SeqModule.termRef), annotationTrees) + + val constructorArgs = List(param, paramTypeStr, hasDefault, isRepeated, paramDoc) + .map(value => Literal(Constant(value))) + + New(TypeTree(defn.MainAnnotationParameter.typeRef), List(constructorArgs :+ paramAnnots)) + + end parameterInfos + + /** + * Creates a list of references and definitions of arguments. + * The goal is to create the + * `val args0: () => S = annotation.argGetter[S](0, cmd(0), None)` + * part of the code. 
+ */ + def argValDefs(mt: MethodType): List[ValDef] = + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val argName = nme.args ++ idx.toString + val isRepeated = formal.isRepeatedParam + val formalType = if isRepeated then formal.argTypes.head else formal + val getterName = if isRepeated then nme.varargGetter else nme.argGetter + val defaultValueGetterOpt = defaultValueSymbols.get(idx) match + case None => ref(defn.NoneModule.termRef) + case Some(dvSym) => + val value = unitToValue(ref(dvSym.termRef)) + Apply(ref(defn.SomeClass.companionModule.termRef), value) + val argGetter0 = TypeApply(Select(Ident(nme.annotation), getterName), TypeTree(formalType) :: Nil) + val index = Literal(Constant(idx)) + val paramInfo = Apply(Select(Ident(nme.info), nme.parameters), index) + val argGetter = + if isRepeated then Apply(argGetter0, List(paramInfo, Apply(Select(Ident(nme.cmd), nme.drop), List(index)))) + else Apply(argGetter0, List(paramInfo, Apply(Ident(nme.cmd), List(index)), defaultValueGetterOpt)) + ValDef(argName, TypeTree(), argGetter) + end argValDefs + + + /** Create a list of argument references that will be passed as argument to the main method. + * `args0`, ...`argn*` + */ + def argRefs(mt: MethodType): List[Tree] = + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val argRef = Apply(Ident(nme.args ++ idx.toString), Nil) + if formal.isRepeatedParam then repeated(argRef) else argRef + end argRefs + + + /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. `new scala.main(40)`). 
*/ + def instantiateAnnotation(annot: Annotation): Tree = + val argss = { + def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { + case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) + case _ => acc + } + + def extractArgs(args: List[tpd.Tree]): List[Tree] = + args.flatMap { + case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) + case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler + case arg => List(TypedSplice(arg)) + } + + recurse(annot.tree, Nil) + } + + New(TypeTree(annot.symbol.typeRef), argss) + end instantiateAnnotation + + def generateMainClass(mainCall: Tree, args: List[Tree], parameterInfos: List[Tree]): TypeDef = + val cmdInfo = + val nameTree = Literal(Constant(mainFun.showName)) + val docTree = Literal(Constant(documentation.mainDoc)) + val paramInfos = Apply(ref(defn.SeqModule.termRef), parameterInfos) + New(TypeTree(defn.MainAnnotationInfo.typeRef), List(List(nameTree, docTree, paramInfos))) + + val annotVal = ValDef( + nme.annotation, + TypeTree(), + instantiateAnnotation(mainAnnot) + ) + val infoVal = ValDef( + nme.info, + TypeTree(), + cmdInfo + ) + val command = ValDef( + nme.command, + TypeTree(), + Apply( + Select(Ident(nme.annotation), nme.command), + List(Ident(nme.info), Ident(nme.args)) + ) + ) + val argsVal = ValDef( + nme.cmd, + TypeTree(), + Select(Ident(nme.command), nme.get) + ) + val run = Apply(Select(Ident(nme.annotation), nme.run), mainCall) + val body0 = If( + Select(Ident(nme.command), nme.isDefined), + Block(argsVal :: args, run), + EmptyTree + ) + val body = Block(List(annotVal, infoVal, command), body0) // TODO add `if (cmd.nonEmpty)` + + val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) + .withFlags(Param) + /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. 
+ * The annotations will be retype-checked in another scope that may not have the same imports. + */ + def insertTypeSplices = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match + case tree: tpd.Ident @unchecked => TypedSplice(tree) + case tree => super.transform(tree) + } + val annots = mainFun.annotations + .filterNot(_.matches(defn.MainAnnotationClass)) + .map(annot => insertTypeSplices.transform(annot.tree)) + val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) + .withFlags(JavaStatic) + .withAnnotations(annots) + val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) + val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) + .withFlags(Final | Invisible) + mainCls.withSpan(mainAnnot.tree.span.toSynthetic) + end generateMainClass + + if (!mainFun.owner.isStaticOwner) + report.error(s"main method is not statically accessible", pos) + None + else mainFun.info match { + case _: ExprType => + Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) + case mt: MethodType => + if (mt.isImplicitMethod) + report.error(s"main method cannot have implicit parameters", pos) + None + else mt.resType match + case restpe: MethodType => + report.error(s"main method cannot be curried", pos) + None + case _ => + Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) + case _: PolyType => + report.error(s"main method cannot have type parameters", pos) + None + case _ => + report.error(s"main can only annotate a method", pos) + None + } + } + + /** A class responsible for extracting the docstrings of a method. */ + private class Documentation(docComment: Option[Comment]): + import util.CommentParsing._ + + /** The main part of the documentation. */ + lazy val mainDoc: String = _mainDoc + /** The parameters identified by @param. Maps from parameter name to its documentation. 
*/ + lazy val argDocs: Map[String, String] = _argDocs + + private var _mainDoc: String = "" + private var _argDocs: Map[String, String] = Map() + + docComment match { + case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw + case None => + } + + private def cleanComment(raw: String): String = + var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq + lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) + var s = lines.foldLeft("") { + case ("", s2) => s2 + case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines + case (s1, "") => s1 + '\n' + case (s1, s2) if s1.last == '\n' => s1 + s2 + case (s1, s2) => s1 + ' ' + s2 + } + s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn + + private def parseDocComment(raw: String): Unit = + // Positions of the sections (@) in the docstring + val tidx: List[(Int, Int)] = tagIndex(raw) + + // Parse main comment + var mainComment: String = raw.substring(skipLineLead(raw, 0), startTag(raw, tidx)).nn + _mainDoc = cleanComment(mainComment) + + // Parse arguments comments + val argsCommentsSpans: Map[String, (Int, Int)] = paramDocs(raw, "@param", tidx) + val argsCommentsTextSpans = argsCommentsSpans.view.mapValues(extractSectionText(raw, _)) + val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) + _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap + end Documentation +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala new file mode 100644 index 000000000000..054ffe66f323 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala @@ -0,0 +1,129 @@ +package dotty.tools.dotc +package ast + +import core.Contexts._ +import core.Decorators._ +import util.Spans._ +import Trees.{MemberDef, DefTree, WithLazyField} +import dotty.tools.dotc.core.Types.AnnotatedType 
+import dotty.tools.dotc.core.Types.ImportType +import dotty.tools.dotc.core.Types.Type + +/** Utility functions to go from typed to untyped ASTs */ +// TODO: Handle trees with mixed source files +object NavigateAST { + + /** The untyped tree corresponding to typed tree `tree` in the compilation + * unit specified by `ctx` + */ + def toUntyped(tree: tpd.Tree)(using Context): untpd.Tree = + untypedPath(tree, exactMatch = true) match { + case (utree: untpd.Tree) :: _ => + utree + case _ => + val loosePath = untypedPath(tree, exactMatch = false) + throw new + Error(i"""no untyped tree for $tree, pos = ${tree.sourcePos} + |best matching path =\n$loosePath%\n====\n% + |path positions = ${loosePath.map(_.sourcePos)}""") + } + + /** The reverse path of untyped trees starting with a tree that closest matches + * `tree` and ending in the untyped tree at the root of the compilation unit + * specified by `ctx`. + * @param exactMatch If `true`, the path must start with a node that exactly + * matches `tree`, or `Nil` is returned. + * If `false` the path might start with a node enclosing + * the logical position of `tree`. + * Note: A complication concerns member definitions. ValDefs and DefDefs + * have after desugaring a position that spans just the name of the symbol being + * defined and nothing else. So we look instead for an untyped tree approximating the + * envelope of the definition, and declare success if we find another DefTree. + */ + def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(using Context): List[Positioned] = + tree match { + case tree: MemberDef[?] => + untypedPath(tree.span) match { + case path @ (last: DefTree[?]) :: _ => path + case path if !exactMatch => path + case _ => Nil + } + case _ => + untypedPath(tree.span) match { + case (path @ last :: _) if last.span == tree.span || !exactMatch => path + case _ => Nil + } + } + + /** The reverse part of the untyped root of the compilation unit of `ctx` to + * the given `span`. 
+ */ + def untypedPath(span: Span)(using Context): List[Positioned] = + pathTo(span, List(ctx.compilationUnit.untpdTree)) + + + /** The reverse path from any node in `from` to the node that closest encloses `span`, + * or `Nil` if no such path exists. If a non-empty path is returned it starts with + * the node closest enclosing `span` and ends with one of the nodes in `from`. + * + * @param skipZeroExtent If true, skip over zero-extent nodes in the search. These nodes + * do not correspond to code the user wrote since their start and + * end point are the same, so this is useful when trying to reconcile + * nodes with source code. + */ + def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { + def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { + var bestFit: List[Positioned] = path + while (it.hasNext) { + val path1 = it.next() match { + case p: Positioned => singlePath(p, path) + case m: untpd.Modifiers => childPath(m.productIterator, path) + case xs: List[?] 
=> childPath(xs.iterator, path) + case _ => path + } + if ((path1 ne path) && + ((bestFit eq path) || + bestFit.head.span != path1.head.span && + bestFit.head.span.contains(path1.head.span))) + bestFit = path1 + } + bestFit + } + /* + * Annotations trees are located in the Type + */ + def unpackAnnotations(t: Type, path: List[Positioned]): List[Positioned] = + t match { + case ann: AnnotatedType => + unpackAnnotations(ann.parent, childPath(ann.annot.tree.productIterator, path)) + case imp: ImportType => + childPath(imp.expr.productIterator, path) + case other => + path + } + def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] = + if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) { + // FIXME: We shouldn't be manually forcing trees here, we should replace + // our usage of `productIterator` by something in `Positioned` that takes + // care of low-level details like this for us. + p match { + case p: WithLazyField[?] => + p.forceIfLazy + case _ => + } + val iterator = p match + case defdef: DefTree[?] 
=> + p.productIterator ++ defdef.mods.productIterator + case _ => + p.productIterator + childPath(iterator, p :: path) + } + else { + p match { + case t: untpd.TypeTree => unpackAnnotations(t.typeOpt, path) + case _ => path + } + } + childPath(from.iterator, Nil) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala new file mode 100644 index 000000000000..fd30d441a6ee --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala @@ -0,0 +1,246 @@ +package dotty.tools +package dotc +package ast + +import util.Spans._ +import util.{SourceFile, SourcePosition, SrcPos} +import core.Contexts._ +import core.Decorators._ +import core.NameOps._ +import core.Flags.{JavaDefined, ExtensionMethod} +import core.StdNames.nme +import ast.Trees.mods +import annotation.constructorOnly +import annotation.internal.sharable + +/** A base class for things that have positions (currently: modifiers and trees) + */ +abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, caps.Pure { + import Positioned.{ids, nextId, debugId} + + private var mySpan: Span = _ + + private var mySource: SourceFile = src + + /** A unique identifier in case -Yshow-tree-ids, or -Ydebug-tree-with-id + * is set, -1 otherwise. 
+ */ + def uniqueId: Int = + if ids != null && ids.nn.containsKey(this) then ids.nn.get(this).nn else -1 + + private def allocateId() = + if ids != null then + val ownId = nextId + nextId += 1 + ids.nn.put(this: @unchecked, ownId) + if ownId == debugId then + println(s"Debug tree (id=$debugId) creation \n${this: @unchecked}\n") + Thread.dumpStack() + + allocateId() + + /** The span part of the item's position */ + def span: Span = mySpan + + def span_=(span: Span): Unit = + mySpan = span + + span = envelope(src) + + def source: SourceFile = mySource + + def sourcePos(using Context): SourcePosition = source.atSpan(span) + + /** This positioned item, widened to `SrcPos`. Used to make clear we only need the + * position, typically for error reporting. + */ + final def srcPos: SrcPos = this + + /** A positioned item like this one with given `span`. + * If the positioned item is source-derived, a clone is returned. + * If the positioned item is synthetic, the position is updated + * destructively and the item itself is returned. + */ + def withSpan(span: Span): this.type = + if (span == mySpan) this + else { + val newpd: this.type = + if !mySpan.exists then + if span.exists then envelope(source, span.startPos) // fill in children spans + this + else + cloneIn(source) + newpd.span = span + newpd + } + + /** The union of startSpan and the spans of all positioned children that + * have the same source as this node, except that Inlined nodes only + * consider their `call` child. + * + * Side effect: Any descendants without spans have but with the same source as this + * node have their span set to the end position of the envelope of all children to + * the left, or, if that one does not exist, to the start position of the envelope + * of all children to the right. 
+ */ + def envelope(src: SourceFile, startSpan: Span = NoSpan): Span = (this: @unchecked) match { + case Trees.Inlined(call, _, _) => + call.span + case _ => + def include(span: Span, x: Any): Span = x match { + case p: Positioned => + if (p.source != src) span + else if (p.span.exists) span.union(p.span) + else if (span.exists) { + if (span.end != MaxOffset) + p.span = p.envelope(src, span.endPos) + span + } + else // No span available to assign yet, signal this by returning a span with MaxOffset end + Span(MaxOffset, MaxOffset) + case m: untpd.Modifiers => + include(include(span, m.mods), m.annotations) + case y :: ys => + include(include(span, y), ys) + case _ => span + } + val limit = productArity + def includeChildren(span: Span, n: Int): Span = + if (n < limit) includeChildren(include(span, productElement(n): @unchecked), n + 1) + else span + val span1 = includeChildren(startSpan, 0) + val span2 = + if (!span1.exists || span1.end != MaxOffset) + span1 + else if (span1.start == MaxOffset) + // No positioned child was found + NoSpan + else + ///println(s"revisit $uniqueId with $span1") + // We have some children left whose span could not be assigned. + // Go through it again with the known start position. + includeChildren(span1.startPos, 0) + span2.toSynthetic + } + + /** Clone this node but assign it a fresh id which marks it as a node in `file`. */ + def cloneIn(src: SourceFile): this.type = { + val newpd: this.type = clone.asInstanceOf[this.type] + newpd.allocateId() + newpd.mySource = src + newpd + } + + def contains(that: Positioned): Boolean = { + def isParent(x: Any): Boolean = x match { + case x: Positioned => + x.contains(that) + case m: untpd.Modifiers => + m.mods.exists(isParent) || m.annotations.exists(isParent) + case xs: List[?] 
=> + xs.exists(isParent) + case _ => + false + } + (this eq that) || + (this.span contains that.span) && { + var n = productArity + var found = false + while (!found && n > 0) { + n -= 1 + found = isParent(productElement(n)) + } + found + } + } + + /** Check that all positioned items in this tree satisfy the following conditions: + * - Parent spans contain child spans + * - If item is a non-empty tree, it has a position + */ + def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { + import untpd._ + var lastPositioned: Positioned | Null = null + var lastSpan = NoSpan + def check(p: Any): Unit = p match { + case p: Positioned => + assert(span contains p.span, + i"""position error, parent span does not contain child span + |parent = $this # $uniqueId, + |parent span = $span, + |child = $p # ${p.uniqueId}, + |child span = ${p.span}""".stripMargin) + p match { + case tree: Tree if !tree.isEmpty => + assert(tree.span.exists, + s"position error: position not set for $tree # ${tree.uniqueId}") + case _ => + } + if nonOverlapping then + this match { + case _: XMLBlock => + // FIXME: Trees generated by the XML parser do not satisfy `checkPos` + case _: WildcardFunction + if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => + // ignore transition from last wildcard parameter to body + case _ => + assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start, + i"""position error, child positions overlap or in wrong order + |parent = $this + |1st child = $lastPositioned + |1st child span = $lastSpan + |2nd child = $p + |2nd child span = ${p.span}""".stripMargin) + } + lastPositioned = p + lastSpan = p.span + p.checkPos(nonOverlapping) + case m: untpd.Modifiers => + m.annotations.foreach(check) + m.mods.foreach(check) + case xs: List[?] 
=> + xs.foreach(check) + case _ => + } + this match { + case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) => + // Special treatment for constructors coming from Java: + // Leave out leading type params, they are copied with wrong positions from parent class + check(tree.mods) + check(tree.trailingParamss) + case tree: DefDef if tree.mods.is(ExtensionMethod) => + tree.paramss match + case vparams1 :: vparams2 :: rest if tree.name.isRightAssocOperatorName => + // omit check for right-associatiove extension methods; their parameters were swapped + case _ => + check(tree.paramss) + check(tree.tpt) + check(tree.rhs) + case _ => + val end = productArity + var n = 0 + while (n < end) { + check(productElement(n)) + n += 1 + } + } + } + catch { + case ex: AssertionError => + println(i"error while checking $this") + throw ex + } +} + +object Positioned { + @sharable private var debugId = Int.MinValue + @sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Null = null + @sharable private var nextId: Int = 0 + + def init(using Context): Unit = + debugId = ctx.settings.YdebugTreeWithId.value + if ids == null && ctx.settings.YshowTreeIds.value + || debugId != ctx.settings.YdebugTreeWithId.default + then + ids = java.util.WeakHashMap() +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala new file mode 100644 index 000000000000..ff59a795d818 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala @@ -0,0 +1,1075 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Flags._, Trees._, Types._, Contexts._ +import Names._, StdNames._, NameOps._, Symbols._ +import typer.ConstFold +import reporting.trace +import dotty.tools.dotc.transform.SymUtils._ +import Decorators._ +import Constants.Constant +import scala.collection.mutable + +import scala.annotation.tailrec + +trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => + + // 
Note: the <: Type constraint looks necessary (and is needed to make the file compile in dotc). + // But Scalac accepts the program happily without it. Need to find out why. + + def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree + + def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match { + case DefDef(_, _, _, EmptyTree) + | ValDef(_, _, EmptyTree) + | TypeDef(_, _) => true + case _ => false + } + + def isOpAssign(tree: Tree): Boolean = unsplice(tree) match { + case Apply(fn, _ :: _) => + unsplice(fn) match { + case Select(_, name) if name.isOpAssignmentName => true + case _ => false + } + case _ => false + } + + class MatchingArgs(params: List[Symbol], args: List[Tree])(using Context) { + def foreach(f: (Symbol, Tree) => Unit): Boolean = { + def recur(params: List[Symbol], args: List[Tree]): Boolean = params match { + case Nil => args.isEmpty + case param :: params1 => + if (param.info.isRepeatedParam) { + for (arg <- args) f(param, arg) + true + } + else args match { + case Nil => false + case arg :: args1 => + f(param, args.head) + recur(params1, args1) + } + } + recur(params, args) + } + def zipped: List[(Symbol, Tree)] = map((_, _)) + def map[R](f: (Symbol, Tree) => R): List[R] = { + val b = List.newBuilder[R] + foreach(b += f(_, _)) + b.result() + } + } + + /** The method part of an application node, possibly enclosed in a block + * with only valdefs as statements. the reason for also considering blocks + * is that named arguments can transform a call into a block, e.g. + * (b = foo, a = bar) + * is transformed to + * { val x$1 = foo + * val x$2 = bar + * (x$2, x$1) + * } + */ + def methPart(tree: Tree): Tree = stripApply(tree) match { + case TypeApply(fn, _) => methPart(fn) + case AppliedTypeTree(fn, _) => methPart(fn) // !!! should not be needed + case Block(stats, expr) => methPart(expr) + case mp => mp + } + + /** If this is an application, its function part, stripping all + * Apply nodes (but leaving TypeApply nodes in). 
Otherwise the tree itself. + */ + def stripApply(tree: Tree): Tree = unsplice(tree) match { + case Apply(fn, _) => stripApply(fn) + case _ => tree + } + + /** If this is a block, its expression part */ + def stripBlock(tree: Tree): Tree = unsplice(tree) match { + case Block(_, expr) => stripBlock(expr) + case Inlined(_, _, expr) => stripBlock(expr) + case _ => tree + } + + def stripInlined(tree: Tree): Tree = unsplice(tree) match { + case Inlined(_, _, expr) => stripInlined(expr) + case _ => tree + } + + def stripAnnotated(tree: Tree): Tree = tree match { + case Annotated(arg, _) => arg + case _ => tree + } + + /** The number of arguments in an application */ + def numArgs(tree: Tree): Int = unsplice(tree) match { + case Apply(fn, args) => numArgs(fn) + args.length + case TypeApply(fn, _) => numArgs(fn) + case Block(_, expr) => numArgs(expr) + case _ => 0 + } + + /** All term arguments of an application in a single flattened list */ + def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { + case Apply(fn, args) => allArguments(fn) ::: args + case TypeApply(fn, _) => allArguments(fn) + case Block(_, expr) => allArguments(expr) + case _ => Nil + } + + /** Is tree explicitly parameterized with type arguments? */ + def hasExplicitTypeArgs(tree: Tree): Boolean = tree match + case TypeApply(tycon, args) => + args.exists(arg => !arg.span.isZeroExtent && !tycon.span.contains(arg.span)) + case _ => false + + /** Is tree a path? */ + def isPath(tree: Tree): Boolean = unsplice(tree) match { + case Ident(_) | This(_) | Super(_, _) => true + case Select(qual, _) => isPath(qual) + case _ => false + } + + /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the + * same object? + */ + def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) => true + case _ => false + } + + /** Is tree a super constructor call? 
+ */ + def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Select(Super(_, _), nme.CONSTRUCTOR) => true + case _ => false + } + + def isSuperSelection(tree: Tree): Boolean = unsplice(tree) match { + case Select(Super(_, _), _) => true + case _ => false + } + + def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Ident(nme.CONSTRUCTOR) + | Select(This(_), nme.CONSTRUCTOR) + | Select(Super(_, _), nme.CONSTRUCTOR) => true + case _ => false + } + + /** Is tree a backquoted identifier or definition */ + def isBackquoted(tree: Tree): Boolean = tree.hasAttachment(Backquoted) + + /** Is tree a variable pattern? */ + def isVarPattern(pat: Tree): Boolean = unsplice(pat) match { + case x: Ident => x.name.isVarPattern && !isBackquoted(x) + case _ => false + } + + /** The first constructor definition in `stats` */ + def firstConstructor(stats: List[Tree]): Tree = stats match { + case (meth: DefDef) :: _ if meth.name.isConstructorName => meth + case stat :: stats => firstConstructor(stats) + case nil => EmptyTree + } + + /** Is tpt a vararg type of the form T* or => T*? */ + def isRepeatedParamType(tpt: Tree)(using Context): Boolean = stripByNameType(tpt) match { + case tpt: TypeTree => tpt.typeOpt.isRepeatedParam + case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS), _) => true + case _ => false + } + + /** Is this argument node of the form *, or is it a reference to + * such an argument ? The latter case can happen when an argument is lifted. + */ + def isWildcardStarArg(tree: Tree)(using Context): Boolean = unbind(tree) match { + case Typed(Ident(nme.WILDCARD_STAR), _) => true + case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true + case Typed(_, tpt: TypeTree) => tpt.typeOpt.isRepeatedParam + case NamedArg(_, arg) => isWildcardStarArg(arg) + case arg => arg.typeOpt.widen.isRepeatedParam + } + + /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? 
*/ + def isByNameType(tree: Tree)(using Context): Boolean = + stripByNameType(tree) ne tree + + /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ + def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match + case ByNameTypeTree(t1) => t1 + case untpd.CapturingTypeTree(_, parent) => + val parent1 = stripByNameType(parent) + if parent1 eq parent then tree else parent1 + case _ => tree + + /** All type and value parameter symbols of this DefDef */ + def allParamSyms(ddef: DefDef)(using Context): List[Symbol] = + ddef.paramss.flatten.map(_.symbol) + + /** Does this argument list end with an argument of the form : _* ? */ + def isWildcardStarArgList(trees: List[Tree])(using Context): Boolean = + trees.nonEmpty && isWildcardStarArg(trees.last) + + /** Is the argument a wildcard argument of the form `_` or `x @ _`? + */ + def isWildcardArg(tree: Tree): Boolean = unbind(tree) match { + case Ident(nme.WILDCARD) => true + case _ => false + } + + /** Does this list contain a named argument tree? */ + def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg + val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] + + /** Is this pattern node a catch-all (wildcard or variable) pattern? */ + def isDefaultCase(cdef: CaseDef): Boolean = cdef match { + case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) + case _ => false + } + + /** Does this CaseDef catch Throwable? */ + def catchesThrowable(cdef: CaseDef)(using Context): Boolean = + catchesAllOf(cdef, defn.ThrowableType) + + /** Does this CaseDef catch everything of a certain Type? */ + def catchesAllOf(cdef: CaseDef, threshold: Type)(using Context): Boolean = + isDefaultCase(cdef) || + cdef.guard.isEmpty && { + unbind(cdef.pat) match { + case Typed(Ident(nme.WILDCARD), tpt) => threshold <:< tpt.typeOpt + case _ => false + } + } + + /** Is this case guarded? 
*/ + def isGuardedCase(cdef: CaseDef): Boolean = cdef.guard ne EmptyTree + + /** Is this parameter list a using clause? */ + def isUsingClause(params: ParamClause)(using Context): Boolean = params match + case ValDefs(vparam :: _) => + val sym = vparam.symbol + if sym.exists then sym.is(Given) else vparam.mods.is(Given) + case _ => + false + + def isUsingOrTypeParamClause(params: ParamClause)(using Context): Boolean = params match + case TypeDefs(_) => true + case _ => isUsingClause(params) + + def isTypeParamClause(params: ParamClause)(using Context): Boolean = params match + case TypeDefs(_) => true + case _ => false + + private val languageSubCategories = Set(nme.experimental, nme.deprecated) + + /** If `path` looks like a language import, `Some(name)` where name + * is `experimental` if that sub-module is imported, and the empty + * term name otherwise. + */ + def languageImport(path: Tree): Option[TermName] = path match + case Select(p1, name: TermName) if languageSubCategories.contains(name) => + languageImport(p1) match + case Some(EmptyTermName) => Some(name) + case _ => None + case p1: RefTree if p1.name == nme.language => + p1.qualifier match + case EmptyTree => Some(EmptyTermName) + case p2: RefTree if p2.name == nme.scala => + p2.qualifier match + case EmptyTree => Some(EmptyTermName) + case Ident(nme.ROOTPKG) => Some(EmptyTermName) + case _ => None + case _ => None + case _ => None + + /** The underlying pattern ignoring any bindings */ + def unbind(x: Tree): Tree = unsplice(x) match { + case Bind(_, y) => unbind(y) + case y => y + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class with these parents can have as flags. 
+ */ + def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { + case Nil => NoInitsInterface + case Apply(_, _ :: _) :: _ => EmptyFlags + case _ :: parents1 => parentsKind(parents1) + } + + /** Checks whether predicate `p` is true for all result parts of this expression, + * where we zoom into Ifs, Matches, and Blocks. + */ + def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match { + case If(_, thenp, elsep) => forallResults(thenp, p) && forallResults(elsep, p) + case Match(_, cases) => cases forall (c => forallResults(c.body, p)) + case Block(_, expr) => forallResults(expr, p) + case _ => p(tree) + } +} + +trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => + import untpd._ + + /** The underlying tree when stripping any TypedSplice or Parens nodes */ + override def unsplice(tree: Tree): Tree = tree match { + case TypedSplice(tree1) => tree1 + case Parens(tree1) => unsplice(tree1) + case _ => tree + } + + def functionWithUnknownParamType(tree: Tree): Option[Tree] = tree match { + case Function(args, _) => + if (args.exists { + case ValDef(_, tpt, _) => tpt.isEmpty + case _ => false + }) Some(tree) + else None + case Match(EmptyTree, _) => + Some(tree) + case Block(Nil, expr) => + functionWithUnknownParamType(expr) + case _ => + None + } + + def isFunctionWithUnknownParamType(tree: Tree): Boolean = + functionWithUnknownParamType(tree).isDefined + + def isFunction(tree: Tree): Boolean = tree match + case Function(_, _) | Match(EmptyTree, _) => true + case Block(Nil, expr) => isFunction(expr) + case _ => false + + /** Is `tree` a context function or closure, possibly nested in a block? 
*/ + def isContextualClosure(tree: Tree)(using Context): Boolean = unsplice(tree) match { + case tree: FunctionWithMods => tree.mods.is(Given) + case Function((param: untpd.ValDef) :: _, _) => param.mods.is(Given) + case Closure(_, meth, _) => true + case Block(Nil, expr) => isContextualClosure(expr) + case Block(DefDef(nme.ANON_FUN, params :: _, _, _) :: Nil, cl: Closure) => + if params.isEmpty then + cl.tpt.eq(untpd.ContextualEmptyTree) || defn.isContextFunctionType(cl.tpt.typeOpt) + else + isUsingClause(params) + case _ => false + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class enclosing this statement can have as flags. + */ + private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match { + case EmptyTree | _: Import => NoInitsInterface + case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface + case tree: DefDef => + if tree.unforcedRhs == EmptyTree + && tree.paramss.forall { + case ValDefs(vparams) => vparams.forall(_.rhs.isEmpty) + case _ => true + } + then + NoInitsInterface + else if tree.mods.is(Given) && tree.paramss.isEmpty then + EmptyFlags // might become a lazy val: TODO: check whether we need to suppress NoInits once we have new lazy val impl + else + NoInits + case tree: ValDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else EmptyFlags + case _ => EmptyFlags + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class with this body can have as flags. 
+ */ + def bodyKind(body: List[Tree])(using Context): FlagSet = + body.foldLeft(NoInitsInterface)((fs, stat) => fs & defKind(stat)) + + /** Info of a variable in a pattern: The named tree and its type */ + type VarInfo = (NameTree, Tree) + + /** An extractor for trees of the form `id` or `id: T` */ + object IdPattern { + def unapply(tree: Tree)(using Context): Option[VarInfo] = tree match { + case id: Ident if id.name != nme.WILDCARD => Some(id, TypeTree()) + case Typed(id: Ident, tpt) => Some((id, tpt)) + case _ => None + } + } + + /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. + * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` + * are ignored by the extractor. + */ + object ImpureByNameTypeTree: + + def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = + untpd.CapturingTypeTree( + untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) + + def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match + case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) + if id.span == bntp.span.startPos => Some(bntp) + case _ => None + end ImpureByNameTypeTree +} + +trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => + import TreeInfo._ + import tpd._ + + /** The purity level of this statement. + * @return Pure if statement has no side effects + * Idempotent if running the statement a second time has no side effects + * Impure otherwise + */ + def statPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + case EmptyTree + | TypeDef(_, _) + | Import(_, _) + | DefDef(_, _, _, _) => + Pure + case vdef @ ValDef(_, _, _) => + if (vdef.symbol.flags is Mutable) Impure else exprPurity(vdef.rhs) `min` Pure + case _ => + Impure + // TODO: It seem like this should be exprPurity(tree) + // But if we do that the repl/vars test break. Need to figure out why that's the case. 
+ } + + /** The purity level of this expression. See docs for PurityLevel for what that means + * + * Note that purity and idempotency are treated differently. + * References to modules and lazy vals are impure (side-effecting) both because + * side-effecting code may be executed and because the first reference + * takes a different code path than all to follow; but they are idempotent + * because running the expression a second time gives the cached result. + */ + def exprPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + case EmptyTree + | This(_) + | Super(_, _) + | Literal(_) => + PurePath + case Ident(_) => + refPurity(tree) + case Select(qual, _) => + if (tree.symbol.is(Erased)) Pure + else refPurity(tree) `min` exprPurity(qual) + case New(_) | Closure(_, _, _) => + Pure + case TypeApply(fn, _) => + if (fn.symbol.is(Erased) || fn.symbol == defn.QuotedTypeModule_of || fn.symbol == defn.Predef_classOf) Pure else exprPurity(fn) + case Apply(fn, args) => + if isPureApply(tree, fn) then + minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure + else if fn.symbol.is(Erased) then + Pure + else if fn.symbol.isStableMember /* && fn.symbol.is(Lazy) */ then + minOf(exprPurity(fn), args.map(exprPurity)) `min` Idempotent + else + Impure + case Typed(expr, _) => + exprPurity(expr) + case Block(stats, expr) => + minOf(exprPurity(expr), stats.map(statPurity)) + case Inlined(_, bindings, expr) => + minOf(exprPurity(expr), bindings.map(statPurity)) + case NamedArg(_, expr) => + exprPurity(expr) + case _ => + Impure + } + + private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = ls.foldLeft(l0)(_ `min` _) + + def isPurePath(tree: Tree)(using Context): Boolean = tree.tpe match { + case tpe: ConstantType => exprPurity(tree) >= Pure + case _ => exprPurity(tree) == PurePath + } + + def isPureExpr(tree: Tree)(using Context): Boolean = + exprPurity(tree) >= Pure + + def isIdempotentPath(tree: Tree)(using Context): Boolean = tree.tpe match { + case tpe: 
ConstantType => exprPurity(tree) >= Idempotent + case _ => exprPurity(tree) >= IdempotentPath + } + + def isIdempotentExpr(tree: Tree)(using Context): Boolean = + exprPurity(tree) >= Idempotent + + def isPureBinding(tree: Tree)(using Context): Boolean = statPurity(tree) >= Pure + + /** Is the application `tree` with function part `fn` known to be pure? + * Function value and arguments can still be impure. + */ + def isPureApply(tree: Tree, fn: Tree)(using Context): Boolean = + def isKnownPureOp(sym: Symbol) = + sym.owner.isPrimitiveValueClass + || sym.owner == defn.StringClass + || defn.pureMethods.contains(sym) + tree.tpe.isInstanceOf[ConstantType] && tree.symbol != NoSymbol && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. + || fn.symbol.isStableMember && !fn.symbol.is(Lazy) // constructors of no-inits classes are stable + + /** The purity level of this reference. + * @return + * PurePath if reference is (nonlazy and stable) + * or to a parameterized function + * or its type is a constant type + * IdempotentPath if reference is lazy and stable + * Impure otherwise + * @DarkDimius: need to make sure that lazy accessor methods have Lazy and Stable + * flags set. 
+ */ + def refPurity(tree: Tree)(using Context): PurityLevel = { + val sym = tree.symbol + if (!tree.hasType) Impure + else if !tree.tpe.widen.isParameterless then PurePath + else if sym.is(Erased) then PurePath + else if tree.tpe.isInstanceOf[ConstantType] then PurePath + else if (!sym.isStableMember) Impure + else if (sym.is(Module)) + if (sym.moduleClass.isNoInitsRealClass) PurePath else IdempotentPath + else if (sym.is(Lazy)) IdempotentPath + else if sym.isAllOf(InlineParam) then Impure + else PurePath + } + + def isPureRef(tree: Tree)(using Context): Boolean = + refPurity(tree) == PurePath + def isIdempotentRef(tree: Tree)(using Context): Boolean = + refPurity(tree) >= IdempotentPath + + /** (1) If `tree` is a constant expression, its value as a Literal, + * or `tree` itself otherwise. + * + * Note: Demanding idempotency instead of purity in literalize is strictly speaking too loose. + * Example + * + * object O { final val x = 42; println("43") } + * O.x + * + * Strictly speaking we can't replace `O.x` with `42`. But this would make + * most expressions non-constant. Maybe we can change the spec to accept this + * kind of eliding behavior. Or else enforce true purity in the compiler. + * The choice will be affected by what we will do with `inline` and with + * Singleton type bounds (see SIP 23). Presumably + * + * object O1 { val x: Singleton = 42; println("43") } + * object O2 { inline val x = 42; println("43") } + * + * should behave differently. + * + * O1.x should have the same effect as { println("43"); 42 } + * + * whereas + * + * O2.x = 42 + * + * Revisit this issue once we have standardized on `inline`. Then we can demand + * purity of the prefix unless the selection goes to a inline val. + * + * Note: This method should be applied to all term tree nodes that are not literals, + * that can be idempotent, and that can have constant types. 
So far, only nodes + * of the following classes qualify: + * + * Ident + * Select + * TypeApply + * + * (2) A primitive unary operator expression `pre.op` where `op` is one of `+`, `-`, `~`, `!` + * that has a constant type `ConstantType(v)` but that is not a constant expression + * (i.e. `pre` has side-effects) is translated to + * + * { pre; v } + * + * (3) An expression `pre.getClass[..]()` that has a constant type `ConstantType(v)` but where + * `pre` has side-effects is translated to: + * + * { pre; v } + * + * This avoids the situation where we have a Select node that does not have a symbol. + */ + def constToLiteral(tree: Tree)(using Context): Tree = { + assert(!tree.isType) + val tree1 = ConstFold(tree) + tree1.tpe.widenTermRefExpr.dealias.normalized match { + case ConstantType(Constant(_: Type)) if tree.isInstanceOf[Block] => + // We can't rewrite `{ class A; classOf[A] }` to `classOf[A]`, so we leave + // blocks returning a class literal alone, even if they're idempotent. + tree1 + case ConstantType(value) => + def dropOp(t: Tree): Tree = t match + case Select(pre, _) if t.tpe.isInstanceOf[ConstantType] => + // it's a primitive unary operator + pre + case Apply(TypeApply(Select(pre, nme.getClass_), _), Nil) => + pre + case _ => + tree1 + + val countsAsPure = + if dropOp(tree1).symbol.isInlineVal + then isIdempotentExpr(tree1) + else isPureExpr(tree1) + + if countsAsPure then Literal(value).withSpan(tree.span) + else + val pre = dropOp(tree1) + if pre eq tree1 then tree1 + else + // it's a primitive unary operator or getClass call; + // Simplify `pre.op` to `{ pre; v }` where `v` is the value of `pre.op` + Block(pre :: Nil, Literal(value)).withSpan(tree.span) + case _ => tree1 + } + } + + def isExtMethodApply(tree: Tree)(using Context): Boolean = methPart(tree) match + case Inlined(call, _, _) => isExtMethodApply(call) + case tree @ Select(qual, nme.apply) => tree.symbol.is(ExtensionMethod) || isExtMethodApply(qual) + case tree => 
tree.symbol.is(ExtensionMethod) + + /** Is symbol potentially a getter of a mutable variable? + */ + def mayBeVarGetter(sym: Symbol)(using Context): Boolean = { + def maybeGetterType(tpe: Type): Boolean = tpe match { + case _: ExprType => true + case tpe: MethodType => tpe.isImplicitMethod + case tpe: PolyType => maybeGetterType(tpe.resultType) + case _ => false + } + sym.owner.isClass && !sym.isStableMember && maybeGetterType(sym.info) + } + + /** Is tree a reference to a mutable variable, or to a potential getter + * that has a setter in the same class? + */ + def isVariableOrGetter(tree: Tree)(using Context): Boolean = { + def sym = tree.symbol + def isVar = sym.is(Mutable) + def isGetter = + mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists + + unsplice(tree) match { + case Ident(_) => isVar + case Select(_, _) => isVar || isGetter + case Apply(_, _) => + methPart(tree) match { + case Select(qual, nme.apply) => qual.tpe.member(nme.update).exists + case _ => false + } + case _ => false + } + } + + /** Is tree a `this` node which belongs to `enclClass`? 
*/ + def isSelf(tree: Tree, enclClass: Symbol)(using Context): Boolean = unsplice(tree) match { + case This(_) => tree.symbol == enclClass + case _ => false + } + + /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ + def stripCast(tree: Tree)(using Context): Tree = { + def isCast(sel: Tree) = sel.symbol.isTypeCast + unsplice(tree) match { + case TypeApply(sel @ Select(inner, _), _) if isCast(sel) => + stripCast(inner) + case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCast(sel) => + stripCast(inner) + case t => + t + } + } + + /** The type arguments of a possibly curried call */ + def typeArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case TypeApply(fn, args) => loop(fn, args :: argss) + case Apply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The term arguments of a possibly curried call */ + def termArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case Apply(fn, args) => loop(fn, args :: argss) + case TypeApply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The type and term arguments of a possibly curried call, in the order they are given */ + def allArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case tree: GenericApply => loop(tree.fun, tree.args :: argss) + case _ => argss + loop(tree, Nil) + + /** The function part of a possibly curried call. 
Unlike `methPart` this one does + * not decompose blocks + */ + def funPart(tree: Tree): Tree = tree match + case tree: GenericApply => funPart(tree.fun) + case tree => tree + + /** Decompose a template body into parameters and other statements */ + def decomposeTemplateBody(body: List[Tree])(using Context): (List[Tree], List[Tree]) = + body.partition { + case stat: TypeDef => stat.symbol is Flags.Param + case stat: ValOrDefDef => + stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter + case _ => false + } + + /** An extractor for closures, either contained in a block or standalone. + */ + object closure { + def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { + case Block(_, expr) => unapply(expr) + case Closure(env, meth, tpt) => Some(env, meth, tpt) + case Typed(expr, _) => unapply(expr) + case _ => None + } + } + + /** An extractor for def of a closure contained the block of the closure. */ + object closureDef { + def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { + case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => + Some(meth) + case Block(Nil, expr) => + unapply(expr) + case Inlined(_, bindings, expr) if bindings.forall(isPureBinding) => + unapply(expr) + case _ => + None + } + } + + /** If tree is a closure, its body, otherwise tree itself */ + def closureBody(tree: Tree)(using Context): Tree = tree match { + case closureDef(meth) => meth.rhs + case _ => tree + } + + /** The variables defined by a pattern, in reverse order of their appearance. 
*/ + def patVars(tree: Tree)(using Context): List[Symbol] = { + val acc = new TreeAccumulator[List[Symbol]] { + def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { + case Bind(_, body) => apply(tree.symbol :: syms, body) + case Annotated(tree, id @ Ident(tpnme.BOUNDTYPE_ANNOT)) => apply(id.symbol :: syms, tree) + case _ => foldOver(syms, tree) + } + } + acc(Nil, tree) + } + + /** Is this pattern node a catch-all or type-test pattern? */ + def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match { + case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => + isSimpleThrowable(tpt.tpe) + case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => + isSimpleThrowable(tpt.tpe) + case _ => + isDefaultCase(cdef) + } + + private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp match { + case tp @ TypeRef(pre, _) => + (pre == NoPrefix || pre.typeSymbol.isStatic) && + (tp.symbol derivesFrom defn.ThrowableClass) && !tp.symbol.is(Trait) + case _ => + false + } + + /** The symbols defined locally in a statement list */ + def localSyms(stats: List[Tree])(using Context): List[Symbol] = + val locals = new mutable.ListBuffer[Symbol] + for stat <- stats do + if stat.isDef && stat.symbol.exists then locals += stat.symbol + locals.toList + + /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */ + def definedSym(tree: Tree)(using Context): Symbol = + if (tree.isDef) tree.symbol else NoSymbol + + /** Going from child to parent, the path of tree nodes that starts + * with a definition of symbol `sym` and ends with `root`, or Nil + * if no such path exists. + * Pre: `sym` must have a position. 
+ */ + def defPath(sym: Symbol, root: Tree)(using Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})") { + require(sym.span.exists, sym) + object accum extends TreeAccumulator[List[Tree]] { + def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = + if (tree.span.contains(sym.span)) + if (definedSym(tree) == sym) tree :: x + else { + val x1 = foldOver(x, tree) + if (x1 ne x) tree :: x1 else x1 + } + else x + } + accum(Nil, root) + } + + /** The top level classes in this tree, including only those module classes that + * are not a linked class of some other class in the result. + */ + def topLevelClasses(tree: Tree)(using Context): List[ClassSymbol] = tree match { + case PackageDef(_, stats) => stats.flatMap(topLevelClasses) + case tdef: TypeDef if tdef.symbol.isClass => tdef.symbol.asClass :: Nil + case _ => Nil + } + + /** The tree containing only the top-level classes and objects matching either `cls` or its companion object */ + def sliceTopLevel(tree: Tree, cls: ClassSymbol)(using Context): List[Tree] = tree match { + case PackageDef(pid, stats) => + val slicedStats = stats.flatMap(sliceTopLevel(_, cls)) + val isEffectivelyEmpty = slicedStats.forall(_.isInstanceOf[Import]) + if isEffectivelyEmpty then Nil + else cpy.PackageDef(tree)(pid, slicedStats) :: Nil + case tdef: TypeDef => + val sym = tdef.symbol + assert(sym.isClass) + if (cls == sym || cls == sym.linkedClass) tdef :: Nil + else Nil + case vdef: ValDef => + val sym = vdef.symbol + assert(sym.is(Module)) + if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil + else Nil + case tree => + tree :: Nil + } + + /** The statement sequence that contains a definition of `sym`, or Nil + * if none was found. + * For a tree to be found, the symbol must have a position and its definition + * tree must be reachable from some tree stored in an enclosing context. 
+ */ + def definingStats(sym: Symbol)(using Context): List[Tree] = + if (!sym.span.exists || (ctx eq NoContext) || (ctx.compilationUnit eq NoCompilationUnit)) Nil + else defPath(sym, ctx.compilationUnit.tpdTree) match { + case defn :: encl :: _ => + def verify(stats: List[Tree]) = + if (stats exists (definedSym(_) == sym)) stats else Nil + encl match { + case Block(stats, _) => verify(stats) + case encl: Template => verify(encl.body) + case PackageDef(_, stats) => verify(stats) + case _ => Nil + } + case nil => + Nil + } + + /** If `tree` is an instance of `TupleN[...](e1, ..., eN)`, the arguments `e1, ..., eN` + * otherwise the empty list. + */ + def tupleArgs(tree: Tree)(using Context): List[Tree] = tree match { + case Block(Nil, expr) => tupleArgs(expr) + case Inlined(_, Nil, expr) => tupleArgs(expr) + case Apply(fn: NameTree, args) + if fn.name == nme.apply && + fn.symbol.owner.is(Module) && + defn.isTupleClass(fn.symbol.owner.companionClass) => args + case _ => Nil + } + + /** The qualifier part of a Select or Ident. + * For an Ident, this is the `This` of the current class. + */ + def qualifier(tree: Tree)(using Context): Tree = tree match { + case Select(qual, _) => qual + case tree: Ident => desugarIdentPrefix(tree) + case _ => This(ctx.owner.enclosingClass.asClass) + } + + /** Is this a (potentially applied) selection of a member of a structural type + * that is not a member of an underlying class or trait? 
+ */ + def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = { + def isStructuralTermSelect(tree: Select) = + def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match + case RefinedType(parent, rname, rinfo) => + rname == tree.name || hasRefinement(parent) + case tp: TypeProxy => + hasRefinement(tp.superType) + case tp: AndType => + hasRefinement(tp.tp1) || hasRefinement(tp.tp2) + case tp: OrType => + hasRefinement(tp.tp1) || hasRefinement(tp.tp2) + case _ => + false + !tree.symbol.exists + && tree.isTerm + && { + val qualType = tree.qualifier.tpe + hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) + } + def loop(tree: Tree): Boolean = tree match + case TypeApply(fun, _) => + loop(fun) + case Apply(fun, _) => + loop(fun) + case tree: Select => + isStructuralTermSelect(tree) + case _ => + false + loop(tree) + } + + /** Return a pair consisting of (supercall, rest) + * + * - supercall: the superclass call, excluding trait constr calls + * + * The supercall is always the first statement (if it exists) + */ + final def splitAtSuper(constrStats: List[Tree])(implicit ctx: Context): (List[Tree], List[Tree]) = + constrStats.toList match { + case (sc: Apply) :: rest if sc.symbol.isConstructor => (sc :: Nil, rest) + case (block @ Block(_, sc: Apply)) :: rest if sc.symbol.isConstructor => (block :: Nil, rest) + case stats => (Nil, stats) + } + + /** Structural tree comparison (since == on trees is reference equality). 
+ * For the moment, only Ident, Select, Literal, Apply and TypeApply are supported + */ + extension (t1: Tree) { + def === (t2: Tree)(using Context): Boolean = (t1, t2) match { + case (t1: Ident, t2: Ident) => + t1.symbol == t2.symbol + case (t1 @ Select(q1, _), t2 @ Select(q2, _)) => + t1.symbol == t2.symbol && q1 === q2 + case (Literal(c1), Literal(c2)) => + c1 == c2 + case (Apply(f1, as1), Apply(f2, as2)) => + f1 === f2 && as1.corresponds(as2)(_ === _) + case (TypeApply(f1, ts1), TypeApply(f2, ts2)) => + f1 === f2 && ts1.tpes.corresponds(ts2.tpes)(_ =:= _) + case _ => + false + } + def hash(using Context): Int = + t1.getClass.hashCode * 37 + { + t1 match { + case t1: Ident => t1.symbol.hashCode + case t1 @ Select(q1, _) => t1.symbol.hashCode * 41 + q1.hash + case Literal(c1) => c1.hashCode + case Apply(f1, as1) => as1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.hash) + case TypeApply(f1, ts1) => ts1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.tpe.hash) + case _ => t1.hashCode + } + } + } + + def assertAllPositioned(tree: Tree)(using Context): Unit = + tree.foreachSubTree { + case t: WithoutTypeOrPos[_] => + case t => assert(t.span.exists, i"$t") + } + + /** Extractors for quotes */ + object Quoted { + /** Extracts the content of a quoted tree. + * The result can be the contents of a term or type quote, which + * will return a term or type tree respectively. + */ + def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = + if tree.symbol == defn.QuotedRuntime_exprQuote then + // quoted.runtime.Expr.quote[T]() + Some(tree.args.head) + else if tree.symbol == defn.QuotedTypeModule_of then + // quoted.Type.of[](quotes) + val TypeApply(_, body :: _) = tree.fun: @unchecked + Some(body) + else None + } + + /** Extractors for splices */ + object Spliced { + /** Extracts the content of a spliced expression tree. + * The result can be the contents of a term splice, which + * will return a term tree. 
+ */ + def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = + if tree.symbol.isExprSplice then Some(tree.args.head) else None + } + + /** Extractors for type splices */ + object SplicedType { + /** Extracts the content of a spliced type tree. + * The result can be the contents of a type splice, which + * will return a type tree. + */ + def unapply(tree: tpd.Select)(using Context): Option[tpd.Tree] = + if tree.symbol.isTypeSplice then Some(tree.qualifier) else None + } + + /** Extractor for not-null assertions. + * A not-null assertion for reference `x` has the form `x.$asInstanceOf$[x.type & T]`. + */ + object AssertNotNull : + def apply(tree: tpd.Tree, tpnn: Type)(using Context): tpd.Tree = + tree.select(defn.Any_typeCast).appliedToType(AndType(tree.tpe, tpnn)) + + def unapply(tree: tpd.TypeApply)(using Context): Option[tpd.Tree] = tree match + case TypeApply(Select(qual: RefTree, nme.asInstanceOfPM), arg :: Nil) => + arg.tpe match + case AndType(ref, nn1) if qual.tpe eq ref => + qual.tpe.widen match + case OrNull(nn2) if nn1 eq nn2 => + Some(qual) + case _ => None + case _ => None + case _ => None + end AssertNotNull + + object ConstantValue { + def unapply(tree: Tree)(using Context): Option[Any] = + tree match + case Typed(expr, _) => unapply(expr) + case Inlined(_, Nil, expr) => unapply(expr) + case Block(Nil, expr) => unapply(expr) + case _ => + tree.tpe.widenTermRefExpr.normalized match + case ConstantType(Constant(x)) => Some(x) + case _ => None + } +} + +object TreeInfo { + /** A purity level is represented as a bitset (expressed as an Int) */ + class PurityLevel(val x: Int) extends AnyVal { + /** `this` contains the bits of `that` */ + def >= (that: PurityLevel): Boolean = (x & that.x) == that.x + + /** The intersection of the bits of `this` and `that` */ + def min(that: PurityLevel): PurityLevel = new PurityLevel(x & that.x) + } + + /** An expression is a stable path. 
Requires that expression is at least idempotent */ + val Path: PurityLevel = new PurityLevel(4) + + /** The expression has no side effects */ + val Pure: PurityLevel = new PurityLevel(3) + + /** Running the expression a second time has no side effects. Implied by `Pure`. */ + val Idempotent: PurityLevel = new PurityLevel(1) + + val Impure: PurityLevel = new PurityLevel(0) + + /** A stable path that is evaluated without side effects */ + val PurePath: PurityLevel = new PurityLevel(Pure.x | Path.x) + + /** A stable path that is also idempotent */ + val IdempotentPath: PurityLevel = new PurityLevel(Idempotent.x | Path.x) +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala new file mode 100644 index 000000000000..caf8d68442f6 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package ast + +import Trees._ +import core.Contexts._ +import core.ContextOps.enter +import core.Flags._ +import core.Symbols._ +import core.TypeError + +/** A TreeMap that maintains the necessary infrastructure to support + * contextual implicit searches (type-scope implicits are supported anyway). + * + * This includes implicits defined in scope as well as imported implicits. 
+ */ +class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { + import tpd._ + + def transformSelf(vd: ValDef)(using Context): ValDef = + cpy.ValDef(vd)(tpt = transform(vd.tpt)) + + private def nestedScopeCtx(defs: List[Tree])(using Context): Context = { + val nestedCtx = ctx.fresh.setNewScope + defs foreach { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) + case _ => + } + nestedCtx + } + + private def patternScopeCtx(pattern: Tree)(using Context): Context = { + val nestedCtx = ctx.fresh.setNewScope + new TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = { + tree match { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => + nestedCtx.enter(d.symbol) + case _ => + } + traverseChildren(tree) + } + }.traverse(pattern) + nestedCtx + } + + override def transform(tree: Tree)(using Context): Tree = { + try tree match { + case Block(stats, expr) => + super.transform(tree)(using nestedScopeCtx(stats)) + case tree: DefDef => + inContext(localCtx(tree)) { + cpy.DefDef(tree)( + tree.name, + transformParamss(tree.paramss), + transform(tree.tpt), + transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) + } + case impl @ Template(constr, parents, self, _) => + cpy.Template(tree)( + transformSub(constr), + transform(parents)(using ctx.superCallContext), + Nil, + transformSelf(self), + transformStats(impl.body, tree.symbol)) + case tree: CaseDef => + val patCtx = patternScopeCtx(tree.pat)(using ctx) + cpy.CaseDef(tree)( + transform(tree.pat), + transform(tree.guard)(using patCtx), + transform(tree.body)(using patCtx) + ) + case _ => + super.transform(tree) + } + catch { + case ex: TypeError => + report.error(ex, tree.srcPos) + tree + } + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala new file mode 100644 index 000000000000..5139a46d6352 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala 
@@ -0,0 +1,232 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Contexts._, Flags._ +import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant +import Decorators._ +import dotty.tools.dotc.transform.SymUtils._ +import language.experimental.pureFunctions + +/** A map that applies three functions and a substitution together to a tree and + * makes sure they are coordinated so that the result is well-typed. The functions are + * @param typeMap A function from Type to Type that gets applied to the + * type of every tree node and to all locally defined symbols, + * followed by the substitution [substFrom := substTo]. + * @param treeMap A transformer that translates all encountered subtrees in + * prefix traversal orders + * @param oldOwners Previous owners. If a top-level local symbol in the mapped tree + * has one of these as an owner, the owner is replaced by the corresponding + * symbol in `newOwners`. + * @param newOwners New owners, replacing previous owners. + * @param substFrom The symbols that need to be substituted. + * @param substTo The substitution targets. + * + * The reason the substitution is broken out from the rest of the type map is + * that all symbols have to be substituted at the same time. If we do not do this, + * we risk data races on named types. Example: Say we have `outer#1.inner#2` and we + * have two substitutions S1 = [outer#1 := outer#3], S2 = [inner#2 := inner#4] where + * hashtags precede symbol ids. If we do S1 first, we get outer#2.inner#3. If we then + * do S2 we get outer#2.inner#4. But that means that the named type outer#2.inner + * gets two different denotations in the same period. Hence, if -Yno-double-bindings is + * set, we would get a data race assertion error. + */ +class TreeTypeMap( + val typeMap: Type -> Type = IdentityTypeMap, + val treeMap: tpd.Tree -> tpd.Tree = identity[tpd.Tree](_), // !cc! 
need explicit instantiation of default argument + val oldOwners: List[Symbol] = Nil, + val newOwners: List[Symbol] = Nil, + val substFrom: List[Symbol] = Nil, + val substTo: List[Symbol] = Nil, + cpy: tpd.TreeCopier = tpd.cpy)(using Context) extends tpd.TreeMap(cpy) { + import tpd._ + + def copy( + typeMap: Type -> Type, + treeMap: tpd.Tree -> tpd.Tree, + oldOwners: List[Symbol], + newOwners: List[Symbol], + substFrom: List[Symbol], + substTo: List[Symbol])(using Context): TreeTypeMap = + new TreeTypeMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) + + /** If `sym` is one of `oldOwners`, replace by corresponding symbol in `newOwners` */ + def mapOwner(sym: Symbol): Symbol = sym.subst(oldOwners, newOwners) + + /** Replace occurrences of `This(oldOwner)` in some prefix of a type + * by the corresponding `This(newOwner)`. + */ + private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { + private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { + case Nil => tp + case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) + case _ :: from1 => mapPrefix(from1, to.tail, tp) + } + def apply(tp: Type): Type = tp match { + case tp: NamedType => tp.derivedSelect(mapPrefix(oldOwners, newOwners, tp.prefix)) + case _ => mapOver(tp) + } + } + + def mapType(tp: Type): Type = + mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) + + private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = + if (prevStats.isEmpty) assert(newStats.isEmpty) + else { + prevStats.head match { + case pdef: MemberDef => + val prevSym = pdef.symbol + val newSym = newStats.head.symbol + val newCls = newSym.owner.asClass + if (prevSym != newSym) newCls.replace(prevSym, newSym) + case _ => + } + updateDecls(prevStats.tail, newStats.tail) + } + + def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = + val Inlined(call, bindings, expanded) = tree + val (tmap1, 
bindings1) = transformDefs(bindings) + val expanded1 = tmap1.transform(expanded) + cpy.Inlined(tree)(call, bindings1, expanded1) + + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { + case impl @ Template(constr, parents, self, _) => + val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) + cpy.Template(impl)( + constr = tmap.transformSub(constr), + parents = parents.mapconserve(transform), + self = tmap.transformSub(self), + body = impl.body mapconserve + (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) + ).withType(tmap.mapType(impl.tpe)) + case tree1 => + tree1.withType(mapType(tree1.tpe)) match { + case id: Ident if tpd.needsSelect(id.tpe) => + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + case ddef @ DefDef(name, paramss, tpt, _) => + val (tmap1, paramss1) = transformAllParamss(paramss) + val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) + res.symbol.setParamssFromDefs(paramss1) + res.symbol.transformAnnotations { + case ann: BodyAnnotation => ann.derivedAnnotation(transform(ann.tree)) + case ann => ann + } + res + case tdef @ LambdaTypeTree(tparams, body) => + val (tmap1, tparams1) = transformDefs(tparams) + cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) + case blk @ Block(stats, expr) => + val (tmap1, stats1) = transformDefs(stats) + val expr1 = tmap1.transform(expr) + cpy.Block(blk)(stats1, expr1) + case inlined: Inlined => + transformInlined(inlined) + case cdef @ CaseDef(pat, guard, rhs) => + val tmap = withMappedSyms(patVars(pat)) + val pat1 = tmap.transform(pat) + val guard1 = tmap.transform(guard) + val rhs1 = tmap.transform(rhs) + cpy.CaseDef(cdef)(pat1, guard1, rhs1) + case labeled @ Labeled(bind, expr) => + val tmap = withMappedSyms(bind.symbol :: Nil) + val bind1 = tmap.transformSub(bind) + val expr1 = tmap.transform(expr) + cpy.Labeled(labeled)(bind1, expr1) + case tree @ Hole(_, _, args, content, tpt) => + val args1 = 
args.mapConserve(transform) + val content1 = transform(content) + val tpt1 = transform(tpt) + cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1) + case lit @ Literal(Constant(tpe: Type)) => + cpy.Literal(lit)(Constant(mapType(tpe))) + case tree1 => + super.transform(tree1) + } + } + + override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] = + transformDefs(trees)._2 + + def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { + val tmap = withMappedSyms(tpd.localSyms(trees)) + (tmap, tmap.transformSub(trees)) + } + + private def transformAllParamss(paramss: List[ParamClause]): (TreeTypeMap, List[ParamClause]) = paramss match + case params :: paramss1 => + val (tmap1, params1: ParamClause) = ((params: @unchecked) match + case ValDefs(vparams) => transformDefs(vparams) + case TypeDefs(tparams) => transformDefs(tparams) + ): @unchecked + val (tmap2, paramss2) = tmap1.transformAllParamss(paramss1) + (tmap2, params1 :: paramss2) + case nil => + (this, paramss) + + def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] + + def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree)) + + /** The current tree map composed with a substitution [from -> to] */ + def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap = + if (from eq to) this + else { + // assert that substitution stays idempotent, assuming its parts are + // TODO: It might be better to cater for the asserted-away conditions, by + // setting up a proper substitution abstraction with a compose operator that + // guarantees idempotence. But this might be too inefficient in some cases. + // We'll cross that bridge when we need to. 
+ assert(!from.exists(substTo contains _)) + assert(!to.exists(substFrom contains _)) + assert(!from.exists(newOwners contains _)) + assert(!to.exists(oldOwners contains _)) + copy( + typeMap, + treeMap, + from ++ oldOwners, + to ++ newOwners, + from ++ substFrom, + to ++ substTo) + } + + /** Apply `typeMap` and `ownerMap` to given symbols `syms` + * and return a treemap that contains the substitution + * between original and mapped symbols. + */ + def withMappedSyms(syms: List[Symbol]): TreeTypeMap = + withMappedSyms(syms, mapSymbols(syms, this)) + + /** The tree map with the substitution between originals `syms` + * and mapped symbols `mapped`. Also goes into mapped classes + * and substitutes their declarations. + */ + def withMappedSyms(syms: List[Symbol], mapped: List[Symbol]): TreeTypeMap = + if syms eq mapped then this + else + val substMap = withSubstitution(syms, mapped) + lazy val origCls = mapped.zip(syms).filter(_._1.isClass).toMap + mapped.filter(_.isClass).foldLeft(substMap) { (tmap, cls) => + val origDcls = cls.info.decls.toList.filterNot(_.is(TypeParam)) + val tmap0 = tmap.withSubstitution(origCls(cls).typeParams, cls.typeParams) + val mappedDcls = mapSymbols(origDcls, tmap0, mapAlways = true) + val tmap1 = tmap.withMappedSyms( + origCls(cls).typeParams ::: origDcls, + cls.typeParams ::: mappedDcls) + origDcls.lazyZip(mappedDcls).foreach(cls.asClass.replace) + tmap1 + } + + override def toString = + def showSyms(syms: List[Symbol]) = + syms.map(sym => s"$sym#${sym.id}").mkString(", ") + s"""TreeTypeMap( + |typeMap = $typeMap + |treeMap = $treeMap + |oldOwners = ${showSyms(oldOwners)} + |newOwners = ${showSyms(newOwners)} + |substFrom = ${showSyms(substFrom)} + |substTo = ${showSyms(substTo)}""".stripMargin +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala new file mode 100644 index 000000000000..82b027b0231a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala @@ -0,0 
+1,1789 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._ +import typer.{ ConstFold, ProtoTypes } +import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._ +import collection.mutable.ListBuffer +import printing.Printer +import printing.Texts.Text +import util.{Stats, Attachment, Property, SourceFile, NoSource, SrcPos, SourcePosition} +import config.Config +import config.Printers.overload +import annotation.internal.sharable +import annotation.unchecked.uncheckedVariance +import annotation.constructorOnly +import Decorators._ +import annotation.retains +import language.experimental.pureFunctions + +object Trees { + + type Untyped = Nothing + + /** The total number of created tree nodes, maintained if Stats.enabled */ + @sharable var ntrees: Int = 0 + + /** Property key for trees with documentation strings attached */ + val DocComment: Property.StickyKey[Comments.Comment] = Property.StickyKey() + + /** Property key for backquoted identifiers and definitions */ + val Backquoted: Property.StickyKey[Unit] = Property.StickyKey() + + /** Trees take a parameter indicating what the type of their `tpe` field + * is. Two choices: `Type` or `Untyped`. + * Untyped trees have type `Tree[Untyped]`. + * + * Tree typing uses a copy-on-write implementation: + * + * - You can never observe a `tpe` which is `null` (throws an exception) + * - So when creating a typed tree with `withType` we can re-use + * the existing tree transparently, assigning its `tpe` field. + * - It is impossible to embed untyped trees in typed ones. + * - Typed trees can be embedded in untyped ones provided they are rooted + * in a TypedSplice node. + * - Type checking an untyped tree should remove all embedded `TypedSplice` + * nodes. 
+ */ + abstract class Tree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable, caps.Pure { + + if (Stats.enabled) ntrees += 1 + + /** The type constructor at the root of the tree */ + type ThisTree[T >: Untyped] <: Tree[T] + + protected var myTpe: T @uncheckedVariance = _ + + /** Destructively set the type of the tree. This should be called only when it is known that + * it is safe under sharing to do so. One use-case is in the withType method below + * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, + * where we overwrite with a simplified version of the type itself. + */ + private[dotc] def overwriteType(tpe: T): Unit = + myTpe = tpe + + /** The type of the tree. In case of an untyped tree, + * an UnAssignedTypeException is thrown. (Overridden by empty trees) + */ + final def tpe: T @uncheckedVariance = { + if (myTpe == null) + throw UnAssignedTypeException(this) + myTpe + } + + /** Copy `tpe` attribute from tree `from` into this tree, independently + * whether it is null or not. + final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = { + val t1 = this.withSpan(from.span) + val t2 = + if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) + else t1 + t2.asInstanceOf[ThisTree[T]] + } + */ + + /** Return a typed tree that's isomorphic to this tree, but has given + * type. 
(Overridden by empty trees) + */ + def withType(tpe: Type)(using Context): ThisTree[Type] = { + if (tpe.isInstanceOf[ErrorType]) + assert(!Config.checkUnreportedErrors || + ctx.reporter.errorsReported || + ctx.settings.YshowPrintErrors.value + // under -Yshow-print-errors, errors might arise during printing, but they do not count as reported + ) + else if (Config.checkTreesConsistent) + checkChildrenTyped(productIterator) + withTypeUnchecked(tpe) + } + + /** Check that typed trees don't refer to untyped ones, except if + * - the parent tree is an import, or + * - the child tree is an identifier, or + * - errors were reported + */ + private def checkChildrenTyped(it: Iterator[Any])(using Context): Unit = + if (!this.isInstanceOf[Import[?]]) + while (it.hasNext) + it.next() match { + case x: Ident[?] => // untyped idents are used in a number of places in typed trees + case x: Tree[?] => + assert(x.hasType || ctx.reporter.errorsReported, + s"$this has untyped child $x") + case xs: List[?] => checkChildrenTyped(xs.iterator) + case _ => + } + + def withTypeUnchecked(tpe: Type): ThisTree[Type] = { + val tree = + (if (myTpe == null || + (myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this + else cloneIn(source)).asInstanceOf[Tree[Type]] + tree overwriteType tpe + tree.asInstanceOf[ThisTree[Type]] + } + + /** Does the tree have its type field set? Note: this operation is not + * referentially transparent, because it can observe the withType + * modifications. Should be used only in special circumstances (we + * need it for printing trees with optional type info). + */ + final def hasType: Boolean = myTpe != null + + final def typeOpt: Type = myTpe match { + case tp: Type => tp + case _ => NoType + } + + /** The denotation referred to by this tree. + * Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other + * kinds of trees + */ + def denot(using Context): Denotation = NoDenotation + + /** Shorthand for `denot.symbol`. 
*/ + final def symbol(using Context): Symbol = denot.symbol + + /** Does this tree represent a type? */ + def isType: Boolean = false + + /** Does this tree represent a term? */ + def isTerm: Boolean = false + + /** Is this a legal part of a pattern which is not at the same time a term? */ + def isPattern: Boolean = false + + /** Does this tree define a new symbol that is not defined elsewhere? */ + def isDef: Boolean = false + + /** Is this tree either the empty tree or the empty ValDef or an empty type ident? */ + def isEmpty: Boolean = false + + /** Convert tree to a list. Gives a singleton list, except + * for thickets which return their element trees. + */ + def toList: List[Tree[T]] = this :: Nil + + /** if this tree is the empty tree, the alternative, else this tree */ + inline def orElse[U >: Untyped <: T](inline that: Tree[U]): Tree[U] = + if (this eq genericEmptyTree) that else this + + /** The number of nodes in this tree */ + def treeSize: Int = { + var s = 1 + def addSize(elem: Any): Unit = elem match { + case t: Tree[?] => s += t.treeSize + case ts: List[?] => ts foreach addSize + case _ => + } + productIterator foreach addSize + s + } + + /** If this is a thicket, perform `op` on each of its trees + * otherwise, perform `op` ion tree itself. + */ + def foreachInThicket(op: Tree[T] => Unit): Unit = op(this) + + override def toText(printer: Printer): Text = printer.toText(this) + + def sameTree(that: Tree[?]): Boolean = { + def isSame(x: Any, y: Any): Boolean = + x.asInstanceOf[AnyRef].eq(y.asInstanceOf[AnyRef]) || { + x match { + case x: Tree[?] => + y match { + case y: Tree[?] => x.sameTree(y) + case _ => false + } + case x: List[?] => + y match { + case y: List[?] 
=> x.corresponds(y)(isSame) + case _ => false + } + case _ => + false + } + } + this.getClass == that.getClass && { + val it1 = this.productIterator + val it2 = that.productIterator + it1.corresponds(it2)(isSame) + } + } + + override def hashCode(): Int = System.identityHashCode(this) + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + } + + class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException { + override def getMessage: String = s"type of $tree is not assigned" + } + + type LazyTree[-T >: Untyped] = Tree[T] | Lazy[Tree[T]] + type LazyTreeList[-T >: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] + + // ------ Categories of trees ----------------------------------- + + /** Instances of this class are trees for which isType is definitely true. + * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) + */ + trait TypTree[-T >: Untyped] extends Tree[T] { + type ThisTree[-T >: Untyped] <: TypTree[T] + override def isType: Boolean = true + } + + /** Instances of this class are trees for which isTerm is definitely true. + * Note that some trees have isTerm = true without being TermTrees (e.g. Ident, Annotated) + */ + trait TermTree[-T >: Untyped] extends Tree[T] { + type ThisTree[-T >: Untyped] <: TermTree[T] + override def isTerm: Boolean = true + } + + /** Instances of this class are trees which are not terms but are legal + * parts of patterns. 
+ */ + trait PatternTree[-T >: Untyped] extends Tree[T] { + type ThisTree[-T >: Untyped] <: PatternTree[T] + override def isPattern: Boolean = true + } + + /** Tree's denotation can be derived from its type */ + abstract class DenotingTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[-T >: Untyped] <: DenotingTree[T] + override def denot(using Context): Denotation = typeOpt.stripped match + case tpe: NamedType => tpe.denot + case tpe: ThisType => tpe.cls.denot + case _ => NoDenotation + } + + /** Tree's denot/isType/isTerm properties come from a subtree + * identified by `forwardTo`. + */ + abstract class ProxyTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[-T >: Untyped] <: ProxyTree[T] + def forwardTo: Tree[T] + override def denot(using Context): Denotation = forwardTo.denot + override def isTerm: Boolean = forwardTo.isTerm + override def isType: Boolean = forwardTo.isType + } + + /** Tree has a name */ + abstract class NameTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + type ThisTree[-T >: Untyped] <: NameTree[T] + def name: Name + } + + /** Tree refers by name to a denotation */ + abstract class RefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + type ThisTree[-T >: Untyped] <: RefTree[T] + def qualifier: Tree[T] + override def isType: Boolean = name.isTypeName + override def isTerm: Boolean = name.isTermName + } + + /** Tree defines a new symbol */ + trait DefTree[-T >: Untyped] extends DenotingTree[T] { + type ThisTree[-T >: Untyped] <: DefTree[T] + + private var myMods: untpd.Modifiers | Null = _ + + private[dotc] def rawMods: untpd.Modifiers = + if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN + + def withAnnotations(annots: List[untpd.Tree]): ThisTree[Untyped] = withMods(rawMods.withAnnotations(annots)) + + def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = { + 
val tree = if (myMods == null || (myMods == mods)) this else cloneIn(source) + tree.setMods(mods) + tree.asInstanceOf[ThisTree[Untyped]] + } + + def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags)) + def withAddedFlags(flags: FlagSet): ThisTree[Untyped] = withMods(rawMods | flags) + + /** Destructively update modifiers. To be used with care. */ + def setMods(mods: untpd.Modifiers): Unit = myMods = mods + + override def isDef: Boolean = true + def namedType: NamedType = tpe.asInstanceOf[NamedType] + } + + extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods + + sealed trait WithEndMarker[-T >: Untyped]: + self: PackageDef[T] | NamedDefTree[T] => + + import WithEndMarker.* + + final def endSpan(using Context): Span = + if hasEndMarker then + val realName = srcName.stripModuleClassSuffix.lastPart + span.withStart(span.end - realName.length) + else + NoSpan + + /** The name in source code that represents this construct, + * and is the name that the user must write to create a valid + * end marker. + * e.g. a constructor definition is terminated in the source + * code by `end this`, so it's `srcName` should return `this`. 
+ */ + protected def srcName(using Context): Name + + final def withEndMarker(): self.type = + self.withAttachment(HasEndMarker, ()) + + final def withEndMarker(copyFrom: WithEndMarker[?]): self.type = + if copyFrom.hasEndMarker then + this.withEndMarker() + else + this + + final def dropEndMarker(): self.type = + self.removeAttachment(HasEndMarker) + this + + protected def hasEndMarker: Boolean = self.hasAttachment(HasEndMarker) + + object WithEndMarker: + /** Property key that signals the tree was terminated + * with an `end` marker in the source code + */ + private val HasEndMarker: Property.StickyKey[Unit] = Property.StickyKey() + + end WithEndMarker + + abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + extends NameTree[T] with DefTree[T] with WithEndMarker[T] { + type ThisTree[-T >: Untyped] <: NamedDefTree[T] + + protected def srcName(using Context): Name = + if name == nme.CONSTRUCTOR then nme.this_ + else if symbol.isPackageObject then symbol.owner.name + else name + + /** The position of the name defined by this definition. + * This is a point position if the definition is synthetic, or a range position + * if the definition comes from source. + * It might also be that the definition does not have a position (for instance when synthesized by + * a calling chain from `viewExists`), in that case the return position is NoSpan. + * Overridden in Bind + */ + def nameSpan(using Context): Span = + if (span.exists) { + val point = span.point + if (rawMods.is(Synthetic) || span.isSynthetic || name.toTermName == nme.ERROR) Span(point) + else { + val realName = srcName.stripModuleClassSuffix.lastPart + Span(point, point + realName.length, point) + } + } + else span + + /** The source position of the name defined by this definition. + * This is a point position if the definition is synthetic, or a range position + * if the definition comes from source. 
+ */ + def namePos(using Context): SourcePosition = source.atSpan(nameSpan) + } + + /** Tree defines a new symbol and carries modifiers. + * The position of a MemberDef contains only the defined identifier or pattern. + * The envelope of a MemberDef contains the whole definition and has its point + * on the opening keyword (or the next token after that if keyword is missing). + */ + abstract class MemberDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + type ThisTree[-T >: Untyped] <: MemberDef[T] + + def rawComment: Option[Comment] = getAttachment(DocComment) + + def setComment(comment: Option[Comment]): this.type = { + comment.map(putAttachment(DocComment, _)) + this + } + + def name: Name + } + + /** A ValDef or DefDef tree */ + abstract class ValOrDefDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { + type ThisTree[-T >: Untyped] <: ValOrDefDef[T] + def name: TermName + def tpt: Tree[T] + def unforcedRhs: LazyTree[T] = unforced + def rhs(using Context): Tree[T] = forceIfLazy + } + + trait ValOrTypeDef[-T >: Untyped] extends MemberDef[T]: + type ThisTree[-T >: Untyped] <: ValOrTypeDef[T] + + type ParamClause[T >: Untyped] = List[ValDef[T]] | List[TypeDef[T]] + + // ----------- Tree case classes ------------------------------------ + + /** name */ + case class Ident[-T >: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) + extends RefTree[T] { + type ThisTree[-T >: Untyped] = Ident[T] + def qualifier: Tree[T] = genericEmptyTree + + def isBackquoted: Boolean = hasAttachment(Backquoted) + } + + class SearchFailureIdent[-T >: Untyped] private[ast] (name: Name, expl: -> String)(implicit @constructorOnly src: SourceFile) + extends Ident[T](name) { + def explanation = expl + override def toString: String = s"SearchFailureIdent($explanation)" + } + + /** qualifier.name, or qualifier#name, if qualifier is a type */ + case class Select[-T >: 
Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) + extends RefTree[T] { + type ThisTree[-T >: Untyped] = Select[T] + + override def denot(using Context): Denotation = typeOpt match + case ConstantType(_) if ConstFold.foldedUnops.contains(name) => + // Recover the denotation of a constant-folded selection + qualifier.typeOpt.member(name).atSignature(Signature.NotAMethod, name) + case _ => + super.denot + + def nameSpan(using Context): Span = + if span.exists then + val point = span.point + if name.toTermName == nme.ERROR then + Span(point) + else if qualifier.span.start > span.start then // right associative + val realName = name.stripModuleClassSuffix.lastPart + Span(span.start, span.start + realName.length, point) + else + Span(point, span.end, point) + else span + } + + class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) + extends Select[T](qualifier, name) { + override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" + } + + /** qual.this */ + case class This[-T >: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = This[T] + // Denotation of a This tree is always the underlying class; needs correction for modules. 
+ override def denot(using Context): Denotation = + typeOpt match { + case tpe @ TermRef(pre, _) if tpe.symbol.is(Module) => + tpe.symbol.moduleClass.denot.asSeenFrom(pre) + case _ => + super.denot + } + } + + /** C.super[mix], where qual = C.this */ + case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = Super[T] + def forwardTo: Tree[T] = qual + } + + abstract class GenericApply[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] <: GenericApply[T] + val fun: Tree[T] + val args: List[Tree[T]] + def forwardTo: Tree[T] = fun + } + + object GenericApply: + def unapply[T >: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match + case tree: GenericApply[T] => Some((tree.fun, tree.args)) + case _ => None + + /** The kind of application */ + enum ApplyKind: + case Regular // r.f(x) + case Using // r.f(using x) + case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply + + /** fun(args) */ + case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends GenericApply[T] { + type ThisTree[-T >: Untyped] = Apply[T] + + def setApplyKind(kind: ApplyKind) = + putAttachment(untpd.KindOfApply, kind) + this + + /** The kind of this application. Works reliably only for untyped trees; typed trees + * are under no obligation to update it correctly. 
+ */ + def applyKind: ApplyKind = + attachmentOrElse(untpd.KindOfApply, ApplyKind.Regular) + } + + /** fun[args] */ + case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends GenericApply[T] { + type ThisTree[-T >: Untyped] = TypeApply[T] + } + + /** const */ + case class Literal[-T >: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) + extends Tree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = Literal[T] + } + + /** new tpt, but no constructor call */ + case class New[-T >: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = New[T] + } + + /** expr : tpt */ + case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = Typed[T] + def forwardTo: Tree[T] = expr + } + + /** name = arg, in a parameter list */ + case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = NamedArg[T] + } + + /** name = arg, outside a parameter list */ + case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Assign[T] + } + + /** { stats; expr } */ + case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = Block[T] + override def isType: Boolean = expr.isType + override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary + } + + /** if cond then thenp else elsep */ + case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: 
Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = If[T] + def isInline = false + } + class InlineIf[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + extends If(cond, thenp, elsep) { + override def isInline = true + override def toString = s"InlineIf($cond, $thenp, $elsep)" + } + + /** A closure with an environment and a reference to a method. + * @param env The captured parameters of the closure + * @param meth A ref tree that refers to the method of the closure. + * The first (env.length) parameters of that method are filled + * with env values. + * @param tpt Either EmptyTree or a TypeTree. If tpt is EmptyTree the type + * of the closure is a function type, otherwise it is the type + * given in `tpt`, which must be a SAM type. + */ + case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Closure[T] + } + + /** selector match { cases } */ + case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Match[T] + def isInline = false + } + class InlineMatch[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends Match(selector, cases) { + override def isInline = true + override def toString = s"InlineMatch($selector, $cases)" + } + + /** case pat if guard => body */ + case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = CaseDef[T] + } + + /** label[tpt]: { expr } */ + case class Labeled[-T >: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly 
src: SourceFile) + extends NameTree[T] { + type ThisTree[-T >: Untyped] = Labeled[T] + def name: Name = bind.name + } + + /** return expr + * where `from` refers to the method or label from which the return takes place + * After program transformations this is not necessarily the enclosing method, because + * closures can intervene. + */ + case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Return[T] + } + + /** while (cond) { body } */ + case class WhileDo[-T >: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = WhileDo[T] + } + + /** try block catch cases finally finalizer */ + case class Try[-T >: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Try[T] + } + + /** Seq(elems) + * @param tpt The element type of the sequence. + */ + case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = SeqLiteral[T] + } + + /** Array(elems) */ + class JavaSeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends SeqLiteral(elems, elemtpt) { + override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" + } + + /** A tree representing inlined code. + * + * @param call Info about the original call that was inlined + * Until PostTyper, this is the full call, afterwards only + * a reference to the toplevel class from which the call was inlined. + * @param bindings Bindings for proxies to be used in the inlined code + * @param expansion The inlined tree, minus bindings. 
+ * + * The full inlined code is equivalent to + * + * { bindings; expansion } + * + * The reason to keep `bindings` separate is because they are typed in a + * different context: `bindings` represent the arguments to the inlined + * call, whereas `expansion` represents the body of the inlined function. + */ + case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = Inlined[T] + override def isTerm = expansion.isTerm + override def isType = expansion.isType + } + + /** A type tree that represents an existing or inferred type */ + case class TypeTree[-T >: Untyped]()(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = TypeTree[T] + override def isEmpty: Boolean = !hasType + override def toString: String = + s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" + } + + /** A type tree whose type is inferred. These trees appear in two contexts + * - as an argument of a TypeApply. In that case its type is always a TypeVar + * - as a (result-)type of an inferred ValDef or DefDef. + * Every TypeVar is created as the type of one InferredTypeTree. 
+ */ + class InferredTypeTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + + /** ref.type */ + case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = SingletonTypeTree[T] + } + + /** tpt { refinements } */ + case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = RefinedTypeTree[T] + def forwardTo: Tree[T] = tpt + } + + /** tpt[args] */ + case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = AppliedTypeTree[T] + def forwardTo: Tree[T] = tpt + } + + /** [typeparams] -> tpt + * + * Note: the type of such a tree is not necessarily a `HKTypeLambda`, it can + * also be a `TypeBounds` where the upper bound is an `HKTypeLambda`, and the + * lower bound is either a reference to `Nothing` or an `HKTypeLambda`, + * this happens because these trees are typed by `HKTypeLambda#fromParams` which + * makes sure to move bounds outside of the type lambda itself to simplify their + * handling in the compiler. + * + * You may ask: why not normalize the trees too? That way, + * + * LambdaTypeTree(X, TypeBoundsTree(A, B)) + * + * would become, + * + * TypeBoundsTree(LambdaTypeTree(X, A), LambdaTypeTree(X, B)) + * + * which would maintain consistency between a tree and its type. The problem + * with this definition is that the same tree `X` appears twice, therefore + * we'd have to create two symbols for it which makes it harder to relate the + * source code written by the user with the trees used by the compiler (for + * example, to make "find all references" work in the IDE). 
+ */ + case class LambdaTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = LambdaTypeTree[T] + } + + case class TermLambdaTypeTree[-T >: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = TermLambdaTypeTree[T] + } + + /** [bound] selector match { cases } */ + case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = MatchTypeTree[T] + } + + /** => T */ + case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = ByNameTypeTree[T] + } + + /** >: lo <: hi + * >: lo <: hi = alias for RHS of bounded opaque type + */ + case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = TypeBoundsTree[T] + } + + /** name @ body */ + case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends NamedDefTree[T] with PatternTree[T] { + type ThisTree[-T >: Untyped] = Bind[T] + override def isType: Boolean = name.isTypeName + override def isTerm: Boolean = name.isTermName + + override def nameSpan(using Context): Span = + if span.exists then Span(span.start, span.start + name.toString.length) else span + } + + /** tree_1 | ... 
| tree_n */ + case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends PatternTree[T] { + type ThisTree[-T >: Untyped] = Alternative[T] + } + + /** The typed translation of `extractor(patterns)` in a pattern. The translation has the following + * components: + * + * @param fun is `extractor.unapply` (or, for backwards compatibility, `extractor.unapplySeq`) + * possibly with type parameters + * @param implicits Any implicit parameters passed to the unapply after the selector + * @param patterns The argument patterns in the pattern match. + * + * It is typed with same type as first `fun` argument + * Given a match selector `sel` a pattern UnApply(fun, implicits, patterns) is roughly translated as follows + * + * val result = fun(sel)(implicits) + * if (result.isDefined) "match patterns against result" + */ + case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with PatternTree[T] { + type ThisTree[-T >: Untyped] = UnApply[T] + def forwardTo = fun + } + + /** mods val name: tpt = rhs */ + case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + extends ValOrDefDef[T], ValOrTypeDef[T] { + type ThisTree[-T >: Untyped] = ValDef[T] + assert(isEmpty || (tpt ne genericEmptyTree)) + def unforced: LazyTree[T] = preRhs + protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + } + + /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ + case class DefDef[-T >: Untyped] private[ast] (name: TermName, + paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + extends ValOrDefDef[T] { + type ThisTree[-T >: Untyped] = DefDef[T] + assert(tpt ne genericEmptyTree) + 
def unforced: LazyTree[T] = preRhs + protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + + def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match + case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] + case _ => Nil + + def trailingParamss(using Context): List[ParamClause[T]] = paramss match + case ((tparam: TypeDef[_]) :: _) :: paramss1 => paramss1 + case _ => paramss + + def termParamss(using Context): List[List[ValDef[T]]] = + (if ctx.erasedTypes then paramss else untpd.termParamssIn(paramss)) + .asInstanceOf[List[List[ValDef[T]]]] + } + + /** mods class name template or + * mods trait name template or + * mods type name = rhs or + * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or + * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods + */ + case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + extends MemberDef[T], ValOrTypeDef[T] { + type ThisTree[-T >: Untyped] = TypeDef[T] + + /** Is this a definition of a class? */ + def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] + + def isBackquoted: Boolean = hasAttachment(Backquoted) + } + + /** extends parents { self => body } + * @param parentsOrDerived A list of parents followed by a list of derived classes, + * if this is of class untpd.DerivingTemplate. + * Typed templates only have parents. 
+ */ + case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + extends DefTree[T] with WithLazyField[List[Tree[T]]] { + type ThisTree[-T >: Untyped] = Template[T] + def unforcedBody: LazyTreeList[T] = unforced + def unforced: LazyTreeList[T] = preBody + protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x + def body(using Context): List[Tree[T]] = forceIfLazy + + def parents: List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate + def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate + } + + + abstract class ImportOrExport[-T >: Untyped](implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] { + type ThisTree[-T >: Untyped] <: ImportOrExport[T] + val expr: Tree[T] + val selectors: List[untpd.ImportSelector] + } + + /** import expr.selectors + * where a selector is either an untyped `Ident`, `name` or + * an untyped thicket consisting of `name` and `rename`. + */ + case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + extends ImportOrExport[T] { + type ThisTree[-T >: Untyped] = Import[T] + } + + /** export expr.selectors + * where a selector is either an untyped `Ident`, `name` or + * an untyped thicket consisting of `name` and `rename`. 
+ */ + case class Export[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + extends ImportOrExport[T] { + type ThisTree[-T >: Untyped] = Export[T] + } + + /** package pid { stats } */ + case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with WithEndMarker[T] { + type ThisTree[-T >: Untyped] = PackageDef[T] + def forwardTo: RefTree[T] = pid + protected def srcName(using Context): Name = pid.name + } + + /** arg @annot */ + case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] { + type ThisTree[-T >: Untyped] = Annotated[T] + def forwardTo: Tree[T] = arg + } + + trait WithoutTypeOrPos[-T >: Untyped] extends Tree[T] { + override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] + override def span: Span = NoSpan + override def span_=(span: Span): Unit = {} + } + + /** Temporary class that results from translation of ModuleDefs + * (and possibly other statements). + * The contained trees will be integrated when transformed with + * a `transform(List[Tree])` call. 
+ */ + case class Thicket[-T >: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends Tree[T] with WithoutTypeOrPos[T] { + myTpe = NoType.asInstanceOf[T] + type ThisTree[-T >: Untyped] = Thicket[T] + + def mapElems(op: Tree[T] => Tree[T] @uncheckedVariance): Thicket[T] = { + val newTrees = trees.mapConserve(op) + if (trees eq newTrees) + this + else + Thicket[T](newTrees)(source).asInstanceOf[this.type] + } + + override def foreachInThicket(op: Tree[T] => Unit): Unit = + trees foreach (_.foreachInThicket(op)) + + override def isEmpty: Boolean = trees.isEmpty + override def toList: List[Tree[T]] = flatten(trees) + override def toString: String = if (isEmpty) "EmptyTree" else "Thicket(" + trees.mkString(", ") + ")" + override def span: Span = + def combine(s: Span, ts: List[Tree[T]]): Span = ts match + case t :: ts1 => combine(s.union(t.span), ts1) + case nil => s + combine(NoSpan, trees) + + override def withSpan(span: Span): this.type = + mapElems(_.withSpan(span)).asInstanceOf[this.type] + } + + class EmptyTree[T >: Untyped] extends Thicket(Nil)(NoSource) { + // assert(uniqueId != 1492) + override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") + } + + class EmptyValDef[T >: Untyped] extends ValDef[T]( + nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { + myTpe = NoType.asInstanceOf[T] + setMods(untpd.Modifiers(PrivateLocal)) + override def isEmpty: Boolean = true + override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyValDef") + } + + @sharable val theEmptyTree = new EmptyTree[Type]() + @sharable val theEmptyValDef = new EmptyValDef[Type]() + + def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] + def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] + + /** Tree that replaces a level 1 splices in pickled (level 0) quotes. 
+ * It is only used when picking quotes (will never be in a TASTy file). + * + * @param isTermHole If this hole is a term, otherwise it is a type hole. + * @param idx The index of the hole in it's enclosing level 0 quote. + * @param args The arguments of the splice to compute its content + * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. + * @param tpt Type of the hole + */ + case class Hole[-T >: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[-T >: Untyped] <: Hole[T] + override def isTerm: Boolean = isTermHole + override def isType: Boolean = !isTermHole + } + + def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { + def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = + remaining match { + case Thicket(elems) :: remaining1 => + var buf1 = buf + if (buf1 == null) { + buf1 = new ListBuffer[Tree[T]] + var scanned = trees + while (scanned `ne` remaining) { + buf1 += scanned.head + scanned = scanned.tail + } + } + recur(recur(buf1, elems), remaining1) + case tree :: remaining1 => + if (buf != null) buf += tree + recur(buf, remaining1) + case nil => + buf + } + val buf = recur(null, trees) + if (buf != null) buf.toList else trees + } + + // ----- Lazy trees and tree sequences + + /** A tree that can have a lazy field + * The field is represented by some private `var` which is + * accessed by `unforced` and `force`. Forcing the field will + * set the `var` to the underlying value. + */ + trait WithLazyField[+T <: AnyRef] { + def unforced: T | Lazy[T] + protected def force(x: T @uncheckedVariance): Unit + def forceIfLazy(using Context): T = unforced match { + case lzy: Lazy[T @unchecked] => + val x = lzy.complete + force(x) + x + case x: T @ unchecked => x + } + } + + /** A base trait for lazy tree fields. 
+ * These can be instantiated with Lazy instances which + * can delay tree construction until the field is first demanded. + */ + trait Lazy[+T <: AnyRef] { + def complete(using Context): T + } + + // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. + + abstract class Instance[T >: Untyped <: Type] { inst => + + type Tree = Trees.Tree[T] + type TypTree = Trees.TypTree[T] + type TermTree = Trees.TermTree[T] + type PatternTree = Trees.PatternTree[T] + type DenotingTree = Trees.DenotingTree[T] + type ProxyTree = Trees.ProxyTree[T] + type NameTree = Trees.NameTree[T] + type RefTree = Trees.RefTree[T] + type DefTree = Trees.DefTree[T] + type NamedDefTree = Trees.NamedDefTree[T] + type MemberDef = Trees.MemberDef[T] + type ValOrDefDef = Trees.ValOrDefDef[T] + type ValOrTypeDef = Trees.ValOrTypeDef[T] + type LazyTree = Trees.LazyTree[T] + type LazyTreeList = Trees.LazyTreeList[T] + type ParamClause = Trees.ParamClause[T] + + type Ident = Trees.Ident[T] + type SearchFailureIdent = Trees.SearchFailureIdent[T] + type Select = Trees.Select[T] + type SelectWithSig = Trees.SelectWithSig[T] + type This = Trees.This[T] + type Super = Trees.Super[T] + type Apply = Trees.Apply[T] + type TypeApply = Trees.TypeApply[T] + type GenericApply = Trees.GenericApply[T] + type Literal = Trees.Literal[T] + type New = Trees.New[T] + type Typed = Trees.Typed[T] + type NamedArg = Trees.NamedArg[T] + type Assign = Trees.Assign[T] + type Block = Trees.Block[T] + type If = Trees.If[T] + type InlineIf = Trees.InlineIf[T] + type Closure = Trees.Closure[T] + type Match = Trees.Match[T] + type InlineMatch = Trees.InlineMatch[T] + type CaseDef = Trees.CaseDef[T] + type Labeled = Trees.Labeled[T] + type Return = Trees.Return[T] + type WhileDo = Trees.WhileDo[T] + type Try = Trees.Try[T] + type SeqLiteral = Trees.SeqLiteral[T] + type JavaSeqLiteral = Trees.JavaSeqLiteral[T] + type Inlined = Trees.Inlined[T] + type TypeTree = Trees.TypeTree[T] + type InferredTypeTree = 
Trees.InferredTypeTree[T] + type SingletonTypeTree = Trees.SingletonTypeTree[T] + type RefinedTypeTree = Trees.RefinedTypeTree[T] + type AppliedTypeTree = Trees.AppliedTypeTree[T] + type LambdaTypeTree = Trees.LambdaTypeTree[T] + type TermLambdaTypeTree = Trees.TermLambdaTypeTree[T] + type MatchTypeTree = Trees.MatchTypeTree[T] + type ByNameTypeTree = Trees.ByNameTypeTree[T] + type TypeBoundsTree = Trees.TypeBoundsTree[T] + type Bind = Trees.Bind[T] + type Alternative = Trees.Alternative[T] + type UnApply = Trees.UnApply[T] + type ValDef = Trees.ValDef[T] + type DefDef = Trees.DefDef[T] + type TypeDef = Trees.TypeDef[T] + type Template = Trees.Template[T] + type Import = Trees.Import[T] + type Export = Trees.Export[T] + type ImportOrExport = Trees.ImportOrExport[T] + type PackageDef = Trees.PackageDef[T] + type Annotated = Trees.Annotated[T] + type Thicket = Trees.Thicket[T] + + type Hole = Trees.Hole[T] + + @sharable val EmptyTree: Thicket = genericEmptyTree + @sharable val EmptyValDef: ValDef = genericEmptyValDef + @sharable val ContextualEmptyTree: Thicket = new EmptyTree() // an empty tree marking a contextual closure + + // ----- Auxiliary creation methods ------------------ + + def Thicket(): Thicket = EmptyTree + def Thicket(x1: Tree, x2: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: Nil) + def Thicket(x1: Tree, x2: Tree, x3: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: x3 :: Nil) + def Thicket(xs: List[Tree])(implicit src: SourceFile) = new Thicket(xs) + + def flatTree(xs: List[Tree])(implicit src: SourceFile): Tree = flatten(xs) match { + case x :: Nil => x + case ys => Thicket(ys) + } + + // ----- Helper classes for copying, transforming, accumulating ----------------- + + val cpy: TreeCopier + + /** A class for copying trees. The copy methods avoid creating a new tree + * If all arguments stay the same. + * + * Note: Some of the copy methods take a context. 
+ * These are exactly those methods that are overridden in TypedTreeCopier + * so that they selectively retype themselves. Retyping needs a context. + */ + abstract class TreeCopier { + protected def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] + protected def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] + + /** Source of the copied tree */ + protected def sourceFile(tree: Tree): SourceFile = tree.source + + protected def finalize(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] = + Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") + postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) + + protected def finalize(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] = + Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") + postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) + + def Ident(tree: Tree)(name: Name)(using Context): Ident = tree match { + case tree: Ident if name == tree.name => tree + case _ => finalize(tree, untpd.Ident(name)(sourceFile(tree))) + } + def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match { + case tree: SelectWithSig => + if ((qualifier eq tree.qualifier) && (name == tree.name)) tree + else finalize(tree, SelectWithSig(qualifier, name, tree.sig)(sourceFile(tree))) + case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree + case _ => finalize(tree, untpd.Select(qualifier, name)(sourceFile(tree))) + } + /** Copy Ident or Select trees */ + def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match { + case Ident(_) => Ident(tree)(name) + case Select(qual, _) => Select(tree)(qual, name) + } + def This(tree: Tree)(qual: untpd.Ident)(using Context): This = tree match { + case tree: This if (qual eq tree.qual) => tree + case _ => finalize(tree, untpd.This(qual)(sourceFile(tree))) + } + def Super(tree: Tree)(qual: Tree, mix: untpd.Ident)(using
Context): Super = tree match { + case tree: Super if (qual eq tree.qual) && (mix eq tree.mix) => tree + case _ => finalize(tree, untpd.Super(qual, mix)(sourceFile(tree))) + } + def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match { + case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.Apply(fun, args)(sourceFile(tree))) + //.ensuring(res => res.uniqueId != 2213, s"source = $tree, ${tree.uniqueId}, ${tree.span}") + } + def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = tree match { + case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.TypeApply(fun, args)(sourceFile(tree))) + } + def Literal(tree: Tree)(const: Constant)(using Context): Literal = tree match { + case tree: Literal if const == tree.const => tree + case _ => finalize(tree, untpd.Literal(const)(sourceFile(tree))) + } + def New(tree: Tree)(tpt: Tree)(using Context): New = tree match { + case tree: New if (tpt eq tree.tpt) => tree + case _ => finalize(tree, untpd.New(tpt)(sourceFile(tree))) + } + def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = tree match { + case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree + case tree => finalize(tree, untpd.Typed(expr, tpt)(sourceFile(tree))) + } + def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = tree match { + case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree + case _ => finalize(tree, untpd.NamedArg(name, arg)(sourceFile(tree))) + } + def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = tree match { + case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.Assign(lhs, rhs)(sourceFile(tree))) + } + def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = tree match { + case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => 
tree + case _ => finalize(tree, untpd.Block(stats, expr)(sourceFile(tree))) + } + def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = tree match { + case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree + case tree: InlineIf => finalize(tree, untpd.InlineIf(cond, thenp, elsep)(sourceFile(tree))) + case _ => finalize(tree, untpd.If(cond, thenp, elsep)(sourceFile(tree))) + } + def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = tree match { + case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree + case _ => finalize(tree, untpd.Closure(env, meth, tpt)(sourceFile(tree))) + } + def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = tree match { + case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree + case tree: InlineMatch => finalize(tree, untpd.InlineMatch(selector, cases)(sourceFile(tree))) + case _ => finalize(tree, untpd.Match(selector, cases)(sourceFile(tree))) + } + def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = tree match { + case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.CaseDef(pat, guard, body)(sourceFile(tree))) + } + def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = tree match { + case tree: Labeled if (bind eq tree.bind) && (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.Labeled(bind, expr)(sourceFile(tree))) + } + def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = tree match { + case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree + case _ => finalize(tree, untpd.Return(expr, from)(sourceFile(tree))) + } + def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = tree match { + case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => 
tree + case _ => finalize(tree, untpd.WhileDo(cond, body)(sourceFile(tree))) + } + def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = tree match { + case tree: Try if (expr eq tree.expr) && (cases eq tree.cases) && (finalizer eq tree.finalizer) => tree + case _ => finalize(tree, untpd.Try(expr, cases, finalizer)(sourceFile(tree))) + } + def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = tree match { + case tree: JavaSeqLiteral => + if ((elems eq tree.elems) && (elemtpt eq tree.elemtpt)) tree + else finalize(tree, untpd.JavaSeqLiteral(elems, elemtpt)) + case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree + case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt)(sourceFile(tree))) + } + def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match { + case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree + case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) + } + def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { + case tree: SingletonTypeTree if (ref eq tree.ref) => tree + case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) + } + def RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree])(using Context): RefinedTypeTree = tree match { + case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree + case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements)(sourceFile(tree))) + } + def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree])(using Context): AppliedTypeTree = tree match { + case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args)(sourceFile(tree))) + } + def LambdaTypeTree(tree: Tree)(tparams: 
List[TypeDef], body: Tree)(using Context): LambdaTypeTree = tree match { + case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)(sourceFile(tree))) + } + def TermLambdaTypeTree(tree: Tree)(params: List[ValDef], body: Tree)(using Context): TermLambdaTypeTree = tree match { + case tree: TermLambdaTypeTree if (params eq tree.params) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.TermLambdaTypeTree(params, body)(sourceFile(tree))) + } + def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = tree match { + case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree + case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)(sourceFile(tree))) + } + def ByNameTypeTree(tree: Tree)(result: Tree)(using Context): ByNameTypeTree = tree match { + case tree: ByNameTypeTree if (result eq tree.result) => tree + case _ => finalize(tree, untpd.ByNameTypeTree(result)(sourceFile(tree))) + } + def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree match { + case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) && (alias eq tree.alias) => tree + case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi, alias)(sourceFile(tree))) + } + def Bind(tree: Tree)(name: Name, body: Tree)(using Context): Bind = tree match { + case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.Bind(name, body)(sourceFile(tree))) + } + def Alternative(tree: Tree)(trees: List[Tree])(using Context): Alternative = tree match { + case tree: Alternative if (trees eq tree.trees) => tree + case _ => finalize(tree, untpd.Alternative(trees)(sourceFile(tree))) + } + def UnApply(tree: Tree)(fun: Tree, implicits: List[Tree], patterns: List[Tree])(using Context): UnApply = tree match { + case 
tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree + case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns)(sourceFile(tree))) + } + def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree)(using Context): ValDef = tree match { + case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree + case _ => finalize(tree, untpd.ValDef(name, tpt, rhs)(sourceFile(tree))) + } + def DefDef(tree: Tree)(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(using Context): DefDef = tree match { + case tree: DefDef if (name == tree.name) && (paramss eq tree.paramss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree + case _ => finalize(tree, untpd.DefDef(name, paramss, tpt, rhs)(sourceFile(tree))) + } + def TypeDef(tree: Tree)(name: TypeName, rhs: Tree)(using Context): TypeDef = tree match { + case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.TypeDef(name, rhs)(sourceFile(tree))) + } + def Template(tree: Tree)(constr: DefDef, parents: List[Tree], derived: List[untpd.Tree], self: ValDef, body: LazyTreeList)(using Context): Template = tree match { + case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (derived eq tree.derived) && (self eq tree.self) && (body eq tree.unforcedBody) => tree + case tree => finalize(tree, untpd.Template(constr, parents, derived, self, body)(sourceFile(tree))) + } + def Import(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = tree match { + case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree + case _ => finalize(tree, untpd.Import(expr, selectors)(sourceFile(tree))) + } + def Export(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = tree match { + case tree: Export if (expr eq tree.expr) && (selectors eq tree.selectors) => tree + case _ 
=> finalize(tree, untpd.Export(expr, selectors)(sourceFile(tree))) + } + def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree])(using Context): PackageDef = tree match { + case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree + case _ => finalize(tree, untpd.PackageDef(pid, stats)(sourceFile(tree))) + } + def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = tree match { + case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree + case _ => finalize(tree, untpd.Annotated(arg, annot)(sourceFile(tree))) + } + def Thicket(tree: Tree)(trees: List[Tree])(using Context): Thicket = tree match { + case tree: Thicket if (trees eq tree.trees) => tree + case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) + } + // Reuse `tree` only if *all* constituents (including `tpt`) are reference-identical; + // otherwise a changed `tpt` would be silently dropped by returning the old tree. + def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = tree match { + case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && tpt.eq(tree.tpt) => tree + case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content, tpt)(sourceFile(tree))) + } + + // Copier methods with default arguments; these demand that the original tree + // is of the same class as the copy. We only include trees with more than 2 elements here.
+ def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = + If(tree: Tree)(cond, thenp, elsep) + def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = + Closure(tree: Tree)(env, meth, tpt) + def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef = + CaseDef(tree: Tree)(pat, guard, body) + def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try = + Try(tree: Tree)(expr, cases, finalizer) + def UnApply(tree: UnApply)(fun: Tree = tree.fun, implicits: List[Tree] = tree.implicits, patterns: List[Tree] = tree.patterns)(using Context): UnApply = + UnApply(tree: Tree)(fun, implicits, patterns) + def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): ValDef = + ValDef(tree: Tree)(name, tpt, rhs) + def DefDef(tree: DefDef)(name: TermName = tree.name, paramss: List[ParamClause] = tree.paramss, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): DefDef = + DefDef(tree: Tree)(name, paramss, tpt, rhs) + def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs)(using Context): TypeDef = + TypeDef(tree: Tree)(name, rhs) + def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody)(using Context): Template = + Template(tree: Tree)(constr, parents, derived, self, body) + def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole = + Hole(tree: Tree)(isTerm, idx, args, content, tpt) + + } + + /** Hook to indicate that a transform of some subtree should 
be skipped */ + protected def skipTransform(tree: Tree)(using Context): Boolean = false + + /** For untyped trees, this is just the identity. + * For typed trees, a context derived form `ctx` that records `call` as the + * innermost enclosing call for which the inlined version is currently + * processed. + */ + protected def inlineContext(call: Tree)(using Context): Context = ctx + + /** The context to use when mapping or accumulating over a tree */ + def localCtx(tree: Tree)(using Context): Context + + /** The context to use when transforming a tree. + * It ensures that the source is correct, and that the local context is used if + * that's necessary for transforming the whole tree. + * TODO: ensure transform is always called with the correct context as argument + * @see https://github.com/lampepfl/dotty/pull/13880#discussion_r836395977 + */ + def transformCtx(tree: Tree)(using Context): Context = + val sourced = + if tree.source.exists && tree.source != ctx.source + then ctx.withSource(tree.source) + else ctx + tree match + case t: (MemberDef | PackageDef | LambdaTypeTree | TermLambdaTypeTree) => + localCtx(t)(using sourced) + case _ => + sourced + + abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self => + def transform(tree: Tree)(using Context): Tree = { + inContext(transformCtx(tree)) { + Stats.record(s"TreeMap.transform/$getClass") + if (skipTransform(tree)) tree + else tree match { + case Ident(name) => + tree + case Select(qualifier, name) => + cpy.Select(tree)(transform(qualifier), name) + case This(qual) => + tree + case Super(qual, mix) => + cpy.Super(tree)(transform(qual), mix) + case Apply(fun, args) => + cpy.Apply(tree)(transform(fun), transform(args)) + case TypeApply(fun, args) => + cpy.TypeApply(tree)(transform(fun), transform(args)) + case Literal(const) => + tree + case New(tpt) => + cpy.New(tree)(transform(tpt)) + case Typed(expr, tpt) => + cpy.Typed(tree)(transform(expr), transform(tpt)) + case NamedArg(name, arg) => + 
cpy.NamedArg(tree)(name, transform(arg)) + case Assign(lhs, rhs) => + cpy.Assign(tree)(transform(lhs), transform(rhs)) + case blk: Block => + transformBlock(blk) + case If(cond, thenp, elsep) => + cpy.If(tree)(transform(cond), transform(thenp), transform(elsep)) + case Closure(env, meth, tpt) => + cpy.Closure(tree)(transform(env), transform(meth), transform(tpt)) + case Match(selector, cases) => + cpy.Match(tree)(transform(selector), transformSub(cases)) + case CaseDef(pat, guard, body) => + cpy.CaseDef(tree)(transform(pat), transform(guard), transform(body)) + case Labeled(bind, expr) => + cpy.Labeled(tree)(transformSub(bind), transform(expr)) + case Return(expr, from) => + cpy.Return(tree)(transform(expr), transformSub(from)) + case WhileDo(cond, body) => + cpy.WhileDo(tree)(transform(cond), transform(body)) + case Try(block, cases, finalizer) => + cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer)) + case SeqLiteral(elems, elemtpt) => + cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt)) + case Inlined(call, bindings, expansion) => + cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion)(using inlineContext(call))) + case TypeTree() => + tree + case SingletonTypeTree(ref) => + cpy.SingletonTypeTree(tree)(transform(ref)) + case RefinedTypeTree(tpt, refinements) => + cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements)) + case AppliedTypeTree(tpt, args) => + cpy.AppliedTypeTree(tree)(transform(tpt), transform(args)) + case LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(tree)(transformSub(tparams), transform(body)) + case TermLambdaTypeTree(params, body) => + cpy.TermLambdaTypeTree(tree)(transformSub(params), transform(body)) + case MatchTypeTree(bound, selector, cases) => + cpy.MatchTypeTree(tree)(transform(bound), transform(selector), transformSub(cases)) + case ByNameTypeTree(result) => + cpy.ByNameTypeTree(tree)(transform(result)) + case TypeBoundsTree(lo, hi, alias) => + 
cpy.TypeBoundsTree(tree)(transform(lo), transform(hi), transform(alias)) + case Bind(name, body) => + cpy.Bind(tree)(name, transform(body)) + case Alternative(trees) => + cpy.Alternative(tree)(transform(trees)) + case UnApply(fun, implicits, patterns) => + cpy.UnApply(tree)(transform(fun), transform(implicits), transform(patterns)) + case EmptyValDef => + tree + case tree @ ValDef(name, tpt, _) => + val tpt1 = transform(tpt) + val rhs1 = transform(tree.rhs) + cpy.ValDef(tree)(name, tpt1, rhs1) + case tree @ DefDef(name, paramss, tpt, _) => + cpy.DefDef(tree)(name, transformParamss(paramss), transform(tpt), transform(tree.rhs)) + case tree @ TypeDef(name, rhs) => + cpy.TypeDef(tree)(name, transform(rhs)) + case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => + cpy.Template(tree)(transformSub(constr), transform(tree.parents), Nil, transformSub(self), transformStats(tree.body, tree.symbol)) + case Import(expr, selectors) => + cpy.Import(tree)(transform(expr), selectors) + case Export(expr, selectors) => + cpy.Export(tree)(transform(expr), selectors) + case PackageDef(pid, stats) => + cpy.PackageDef(tree)(transformSub(pid), transformStats(stats, ctx.owner)) + case Annotated(arg, annot) => + cpy.Annotated(tree)(transform(arg), transform(annot)) + case Thicket(trees) => + val trees1 = transform(trees) + if (trees1 eq trees) tree else Thicket(trees1) + case tree @ Hole(_, _, args, content, tpt) => + cpy.Hole(tree)(args = transform(args), content = transform(content), tpt = transform(tpt)) + case _ => + transformMoreCases(tree) + } + } + } + + def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = + transform(trees) + def transformBlock(blk: Block)(using Context): Block = + cpy.Block(blk)(transformStats(blk.stats, ctx.owner), transform(blk.expr)) + def transform(trees: List[Tree])(using Context): List[Tree] = + flatten(trees mapConserve (transform(_))) + def transformSub[Tr <: Tree](tree: Tr)(using Context): Tr = + 
transform(tree).asInstanceOf[Tr] + def transformSub[Tr <: Tree](trees: List[Tr])(using Context): List[Tr] = + transform(trees).asInstanceOf[List[Tr]] + def transformParams(params: ParamClause)(using Context): ParamClause = + transform(params).asInstanceOf[ParamClause] + def transformParamss(paramss: List[ParamClause])(using Context): List[ParamClause] = + paramss.mapConserve(transformParams) + + protected def transformMoreCases(tree: Tree)(using Context): Tree = { + assert(ctx.reporter.errorsReported) + tree + } + } + + abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.*) => + // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. + def apply(x: X, tree: Tree)(using Context): X + + def apply(x: X, trees: List[Tree])(using Context): X = + def fold(x: X, trees: List[Tree]): X = trees match + case tree :: rest => fold(apply(x, tree), rest) + case Nil => x + fold(x, trees) + + def foldOver(x: X, tree: Tree)(using Context): X = + if (tree.source != ctx.source && tree.source.exists) + foldOver(x, tree)(using ctx.withSource(tree.source)) + else { + Stats.record(s"TreeAccumulator.foldOver/$getClass") + tree match { + case Ident(name) => + x + case Select(qualifier, name) => + this(x, qualifier) + case This(qual) => + x + case Super(qual, mix) => + this(x, qual) + case Apply(fun, args) => + this(this(x, fun), args) + case TypeApply(fun, args) => + this(this(x, fun), args) + case Literal(const) => + x + case New(tpt) => + this(x, tpt) + case Typed(expr, tpt) => + this(this(x, expr), tpt) + case NamedArg(name, arg) => + this(x, arg) + case Assign(lhs, rhs) => + this(this(x, lhs), rhs) + case Block(stats, expr) => + this(this(x, stats), expr) + case If(cond, thenp, elsep) => + this(this(this(x, cond), thenp), elsep) + case Closure(env, meth, tpt) => + this(this(this(x, env), meth), tpt) + case Match(selector, cases) => + this(this(x, selector), cases) + case CaseDef(pat, guard, body) => + this(this(this(x, pat), 
guard), body) + case Labeled(bind, expr) => + this(this(x, bind), expr) + case Return(expr, from) => + this(this(x, expr), from) + case WhileDo(cond, body) => + this(this(x, cond), body) + case Try(block, handler, finalizer) => + this(this(this(x, block), handler), finalizer) + case SeqLiteral(elems, elemtpt) => + this(this(x, elems), elemtpt) + case Inlined(call, bindings, expansion) => + this(this(x, bindings), expansion)(using inlineContext(call)) + case TypeTree() => + x + case SingletonTypeTree(ref) => + this(x, ref) + case RefinedTypeTree(tpt, refinements) => + this(this(x, tpt), refinements) + case AppliedTypeTree(tpt, args) => + this(this(x, tpt), args) + case LambdaTypeTree(tparams, body) => + inContext(localCtx(tree)) { + this(this(x, tparams), body) + } + case TermLambdaTypeTree(params, body) => + inContext(localCtx(tree)) { + this(this(x, params), body) + } + case MatchTypeTree(bound, selector, cases) => + this(this(this(x, bound), selector), cases) + case ByNameTypeTree(result) => + this(x, result) + case TypeBoundsTree(lo, hi, alias) => + this(this(this(x, lo), hi), alias) + case Bind(name, body) => + this(x, body) + case Alternative(trees) => + this(x, trees) + case UnApply(fun, implicits, patterns) => + this(this(this(x, fun), implicits), patterns) + case tree @ ValDef(_, tpt, _) => + inContext(localCtx(tree)) { + this(this(x, tpt), tree.rhs) + } + case tree @ DefDef(_, paramss, tpt, _) => + inContext(localCtx(tree)) { + this(this(paramss.foldLeft(x)(apply), tpt), tree.rhs) + } + case TypeDef(_, rhs) => + inContext(localCtx(tree)) { + this(x, rhs) + } + case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => + this(this(this(this(x, constr), parents), self), tree.body) + case Import(expr, _) => + this(x, expr) + case Export(expr, _) => + this(x, expr) + case PackageDef(pid, stats) => + this(this(x, pid), stats)(using localCtx(tree)) + case Annotated(arg, annot) => + this(this(x, arg), annot) + case Thicket(ts) => + this(x, ts) + 
case Hole(_, _, args, content, tpt) => + this(this(this(x, args), content), tpt) + case _ => + foldMoreCases(x, tree) + } + } + + def foldMoreCases(x: X, tree: Tree)(using Context): X = { + assert(ctx.reporter.hasUnreportedErrors + || ctx.reporter.errorsReported + || ctx.mode.is(Mode.Interactive), tree) + // In interactive mode, errors might come from previous runs. + // In case of errors it may be that typed trees point to untyped ones. + // The IDE can still traverse inside such trees, either in the run where errors + // are reported, or in subsequent ones. + x + } + } + + abstract class TreeTraverser extends TreeAccumulator[Unit] { + def traverse(tree: Tree)(using Context): Unit + def traverse(trees: List[Tree])(using Context) = apply((), trees) + def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) + protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order */ + class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit + * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`. 
+ */ + class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = { + val x1 = f(x, tree) + if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1 + else foldOver(x1, tree) + } + } + + def rename(tree: NameTree, newName: Name)(using Context): tree.ThisTree[T] = { + tree match { + case tree: Ident => cpy.Ident(tree)(newName) + case tree: Select => cpy.Select(tree)(tree.qualifier, newName) + case tree: Bind => cpy.Bind(tree)(newName, tree.body) + case tree: ValDef => cpy.ValDef(tree)(name = newName.asTermName) + case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName) + case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName) + } + }.asInstanceOf[tree.ThisTree[T]] + + object TypeDefs: + def unapply(xs: List[Tree]): Option[List[TypeDef]] = xs match + case (x: TypeDef) :: _ => Some(xs.asInstanceOf[List[TypeDef]]) + case _ => None + + object ValDefs: + def unapply(xs: List[Tree]): Option[List[ValDef]] = xs match + case Nil => Some(Nil) + case (x: ValDef) :: _ => Some(xs.asInstanceOf[List[ValDef]]) + case _ => None + + def termParamssIn(paramss: List[ParamClause]): List[List[ValDef]] = paramss match + case ValDefs(vparams) :: paramss1 => + val paramss2 = termParamssIn(paramss1) + if paramss2 eq paramss1 then paramss.asInstanceOf[List[List[ValDef]]] + else vparams :: paramss2 + case _ :: paramss1 => + termParamssIn(paramss1) + case nil => + Nil + + /** If `tparams` is non-empty, add it to the left `paramss`, merging + * it with a leading type parameter list of `paramss`, if one exists. 
+ */ + def joinParams(tparams: List[TypeDef], paramss: List[ParamClause]): List[ParamClause] = + if tparams.isEmpty then paramss + else paramss match + case TypeDefs(tparams1) :: paramss1 => (tparams ++ tparams1) :: paramss1 + case _ => tparams :: paramss + + def isTermOnly(paramss: List[ParamClause]): Boolean = paramss match + case Nil => true + case params :: paramss1 => + params match + case (param: untpd.TypeDef) :: _ => false + case _ => isTermOnly(paramss1) + + def asTermOnly(paramss: List[ParamClause]): List[List[ValDef]] = + assert(isTermOnly(paramss)) + paramss.asInstanceOf[List[List[ValDef]]] + + /** Delegate to FunProto or FunProtoTyped depending on whether the prefix is `untpd` or `tpd`. */ + protected def FunProto(args: List[Tree], resType: Type)(using Context): ProtoTypes.FunProto + + /** Construct the application `$receiver.$method[$targs]($args)` using overloading resolution + * to find a matching overload of `$method` if necessary. + * This is useful when overloading resolution needs to be performed in a phase after typer. + * Note that this will not perform any kind of implicit search. + * + * @param expectedType An expected type of the application used to guide overloading resolution + */ + def applyOverloaded( + receiver: tpd.Tree, method: TermName, args: List[Tree], targs: List[Type], + expectedType: Type)(using parentCtx: Context): tpd.Tree = { + given ctx: Context = parentCtx.retractMode(Mode.ImplicitsEnabled) + import dotty.tools.dotc.ast.tpd.TreeOps + + val typer = ctx.typer + val proto = FunProto(args, expectedType) + val denot = receiver.tpe.member(method) + if !denot.exists then + overload.println(i"members = ${receiver.tpe.decls}") + report.error(i"no member $receiver . 
$method", receiver.srcPos) + val selected = + if (denot.isOverloaded) { + def typeParamCount(tp: Type) = tp.widen match { + case tp: PolyType => tp.paramInfos.length + case _ => 0 + } + val allAlts = denot.alternatives + .map(denot => TermRef(receiver.tpe, denot.symbol)) + .filter(tr => typeParamCount(tr) == targs.length) + .filter { _.widen match { + case MethodTpe(_, _, x: MethodType) => !x.isImplicitMethod + case _ => true + }} + val alternatives = ctx.typer.resolveOverloaded(allAlts, proto) + assert(alternatives.size == 1, + i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " + + i"$method on ${receiver.tpe.widenDealiasKeepAnnots} with targs: $targs%, %; args: $args%, %; expectedType: $expectedType." + + i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" + + i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. there's nothing user can do about it + alternatives.head + } + else TermRef(receiver.tpe, denot.symbol) + val fun = receiver.select(selected).appliedToTypes(targs) + + val apply = untpd.Apply(fun, args) + typer.ApplyTo(apply, fun, selected, proto, expectedType) + } + + + def resolveConstructor(atp: Type, args: List[Tree])(using Context): tpd.Tree = { + val targs = atp.argTypes + withoutMode(Mode.PatternOrTypeBits) { + applyOverloaded(tpd.New(atp.typeConstructor), nme.CONSTRUCTOR, args, targs, atp) + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala new file mode 100644 index 000000000000..1f43daec4d37 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala @@ -0,0 +1,1546 @@ +package dotty.tools +package dotc +package ast + +import dotty.tools.dotc.transform.{ExplicitOuter, Erasure} +import typer.ProtoTypes +import transform.SymUtils._ +import transform.TypeUtils._ +import core._ +import Scopes.newScope +import util.Spans._, Types._, Contexts._, Constants._, Names._, 
Flags._, NameOps._ +import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ +import Decorators._, DenotTransformers._ +import collection.{immutable, mutable} +import util.{Property, SourceFile} +import NameKinds.{TempResultName, OuterSelectName} +import typer.ConstFold + +import scala.annotation.tailrec +import scala.collection.mutable.ListBuffer +import language.experimental.pureFunctions + +/** Some creators for typed trees */ +object tpd extends Trees.Instance[Type] with TypedTreeInfo { + + private def ta(using Context) = ctx.typeAssigner + + def Ident(tp: NamedType)(using Context): Ident = + ta.assignType(untpd.Ident(tp.name), tp) + + def Select(qualifier: Tree, name: Name)(using Context): Select = + ta.assignType(untpd.Select(qualifier, name), qualifier) + + def Select(qualifier: Tree, tp: NamedType)(using Context): Select = + untpd.Select(qualifier, tp.name).withType(tp) + + def This(cls: ClassSymbol)(using Context): This = + untpd.This(untpd.Ident(cls.name)).withType(cls.thisType) + + def Super(qual: Tree, mix: untpd.Ident, mixinClass: Symbol)(using Context): Super = + ta.assignType(untpd.Super(qual, mix), qual, mixinClass) + + def Super(qual: Tree, mixName: TypeName, mixinClass: Symbol = NoSymbol)(using Context): Super = + Super(qual, if (mixName.isEmpty) untpd.EmptyTypeIdent else untpd.Ident(mixName), mixinClass) + + def Apply(fn: Tree, args: List[Tree])(using Context): Apply = fn match + case Block(Nil, expr) => + Apply(expr, args) + case _: RefTree | _: GenericApply | _: Inlined | _: Hole => + ta.assignType(untpd.Apply(fn, args), fn, args) + + def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match + case Block(Nil, expr) => + TypeApply(expr, args) + case _: RefTree | _: GenericApply => + ta.assignType(untpd.TypeApply(fn, args), fn, args) + + def Literal(const: Constant)(using Context): Literal = + ta.assignType(untpd.Literal(const)) + + def unitLiteral(using Context): Literal = + Literal(Constant(())) + + def 
nullLiteral(using Context): Literal = + Literal(Constant(null)) + + def New(tpt: Tree)(using Context): New = + ta.assignType(untpd.New(tpt), tpt) + + def New(tp: Type)(using Context): New = New(TypeTree(tp)) + + def Typed(expr: Tree, tpt: Tree)(using Context): Typed = + ta.assignType(untpd.Typed(expr, tpt), tpt) + + def NamedArg(name: Name, arg: Tree)(using Context): NamedArg = + ta.assignType(untpd.NamedArg(name, arg), arg) + + def Assign(lhs: Tree, rhs: Tree)(using Context): Assign = + ta.assignType(untpd.Assign(lhs, rhs)) + + def Block(stats: List[Tree], expr: Tree)(using Context): Block = + ta.assignType(untpd.Block(stats, expr), stats, expr) + + /** Join `stats` in front of `expr` creating a new block if necessary */ + def seq(stats: List[Tree], expr: Tree)(using Context): Tree = + if (stats.isEmpty) expr + else expr match { + case Block(_, _: Closure) => + Block(stats, expr) // leave closures in their own block + case Block(estats, eexpr) => + cpy.Block(expr)(stats ::: estats, eexpr).withType(ta.avoidingType(eexpr, stats)) + case _ => + Block(stats, expr) + } + + def If(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = + ta.assignType(untpd.If(cond, thenp, elsep), thenp, elsep) + + def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = + ta.assignType(untpd.InlineIf(cond, thenp, elsep), thenp, elsep) + + def Closure(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = + ta.assignType(untpd.Closure(env, meth, tpt), meth, tpt) + + /** A function def + * + * vparams => expr + * + * gets expanded to + * + * { def $anonfun(vparams) = expr; Closure($anonfun) } + * + * where the closure's type is the target type of the expression (FunctionN, unless + * otherwise specified). 
+ */ + def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(using Context): Block = { + val targetTpt = if (targetType.exists) TypeTree(targetType) else EmptyTree + val call = + if (targs.isEmpty) Ident(TermRef(NoPrefix, meth)) + else TypeApply(Ident(TermRef(NoPrefix, meth)), targs) + Block( + DefDef(meth, rhsFn) :: Nil, + Closure(Nil, call, targetTpt)) + } + + /** A closure whose anonymous function has the given method type */ + def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = { + val meth = newAnonFun(ctx.owner, tpe) + Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) + } + + def CaseDef(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = + ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) + + def Match(selector: Tree, cases: List[CaseDef])(using Context): Match = + ta.assignType(untpd.Match(selector, cases), selector, cases) + + def InlineMatch(selector: Tree, cases: List[CaseDef])(using Context): Match = + ta.assignType(untpd.InlineMatch(selector, cases), selector, cases) + + def Labeled(bind: Bind, expr: Tree)(using Context): Labeled = + ta.assignType(untpd.Labeled(bind, expr)) + + def Labeled(sym: TermSymbol, expr: Tree)(using Context): Labeled = + Labeled(Bind(sym, EmptyTree), expr) + + def Return(expr: Tree, from: Tree)(using Context): Return = + ta.assignType(untpd.Return(expr, from)) + + def Return(expr: Tree, from: Symbol)(using Context): Return = + Return(expr, Ident(from.termRef)) + + def WhileDo(cond: Tree, body: Tree)(using Context): WhileDo = + ta.assignType(untpd.WhileDo(cond, body)) + + def Try(block: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = + ta.assignType(untpd.Try(block, cases, finalizer), block, cases) + + def SeqLiteral(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = + ta.assignType(untpd.SeqLiteral(elems, elemtpt), elems, elemtpt) + + def JavaSeqLiteral(elems: List[Tree], 
elemtpt: Tree)(using Context): JavaSeqLiteral = + ta.assignType(untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt).asInstanceOf[JavaSeqLiteral] + + def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = + ta.assignType(untpd.Inlined(call, bindings, expansion), bindings, expansion) + + def TypeTree(tp: Type, inferred: Boolean = false)(using Context): TypeTree = + (if inferred then untpd.InferredTypeTree() else untpd.TypeTree()).withType(tp) + + def SingletonTypeTree(ref: Tree)(using Context): SingletonTypeTree = + ta.assignType(untpd.SingletonTypeTree(ref), ref) + + def RefinedTypeTree(parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(using Context): Tree = + ta.assignType(untpd.RefinedTypeTree(parent, refinements), parent, refinements, refineCls) + + def AppliedTypeTree(tycon: Tree, args: List[Tree])(using Context): AppliedTypeTree = + ta.assignType(untpd.AppliedTypeTree(tycon, args), tycon, args) + + def ByNameTypeTree(result: Tree)(using Context): ByNameTypeTree = + ta.assignType(untpd.ByNameTypeTree(result), result) + + def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = + ta.assignType(untpd.LambdaTypeTree(tparams, body), tparams, body) + + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = + ta.assignType(untpd.MatchTypeTree(bound, selector, cases), bound, selector, cases) + + def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(using Context): TypeBoundsTree = + ta.assignType(untpd.TypeBoundsTree(lo, hi, alias), lo, hi, alias) + + def Bind(sym: Symbol, body: Tree)(using Context): Bind = + ta.assignType(untpd.Bind(sym.name, body), sym) + + /** A pattern corresponding to `sym: tpe` */ + def BindTyped(sym: TermSymbol, tpe: Type)(using Context): Bind = + Bind(sym, Typed(Underscore(tpe), TypeTree(tpe))) + + def Alternative(trees: List[Tree])(using Context): Alternative = + ta.assignType(untpd.Alternative(trees), 
trees) + + def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(using Context): UnApply = { + assert(fun.isInstanceOf[RefTree] || fun.isInstanceOf[GenericApply]) + ta.assignType(untpd.UnApply(fun, implicits, patterns), proto) + } + + def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree, inferred: Boolean = false)(using Context): ValDef = + ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info, inferred), rhs), sym) + + def SyntheticValDef(name: TermName, rhs: Tree, flags: FlagSet = EmptyFlags)(using Context): ValDef = + ValDef(newSymbol(ctx.owner, name, Synthetic | flags, rhs.tpe.widen, coord = rhs.span), rhs) + + def DefDef(sym: TermSymbol, paramss: List[List[Symbol]], + resultType: Type, rhs: Tree)(using Context): DefDef = + sym.setParamss(paramss) + ta.assignType( + untpd.DefDef( + sym.name, + paramss.map { + case TypeSymbols(params) => params.map(param => TypeDef(param).withSpan(param.span)) + case TermSymbols(params) => params.map(param => ValDef(param).withSpan(param.span)) + case _ => unreachable() + }, + TypeTree(resultType), + rhs), + sym) + + def DefDef(sym: TermSymbol, rhs: Tree = EmptyTree)(using Context): DefDef = + ta.assignType(DefDef(sym, Function.const(rhs) _), sym) + + /** A DefDef with given method symbol `sym`. + * @rhsFn A function from parameter references + * to the method's right-hand side. + * Parameter symbols are taken from the `rawParamss` field of `sym`, or + * are freshly generated if `rawParamss` is empty. 
   */
  def DefDef(sym: TermSymbol, rhsFn: List[List[Tree]] => Tree)(using Context): DefDef =

    // Map method type `tp` with remaining parameters stored in rawParamss to
    // final result type and all (given or synthesized) parameters
    def recur(tp: Type, remaining: List[List[Symbol]]): (Type, List[List[Symbol]]) = tp match
      case tp: PolyType =>
        // Reuse the next raw parameter section if present; otherwise synthesize fresh type params.
        val (tparams: List[TypeSymbol], remaining1) = remaining match
          case tparams :: remaining1 =>
            assert(tparams.hasSameLengthAs(tp.paramNames) && tparams.head.isType)
            (tparams.asInstanceOf[List[TypeSymbol]], remaining1)
          case nil =>
            (newTypeParams(sym, tp.paramNames, EmptyFlags, tp.instantiateParamInfos(_)), Nil)
        val (rtp, paramss) = recur(tp.instantiate(tparams.map(_.typeRef)), remaining1)
        (rtp, tparams :: paramss)
      case tp: MethodType =>
        val isParamDependent = tp.isParamDependent
        val previousParamRefs: ListBuffer[TermRef] =
          // It is ok to assign `null` here.
          // If `isParamDependent == false`, the value of `previousParamRefs` is not used.
          if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN

        // Synthesize a fresh value-parameter symbol for `name: origInfo`,
        // substituting earlier param refs when the method type is param-dependent.
        def valueParam(name: TermName, origInfo: Type): TermSymbol =
          val maybeImplicit =
            if tp.isContextualMethod then Given
            else if tp.isImplicitMethod then Implicit
            else EmptyFlags
          val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags

          def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord)

          if isParamDependent then
            val sym = makeSym(origInfo.substParams(tp, previousParamRefs.toList))
            previousParamRefs += sym.termRef
            sym
          else makeSym(origInfo)
        end valueParam

        val (vparams: List[TermSymbol], remaining1) =
          if tp.paramNames.isEmpty then (Nil, remaining)
          else remaining match
            case vparams :: remaining1 =>
              assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm)
              (vparams.asInstanceOf[List[TermSymbol]], remaining1)
            case nil =>
              (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil)
        val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1)
        (rtp, vparams :: paramss)
      case _ =>
        // Not a method/poly type: we are done; widen expression types to get the result type.
        assert(remaining.isEmpty)
        (tp.widenExpr, Nil)
    end recur

    val (rtp, paramss) = recur(sym.info, sym.rawParamss)
    DefDef(sym, paramss, rtp, rhsFn(paramss.nestedMap(ref)))
  end DefDef

  /** A type definition tree for `sym`, with `sym.info` as its right-hand side */
  def TypeDef(sym: TypeSymbol)(using Context): TypeDef =
    ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym)

  /** A class definition tree for `cls` with the given primary constructor tree,
   *  body statements, and super-constructor arguments. For a non-trait class,
   *  picks the constructor of the first parent that is applicable to `superArgs`.
   */
  def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = {
    val firstParent :: otherParents = cls.info.parents: @unchecked
    val superRef =
      if (cls.is(Trait)) TypeTree(firstParent)
      else {
        // Pick a constructor of the first parent that matches the given super arguments.
        def isApplicable(ctpe: Type): Boolean = ctpe match {
          case ctpe: PolyType =>
            isApplicable(ctpe.instantiate(firstParent.argTypes))
          case ctpe: MethodType =>
            (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _)
          case _ =>
            false
        }
        val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info))
        New(firstParent, constr.symbol.asTerm, superArgs)
      }
    ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body)
  }

  /** A class definition tree for `cls` with explicitly given parent trees.
   *  Adds trees for any class type parameters not already present in `body`,
   *  and types the template with the class's local dummy symbol.
   */
  def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = {
    val selfType =
      if (cls.classInfo.selfInfo ne NoType) ValDef(newSelfSym(cls))
      else EmptyValDef
    def isOwnTypeParam(stat: Tree) =
      stat.symbol.is(TypeParam) && stat.symbol.owner == cls
    val bodyTypeParams = body filter isOwnTypeParam map (_.symbol)
    val newTypeParams =
      for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam))
      yield TypeDef(tparam)
    val findLocalDummy = FindLocalDummyAccumulator(cls)
    val localDummy = body.foldLeft(NoSymbol: Symbol)(findLocalDummy.apply)
      .orElse(newLocalDummy(cls))
    val impl = untpd.Template(constr, parents, Nil, selfType, newTypeParams ++ body)
      .withType(localDummy.termRef)
    ta.assignType(untpd.TypeDef(cls.name, impl), cls)
  }

  /** An anonymous class
   *
   *      new parents { forwarders }
   *
   *  where `forwarders` contains forwarders for all functions in `fns`.
   *  @param parents  a non-empty list of class types
   *  @param fns      a non-empty list of functions for which forwarders
   *                  should be defined in the class.
   *  The class has the same owner as the first function in `fns`.
   *  Its position is the union of all functions in `fns`.
   */
  def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = {
    AnonClass(fns.head.owner, parents, fns.map(_.span).reduceLeft(_ union _)) { cls =>
      // A forwarder method in the anonymous class that delegates to `fn` under `name`,
      // copying Extension/Override flags from any overridden symbols.
      def forwarder(fn: TermSymbol, name: TermName) = {
        val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm
        for overridden <- fwdMeth.allOverriddenSymbols do
          if overridden.is(Extension) then fwdMeth.setFlag(Extension)
          if !overridden.is(Deferred) then fwdMeth.setFlag(Override)
        DefDef(fwdMeth, ref(fn).appliedToArgss(_))
      }
      fns.lazyZip(methNames).map(forwarder)
    }
  }

  /** An anonymous class
   *
   *      new parents { body }
   *
   *  with the specified owner and position.
   */
  def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block =
    val parents1 =
      if (parents.head.classSymbol.is(Trait)) {
        // A class needs a class as its first parent; prepend the trait's own first parent
        // (or AnyRef if that is Any).
        val head = parents.head.parents.head
        if (head.isRef(defn.AnyClass)) defn.AnyRefType :: parents else head :: parents
      }
      else parents
    val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord)
    val constr = newConstructor(cls, Synthetic, Nil, Nil).entered
    val cdef = ClassDef(cls, DefDef(constr), body(cls))
    Block(cdef :: Nil, New(cls.typeRef, Nil))

  /** An import tree, with a fresh import symbol owned by the current owner */
  def Import(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import =
    ta.assignType(untpd.Import(expr, selectors), newImportSymbol(ctx.owner, expr))

  /** An export tree */
  def Export(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export =
    ta.assignType(untpd.Export(expr, selectors))

  /** A package definition tree */
  def PackageDef(pid: RefTree, stats: List[Tree])(using Context): PackageDef =
    ta.assignType(untpd.PackageDef(pid, stats), pid)

  /** An annotated tree `arg @annot` */
  def Annotated(arg: Tree, annot: Tree)(using Context): Annotated =
    ta.assignType(untpd.Annotated(arg, annot), arg, annot)

  /** A tree that throws `expr`, encoded as a call to the `throw` method */
  def Throw(expr: Tree)(using Context): Tree =
    ref(defn.throwMethod).appliedTo(expr)

  /** A splice hole tree (used by quotes/macros), typed with `tpt` */
  def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole =
    ta.assignType(untpd.Hole(isTermHole, idx, args, content, tpt), tpt)

  // ------ Making references ------------------------------------------------------

  /** Can a reference to `tp` be emitted without an explicit prefix? */
  def prefixIsElidable(tp: NamedType)(using Context): Boolean = {
    val typeIsElidable = tp.prefix match {
      case pre: ThisType =>
        tp.isType ||
        pre.cls.isStaticOwner ||
        tp.symbol.isParamOrAccessor && !pre.cls.is(Trait) && ctx.owner.enclosingClass == pre.cls
        // was ctx.owner.enclosingClass.derivesFrom(pre.cls) which was not tight enough
        // and was spuriously triggered in case inner class would inherit from outer one
        // eg anonymous TypeMap inside TypeMap.andThen
      case pre: TermRef =>
        pre.symbol.is(Module) && pre.symbol.isStatic
      case pre =>
        pre `eq` NoPrefix
    }
    typeIsElidable ||
    tp.symbol.is(JavaStatic) ||
    tp.symbol.hasAnnotation(defn.ScalaStaticAnnot)
  }

  /** Does a reference to `tp` need an explicit `Select` on its prefix? */
  def needsSelect(tp: Type)(using Context): Boolean = tp match {
    case tp: TermRef => !prefixIsElidable(tp)
    case _ => false
  }

  /** A tree representing the same reference as the given type */
  def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree =
    if (tp.isType) TypeTree(tp)
    else if (prefixIsElidable(tp)) Ident(tp)
    else if (tp.symbol.is(Module) && ctx.owner.isContainedIn(tp.symbol.moduleClass))
      followOuterLinks(This(tp.symbol.moduleClass.asClass))
    else if (tp.symbol hasAnnotation defn.ScalaStaticAnnot)
      Ident(tp)
    else
      val pre = tp.prefix
      if (pre.isSingleton) followOuterLinks(singleton(pre.dealias, needLoad)).select(tp)
      else
        val res = Select(TypeTree(pre), tp)
        if needLoad && !res.symbol.isStatic then
          throw new TypeError(em"cannot establish a reference to $res")
        res

  /** A tree referencing the given symbol, prefixed by its owner's this-type */
  def ref(sym: Symbol)(using Context): Tree =
    ref(NamedType(sym.owner.thisType, sym.name, sym.denot))

  private def followOuterLinks(t: Tree)(using Context) = t match {
    case t: This if ctx.erasedTypes && !(t.symbol == ctx.owner.enclosingClass || t.symbol.isStaticOwner) =>
      // after erasure outer paths should be respected
      ExplicitOuter.OuterOps(ctx).path(toCls = t.tpe.classSymbol)
    case t =>
      t
  }

  /** A tree for a reference to the singleton type `tp` */
  def singleton(tp: Type, needLoad: Boolean = true)(using Context): Tree = tp.dealias match {
    case tp: TermRef => ref(tp, needLoad)
    case tp: ThisType => This(tp.cls)
    case tp: SkolemType => singleton(tp.narrow, needLoad)
    case SuperType(qual, _) => singleton(qual, needLoad)
    case ConstantType(value) => Literal(value)
  }

  /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement
   *  of an addressable singleton type.
   */
  def pathFor(tp: Type)(using Context): Tree = {
    def recur(tp: Type): Tree = tp match {
      case tp: NamedType =>
        tp.info match {
          case TypeAlias(alias) => recur(alias)
          case _: TypeBounds => EmptyTree
          case _ => singleton(tp)
        }
      case tp: TypeProxy => recur(tp.superType)
      case _ => EmptyTree
    }
    recur(tp).orElse {
      report.error(em"$tp is not an addressable singleton type")
      TypeTree(tp)
    }
  }

  /** A tree representing a `newXYZArray` operation of the right
   *  kind for the given element type in `elemTpe`. No type arguments or
   *  `length` arguments are given.
   */
  def newArray(elemTpe: Type, returnTpe: Type, span: Span, dims: JavaSeqLiteral)(using Context): Tree = {
    val elemClass = elemTpe.classSymbol
    def newArr =
      ref(defn.DottyArraysModule).select(defn.newArrayMethod).withSpan(span)

    if (!ctx.erasedTypes) {
      assert(!TypeErasure.isGeneric(elemTpe), elemTpe) //needs to be done during typer.
      // See Applications.convertNewGenericArray
      newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span)
    }
    else // after erasure
      newArr.appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span)
  }

  /** The wrapped array method name for an array of type elemtp */
  def wrapArrayMethodName(elemtp: Type)(using Context): TermName = {
    val elemCls = elemtp.classSymbol
    if (elemCls.isPrimitiveValueClass) nme.wrapXArray(elemCls.name)
    else if (elemCls.derivesFrom(defn.ObjectClass) && !elemCls.isNotRuntimeClass) nme.wrapRefArray
    else nme.genericWrapArray
  }

  /** A tree representing a `wrapXYZArray(tree)` operation of the right
   *  kind for the given element type in `elemTpe`.
   */
  def wrapArray(tree: Tree, elemtp: Type)(using Context): Tree =
    val wrapper = ref(defn.getWrapVarargsArrayModule)
      .select(wrapArrayMethodName(elemtp))
      .appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil)
    val actualElem = wrapper.tpe.widen.firstParamTypes.head
    wrapper.appliedTo(tree.ensureConforms(actualElem))

  // ------ Creating typed equivalents of trees that exist only in untyped form -------

  /** new C(args), calling the primary constructor of C */
  def New(tp: Type, args: List[Tree])(using Context): Apply =
    New(tp, tp.dealias.typeSymbol.primaryConstructor.asTerm, args)

  /** new C(args), calling given constructor `constr` of C */
  def New(tp: Type, constr: TermSymbol, args: List[Tree])(using Context): Apply = {
    val targs = tp.argTypes
    val tycon = tp.typeConstructor
    New(tycon)
      .select(TermRef(tycon, constr))
      .appliedToTypes(targs)
      .appliedToTermArgs(args)
  }

  /** An object def
   *
   *     object obj extends parents { decls }
   *
   *  gets expanded to
   *
   *     val obj = new obj$
   *     class obj$ extends parents { this: obj.type => decls }
   *
   *  (The following no longer applies:
   *  What's interesting here is that the block is well typed
   *  (because class obj$ is hoistable), but the type of the `obj` val is
   *  not expressible. What needs to happen in general when
   *  inferring the type of a val from its RHS, is: if the type contains
   *  a class that has the val itself as owner, then that class
   *  is remapped to have the val's owner as owner. Remapping could be
   *  done by cloning the class with the new owner and substituting
   *  everywhere in the tree. We know that remapping is safe
   *  because the only way a local class can appear in the RHS of a val is
   *  by being hoisted outside of a block, and the necessary checks are
   *  done at this point already.
   *
   *  On the other hand, for method result type inference, if the type of
   *  the RHS of a method contains a class owned by the method, this would be
   *  an error.)
   */
  def ModuleDef(sym: TermSymbol, body: List[Tree])(using Context): tpd.Thicket = {
    val modcls = sym.moduleClass.asClass
    val constrSym = modcls.primaryConstructor orElse newDefaultConstructor(modcls).entered
    val constr = DefDef(constrSym.asTerm, EmptyTree)
    val clsdef = ClassDef(modcls, constr, body)
    val valdef = ValDef(sym, New(modcls.typeRef).select(constrSym).appliedToNone)
    Thicket(valdef, clsdef)
  }

  /** A `_` with given type */
  def Underscore(tp: Type)(using Context): Ident = untpd.Ident(nme.WILDCARD).withType(tp)

  /** The default ("zero") value of (the widened form of) `tpe`: a zero literal for
   *  numeric value classes, `false` for Boolean, `'\u0000'` for Char, and a
   *  `null` cast to `tpe` otherwise.
   */
  def defaultValue(tpe: Type)(using Context): Tree = {
    val tpw = tpe.widen

    if (tpw isRef defn.IntClass) Literal(Constant(0))
    else if (tpw isRef defn.LongClass) Literal(Constant(0L))
    else if (tpw isRef defn.BooleanClass) Literal(Constant(false))
    else if (tpw isRef defn.CharClass) Literal(Constant('\u0000'))
    else if (tpw isRef defn.FloatClass) Literal(Constant(0f))
    else if (tpw isRef defn.DoubleClass) Literal(Constant(0d))
    else if (tpw isRef defn.ByteClass) Literal(Constant(0.toByte))
    else if (tpw isRef defn.ShortClass) Literal(Constant(0.toShort))
    else nullLiteral.select(defn.Any_asInstanceOf).appliedToType(tpe)
  }

  /** Accumulator that finds the local dummy symbol of class `cls` among the
   *  owners of the definitions in a template body.
   */
  private class FindLocalDummyAccumulator(cls: ClassSymbol)(using Context) extends TreeAccumulator[Symbol] {
    def apply(sym: Symbol, tree: Tree)(using Context) =
      if (sym.exists) sym
      else if (tree.isDef) {
        val owner = tree.symbol.owner
        if (owner.isLocalDummy && owner.owner == cls) owner
        else if (owner == cls) foldOver(sym, tree)
        else sym
      }
      else foldOver(sym, tree)
  }

  /** The owner to be used in a local context when traversing a tree */
  def localOwner(tree: Tree)(using Context): Symbol =
    val sym = tree.symbol
    (if sym.is(PackageVal) then sym.moduleClass else sym).orElse(ctx.owner)

  /** The local context to use when traversing trees */
  def localCtx(tree: Tree)(using Context): Context = ctx.withOwner(localOwner(tree))

  override val cpy: TypedTreeCopier = // Type ascription needed to pick up any new members in TreeCopier (currently there are none)
    TypedTreeCopier()

  val cpyBetweenPhases: TimeTravellingTreeCopier = TimeTravellingTreeCopier()

  /** A tree copier for typed trees that preserves or recomputes types of the
   *  copied nodes, reusing the original type when the constituent types are unchanged.
   */
  class TypedTreeCopier extends TreeCopier {
    def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] =
      copied.withTypeUnchecked(tree.tpe)
    def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[Type] =
      copied.withTypeUnchecked(tree.tpe)

    protected val untpdCpy = untpd.cpy

    override def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = {
      val tree1 = untpdCpy.Select(tree)(qualifier, name)
      tree match {
        case tree: Select if qualifier.tpe eq tree.qualifier.tpe =>
          tree1.withTypeUnchecked(tree.tpe)
        case _ =>
          val tree2: Select = tree.tpe match {
            case tpe: NamedType =>
              val qualType = qualifier.tpe.widenIfUnstable
              if qualType.isExactlyNothing then tree1.withTypeUnchecked(tree.tpe)
              else tree1.withType(tpe.derivedSelect(qualType))
            case _ => tree1.withTypeUnchecked(tree.tpe)
          }
          ConstFold.Select(tree2)
      }
    }

    override def Apply(tree: Tree)(fun:
        Tree, args: List[Tree])(using Context): Apply = {
      val tree1 = untpdCpy.Apply(tree)(fun, args)
      tree match {
        case tree: Apply
        if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) =>
          tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, fun, args)
      }
    }

    override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = {
      val tree1 = untpdCpy.TypeApply(tree)(fun, args)
      tree match {
        case tree: TypeApply
        if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) =>
          tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, fun, args)
      }
    }

    override def Literal(tree: Tree)(const: Constant)(using Context): Literal =
      ta.assignType(untpdCpy.Literal(tree)(const))

    override def New(tree: Tree)(tpt: Tree)(using Context): New =
      ta.assignType(untpdCpy.New(tree)(tpt), tpt)

    override def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed =
      ta.assignType(untpdCpy.Typed(tree)(expr, tpt), tpt)

    override def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg =
      ta.assignType(untpdCpy.NamedArg(tree)(name, arg), arg)

    override def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign =
      ta.assignType(untpdCpy.Assign(tree)(lhs, rhs))

    override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = {
      val tree1 = untpdCpy.Block(tree)(stats, expr)
      tree match {
        case tree: Block if (expr.tpe eq tree.expr.tpe) && (expr.tpe eq tree.tpe) =>
          // The last guard is a conservative check: if `tree.tpe` is different from `expr.tpe`, then
          // it was computed from widening `expr.tpe`, and tree transforms might cause `expr.tpe.widen`
          // to change even if `expr.tpe` itself didn't change, e.g:
          //     { val s = ...;  s }
          // If the type of `s` changed, then the type of the block might have changed, even though `expr.tpe`
          // will still be `TermRef(NoPrefix, s)`
          tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, stats, expr)
      }
    }

    override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = {
      val tree1 = untpdCpy.If(tree)(cond, thenp, elsep)
      tree match {
        case tree: If if (thenp.tpe eq tree.thenp.tpe) && (elsep.tpe eq tree.elsep.tpe) &&
          ((tree.tpe eq thenp.tpe) || (tree.tpe eq elsep.tpe)) =>
          // The last guard is a conservative check similar to the one done in `Block` above,
          // if `tree.tpe` is not identical to the type of one of its branch, it might have been
          // computed from the widened type of the branches, so the same reasoning as
          // in `Block` applies.
          tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, thenp, elsep)
      }
    }

    override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = {
      val tree1 = untpdCpy.Closure(tree)(env, meth, tpt)
      tree match {
        case tree: Closure if sameTypes(env, tree.env) && (meth.tpe eq tree.meth.tpe) && (tpt.tpe eq tree.tpt.tpe) =>
          tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, meth, tpt)
      }
    }

    override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = {
      val tree1 = untpdCpy.Match(tree)(selector, cases)
      tree match {
        case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, selector, cases)
      }
    }

    override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = {
      val tree1 = untpdCpy.CaseDef(tree)(pat, guard, body)
      tree match {
        case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, pat, body)
      }
    }

    override def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled =
      ta.assignType(untpdCpy.Labeled(tree)(bind, expr))

    override def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return =
      ta.assignType(untpdCpy.Return(tree)(expr, from))

    override def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo =
      ta.assignType(untpdCpy.WhileDo(tree)(cond, body))

    override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = {
      val tree1 = untpdCpy.Try(tree)(expr, cases, finalizer)
      tree match {
        case tree: Try if (expr.tpe eq tree.expr.tpe) && sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, expr, cases)
      }
    }

    override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = {
      val tree1 = untpdCpy.Inlined(tree)(call, bindings, expansion)
      tree match {
        case tree: Inlined if sameTypes(bindings, tree.bindings) && (expansion.tpe eq tree.expansion.tpe) =>
          tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, bindings, expansion)
      }
    }

    override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = {
      val tree1 = untpdCpy.SeqLiteral(tree)(elems, elemtpt)
      tree match {
        case tree: SeqLiteral
        if sameTypes(elems, tree.elems) && (elemtpt.tpe eq tree.elemtpt.tpe) =>
          tree1.withTypeUnchecked(tree.tpe)
        case _ =>
          ta.assignType(tree1, elems, elemtpt)
      }
    }

    override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = {
      val tree1 = untpdCpy.Annotated(tree)(arg, annot)
      tree match {
        case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe)
        case _ => ta.assignType(tree1, arg, annot)
      }
    }

    override def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If =
      If(tree: Tree)(cond, thenp, elsep)
    override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure =
      Closure(tree: Tree)(env, meth, tpt)
    override def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef =
      CaseDef(tree: Tree)(pat, guard, body)
    override def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try =
      Try(tree: Tree)(expr, cases, finalizer)
  }

  /** A tree copier for copies made across phases: types are recomputed from
   *  constituents rather than kept, except for constant-typed pure applications.
   */
  class TimeTravellingTreeCopier extends TypedTreeCopier {
    override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply =
      tree match
        case tree: Apply
        if (tree.fun eq fun) && (tree.args eq args)
           && tree.tpe.isInstanceOf[ConstantType]
           && isPureExpr(tree) => tree
        case _ =>
          ta.assignType(untpdCpy.Apply(tree)(fun, args), fun, args)
      // Note: Reassigning the original type if `fun` and `args` have the same types as before
      // does not work here in general: The computed type depends on the widened function type, not
      // the function type itself. A tree transform may keep the function type the
      // same but its widened type might change.
      // However, we keep constant types of pure expressions. This uses the underlying assumptions
      // that pure functions yielding a constant will not change in later phases.

    override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply =
      ta.assignType(untpdCpy.TypeApply(tree)(fun, args), fun, args)
      // Same remark as for Apply

    override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure =
      ta.assignType(untpdCpy.Closure(tree)(env, meth, tpt), meth, tpt)

    override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure =
      Closure(tree: Tree)(env, meth, tpt)
  }

  override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError

  implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal {

    /** Is this tree a term whose widened type is a value type? */
    def isValue(using Context): Boolean =
      tree.isTerm && tree.tpe.widen.isValueType

    def isValueOrPattern(using Context): Boolean =
      tree.isValue || tree.isPattern

    def isValueType: Boolean =
      tree.isType && tree.tpe.isValueType

    /** Is this tree a constructor application `new C(...)...`? */
    def isInstantiation: Boolean = tree match {
      case Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
      case _ => false
    }

    def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T =
      ShallowFolder(op).apply(z, tree)

    def deepFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T =
      DeepFolder(op).apply(z, tree)

    /** The last subtree (in shallow traversal order) satisfying `pred`, if any */
    def find[T](pred: (tpd.Tree) => Boolean)(using Context): Option[tpd.Tree] =
      shallowFold[Option[tpd.Tree]](None)((accum, tree) => if (pred(tree)) Some(tree) else accum)

    /** This tree with references to symbols in `from` substituted by symbols in `to` */
    def subst(from: List[Symbol], to: List[Symbol])(using Context): ThisTree =
      TreeTypeMap(substFrom = from, substTo = to).apply(tree)

    /** Change owner from `from` to `to`. If `from` is a weak owner, also change its
     *  owner to `to`, and continue until a non-weak owner is reached.
     */
    def changeOwner(from: Symbol, to: Symbol)(using Context): ThisTree = {
      @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree =
        if (from.isWeakOwner && !from.owner.isClass)
          loop(from.owner, from :: froms, to :: tos)
        else
          //println(i"change owner ${from :: froms}%, % ==> $tos of $tree")
          TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree)
      if (from == to) tree else loop(from, Nil, to :: Nil)
    }

    /**
     * Set the owner of every definition in this tree which is not itself contained in this
     * tree to be `newowner`
     */
    def changeNonLocalOwners(newOwner: Symbol)(using Context): Tree = {
      // Collect the owners of all definitions in the tree (excluding package owners).
      val ownerAcc = new TreeAccumulator[immutable.Set[Symbol]] {
        def apply(ss: immutable.Set[Symbol], tree: Tree)(using Context) = tree match {
          case tree: DefTree =>
            val sym = tree.symbol
            if sym.exists && !sym.owner.is(Package) then ss + sym.owner else ss
          case _ =>
            foldOver(ss, tree)
        }
      }
      val owners = ownerAcc(immutable.Set.empty[Symbol], tree).toList
      val newOwners = List.fill(owners.size)(newOwner)
      TreeTypeMap(oldOwners = owners, newOwners = newOwners).apply(tree)
    }

    /** After phase `trans`, set the owner of every definition in this tree that was formerly
     *  owned by `from` to `to`.
     */
    def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree =
      if (ctx.phase == trans.next) {
        val traverser = new TreeTraverser {
          def traverse(tree: Tree)(using Context) = tree match {
            case tree: DefTree =>
              val sym = tree.symbol
              val prevDenot = atPhase(trans)(sym.denot)
              if (prevDenot.effectiveOwner == from.skipWeakOwner) {
                val d = sym.copySymDenotation(owner = to)
                d.installAfter(trans)
                d.transformAfter(trans, d => if (d.owner eq from) d.copySymDenotation(owner = to) else d)
              }
              if (sym.isWeakOwner) traverseChildren(tree)
            case _ =>
              traverseChildren(tree)
          }
        }
        traverser.traverse(tree)
        tree
      }
      else atPhase(trans.next)(changeOwnerAfter(from, to, trans))

    /** A select node with the given selector name and a computed type */
    def select(name: Name)(using Context): Select =
      Select(tree, name)

    /** A select node with the given selector name such that the designated
     *  member satisfies predicate `p`. Useful for disambiguating overloaded members.
     */
    def select(name: Name, p: Symbol => Boolean)(using Context): Select =
      select(tree.tpe.member(name).suchThat(p).symbol)

    /** A select node with the given type */
    def select(tp: NamedType)(using Context): Select =
      untpd.Select(tree, tp.name).withType(tp)

    /** A select node that selects the given symbol. Note: Need to make sure this
     *  is in fact the symbol you would get when you select with the symbol's name,
     *  otherwise a data race may occur which would be flagged by -Yno-double-bindings.
     */
    def select(sym: Symbol)(using Context): Select = {
      val tp =
        if (sym.isType) {
          assert(!sym.is(TypeParam))
          TypeRef(tree.tpe, sym.asType)
        }
        else
          TermRef(tree.tpe, sym.name.asTermName, sym.denot.asSeenFrom(tree.tpe))
      untpd.Select(tree, sym.name).withType(tp)
    }

    /** A select node with the given selector name and signature and a computed type */
    def selectWithSig(name: Name, sig: Signature, target: Name)(using Context): Tree =
      untpd.SelectWithSig(tree, name, sig).withType(tree.tpe.select(name.asTermName, sig, target))

    /** A select node with selector name and signature taken from `sym`.
     *  Note: Use this method instead of select(sym) if the referenced symbol
     *  might be overridden in the type of the qualifier prefix. See note
     *  on select(sym: Symbol).
     */
    def selectWithSig(sym: Symbol)(using Context): Tree =
      selectWithSig(sym.name, sym.signature, sym.targetName)

    /** A unary apply node with given argument: `tree(arg)` */
    def appliedTo(arg: Tree)(using Context): Apply =
      appliedToTermArgs(arg :: Nil)

    /** An apply node with given arguments: `tree(arg, args0, ..., argsN)` */
    def appliedTo(arg: Tree, args: Tree*)(using Context): Apply =
      appliedToTermArgs(arg :: args.toList)

    /** An apply node with given argument list `tree(args(0), ..., args(args.length - 1))` */
    def appliedToTermArgs(args: List[Tree])(using Context): Apply =
      Apply(tree, args)

    /** An applied node that accepts only varargs as arguments */
    def appliedToVarargs(args: List[Tree], tpt: Tree)(using Context): Apply =
      appliedTo(repeated(args, tpt))

    /** An apply or type apply node with given argument list */
    def appliedToArgs(args: List[Tree])(using Context): GenericApply = args match
      case arg :: args1 if arg.isType => TypeApply(tree, args)
      case _ => Apply(tree, args)

    /** The current tree applied to given argument lists:
     *  `tree (argss(0)) ... (argss(argss.length -1))`
     */
    def appliedToArgss(argss: List[List[Tree]])(using Context): Tree =
      argss.foldLeft(tree: Tree)(_.appliedToArgs(_))

    /** The current tree applied to (): `tree()` */
    def appliedToNone(using Context): Apply = Apply(tree, Nil)

    /** The current tree applied to given type argument: `tree[targ]` */
    def appliedToType(targ: Type)(using Context): Tree =
      appliedToTypes(targ :: Nil)

    /** The current tree applied to given type arguments: `tree[targ0, ..., targN]` */
    def appliedToTypes(targs: List[Type])(using Context): Tree =
      appliedToTypeTrees(targs map (TypeTree(_)))

    /** The current tree applied to given type argument: `tree[targ]` */
    def appliedToTypeTree(targ: Tree)(using Context): Tree =
      appliedToTypeTrees(targ :: Nil)

    /** The current tree applied to given type argument list: `tree[targs(0), ..., targs(targs.length - 1)]` */
    def appliedToTypeTrees(targs: List[Tree])(using Context): Tree =
      if targs.isEmpty then tree else TypeApply(tree, targs)

    /** Apply to `()` unless tree's widened type is parameterless */
    def ensureApplied(using Context): Tree =
      if (tree.tpe.widen.isParameterless) tree else tree.appliedToNone

    /** `tree == that` */
    def equal(that: Tree)(using Context): Tree =
      if (that.tpe.widen.isRef(defn.NothingClass))
        Literal(Constant(false))
      else
        applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType)

    /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */
    def isInstance(tp: Type)(using Context): Tree = tp.dealias match {
      case ConstantType(c) if c.tag == StringTag =>
        singleton(tp).equal(tree)
      case tp: SingletonType =>
        if tp.widen.derivesFrom(defn.ObjectClass) then
          tree.ensureConforms(defn.ObjectType).select(defn.Object_eq).appliedTo(singleton(tp))
        else
          singleton(tp).equal(tree)
      case _ =>
        tree.select(defn.Any_isInstanceOf).appliedToType(tp)
    }

    /** tree.asInstanceOf[`tp`] */
    def asInstance(tp: Type)(using Context): Tree = {
      assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]")
      tree.select(defn.Any_asInstanceOf).appliedToType(tp)
    }

    /** cast tree to `tp`, assuming no exception is raised, i.e. the operation is pure */
    def cast(tp: Type)(using Context): Tree = cast(TypeTree(tp))

    /** cast tree to `tp`, assuming no exception is raised, i.e. the operation is pure */
    def cast(tpt: TypeTree)(using Context): Tree =
      assert(tpt.tpe.isValueType, i"bad cast: $tree.asInstanceOf[$tpt]")
      tree.select(if (ctx.erasedTypes) defn.Any_asInstanceOf else defn.Any_typeCast)
        .appliedToTypeTree(tpt)

    /** cast `tree` to `tp` (or its box/unbox/cast equivalent when after
     *  erasure and value and non-value types are mixed),
     *  unless tree's type already conforms to `tp`.
     */
    def ensureConforms(tp: Type)(using Context): Tree =
      if (tree.tpe <:< tp) tree
      else if (!ctx.erasedTypes) cast(tp)
      else Erasure.Boxing.adaptToType(tree, tp)

    /** `tree ne null` (might need a cast to be type correct) */
    def testNotNull(using Context): Tree = {
      // If the receiver is of type `Nothing` or `Null`, add an ascription or cast
      // so that the selection succeeds.
      // e.g. `null.ne(null)` doesn't type, but `(null: AnyRef).ne(null)` does.
      val receiver =
        if tree.tpe.isBottomType then
          if ctx.explicitNulls then tree.cast(defn.AnyRefType)
          else Typed(tree, TypeTree(defn.AnyRefType))
        else tree.ensureConforms(defn.ObjectType)
      // also need to cast the null literal to AnyRef in explicit nulls
      val nullLit = if ctx.explicitNulls then nullLiteral.cast(defn.AnyRefType) else nullLiteral
      receiver.select(defn.Object_ne).appliedTo(nullLit).withSpan(tree.span)
    }

    /** If initializer tree is `_`, the default value of its type,
     *  otherwise the tree itself.
     */
    def wildcardToDefault(using Context): Tree =
      if (isWildcardArg(tree)) defaultValue(tree.tpe) else tree

    /** `this && that`, for boolean trees `this`, `that` */
    def and(that: Tree)(using Context): Tree =
      tree.select(defn.Boolean_&&).appliedTo(that)

    /** `this || that`, for boolean trees `this`, `that` */
    def or(that: Tree)(using Context): Tree =
      tree.select(defn.Boolean_||).appliedTo(that)

    /** The translation of `tree = rhs`.
     *  This is either the tree as an assignment, or a setter call.
     */
    def becomes(rhs: Tree)(using Context): Tree = {
      val sym = tree.symbol
      if (sym.is(Method)) {
        val setter = sym.setter.orElse {
          assert(sym.name.isSetterName && sym.info.firstParamTypes.nonEmpty, sym)
          sym
        }
        val qual = tree match {
          case id: Ident => desugarIdentPrefix(id)
          case Select(qual, _) => qual
        }
        qual.select(setter).appliedTo(rhs)
      }
      else Assign(tree, rhs)
    }

    /** tree @annot
     *
     *  works differently for type trees and term trees
     */
    def annotated(annot: Tree)(using Context): Tree =
      if (tree.isTerm)
        Typed(tree, TypeTree(AnnotatedType(tree.tpe.widenIfUnstable, Annotation(annot))))
      else
        Annotated(tree, annot)

    /** A synthetic select that will be turned into an outer path by ExplicitOuter.
     *  @param levels  How many outer levels to select
     *  @param tp      The type of the destination of the outer path.
     */
    def outerSelect(levels: Int, tp: Type)(using Context): Tree =
      untpd.Select(tree, OuterSelectName(EmptyTermName, levels)).withType(SkolemType(tp))

    /** Replace Inlined nodes and InlineProxy references to underlying arguments */
    def underlyingArgument(using Context): Tree = {
      val mapToUnderlying = new MapToUnderlying {
        /** Should get the rhs of this binding
         *  Returns true if the symbol is a val or def generated by eta-expansion/inline
         */
        override protected def skipLocal(sym: Symbol): Boolean =
          sym.isOneOf(InlineProxy | Synthetic)
      }
      mapToUnderlying.transform(tree)
    }

    /** Replace references via Ident nodes by the underlying tree that defined them */
    def underlying(using Context): Tree = MapToUnderlying().transform(tree)

    // --- Higher order traversal methods -------------------------------

    /** Apply `f` to each subtree of this tree */
    def foreachSubTree(f: Tree => Unit)(using Context): Unit = {
      val traverser = new TreeTraverser {
        def traverse(tree: Tree)(using Context) = foldOver(f(tree), tree)
      }
      traverser.traverse(tree)
    }

    /** Is there a subtree of this tree that satisfies predicate `p`? */
    def existsSubTree(p: Tree => Boolean)(using Context): Boolean = {
      val acc = new TreeAccumulator[Boolean] {
        def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t)
      }
      acc(false, tree)
    }

    /** All subtrees of this tree that satisfy predicate `p`. */
    def filterSubTrees(f: Tree => Boolean)(using Context): List[Tree] = {
      val buf = mutable.ListBuffer[Tree]()
      foreachSubTree { tree => if (f(tree)) buf += tree }
      buf.toList
    }

    /** Set this tree as the `defTree` of its symbol and return this tree */
    def setDefTree(using Context): ThisTree = {
      val sym = tree.symbol
      if (sym.exists) sym.defTree = tree
      tree
    }

    /** Eta-expand this tree along its (widened) context function type into
     *  nested closures of anonymous functions.
     */
    def etaExpandCFT(using Context): Tree =
      def expand(target: Tree, tp: Type)(using Context): Tree = tp match
        case defn.ContextFunctionType(argTypes, resType, isErased) =>
          val anonFun = newAnonFun(
            ctx.owner,
            MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType),
            coord = ctx.owner.coord)
          def lambdaBody(refss: List[List[Tree]]) =
            expand(target.select(nme.apply).appliedToArgss(refss), resType)(
              using ctx.withOwner(anonFun))
          Closure(anonFun, lambdaBody)
        case _ =>
          target
      expand(tree, tree.tpe.widen)
  }

  inline val MapRecursionLimit = 10

  extension (trees: List[Tree])

    /** A map that expands to a recursive function. It's equivalent to
     *
     *      flatten(trees.mapConserve(op))
     *
     *  and falls back to it after `MapRecursionLimit` recursions.
     *  Before that it uses a simpler method that uses stackspace
     *  instead of heap.
     *  Note `op` is duplicated in the generated code, so it should be
     *  kept small.
     */
    inline def mapInline(inline op: Tree => Tree): List[Tree] =
      def recur(trees: List[Tree], count: Int): List[Tree] =
        if count > MapRecursionLimit then
          // use a slower implementation that avoids stack overflows
          flatten(trees.mapConserve(op))
        else trees match
          case tree :: rest =>
            val tree1 = op(tree)
            val rest1 = recur(rest, count + 1)
            if (tree1 eq tree) && (rest1 eq rest) then trees
            else tree1 match
              case Thicket(elems1) => elems1 ::: rest1
              case _ => tree1 :: rest1
          case nil => nil
      recur(trees, 0)

    /** Transform statements while maintaining import contexts and expression contexts
     *  in the same way as Typer does. The code addresses additional concerns:
     *   - be tail-recursive where possible
     *   - don't re-allocate trees where nothing has changed
     */
    inline def mapStatements[T](
        exprOwner: Symbol,
        inline op: Tree => Context ?=> Tree,
        inline wrapResult: List[Tree] => Context ?=> T)(using Context): T =
      // `mapped` stays null as long as every statement so far was returned unchanged;
      // `unchanged` is the suffix of the original list not yet copied into `mapped`.
      @tailrec
      def loop(mapped: mutable.ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree])(using Context): T =
        pending match
          case stat :: rest =>
            val statCtx = stat match
              case _: DefTree | _: ImportOrExport => ctx
              case _ => ctx.exprContext(stat, exprOwner)
            val stat1 = op(stat)(using statCtx)
            val restCtx = stat match
              case stat: Import => ctx.importContext(stat, stat.symbol)
              case _ => ctx
            if stat1 eq stat then
              loop(mapped, unchanged, rest)(using restCtx)
            else
              // First change: copy the unchanged prefix, then append the transformed statement(s).
              val buf = if mapped == null then new mutable.ListBuffer[Tree] else mapped
              var xc = unchanged
              while xc ne pending do
                buf += xc.head
                xc = xc.tail
              stat1 match
                case Thicket(stats1) => buf ++= stats1
                case _ => buf += stat1
              loop(buf, rest, rest)(using restCtx)
          case nil =>
            wrapResult(
              if mapped == null then unchanged
              else mapped.prependToList(unchanged))

      loop(null, trees, trees)
    end mapStatements
  end extension

  /** A treemap that generates the same contexts as the original typer for statements.
+ * This means: + * - statements that are not definitions get the exprOwner as owner + * - imports are reflected in the contexts of subsequent statements + */ + class TreeMapWithPreciseStatContexts(cpy: TreeCopier = tpd.cpy) extends TreeMap(cpy): + def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] => Context ?=> T)(using Context): T = + trees.mapStatements(exprOwner, transform(_), wrapResult) + final override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = + transformStats(trees, exprOwner, sameStats) + override def transformBlock(blk: Block)(using Context) = + transformStats(blk.stats, ctx.owner, + stats1 => ctx ?=> cpy.Block(blk)(stats1, transform(blk.expr))) + + val sameStats: List[Tree] => Context ?=> List[Tree] = stats => stats + + /** Map Inlined nodes, NamedArgs, Blocks with no statements and local references to underlying arguments. + * Also drops Inline and Block with no statements. + */ + private class MapToUnderlying extends TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match { + case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => + tree.symbol.defTree match { + case defTree: ValOrDefDef => + val rhs = defTree.rhs + assert(!rhs.isEmpty) + transform(rhs) + case _ => tree + } + case Inlined(_, Nil, arg) => transform(arg) + case Block(Nil, arg) => transform(arg) + case NamedArg(_, arg) => transform(arg) + case tree => super.transform(tree) + } + + /** Should get the rhs of this binding */ + protected def skipLocal(sym: Symbol): Boolean = true + + /** Is this a symbol that of a local val or parameterless def for which we could get the rhs */ + private def isBinding(sym: Symbol)(using Context): Boolean = + sym.isTerm && !sym.is(Param) && !sym.owner.isClass && + !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless + } + + extension (xs: List[tpd.Tree]) + def tpes: List[Type] = xs match { + case x :: 
xs1 => x.tpe :: xs1.tpes + case nil => Nil + } + + /** A trait for loaders that compute trees. Currently implemented just by DottyUnpickler. */ + trait TreeProvider { + protected def computeRootTrees(using Context): List[Tree] + + private var myTrees: List[Tree] | Null = _ + + /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ + def rootTrees(using Context): List[Tree] = + if (ctx.settings.YretainTrees.value) { + if (myTrees == null) myTrees = computeRootTrees + myTrees.uncheckedNN + } + else computeRootTrees + + /** Get first tree defined by this provider, or EmptyTree if none exists */ + def tree(using Context): Tree = + rootTrees.headOption.getOrElse(EmptyTree) + + /** Is it possible that the tree to load contains a definition of or reference to `id`? */ + def mightContain(id: String)(using Context): Boolean = true + } + + // convert a numeric with a toXXX method + def primitiveConversion(tree: Tree, numericCls: Symbol)(using Context): Tree = { + val mname = "to".concat(numericCls.name) + val conversion = tree.tpe member(mname) + if (conversion.symbol.exists) + tree.select(conversion.symbol.termRef).ensureApplied + else if (tree.tpe.widen isRef numericCls) + tree + else { + report.warning(i"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") + Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) + } + } + + /** A tree that corresponds to `Predef.classOf[$tp]` in source */ + def clsOf(tp: Type)(using Context): Tree = + if ctx.erasedTypes && !tp.isRef(defn.UnitClass) then + Literal(Constant(TypeErasure.erasure(tp))) + else + Literal(Constant(tp)) + + @tailrec + def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = + if (trees.isEmpty) trees.isEmpty + else if (trees1.isEmpty) trees.isEmpty + else (trees.head.tpe eq trees1.head.tpe) && sameTypes(trees.tail, trees1.tail) + + /** If `tree`'s purity level is less than `level`, let-bind it so that it gets evaluated 
+ * only once. I.e. produce a + * + * { val x = 'tree ; ~within('x) } + * + * instead of otherwise + * + * ~within('tree) + */ + def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(using Context): Tree = + if (exprPurity(tree) >= level) within(tree) + else { + val vdef = SyntheticValDef(TempResultName.fresh(), tree) + Block(vdef :: Nil, within(Ident(vdef.namedType))) + } + + /** Let bind `tree` unless `tree` is at least idempotent */ + def evalOnce(tree: Tree)(within: Tree => Tree)(using Context): Tree = + letBindUnless(TreeInfo.Idempotent, tree)(within) + + def runtimeCall(name: TermName, args: List[Tree])(using Context): Tree = + Ident(defn.ScalaRuntimeModule.requiredMethod(name).termRef).appliedToTermArgs(args) + + /** An extractor that pulls out type arguments */ + object MaybePoly: + def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match + case TypeApply(tree, targs) => Some(tree, targs) + case _ => Some(tree, Nil) + + object TypeArgs: + def unapply(ts: List[Tree]): Option[List[Tree]] = + if ts.nonEmpty && ts.head.isType then Some(ts) else None + + /** Split argument clauses into a leading type argument clause if it exists and + * remaining clauses + */ + def splitArgs(argss: List[List[Tree]]): (List[Tree], List[List[Tree]]) = argss match + case TypeArgs(targs) :: argss1 => (targs, argss1) + case _ => (Nil, argss) + + def joinArgs(targs: List[Tree], argss: List[List[Tree]]): List[List[Tree]] = + if targs.isEmpty then argss else targs :: argss + + /** A key to be used in a context property that tracks enclosing inlined calls */ + private val InlinedCalls = Property.Key[List[Tree]]() + + /** A key to be used in a context property that tracks the number of inlined trees */ + private val InlinedTrees = Property.Key[Counter]() + final class Counter { + var count: Int = 0 + } + + /** Record an enclosing inlined call. + * EmptyTree calls (for parameters) cancel the next-enclosing call in the list instead of being added to it. 
+ * We assume parameters are never nested inside parameters. + */ + override def inlineContext(call: Tree)(using Context): Context = { + // We assume enclosingInlineds is already normalized, and only process the new call with the head. + val oldIC = enclosingInlineds + + val newIC = + if call.isEmpty then + oldIC match + case t1 :: ts2 => ts2 + case _ => oldIC + else + call :: oldIC + + val ctx1 = ctx.fresh.setProperty(InlinedCalls, newIC) + if oldIC.isEmpty then ctx1.setProperty(InlinedTrees, new Counter) else ctx1 + } + + /** All enclosing calls that are currently inlined, from innermost to outermost. + */ + def enclosingInlineds(using Context): List[Tree] = + ctx.property(InlinedCalls).getOrElse(Nil) + + /** Record inlined trees */ + def addInlinedTrees(n: Int)(using Context): Unit = + ctx.property(InlinedTrees).foreach(_.count += n) + + /** Check if the limit on the number of inlined trees has been reached */ + def reachedInlinedTreesLimit(using Context): Boolean = + ctx.property(InlinedTrees) match + case Some(c) => c.count > ctx.settings.XmaxInlinedTrees.value + case None => false + + /** The source file where the symbol of the `inline` method referred to by `call` + * is defined + */ + def sourceFile(call: Tree)(using Context): SourceFile = call.symbol.source + + /** Desugar identifier into a select node. Return the tree itself if not possible */ + def desugarIdent(tree: Ident)(using Context): RefTree = { + val qual = desugarIdentPrefix(tree) + if (qual.isEmpty) tree + else qual.select(tree.symbol) + } + + /** Recover identifier prefix (e.g. 
this) if it exists */ + def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match { + case TermRef(prefix: TermRef, _) => + prefix.info match + case mt: MethodType if mt.paramInfos.isEmpty && mt.resultType.typeSymbol.is(Module) => + ref(mt.resultType.typeSymbol.sourceModule) + case _ => + ref(prefix) + case TermRef(prefix: ThisType, _) => + This(prefix.cls) + case _ => + EmptyTree + } + + /** + * The symbols that are imported with `expr.name` + * + * @param expr The base of the import statement + * @param name The name that is being imported. + * @return All the symbols that would be imported with `expr.name`. + */ + def importedSymbols(expr: Tree, name: Name)(using Context): List[Symbol] = { + def lookup(name: Name): Symbol = expr.tpe.member(name).symbol + val symbols = + List(lookup(name.toTermName), + lookup(name.toTypeName), + lookup(name.moduleClassName), + lookup(name.sourceModuleName)) + + symbols.map(_.sourceSymbol).filter(_.exists).distinct + } + + /** + * All the symbols that are imported by the first selector of `imp` that matches + * `selectorPredicate`. + * + * @param imp The import statement to analyze + * @param selectorPredicate A test to find the selector to use. + * @return The symbols imported. + */ + def importedSymbols(imp: Import, + selectorPredicate: untpd.ImportSelector -> Boolean = util.common.alwaysTrue) + (using Context): List[Symbol] = + imp.selectors.find(selectorPredicate) match + case Some(sel) => importedSymbols(imp.expr, sel.name) + case _ => Nil + + /** + * The list of select trees that resolve to the same symbols as the ones that are imported + * by `imp`. + */ + def importSelections(imp: Import)(using Context): List[Select] = { + def imported(sym: Symbol, id: untpd.Ident, rename: Option[untpd.Ident]): List[Select] = { + // Give a zero-extent position to the qualifier to prevent it from being included several + // times in results in the language server. 
+ val noPosExpr = focusPositions(imp.expr) + val selectTree = Select(noPosExpr, sym.name).withSpan(id.span) + rename match { + case None => + selectTree :: Nil + case Some(rename) => + // Get the type of the symbol that is actually selected, and construct a select + // node with the new name and the type of the real symbol. + val name = if (sym.name.isTypeName) rename.name.toTypeName else rename.name + val actual = Select(noPosExpr, sym.name) + val renameTree = Select(noPosExpr, name).withSpan(rename.span).withType(actual.tpe) + selectTree :: renameTree :: Nil + } + } + + imp.selectors.flatMap { sel => + if sel.isWildcard then Nil + else + val renamedOpt = sel.renamed match + case renamed: untpd.Ident => Some(renamed) + case untpd.EmptyTree => None + importedSymbols(imp.expr, sel.name).flatMap { sym => + imported(sym, sel.imported, renamedOpt) + } + } + } + + /** Creates the tuple type tree repesentation of the type trees in `ts` */ + def tupleTypeTree(elems: List[Tree])(using Context): Tree = { + val arity = elems.length + if arity <= Definitions.MaxTupleArity then + val tupleTp = defn.TupleType(arity) + if tupleTp != null then + AppliedTypeTree(TypeTree(tupleTp), elems) + else nestedPairsTypeTree(elems) + else nestedPairsTypeTree(elems) + } + + /** Creates the nested pairs type tree repesentation of the type trees in `ts` */ + def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree = + ts.foldRight[Tree](TypeTree(defn.EmptyTupleModule.termRef))((x, acc) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), x :: acc :: Nil)) + + /** Replaces all positions in `tree` with zero-extent positions */ + private def focusPositions(tree: Tree)(using Context): Tree = { + val transformer = new tpd.TreeMap { + override def transform(tree: Tree)(using Context): Tree = + super.transform(tree).withSpan(tree.span.focus) + } + transformer.transform(tree) + } + + /** Convert a list of trees to a vararg-compatible tree. + * Used to make arguments for methods that accept varargs. 
+ */ + def repeated(trees: List[Tree], tpt: Tree)(using Context): Tree = + ctx.typeAssigner.arrayToRepeated(JavaSeqLiteral(trees, tpt)) + + /** Create a tree representing a list containing all + * the elements of the argument list. A "list of tree to + * tree of list" conversion. + * + * @param trees the elements the list represented by + * the resulting tree should contain. + * @param tpe the type of the elements of the resulting list. + * + */ + def mkList(trees: List[Tree], tpt: Tree)(using Context): Tree = + ref(defn.ListModule).select(nme.apply) + .appliedToTypeTree(tpt) + .appliedToVarargs(trees, tpt) + + + protected def FunProto(args: List[Tree], resType: Type)(using Context) = + ProtoTypes.FunProtoTyped(args, resType)(ctx.typer, ApplyKind.Regular) +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala new file mode 100644 index 000000000000..eb729d33a091 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala @@ -0,0 +1,819 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Contexts._, Constants._, Names._, Flags._ +import dotty.tools.dotc.typer.ProtoTypes +import Symbols._, StdNames._, Trees._ +import util.{Property, SourceFile, NoSource} +import util.Spans.Span +import annotation.constructorOnly +import annotation.internal.sharable +import Decorators._ +import annotation.retains +import language.experimental.pureFunctions + +object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { + + // ----- Tree cases that exist in untyped form only ------------------ + + abstract class OpTree(implicit @constructorOnly src: SourceFile) extends Tree { + def op: Ident + override def isTerm: Boolean = op.isTerm + override def isType: Boolean = op.isType + } + + /** A typed subtree of an untyped tree needs to be wrapped in a TypedSplice + * @param owner The current owner at the time the tree was defined + * @param isExtensionReceiver The splice was created 
from the receiver `e` in an extension + * method call `e.f(...)` + */ + abstract case class TypedSplice(splice: tpd.Tree)(val owner: Symbol, val isExtensionReceiver: Boolean)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + def forwardTo: tpd.Tree = splice + override def toString = + def ext = if isExtensionReceiver then ", isExtensionReceiver = true" else "" + s"TypedSplice($splice$ext)" + } + + object TypedSplice { + def apply(tree: tpd.Tree, isExtensionReceiver: Boolean = false)(using Context): TypedSplice = + new TypedSplice(tree)(ctx.owner, isExtensionReceiver) {} + } + + /** mods object name impl */ + case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) + extends MemberDef { + type ThisTree[-T >: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) + } + + /** An untyped template with a derives clause. Derived parents are added to the end + * of the `parents` list. `derivedCount` keeps track of how many there are. 
+ * This representation was chosen because it balances two concerns: + * - maximize overlap between DerivingTemplate and Template for code streamlining + * - keep invariant that elements of untyped trees align with source positions + */ + class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) + extends Template(constr, parentsOrDerived, self, preBody) { + override val parents = parentsOrDerived.dropRight(derivedCount) + override val derived = parentsOrDerived.takeRight(derivedCount) + } + + case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + + case class SymbolLit(str: String)(implicit @constructorOnly src: SourceFile) extends TermTree + + /** An interpolated string + * @param segments a list of two element tickets consisting of string literal and argument tree, + * possibly with a simple string literal as last element of the list + */ + case class InterpolatedString(id: TermName, segments: List[Tree])(implicit @constructorOnly src: SourceFile) + extends TermTree + + /** A function type or closure */ + case class Function(args: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm: Boolean = body.isTerm + override def isType: Boolean = body.isType + } + + /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ + class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) + extends Function(args, body) + + /** A polymorphic function type */ + case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm = body.isTerm + override def isType = body.isType + } + + /** A function created from a wildcard expression + * @param placeholderParams a list of definitions of synthetic 
parameters. + * @param body the function body where wildcards are replaced by + * references to synthetic parameters. + * This is equivalent to Function, except that forms a special case for the overlapping + * positions tests. + */ + class WildcardFunction(placeholderParams: List[ValDef], body: Tree)(implicit @constructorOnly src: SourceFile) + extends Function(placeholderParams, body) + + case class InfixOp(left: Tree, op: Ident, right: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree + case class PostfixOp(od: Tree, op: Ident)(implicit @constructorOnly src: SourceFile) extends OpTree + case class PrefixOp(op: Ident, od: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree + case class Parens(t: Tree)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + def forwardTo: Tree = t + } + case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm + override def isType: Boolean = !isTerm + } + case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class Quote(quoted: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class Splice(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree { + def isInBraces: Boolean = span.end != expr.span.end + } + case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class GenFrom(pat: Tree, expr: Tree, checkMode: GenCheckMode)(implicit @constructorOnly src: SourceFile) extends Tree + case class GenAlias(pat: Tree, expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree + case class PatDef(mods: Modifiers, 
pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree + case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree + + case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { + // TODO: Make bound a typed tree? + + /** It's a `given` selector */ + val isGiven: Boolean = imported.name.isEmpty + + /** It's a `given` or `_` selector */ + val isWildcard: Boolean = isGiven || imported.name == nme.WILDCARD + + /** The imported name, EmptyTermName if it's a given selector */ + val name: TermName = imported.name.asInstanceOf[TermName] + + /** The renamed part (which might be `_`), if present, or `name`, if missing */ + val rename: TermName = renamed match + case Ident(rename: TermName) => rename + case _ => name + } + + case class Number(digits: String, kind: NumberKind)(implicit @constructorOnly src: SourceFile) extends TermTree + + enum NumberKind { + case Whole(radix: Int) + case Decimal + case Floating + } + + /** {x1, ..., xN} T (only relevant under captureChecking) */ + case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree + + /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ + case class DependentTypeTree(tp: List[Symbol] -> Type)(implicit @constructorOnly src: SourceFile) extends Tree + + @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { + override def isEmpty: Boolean = true + } + + def WildcardTypeBoundsTree()(using src: SourceFile): TypeBoundsTree = TypeBoundsTree(EmptyTree, EmptyTree, EmptyTree) + object WildcardTypeBoundsTree: + def unapply(tree: untpd.Tree): Boolean = tree match + case TypeBoundsTree(EmptyTree, 
EmptyTree, _) => true + case _ => false + + + /** A block generated by the XML parser, only treated specially by + * `Positioned#checkPos` */ + class XMLBlock(stats: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends Block(stats, expr) + + /** An enum to control checking or filtering of patterns in GenFrom trees */ + enum GenCheckMode { + case Ignore // neither filter nor check since filtering was done before + case Check // check that pattern is irrefutable + case CheckAndFilter // both check and filter (transitional period starting with 3.2) + case FilterNow // filter out non-matching elements if we are not in 3.2 or later + case FilterAlways // filter out non-matching elements since pattern is prefixed by `case` + } + + // ----- Modifiers ----------------------------------------------------- + /** Mod is intended to record syntactic information about modifiers, it's + * NOT a replacement of FlagSet. + * + * For any query about semantic information, check `flags` instead. 
+ */ + sealed abstract class Mod(val flags: FlagSet)(implicit @constructorOnly src: SourceFile) + extends Positioned + + object Mod { + case class Private()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Private) + + case class Protected()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Protected) + + case class Var()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Mutable) + + case class Implicit()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Implicit) + + case class Given()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Given) + + case class Erased()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Erased) + + case class Final()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Final) + + case class Sealed()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Sealed) + + case class Opaque()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Opaque) + + case class Open()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Open) + + case class Override()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Override) + + case class Abstract()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Abstract) + + case class Lazy()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Lazy) + + case class Inline()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Inline) + + case class Transparent()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Transparent) + + case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ + case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) + } + + /** Modifiers and annotations for definitions + * + * @param flags The set flags + * @param privateWithin If a private or protected has is followed by a + * qualifier 
[q], the name q, "" as a typename otherwise. + * @param annotations The annotations preceding the modifiers + */ + case class Modifiers ( + flags: FlagSet = EmptyFlags, + privateWithin: TypeName = tpnme.EMPTY, + annotations: List[Tree] = Nil, + mods: List[Mod] = Nil) { + + def is(flag: Flag): Boolean = flags.is(flag) + def is(flag: Flag, butNot: FlagSet): Boolean = flags.is(flag, butNot = butNot) + def isOneOf(fs: FlagSet): Boolean = flags.isOneOf(fs) + def isOneOf(fs: FlagSet, butNot: FlagSet): Boolean = flags.isOneOf(fs, butNot = butNot) + def isAllOf(fc: FlagSet): Boolean = flags.isAllOf(fc) + + def | (fs: FlagSet): Modifiers = withFlags(flags | fs) + def & (fs: FlagSet): Modifiers = withFlags(flags & fs) + def &~(fs: FlagSet): Modifiers = withFlags(flags &~ fs) + + def toTypeFlags: Modifiers = withFlags(flags.toTypeFlags) + def toTermFlags: Modifiers = withFlags(flags.toTermFlags) + + def withFlags(flags: FlagSet): Modifiers = + if (this.flags == flags) this + else copy(flags = flags) + + def withoutFlags(flags: FlagSet): Modifiers = + if (this.isOneOf(flags)) + Modifiers(this.flags &~ flags, this.privateWithin, this.annotations, this.mods.filterNot(_.flags.isOneOf(flags))) + else this + + def withAddedMod(mod: Mod): Modifiers = + if (mods.exists(_ eq mod)) this + else withMods(mods :+ mod) + + private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = + flags1.isEmpty || flags2.isEmpty + || flags1.isTermFlags && flags2.isTermFlags + || flags1.isTypeFlags && flags2.isTypeFlags + + /** Add `flags` to thos modifier set, checking that there are no type/term conflicts. + * If there are conflicts, issue an error and return the modifiers consisting of + * the added flags only. The reason to do it this way is that the added flags usually + * describe the core of a construct whereas the existing set are the modifiers + * given in the source. 
+ */ + def withAddedFlags(flags: FlagSet, span: Span)(using Context): Modifiers = + if this.flags.isAllOf(flags) then this + else if compatible(this.flags, flags) then this | flags + else + val what = if flags.isTermFlags then "values" else "types" + report.error(em"${(flags & ModifierFlags).flagsString} $what cannot be ${this.flags.flagsString}", ctx.source.atSpan(span)) + Modifiers(flags) + + /** Modifiers with given list of Mods. It is checked that + * all modifiers are already accounted for in `flags` and `privateWithin`. + */ + def withMods(ms: List[Mod]): Modifiers = + if (mods eq ms) this + else { + if (ms.nonEmpty) + for (m <- ms) + assert(flags.isAllOf(m.flags) + || m.isInstanceOf[Mod.Private] && !privateWithin.isEmpty + || (m.isInstanceOf[Mod.Abstract] || m.isInstanceOf[Mod.Override]) && flags.is(AbsOverride), + s"unaccounted modifier: $m in $this with flags ${flags.flagsString} when adding $ms") + copy(mods = ms) + } + + def withAddedAnnotation(annot: Tree): Modifiers = + if (annotations.exists(_ eq annot)) this + else withAnnotations(annotations :+ annot) + + def withAnnotations(annots: List[Tree]): Modifiers = + if (annots eq annotations) this + else copy(annotations = annots) + + def withPrivateWithin(pw: TypeName): Modifiers = + if (pw.isEmpty) this + else copy(privateWithin = pw) + + def hasFlags: Boolean = flags != EmptyFlags + def hasAnnotations: Boolean = annotations.nonEmpty + def hasPrivateWithin: Boolean = privateWithin != tpnme.EMPTY + def hasMod(cls: Class[?]) = mods.exists(_.getClass == cls) + + private def isEnum = is(Enum, butNot = JavaDefined) + + def isEnumCase: Boolean = isEnum && is(Case) + def isEnumClass: Boolean = isEnum && !is(Case) + } + + @sharable val EmptyModifiers: Modifiers = Modifiers() + + // ----- TypeTrees that refer to other tree's symbols ------------------- + + /** A type tree that gets its type from some other tree's symbol. Enters the + * type tree in the References attachment of the `from` tree as a side effect. 
+ */ + abstract class DerivedTypeTree(implicit @constructorOnly src: SourceFile) extends TypeTree { + + private var myWatched: Tree = EmptyTree + + /** The watched tree; used only for printing */ + def watched: Tree = myWatched + + /** Install the derived type tree as a dependency on `original` */ + def watching(original: DefTree): this.type = { + myWatched = original + val existing = original.attachmentOrElse(References, Nil) + original.putAttachment(References, this :: existing) + this + } + + /** Install the derived type tree as a dependency on `sym` */ + def watching(sym: Symbol): this.type = withAttachment(OriginalSymbol, sym) + + /** A hook to ensure that all necessary symbols are completed so that + * OriginalSymbol attachments are propagated to this tree + */ + def ensureCompletions(using Context): Unit = () + + /** The method that computes the tree with the derived type */ + def derivedTree(originalSym: Symbol)(using Context): tpd.Tree + } + + /** Property key containing TypeTrees whose type is computed + * from the symbol in this type. These type trees have marker trees + * TypeRefOfSym or InfoOfSym as their originals. + */ + val References: Property.Key[List[DerivedTypeTree]] = Property.Key() + + /** Property key for TypeTrees marked with TypeRefOfSym or InfoOfSym + * which contains the symbol of the original tree from which this + * TypeTree is derived. 
+ */ + val OriginalSymbol: Property.Key[Symbol] = Property.Key() + + /** Property key for contextual Apply trees of the form `fn given arg` */ + val KindOfApply: Property.StickyKey[ApplyKind] = Property.StickyKey() + + // ------ Creation methods for untyped only ----------------- + + def Ident(name: Name)(implicit src: SourceFile): Ident = new Ident(name) + def SearchFailureIdent(name: Name, explanation: -> String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) + def Select(qualifier: Tree, name: Name)(implicit src: SourceFile): Select = new Select(qualifier, name) + def SelectWithSig(qualifier: Tree, name: Name, sig: Signature)(implicit src: SourceFile): Select = new SelectWithSig(qualifier, name, sig) + def This(qual: Ident)(implicit src: SourceFile): This = new This(qual) + def Super(qual: Tree, mix: Ident)(implicit src: SourceFile): Super = new Super(qual, mix) + def Apply(fun: Tree, args: List[Tree])(implicit src: SourceFile): Apply = new Apply(fun, args) + def TypeApply(fun: Tree, args: List[Tree])(implicit src: SourceFile): TypeApply = new TypeApply(fun, args) + def Literal(const: Constant)(implicit src: SourceFile): Literal = new Literal(const) + def New(tpt: Tree)(implicit src: SourceFile): New = new New(tpt) + def Typed(expr: Tree, tpt: Tree)(implicit src: SourceFile): Typed = new Typed(expr, tpt) + def NamedArg(name: Name, arg: Tree)(implicit src: SourceFile): NamedArg = new NamedArg(name, arg) + def Assign(lhs: Tree, rhs: Tree)(implicit src: SourceFile): Assign = new Assign(lhs, rhs) + def Block(stats: List[Tree], expr: Tree)(implicit src: SourceFile): Block = new Block(stats, expr) + def If(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new If(cond, thenp, elsep) + def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new InlineIf(cond, thenp, elsep) + def Closure(env: List[Tree], meth: Tree, tpt: Tree)(implicit src: SourceFile): Closure = new Closure(env, 
meth, tpt) + def Match(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new Match(selector, cases) + def InlineMatch(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new InlineMatch(selector, cases) + def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit src: SourceFile): CaseDef = new CaseDef(pat, guard, body) + def Labeled(bind: Bind, expr: Tree)(implicit src: SourceFile): Labeled = new Labeled(bind, expr) + def Return(expr: Tree, from: Tree)(implicit src: SourceFile): Return = new Return(expr, from) + def WhileDo(cond: Tree, body: Tree)(implicit src: SourceFile): WhileDo = new WhileDo(cond, body) + def Try(expr: Tree, cases: List[CaseDef], finalizer: Tree)(implicit src: SourceFile): Try = new Try(expr, cases, finalizer) + def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): SeqLiteral = new SeqLiteral(elems, elemtpt) + def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt) + def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) + def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() + def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() + def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) + def RefinedTypeTree(tpt: Tree, refinements: List[Tree])(implicit src: SourceFile): RefinedTypeTree = new RefinedTypeTree(tpt, refinements) + def AppliedTypeTree(tpt: Tree, args: List[Tree])(implicit src: SourceFile): AppliedTypeTree = new AppliedTypeTree(tpt, args) + def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(implicit src: SourceFile): LambdaTypeTree = new LambdaTypeTree(tparams, body) + def TermLambdaTypeTree(params: List[ValDef], body: Tree)(implicit src: SourceFile): TermLambdaTypeTree = new TermLambdaTypeTree(params, 
body) + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): MatchTypeTree = new MatchTypeTree(bound, selector, cases) + def ByNameTypeTree(result: Tree)(implicit src: SourceFile): ByNameTypeTree = new ByNameTypeTree(result) + def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(implicit src: SourceFile): TypeBoundsTree = new TypeBoundsTree(lo, hi, alias) + def Bind(name: Name, body: Tree)(implicit src: SourceFile): Bind = new Bind(name, body) + def Alternative(trees: List[Tree])(implicit src: SourceFile): Alternative = new Alternative(trees) + def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree])(implicit src: SourceFile): UnApply = new UnApply(fun, implicits, patterns) + def ValDef(name: TermName, tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): ValDef = new ValDef(name, tpt, rhs) + def DefDef(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): DefDef = new DefDef(name, paramss, tpt, rhs) + def TypeDef(name: TypeName, rhs: Tree)(implicit src: SourceFile): TypeDef = new TypeDef(name, rhs) + def Template(constr: DefDef, parents: List[Tree], derived: List[Tree], self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = + if (derived.isEmpty) new Template(constr, parents, self, body) + else new DerivingTemplate(constr, parents ++ derived, self, body, derived.length) + def Import(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Import = new Import(expr, selectors) + def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) + def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) + def Annotated(arg: Tree, annot: Tree)(implicit src: SourceFile): Annotated = new Annotated(arg, annot) + def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(implicit src: SourceFile): Hole 
= new Hole(isTermHole, idx, args, content, tpt) + + // ------ Additional creation methods for untyped only ----------------- + + /** new T(args1)...(args_n) + * ==> + * new T.[Ts](args1)...(args_n) + * + * where `Ts` are the class type arguments of `T` or its class type alias. + * Note: we also keep any type arguments as parts of `T`. This is necessary to allow + * navigation into these arguments from the IDE, and to do the right thing in + * PrepareInlineable. + */ + def New(tpt: Tree, argss: List[List[Tree]])(using Context): Tree = + ensureApplied(argss.foldLeft(makeNew(tpt))(Apply(_, _))) + + /** A new expression with constrictor and possibly type arguments. See + * `New(tpt, argss)` for details. + */ + def makeNew(tpt: Tree)(using Context): Tree = { + val (tycon, targs) = tpt match { + case AppliedTypeTree(tycon, targs) => + (tycon, targs) + case TypedSplice(tpt1: tpd.Tree) => + val argTypes = tpt1.tpe.dealias.argTypesLo + def wrap(tpe: Type) = TypeTree(tpe).withSpan(tpt.span) + (tpt, argTypes.map(wrap)) + case _ => + (tpt, Nil) + } + val nu: Tree = Select(New(tycon), nme.CONSTRUCTOR) + if (targs.nonEmpty) TypeApply(nu, targs) else nu + } + + def Block(stat: Tree, expr: Tree)(implicit src: SourceFile): Block = + Block(stat :: Nil, expr) + + def Apply(fn: Tree, arg: Tree)(implicit src: SourceFile): Apply = + Apply(fn, arg :: Nil) + + def ensureApplied(tpt: Tree)(implicit src: SourceFile): Tree = tpt match { + case _: Apply => tpt + case _ => Apply(tpt, Nil) + } + + def AppliedTypeTree(tpt: Tree, arg: Tree)(implicit src: SourceFile): AppliedTypeTree = + AppliedTypeTree(tpt, arg :: Nil) + + def TypeTree(tpe: Type)(using Context): TypedSplice = + TypedSplice(TypeTree().withTypeUnchecked(tpe)) + + def InferredTypeTree(tpe: Type)(using Context): TypedSplice = + TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) + + def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())) + + def ref(tp: NamedType)(using Context): Tree = + 
TypedSplice(tpd.ref(tp)) + + def ref(sym: Symbol)(using Context): Tree = + TypedSplice(tpd.ref(sym)) + + def rawRef(tp: NamedType)(using Context): Tree = + if tp.typeParams.isEmpty then ref(tp) + else AppliedTypeTree(ref(tp), tp.typeParams.map(_ => WildcardTypeBoundsTree())) + + def rootDot(name: Name)(implicit src: SourceFile): Select = Select(Ident(nme.ROOTPKG), name) + def scalaDot(name: Name)(implicit src: SourceFile): Select = Select(rootDot(nme.scala), name) + def scalaAnnotationDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.annotation), name) + def scalaRuntimeDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.runtime), name) + def scalaUnit(implicit src: SourceFile): Select = scalaDot(tpnme.Unit) + def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) + def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) + + def captureRoot(using Context): Select = + Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = + DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) + + def emptyConstructor(using Context): DefDef = + makeConstructor(Nil, Nil) + + def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = + ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) + + def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { + case t :: Nil => Parens(t) + case _ => Tuple(ts) + } + + def makeTuple(ts: List[Tree])(using Context): Tree = ts match { + case t :: Nil => t + case _ => Tuple(ts) + } + + def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = + AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) + + def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers, isBackquoted: Boolean = false)(using Context): ValDef = { + val vdef = ValDef(pname, tpe, EmptyTree) + if 
(isBackquoted) vdef.pushAttachment(Backquoted, ()) + vdef.withMods(mods | Param) + } + + def makeSyntheticParameter(n: Int = 1, tpt: Tree | Null = null, flags: FlagSet = SyntheticTermParam)(using Context): ValDef = + ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, EmptyTree) + .withFlags(flags) + + def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = + params match + case Nil => tpt + case (vd: ValDef) :: _ => TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], tpt) + case _ => LambdaTypeTree(params.asInstanceOf[List[TypeDef]], tpt) + + def lambdaAbstractAll(paramss: List[List[ValDef] | List[TypeDef]], tpt: Tree)(using Context): Tree = + paramss.foldRight(tpt)(lambdaAbstract) + + /** A reference to given definition. If definition is a repeated + * parameter, the reference will be a repeated argument. + */ + def refOfDef(tree: MemberDef)(using Context): Tree = tree match { + case ValDef(_, PostfixOp(_, Ident(tpnme.raw.STAR)), _) => repeated(Ident(tree.name)) + case _ => Ident(tree.name) + } + + /** A repeated argument such as `arg: _*` */ + def repeated(arg: Tree)(using Context): Typed = Typed(arg, Ident(tpnme.WILDCARD_STAR)) + + +// --------- Copier/Transformer/Accumulator classes for untyped trees ----- + + def localCtx(tree: Tree)(using Context): Context = ctx + + override val cpy: UntypedTreeCopier = UntypedTreeCopier() + + class UntypedTreeCopier extends TreeCopier { + + def postProcess(tree: Tree, copied: Tree): copied.ThisTree[Untyped] = + copied.asInstanceOf[copied.ThisTree[Untyped]] + + def postProcess(tree: Tree, copied: MemberDef): copied.ThisTree[Untyped] = { + tree match { + case tree: MemberDef => copied.withMods(tree.rawMods) + case _ => copied + } + }.asInstanceOf[copied.ThisTree[Untyped]] + + def ModuleDef(tree: Tree)(name: TermName, impl: Template)(using Context): ModuleDef = tree match { + case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree + case _ => 
finalize(tree, untpd.ModuleDef(name, impl)(tree.source)) + } + def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree)(using Context): TermTree = tree match { + case tree: ParsedTry if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree + case _ => finalize(tree, untpd.ParsedTry(expr, handler, finalizer)(tree.source)) + } + def SymbolLit(tree: Tree)(str: String)(using Context): TermTree = tree match { + case tree: SymbolLit if str == tree.str => tree + case _ => finalize(tree, untpd.SymbolLit(str)(tree.source)) + } + def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree])(using Context): TermTree = tree match { + case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree + case _ => finalize(tree, untpd.InterpolatedString(id, segments)(tree.source)) + } + def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match { + case tree: Function if (args eq tree.args) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.Function(args, body)(tree.source)) + } + def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match { + case tree: PolyFunction if (targs eq tree.targs) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.PolyFunction(targs, body)(tree.source)) + } + def InfixOp(tree: Tree)(left: Tree, op: Ident, right: Tree)(using Context): Tree = tree match { + case tree: InfixOp if (left eq tree.left) && (op eq tree.op) && (right eq tree.right) => tree + case _ => finalize(tree, untpd.InfixOp(left, op, right)(tree.source)) + } + def PostfixOp(tree: Tree)(od: Tree, op: Ident)(using Context): Tree = tree match { + case tree: PostfixOp if (od eq tree.od) && (op eq tree.op) => tree + case _ => finalize(tree, untpd.PostfixOp(od, op)(tree.source)) + } + def PrefixOp(tree: Tree)(op: Ident, od: Tree)(using Context): Tree = tree match { + case tree: PrefixOp if (op eq tree.op) && (od eq tree.od) => 
tree + case _ => finalize(tree, untpd.PrefixOp(op, od)(tree.source)) + } + def Parens(tree: Tree)(t: Tree)(using Context): ProxyTree = tree match { + case tree: Parens if t eq tree.t => tree + case _ => finalize(tree, untpd.Parens(t)(tree.source)) + } + def Tuple(tree: Tree)(trees: List[Tree])(using Context): Tree = tree match { + case tree: Tuple if trees eq tree.trees => tree + case _ => finalize(tree, untpd.Tuple(trees)(tree.source)) + } + def Throw(tree: Tree)(expr: Tree)(using Context): TermTree = tree match { + case tree: Throw if expr eq tree.expr => tree + case _ => finalize(tree, untpd.Throw(expr)(tree.source)) + } + def Quote(tree: Tree)(quoted: Tree)(using Context): Tree = tree match { + case tree: Quote if quoted eq tree.quoted => tree + case _ => finalize(tree, untpd.Quote(quoted)(tree.source)) + } + def Splice(tree: Tree)(expr: Tree)(using Context): Tree = tree match { + case tree: Splice if expr eq tree.expr => tree + case _ => finalize(tree, untpd.Splice(expr)(tree.source)) + } + def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { + case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.ForYield(enums, expr)(tree.source)) + } + def ForDo(tree: Tree)(enums: List[Tree], body: Tree)(using Context): TermTree = tree match { + case tree: ForDo if (enums eq tree.enums) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.ForDo(enums, body)(tree.source)) + } + def GenFrom(tree: Tree)(pat: Tree, expr: Tree, checkMode: GenCheckMode)(using Context): Tree = tree match { + case tree: GenFrom if (pat eq tree.pat) && (expr eq tree.expr) && (checkMode == tree.checkMode) => tree + case _ => finalize(tree, untpd.GenFrom(pat, expr, checkMode)(tree.source)) + } + def GenAlias(tree: Tree)(pat: Tree, expr: Tree)(using Context): Tree = tree match { + case tree: GenAlias if (pat eq tree.pat) && (expr eq tree.expr) => tree + case _ => finalize(tree, 
untpd.GenAlias(pat, expr)(tree.source)) + } + def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree])(using Context): TypTree = tree match { + case tree: ContextBounds if (bounds eq tree.bounds) && (cxBounds eq tree.cxBounds) => tree + case _ => finalize(tree, untpd.ContextBounds(bounds, cxBounds)(tree.source)) + } + def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(using Context): Tree = tree match { + case tree: PatDef if (mods eq tree.mods) && (pats eq tree.pats) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.PatDef(mods, pats, tpt, rhs)(tree.source)) + } + def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match + case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree + case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { + case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree + case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) + } + def Number(tree: Tree)(digits: String, kind: NumberKind)(using Context): Tree = tree match { + case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree + case _ => finalize(tree, untpd.Number(digits, kind)) + } + def CapturingTypeTree(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match + case tree: CapturingTypeTree if (refs eq tree.refs) && (parent eq tree.parent) => tree + case _ => finalize(tree, untpd.CapturingTypeTree(refs, parent)) + + def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match { + case tree: TypedSplice if splice `eq` tree.splice => tree + case _ => finalize(tree, untpd.TypedSplice(splice)(using ctx)) + } + def MacroTree(tree: Tree)(expr: 
Tree)(using Context): Tree = tree match { + case tree: MacroTree if expr `eq` tree.expr => tree + case _ => finalize(tree, untpd.MacroTree(expr)(tree.source)) + } + } + + abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy) { + override def transformMoreCases(tree: Tree)(using Context): Tree = tree match { + case ModuleDef(name, impl) => + cpy.ModuleDef(tree)(name, transformSub(impl)) + case tree: DerivingTemplate => + cpy.Template(tree)(transformSub(tree.constr), transform(tree.parents), + transform(tree.derived), transformSub(tree.self), transformStats(tree.body, tree.symbol)) + case ParsedTry(expr, handler, finalizer) => + cpy.ParsedTry(tree)(transform(expr), transform(handler), transform(finalizer)) + case SymbolLit(str) => + cpy.SymbolLit(tree)(str) + case InterpolatedString(id, segments) => + cpy.InterpolatedString(tree)(id, segments.mapConserve(transform)) + case Function(args, body) => + cpy.Function(tree)(transform(args), transform(body)) + case PolyFunction(targs, body) => + cpy.PolyFunction(tree)(transform(targs), transform(body)) + case InfixOp(left, op, right) => + cpy.InfixOp(tree)(transform(left), op, transform(right)) + case PostfixOp(od, op) => + cpy.PostfixOp(tree)(transform(od), op) + case PrefixOp(op, od) => + cpy.PrefixOp(tree)(op, transform(od)) + case Parens(t) => + cpy.Parens(tree)(transform(t)) + case Tuple(trees) => + cpy.Tuple(tree)(transform(trees)) + case Throw(expr) => + cpy.Throw(tree)(transform(expr)) + case Quote(t) => + cpy.Quote(tree)(transform(t)) + case Splice(expr) => + cpy.Splice(tree)(transform(expr)) + case ForYield(enums, expr) => + cpy.ForYield(tree)(transform(enums), transform(expr)) + case ForDo(enums, body) => + cpy.ForDo(tree)(transform(enums), transform(body)) + case GenFrom(pat, expr, checkMode) => + cpy.GenFrom(tree)(transform(pat), transform(expr), checkMode) + case GenAlias(pat, expr) => + cpy.GenAlias(tree)(transform(pat), transform(expr)) + case ContextBounds(bounds, cxBounds) 
=> + cpy.ContextBounds(tree)(transformSub(bounds), transform(cxBounds)) + case PatDef(mods, pats, tpt, rhs) => + cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) + case ExtMethods(paramss, methods) => + cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case ImportSelector(imported, renamed, bound) => + cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) + case Number(_, _) | TypedSplice(_) => + tree + case MacroTree(expr) => + cpy.MacroTree(tree)(transform(expr)) + case CapturingTypeTree(refs, parent) => + cpy.CapturingTypeTree(tree)(transform(refs), transform(parent)) + case _ => + super.transformMoreCases(tree) + } + } + + abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { + self: UntypedTreeAccumulator[X] @retains(caps.*) => + override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { + case ModuleDef(name, impl) => + this(x, impl) + case tree: DerivingTemplate => + this(this(this(this(this(x, tree.constr), tree.parents), tree.derived), tree.self), tree.body) + case ParsedTry(expr, handler, finalizer) => + this(this(this(x, expr), handler), finalizer) + case SymbolLit(str) => + x + case InterpolatedString(id, segments) => + this(x, segments) + case Function(args, body) => + this(this(x, args), body) + case PolyFunction(targs, body) => + this(this(x, targs), body) + case InfixOp(left, op, right) => + this(this(this(x, left), op), right) + case PostfixOp(od, op) => + this(this(x, od), op) + case PrefixOp(op, od) => + this(this(x, op), od) + case Parens(t) => + this(x, t) + case Tuple(trees) => + this(x, trees) + case Throw(expr) => + this(x, expr) + case Quote(t) => + this(x, t) + case Splice(expr) => + this(x, expr) + case ForYield(enums, expr) => + this(this(x, enums), expr) + case ForDo(enums, body) => + this(this(x, enums), body) + case GenFrom(pat, expr, _) => + this(this(x, pat), expr) + case GenAlias(pat, expr) => + this(this(x, pat), expr) 
+ case ContextBounds(bounds, cxBounds) => + this(this(x, bounds), cxBounds) + case PatDef(mods, pats, tpt, rhs) => + this(this(this(x, pats), tpt), rhs) + case ExtMethods(paramss, methods) => + this(paramss.foldLeft(x)(apply), methods) + case ImportSelector(imported, renamed, bound) => + this(this(this(x, imported), renamed), bound) + case Number(_, _) => + x + case TypedSplice(splice) => + this(x, splice) + case MacroTree(expr) => + this(x, expr) + case CapturingTypeTree(refs, parent) => + this(this(x, refs), parent) + case _ => + super.foldMoreCases(x, tree) + } + } + + abstract class UntypedTreeTraverser extends UntypedTreeAccumulator[Unit] { + def traverse(tree: Tree)(using Context): Unit + def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) + protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order */ + class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) + } + + /** Is there a subtree of this tree that satisfies predicate `p`? 
*/ + extension (tree: Tree) def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { + val acc = new UntypedTreeAccumulator[Boolean] { + def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) + } + acc(false, tree) + } + + protected def FunProto(args: List[Tree], resType: Type)(using Context) = + ProtoTypes.FunProto(args, resType)(ctx.typer, ApplyKind.Regular) +} diff --git a/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala new file mode 100644 index 000000000000..56b3f5ba5047 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala @@ -0,0 +1,19 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.* + +/** A one-element cache for the boxed version of an unboxed capturing type */ +class BoxedTypeCache: + private var boxed: Type = compiletime.uninitialized + private var unboxed: Type = NoType + + def apply(tp: AnnotatedType)(using Context): Type = + if tp ne unboxed then + unboxed = tp + val CapturingType(parent, refs) = tp: @unchecked + boxed = CapturingType(parent, refs, boxed = true) + boxed +end BoxedTypeCache \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala new file mode 100644 index 000000000000..fd89159e2076 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala @@ -0,0 +1,76 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.* +import ast.Trees.* +import ast.{tpd, untpd} +import Decorators.* +import config.Printers.capt +import printing.Printer +import printing.Texts.Text + +/** An annotation representing a capture set and whether it is boxed. + * It simulates a normal @retains annotation except that it is more efficient, + * supports variables as capture sets, and adds a `boxed` flag. 
+ * These annotations are created during capture checking. Before that + * there are only regular @retains and @retainsByName annotations. + * @param refs the capture set + * @param boxed whether the type carrying the annotation is boxed + * @param cls the underlying class (either annotation.retains or annotation.retainsByName) + */ +case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) extends Annotation: + import CaptureAnnotation.* + import tpd.* + + /** A cache for boxed version of a capturing type with this annotation */ + val boxedType = BoxedTypeCache() + + /** Reconstitute annotation tree from capture set */ + override def tree(using Context) = + val elems = refs.elems.toList.map { + case cr: TermRef => ref(cr) + case cr: TermParamRef => untpd.Ident(cr.paramName).withType(cr) + case cr: ThisType => This(cr.cls) + } + val arg = repeated(elems, TypeTree(defn.AnyType)) + New(symbol.typeRef, arg :: Nil) + + override def symbol(using Context) = cls + + override def derivedAnnotation(tree: Tree)(using Context): Annotation = this + + def derivedAnnotation(refs: CaptureSet, boxed: Boolean)(using Context): Annotation = + if (this.refs eq refs) && (this.boxed == boxed) then this + else CaptureAnnotation(refs, boxed)(cls) + + override def sameAnnotation(that: Annotation)(using Context): Boolean = that match + case CaptureAnnotation(refs, boxed) => + this.refs == refs && this.boxed == boxed && this.symbol == that.symbol + case _ => false + + override def mapWith(tm: TypeMap)(using Context) = + val elems = refs.elems.toList + val elems1 = elems.mapConserve(tm) + if elems1 eq elems then this + else if elems1.forall(_.isInstanceOf[CaptureRef]) + then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[CaptureRef]]*), boxed) + else EmptyAnnotation + + override def refersToParamOf(tl: TermLambda)(using Context): Boolean = + refs.elems.exists { + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false + } + + override def toText(printer: Printer): 
Text = refs.toText(printer) + + override def hash: Int = + (refs.hashCode << 1) | (if boxed then 1 else 0) + + override def eql(that: Annotation) = that match + case that: CaptureAnnotation => (this.refs eq that.refs) && (this.boxed == that.boxed) + case _ => false + +end CaptureAnnotation diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala new file mode 100644 index 000000000000..3dfd1324ae1e --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala @@ -0,0 +1,253 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* +import ast.{tpd, untpd} +import Decorators.*, NameOps.* +import config.Printers.capt +import util.Property.Key +import tpd.* +import config.Feature + +private val Captures: Key[CaptureSet] = Key() +private val BoxedType: Key[BoxedTypeCache] = Key() + +/** The arguments of a @retains or @retainsByName annotation */ +private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree match + case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems + case _ => Nil + +/** An exception thrown if a @retains argument is not syntactically a CaptureRef */ +class IllegalCaptureRef(tpe: Type) extends Exception + +extension (tree: Tree) + + /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ + def toCaptureRef(using Context): CaptureRef = tree.tpe match + case ref: CaptureRef => ref + case tpe => throw IllegalCaptureRef(tpe) + + /** Convert a @retains or @retainsByName annotation tree to the capture set it represents. + * For efficience, the result is cached as an Attachment on the tree. 
+ */ + def toCaptureSet(using Context): CaptureSet = + tree.getAttachment(Captures) match + case Some(refs) => refs + case None => + val refs = CaptureSet(retainedElems(tree).map(_.toCaptureRef)*) + .showing(i"toCaptureSet $tree --> $result", capt) + tree.putAttachment(Captures, refs) + refs + + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Tree = + if Feature.pureFunsEnabledSomewhere then + val rbn = defn.RetainsByNameAnnot + Annotated(tree, + New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( + Typed( + SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), + TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) + ) + ) + ) + else tree + +extension (tp: Type) + + /** @pre `tp` is a CapturingType */ + def derivedCapturingType(parent: Type, refs: CaptureSet)(using Context): Type = tp match + case tp @ CapturingType(p, r) => + if (parent eq p) && (refs eq r) then tp + else CapturingType(parent, refs, tp.isBoxed) + + /** If this is a unboxed capturing type with nonempty capture set, its boxed version. + * Or, if type is a TypeBounds of capturing types, the version where the bounds are boxed. + * The identity for all other types. 
+ */ + def boxed(using Context): Type = tp.dealias match + case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => + tp.annot match + case ann: CaptureAnnotation => + ann.boxedType(tp) + case ann => + ann.tree.getAttachment(BoxedType) match + case None => ann.tree.putAttachment(BoxedType, BoxedTypeCache()) + case _ => + ann.tree.attachment(BoxedType)(tp) + case tp: RealTypeBounds => + tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) + case _ => + tp + + /** If `sym` is a type parameter, the boxed version of `tp`, otherwise `tp` */ + def boxedIfTypeParam(sym: Symbol)(using Context) = + if sym.is(TypeParam) then tp.boxed else tp + + /** The boxed version of `tp`, unless `tycon` is a function symbol */ + def boxedUnlessFun(tycon: Type)(using Context) = + if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionSymbol(tycon.typeSymbol) + then tp + else tp.boxed + + /** The capture set consisting of all top-level captures of `tp` that appear under a box. + * Unlike for `boxed` this also considers parents of capture types, unions and + * intersections, and type proxies other than abstract types. + */ + def boxedCaptureSet(using Context): CaptureSet = + def getBoxed(tp: Type): CaptureSet = tp match + case tp @ CapturingType(parent, refs) => + val pcs = getBoxed(parent) + if tp.isBoxed then refs ++ pcs else pcs + case tp: TypeRef if tp.symbol.isAbstractType => CaptureSet.empty + case tp: TypeProxy => getBoxed(tp.superType) + case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) + case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) + case _ => CaptureSet.empty + getBoxed(tp) + + /** Is the boxedCaptureSet of this type nonempty? */ + def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty + + /** If this type is a capturing type, the version with boxed status as given by `boxed`. + * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing + * type that captures the TermRef.
+ */ + def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match + case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => + val refs1 = tp match + case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet + case _ => refs + CapturingType(parent, refs1, boxed) + case _ => + tp + + /** Map capturing type to their parents. Capturing types accessible + * via dealising are also stripped. + */ + def stripCapturing(using Context): Type = tp.dealiasKeepAnnots match + case CapturingType(parent, _) => + parent.stripCapturing + case atd @ AnnotatedType(parent, annot) => + atd.derivedAnnotatedType(parent.stripCapturing, annot) + case _ => + tp + + /** Under pureFunctions, map regular function type to impure function type + */ + def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match + case AppliedType(fn, args) + if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => + val fname = fn.typeSymbol.name + defn.FunctionType( + fname.functionArity, + isContextual = fname.isContextFunction, + isErased = fname.isErasedFunction, + isImpure = true).appliedTo(args) + case _ => + tp + + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Type = + if Feature.pureFunsEnabledSomewhere then + AnnotatedType(tp, + CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) + else + tp + + def isCapturingType(using Context): Boolean = + tp match + case CapturingType(_, _) => true + case _ => false + + /** Is type known to be always pure by its class structure, + * so that adding a capture set to it would not make sense? 
+ */ + def isAlwaysPure(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.isPureClass + else tp.superType.isAlwaysPure + case CapturingType(parent, refs) => + parent.isAlwaysPure || refs.isAlwaysEmpty + case tp: TypeProxy => + tp.superType.isAlwaysPure + case tp: AndType => + tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure + case tp: OrType => + tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure + case _ => + false + +extension (sym: Symbol) + + /** A class is pure if: + * - one of its base types has an explicitly declared self type with an empty capture set + * - or it is a value class + * - or it is Nothing or Null + */ + def isPureClass(using Context): Boolean = sym match + case cls: ClassSymbol => + val AnyValClass = defn.AnyValClass + cls.baseClasses.exists(bc => + bc == AnyValClass + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + || cls == defn.NothingClass + || cls == defn.NullClass + case _ => + false + + /** Does this symbol allow results carrying the universal capability? + * Currently this is true only for function type applies (since their + * results are unboxed) and `erasedValue` since this function is magic in + * that it allows one to conjure global capabilities from nothing (aside: can we find a + * more controlled way to achieve this?). + * But it could be generalized to other functions so that they can take capability + * classes as arguments. + */ + def allowsRootCapture(using Context): Boolean = + sym == defn.Compiletime_erasedValue + || defn.isFunctionClass(sym.maybeOwner) + + /** When applying `sym`, would the result type be unboxed? + * This is the case if the result type contains a top-level reference to an enclosing + * class or method type parameter and the method does not allow root capture. + * If the type parameter is instantiated to a boxed type, that type would + * have to be unboxed in the method's result.
+ */ + def unboxesResult(using Context): Boolean = + def containsEnclTypeParam(tp: Type): Boolean = tp.strippedDealias match + case tp @ TypeRef(pre: ThisType, _) => tp.symbol.is(Param) + case tp: TypeParamRef => true + case tp: AndOrType => containsEnclTypeParam(tp.tp1) || containsEnclTypeParam(tp.tp2) + case tp: RefinedType => containsEnclTypeParam(tp.parent) || containsEnclTypeParam(tp.refinedInfo) + case _ => false + containsEnclTypeParam(sym.info.finalResultType) + && !sym.allowsRootCapture + && sym != defn.Caps_unsafeBox + && sym != defn.Caps_unsafeUnbox + +extension (tp: AnnotatedType) + /** Is this a boxed capturing type? */ + def isBoxed(using Context): Boolean = tp.annot match + case ann: CaptureAnnotation => ann.boxed + case _ => false + +extension (ts: List[Type]) + /** Equivalent to ts.mapconserve(_.boxedUnlessFun(tycon)) but more efficient where + * it is the identity. + */ + def boxedUnlessFun(tycon: Type)(using Context) = + if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionClass(tycon.typeSymbol) + then ts + else ts.mapconserve(_.boxed) + diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala new file mode 100644 index 000000000000..48ff614f2910 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala @@ -0,0 +1,906 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Flags.*, Contexts.*, Decorators.* +import config.Printers.capt +import Annotations.Annotation +import annotation.threadUnsafe +import annotation.constructorOnly +import annotation.internal.sharable +import reporting.trace +import printing.{Showable, Printer} +import printing.Texts.* +import util.{SimpleIdentitySet, Property} +import util.common.alwaysTrue +import scala.collection.mutable +import config.Config.ccAllowUnsoundMaps +import language.experimental.pureFunctions + +/** A class for capture sets. Capture sets can be constants or variables. 
+ * Capture sets support inclusion constraints <:< where <:< is subcapturing. + * + * They also allow + * - mapping with functions from elements to capture sets + * - filtering with predicates on elements + * - intersecting two capture sets + * + * That is, constraints can be of the forms + * + * cs1 <:< cs2 + * cs1 = ∪ {f(x) | x ∈ cs2} where f is a function from capture references to capture sets. + * cs1 = ∪ {x | x ∈ cs2, p(x)} where p is a predicate on capture references + * cs1 = cs2 ∩ cs2 + * + * We call the resulting constraint system "monadic set constraints". + * To support capture propagation across maps, mappings are supported only + * if the mapped function is either a bijection or if it is idempotent + * on capture references (c.f. doc comment on `map` below). + */ +sealed abstract class CaptureSet extends Showable, caps.Pure: + import CaptureSet.* + + /** The elements of this capture set. For capture variables, + * the elements known so far. + */ + def elems: Refs + + /** Is this capture set constant (i.e. not an unsolved capture variable)? + * Solved capture variables count as constant. + */ + def isConst: Boolean + + /** Is this capture set always empty? For unsolved capture variables, returns + * always false. + */ + def isAlwaysEmpty: Boolean + + /** Is this capture set definitely non-empty? */ + final def isNotEmpty: Boolean = !elems.isEmpty + + /** Convert to Const. @pre: isConst */ + def asConst: Const = this match + case c: Const => c + case v: Var => + assert(v.isConst) + Const(v.elems) + + /** Cast to variable. @pre: !isConst */ + def asVar: Var = + assert(!isConst) + asInstanceOf[Var] + + /** Does this capture set contain the root reference `*` as element? */ + final def isUniversal(using Context) = + elems.exists { + case ref: TermRef => ref.symbol == defn.captureRoot + case _ => false + } + + /** Add new elements to this capture set if allowed. + * @pre `newElems` is not empty and does not overlap with `this.elems`.
+ * Constant capture sets never allow to add new elements. + * Variables allow it if and only if the new elements can be included + * in all their dependent sets. + * @param origin The set where the elements come from, or `empty` if not known. + * @return CompareResult.OK if elements were added, or a conflicting + * capture set that prevents addition otherwise. + */ + protected def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult + + /** If this is a variable, add `cs` as a dependent set */ + protected def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult + + /** If `cs` is a variable, add this capture set as one of its dependent sets */ + protected def addAsDependentTo(cs: CaptureSet)(using Context): this.type = + cs.addDependent(this)(using ctx, UnrecordedState) + this + + /** Try to include all references of `elems` that are not yet accounted for by this + * capture set. Inclusion is via `addNewElems`. + * @param origin The set where the elements come from, or `empty` if not known. + * @return CompareResult.OK if all unaccounted elements could be added, + * capture set that prevents addition otherwise. 
+ */ + protected final def tryInclude(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val unaccounted = elems.filter(!accountsFor(_)) + if unaccounted.isEmpty then CompareResult.OK + else addNewElems(unaccounted, origin) + + /** Equivalent to `tryInclude({elem}, origin)`, but more efficient */ + protected final def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + if accountsFor(elem) then CompareResult.OK + else addNewElems(elem.singletonCaptureSet.elems, origin) + + /* x subsumes y if x is the same as y, or x is a this reference and y refers to a field of x */ + extension (x: CaptureRef) private def subsumes(y: CaptureRef) = + (x eq y) + || y.match + case y: TermRef => y.prefix eq x + case _ => false + + /** {x} <:< this where <:< is subcapturing, but treating all variables + * as frozen. + */ + def accountsFor(x: CaptureRef)(using Context): Boolean = + reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true) { + elems.exists(_.subsumes(x)) + || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + } + + /** A more optimistic version of accountsFor, which does not take variable supersets + * of the `x` reference into account. A set might account for `x` if it accounts + * for `x` in a state where we assume all supersets of `x` have just the elements + * known at this point. On the other hand if x's capture set has no known elements, + * a set `cs` might account for `x` only if it subsumes `x` or it contains the + * root capability `*`. 
+ */ + def mightAccountFor(x: CaptureRef)(using Context): Boolean = + reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { + elems.exists(elem => elem.subsumes(x) || elem.isRootCapability) + || !x.isRootCapability + && { + val elems = x.captureSetOfInfo.elems + !elems.isEmpty && elems.forall(mightAccountFor) + } + } + + /** A more optimistic version of subCaptures used to choose one of two typing rules + * for selections and applications. `cs1 mightSubcapture cs2` if `cs2` might account for + * every element currently known to be in `cs1`. + */ + def mightSubcapture(that: CaptureSet)(using Context): Boolean = + elems.forall(that.mightAccountFor) + + /** The subcapturing test. + * @param frozen if true, no new variables or dependent sets are allowed to + * be added when making this test. An attempt to add either + * will result in failure. + */ + final def subCaptures(that: CaptureSet, frozen: Boolean)(using Context): CompareResult = + subCaptures(that)(using ctx, if frozen then FrozenState else VarState()) + + /** The subcapturing test, using a given VarState */ + private def subCaptures(that: CaptureSet)(using Context, VarState): CompareResult = + def recur(elems: List[CaptureRef]): CompareResult = elems match + case elem :: elems1 => + var result = that.tryInclude(elem, this) + if !result.isOK && !elem.isRootCapability && summon[VarState] != FrozenState then + result = elem.captureSetOfInfo.subCaptures(that) + if result.isOK then + recur(elems1) + else + varState.rollBack() + result + case Nil => + addDependent(that) + recur(elems.toList) + .showing(i"subcaptures $this <:< $that = ${result.show}", capt) + + /** Two capture sets are considered =:= equal if they mutually subcapture each other + * in a frozen state. 
+ */ + def =:= (that: CaptureSet)(using Context): Boolean = + this.subCaptures(that, frozen = true).isOK + && that.subCaptures(this, frozen = true).isOK + + /** The smallest capture set (via <:<) that is a superset of both + * `this` and `that` + */ + def ++ (that: CaptureSet)(using Context): CaptureSet = + if this.subCaptures(that, frozen = true).isOK then that + else if that.subCaptures(this, frozen = true).isOK then this + else if this.isConst && that.isConst then Const(this.elems ++ that.elems) + else Var(this.elems ++ that.elems).addAsDependentTo(this).addAsDependentTo(that) + + /** The smallest superset (via <:<) of this capture set that also contains `ref`. + */ + def + (ref: CaptureRef)(using Context): CaptureSet = + this ++ ref.singletonCaptureSet + + /** The largest capture set (via <:<) that is a subset of both `this` and `that` + */ + def **(that: CaptureSet)(using Context): CaptureSet = + if this.subCaptures(that, frozen = true).isOK then this + else if that.subCaptures(this, frozen = true).isOK then that + else if this.isConst && that.isConst then Const(elemIntersection(this, that)) + else Intersected(this, that) + + /** The largest subset (via <:<) of this capture set that does not account for + * any of the elements in the constant capture set `that` + */ + def -- (that: CaptureSet.Const)(using Context): CaptureSet = + val elems1 = elems.filter(!that.accountsFor(_)) + if elems1.size == elems.size then this + else if this.isConst then Const(elems1) + else Diff(asVar, that) + + /** The largest subset (via <:<) of this capture set that does not account for `ref` */ + def - (ref: CaptureRef)(using Context): CaptureSet = + this -- ref.singletonCaptureSet + + /** The largest subset (via <:<) of this capture set that only contains elements + * for which `p` is true. 
+ */ + def filter(p: CaptureRef -> Boolean)(using Context): CaptureSet = + if this.isConst then + val elems1 = elems.filter(p) + if elems1 == elems then this + else Const(elems.filter(p)) + else Filtered(asVar, p) + + /** Capture set obtained by applying `tm` to all elements of the current capture set + * and joining the results. If the current capture set is a variable, the same + * transformation is applied to all future additions of new elements. + * + * Note: We have a problem how we handle the situation where we have a mapped set + * + * cs2 = tm(cs1) + * + * and then the propagation solver adds a new element `x` to `cs2`. What do we + * know in this case about `cs1`? We can answer this question in a sound way only + * if `tm` is a bijection on capture references or it is idempotent on capture references. + * (see definition in IdempotentCapRefMap). + * If `tm` is a bijection we know that `tm^-1(x)` must be in `cs1`. If `tm` is idempotent + * one possible solution is that `x` is in `cs1`, which is what we assume in this case. + * That strategy is sound but not complete. + * + * If `tm` is some other map, we don't know how to handle this case. For now, + * we simply refuse to handle other maps. If they do need to be handled, + * `OtherMapped` provides some approximation to a solution, but it is neither + * sound nor complete. 
+ */ + def map(tm: TypeMap)(using Context): CaptureSet = tm match + case tm: BiTypeMap => + val mappedElems = elems.map(tm.forward) + if isConst then + if mappedElems == elems then this + else Const(mappedElems) + else BiMapped(asVar, tm, mappedElems) + case tm: IdentityCaptRefMap => + this + case _ => + val mapped = mapRefs(elems, tm, tm.variance) + if isConst then + if mapped.isConst && mapped.elems == elems then this + else mapped + else Mapped(asVar, tm, tm.variance, mapped) + + /** A mapping resulting from substituting parameters of a BindingType to a list of types */ + def substParams(tl: BindingType, to: List[Type])(using Context) = + map(Substituters.SubstParamsMap(tl, to)) + + /** Invoke handler if this set has (or later acquires) the root capability `*` */ + def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = + if isUniversal then handler() + this + + /** An upper approximation of this capture set, i.e. a constant set that is + * subcaptured by this set. If the current set is a variable + * it is the intersection of all upper approximations of known supersets + * of the variable. + * The upper approximation is meaningful only if it is constant. If not, + * `upperApprox` can return an arbitrary capture set variable. + * `upperApprox` is used in `solve`. + */ + protected def upperApprox(origin: CaptureSet)(using Context): CaptureSet + + /** Assuming the set this set depends on was just solved to be constant, propagate this info + * to this set. This might result in the set being solved to be constant + * itself. + */ + protected def propagateSolved()(using Context): Unit = () + + /** This capture set with a description that tells where it comes from */ + def withDescription(description: String): CaptureSet + + /** The provided description (using `withDescription`) for this capture set or else "" */ + def description: String + + /** A regular @retains or @retainsByName annotation with the elements of this set as arguments.
*/ + def toRegularAnnotation(cls: Symbol)(using Context): Annotation = + Annotation(CaptureAnnotation(this, boxed = false)(cls).tree) + + override def toText(printer: Printer): Text = + Str("{") ~ Text(elems.toList.map(printer.toTextCaptureRef), ", ") ~ Str("}") ~~ description + +object CaptureSet: + type Refs = SimpleIdentitySet[CaptureRef] + type Vars = SimpleIdentitySet[Var] + type Deps = SimpleIdentitySet[CaptureSet] + + @sharable private var varId = 0 + + /** If set to `true`, capture stack traces that tell us where sets are created */ + private final val debugSets = false + + private val emptySet = SimpleIdentitySet.empty + + /** The empty capture set `{}` */ + val empty: CaptureSet.Const = Const(emptySet) + + /** The universal capture set `{*}` */ + def universal(using Context): CaptureSet = + defn.captureRoot.termRef.singletonCaptureSet + + /** Used as a recursion brake */ + @sharable private[dotc] val Pending = Const(SimpleIdentitySet.empty) + + /** The empty capture set with a description that says it's the self type of an + * exception class.
+ */ + val emptyOfException: CaptureSet.Const = Const(emptySet, "of an exception class") + + def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = + if elems.isEmpty then empty + else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) + + def apply(elems: Refs)(using Context): CaptureSet.Const = + if elems.isEmpty then empty else Const(elems) + + /** The subclass of constant capture sets with given elements `elems` */ + class Const private[CaptureSet] (val elems: Refs, val description: String = "") extends CaptureSet: + def isConst = true + def isAlwaysEmpty = elems.isEmpty + + def addNewElems(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + CompareResult.fail(this) + + def addDependent(cs: CaptureSet)(using Context, VarState) = CompareResult.OK + + def upperApprox(origin: CaptureSet)(using Context): CaptureSet = this + + def withDescription(description: String): Const = Const(elems, description) + + override def toString = elems.toString + end Const + + /** The subclass of capture set variables with given initial elements */ + class Var(initialElems: Refs = emptySet) extends CaptureSet: + + /** A unique identification number for diagnostics */ + val id = + varId += 1 + varId + + /** A variable is solved if it is approximated to a from-then-on constant set. */ + private var isSolved: Boolean = false + + /** The elements currently known to be in the set */ + var elems: Refs = initialElems + + /** The sets currently known to be dependent sets (i.e. new additions to this set + * are propagated to these dependent sets.) + */ + var deps: Deps = emptySet + + def isConst = isSolved + def isAlwaysEmpty = false + + /** A handler to be invoked if the root reference `*` is added to this set. + * The handler is pure in the sense that it will only output diagnostics.
+ */ + var rootAddedHandler: () -> Context ?-> Unit = () => () + + var description: String = "" + + /** Record current elements in given VarState provided it does not yet + * contain an entry for this variable. + */ + private def recordElemsState()(using VarState): Boolean = + varState.getElems(this) match + case None => varState.putElems(this, elems) + case _ => true + + /** Record current dependent sets in given VarState provided it does not yet + * contain an entry for this variable. + */ + private[CaptureSet] def recordDepsState()(using VarState): Boolean = + varState.getDeps(this) match + case None => varState.putDeps(this, deps) + case _ => true + + /** Reset elements to what was recorded in `state` */ + def resetElems()(using state: VarState): Unit = + elems = state.elems(this) + + /** Reset dependent sets to what was recorded in `state` */ + def resetDeps()(using state: VarState): Unit = + deps = state.deps(this) + + def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + if !isConst && recordElemsState() then + elems ++= newElems + if isUniversal then rootAddedHandler() + // assert(id != 2 || elems.size != 2, this) + (CompareResult.OK /: deps) { (r, dep) => + r.andAlso(dep.tryInclude(newElems, this)) + } + else // fail if variable is solved or given VarState is frozen + CompareResult.fail(this) + + def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult = + if (cs eq this) || cs.isUniversal || isConst then + CompareResult.OK + else if recordDepsState() then + deps += cs + CompareResult.OK + else + CompareResult.fail(this) + + override def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = + rootAddedHandler = handler + super.disallowRootCapability(handler) + + private var computingApprox = false + + /** Roughly: the intersection of all constant known supersets of this set. + * The aim is to find an as-good-as-possible constant set that is a superset + * of this set. 
The universal set {*} is a sound fallback. + */ + final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = + if computingApprox then universal + else if isConst then this + else + computingApprox = true + try computeApprox(origin).ensuring(_.isConst) + finally computingApprox = false + + /** The intersection of all upper approximations of dependent sets */ + protected def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + (universal /: deps) { (acc, sup) => acc ** sup.upperApprox(this) } + + /** Widen the variable's elements to its upper approximation and + * mark it as constant from now on. This is used for contra-variant type variables + * in the results of defs and vals. + */ + def solve()(using Context): Unit = + if !isConst then + val approx = upperApprox(empty) + //println(i"solving var $this $approx ${approx.isConst} deps = ${deps.toList}") + val newElems = approx.elems -- elems + if newElems.isEmpty || addNewElems(newElems, empty)(using ctx, VarState()).isOK then + markSolved() + + /** Mark set as solved and propagate this info to all dependent sets */ + def markSolved()(using Context): Unit = + isSolved = true + deps.foreach(_.propagateSolved()) + + def withDescription(description: String): this.type = + this.description = + if this.description.isEmpty then description + else s"${this.description} and $description" + this + + /** Used for diagnostics and debugging: A string that traces the creation + * history of a variable by following source links. Each variable on the + * path is characterized by the variable's id and the first letter of the + * variable's class name. The path ends in a plain variable with letter `V` that + * is not derived from some other variable. 
+ */ + protected def ids(using Context): String = + val trail = this.match + case dv: DerivedVar => dv.source.ids + case _ => "" + s"$id${getClass.getSimpleName.nn.take(1)}$trail" + + /** Adds variables to the ShownVars context property if that exists, which + * establishes a record of all variables printed in an error message. + * Prints variables with ids under -Ycc-debug. + */ + override def toText(printer: Printer): Text = inContext(printer.printerContext) { + for vars <- ctx.property(ShownVars) do vars += this + super.toText(printer) ~ (Str(ids) provided !isConst && ctx.settings.YccDebug.value) + } + + override def toString = s"Var$id$elems" + end Var + + /** A variable that is derived from some other variable via a map or filter. */ + abstract class DerivedVar(initialElems: Refs)(using @constructorOnly ctx: Context) + extends Var(initialElems): + + // For debugging: A trace where a set was created. Note that logically it would make more + // sense to place this variable in Mapped, but that runs afoul of the initialization checker. + val stack = if debugSets && this.isInstanceOf[Mapped] then (new Throwable).getStackTrace().nn.take(20) else null + + /** The variable from which this variable is derived */ + def source: Var + + addAsDependentTo(source) + + override def propagateSolved()(using Context) = + if source.isConst && !isConst then markSolved() + end DerivedVar + + /** A variable that changes when `source` changes, where all additional new elements are mapped + * using ∪ { tm(x) | x <- source.elems }. + * @param source the original set that is mapped + * @param tm the type map, which is assumed to be idempotent on capture refs + * (except if ccUnsoundMaps is enabled) + * @param variance the assumed variance with which types with capture sets of size >= 2 are approximated + * (i.e. co: full capture set, contra: empty set, nonvariant is not allowed.) + * @param initial The initial mappings of source's elements at the point the Mapped set is created.
+ */ + class Mapped private[CaptureSet] + (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) + extends DerivedVar(initial.elems): + addAsDependentTo(initial) // initial mappings could change by propagation + + private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] + + assert(ccAllowUnsoundMaps || mapIsIdempotent, tm.getClass) + + private def whereCreated(using Context): String = + if stack == null then "" + else i""" + |Stack trace of variable creation:" + |${stack.mkString("\n")}""" + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val added = + if origin eq source then // elements have to be mapped + mapRefs(newElems, tm, variance) + else + // elements are added by subcapturing propagation with this Mapped set + // as superset; no mapping is necessary or allowed. + Const(newElems) + super.addNewElems(added.elems, origin) + .andAlso { + if added.isConst then CompareResult.OK + else if added.asVar.recordDepsState() then { addAsDependentTo(added); CompareResult.OK } + else CompareResult.fail(this) + } + .andAlso { + if (origin ne source) && mapIsIdempotent then + // `tm` is idempotent, propagate back elems from image set. + // This is sound, since we know that for `r in newElems: tm(r) = r`, hence + // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. + // It's not necessarily the only possible solution, so the scheme is incomplete. + source.tryInclude(newElems, this) + else if !mapIsIdempotent && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source) then + // The map is neither a BiTypeMap nor an idempotent type map. + // In that case there's no much we can do. + // The scheme then does not propagate added elements back to source and rejects adding + // elements from variable sources in contra- and non-variant positions. 
In essence, + // we approximate types resulting from such maps by returning a possible super type + // from the actual type. But this is neither sound nor complete. + report.warning(i"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") + CompareResult.fail(this) + else + CompareResult.OK + } + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if source eq origin then + // it's a mapping of origin, so not a superset of `origin`, + // therefore don't contribute to the intersection. + universal + else + source.upperApprox(this).map(tm) + + override def propagateSolved()(using Context) = + if initial.isConst then super.propagateSolved() + + override def toString = s"Mapped$id($source, elems = $elems)" + end Mapped + + /** A mapping where the type map is required to be a bijection. + * Parameters as in Mapped. + */ + final class BiMapped private[CaptureSet] + (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) + extends DerivedVar(initialElems): + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + if origin eq source then + super.addNewElems(newElems.map(bimap.forward), origin) + else + super.addNewElems(newElems, origin) + .andAlso { + source.tryInclude(newElems.map(bimap.backward), this) + .showing(i"propagating new elems ${CaptureSet(newElems)} backward from $this to $source", capt) + } + + /** For a BiTypeMap, supertypes of the mapped type also constrain + * the source via the inverse type mapping and vice versa. That is, if + * B = f(A) and B <: C, then A <: f^-1(C), so C should flow into + * the upper approximation of A. + * Conversely if A <: C2, then we also know that B <: f(C2). + * These situations are modeled by the two branches of the conditional below. 
+ */ + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + val supApprox = super.computeApprox(this) + if source eq origin then supApprox.map(bimap.inverseTypeMap) + else source.upperApprox(this).map(bimap) ** supApprox + + override def toString = s"BiMapped$id($source, elems = $elems)" + end BiMapped + + /** A variable with elements given at any time as { x <- source.elems | p(x) } */ + class Filtered private[CaptureSet] + (val source: Var, p: CaptureRef -> Boolean)(using @constructorOnly ctx: Context) + extends DerivedVar(source.elems.filter(p)): + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val filtered = newElems.filter(p) + if origin eq source then + super.addNewElems(filtered, origin) + else + // Filtered elements have to be back-propagated to source. + // Elements that don't satisfy `p` are not allowed. + super.addNewElems(newElems, origin) + .andAlso { + if filtered.size == newElems.size then source.tryInclude(newElems, this) + else CompareResult.fail(this) + } + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if source eq origin then + // it's a filter of origin, so not a superset of `origin`, + // therefore don't contribute to the intersection. 
+ universal + else + source.upperApprox(this).filter(p) + + override def toString = s"${getClass.getSimpleName}$id($source, elems = $elems)" + end Filtered + + /** A variable with elements given at any time as { x <- source.elems | !other.accountsFor(x) } */ + class Diff(source: Var, other: Const)(using Context) + extends Filtered(source, !other.accountsFor(_)) + + class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using Context) + extends Var(elemIntersection(cs1, cs2)): + addAsDependentTo(cs1) + addAsDependentTo(cs2) + deps += cs1 + deps += cs2 + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val added = + if origin eq cs1 then newElems.filter(cs2.accountsFor) + else if origin eq cs2 then newElems.filter(cs1.accountsFor) + else newElems + // If origin is not cs1 or cs2, then newElems will be propagated to + // cs1, cs2 since they are in deps. + super.addNewElems(added, origin) + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if (origin eq cs1) || (origin eq cs2) then + // it's a combination of origin with some other set, so not a superset of `origin`, + // therefore don't contribute to the intersection. + universal + else + CaptureSet(elemIntersection(cs1.upperApprox(this), cs2.upperApprox(this))) + + override def propagateSolved()(using Context) = + if cs1.isConst && cs2.isConst && !isConst then markSolved() + end Intersected + + def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = + cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) + + /** Extrapolate tm(r) according to `variance`. Let r1 be the result of tm(r). 
+ * - If r1 is a tracked CaptureRef, return {r1} + * - If r1 has an empty capture set, return {} + * - Otherwise, + * - if the variance is covariant, return r1's capture set + * - if the variance is contravariant, return {} + * - Otherwise assertion failure + */ + def extrapolateCaptureRef(r: CaptureRef, tm: TypeMap, variance: Int)(using Context): CaptureSet = + val r1 = tm(r) + val upper = r1.captureSet + def isExact = + upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) + if variance > 0 || isExact then upper + else if variance < 0 then CaptureSet.empty + else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") + + /** Apply `f` to each element in `xs`, and join result sets with `++` */ + def mapRefs(xs: Refs, f: CaptureRef => CaptureSet)(using Context): CaptureSet = + ((empty: CaptureSet) /: xs)((cs, x) => cs ++ f(x)) + + /** Apply extrapolated `tm` to each element in `xs`, and join result sets with `++` */ + def mapRefs(xs: Refs, tm: TypeMap, variance: Int)(using Context): CaptureSet = + mapRefs(xs, extrapolateCaptureRef(_, tm, variance)) + + /** Return true iff + * - arg1 is a TypeBounds >: CL T <: CH T of two capturing types with equal parents. + * - arg2 is a capturing type CA U + * - CH <: CA <: CL + * In other words, we can unify CL, CH and CA. + */ + def subCapturesRange(arg1: TypeBounds, arg2: Type)(using Context): Boolean = arg1 match + case TypeBounds(CapturingType(lo, loRefs), CapturingType(hi, hiRefs)) if lo =:= hi => + given VarState = VarState() + val cs2 = arg2.captureSet + hiRefs.subCaptures(cs2).isOK && cs2.subCaptures(loRefs).isOK + case _ => + false + + /** A TypeMap with the property that every capture reference in the image + * of the map is mapped to itself. I.e. for all capture references r1, r2, + * if M(r1) == r2 then M(r2) == r2. 
+   */
+  trait IdempotentCaptRefMap extends TypeMap
+
+  /** A TypeMap that is the identity on capture references */
+  trait IdentityCaptRefMap extends TypeMap
+
+  type CompareResult = CompareResult.TYPE
+
+  /** The result of subcapturing comparisons is an opaque type CompareResult.TYPE.
+   *  This is either OK, indicating success, or
+   *  another capture set, indicating failure. The failure capture set
+   *  is the one that did not allow propagation of elements into it.
+   */
+  object CompareResult:
+    opaque type TYPE = CaptureSet
+    val OK: TYPE = Const(emptySet)
+    def fail(cs: CaptureSet): TYPE = cs
+
+    extension (result: TYPE)
+      /** The result is OK */
+      def isOK: Boolean = result eq OK
+      /** If not isOK, the blocking capture set */
+      def blocking: CaptureSet = result
+      inline def andAlso(op: Context ?=> TYPE)(using Context): TYPE = if result.isOK then op else result
+      def show(using Context): String = if result.isOK then "OK" else i"$result"
+  end CompareResult
+
+  /** A VarState serves as a snapshot mechanism that can undo
+   *  additions of elements or super sets if an operation fails
+   */
+  class VarState:
+
+    /** A map from captureset variables to their elements at the time of the snapshot. */
+    private val elemsMap: util.EqHashMap[Var, Refs] = new util.EqHashMap
+
+    /** A map from captureset variables to their dependent sets at the time of the snapshot. */
+    private val depsMap: util.EqHashMap[Var, Deps] = new util.EqHashMap
+
+    /** The recorded elements of `v` (it's required that a recording was made) */
+    def elems(v: Var): Refs = elemsMap(v)
+
+    /** Optionally the recorded elements of `v`, None if nothing was recorded for `v` */
+    def getElems(v: Var): Option[Refs] = elemsMap.get(v)
+
+    /** Record elements, return whether this was allowed.
+     *  By default, recording is allowed but the special state FrozenState
+     *  overrides this.
+ */ + def putElems(v: Var, elems: Refs): Boolean = { elemsMap(v) = elems; true } + + /** The recorded dependent sets of `v` (it's required that a recording was made) */ + def deps(v: Var): Deps = depsMap(v) + + /** Optionally the recorded dependent sets of `v`, None if nothing was recorded for `v` */ + def getDeps(v: Var): Option[Deps] = depsMap.get(v) + + /** Record dependent sets, return whether this was allowed. + * By default, recording is allowed but the special state FrozenState + * overrides this. + */ + def putDeps(v: Var, deps: Deps): Boolean = { depsMap(v) = deps; true } + + /** Roll back global state to what was recorded in this VarState */ + def rollBack(): Unit = + elemsMap.keysIterator.foreach(_.resetElems()(using this)) + depsMap.keysIterator.foreach(_.resetDeps()(using this)) + end VarState + + /** A special state that does not allow to record elements or dependent sets. + * In effect this means that no new elements or dependent sets can be added + * in this state (since the previous state cannot be recorded in a snapshot) + */ + @sharable + object FrozenState extends VarState: + override def putElems(v: Var, refs: Refs) = false + override def putDeps(v: Var, deps: Deps) = false + override def rollBack(): Unit = () + + @sharable + /** A special state that turns off recording of elements. Used only + * in `addSub` to prevent cycles in recordings. 
+ */ + private object UnrecordedState extends VarState: + override def putElems(v: Var, refs: Refs) = true + override def putDeps(v: Var, deps: Deps) = true + override def rollBack(): Unit = () + + /** The current VarState, as passed by the implicit context */ + def varState(using state: VarState): VarState = state + + /* Not needed: + def ofClass(cinfo: ClassInfo, argTypes: List[Type])(using Context): CaptureSet = + CaptureSet.empty + def captureSetOf(tp: Type): CaptureSet = tp match + case tp: TypeRef if tp.symbol.is(ParamAccessor) => + def mapArg(accs: List[Symbol], tps: List[Type]): CaptureSet = accs match + case acc :: accs1 if tps.nonEmpty => + if acc == tp.symbol then tps.head.captureSet + else mapArg(accs1, tps.tail) + case _ => + empty + mapArg(cinfo.cls.paramAccessors, argTypes) + case _ => + tp.captureSet + val css = + for + parent <- cinfo.parents if parent.classSymbol == defn.RetainingClass + arg <- parent.argInfos + yield captureSetOf(arg) + css.foldLeft(empty)(_ ++ _) + */ + + /** The capture set of the type underlying a CaptureRef */ + def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match + case ref: TermRef if ref.isRootCapability => ref.singletonCaptureSet + case _ => ofType(ref.underlying) + + /** Capture set of a type */ + def ofType(tp: Type)(using Context): CaptureSet = + def recur(tp: Type): CaptureSet = tp.dealias match + case tp: TermRef => + tp.captureSet + case tp: TermParamRef => + tp.captureSet + case _: TypeRef => + if tp.classSymbol.hasAnnotation(defn.CapabilityAnnot) then universal else empty + case _: TypeParamRef => + empty + case CapturingType(parent, refs) => + recur(parent) ++ refs + case AppliedType(tycon, args) => + val cs = recur(tycon) + tycon.typeParams match + case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) + case _ => cs + case tp: TypeProxy => + recur(tp.underlying) + case AndType(tp1, tp2) => + recur(tp1) ** recur(tp2) + case OrType(tp1, tp2) => + recur(tp1) ++ recur(tp2) + case _ => + 
empty + recur(tp) + .showing(i"capture set of $tp = $result", capt) + + private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() + + /** Perform `op`. Under -Ycc-debug, collect and print info about all variables reachable + * via `(_.deps)*` from the variables that were shown in `op`. + */ + def withCaptureSetsExplained[T](op: Context ?=> T)(using ctx: Context): T = + if ctx.settings.YccDebug.value then + val shownVars = mutable.Set[Var]() + inContext(ctx.withProperty(ShownVars, Some(shownVars))) { + try op + finally + val reachable = mutable.Set[Var]() + val todo = mutable.Queue[Var]() ++= shownVars + def incl(cv: Var): Unit = + if !reachable.contains(cv) then todo += cv + while todo.nonEmpty do + val cv = todo.dequeue() + if !reachable.contains(cv) then + reachable += cv + cv.deps.foreach { + case cv: Var => incl(cv) + case _ => + } + cv match + case cv: DerivedVar => incl(cv.source) + case _ => + val allVars = reachable.toArray.sortBy(_.id) + println(i"Capture set dependencies:") + for cv <- allVars do + println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") + } + else op +end CaptureSet diff --git a/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala new file mode 100644 index 000000000000..e9862f1f20b8 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala @@ -0,0 +1,72 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.* + +/** A (possibly boxed) capturing type. This is internally represented as an annotated type with a @retains + * or @retainsByName annotation, but the extractor will succeed only at phase CheckCaptures. + * That way, we can ignore caturing information until phase CheckCaptures since it is + * wrapped in a plain annotation. + * + * The same trick does not work for the boxing information. 
Boxing is context dependent, so
+ *  we have to add that information in the Setup step preceding CheckCaptures. Boxes are
+ *  added for all type arguments of methods. For type arguments of applied types a different
+ *  strategy is used where we box arguments of applied types that are not functions when
+ *  accessing the argument.
+ *
+ *  An alternative strategy would add boxes also to arguments of applied types during setup.
+ *  But this would have to be done for all possibly accessible types from the compiled units
+ *  as well as their dependencies. It's difficult to do this in a DenotationTransformer without
+ *  accidentally forcing symbol infos. That's why this alternative was not implemented.
+ *  If we would go back on this it would make sense to also treat capturing types differently
+ *  from annotations and to generate them all during Setup and in DenotationTransformers.
+ */
+object CapturingType:
+
+  /** Smart constructor that drops empty capture sets and fuses compatible capturing types.
+   *  An outer type capturing type A can be fused with an inner capturing type B if their
+   *  boxing status is the same or if A is boxed.
+   */
+  def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type =
+    if refs.isAlwaysEmpty then parent
+    else parent match
+      case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed =>
+        apply(parent1, refs ++ refs1, boxed)
+      case _ =>
+        AnnotatedType(parent, CaptureAnnotation(refs, boxed)(defn.RetainsAnnot))
+
+  /** An extractor that succeeds only during CheckCapturingPhase. Boxing status is
+   *  returned separately by CaptureOps.isBoxed.
+ */ + def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = + if ctx.phase == Phases.checkCapturesPhase + && tp.annot.symbol == defn.RetainsAnnot + && !ctx.mode.is(Mode.IgnoreCaptures) + then + EventuallyCapturingType.unapply(tp) + else None + +end CapturingType + +/** An extractor for types that will be capturing types at phase CheckCaptures. Also + * included are types that indicate captures on enclosing call-by-name parameters + * before phase ElimByName. + */ +object EventuallyCapturingType: + + def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = + val sym = tp.annot.symbol + if sym == defn.RetainsAnnot || sym == defn.RetainsByNameAnnot then + tp.annot match + case ann: CaptureAnnotation => + Some((tp.parent, ann.refs)) + case ann => + try Some((tp.parent, ann.tree.toCaptureSet)) + catch case ex: IllegalCaptureRef => None + else None + +end EventuallyCapturingType + + diff --git a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala new file mode 100644 index 000000000000..97f8e1eea405 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala @@ -0,0 +1,949 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Phases.*, DenotTransformers.*, SymDenotations.* +import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* +import Types.*, StdNames.*, Denotations.* +import config.Printers.{capt, recheckr} +import config.{Config, Feature} +import ast.{tpd, untpd, Trees} +import Trees.* +import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents} +import typer.Checking.{checkBounds, checkAppliedTypesIn} +import util.{SimpleIdentitySet, EqHashMap, SrcPos} +import transform.SymUtils.* +import transform.{Recheck, PreRecheck} +import Recheck.* +import scala.collection.mutable +import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} +import StdNames.nme +import NameKinds.DefaultGetterName +import reporting.trace 
+import language.experimental.pureFunctions + +/** The capture checker */ +object CheckCaptures: + import ast.tpd.* + + class Pre extends PreRecheck, SymTransformer: + + override def isEnabled(using Context) = true + + /** Reset `private` flags of parameter accessors so that we can refine them + * in Setup if they have non-empty capture sets. Special handling of some + * symbols defined for case classes. + */ + def transformSym(sym: SymDenotation)(using Context): SymDenotation = + if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then + sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) + else if Synthetics.needsTransform(sym) then + Synthetics.transformToCC(sym) + else + sym + end Pre + + /** A class describing environments. + * @param owner the current owner + * @param nestedInOwner true if the environment is a temporary one nested in the owner's environment, + * and does not have a different actual owner symbol (this happens when doing box adaptation). + * @param captured the caputure set containing all references to tracked free variables outside of boxes + * @param isBoxed true if the environment is inside a box (in which case references are not counted) + * @param outer0 the next enclosing environment + */ + case class Env( + owner: Symbol, + nestedInOwner: Boolean, + captured: CaptureSet, + isBoxed: Boolean, + outer0: Env | Null + ): + def outer = outer0.nn + + def isOutermost = outer0 == null + + /** If an environment is open it tracks free references */ + def isOpen = !captured.isAlwaysEmpty && !isBoxed + end Env + + /** Similar normal substParams, but this is an approximating type map that + * maps parameters in contravariant capture sets to the empty set. + * TODO: check what happens with non-variant. 
+ */ + final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) + extends ApproximatingTypeMap, IdempotentCaptRefMap: + def apply(tp: Type): Type = tp match + case tp: ParamRef => + if tp.binder == from then to(tp.paramNum) else tp + case tp: NamedType => + if tp.prefix `eq` NoPrefix then tp + else tp.derivedSelect(apply(tp.prefix)) + case _: ThisType => + tp + case _ => + mapOver(tp) + + /** Check that a @retains annotation only mentions references that can be tracked. + * This check is performed at Typer. + */ + def checkWellformed(ann: Tree)(using Context): Unit = + for elem <- retainedElems(ann) do + elem.tpe match + case ref: CaptureRef => + if !ref.canBeTracked then + report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) + case tpe => + report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) + + /** If `tp` is a capturing type, check that all references it mentions have non-empty + * capture sets. Also: warn about redundant capture annotations. + * This check is performed after capture sets are computed in phase cc. + */ + def checkWellformedPost(tp: Type, pos: SrcPos)(using Context): Unit = tp match + case CapturingType(parent, refs) => + for ref <- refs.elems do + if ref.captureSetOfInfo.elems.isEmpty then + report.error(em"$ref cannot be tracked since its capture set is empty", pos) + else if parent.captureSet.accountsFor(ref) then + report.warning(em"redundant capture: $parent already accounts for $ref", pos) + case _ => + + /** Warn if `ann`, which is a tree of a @retains annotation, defines some elements that + * are already accounted for by other elements of the same annotation. + * Note: We need to perform the check on the original annotation rather than its + * capture set since the conversion to a capture set already eliminates redundant elements. 
+ */ + def warnIfRedundantCaptureSet(ann: Tree)(using Context): Unit = + // The lists `elems(i) :: prev.reverse :: elems(0),...,elems(i-1),elems(i+1),elems(n)` + // where `n == elems.length-1`, i <- 0..n`. + // I.e. + // choices(Nil, elems) = [[elems(i), elems(0), ..., elems(i-1), elems(i+1), .... elems(n)] | i <- 0..n] + def choices(prev: List[Tree], elems: List[Tree]): List[List[Tree]] = elems match + case Nil => Nil + case elem :: elems => + List(elem :: (prev reverse_::: elems)) ++ choices(elem :: prev, elems) + for case first :: others <- choices(Nil, retainedElems(ann)) do + val firstRef = first.toCaptureRef + val remaining = CaptureSet(others.map(_.toCaptureRef)*) + if remaining.accountsFor(firstRef) then + report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) + +class CheckCaptures extends Recheck, SymTransformer: + thisPhase => + + import ast.tpd.* + import CheckCaptures.* + + def phaseName: String = "cc" + override def isEnabled(using Context) = true + + def newRechecker()(using Context) = CaptureChecker(ctx) + + override def run(using Context): Unit = + if Feature.ccEnabled then + checkOverrides.traverse(ctx.compilationUnit.tpdTree) + super.run + + override def transformSym(sym: SymDenotation)(using Context): SymDenotation = + if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) + else super.transformSym(sym) + + /** Check overrides again, taking capture sets into account. + * TODO: Can we avoid doing overrides checks twice? + * We need to do them here since only at this phase CaptureTypes are relevant + * But maybe we can then elide the check during the RefChecks phase under captureChecking? 
+ */ + def checkOverrides = new TreeTraverser: + def traverse(t: Tree)(using Context) = + t match + case t: Template => checkAllOverrides(ctx.owner.asClass) + case _ => + traverseChildren(t) + + class CaptureChecker(ictx: Context) extends Rechecker(ictx): + import ast.tpd.* + + override def keepType(tree: Tree) = + super.keepType(tree) + || tree.isInstanceOf[Try] // type of `try` needs tp be checked for * escapes + + /** Instantiate capture set variables appearing contra-variantly to their + * upper approximation. + */ + private def interpolator(startingVariance: Int = 1)(using Context) = new TypeTraverser: + variance = startingVariance + override def traverse(t: Type) = + t match + case CapturingType(parent, refs: CaptureSet.Var) => + if variance < 0 then + capt.println(i"solving $t") + refs.solve() + traverse(parent) + case t @ RefinedType(_, nme.apply, rinfo) if defn.isFunctionOrPolyType(t) => + traverse(rinfo) + case tp: TypeVar => + case tp: TypeRef => + traverse(tp.prefix) + case _ => + traverseChildren(t) + + /** If `tpt` is an inferred type, interpolate capture set variables appearing contra- + * variantly in it. 
+ */ + private def interpolateVarsIn(tpt: Tree)(using Context): Unit = + if tpt.isInstanceOf[InferredTypeTree] then + interpolator().traverse(tpt.knownType) + .showing(i"solved vars in ${tpt.knownType}", capt) + + /** Assert subcapturing `cs1 <: cs2` */ + def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = + assert(cs1.subCaptures(cs2, frozen = false).isOK, i"$cs1 is not a subset of $cs2") + + /** Check subcapturing `{elem} <: cs`, report error on failure */ + def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) = + val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false) + if !res.isOK then + report.error(i"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) + + /** Check subcapturing `cs1 <: cs2`, report error on failure */ + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) = + val res = cs1.subCaptures(cs2, frozen = false) + if !res.isOK then + def header = + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not" + else i"references $cs1 are not all" + report.error(i"$header included in allowed capture set ${res.blocking}", pos) + + /** The current environment */ + private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) + + private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap() + + /** If `sym` is a class or method nested inside a term, a capture set variable representing + * the captured variables of the environment associated with `sym`. 
+ */ + def capturedVars(sym: Symbol)(using Context) = + myCapturedVars.getOrElseUpdate(sym, + if sym.ownersIterator.exists(_.isTerm) then CaptureSet.Var() + else CaptureSet.empty) + + /** For all nested environments up to `limit` perform `op` */ + def forallOuterEnvsUpTo(limit: Symbol)(op: Env => Unit)(using Context): Unit = + def recur(env: Env): Unit = + if env.isOpen && env.owner != limit then + op(env) + if !env.isOutermost then + var nextEnv = env.outer + if env.owner.isConstructor then + if nextEnv.owner != limit && !nextEnv.isOutermost then + recur(nextEnv.outer) + else recur(nextEnv) + recur(curEnv) + + /** Include `sym` in the capture sets of all enclosing environments nested in the + * the environment in which `sym` is defined. + */ + def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit = + if sym.exists then + val ref = sym.termRef + if ref.isTracked then + forallOuterEnvsUpTo(sym.enclosure) { env => + capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") + checkElem(ref, env.captured, pos) + } + + /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing + * environments. 
At each stage, only include references from `cs` that are outside + * the environment's owner + */ + def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = + if !cs.isAlwaysEmpty then + forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env => + val included = cs.filter { + case ref: TermRef => + (env.nestedInOwner || env.owner != ref.symbol.owner) + && env.owner.isContainedIn(ref.symbol.owner) + case ref: ThisType => + (env.nestedInOwner || env.owner != ref.cls) + && env.owner.isContainedIn(ref.cls) + case _ => false + } + capt.println(i"Include call capture $included in ${env.owner}") + checkSubset(included, env.captured, pos) + } + + /** Include references captured by the called method in the current environment stack */ + def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit = + if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos) + + override def recheckIdent(tree: Ident)(using Context): Type = + if tree.symbol.is(Method) then includeCallCaptures(tree.symbol, tree.srcPos) + else markFree(tree.symbol, tree.srcPos) + super.recheckIdent(tree) + + /** A specialized implementation of the selection rule. + * + * E |- f: Cf f { m: Cr R } + * ------------------------ + * E |- f.m: C R + * + * The implementation picks as `C` one of `{f}` or `Cr`, depending on the + * outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr + * and Cr otherwise. + */ + override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = { + def disambiguate(denot: Denotation): Denotation = denot match + case MultiDenotation(denot1, denot2) => + // This case can arise when we try to merge multiple types that have different + // capture sets on some part. For instance an asSeenFrom might produce + // a bi-mapped capture set arising from a substition. 
Applying the same substitution + // to the same type twice will nevertheless produce different capture setsw which can + // lead to a failure in disambiguation since neither alternative is better than the + // other in a frozen constraint. An example test case is disambiguate-select.scala. + // We address the problem by disambiguating while ignoring all capture sets as a fallback. + withMode(Mode.IgnoreCaptures) { + disambiguate(denot1).meet(disambiguate(denot2), qualType) + } + case _ => denot + + val selType = recheckSelection(tree, qualType, name, disambiguate) + val selCs = selType.widen.captureSet + if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then + selType + else + val qualCs = qualType.captureSet + capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree") + if qualCs.mightSubcapture(selCs) + && !selCs.mightSubcapture(qualCs) + && !pt.stripCapturing.isInstanceOf[SingletonType] + then + selType.widen.stripCapturing.capturing(qualCs) + .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) + else + selType + }//.showing(i"recheck sel $tree, $qualType = $result") + + /** A specialized implementation of the apply rule. + * + * E |- f: Cf (Ra -> Cr Rr) + * E |- a: Ca Ra + * ------------------------ + * E |- f a: C Rr + * + * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the + * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr + * and Cr otherwise. 
+ */ + override def recheckApply(tree: Apply, pt: Type)(using Context): Type = + val meth = tree.fun.symbol + includeCallCaptures(meth, tree.srcPos) + def mapArgUsing(f: Type => Type) = + val arg :: Nil = tree.args: @unchecked + val argType0 = f(recheckStart(arg, pt)) + val argType = super.recheckFinish(argType0, arg, pt) + super.recheckFinish(argType, tree, pt) + + if meth == defn.Caps_unsafeBox then + mapArgUsing(_.forceBoxStatus(true)) + else if meth == defn.Caps_unsafeUnbox then + mapArgUsing(_.forceBoxStatus(false)) + else if meth == defn.Caps_unsafeBoxFunArg then + mapArgUsing { + case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual, isErased) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual, isErased) + } + else + super.recheckApply(tree, pt) match + case appType @ CapturingType(appType1, refs) => + tree.fun match + case Select(qual, _) + if !tree.fun.symbol.isConstructor + && !qual.tpe.isBoxedCapturing + && !tree.args.exists(_.tpe.isBoxedCapturing) + && qual.tpe.captureSet.mightSubcapture(refs) + && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) + => + val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => + cs ++ arg.tpe.captureSet) + appType.derivedCapturingType(appType1, callCaptures) + .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) + case _ => appType + case appType => appType + end recheckApply + + /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`. + * This means: + * - Instantiate result type with actual arguments + * - If call is to a constructor: + * - remember types of arguments corresponding to tracked + * parameters in refinements. + * - add capture set of instantiated class to capture set of result type. 
+ */ + override def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = + val ownType = + if mt.isResultDependent then SubstParamsMap(mt, argTypes)(mt.resType) + else mt.resType + + if sym.isConstructor then + val cls = sym.owner.asClass + + /** First half of result pair: + * Refine the type of a constructor call `new C(t_1, ..., t_n)` + * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked + * parameters of C and T_1, ..., T_m are the types of the corresponding arguments. + * + * Second half: union of all capture sets of arguments to tracked parameters. + */ + def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = + mt.paramNames.lazyZip(argTypes).foldLeft((core, initCs)) { (acc, refine) => + val (core, allCaptures) = acc + val (getterName, argType) = refine + val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol + if getter.termRef.isTracked && !getter.is(Private) + then (RefinedType(core, getterName, argType), allCaptures ++ argType.captureSet) + else (core, allCaptures) + } + + def augmentConstructorType(core: Type, initCs: CaptureSet): Type = core match + case core: MethodType => + // more parameters to follow; augment result type + core.derivedLambdaType(resType = augmentConstructorType(core.resType, initCs)) + case CapturingType(parent, refs) => + // can happen for curried constructors if instantiate of a previous step + // added capture set to result. 
+ augmentConstructorType(parent, initCs ++ refs) + case _ => + val (refined, cs) = addParamArgRefinements(core, initCs) + refined.capturing(cs) + + augmentConstructorType(ownType, CaptureSet.empty) match + case augmented: MethodType => + augmented + case augmented => + // add capture sets of class and constructor to final result of constructor call + augmented.capturing(capturedVars(cls) ++ capturedVars(sym)) + .showing(i"constr type $mt with $argTypes%, % in $cls = $result", capt) + else ownType + end instantiate + + override def recheckClosure(tree: Closure, pt: Type)(using Context): Type = + val cs = capturedVars(tree.meth.symbol) + capt.println(i"typing closure $tree with cvs $cs") + super.recheckClosure(tree, pt).capturing(cs) + .showing(i"rechecked $tree / $pt = $result", capt) + + /** Additionally to normal processing, update types of closures if the expected type + * is a function with only pure parameters. In that case, make the anonymous function + * also have the same parameters as the prototype. + * TODO: Develop a clearer rationale for this. + * TODO: Can we generalize this to arbitrary parameters? + * Currently some tests fail if we do this. (e.g. neg.../stackAlloc.scala, others) + */ + override def recheckBlock(block: Block, pt: Type)(using Context): Type = + block match + case closureDef(mdef) => + pt.dealias match + case defn.FunctionOf(ptformals, _, _, _) + if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => + // Redo setup of the anonymous function so that formal parameters don't + // get capture sets. This is important to avoid false widenings to `*` + // when taking the base type of the actual closures's dependent function + // type so that it conforms to the expected non-dependent function type. + // See withLogFile.scala for a test case. + val meth = mdef.symbol + // First, undo the previous setup which installed a completer for `meth`. 
+ atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) + .installAfter(preRecheckPhase) + + // Next, update all parameter symbols to match expected formals + meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => + psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) + } + // Next, update types of parameter ValDefs + mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => + val ValDef(_, tpt, _) = param: @unchecked + tpt.rememberTypeAlways(pformal) + } + // Next, install a new completer reflecting the new parameters for the anonymous method + val mt = meth.info.asInstanceOf[MethodType] + val completer = new LazyType: + def complete(denot: SymDenotation)(using Context) = + denot.info = mt.companion(ptformals, mdef.tpt.knownType) + .showing(i"simplify info of $meth to $result", capt) + recheckDef(mdef, meth) + meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) + case _ => + case _ => + super.recheckBlock(block, pt) + + override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = + try + if !sym.is(Module) then // Modules are checked by checking the module class + super.recheckValDef(tree, sym) + finally + if !sym.is(Param) then + // Parameters with inferred types belong to anonymous methods. We need to wait + // for more info from the context, so we cannot interpolate. Note that we cannot + // expect to have all necessary info available at the point where the anonymous + // function is compiled since we do not propagate expected types into blocks. 
+ interpolateVarsIn(tree.tpt) + + override def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Unit = + if !Synthetics.isExcluded(sym) then + val saved = curEnv + val localSet = capturedVars(sym) + if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) + try super.recheckDefDef(tree, sym) + finally + interpolateVarsIn(tree.tpt) + curEnv = saved + + /** Class-specific capture set relations: + * 1. The capture set of a class includes the capture sets of its parents. + * 2. The capture set of the self type of a class includes the capture set of the class. + * 3. The capture set of the self type of a class includes the capture set of every class parameter, + * unless the parameter is marked @constructorOnly. + */ + override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = + val saved = curEnv + val localSet = capturedVars(cls) + for parent <- impl.parents do // (1) + checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) + if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) + try + val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") + checkSubset(localSet, thisSet, tree.srcPos) // (2) + for param <- cls.paramGetters do + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + if cls.derivesFrom(defn.ThrowableClass) then + checkSubset(thisSet, CaptureSet.emptyOfException, tree.srcPos) + super.recheckClassDef(tree, impl, cls) + finally + curEnv = saved + + /** If type is of the form `T @requiresCapability(x)`, + * mark `x` as free in the current environment. This is used to require the + * correct `CanThrow` capability when encountering a `throw`. 
+ */ + override def recheckTyped(tree: Typed)(using Context): Type = + tree.tpt.tpe match + case AnnotatedType(_, annot) if annot.symbol == defn.RequiresCapabilityAnnot => + annot.tree match + case Apply(_, cap :: Nil) => + markFree(cap.symbol, tree.srcPos) + case _ => + case _ => + super.recheckTyped(tree) + + /* Currently not needed, since capture checking takes place after ElimByName. + * Keep around in case we need to get back to it + def recheckByNameArg(tree: Tree, pt: Type)(using Context): Type = + val closureDef(mdef) = tree: @unchecked + val arg = mdef.rhs + val localSet = CaptureSet.Var() + curEnv = Env(mdef.symbol, localSet, isBoxed = false, curEnv) + val result = + try + inContext(ctx.withOwner(mdef.symbol)) { + recheckStart(arg, pt).capturing(localSet) + } + finally curEnv = curEnv.outer + recheckFinish(result, arg, pt) + */ + + /** If expected type `pt` is boxed and the tree is a function or a reference, + * don't propagate free variables. + * Otherwise, if the result type is boxed, simulate an unboxing by + * adding all references in the boxed capture set to the current environment. + */ + override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = + if tree.isTerm && pt.isBoxedCapturing then + val saved = curEnv + + tree match + case _: RefTree | closureDef(_) => + curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) + case _ => + + try super.recheck(tree, pt) + finally curEnv = saved + else + val res = super.recheck(tree, pt) + if tree.isTerm then markFree(res.boxedCaptureSet, tree.srcPos) + res + + /** If `tree` is a reference or an application where the result type refers + * to an enclosing class or method parameter of the reference, check that the result type + * does not capture the universal capability. This is justified since the + * result type would have to be implicitly unboxed. + * TODO: Can we find a cleaner way to achieve this? 
Logically, this should be part
+ * of simulated boxing and unboxing.
+ */
+ override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type =
+ val typeToCheck = tree match
+ case _: Ident | _: Select | _: Apply | _: TypeApply if tree.symbol.unboxesResult =>
+ tpe
+ case _: Try =>
+ tpe
+ case _ =>
+ NoType
+ def checkNotUniversal(tp: Type): Unit = tp.widenDealias match
+ case wtp @ CapturingType(parent, refs) =>
+ refs.disallowRootCapability { () =>
+ val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression"
+ report.error(
+ em"""The $kind's type $wtp is not allowed to capture the root capability `*`.
+ |This usually means that a capability persists longer than its allowed lifetime.""",
+ tree.srcPos)
+ }
+ checkNotUniversal(parent)
+ case _ =>
+ checkNotUniversal(typeToCheck)
+ super.recheckFinish(tpe, tree, pt)
+
+ /** Massage `actual` and `expected` types using the methods below before checking conformance */
+ override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit =
+ val expected1 = addOuterRefs(expected, actual)
+ val actual1 = adaptBoxed(actual, expected1, tree.srcPos)
+ //println(i"check conforms $actual1 <<< $expected1")
+ super.checkConformsExpr(actual1, expected1, tree)
+
+ /** For the expected type, implement the rule outlined in #14390:
+ * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`,
+ * - where the capture set `Ce` contains Cls.this,
+ * - and where all method definitions enclosing `a` inside class `Cls`
+ * have only pure parameters,
+ * - add to `Ce` all references to variables or this-references in `Ca`
+ * that are outside `Cls`. These are all accessed through `Cls.this`,
+ * so we can assume they are already accounted for by `Ce` and adding
+ * them explicitly to `Ce` changes nothing.
+ */ + private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = + def isPure(info: Type): Boolean = info match + case info: PolyType => isPure(info.resType) + case info: MethodType => info.paramInfos.forall(_.captureSet.isAlwaysEmpty) && isPure(info.resType) + case _ => true + def isPureContext(owner: Symbol, limit: Symbol): Boolean = + if owner == limit then true + else if !owner.exists then false + else isPure(owner.info) && isPureContext(owner.owner, limit) + def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = + (erefs /: erefs.elems) { (erefs, eref) => + eref match + case eref: ThisType if isPureContext(ctx.owner, eref.cls) => + erefs ++ arefs.filter { + case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) + case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) + case _ => false + } + case _ => + erefs + } + expected match + case CapturingType(ecore, erefs) => + val erefs1 = augment(erefs, actual.captureSet) + if erefs1 ne erefs then + capt.println(i"augmented $expected from ${actual.captureSet} --> $erefs1") + expected.derivedCapturingType(ecore, erefs1) + case _ => + expected + + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions */ + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = + + /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) + * to `expected` type. + * It returns the adapted type along with the additionally captured variable + * during adaptation. 
+ * @param reconstruct how to rebuild the adapted function type + */ + def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val (eargs, eres) = expected.dealias.stripCapturing match + case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) + case expected: MethodType => (expected.paramInfos, expected.resType) + case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if (ares1 eq ares) && (aargs1 eq aargs) then actual + else reconstruct(aargs1, ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + + /** Adapt type function type `actual` to the expected type. 
+ * @see [[adaptFun]] + */ + def adaptTypeFun( + actual: Type, ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: Type => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val eres = expected.dealias.stripCapturing match + case RefinedType(_, _, rinfo: PolyType) => rinfo.resType + case expected: PolyType => expected.resType + case _ => WildcardType + + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if ares1 eq ares then actual + else reconstruct(ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + end adaptTypeFun + + def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = + val arrow = if covariant then "~~>" else "<~~" + i"adapting $actual $arrow $expected" + + /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), + * where `tp0` is not a capturing type. + * + * If `tp` is a nested capturing type, the return tuple always represents + * the innermost capturing type. The outer capture annotations can be + * reconstructed with the returned function. + */ + def destructCapturingType(tp: Type, reconstruct: Type -> Type = (x: Type) => x) // !cc! 
need monomorphic default argument + : (Type, CaptureSet, Boolean, Type -> Type) = + tp.dealias match + case tp @ CapturingType(parent, cs) => + if parent.dealias.isCapturingType then + destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) + else + (parent, cs, tp.isBoxed, reconstruct) + case actual => + (actual, CaptureSet(), false, reconstruct) + + def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { + if expected.isInstanceOf[WildcardType] then actual + else + val (parent, cs, actualIsBoxed, recon: (Type -> Type)) = destructCapturingType(actual) + + val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing + val insertBox = needsAdaptation && covariant != actualIsBoxed + + val (parent1, cs1) = parent match { + case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => + val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, + (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => + // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) + val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, + (aargs1, ares1) => + rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) + .toFunctionType(isJava = false, alwaysDependent = true)) + (parent1, leaked ++ cs) + case actual: MethodType => + val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, + (aargs1, ares1) => + actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => + val (parent1, leaked) = adaptTypeFun(parent, rinfo.resType, 
expected, covariant, insertBox, + ares1 => + val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) + val actual1 = actual.derivedRefinedType(p, nme, rinfo1) + actual1 + ) + (parent1, leaked ++ cs) + case _ => + (parent, cs) + } + + if needsAdaptation then + val criticalSet = // the set which is not allowed to have `*` + if covariant then cs1 // can't box with `*` + else expected.captureSet // can't unbox with `*` + if criticalSet.isUniversal && expected.isValueType then + // We can't box/unbox the universal capability. Leave `actual` as it is + // so we get an error in checkConforms. This tends to give better error + // messages than disallowing the root capability in `criticalSet`. + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") + actual + else + // Disallow future addition of `*` to `criticalSet`. + criticalSet.disallowRootCapability { () => + report.error( + em"""$actual cannot be box-converted to $expected + |since one of their capture sets contains the root capability `*`""", + pos) + } + if !insertBox then // unboxing + markFree(criticalSet, pos) + recon(CapturingType(parent1, cs1, !actualIsBoxed)) + else + recon(CapturingType(parent1, cs1, actualIsBoxed)) + } + + var actualw = actual.widenDealias + actual match + case ref: CaptureRef if ref.isTracked => + actualw match + case CapturingType(p, refs) => + actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) + // given `a: C T`, improve `C T` to `{a} T` + case _ => + case _ => + val adapted = adapt(actualw, expected, covariant = true) + if adapted ne actualw then + capt.println(i"adapt boxed $actual vs $expected ===> $adapted") + adapted + else actual + end adaptBoxed + + override def checkUnit(unit: CompilationUnit)(using Context): Unit = + Setup(preRecheckPhase, thisPhase, recheckDef) + .traverse(ctx.compilationUnit.tpdTree) + //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") + 
withCaptureSetsExplained { + super.checkUnit(unit) + checkSelfTypes(unit.tpdTree) + postCheck(unit.tpdTree) + if ctx.settings.YccDebug.value then + show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing + } + + /** Check that self types of subclasses conform to self types of super classes. + * (See comment below how this is achieved). The check assumes that classes + * without an explicit self type have the universal capture set `{*}` on the + * self type. If a class without explicit self type is not `effectivelyFinal` + * it is checked that the inferred self type is universal, in order to assure + * that joint and separate compilation give the same result. + */ + def checkSelfTypes(unit: tpd.Tree)(using Context): Unit = + val parentTrees = mutable.HashMap[Symbol, List[Tree]]() + unit.foreachSubTree { + case cdef @ TypeDef(_, impl: Template) => parentTrees(cdef.symbol) = impl.parents + case _ => + } + // Perform self type checking. The problem here is that `checkParents` compares a + // self type of a subclass with the result of an asSeenFrom of the self type of the + // superclass. That's no good. We need to constrain the original superclass self type + // capture set, not the set mapped by asSeenFrom. + // + // Instead, we proceed from parent classes to child classes. For every class + // we first check its parents, and then interpolate the self type to an + // upper approximation that satisfies all constraints on its capture set. + // That means all capture sets of parent self types are constants, so mapping + // them with asSeenFrom is OK. 
+ while parentTrees.nonEmpty do + val roots = parentTrees.keysIterator.filter { + cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) + } + assert(roots.nonEmpty) + for case root: ClassSymbol <- roots do + checkSelfAgainstParents(root, root.baseClasses) + val selfType = root.asClass.classInfo.selfType + interpolator(startingVariance = -1).traverse(selfType) + if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } + selfType match + case CapturingType(_, refs: CaptureSet.Var) + if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. + report.error( + i"""$root needs an explicitly declared self type since its + |inferred self type $selfType + |is not visible in other compilation units that define subclasses.""", + root.srcPos) + case _ => + parentTrees -= root + capt.println(i"checked $root with $selfType") + end checkSelfTypes + + /** Perform the following kinds of checks + * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. + * - Check that externally visible `val`s or `def`s have empty capture sets. If not, + * suggest an explicit type. This is so that separate compilation (where external + * symbols have empty capture sets) gives the same results as joint compilation. 
+ */
+ def postCheck(unit: tpd.Tree)(using Context): Unit =
+ unit.foreachSubTree {
+ case _: InferredTypeTree =>
+ case tree: TypeTree if !tree.span.isZeroExtent =>
+ tree.knownType.foreachPart { tp =>
+ checkWellformedPost(tp, tree.srcPos)
+ tp match
+ case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot =>
+ warnIfRedundantCaptureSet(annot.tree)
+ case _ =>
+ }
+ case t: ValOrDefDef
+ if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) =>
+ val sym = t.symbol
+ val isLocal =
+ sym.owner.ownersIterator.exists(_.isTerm)
+ || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass)
+ def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly
+ sym.is(Private) // private symbols can always have inferred types
+ || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be
+ // too annoying. This is a hole since a default getter's result type
+ // might leak into a type variable.
+ || // non-local symbols cannot have inferred types since external capture types are not inferred
+ isLocal // local symbols still need explicit types if
+ && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference
+ def isNotPureThis(ref: CaptureRef) = ref match {
+ case ref: ThisType => !ref.cls.isPureClass
+ case _ => true
+ }
+ if !canUseInferred then
+ val inferred = t.tpt.knownType
+ def checkPure(tp: Type) = tp match
+ case CapturingType(_, refs)
+ if !refs.elems.filter(isNotPureThis).isEmpty =>
+ val resultStr = if t.isInstanceOf[DefDef] then " result" else ""
+ report.error(
+ em"""Non-local $sym cannot have an inferred$resultStr type
+ |$inferred
+ |with non-empty capture set $refs.
+ |The type needs to be declared explicitly.""", t.srcPos)
+ case _ =>
+ inferred.foreachPart(checkPure, StopAt.Static)
+ case t @ TypeApply(fun, args) =>
+ fun.knownType.widen match
+ case tl: PolyType =>
+ val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) =>
+ arg.withType(arg.knownType.forceBoxStatus(
+ bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing))
+ }
+ checkBounds(normArgs, tl)
+ case _ =>
+ case _ =>
+ }
+ if !ctx.reporter.errorsReported then
+ // We don't report errors here if previous errors were reported, because other
+ // errors often result in bad applied types, but flagging these bad types
+ // often gives worse error messages than the original errors.
+ val checkApplied = new TreeTraverser:
+ def traverse(t: Tree)(using Context) = t match
+ case tree: InferredTypeTree =>
+ case tree: New =>
+ case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType)
+ case _ => traverseChildren(t)
+ checkApplied.traverse(unit)
+ end CaptureChecker
+end CheckCaptures
diff --git a/tests/pos-with-compiler-cc/dotc/cc/Setup.scala b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala
new file mode 100644
index 000000000000..95f2e71437a8
--- /dev/null
+++ b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala
@@ -0,0 +1,482 @@
+package dotty.tools
+package dotc
+package cc
+
+import core._
+import Phases.*, DenotTransformers.*, SymDenotations.*
+import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.*
+import Types.*, StdNames.*
+import config.Printers.capt
+import ast.tpd
+import transform.Recheck.*
+import CaptureSet.IdentityCaptRefMap
+import Synthetics.isExcluded

+/** A tree traverser that prepares a compilation unit to be capture checked.
+ * It does the following:
+ * - For every inferred type, drop any retains annotations,
+ * add capture sets to all its parts, add refinements to class types and function types.
+ * (c.f.
mapInferred) + * - For explicit capturing types, expand throws aliases to the underlying (pure) function, + * and add some implied capture sets to curried functions (c.f. expandThrowsAlias, expandAbbreviations). + * - Add capture sets to self types of classes and objects, unless the self type was written explicitly. + * - Box the types of mutable variables and type arguments to methods (type arguments of types + * are boxed on access). + * - Link the external types of val and def symbols with the inferred types based on their parameter symbols. + */ +class Setup( + preRecheckPhase: DenotTransformer, + thisPhase: DenotTransformer, + recheckDef: (tpd.ValOrDefDef, Symbol) => Context ?=> Unit) +extends tpd.TreeTraverser: + import tpd.* + + /** Create dependent function with underlying function class `tycon` and given + * arguments `argTypes` and result `resType`. + */ + private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = + MethodType.companion( + isContextual = defn.isContextFunctionClass(tycon.classSymbol), + isErased = defn.isErasedFunctionClass(tycon.classSymbol) + )(argTypes, resType) + .toFunctionType(isJava = false, alwaysDependent = true) + + /** If `tp` is an unboxed capturing type or a function returning an unboxed capturing type, + * convert it to be boxed. 
+ */ + private def box(tp: Type)(using Context): Type = + def recur(tp: Type): Type = tp.dealias match + case tp @ CapturingType(parent, refs) if !tp.isBoxed => + tp.boxed + case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => + val res = args.last + val boxedRes = recur(res) + if boxedRes eq res then tp + else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) + case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => + val boxedRinfo = recur(rinfo) + if boxedRinfo eq rinfo then tp + else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) + case tp1: MethodOrPoly => + val res = tp1.resType + val boxedRes = recur(res) + if boxedRes eq res then tp + else tp1.derivedLambdaType(resType = boxedRes) + case _ => tp + tp match + case tp: MethodOrPoly => tp // don't box results of methods outside refinements + case _ => recur(tp) + + /** Perform the following transformation steps everywhere in a type: + * 1. Drop retains annotations + * 2. Turn plain function types into dependent function types, so that + * we can refer to their parameters in capture sets. Currently this is + * only done at the toplevel, i.e. for function types that are not + * themselves argument types of other function types. Without this restriction + * pos.../lists.scala and pos/...curried-shorthands.scala fail. + * Need to figure out why. + * 3. Refine other class types C by adding capture set variables to their parameter getters + * (see addCaptureRefinements) + * 4. Add capture set variables to all types that can be tracked + * + * Polytype bounds are only cleaned using step 1, but not otherwise transformed. 
+ */ + private def mapInferred(using Context) = new TypeMap: + + /** Drop @retains annotations everywhere */ + object cleanup extends TypeMap: + def apply(t: Type) = t match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + apply(parent) + case _ => + mapOver(t) + + /** Refine a possibly applied class type C where the class has tracked parameters + * x_1: T_1, ..., x_n: T_n to C { val x_1: CV_1 T_1, ..., val x_n: CV_n T_n } + * where CV_1, ..., CV_n are fresh capture sets. + */ + def addCaptureRefinements(tp: Type): Type = tp match + case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => + tp.typeSymbol match + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => + // We assume that Java classes can refer to capturing Scala types only indirectly, + // using type parameters. Hence, no need to refine them. + cls.paramGetters.foldLeft(tp) { (core, getter) => + if getter.termRef.isTracked then + val getterType = tp.memberInfo(getter).strippedDealias + RefinedType(core, getter.name, CapturingType(getterType, CaptureSet.Var())) + .showing(i"add capture refinement $tp --> $result", capt) + else + core + } + case _ => tp + case _ => tp + + private def superTypeIsImpure(tp: Type): Boolean = { + tp.dealias match + case CapturingType(_, refs) => + !refs.isAlwaysEmpty + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then + sym == defn.AnyClass + // we assume Any is a shorthand of {*} Any, so if Any is an upper + // bound, the type is taken to be impure. + else superTypeIsImpure(tp.superType) + case tp: (RefinedOrRecType | MatchType) => + superTypeIsImpure(tp.underlying) + case tp: AndType => + superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) + case tp: OrType => + superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) + case _ => + false + }.showing(i"super type is impure $tp = $result", capt) + + /** Should a capture set variable be added on type `tp`? 
*/ + def needsVariable(tp: Type): Boolean = { + tp.typeParams.isEmpty && tp.match + case tp: (TypeRef | AppliedType) => + val tp1 = tp.dealias + if tp1 ne tp then needsVariable(tp1) + else + val sym = tp1.typeSymbol + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass + else superTypeIsImpure(tp1) + case tp: (RefinedOrRecType | MatchType) => + needsVariable(tp.underlying) + case tp: AndType => + needsVariable(tp.tp1) && needsVariable(tp.tp2) + case tp: OrType => + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {*}, an added variable would not change anything + case _ => + false + }.showing(i"can have inferred capture $tp = $result", capt) + + /** Add a capture set variable to `tp` if necessary, or maybe pull out + * an embedded capture set variable from a part of `tp`. + */ + def addVar(tp: Type) = tp match + case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => + CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) + case tp: RecType => + tp.parent match + case parent @ CapturingType(parent1, refs) => + CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) + case _ => + tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created + // by `mapInferred`. Hence if the underlying type admits capture variables + // a variable was already added, and the first case above would apply. 
+ case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(refs1.asVar.elems.isEmpty) + assert(refs2.asVar.elems.isEmpty) + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(refs1.asVar.elems.isEmpty) + assert(refs2.asVar.elems.isEmpty) + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => + CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) + case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => + CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) + case _ if needsVariable(tp) => + val cs = tp.dealias match + case CapturingType(_, refs) => CaptureSet.Var(refs.elems) + case _ => CaptureSet.Var() + CapturingType(tp, cs) + case _ => + tp + + private var isTopLevel = true + + private def mapNested(ts: List[Type]): List[Type] = + val saved = isTopLevel + isTopLevel = false + try ts.mapConserve(this) finally isTopLevel = saved + + def apply(t: Type) = + val tp = expandThrowsAlias(t) + val tp1 = tp match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + // Drop explicit retains annotations + apply(parent) + case tp @ AppliedType(tycon, args) => + val tycon1 = this(tycon) + if defn.isNonRefinedFunction(tp) then + // Convert toplevel generic function types to dependent functions + val args0 = args.init + var res0 = args.last + val args1 = mapNested(args0) + val res1 = this(res0) + if isTopLevel then + depFun(tycon1, args1, res1) + .showing(i"add function refinement $tp --> $result", capt) + else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then + tp + else + tp.derivedAppliedType(tycon1, args1 :+ res1) + else + tp.derivedAppliedType(tycon1, args.mapConserve(arg => 
this(arg))) + case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => + val rinfo1 = apply(rinfo) + if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) + else tp + case tp: MethodType => + tp.derivedLambdaType( + paramInfos = mapNested(tp.paramInfos), + resType = this(tp.resType)) + case tp: TypeLambda => + // Don't recurse into parameter bounds, just cleanup any stray retains annotations + tp.derivedLambdaType( + paramInfos = tp.paramInfos.mapConserve(cleanup(_).bounds), + resType = this(tp.resType)) + case _ => + mapOver(tp) + addVar(addCaptureRefinements(tp1)) + end apply + end mapInferred + + private def transformInferredType(tp: Type, boxed: Boolean)(using Context): Type = + val tp1 = mapInferred(tp) + if boxed then box(tp1) else tp1 + + /** Expand some aliases of function types to the underlying functions. + * Right now, these are only $throws aliases, but this could be generalized. + */ + private def expandThrowsAlias(tp: Type)(using Context) = tp match + case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => + // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` + defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) + case _ => tp + + private def expandThrowsAliases(using Context) = new TypeMap: + def apply(t: Type) = t match + case _: AppliedType => + val t1 = expandThrowsAlias(t) + if t1 ne t then apply(t1) else mapOver(t) + case _: LazyRef => + t + case t @ AnnotatedType(t1, ann) => + // Don't map capture sets, since that would implicitly normalize sets that + // are not well-formed. + t.derivedAnnotatedType(apply(t1), ann) + case _ => + mapOver(t) + + /** Fill in capture sets of curried function types from left to right, using + * a combination of the following two rules: + * + * 1. Expand `{c} (x: A) -> (y: B) -> C` + * to `{c} (x: A) -> {c} (y: B) -> C` + * 2. 
Expand `(x: A) -> (y: B) -> C` where `x` is tracked + * to `(x: A) -> {x} (y: B) -> C` + * + * TODO: Should we also propagate capture sets to the left? + */ + private def expandAbbreviations(using Context) = new TypeMap: + + /** Propagate `outerCs` as well as all tracked parameters as capture set to the result type + * of the dependent function type `tp`. + */ + def propagateDepFunctionResult(tp: Type, outerCs: CaptureSet): Type = tp match + case RefinedType(parent, nme.apply, rinfo: MethodType) => + val localCs = CaptureSet(rinfo.paramRefs.filter(_.isTracked)*) + val rinfo1 = rinfo.derivedLambdaType( + resType = propagateEnclosing(rinfo.resType, CaptureSet.empty, outerCs ++ localCs)) + if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) + else tp + + /** If `tp` is a function type: + * - add `outerCs` as its capture set, + * - propagate `currentCs`, `outerCs`, and all tracked parameters of `tp` to the right. + */ + def propagateEnclosing(tp: Type, currentCs: CaptureSet, outerCs: CaptureSet): Type = tp match + case tp @ AppliedType(tycon, args) if defn.isFunctionClass(tycon.typeSymbol) => + val tycon1 = this(tycon) + val args1 = args.init.mapConserve(this) + val tp1 = + if args1.exists(!_.captureSet.isAlwaysEmpty) then + val propagated = propagateDepFunctionResult( + depFun(tycon, args1, args.last), currentCs ++ outerCs) + propagated match + case RefinedType(_, _, mt: MethodType) => + if mt.isCaptureDependent then propagated + else + // No need to introduce dependent type, switch back to generic function type + tp.derivedAppliedType(tycon1, args1 :+ mt.resType) + else + val resType1 = propagateEnclosing( + args.last, CaptureSet.empty, currentCs ++ outerCs) + tp.derivedAppliedType(tycon1, args1 :+ resType1) + tp1.capturing(outerCs) + case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => + propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) + .capturing(outerCs) + case _ => + mapOver(tp) 
+ + def apply(tp: Type): Type = tp match + case CapturingType(parent, cs) => + tp.derivedCapturingType(propagateEnclosing(parent, cs, CaptureSet.empty), cs) + case _ => + propagateEnclosing(tp, CaptureSet.empty, CaptureSet.empty) + end expandAbbreviations + + private def transformExplicitType(tp: Type, boxed: Boolean)(using Context): Type = + val tp1 = expandThrowsAliases(if boxed then box(tp) else tp) + if tp1 ne tp then capt.println(i"expanded: $tp --> $tp1") + if ctx.settings.YccNoAbbrev.value then tp1 + else expandAbbreviations(tp1) + + /** Transform type of type tree, and remember the transformed type as the type the tree */ + private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = + if !tree.hasRememberedType then + tree.rememberType( + if tree.isInstanceOf[InferredTypeTree] && !exact + then transformInferredType(tree.tpe, boxed) + else transformExplicitType(tree.tpe, boxed)) + + /** Substitute parameter symbols in `from` to paramRefs in corresponding + * method or poly types `to`. We use a single BiTypeMap to do everything. 
+ * @param from a list of lists of type or term parameter symbols of a curried method + * @param to a list of method or poly types corresponding one-to-one to the parameter lists + */ + private class SubstParams(from: List[List[Symbol]], to: List[LambdaType])(using Context) + extends DeepTypeMap, BiTypeMap: + + def apply(t: Type): Type = t match + case t: NamedType => + val sym = t.symbol + def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = + def inner(from: List[Symbol], to: List[ParamRef]): Type = + if from.isEmpty then outer(froms.tail, tos.tail) + else if sym eq from.head then to.head + else inner(from.tail, to.tail) + if tos.isEmpty then t + else inner(froms.head, tos.head.paramRefs) + outer(from, to) + case _ => + mapOver(t) + + def inverse(t: Type): Type = t match + case t: ParamRef => + def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = + if from.isEmpty then t + else if t.binder eq from.head then to.head(t.paramNum).namedType + else recur(from.tail, to.tail) + recur(to, from) + case _ => + mapOver(t) + end SubstParams + + /** Update info of `sym` for CheckCaptures phase only */ + private def updateInfo(sym: Symbol, info: Type)(using Context) = + sym.updateInfoBetween(preRecheckPhase, thisPhase, info) + + def traverse(tree: Tree)(using Context): Unit = + tree match + case tree: DefDef => + if isExcluded(tree.symbol) then + return + tree.tpt match + case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => + tree.paramss.foreach(traverse) + transformTT(tpt, boxed = false, exact = true) + traverse(tree.rhs) + //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") + case _ => + traverseChildren(tree) + case tree @ ValDef(_, tpt: TypeTree, _) => + transformTT(tpt, + boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed + exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set + ) + traverse(tree.rhs) + case tree @ TypeApply(fn, 
args) => + traverse(fn) + for case arg: TypeTree <- args do + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + case _ => + traverseChildren(tree) + tree match + case tree: TypeTree => + transformTT(tree, boxed = false, exact = false) // other types are not boxed + case tree: ValOrDefDef => + val sym = tree.symbol + + // replace an existing symbol info with inferred types where capture sets of + // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the + // capture sets of the types of the method's parameter symbols and result type. + def integrateRT( + info: Type, // symbol info to replace + psymss: List[List[Symbol]], // the local (type and term) parameter symbols corresponding to `info` + prevPsymss: List[List[Symbol]], // the local parameter symbols seen previously in reverse order + prevLambdas: List[LambdaType] // the outer method and polytypes generated previously in reverse order + ): Type = + info match + case mt: MethodOrPoly => + val psyms = psymss.head + mt.companion(mt.paramNames)( + mt1 => + if !psyms.exists(_.isUpdatedAfter(preRecheckPhase)) && !mt.isParamDependent && prevLambdas.isEmpty then + mt.paramInfos + else + val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) + psyms.map(psym => subst(psym.info).asInstanceOf[mt.PInfo]), + mt1 => + integrateRT(mt.resType, psymss.tail, psyms :: prevPsymss, mt1 :: prevLambdas) + ) + case info: ExprType => + info.derivedExprType(resType = + integrateRT(info.resType, psymss, prevPsymss, prevLambdas)) + case _ => + val restp = tree.tpt.knownType + if prevLambdas.isEmpty then restp + else SubstParams(prevPsymss, prevLambdas)(restp) + + if tree.tpt.hasRememberedType && !sym.isConstructor then + val newInfo = integrateRT(sym.info, sym.paramSymss, Nil, Nil) + .showing(i"update info $sym: ${sym.info} --> $result", capt) + if newInfo ne sym.info then + val completer = new LazyType: + def complete(denot: SymDenotation)(using Context) = + 
denot.info = newInfo + recheckDef(tree, sym) + updateInfo(sym, completer) + case tree: Bind => + val sym = tree.symbol + updateInfo(sym, transformInferredType(sym.info, boxed = false)) + case tree: TypeDef => + tree.symbol match + case cls: ClassSymbol => + val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo + if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then + // add capture set to self type of nested classes if no self type is given explicitly + val localRefs = CaptureSet.Var() + val newInfo = ClassInfo(prefix, cls, ps, decls, + CapturingType(cinfo.selfType, localRefs) + .showing(i"inferred self type for $cls: $result", capt)) + updateInfo(cls, newInfo) + cls.thisType.asInstanceOf[ThisType].invalidateCaches() + if cls.is(ModuleClass) then + // if it's a module, the capture set of the module reference is the capture set of the self type + val modul = cls.sourceModule + updateInfo(modul, CapturingType(modul.info, localRefs)) + modul.termRef.invalidateCaches() + case _ => + val info = atPhase(preRecheckPhase)(tree.symbol.info) + val newInfo = transformExplicitType(info, boxed = false) + if newInfo ne info then + updateInfo(tree.symbol, newInfo) + capt.println(i"update info of ${tree.symbol} from $info to $newInfo") + case _ => + end traverse +end Setup diff --git a/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala new file mode 100644 index 000000000000..dacbd27e0f35 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala @@ -0,0 +1,189 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Symbols.*, SymDenotations.*, Contexts.*, Flags.*, Types.*, Decorators.* +import StdNames.nme +import Names.Name +import NameKinds.DefaultGetterName +import Phases.checkCapturesPhase +import config.Printers.capt + +/** Classification and transformation methods for synthetic + * case class methods that need to be treated specially. 
+ * In particular, compute capturing types for some of these methods which + * have inferred (result-)types that need to be established under separate + * compilation. + */ +object Synthetics: + private def isSyntheticCopyMethod(sym: SymDenotation)(using Context) = + sym.name == nme.copy && sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) + + private def isSyntheticCompanionMethod(sym: SymDenotation, names: Name*)(using Context): Boolean = + names.contains(sym.name) && sym.is(Synthetic) && sym.owner.is(Module) && sym.owner.companionClass.is(Case) + + private def isSyntheticCopyDefaultGetterMethod(sym: SymDenotation)(using Context) = sym.name match + case DefaultGetterName(nme.copy, _) => sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) + case _ => false + + /** Is `sym` a synthetic apply, copy, or copy default getter method? + * The types of these symbols are transformed in a special way without + * looking at the definitions's RHS + */ + def needsTransform(symd: SymDenotation)(using Context): Boolean = + isSyntheticCopyMethod(symd) + || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) + || isSyntheticCopyDefaultGetterMethod(symd) + || (symd.symbol eq defn.Object_eq) + || (symd.symbol eq defn.Object_ne) + + /** Method is excluded from regular capture checking. + * Excluded are synthetic class members + * - that override a synthesized case class symbol, or + * - the fromProduct method, or + * - members transformed specially as indicated by `needsTransform`. + */ + def isExcluded(sym: Symbol)(using Context): Boolean = + sym.is(Synthetic) + && sym.owner.isClass + && ( defn.caseClassSynthesized.exists( + ccsym => sym.overriddenSymbol(ccsym.owner.asClass) == ccsym) + || isSyntheticCompanionMethod(sym, nme.fromProduct) + || needsTransform(sym)) + + /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. 
+ * An apply method in a case class like this: + * case class CC(a: {d} A, b: B, {*} c: C) + * would get type + * def apply(a': {d} A, b: B, {*} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } + * where `'` is used to indicate the difference between parameter symbol and refinement name. + * Analogous for the copy method. + */ + private def addCaptureDeps(info: Type)(using Context): Type = info match + case info: MethodType => + val trackedParams = info.paramRefs.filter(atPhase(checkCapturesPhase)(_.isTracked)) + def augmentResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = augmentResult(tp.resType)) + case _ => + val refined = trackedParams.foldLeft(tp) { (parent, pref) => + RefinedType(parent, pref.paramName, + CapturingType( + atPhase(ctx.phase.next)(pref.underlying.stripCapturing), + CaptureSet(pref))) + } + CapturingType(refined, CaptureSet(trackedParams*)) + if trackedParams.isEmpty then info + else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = addCaptureDeps(info.resType)) + case _ => + info + + /** Drop capture dependencies from the type of `apply` or `copy` method of a case class */ + private def dropCaptureDeps(tp: Type)(using Context): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = dropCaptureDeps(tp.resType)) + case CapturingType(parent, _) => + dropCaptureDeps(parent) + case RefinedType(parent, _, _) => + dropCaptureDeps(parent) + case _ => + tp + + /** Add capture information to the type of the default getter of a case class copy method */ + private def addDefaultGetterCapture(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match + case info: MethodOrPoly => + info.derivedLambdaType(resType = addDefaultGetterCapture(info.resType, owner, idx)) + case info: ExprType => + info.derivedExprType(addDefaultGetterCapture(info.resType, owner, idx)) + case 
EventuallyCapturingType(parent, _) => + addDefaultGetterCapture(parent, owner, idx) + case info @ AnnotatedType(parent, annot) => + info.derivedAnnotatedType(addDefaultGetterCapture(parent, owner, idx), annot) + case _ if idx < owner.asClass.paramGetters.length => + val param = owner.asClass.paramGetters(idx) + val pinfo = param.info + atPhase(ctx.phase.next) { + if pinfo.captureSet.isAlwaysEmpty then info + else CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) + } + case _ => + info + + /** Drop capture information from the type of the default getter of a case class copy method */ + private def dropDefaultGetterCapture(info: Type)(using Context): Type = info match + case info: MethodOrPoly => + info.derivedLambdaType(resType = dropDefaultGetterCapture(info.resType)) + case CapturingType(parent, _) => + parent + case info @ AnnotatedType(parent, annot) => + info.derivedAnnotatedType(dropDefaultGetterCapture(parent), annot) + case _ => + info + + /** Augment an unapply of type `(x: C): D` to `(x: {*} C): {x} D` */ + private def addUnapplyCaptures(info: Type)(using Context): Type = info match + case info: MethodType => + val paramInfo :: Nil = info.paramInfos: @unchecked + val newParamInfo = + CapturingType(paramInfo, CaptureSet.universal) + val trackedParam = info.paramRefs.head + def newResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = newResult(tp.resType)) + case _ => + CapturingType(tp, CaptureSet(trackedParam)) + info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) + .showing(i"augment unapply type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = addUnapplyCaptures(info.resType)) + + /** Drop added capture information from the type of an `unapply` */ + private def dropUnapplyCaptures(info: Type)(using Context): Type = info match + case info: MethodType => + info.paramInfos match + case CapturingType(oldParamInfo, _) :: Nil => + 
def oldResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = oldResult(tp.resType)) + case CapturingType(tp, _) => + tp + info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + case _ => + info + case info: PolyType => + info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) + + /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method + * of a case class, transform it to account for capture information. + * The method is run in phase CheckCaptures.Pre + * @pre needsTransform(sym) + */ + def transformToCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match + case DefaultGetterName(nme.copy, n) => + sym.copySymDenotation(info = addDefaultGetterCapture(sym.info, sym.owner, n)) + case nme.unapply => + sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) + case nme.apply | nme.copy => + sym.copySymDenotation(info = addCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) + + /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method + * of a case class, transform it back to what it was before the CC phase. 
+ * @pre needsTransform(sym) + */ + def transformFromCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match + case DefaultGetterName(nme.copy, n) => + sym.copySymDenotation(info = dropDefaultGetterCapture(sym.info)) + case nme.unapply => + sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) + case nme.apply | nme.copy => + sym.copySymDenotation(info = dropCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) + +end Synthetics \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala new file mode 100644 index 000000000000..51b261583feb --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools +package dotc.classpath + +import scala.language.unsafeNulls + +import java.net.URL +import scala.collection.mutable.ArrayBuffer +import scala.collection.immutable.ArraySeq +import dotc.util + +import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientClassPath } + +/** + * A classpath unifying multiple class- and sourcepath entries. + * The Classpath can obtain entries for classes and sources independently + * so it tries to do operations quite optimally - iterating only these collections + * which are needed in the given moment and only as far as it's necessary. 
+ * + * @param aggregates classpath instances containing entries which this class processes + */ +case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) + aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst { + case Some(x) => x + } + } + private val packageIndex: collection.mutable.Map[String, Seq[ClassPath]] = collection.mutable.Map() + private def aggregatesForPackage(pkg: PackageName): Seq[ClassPath] = packageIndex.synchronized { + packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) + } + + override def findClass(className: String): Option[ClassRepresentation] = { + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) + + def findEntry(isSource: Boolean): Option[ClassRepresentation] = + aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { + case Some(s: SourceFileEntry) if isSource => s + case Some(s: ClassFileEntry) if !isSource => s + } + + val classEntry = findEntry(isSource = false) + val sourceEntry = findEntry(isSource = true) + + (classEntry, sourceEntry) match { + case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file)) + case (c @ Some(_), _) => c + case (_, s) => s + } + } + + override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) + + override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct + + override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct + aggregatedPackages + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = + 
getDistinctEntries(_.classes(inPackage)) + + override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = + getDistinctEntries(_.sources(inPackage)) + + override private[dotty] def hasPackage(pkg: PackageName): Boolean = aggregates.exists(_.hasPackage(pkg)) + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = { + val packages: java.util.HashSet[PackageEntry] = new java.util.HashSet[PackageEntry]() + val classesAndSourcesBuffer = collection.mutable.ArrayBuffer[ClassRepresentation]() + val onPackage: PackageEntry => Unit = packages.add(_) + val onClassesAndSources: ClassRepresentation => Unit = classesAndSourcesBuffer += _ + + aggregates.foreach { cp => + try { + cp match { + case ecp: EfficientClassPath => + ecp.list(inPackage, onPackage, onClassesAndSources) + case _ => + val entries = cp.list(inPackage) + entries._1.foreach(entry => packages.add(entry)) + classesAndSourcesBuffer ++= entries._2 + } + } catch { + case ex: java.io.IOException => + val e = FatalError(ex.getMessage) + e.initCause(ex) + throw e + } + } + + val distinctPackages: Seq[PackageEntry] = { + val arr = packages.toArray(new Array[PackageEntry](packages.size())) + ArraySeq.unsafeWrapArray(arr) + } + val distinctClassesAndSources = mergeClassesAndSources(classesAndSourcesBuffer) + ClassPathEntries(distinctPackages, distinctClassesAndSources) + } + + /** + * Returns only one entry for each name. If there's both a source and a class entry, it + * creates an entry containing both of them. If there would be more than one class or source + * entries for the same class it always would use the first entry of each type found on a classpath. 
+ */ + private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { + // based on the implementation from MergedClassPath + var count = 0 + val indices = util.HashMap[String, Int]() + val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) + for { + entry <- entries + } { + val name = entry.name + if (indices.contains(name)) { + val index = indices(name) + val existing = mergedEntries(index) + + if (existing.binary.isEmpty && entry.binary.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get) + if (existing.source.isEmpty && entry.source.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get) + } + else { + indices(name) = count + mergedEntries += entry + count += 1 + } + } + if (mergedEntries.isEmpty) Nil else mergedEntries.toIndexedSeq + } + + private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { + val seenNames = util.HashSet[String]() + val entriesBuffer = new ArrayBuffer[EntryType](1024) + for { + cp <- aggregates + entry <- getEntries(cp) if !seenNames.contains(entry.name) + } + { + entriesBuffer += entry + seenNames += entry.name + } + entriesBuffer.toIndexedSeq + } +} + +object AggregateClassPath { + def createAggregate(parts: ClassPath*): ClassPath = { + val elems = new ArrayBuffer[ClassPath]() + parts foreach { + case AggregateClassPath(ps) => elems ++= ps + case p => elems += p + } + if (elems.size == 1) elems.head + else AggregateClassPath(elems.toIndexedSeq) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala new file mode 100644 index 000000000000..176b6acf9c6c --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. 
+ */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.AbstractFile +import dotty.tools.io.ClassRepresentation + +case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { + def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) +} + +object ClassPathEntries { + val empty = ClassPathEntries(Seq.empty, Seq.empty) +} + +trait ClassFileEntry extends ClassRepresentation { + def file: AbstractFile +} + +trait SourceFileEntry extends ClassRepresentation { + def file: AbstractFile +} + +case class PackageName(dottedString: String) { + val dirPathTrailingSlashJar: String = FileUtils.dirPathInJar(dottedString) + "/" + + val dirPathTrailingSlash: String = + if (java.io.File.separatorChar == '/') + dirPathTrailingSlashJar + else + FileUtils.dirPath(dottedString) + java.io.File.separator + + def isRoot: Boolean = dottedString.isEmpty + + def entryName(entry: String): String = { + if (isRoot) entry else { + val builder = new java.lang.StringBuilder(dottedString.length + 1 + entry.length) + builder.append(dottedString) + builder.append('.') + builder.append(entry) + builder.toString + } + } +} + +trait PackageEntry { + def name: String +} + +private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { + final def fileName: String = file.name + def name: String = FileUtils.stripClassExtension(file.name) // class name + + def binary: Option[AbstractFile] = Some(file) + def source: Option[AbstractFile] = None +} + +private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { + final def fileName: String = file.name + def name: String = FileUtils.stripSourceExtension(file.name) + + def binary: Option[AbstractFile] = None + def source: Option[AbstractFile] = Some(file) +} + +private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: 
AbstractFile) extends ClassRepresentation { + final def fileName: String = classFile.name + def name: String = FileUtils.stripClassExtension(classFile.name) + + def binary: Option[AbstractFile] = Some(classFile) + def source: Option[AbstractFile] = Some(srcFile) +} + +private[dotty] case class PackageEntryImpl(name: String) extends PackageEntry + +private[dotty] trait NoSourcePaths { + def asSourcePathString: String = "" + private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty +} + +private[dotty] trait NoClassPaths { + def findClassFile(className: String): Option[AbstractFile] = None + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala new file mode 100644 index 000000000000..ac8b69381938 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import FileUtils._ +import dotty.tools.io.ClassPath +import dotty.tools.dotc.core.Contexts._ + +/** + * Provides factory methods for classpath. When creating classpath instances for a given path, + * it uses proper type of classpath depending on a types of particular files containing sources or classes. + */ +class ClassPathFactory { + /** + * Create a new classpath based on the abstract file. + */ + def newClassPath(file: AbstractFile)(using Context): ClassPath = ClassPathFactory.newClassPath(file) + + /** + * Creators for sub classpaths which preserve this context. 
+ */ + def sourcesInPath(path: String)(using Context): List[ClassPath] = + for { + file <- expandPath(path, expandStar = false) + dir <- Option(AbstractFile getDirectory file) + } + yield createSourcePath(dir) + + + def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar) + + def expandDir(extdir: String): List[String] = dotty.tools.io.ClassPath.expandDir(extdir) + + def contentsOfDirsInPath(path: String)(using Context): List[ClassPath] = + for { + dir <- expandPath(path, expandStar = false) + name <- expandDir(dir) + entry <- Option(AbstractFile.getDirectory(name)) + } + yield newClassPath(entry) + + def classesInExpandedPath(path: String)(using Context): IndexedSeq[ClassPath] = + classesInPathImpl(path, expand = true).toIndexedSeq + + def classesInPath(path: String)(using Context): List[ClassPath] = classesInPathImpl(path, expand = false) + + def classesInManifest(useManifestClassPath: Boolean)(using Context): List[ClassPath] = + if (useManifestClassPath) dotty.tools.io.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url)) + else Nil + + // Internal + protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = + for { + file <- expandPath(path, expand) + dir <- { + def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None + Option(AbstractFile.getDirectory(file)).orElse(asImage) + } + } + yield newClassPath(dir) + + private def createSourcePath(file: AbstractFile)(using Context): ClassPath = + if (file.isJarOrZip) + ZipAndJarSourcePathFactory.create(file) + else if (file.isDirectory) + new DirectorySourcePath(file.file) + else + sys.error(s"Unsupported sourcepath element: $file") +} + +object ClassPathFactory { + def newClassPath(file: AbstractFile)(using Context): ClassPath = file match { + case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) + case _ => + if (file.isJarOrZip) + 
ZipAndJarClassPathFactory.create(file) + else if (file.isDirectory) + new DirectoryClassPath(file.file) + else + sys.error(s"Unsupported classpath element: $file") + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala new file mode 100644 index 000000000000..a5678970411b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala @@ -0,0 +1,313 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import java.io.{File => JFile} +import java.net.URL +import java.nio.file.{FileSystems, Files} + +import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} +import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors} +import FileUtils._ +import PlainFile.toPlainFile + +import scala.jdk.CollectionConverters._ +import scala.collection.immutable.ArraySeq +import scala.util.control.NonFatal +import language.experimental.pureFunctions + +/** + * A trait allowing to look for classpath entries in directories. It provides common logic for + * classes handling class and source files. + * It makes use of the fact that in the case of nested directories it's easy to find a file + * when we have a name of a package. + * It abstracts over the file representation to work with both JFile and AbstractFile. + */ +trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { + type F + + val dir: F + + protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] + protected def getSubDir(dirName: String): Option[F] + protected def listChildren(dir: F, filter: Option[F -> Boolean] = (None: Option[F -> Boolean])): Array[F] // !cc! 
need explicit typing of default argument + protected def getName(f: F): String + protected def toAbstractFile(f: F): AbstractFile + protected def isPackage(f: F): Boolean + + protected def createFileEntry(file: AbstractFile): FileEntryType + protected def isMatchingFile(f: F): Boolean + + private def getDirectory(forPackage: PackageName): Option[F] = + if (forPackage.isRoot) + Some(dir) + else + getSubDir(forPackage.dirPathTrailingSlash) + + override private[dotty] def hasPackage(pkg: PackageName): Boolean = getDirectory(pkg).isDefined + + private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + val dirForPackage = getDirectory(inPackage) + val nestedDirs: Array[F] = dirForPackage match { + case None => emptyFiles + case Some(directory) => listChildren(directory, Some(isPackage)) + } + ArraySeq.unsafeWrapArray(nestedDirs).map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) + } + + protected def files(inPackage: PackageName): Seq[FileEntryType] = { + val dirForPackage = getDirectory(inPackage) + val files: Array[F] = dirForPackage match { + case None => emptyFiles + case Some(directory) => listChildren(directory, Some(isMatchingFile)) + } + files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq + } + + override def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = { + val dirForPackage = getDirectory(inPackage) + dirForPackage match { + case None => + case Some(directory) => + for (file <- listChildren(directory)) { + if (isPackage(file)) + onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file)))) + else if (isMatchingFile(file)) + onClassesAndSources(createFileEntry(toAbstractFile(file))) + } + } + } +} + +trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] { + type F = JFile + + protected def emptyFiles: Array[JFile] = Array.empty + protected def getSubDir(packageDirName: String): 
Option[JFile] = { + val packageDir = new JFile(dir, packageDirName) + if (packageDir.exists && packageDir.isDirectory) Some(packageDir) + else None + } + protected def listChildren(dir: JFile, filter: Option[JFile -> Boolean]): Array[JFile] = { + val listing = filter match { + case Some(f) => dir.listFiles(mkFileFilter(f)) + case None => dir.listFiles() + } + + if (listing != null) { + // Sort by file name for stable order of directory .class entries in package scope. + // This gives stable results ordering of base type sequences for unrelated classes + // with the same base type depth. + // + // Notably, this will stably infer`Product with Serializable` + // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order. + // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified. + // + // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only + // intended to improve determinism of the compiler for compiler hackers. 
+ java.util.Arrays.sort(listing, + new java.util.Comparator[JFile] { + def compare(o1: JFile, o2: JFile) = o1.getName.compareTo(o2.getName) + }) + listing + } + else Array() + } + protected def getName(f: JFile): String = f.getName + protected def toAbstractFile(f: JFile): AbstractFile = f.toPath.toPlainFile + protected def isPackage(f: JFile): Boolean = f.isPackage + + assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") + + def asURLs: Seq[URL] = Seq(dir.toURI.toURL) + def asClassPathStrings: Seq[String] = Seq(dir.getPath) +} + +object JrtClassPath { + import java.nio.file._, java.net.URI + def apply(release: Option[String]): Option[ClassPath] = { + import scala.util.Properties._ + if (!isJavaAtLeast("9")) None + else { + // Longer term we'd like an official API for this in the JDK + // Discussion: http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738 + + val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + release match { + case Some(v) if v.toInt < currentMajorVersion => + try { + val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") + if (Files.notExists(ctSym)) None + else Some(new CtSymClassPath(ctSym, v.toInt)) + } catch { + case NonFatal(_) => None + } + case _ => + try { + val fs = FileSystems.getFileSystem(URI.create("jrt:/")) + Some(new JrtClassPath(fs)) + } catch { + case _: ProviderNotFoundException | _: FileSystemNotFoundException => None + } + } + } + } +} + +/** + * Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220) + * + * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference + * for the structure of the jrt:// filesystem. + * + * The implementation assumes that no classes exist in the empty package. 
+ */ +final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths { + import java.nio.file.Path, java.nio.file._ + type F = Path + private val dir: Path = fs.getPath("/packages") + + // e.g. "java.lang" -> Seq("/modules/java.base") + private val packageToModuleBases: Map[String, Seq[Path]] = { + val ps = Files.newDirectoryStream(dir).iterator().asScala + def lookup(pack: Path): Seq[Path] = + Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList + ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap + } + + /** Empty string represents root package */ + override private[dotty] def hasPackage(pkg: PackageName): Boolean = packageToModuleBases.contains(pkg.dottedString) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = + packageToModuleBases.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = + if (inPackage.isRoot) Nil + else + packageToModuleBases.getOrElse(inPackage.dottedString, Nil).flatMap(x => + Files.list(x.resolve(inPackage.dirPathTrailingSlash)).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x => + ClassFileEntryImpl(x.toPlainFile)).toVector + + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = + if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) + else ClassPathEntries(packages(inPackage), classes(inPackage)) + + def asURLs: Seq[URL] = Seq(new URL("jrt:/")) + // We don't yet have a scheme to represent the JDK modules in our `-classpath`. + // java models them as entries in the new "module path", we'll probably need to follow this. 
+ def asClassPathStrings: Seq[String] = Nil + + def findClassFile(className: String): Option[AbstractFile] = + if (!className.contains(".")) None + else { + val (inPackage, _) = separatePkgAndClassNames(className) + packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{ x => + val file = x.resolve(FileUtils.dirPath(className) + ".class") + if (Files.exists(file)) file.toPlainFile :: Nil else Nil + }.take(1).toList.headOption + } +} + +/** + * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 + */ +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { + import java.nio.file.Path, java.nio.file._ + + private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) + private val root: Path = fileSystem.getRootDirectories.iterator.next + private val roots = Files.newDirectoryStream(root).iterator.asScala.toList + + // http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/011737.html + private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString + + private val releaseCode: String = codeFor(release) + private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) // exclude `9-modules` + private val rootsForRelease: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) + + // e.g. 
"java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) + private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = { + val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() + val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12") + rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => + val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 + if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) { + val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.') + index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p + } + }) + index + } + + /** Empty string represents root package */ + override private[dotty] def hasPackage(pkg: PackageName) = packageIndex.contains(pkg.dottedString) + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + packageIndex.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector + } + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = { + if (inPackage.isRoot) Nil + else { + val sigFiles = packageIndex.getOrElse(inPackage.dottedString, Nil).iterator.flatMap(p => + Files.list(p).iterator.asScala.filter(_.getFileName.toString.endsWith(".sig"))) + sigFiles.map(f => ClassFileEntryImpl(f.toPlainFile)).toVector + } + } + + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = + if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) + else ClassPathEntries(packages(inPackage), classes(inPackage)) + + def asURLs: Seq[URL] = Nil + def asClassPathStrings: Seq[String] = Nil + def findClassFile(className: String): Option[AbstractFile] = { + if (!className.contains(".")) None + else { + val (inPackage, classSimpleName) = separatePkgAndClassNames(className) + 
packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p => + val path = p.resolve(classSimpleName + ".sig") + if (Files.exists(path)) path.toPlainFile :: Nil else Nil + }.take(1).toList.headOption + } + } +} + +case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { + override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply + + def findClassFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val classFile = new JFile(dir, relativePath + ".class") + if (classFile.exists) { + Some(classFile.toPath.toPlainFile) + } + else None + } + + protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) + protected def isMatchingFile(f: JFile): Boolean = f.isClass + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) +} + +case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths { + def asSourcePathString: String = asClassPathString + + protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) + protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) + + override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl.apply + + private def findSourceFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val sourceFile = LazyList("scala", "java") + .map(ext => new JFile(dir, relativePath + "." 
+ ext)) + .collectFirst { case file if file.exists() => file } + + sourceFile.map(_.toPath.toPlainFile) + } + + private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala new file mode 100644 index 000000000000..0f5ac16b40bf --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools +package dotc.classpath + +import scala.language.unsafeNulls + +import java.io.{File => JFile, FileFilter} +import java.net.URL +import dotty.tools.io.AbstractFile +import language.experimental.pureFunctions + +/** + * Common methods related to Java files and abstract files used in the context of classpath + */ +object FileUtils { + extension (file: AbstractFile) { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) + + def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class") + // FIXME: drop last condition when we stop being compatible with Scala 2.11 + + def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) + + // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + + /** + * Safe method returning a sequence containing one URL representing this file, when underlying file exists, + * and returning given default value in other case + */ + def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) + } + + extension (file: JFile) { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) + + def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class") + // FIXME: drop last condition when we stop being compatible with Scala 2.11 + } + + private val SUFFIX_CLASS = ".class" + private val SUFFIX_SCALA = ".scala" + private val SUFFIX_JAVA = ".java" + private val SUFFIX_SIG = ".sig" + + def stripSourceExtension(fileName: String): String = + if (endsScala(fileName)) stripClassExtension(fileName) + else if (endsJava(fileName)) stripJavaExtension(fileName) + else throw new FatalError("Unexpected source file ending: " + fileName) + + def dirPath(forPackage: String): String = forPackage.replace('.', JFile.separatorChar) + + def dirPathInJar(forPackage: String): String = forPackage.replace('.', '/') + + inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) && filename.length > suffix.length + + def endsClass(fileName: String): Boolean = + ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) + + def endsScalaOrJava(fileName: String): Boolean = + endsScala(fileName) || endsJava(fileName) + + def endsJava(fileName: String): Boolean = + ends (fileName, SUFFIX_JAVA) + + def endsScala(fileName: String): Boolean = + ends (fileName, SUFFIX_SCALA) + + def stripClassExtension(fileName: String): String = + fileName.substring(0, fileName.lastIndexOf('.')) + + def stripJavaExtension(fileName: String): String = + fileName.substring(0, fileName.length - 5) // equivalent of fileName.length - SUFFIX_JAVA.length + + 
// probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed + // because then some tests in partest don't pass + def mayBeValidPackage(dirName: String): Boolean = + (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') + + def mkFileFilter(f: JFile -> Boolean): FileFilter = new FileFilter { + def accept(pathname: JFile): Boolean = f(pathname) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala new file mode 100644 index 000000000000..ea7412f15d8a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.ClassPath.RootPackage + +/** + * Common methods related to package names represented as String + */ +object PackageNameUtils { + + /** + * @param fullClassName full class name with package + * @return (package, simple class name) + */ + inline def separatePkgAndClassNames(fullClassName: String): (String, String) = { + val lastDotIndex = fullClassName.lastIndexOf('.') + if (lastDotIndex == -1) + (RootPackage, fullClassName) + else + (fullClassName.substring(0, lastDotIndex).nn, fullClassName.substring(lastDotIndex + 1).nn) + } + + def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." 
+ + /** + * `true` if `packageDottedName` is a package directly nested in `inPackage`, for example: + * - `packageContains("scala", "scala.collection")` + * - `packageContains("", "scala")` + */ + def packageContains(inPackage: String, packageDottedName: String) = { + if (packageDottedName.contains(".")) + packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length + else inPackage == "" + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala new file mode 100644 index 000000000000..ac80d543b539 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala @@ -0,0 +1,55 @@ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import dotty.tools.io.ClassRepresentation +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import FileUtils._ +import java.net.URL + +import dotty.tools.io.ClassPath +import language.experimental.pureFunctions + +case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { + type F = AbstractFile + + // From AbstractFileClassLoader + private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { + var file: AbstractFile = base + val dirParts = pathParts.init.iterator + while (dirParts.hasNext) { + val dirPart = dirParts.next + file = file.lookupName(dirPart, directory = true) + if (file == null) + return null + } + file.lookupName(pathParts.last, directory = directory) + } + + protected def emptyFiles: Array[AbstractFile] = Array.empty + protected def getSubDir(packageDirName: String): Option[AbstractFile] = + Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) + protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile -> Boolean]): Array[F] = 
filter match { + case Some(f) => dir.iterator.filter(f).toArray + case _ => dir.toArray + } + def getName(f: AbstractFile): String = f.name + def toAbstractFile(f: AbstractFile): AbstractFile = f + def isPackage(f: AbstractFile): Boolean = f.isPackage + + // mimic the behavior of the old nsc.util.DirectoryClassPath + def asURLs: Seq[URL] = Seq(new URL(dir.name)) + def asClassPathStrings: Seq[String] = Seq(dir.path) + + override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply + + def findClassFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + ".class" + Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) + } + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) + + protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) + protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala new file mode 100644 index 000000000000..865f95551a0b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -0,0 +1,205 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc +package classpath + +import scala.language.unsafeNulls + +import java.io.File +import java.net.URL +import java.nio.file.Files +import java.nio.file.attribute.{BasicFileAttributes, FileTime} + +import scala.annotation.tailrec +import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} +import dotty.tools.dotc.core.Contexts._ +import FileUtils._ + +/** + * A trait providing an optional cache for classpath entries obtained from zip and jar files. + * It allows us to e.g. 
reduce significantly memory used by PresentationCompilers in Scala IDE + * when there are a lot of projects having a lot of common dependencies. + */ +sealed trait ZipAndJarFileLookupFactory { + private val cache = new FileBasedCache[ClassPath] + + def create(zipFile: AbstractFile)(using Context): ClassPath = + val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) + if (ctx.settings.YdisableFlatCpCaching.value || zipFile.file == null) createForZipFile(zipFile, release) + else createUsingCache(zipFile, release) + + protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath + + private def createUsingCache(zipFile: AbstractFile, release: Option[String]): ClassPath = + cache.getOrCreate(zipFile.file.toPath, () => createForZipFile(zipFile, release)) +} + +/** + * Manages creation of classpath for class files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. + */ +object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { + private case class ZipArchiveClassPath(zipFile: File, override val release: Option[String]) + extends ZipArchiveFileLookup[ClassFileEntryImpl] + with NoSourcePaths { + + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + file(PackageName(pkg), simpleClassName + ".class").map(_.file) + } + + // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. 
+ override def findClass(className: String): Option[ClassRepresentation] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + file(PackageName(pkg), simpleClassName + ".class") + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass + } + + /** + * This type of classpath is closely related to the support for JSR-223. + * Its usage can be observed e.g. when running: + * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala + * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: + * Name: scala/Function2$mcFJD$sp.class + */ + private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + classes(PackageName(pkg)).find(_.name == simpleClassName).map(_.file) + } + + override def asClassPathStrings: Seq[String] = Seq(file.path) + + override def asURLs: Seq[URL] = file.toURLs() + + import ManifestResourcesClassPath.PackageFileInfo + import ManifestResourcesClassPath.PackageInfo + + /** + * A cache mapping package name to abstract file for package directory and subpackages of given package. + * + * ManifestResources can iterate through the collections of entries from e.g. remote jar file. + * We can't just specify the path to the concrete directory etc. so we can't just 'jump' into + * given package, when it's needed. On the other hand we can iterate over entries to get + * AbstractFiles, iterate over entries of these files etc. 
+ * + * Instead of traversing a tree of AbstractFiles once and caching all entries or traversing each time, + * when we need subpackages of a given package or its classes, we traverse once and cache only packages. + * Classes for given package can be then easily loaded when they are needed. + */ + private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { + val packages = util.HashMap[String, PackageFileInfo]() + + def getSubpackages(dir: AbstractFile): List[AbstractFile] = + (for (file <- dir if file.isPackage) yield file).toList + + @tailrec + def traverse(packagePrefix: String, + filesForPrefix: List[AbstractFile], + subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { + case pkgFile :: remainingFiles => + val subpackages = getSubpackages(pkgFile) + val fullPkgName = packagePrefix + pkgFile.name + packages(fullPkgName) = PackageFileInfo(pkgFile, subpackages) + val newPackagePrefix = fullPkgName + "." + subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) + traverse(packagePrefix, remainingFiles, subpackagesQueue) + case Nil if subpackagesQueue.nonEmpty => + val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() + traverse(packagePrefix, filesForPrefix, subpackagesQueue) + case _ => + } + + val subpackages = getSubpackages(file) + packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) + traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) + packages + } + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = cachedPackages.get(inPackage.dottedString) match { + case None => Seq.empty + case Some(PackageFileInfo(_, subpackages)) => + subpackages.map(packageFile => PackageEntryImpl(inPackage.entryName(packageFile.name))) + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match { + case None => Seq.empty + case Some(PackageFileInfo(pkg, 
_)) => + (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file)).toSeq + } + + override private[dotty] def hasPackage(pkg: PackageName) = cachedPackages.contains(pkg.dottedString) + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage)) + } + + private object ManifestResourcesClassPath { + case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile]) + case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) + } + + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = + if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) + else ZipArchiveClassPath(zipFile.file, release) + + private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { + case manifestRes: ManifestResources => + ManifestResourcesClassPath(manifestRes) + case _ => + val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile" + throw new IllegalArgumentException(errorMsg) + } +} + +/** + * Manages creation of classpath for source files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. 
+ */ +object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { + private case class ZipArchiveSourcePath(zipFile: File) + extends ZipArchiveFileLookup[SourceFileEntryImpl] + with NoClassPaths { + + def release: Option[String] = None + + override def asSourcePathString: String = asClassPathString + + override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource + } + + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) +} + +final class FileBasedCache[T] { + private case class Stamp(lastModified: FileTime, fileKey: Object) + private val cache = collection.mutable.Map.empty[java.nio.file.Path, (Stamp, T)] + + def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized { + val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) + val lastModified = attrs.lastModifiedTime() + // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp + val fileKey = attrs.fileKey() + val stamp = Stamp(lastModified, fileKey) + cache.get(path) match { + case Some((cachedStamp, cached)) if cachedStamp == stamp => cached + case _ => + val value = create() + cache.put(path, (stamp, value)) + value + } + } + + def clear(): Unit = cache.synchronized { + // TODO support closing + // cache.valuesIterator.foreach(_.close()) + cache.clear() + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala new file mode 100644 index 000000000000..e241feee8244 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala @@ -0,0 +1,72 @@ +/* + * 
Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import java.io.File +import java.net.URL + +import dotty.tools.io.{ AbstractFile, FileZipArchive } +import FileUtils._ +import dotty.tools.io.{EfficientClassPath, ClassRepresentation} + +/** + * A trait allowing to look for classpath entries of given type in zip and jar files. + * It provides common logic for classes handling class and source files. + * It's aware of things like e.g. META-INF directory which is correctly skipped. + */ +trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { + val zipFile: File + def release: Option[String] + + assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") + + override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) + override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) + + private val archive = new FileZipArchive(zipFile.toPath, release) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if entry.isPackage + } + yield PackageEntryImpl(inPackage.entryName(entry.name)) + } + + protected def files(inPackage: PackageName): Seq[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if isRequiredFileType(entry) + } + yield createFileEntry(entry) + + protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage) + entry <- Option(dirEntry.lookupName(name, directory = false)) + if isRequiredFileType(entry) + } + yield createFileEntry(entry) + + override def hasPackage(pkg: PackageName) = findDirEntry(pkg).isDefined + def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = + findDirEntry(inPackage) match { + case Some(dirEntry) => + 
for (entry <- dirEntry.iterator) { + if (entry.isPackage) + onPackageEntry(PackageEntryImpl(inPackage.entryName(entry.name))) + else if (isRequiredFileType(entry)) + onClassesAndSources(createFileEntry(entry)) + } + case None => + } + + private def findDirEntry(pkg: PackageName): Option[archive.DirEntry] = + archive.allDirs.get(pkg.dirPathTrailingSlashJar) + + protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType + protected def isRequiredFileType(file: AbstractFile): Boolean +} diff --git a/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala new file mode 100644 index 000000000000..68c900e405da --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala @@ -0,0 +1,198 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import Settings._ +import core.Contexts._ +import printing.Highlighting + +import scala.util.chaining.given +import scala.PartialFunction.cond + +trait CliCommand: + + type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup + + def versionMsg: String + + def ifErrorsMsg: String + + /** The name of the command */ + def cmdName: String + + def isHelpFlag(using settings: ConcreteSettings)(using SettingsState): Boolean + + def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String + + private def explainAdvanced = """ + |-- Notes on option parsing -- + |Boolean settings are always false unless set. + |Where multiple values are accepted, they should be comma-separated. + | example: -Xplugin:plugin1,plugin2 + | means one or a comma-separated list of: + | - (partial) phase names with an optional "+" suffix to include the next phase + | - the string "all" + | example: -Xprint:all prints all phases. + | example: -Xprint:typer,mixin prints the typer and mixin phases. + | example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase. 
+ | This is useful because during the tree transform of phase X, we often + | already are in phase X + 1. + """ + + /** Distill arguments into summary detailing settings, errors and files to main */ + def distill(args: Array[String], sg: Settings.SettingGroup)(ss: SettingsState = sg.defaultState)(using Context): ArgsSummary = + + // expand out @filename to the contents of that filename + def expandedArguments = args.toList flatMap { + case x if x startsWith "@" => CommandLineParser.expandArg(x) + case x => List(x) + } + + sg.processArguments(expandedArguments, processAll = true, settingsState = ss) + end distill + + /** Creates a help message for a subset of options based on cond */ + protected def availableOptionsMsg(p: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + // result is (Option Name, description\ndefault: value\nchoices: x, y, z + def help(s: Setting[?]): (String, String) = + // For now, skip the default values that do not make sense for the end user, such as 'false' for the version command. + def defaultValue = s.default match + case _: Int | _: String => s.default.toString + case _ => "" + val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices ${s.legalChoices}" else "") + (s.name, info.filter(_.nonEmpty).mkString("\n")) + end help + + val ss = settings.allSettings.filter(p).toList.sortBy(_.name) + val formatter = Columnator("", "", maxField = 30) + val fresh = ContextBase().initialCtx.fresh.setSettings(summon[SettingsState]) + formatter(List(ss.map(help) :+ ("@", "A text file containing compiler arguments (options and source files).")))(using fresh) + end availableOptionsMsg + + protected def shortUsage: String = s"Usage: $cmdName " + + protected def createUsageMsg(label: String, shouldExplain: Boolean, cond: Setting[?] 
=> Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + val prefix = List( + Some(shortUsage), + Some(explainAdvanced).filter(_ => shouldExplain), + Some(label + " options include:") + ).flatten.mkString("\n") + + prefix + "\n" + availableOptionsMsg(cond) + + protected def isStandard(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + !isVerbose(s) && !isWarning(s) && !isAdvanced(s) && !isPrivate(s) || s.name == "-Werror" || s.name == "-Wconf" + protected def isVerbose(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-V") && s.name != "-V" + protected def isWarning(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-W") && s.name != "-W" || s.name == "-Xlint" + protected def isAdvanced(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-X") && s.name != "-X" + protected def isPrivate(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-Y") && s.name != "-Y" + protected def shortHelp(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): String = + s.description.linesIterator.next() + protected def isHelping(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + cond(s.value) { + case ss: List[?] 
if s.isMultivalue => ss.contains("help") + case s: String => "help" == s + } + + /** Messages explaining usage and options */ + protected def usageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("where possible standard", shouldExplain = false, isStandard) + protected def vusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible verbose", shouldExplain = true, isVerbose) + protected def wusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible warning", shouldExplain = true, isWarning) + protected def xusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible advanced", shouldExplain = true, isAdvanced) + protected def yusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible private", shouldExplain = true, isPrivate) + + /** Used for the formatted output of -Xshow-phases */ + protected def phasesMessage(using Context): String = + val phases = new Compiler().phases + val formatter = Columnator("phase name", "description", maxField = 25) + formatter(phases.map(mega => mega.map(p => (p.phaseName, p.description)))) + + /** Provide usage feedback on argument summary, assuming that all settings + * are already applied in context. + * @return Either Some list of files passed as arguments or None if further processing should be interrupted. 
+ */ + def checkUsage(summary: ArgsSummary, sourcesRequired: Boolean)(using settings: ConcreteSettings)(using SettingsState, Context): Option[List[String]] = + // Print all warnings encountered during arguments parsing + summary.warnings.foreach(report.warning(_)) + + if summary.errors.nonEmpty then + summary.errors foreach (report.error(_)) + report.echo(ifErrorsMsg) + None + else if settings.version.value then + report.echo(versionMsg) + None + else if isHelpFlag then + report.echo(helpMsg) + None + else if (sourcesRequired && summary.arguments.isEmpty) + report.echo(usageMessage) + None + else + Some(summary.arguments) + + extension [T](setting: Setting[T]) + protected def value(using ss: SettingsState): T = setting.valueIn(ss) + + extension (s: String) + def padLeft(width: Int): String = String.format(s"%${width}s", s) + + // Formatting for -help and -Vphases in two columns, handling long field1 and wrapping long field2 + class Columnator(heading1: String, heading2: String, maxField: Int, separation: Int = 2): + def apply(texts: List[List[(String, String)]])(using Context): String = StringBuilder().tap(columnate(_, texts)).toString + + private def columnate(sb: StringBuilder, texts: List[List[(String, String)]])(using Context): Unit = + import Highlighting.* + val colors = Seq(Green(_), Yellow(_), Magenta(_), Cyan(_), Red(_)) + val nocolor = texts.length == 1 + def color(index: Int): String => Highlight = if nocolor then NoColor(_) else colors(index % colors.length) + val maxCol = ctx.settings.pageWidth.value + val field1 = maxField.min(texts.flatten.map(_._1.length).filter(_ < maxField).max) // widest field under maxField + val field2 = if field1 + separation + maxField < maxCol then maxCol - field1 - separation else 0 // skinny window -> terminal wrap + val separator = " " * separation + val EOL = "\n" + def formatField1(text: String): String = if text.length <= field1 then text.padLeft(field1) else text + EOL + "".padLeft(field1) + def formatField2(text: 
String): String = + def loopOverField2(fld: String): List[String] = + if field2 == 0 || fld.length <= field2 then List(fld) + else + fld.lastIndexOf(" ", field2) match + case -1 => List(fld) + case i => val (prefix, rest) = fld.splitAt(i) ; prefix :: loopOverField2(rest.trim) + text.split("\n").toList.flatMap(loopOverField2).filter(_.nonEmpty).mkString(EOL + "".padLeft(field1) + separator) + end formatField2 + def format(first: String, second: String, index: Int, colorPicker: Int => String => Highlight) = + sb.append(colorPicker(index)(formatField1(first)).show) + .append(separator) + .append(formatField2(second)) + .append(EOL): Unit + def fancy(first: String, second: String, index: Int) = format(first, second, index, color) + def plain(first: String, second: String) = format(first, second, 0, _ => NoColor(_)) + + if heading1.nonEmpty then + plain(heading1, heading2) + plain("-" * heading1.length, "-" * heading2.length) + + def emit(index: Int)(textPair: (String, String)): Unit = fancy(textPair._1, textPair._2, index) + def group(index: Int)(body: Int => Unit): Unit = + if !ctx.useColors then plain(s"{", "") + body(index) + if !ctx.useColors then plain(s"}", "") + + texts.zipWithIndex.foreach { (text, index) => + text match + case List(single) => emit(index)(single) + case Nil => + case mega => group(index)(i => mega.foreach(emit(i))) + } + end Columnator diff --git a/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala new file mode 100644 index 000000000000..2e76561c9913 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala @@ -0,0 +1,125 @@ +package dotty.tools.dotc.config + +import java.lang.Character.isWhitespace +import java.nio.file.{Files, Paths} +import scala.annotation.tailrec +import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters.* + +/** Split a line of text using shell conventions. 
+ */ +object CommandLineParser: + inline private val DQ = '"' + inline private val SQ = '\'' + inline private val EOF = -1 + + /** Split the line into tokens separated by whitespace. + * + * Single or double quotes can be embedded to preserve internal whitespace: + * + * `""" echo "hello, world!" """` => "echo" :: "hello, world!" :: Nil + * `""" echo hello,' 'world! """` => "echo" :: "hello, world!" :: Nil + * `""" echo \"hello, world!\" """` => "echo" :: "\"hello," :: "world!\"" :: Nil + * + * The embedded quotes are stripped. Escaping backslash is not stripped. + * + * Invoke `errorFn` with a descriptive message if an end quote is missing. + */ + def tokenize(line: String, errorFn: String => Unit): List[String] = + + var accum: List[String] = Nil + + var pos = 0 + var start = 0 + val qpos = new ArrayBuffer[Int](16) // positions of paired quotes in current token + + inline def cur = if done then EOF else line.charAt(pos): Int + inline def bump() = pos += 1 + inline def done = pos >= line.length + + // Skip to the given unescaped end quote; false on no more input. + def skipToEndQuote(q: Int): Boolean = + var escaped = false + def terminal = cur match + case _ if escaped => escaped = false ; false + case '\\' => escaped = true ; false + case `q` | EOF => true + case _ => false + while !terminal do bump() + !done + + // Skip to the next whitespace word boundary; record unescaped embedded quotes; false on missing quote. 
+ def skipToDelim(): Boolean = + var escaped = false + inline def quote() = { qpos += pos ; bump() } + @tailrec def advance(): Boolean = cur match + case _ if escaped => escaped = false ; bump() ; advance() + case '\\' => escaped = true ; bump() ; advance() + case q @ (DQ | SQ) => { quote() ; skipToEndQuote(q) } && { quote() ; advance() } + case EOF => true + case c if isWhitespace(c) => true + case _ => bump(); advance() + advance() + + def copyText(): String = + val buf = new java.lang.StringBuilder + var p = start + var i = 0 + while p < pos do + if i >= qpos.size then + buf.append(line, p, pos) + p = pos + else if p == qpos(i) then + buf.append(line, qpos(i)+1, qpos(i+1)) + p = qpos(i+1)+1 + i += 2 + else + buf.append(line, p, qpos(i)) + p = qpos(i) + buf.toString + + // the current token, stripped of any embedded quotes. + def text(): String = + val res = + if qpos.isEmpty then line.substring(start, pos) + else if qpos(0) == start && qpos(1) == pos then line.substring(start+1, pos-1) + else copyText() + qpos.clear() + res.nn + + inline def badquote() = errorFn(s"Unmatched quote [${qpos.last}](${line.charAt(qpos.last)})") + + inline def skipWhitespace() = while isWhitespace(cur) do bump() + + @tailrec def loop(): List[String] = + skipWhitespace() + start = pos + if done then + accum.reverse + else if !skipToDelim() then + badquote() + Nil + else + accum ::= text() + loop() + end loop + + loop() + end tokenize + + def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) + + /** Expands all arguments starting with @ to the contents of the file named like each argument. 
+ */ + def expandArg(arg: String): List[String] = + val path = Paths.get(arg.stripPrefix("@")) + if !Files.exists(path) then + System.err.nn.println(s"Argument file ${path.nn.getFileName} could not be found") + Nil + else + def stripComment(s: String) = s.indexOf('#') match { case -1 => s case i => s.substring(0, i) } + val lines = Files.readAllLines(path).nn + val params = lines.asScala.map(stripComment).filter(!_.nn.isEmpty).mkString(" ") + tokenize(params) + + class ParseException(msg: String) extends RuntimeException(msg) diff --git a/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala new file mode 100644 index 000000000000..41e123472a75 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala @@ -0,0 +1,26 @@ +package dotty.tools.dotc +package config + +import Settings._ +import core.Contexts._ + +abstract class CompilerCommand extends CliCommand: + type ConcreteSettings = ScalaSettings + + final def helpMsg(using settings: ScalaSettings)(using SettingsState, Context): String = + settings.allSettings.find(isHelping) match + case Some(s) => s.description + case _ => + if (settings.help.value) usageMessage + else if (settings.Vhelp.value) vusageMessage + else if (settings.Whelp.value) wusageMessage + else if (settings.Xhelp.value) xusageMessage + else if (settings.Yhelp.value) yusageMessage + else if (settings.showPlugins.value) ctx.base.pluginDescriptions + else if (settings.XshowPhases.value) phasesMessage + else "" + + final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = + import settings._ + val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) + flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala new file mode 100644 index 000000000000..17e3ec352e7c --- /dev/null +++ 
b/tests/pos-with-compiler-cc/dotc/config/Config.scala @@ -0,0 +1,248 @@ +package dotty.tools.dotc.config + +object Config { + + inline val cacheMembersNamed = true + inline val cacheAsSeenFrom = true + inline val cacheMemberNames = true + inline val cacheImplicitScopes = true + inline val cacheMatchReduced = true + + /** If true, the `runWithOwner` operation uses a re-usable context, + * similar to explore. This requires that the context does not escape + * the call. If false, `runWithOwner` runs its operation argument + * in a fresh context. + */ + inline val reuseOwnerContexts = true + + inline val checkCacheMembersNamed = false + + /** When updating a constraint bound, check that the constrained parameter + * does not appear at the top-level of either of its bounds. + */ + inline val checkConstraintsNonCyclic = false + + /** Check that each constraint resulting from a subtype test + * is satisfiable. Also check that a type variable instantiation + * satisfies its constraints. + * Note that this can fail when bad bounds are in scope, like in + * tests/neg/i4721a.scala. + */ + inline val checkConstraintsSatisfiable = false + + /** Check that each constraint is fully propagated. i.e. + * If P <: Q then the upper bound of P is a subtype of the upper bound of Q + * and the lower bound of Q is a subtype of the lower bound of P. + */ + inline val checkConstraintsPropagated = false + + /** Check that constraint bounds do not contain wildcard types */ + inline val checkNoWildcardsInConstraint = false + + /** If a constraint is over a type lambda `tl` and `tvar` is one of + * the type variables associated with `tl` in the constraint, check + * that the origin of `tvar` is a parameter of `tl`. + */ + inline val checkConsistentVars = false + + /** Check that constraints of globally committable typer states are closed. + * NOTE: When enabled, the check can cause CyclicReference errors because + * it traverses all elements of a type. 
Such failures were observed when + * compiling all of dotty together (source seems to be in GenBCode which + * accesses javac's settings.) + * + * It is recommended to turn this option on only when chasing down + * a TypeParamRef instantiation error. See comment in Types.TypeVar.instantiate. + */ + inline val debugCheckConstraintsClosed = false + + /** Check that no type appearing as the info of a SymDenotation contains + * skolem types. + */ + inline val checkNoSkolemsInInfo = false + + /** Check that Name#toString is not called directly from backend by analyzing + * the stack trace of each toString call on names. This is very expensive, + * so not suitable for continuous testing. But it can be used to find a problem + * when running a specific test. + */ + inline val checkBackendNames = false + + /** Check that re-used type comparers are in their initialization state */ + inline val checkTypeComparerReset = false + + /** Type comparer will fail with an assert if the upper bound + * of a constrained parameter becomes Nothing. This should be turned + * on only for specific debugging as normally instantiation to Nothing + * is not an error condition. + */ + inline val failOnInstantiationToNothing = false + + /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set. + * The reason to have an option as well as the present global switch is + * that the noDoubleDef checking is done in a hotspot, and we do not + * want to incur the overhead of checking an option each time. + */ + inline val checkNoDoubleBindings = true + + /** Check positions for consistency after parsing */ + inline val checkPositions = true + + /** Check that typed trees don't point to untyped ones */ + inline val checkTreesConsistent = false + + /** Show subtype traces for all deep subtype recursions */ + inline val traceDeepSubTypeRecursions = false + + /** When explaining subtypes and this flag is set, also show the classes of the compared types. 
*/ + inline val verboseExplainSubtype = false + + /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ + inline val fastPathForRefinedSubtype = true + + /** If this flag is set, and we compute `T1[X1]` & `T2[X2]` as a new + * upper bound of a constrained parameter, try to align the arguments by computing + * `S1 =:= S2` (which might instantiate type parameters). + * This rule is contentious because it cuts the constraint set. + * + * For more info, see the comment in `TypeComparer#glbArgs`. + */ + inline val alignArgsInAnd = true + + /** If this flag is set, higher-kinded applications are checked for validity + */ + inline val checkHKApplications = false + + /** If this flag is set, method types are checked for valid parameter references + */ + inline val checkMethodTypes = false + + /** If this flag is set, it is checked that TypeRefs don't refer directly + * to themselves. + */ + inline val checkTypeRefCycles = false + + /** If this flag is set, we check that types assigned to trees are error types only + * if some error was already reported. There are complicicated scenarios where this + * is not true. An example is TestNonCyclic in posTwice. If we remove the + * first (unused) import `import dotty.tools.dotc.core.Types.Type` in `CompilationUnit`, + * we end up assigning a CyclicReference error type to an import expression `annotation` + * before the cyclic reference is reported. What happens is that the error was reported + * as a result of a completion in a not-yet committed typerstate. So we cannot enforce + * this in all circumstances. But since it is almost always true it is useful to + * keep the Config option for debugging. + */ + inline val checkUnreportedErrors = false + + /** If this flag is set, it is checked that class type parameters are + * only references with NoPrefix or ThisTypes as prefixes. 
This option + * is usually disabled, because there are still some legitimate cases where + * this can arise (e.g. for pos/Map.scala, in LambdaType.integrate). + */ + inline val checkTypeParamRefs = false + + /** The recursion depth for showing a summarized string */ + inline val summarizeDepth = 2 + + /** Check that variances of lambda arguments match the + * variance of the underlying lambda class. + */ + inline val checkLambdaVariance = false + + /** Check that certain types cannot be created in erasedTypes phases. + * Note: Turning this option on will get some false negatives, since it is + * possible that And/Or types are still created during erasure as the result + * of some operation on an existing type. + */ + inline val checkUnerased = false + + /** Check that atoms-based comparisons match regular comparisons that do not + * take atoms into account. The two have to give the same results, since + * atoms comparison is intended to be just an optimization. + */ + inline val checkAtomsComparisons = false + + /** In `derivedSelect`, rewrite + * + * (S & T)#A --> S#A & T#A + * (S | T)#A --> S#A | T#A + * + * Not sure whether this is useful. Preliminary measurements show a slowdown of about + * 7% for the build when this option is enabled. + */ + inline val splitProjections = false + + /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for + * `[Xs] -> U` to `[Xs := Ts]U`. + * Turning this flag on was observed to give a ~6% speedup on the JUnit test suite. + */ + inline val simplifyApplications = true + + /** Assume -indent by default */ + inline val defaultIndent = true + + /** If set, prints a trace of all symbol completions */ + inline val showCompletions = false + + /** If set, method results that are context functions are flattened by adding + * the parameters of the context function results to the methods themselves. + * This is an optimization that reduces closure allocations. 
+ */ + inline val flattenContextFunctionResults = true + + /** If set, enables tracing */ + inline val tracingEnabled = false + + /** Initial capacity of the uniques HashMap. + * Note: This should be a power of two to work with util.HashSet + */ + inline val initialUniquesCapacity = 0x8000 + + /** How many recursive calls to NamedType#underlying are performed before logging starts. */ + inline val LogPendingUnderlyingThreshold = 50 + + /** How many recursive calls to isSubType are performed before logging starts. */ + inline val LogPendingSubTypesThreshold = 50 + + /** How many recursive calls to findMember are performed before logging names starts + * Note: this threshold has to be chosen carefully. Too large, and programs + * like tests/pos/IterableSelfRec go into polynomial (or even exponential?) + * compile time slowdown. Too small and normal programs will cause the compiler to + * do inefficient operations on findMember. The current value is determined + * so that (1) IterableSelfRec still compiles in reasonable time (< 10sec) (2) Compiling + * dotty itself only causes small pending names lists to be generated (we measured + * at max 6 elements) and these lists are never searched with contains. + */ + inline val LogPendingFindMemberThreshold = 9 + + /** When in IDE, turn StaleSymbol errors into warnings instead of crashing */ + inline val ignoreStaleInIDE = true + + /** If true, `Denotation#asSeenFrom` is allowed to return an existing + * `SymDenotation` instead of allocating a new `SingleDenotation` if + * the two would only differ in their `prefix` (SymDenotation always + * have `NoPrefix` as their prefix). + * This is done for performance reasons: when compiling Dotty itself this + * reduces the number of allocated denotations by ~50%. + */ + inline val reuseSymDenotations = true + + /** If `checkLevelsOnConstraints` is true, check levels of type variables + * and create fresh ones as needed when bounds are first entered intot he constraint. 
+   *  If `checkLevelsOnInstantiation` is true, allow level-incorrect constraints but
+   *  fix levels on type variable instantiation.
+   */
+  inline val checkLevelsOnConstraints = false
+  inline val checkLevelsOnInstantiation = true
+
+  /** If true, print capturing types in the form `{c} T`.
+   *  If false, print them in the form `T @retains(c)`.
+   */
+  inline val printCaptureSetsAsPrefix = true
+
+  /** If true, allow mapping capture set variables under captureChecking with maps that are neither
+   *  bijective nor idempotent. We currently do not know how to do this correctly in all
+   *  cases, though.
+   */
+  inline val ccAllowUnsoundMaps = false
+}
diff --git a/tests/pos-with-compiler-cc/dotc/config/Feature.scala b/tests/pos-with-compiler-cc/dotc/config/Feature.scala
new file mode 100644
index 000000000000..c482bbe0911f
--- /dev/null
+++ b/tests/pos-with-compiler-cc/dotc/config/Feature.scala
@@ -0,0 +1,168 @@
+package dotty.tools
+package dotc
+package config
+
+import core._
+import Contexts._, Symbols._, Names._
+import StdNames.nme
+import Decorators.*
+import util.{SrcPos, NoSourcePosition}
+import SourceVersion._
+import reporting.Message
+import NameKinds.QualifiedName
+import language.experimental.pureFunctions
+
+object Feature:
+
+  def experimental(str: PreName): TermName =
+    QualifiedName(nme.experimental, str.toTermName)
+
+  private def deprecated(str: PreName): TermName =
+    QualifiedName(nme.deprecated, str.toTermName)
+
+  private val namedTypeArguments = experimental("namedTypeArguments")
+  private val genericNumberLiterals = experimental("genericNumberLiterals")
+  val scala2macros = experimental("macros")
+
+  val dependent = experimental("dependent")
+  val erasedDefinitions = experimental("erasedDefinitions")
+  val symbolLiterals = deprecated("symbolLiterals")
+  val fewerBraces = experimental("fewerBraces")
+  val saferExceptions = experimental("saferExceptions")
+  val pureFunctions = experimental("pureFunctions")
+  val captureChecking =
experimental("captureChecking")
+
+  val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking)
+
+  /** Is `feature` enabled by a command-line setting? The enabling setting is
+   *
+   *      -language:<prefix>feature
+   *
+   *  where <prefix> is the fully qualified name of `owner`, followed by a ".",
+   *  but subtracting the prefix `scala.language.` at the front.
+   */
+  def enabledBySetting(feature: TermName)(using Context): Boolean =
+    ctx.base.settings.language.value.contains(feature.toString)
+
+  /** Is `feature` enabled by an import? This is the case if the feature
+   *  is imported by a named import
+   *
+   *      import owner.feature
+   *
+   *  and there is no visible nested import that excludes the feature, as in
+   *
+   *      import owner.{ feature => _ }
+   */
+  def enabledByImport(feature: TermName)(using Context): Boolean =
+    //atPhase(typerPhase) {
+      val info = ctx.importInfo
+      info != null && info.featureImported(feature)
+    //}
+
+  /** Is `feature` enabled by either a command line setting or an import?
+   *  @param feature  The name of the feature
+   *  @param owner    The prefix symbol (nested in `scala.language`) where the
+   *                  feature is defined.
+   */
+  def enabled(feature: TermName)(using Context): Boolean =
+    enabledBySetting(feature) || enabledByImport(feature)
+
+  /** Is auto-tupling enabled? */
+  def autoTuplingEnabled(using Context): Boolean = !enabled(nme.noAutoTupling)
+
+  def dynamicsEnabled(using Context): Boolean = enabled(nme.dynamics)
+
+  def dependentEnabled(using Context) = enabled(dependent)
+
+  def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments)
+
+  def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals)
+
+  def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros)
+
+  /** Is pureFunctions enabled for this compilation unit?
*/ + def pureFunsEnabled(using Context) = + enabledBySetting(pureFunctions) + || ctx.compilationUnit.knowsPureFuns + || ccEnabled + + /** Is captureChecking enabled for this compilation unit? */ + def ccEnabled(using Context) = + enabledBySetting(captureChecking) + || ctx.compilationUnit.needsCaptureChecking + + /** Is pureFunctions enabled for any of the currently compiled compilation units? */ + def pureFunsEnabledSomewhere(using Context) = + enabledBySetting(pureFunctions) + || ctx.run != null && ctx.run.nn.pureFunsImportEncountered + || ccEnabledSomewhere + + /** Is captureChecking enabled for any of the currently compiled compilation units? */ + def ccEnabledSomewhere(using Context) = + enabledBySetting(captureChecking) + || ctx.run != null && ctx.run.nn.ccImportEncountered + + def sourceVersionSetting(using Context): SourceVersion = + SourceVersion.valueOf(ctx.settings.source.value) + + def sourceVersion(using Context): SourceVersion = + ctx.compilationUnit.sourceVersion match + case Some(v) => v + case none => sourceVersionSetting + + def migrateTo3(using Context): Boolean = sourceVersion == `3.0-migration` + + /** If current source migrates to `version`, issue given warning message + * and return `true`, otherwise return `false`. 
+ */ + def warnOnMigration(msg: Message, pos: SrcPos, version: SourceVersion)(using Context): Boolean = + if sourceVersion.isMigrating && sourceVersion.stable == version + || (version == `3.0` || version == `3.1`) && migrateTo3 + then + report.migrationWarning(msg, pos) + true + else + false + + def checkExperimentalFeature(which: String, srcPos: SrcPos, note: -> String = "")(using Context) = + if !isExperimentalEnabled then + report.error(i"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + + def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = + if !isExperimentalEnabled then + val symMsg = + if sym.hasAnnotation(defn.ExperimentalAnnot) then + i"$sym is marked @experimental" + else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then + i"${sym.owner} is marked @experimental" + else + i"$sym inherits @experimental" + report.error(s"$symMsg and therefore may only be used in an experimental scope.", srcPos) + + /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ + def checkExperimentalSettings(using Context): Unit = + for setting <- ctx.settings.language.value + if setting.startsWith("experimental.") && setting != "experimental.macros" + do checkExperimentalFeature(s"feature $setting", NoSourcePosition) + + def isExperimentalEnabled(using Context): Boolean = + Properties.experimental && !ctx.settings.YnoExperimental.value + + /** Handle language import `import language..` if it is one + * of the global imports `pureFunctions` or `captureChecking`. In this case + * make the compilation unit's and current run's fields accordingly. 
+ * @return true iff import that was handled + */ + def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = + val fullFeatureName = QualifiedName(prefix, imported.asTermName) + if fullFeatureName == pureFunctions then + ctx.compilationUnit.knowsPureFuns = true + if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true + true + else if fullFeatureName == captureChecking then + ctx.compilationUnit.needsCaptureChecking = true + if ctx.run != null then ctx.run.nn.ccImportEncountered = true + true + else + false +end Feature diff --git a/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala new file mode 100644 index 000000000000..2b2f35e49451 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala @@ -0,0 +1,69 @@ +package dotty.tools +package dotc +package config + +import io._ +import classpath.AggregateClassPath +import core._ +import Symbols._, Types._, Contexts._, StdNames._ +import Flags._ +import transform.ExplicitOuter, transform.SymUtils._ + +class JavaPlatform extends Platform { + + private var currentClassPath: Option[ClassPath] = None + + def classPath(using Context): ClassPath = { + if (currentClassPath.isEmpty) + currentClassPath = Some(new PathResolver().result) + val cp = currentClassPath.get + cp + } + + // The given symbol is a method with the right name and signature to be a runnable java program. 
+ def isMainMethod(sym: Symbol)(using Context): Boolean = + (sym.name == nme.main) && (sym.info match { + case MethodTpe(_, defn.ArrayOf(el) :: Nil, restpe) => el =:= defn.StringType && (restpe isRef defn.UnitClass) + case _ => false + }) + + /** Update classpath with a substituted subentry */ + def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match { + case AggregateClassPath(entries) => + currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e)))) + case cp: ClassPath => + currentClassPath = Some(subst.getOrElse(cp, cp)) + } + + def rootLoader(root: TermSymbol)(using Context): SymbolLoader = new SymbolLoaders.PackageLoader(root, classPath) + + /** Is the SAMType `cls` also a SAM under the rules of the JVM? */ + def isSam(cls: ClassSymbol)(using Context): Boolean = + cls.isAllOf(NoInitsTrait) && + cls.superClass == defn.ObjectClass && + cls.directlyInheritedTraits.forall(_.is(NoInits)) && + !ExplicitOuter.needsOuterIfReferenced(cls) && + cls.typeRef.fields.isEmpty // Superaccessors already show up as abstract methods here, so no test necessary + + /** We could get away with excluding BoxedBooleanClass for the + * purpose of equality testing since it need not compare equal + * to anything but other booleans, but it should be present in + * case this is put to other uses. 
+   */
+  def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = {
+    val d = defn
+    import d._
+    (sym == ObjectClass) ||
+    (sym == JavaSerializableClass) ||
+    (sym == ComparableClass) ||
+    (sym derivesFrom BoxedNumberClass) ||
+    (sym derivesFrom BoxedCharClass) ||
+    (sym derivesFrom BoxedBooleanClass)
+  }
+
+  def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean =
+    true
+
+  def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader =
+    new ClassfileLoader(bin)
+}
diff --git a/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala
new file mode 100644
index 000000000000..0411c5604768
--- /dev/null
+++ b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala
@@ -0,0 +1,117 @@
+package dotty.tools
+package dotc
+package config
+
+import scala.language.unsafeNulls
+
+import io._
+
+/** A class for holding mappings from source directories to
+ *  their output location. This functionality can be accessed
+ *  only programmatically. The command line compiler uses a
+ *  single output location, but tools may use this functionality
+ *  to set output location per source directory.
+ */
+class OutputDirs {
+  /** Pairs of source directory - destination directory. */
+  private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil
+
+  /** If this is not None, the output location where all
+   *  classes should go.
+   */
+  private var singleOutDir: Option[AbstractFile] = None
+
+  /** Add a destination directory for sources found under srcdir.
+   *  Both directories should exist.
+   */
+  def add(srcDir: String, outDir: String): Unit =
+    add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
+        checkDir(AbstractFile.getDirectory(outDir), outDir))
+
+  /** Check that dir exists and is a directory.
*/ + private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = ( + if (dir != null && dir.isDirectory) + dir + // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false)) + else if (allowJar && dir == null && Jar.isJarOrZip(File(name), false)) + new PlainFile(Path(name)) + else + throw new FatalError(name + " does not exist or is not a directory")) + + /** Set the single output directory. From now on, all files will + * be dumped in there, regardless of previous calls to 'add'. + */ + def setSingleOutput(outDir: String): Unit = { + val dst = AbstractFile.getDirectory(outDir) + setSingleOutput(checkDir(dst, outDir, true)) + } + + def getSingleOutput: Option[AbstractFile] = singleOutDir + + /** Set the single output directory. From now on, all files will + * be dumped in there, regardless of previous calls to 'add'. + */ + def setSingleOutput(dir: AbstractFile): Unit = + singleOutDir = Some(dir) + + def add(src: AbstractFile, dst: AbstractFile): Unit = { + singleOutDir = None + outputDirs ::= ((src, dst)) + } + + /** Return the list of source-destination directory pairs. */ + def outputs: List[(AbstractFile, AbstractFile)] = outputDirs + + /** Return the output directory for the given file. + */ + def outputDirFor(src: AbstractFile): AbstractFile = { + def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = + src.path.startsWith(srcDir.path) + + singleOutDir match { + case Some(d) => d + case None => + (outputs find (isBelow _).tupled) match { + case Some((_, d)) => d + case _ => + throw new FatalError("Could not find an output directory for " + + src.path + " in " + outputs) + } + } + } + + /** Return the source file path(s) which correspond to the given + * classfile path and SourceFile attribute value, subject to the + * condition that source files are arranged in the filesystem + * according to Java package layout conventions. 
+ * + * The given classfile path must be contained in at least one of + * the specified output directories. If it does not then this + * method returns Nil. + * + * Note that the source file is not required to exist, so assuming + * a valid classfile path this method will always return a list + * containing at least one element. + * + * Also that if two or more source path elements target the same + * output directory there will be two or more candidate source file + * paths. + */ + def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = { + def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = + classFile.path.startsWith(outDir.path) + + singleOutDir match { + case Some(d) => + d match { + case _: VirtualDirectory | _: io.ZipArchive => Nil + case _ => List(d.lookupPathUnchecked(srcPath, false)) + } + case None => + (outputs filter (isBelow _).tupled) match { + case Nil => Nil + case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false)) + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala new file mode 100644 index 000000000000..afa30e38dc2a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala @@ -0,0 +1,268 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +import WrappedProperties.AccessControl +import io.{ClassPath, Directory, Path} +import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath} +import ClassPath.split +import PartialFunction.condOpt +import core.Contexts._ +import Settings._ +import dotty.tools.io.File + +object PathResolver { + + // Imports property/environment functions which suppress + // security exceptions. + import AccessControl._ + + def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse "" + + /** Map all classpath elements to absolute paths and reconstruct the classpath. 
+ */ + def makeAbsolute(cp: String): String = ClassPath.map(cp, x => Path(x).toAbsolute.path) + + /** pretty print class path + */ + def ppcp(s: String): String = split(s) match { + case Nil => "" + case Seq(x) => x + case xs => xs.map("\n" + _).mkString + } + + /** Values found solely by inspecting environment or property variables. + */ + object Environment { + private def searchForBootClasspath = ( + systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" + ) + + /** Environment variables which java pays attention to so it + * seems we do as well. + */ + def classPathEnv: String = envOrElse("CLASSPATH", "") + def sourcePathEnv: String = envOrElse("SOURCEPATH", "") + + def javaBootClassPath: String = propOrElse("sun.boot.class.path", searchForBootClasspath) + + def javaExtDirs: String = propOrEmpty("java.ext.dirs") + def scalaHome: String = propOrEmpty("scala.home") + def scalaExtDirs: String = propOrEmpty("scala.ext.dirs") + + /** The java classpath and whether to use it. + */ + def javaUserClassPath: String = propOrElse("java.class.path", "") + def useJavaClassPath: Boolean = propOrFalse("scala.usejavacp") + + override def toString: String = s""" + |object Environment { + | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath) + | javaBootClassPath = <${javaBootClassPath.length} chars> + | javaExtDirs = ${ppcp(javaExtDirs)} + | javaUserClassPath = ${ppcp(javaUserClassPath)} + | scalaExtDirs = ${ppcp(scalaExtDirs)} + |}""".trim.stripMargin + } + + /** Default values based on those in Environment as interpreted according + * to the path resolution specification. 
+ */ + object Defaults { + def scalaSourcePath: String = Environment.sourcePathEnv + def javaBootClassPath: String = Environment.javaBootClassPath + def javaUserClassPath: String = Environment.javaUserClassPath + def javaExtDirs: String = Environment.javaExtDirs + def useJavaClassPath: Boolean = Environment.useJavaClassPath + + def scalaHome: String = Environment.scalaHome + def scalaHomeDir: Directory = Directory(scalaHome) + def scalaHomeExists: Boolean = scalaHomeDir.isDirectory + def scalaLibDir: Directory = (scalaHomeDir / "lib").toDirectory + def scalaClassesDir: Directory = (scalaHomeDir / "classes").toDirectory + + def scalaLibAsJar: File = (scalaLibDir / "scala-library.jar").toFile + def scalaLibAsDir: Directory = (scalaClassesDir / "library").toDirectory + + def scalaLibDirFound: Option[Directory] = + if (scalaLibAsJar.isFile) Some(scalaLibDir) + else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir) + else None + + def scalaLibFound: String = + if (scalaLibAsJar.isFile) scalaLibAsJar.path + else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path + else "" + + // XXX It must be time for someone to figure out what all these things + // are intended to do. This is disabled here because it was causing all + // the scala jars to end up on the classpath twice: one on the boot + // classpath as set up by the runner (or regular classpath under -nobootcp) + // and then again here. 
+ def scalaBootClassPath: String = "" + // scalaLibDirFound match { + // case Some(dir) if scalaHomeExists => + // val paths = ClassPath expandDir dir.path + // join(paths: _*) + // case _ => "" + // } + + def scalaExtDirs: String = Environment.scalaExtDirs + + def scalaPluginPath: String = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path + + override def toString: String = """ + |object Defaults { + | scalaHome = %s + | javaBootClassPath = %s + | scalaLibDirFound = %s + | scalaLibFound = %s + | scalaBootClassPath = %s + | scalaPluginPath = %s + |}""".trim.stripMargin.format( + scalaHome, + ppcp(javaBootClassPath), + scalaLibDirFound, scalaLibFound, + ppcp(scalaBootClassPath), ppcp(scalaPluginPath) + ) + } + + def fromPathString(path: String)(using Context): ClassPath = { + val settings = ctx.settings.classpath.update(path) + inContext(ctx.fresh.setSettings(settings)) { + new PathResolver().result + } + } + + /** Show values in Environment and Defaults when no argument is provided. + * Otherwise, show values in Calculated as if those options had been given + * to a scala runner. 
+ */ + def main(args: Array[String]): Unit = + if (args.isEmpty) { + println(Environment) + println(Defaults) + } + else inContext(ContextBase().initialCtx) { + val ArgsSummary(sstate, rest, errors, warnings) = + ctx.settings.processArguments(args.toList, true, ctx.settingsState) + errors.foreach(println) + val pr = inContext(ctx.fresh.setSettings(sstate)) { + new PathResolver() + } + println(" COMMAND: 'scala %s'".format(args.mkString(" "))) + println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) + + pr.result match { + case cp: AggregateClassPath => + println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + } + } +} + +import PathResolver.{Defaults, ppcp} + +class PathResolver(using c: Context) { + import c.base.settings + + private val classPathFactory = new ClassPathFactory + + private def cmdLineOrElse(name: String, alt: String) = + commandLineFor(name) match { + case Some("") | None => alt + case Some(x) => x + } + + private def commandLineFor(s: String): Option[String] = condOpt(s) { + case "javabootclasspath" => settings.javabootclasspath.value + case "javaextdirs" => settings.javaextdirs.value + case "bootclasspath" => settings.bootclasspath.value + case "extdirs" => settings.extdirs.value + case "classpath" | "cp" => settings.classpath.value + case "sourcepath" => settings.sourcepath.value + } + + /** Calculated values based on any given command line options, falling back on + * those in Defaults. 
+ */ + object Calculated { + def scalaHome: String = Defaults.scalaHome + def useJavaClassPath: Boolean = settings.usejavacp.value || Defaults.useJavaClassPath + def javaBootClassPath: String = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath) + def javaExtDirs: String = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs) + def javaUserClassPath: String = if (useJavaClassPath) Defaults.javaUserClassPath else "" + def scalaBootClassPath: String = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath) + def scalaExtDirs: String = cmdLineOrElse("extdirs", Defaults.scalaExtDirs) + /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as: + * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect + * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg) + * [scaladoc] ^ + * Because bootstrapping looks at the sourcepath and creates the package "reflect" in "" it will cause the + * typedIdentifier to pick .reflect instead of the .scala.reflect package. Thus, no bootstrapping for scaladoc! + */ + def sourcePath: String = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) + + def userClassPath: String = + if (!settings.classpath.isDefault) settings.classpath.value + else sys.env.getOrElse("CLASSPATH", ".") + + import classPathFactory._ + + // Assemble the elements! + def basis: List[Traversable[ClassPath]] = + val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) + + List( + JrtClassPath(release), // 1. The Java 9+ classpath (backed by the jrt:/ virtual system, if available) + classesInPath(javaBootClassPath), // 2. The Java bootstrap class path. + contentsOfDirsInPath(javaExtDirs), // 3. The Java extension class path. + classesInExpandedPath(javaUserClassPath), // 4. The Java application class path. + classesInPath(scalaBootClassPath), // 5. The Scala boot class path. 
+ contentsOfDirsInPath(scalaExtDirs), // 6. The Scala extension class path. + classesInExpandedPath(userClassPath), // 7. The Scala application class path. + sourcesInPath(sourcePath) // 8. The Scala source path. + ) + + lazy val containers: List[ClassPath] = basis.flatten.distinct + + override def toString: String = """ + |object Calculated { + | scalaHome = %s + | javaBootClassPath = %s + | javaExtDirs = %s + | javaUserClassPath = %s + | useJavaClassPath = %s + | scalaBootClassPath = %s + | scalaExtDirs = %s + | userClassPath = %s + | sourcePath = %s + |}""".trim.stripMargin.format( + scalaHome, + ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath), + useJavaClassPath, + ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath), + ppcp(sourcePath) + ) + } + + def containers: List[ClassPath] = Calculated.containers + + lazy val result: ClassPath = { + val cp = AggregateClassPath(containers.toIndexedSeq) + + if (settings.YlogClasspath.value) { + Console.println("Classpath built from " + settings.toConciseString(ctx.settingsState)) + Console.println("Defaults: " + PathResolver.Defaults) + Console.println("Calculated: " + Calculated) + + val xs = (Calculated.basis drop 2).flatten.distinct + println("After java boot/extdirs classpath has %d entries:" format xs.size) + xs foreach (x => println(" " + x)) + } + cp + } + + def asURLs: Seq[java.net.URL] = result.asURLs +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Platform.scala b/tests/pos-with-compiler-cc/dotc/config/Platform.scala new file mode 100644 index 000000000000..0faacf1bcebb --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Platform.scala @@ -0,0 +1,46 @@ +package dotty.tools +package dotc +package config + +import io.{ClassPath, AbstractFile} +import core.Contexts._, core.Symbols._ +import core.SymbolLoader +import core.StdNames.nme +import core.Flags.Module + +/** The platform dependent pieces of Global. + */ +abstract class Platform { + + /** The root symbol loader. 
*/ + def rootLoader(root: TermSymbol)(using Context): SymbolLoader + + /** The compiler classpath. */ + def classPath(using Context): ClassPath + + /** Update classpath with a substitution that maps entries to entries */ + def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit + + /** Any platform-specific phases. */ + //def platformPhases: List[SubComponent] + + /** Is the SAMType `cls` also a SAM under the rules of the platform? */ + def isSam(cls: ClassSymbol)(using Context): Boolean + + /** The various ways a boxed primitive might materialize at runtime. */ + def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean + + /** Is the given class symbol eligible for Java serialization-specific methods? */ + def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean + + /** Create a new class loader to load class file `bin` */ + def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader + + /** The given symbol is a method with the right name and signature to be a runnable program. */ + def isMainMethod(sym: Symbol)(using Context): Boolean + + /** The given class has a main method. 
*/ + final def hasMainMethod(sym: Symbol)(using Context): Boolean = + sym.info.member(nme.main).hasAltWith(d => + isMainMethod(d.symbol) && (sym.is(Module) || d.symbol.isStatic)) +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Printers.scala b/tests/pos-with-compiler-cc/dotc/config/Printers.scala new file mode 100644 index 000000000000..ecb189de9bb3 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Printers.scala @@ -0,0 +1,52 @@ +package dotty.tools.dotc.config + +object Printers { + + class Printer { + def println(msg: => String): Unit = System.out.nn.println(msg) + } + + object noPrinter extends Printer { + inline override def println(msg: => String): Unit = () + } + + val default = new Printer + + val capt = noPrinter + val constr = noPrinter + val core = noPrinter + val checks = noPrinter + val config = noPrinter + val cyclicErrors = noPrinter + val debug = noPrinter + val derive = noPrinter + val desugar = noPrinter + val scaladoc = noPrinter + val exhaustivity = noPrinter + val gadts = noPrinter + val gadtsConstr = noPrinter + val hk = noPrinter + val implicits = noPrinter + val implicitsDetailed = noPrinter + val lexical = noPrinter + val init = noPrinter + val inlining = noPrinter + val interactiv = noPrinter + val matchTypes = noPrinter + val nullables = noPrinter + val overload = noPrinter + val patmatch = noPrinter + val pickling = noPrinter + val quotePickling = noPrinter + val plugins = noPrinter + val recheckr = noPrinter + val refcheck = noPrinter + val simplify = noPrinter + val staging = noPrinter + val subtyping = noPrinter + val tailrec = noPrinter + val transforms = noPrinter + val typr = noPrinter + val unapp = noPrinter + val variances = noPrinter +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Properties.scala b/tests/pos-with-compiler-cc/dotc/config/Properties.scala new file mode 100644 index 000000000000..1e9cc82112af --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Properties.scala @@ -0,0 +1,142 @@ 
+package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +import scala.annotation.internal.sharable + +import java.io.IOException +import java.util.jar.Attributes.{ Name => AttributeName } +import java.nio.charset.StandardCharsets + +/** Loads `library.properties` from the jar. */ +object Properties extends PropertiesTrait { + protected def propCategory: String = "compiler" + protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait] + + /** Scala manifest attributes. + */ + @sharable val ScalaCompilerVersion: AttributeName = new AttributeName("Scala-Compiler-Version") +} + +trait PropertiesTrait { + protected def propCategory: String // specializes the remainder of the values + protected def pickJarBasedOn: Class[?] // props file comes from jar containing this + + /** The name of the properties file */ + protected val propFilename: String = "/" + propCategory + ".properties" + + /** The loaded properties */ + @sharable protected lazy val scalaProps: java.util.Properties = { + val props = new java.util.Properties + val stream = pickJarBasedOn getResourceAsStream propFilename + if (stream ne null) + quietlyDispose(props load stream, stream.close) + + props + } + + private def quietlyDispose(action: => Unit, disposal: => Unit) = + try { action } + finally + try { disposal } + catch { case _: IOException => } + + def propIsSet(name: String): Boolean = System.getProperty(name) != null + def propIsSetTo(name: String, value: String): Boolean = propOrNull(name) == value + def propOrElse(name: String, alt: String): String = System.getProperty(name, alt) + def propOrEmpty(name: String): String = propOrElse(name, "") + def propOrNull(name: String): String = propOrElse(name, null) + def propOrNone(name: String): Option[String] = Option(propOrNull(name)) + def propOrFalse(name: String): Boolean = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase) + def setProp(name: String, value: String): 
String = System.setProperty(name, value) + def clearProp(name: String): String = System.clearProperty(name) + + def envOrElse(name: String, alt: String): String = Option(System getenv name) getOrElse alt + def envOrNone(name: String): Option[String] = Option(System getenv name) + + // for values based on propFilename + def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt) + def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "") + def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)) + + /** Either the development or release version if known, otherwise + * the empty string. + */ + def versionNumberString: String = scalaPropOrEmpty("version.number") + + /** The version number of the jar this was loaded from, + * or `"(unknown)"` if it cannot be determined. + */ + val simpleVersionString: String = { + val v = scalaPropOrElse("version.number", "(unknown)") + v + ( + if (v.contains("SNAPSHOT") || v.contains("NIGHTLY")) + "-git-" + scalaPropOrElse("git.hash", "(unknown)") + else + "" + ) + } + + /** The version number of the jar this was loaded from plus `"version "` prefix, + * or `"version (unknown)"` if it cannot be determined. + */ + val versionString: String = "version " + simpleVersionString + + /** Whether the current version of compiler is experimental + * + * 1. Snapshot, nightly releases and non-bootstrapped compiler are experimental. + * 2. Features supported by experimental versions of the compiler: + * - research plugins + */ + val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") + + val copyrightString: String = scalaPropOrElse("copyright.string", "(c) 2002-2017 LAMP/EPFL") + + /** This is the encoding to use reading in source files, overridden with -encoding + * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. 
+ */ + def sourceEncoding: String = scalaPropOrElse("file.encoding", StandardCharsets.UTF_8.name) + def sourceReader: String = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader") + + /** This is the default text encoding, overridden (unreliably) with + * `JAVA_OPTS="-Dfile.encoding=Foo"` + */ + def encodingString: String = propOrElse("file.encoding", StandardCharsets.UTF_8.name) + + /** The default end of line character. + */ + def lineSeparator: String = propOrElse("line.separator", "\n") + + /** Various well-known properties. + */ + def javaClassPath: String = propOrEmpty("java.class.path") + def javaHome: String = propOrEmpty("java.home") + def javaVendor: String = propOrEmpty("java.vendor") + def javaVersion: String = propOrEmpty("java.version") + def javaVmInfo: String = propOrEmpty("java.vm.info") + def javaVmName: String = propOrEmpty("java.vm.name") + def javaVmVendor: String = propOrEmpty("java.vm.vendor") + def javaVmVersion: String = propOrEmpty("java.vm.version") + def osName: String = propOrEmpty("os.name") + def scalaHome: String = propOrEmpty("scala.home") + def tmpDir: String = propOrEmpty("java.io.tmpdir") + def userDir: String = propOrEmpty("user.dir") + def userHome: String = propOrEmpty("user.home") + def userName: String = propOrEmpty("user.name") + + /** Some derived values. + */ + def isWin: Boolean = osName startsWith "Windows" + def isMac: Boolean = javaVendor startsWith "Apple" + + // This is looking for javac, tools.jar, etc. + // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME, + // and finally the system property based javaHome. 
+ def jdkHome: String = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) + + def versionMsg: String = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString) + def scalaCmd: String = if (isWin) "scala.bat" else "scala" + def scalacCmd: String = if (isWin) "scalac.bat" else "scalac" +} diff --git a/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala new file mode 100644 index 000000000000..0275e0d6a227 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala @@ -0,0 +1,35 @@ +package dotty.tools.dotc.config + +import dotty.tools.dotc.core._ +import Contexts._ +import Symbols._ + +import dotty.tools.backend.sjs.JSDefinitions + +object SJSPlatform { + /** The `SJSPlatform` for the current context. */ + def sjsPlatform(using Context): SJSPlatform = + ctx.platform.asInstanceOf[SJSPlatform] +} + +class SJSPlatform()(using Context) extends JavaPlatform { + + /** Scala.js-specific definitions. */ + val jsDefinitions: JSDefinitions = new JSDefinitions() + + /** Is the SAMType `cls` also a SAM under the rules of the Scala.js back-end? */ + override def isSam(cls: ClassSymbol)(using Context): Boolean = + defn.isFunctionClass(cls) + || cls.superClass == jsDefinitions.JSFunctionClass + + /** Is the given class symbol eligible for Java serialization-specific methods? + * + * This is not simply false because we still want to add them to Scala classes + * and objects. They might be transitively used by macros and other compile-time + * code. It feels safer to have them be somewhat equivalent to the ones we would + * get in a JVM project. The JVM back-end will slap an extends `java.io.Serializable` + * to them, so we should be consistent and also emit the proper serialization methods. 
+ */ + override def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = + !sym.isSubClass(jsDefinitions.JSAnyClass) +} diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala new file mode 100644 index 000000000000..407171f1a0dd --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala @@ -0,0 +1,21 @@ +package dotty.tools.dotc.config + +enum ScalaRelease(val majorVersion: Int, val minorVersion: Int) extends Ordered[ScalaRelease]: + case Release3_0 extends ScalaRelease(3, 0) + case Release3_1 extends ScalaRelease(3, 1) + case Release3_2 extends ScalaRelease(3, 2) + + def show = s"$majorVersion.$minorVersion" + + def compare(that: ScalaRelease) = + val ord = summon[Ordering[(Int, Int)]] + ord.compare((majorVersion, minorVersion), (that.majorVersion, that.minorVersion)) + +object ScalaRelease: + def latest = Release3_1 + + def parse(name: String) = name match + case "3.0" => Some(Release3_0) + case "3.1" => Some(Release3_1) + case "3.2" => Some(Release3_2) + case _ => None diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala new file mode 100644 index 000000000000..a2dba94ad9fc --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala @@ -0,0 +1,346 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import dotty.tools.dotc.config.PathResolver.Defaults +import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} +import dotty.tools.dotc.config.SourceVersion +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.rewrites.Rewrites +import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} + +import scala.util.chaining._ + +class ScalaSettings extends SettingGroup with AllScalaSettings + +object ScalaSettings: + // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` + 
private val minTargetVersion = 8 + private val maxTargetVersion = 19 + + def supportedTargetVersions: List[String] = + (minTargetVersion to maxTargetVersion).toList.map(_.toString) + + def supportedReleaseVersions: List[String] = + if scala.util.Properties.isJavaAtLeast("9") then + val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + val maxVersion = Math.min(jdkVersion, maxTargetVersion) + (minTargetVersion to maxVersion).toList.map(_.toString) + else List(minTargetVersion).map(_.toString) + + def supportedScalaReleaseVersions: List[String] = + ScalaRelease.values.toList.map(_.show) + + def supportedSourceVersions: List[String] = + SourceVersion.values.toList.map(_.toString) + + def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") + + def defaultPageWidth: Int = { + val defaultWidth = 80 + val columnsVar = System.getenv("COLUMNS") + if columnsVar != null then columnsVar.toInt + else if Properties.isWin then + val ansiconVar = System.getenv("ANSICON") // eg. 
"142x32766 (142x26)" + if ansiconVar != null && ansiconVar.matches("[0-9]+x.*") then + ansiconVar.substring(0, ansiconVar.indexOf("x")).toInt + else defaultWidth + else defaultWidth + } + +trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: + self: SettingGroup => + + /* Path related settings */ + val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") + + val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", ScalaSettings.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source")) + val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) + val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) + val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. 
The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) + + val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.") + val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") + val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") + val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) + val YindentColons: Setting[Boolean] = BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)") + + /* Decompiler settings */ + val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) + val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) + + /* Scala.js-related settings */ + val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)") + val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)") + + val projectUrl: Setting[String] = StringSetting ( + "-project-url", + "project repository homepage", + "The source repository of your project.", + "" + ) + + val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") + + val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") + val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") +end 
AllScalaSettings + +/** Settings shared by compiler and scaladoc */ +trait CommonScalaSettings: + self: SettingGroup => + + /* Path related settings */ + val bootclasspath: Setting[String] = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path")) + val extdirs: Setting[String] = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories")) + val javabootclasspath: Setting[String] = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path")) + val javaextdirs: Setting[String] = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs, aliases = List("--java-extension-directories")) + val sourcepath: Setting[String] = PathSetting("-sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path")) + val sourceroot: Setting[String] = PathSetting("-sourceroot", "Specify workspace root directory.", ".") + + val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ScalaSettings.defaultClasspath, aliases = List("-cp", "--class-path")) + val outputDir: Setting[AbstractFile] = OutputSetting("-d", "directory|jar", "Destination for generated classfiles.", + new PlainDirectory(Directory("."))) + val color: Setting[String] = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color")) + val verbose: Setting[Boolean] = BooleanSetting("-verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose")) + val version: Setting[Boolean] = BooleanSetting("-version", "Print product version and exit.", aliases = List("--version")) + val help: Setting[Boolean] = BooleanSetting("-help", "Print a synopsis of standard options.", aliases = 
List("--help", "-h")) + val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", ScalaSettings.defaultPageWidth, aliases = List("--page-width")) + val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.", aliases = List("--no-warnings")) + + val javaOutputVersion: Setting[String] = ChoiceSetting("-java-output-version", "version", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version. Corresponds to -release flag in javac.", ScalaSettings.supportedReleaseVersions, "", aliases = List("-release", "--release")) + + val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) + val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) + val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain")) + // -explain-types setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance + // it is otherwise subsumed by -explain, and should be dropped as soon as we can. 
+ val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) + val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) + val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) + + /* Coverage settings */ + val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) + + /* Other settings */ + val encoding: Setting[String] = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding")) + val usejavacp: Setting[Boolean] = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path")) + val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) +end CommonScalaSettings + +/** -P "plugin" settings. Various tools might support plugins. 
*/ +private sealed trait PluginSettings: + self: SettingGroup => + val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") + val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") + val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") + val showPlugins: Setting[Boolean] = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") + val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) + val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. -P:<plugin>:<opt>") + +/** -V "Verbose" settings */ +private sealed trait VerboseSettings: + self: SettingGroup => + val Vhelp: Setting[Boolean] = BooleanSetting("-V", "Print a synopsis of verbose options.") + val Xprint: Setting[List[String]] = PhasesSetting("-Vprint", "Print out program after", aliases = List("-Xprint")) + val XshowPhases: Setting[Boolean] = BooleanSetting("-Vphases", "List compiler phases.", aliases = List("-Xshow-phases")) + + val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") + val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") + val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) + val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) + val VreplMaxPrintCharacters: Setting[Int] =
IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) + +/** -W "Warnings" settings + */ +private sealed trait WarningSettings: + self: SettingGroup => + val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") + val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) + + val Wunused: Setting[List[String]] = MultiChoiceSetting( + name = "-Wunused", + helpArg = "warning", + descr = "Enable or disable specific `unused` warnings", + choices = List("nowarn", "all"), + default = Nil + ) + object WunusedHas: + def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) + def nowarn(using Context) = allOr("nowarn") + + val Wconf: Setting[List[String]] = MultiStringSetting( + "-Wconf", + "patterns", + default = List(), + descr = + s"""Configure compiler warnings. + |Syntax: -Wconf:<filters>:<action>,<filters>:<action>,... + |multiple <filters> are combined with &, i.e., <filter>&...&<filter> + | + |<filter> + | - Any message: any + | + | - Message categories: cat=deprecation, cat=feature, cat=unchecked + | + | - Message content: msg=regex + | The regex need only match some part of the message, not all of it. + | + | - Message id: id=E129 + | The message id is printed with the warning. + | + | - Message name: name=PureExpressionInStatementPosition + | The message name is printed with the warning in verbose warning mode. + | + |In verbose warning mode the compiler prints matching filters for warnings. + |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally + |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`).
+ | + |<action> + | - error / e + | - warning / w + | - verbose / v (emit warning, show additional help for writing `-Wconf` filters) + | - info / i (infos are not counted as warnings and not affected by `-Werror`) + | - silent / s + | + |The default configuration is empty. + | + |User-defined configurations are added to the left. The leftmost rule matching + |a warning message defines the action. + | + |Examples: + | - change every warning into an error: -Wconf:any:error + | - silence deprecations: -Wconf:cat=deprecation:s + | + |Note: on the command-line you might need to quote configurations containing `*` or `&` + |to prevent the shell from expanding patterns.""".stripMargin, + ) + +/** -X "Extended" or "Advanced" settings */ +private sealed trait XSettings: + self: SettingGroup => + + val Xhelp: Setting[Boolean] = BooleanSetting("-X", "Print a synopsis of advanced options.") + val XnoForwarders: Setting[Boolean] = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.") + val XmaxInlines: Setting[Int] = IntSetting("-Xmax-inlines", "Maximal number of successive inlines.", 32) + val XmaxInlinedTrees: Setting[Int] = IntSetting("-Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000) + val Xmigration: Setting[ScalaVersion] = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.") + val XprintTypes: Setting[Boolean] = BooleanSetting("-Xprint-types", "Print tree types (debugging option).") + val XprintDiff: Setting[Boolean] = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.") + val XprintDiffDel: Setting[Boolean] = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.") + val XprintInline: Setting[Boolean] = BooleanSetting("-Xprint-inline", "Show where inlined code comes from.") + val XprintSuspension: Setting[Boolean] = BooleanSetting("-Xprint-suspension", "Show when code is suspended until
macros are compiled.") + val Xprompt: Setting[Boolean] = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).") + val XreplDisableDisplay: Setting[Boolean] = BooleanSetting("-Xrepl-disable-display", "Do not display definitions in REPL.") + val XverifySignatures: Setting[Boolean] = BooleanSetting("-Xverify-signatures", "Verify generic signatures in generated bytecode.") + val XignoreScala2Macros: Setting[Boolean] = BooleanSetting("-Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.") + val XimportSuggestionTimeout: Setting[Int] = IntSetting("-Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000) + val Xsemanticdb: Setting[Boolean] = BooleanSetting("-Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb")) + val XuncheckedJavaOutputVersion: Setting[String] = ChoiceSetting("-Xunchecked-java-output-version", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. Corresponds to -target flag in javac. 
When on JDK 9+, consider -java-output-version as a safer alternative.", ScalaSettings.supportedTargetVersions, "", aliases = List("-Xtarget", "--Xtarget")) + val XcheckMacros: Setting[Boolean] = BooleanSetting("-Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros")) + val XmainClass: Setting[String] = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "") + val XimplicitSearchLimit: Setting[Int] = IntSetting("-Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) + + val XmixinForceForwarders = ChoiceSetting( + name = "-Xmixin-force-forwarders", + helpArg = "mode", + descr = "Generate forwarder methods in classes inhering concrete methods from traits.", + choices = List("true", "junit", "false"), + default = "true") + + object mixinForwarderChoices { + def isTruthy(using Context) = XmixinForceForwarders.value == "true" + def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit" + } + + val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") +end XSettings + +/** -Y "Forking" as in forked tongue or "Private" settings */ +private sealed trait YSettings: + self: SettingGroup => + + val Yhelp: Setting[Boolean] = BooleanSetting("-Y", "Print a synopsis of private options.") + val Ycheck: Setting[List[String]] = PhasesSetting("-Ycheck", "Check the tree at the end of") + val YcheckMods: Setting[Boolean] = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync.") + val Ydebug: Setting[Boolean] = BooleanSetting("-Ydebug", "Increase the quantity of debugging output.") + val YdebugTrace: Setting[Boolean] = BooleanSetting("-Ydebug-trace", "Trace core operations.") + val YdebugFlags: Setting[Boolean] = BooleanSetting("-Ydebug-flags",
"Print all flags of definitions.") + val YdebugMissingRefs: Setting[Boolean] = BooleanSetting("-Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing.") + val YdebugNames: Setting[Boolean] = BooleanSetting("-Ydebug-names", "Show internal representation of names.") + val YdebugPos: Setting[Boolean] = BooleanSetting("-Ydebug-pos", "Show full source positions including spans.") + val YdebugTreeWithId: Setting[Int] = IntSetting("-Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created.", Int.MinValue) + val YdebugTypeError: Setting[Boolean] = BooleanSetting("-Ydebug-type-error", "Print the stack trace when a TypeError is caught", false) + val YdebugError: Setting[Boolean] = BooleanSetting("-Ydebug-error", "Print the stack trace when any error is caught.", false) + val YdebugUnpickling: Setting[Boolean] = BooleanSetting("-Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) + val YtermConflict: Setting[String] = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") + val Ylog: Setting[List[String]] = PhasesSetting("-Ylog", "Log operations during") + val YlogClasspath: Setting[Boolean] = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.") + val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") + + val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. 
This is either \"always\", \"never\", or a classpath.", "always") + + val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") + val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") + val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") + val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") + val Ydumpclasses: Setting[String] = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") + val YstopAfter: Setting[List[String]] = PhasesSetting("-Ystop-after", "Stop after", aliases = List("-stop")) // backward compat + val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully + val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") + val YdetailedStats: Setting[Boolean] = BooleanSetting("-Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") + val YkindProjector: Setting[String] = ChoiceSetting("-Ykind-projector", "[underscores, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. 
When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable") + val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "Show tree positions.") + val YprintPosSyms: Setting[Boolean] = BooleanSetting("-Yprint-pos-syms", "Show symbol definitions positions.") + val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting("-Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") + val YnoPatmatOpt: Setting[Boolean] = BooleanSetting("-Yno-patmat-opt", "Disable all pattern matching optimizations.") + val YplainPrinter: Setting[Boolean] = BooleanSetting("-Yplain-printer", "Pretty-print using a plain printer.") + val YprintSyms: Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") + val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") + val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") + val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") + val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") + val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") + val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") + val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) + val YcookComments: Setting[Boolean] = 
BooleanSetting("-Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) + val YreadComments: Setting[Boolean] = BooleanSetting("-Yread-docs", "Read documentation from tasty.") + val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") + val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") + val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") + val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") + val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") + val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") + + val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") + val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileExternalTool: Setting[List[String]] = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. 
Generally only useful for a single phase.", "typer") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + + // Experimental language features + val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting("-Yno-kind-polymorphism", "Disable kind polymorphism.") + val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. String|Null.") + val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") + val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") + val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") + val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") + val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") + + /** Area-specific debug output */ + val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") + val YnoDoubleBindings: Setting[Boolean] = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") + val YshowVarBounds: Setting[Boolean] = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds.") + + val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw 
StackOverflow stacktraces, instead of decoding them into triggering operations.") + + val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") + val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") + + val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. Emulates the old inlining behavior of 3.0.0-M3.") +end YSettings + diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala new file mode 100644 index 000000000000..7fdf57478f1a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala @@ -0,0 +1,188 @@ +/* @author James Iry + */ +package dotty.tools +package dotc.config + +import scala.language.unsafeNulls + +import scala.annotation.internal.sharable +import scala.util.{Try, Success, Failure} + +/** + * Represents a single Scala version in a manner that + * supports easy comparison and sorting. + */ +sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { + def unparse: String +} + +/** + * A scala version that sorts higher than all actual versions + */ +@sharable case object NoScalaVersion extends ScalaVersion { + def unparse: String = "none" + + def compare(that: ScalaVersion): Int = that match { + case NoScalaVersion => 0 + case _ => 1 + } +} + +/** + * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion + * may or may not be a released version - i.e. this same class is used to represent + * final, release candidate, milestone, and development builds. 
The build argument is used + * to segregate builds + */ +case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { + def unparse: String = s"${major}.${minor}.${rev}.${build.unparse}" + + def compare(that: ScalaVersion): Int = that match { + case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => + // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these + // comparisons a lot so I'm using brute force direct style code + if (major < thatMajor) -1 + else if (major > thatMajor) 1 + else if (minor < thatMinor) -1 + else if (minor > thatMinor) 1 + else if (rev < thatRev) -1 + else if (rev > thatRev) 1 + else build compare thatBuild + case AnyScalaVersion => 1 + case NoScalaVersion => -1 + } +} + +/** + * A Scala version that sorts lower than all actual versions + */ +@sharable case object AnyScalaVersion extends ScalaVersion { + def unparse: String = "any" + + def compare(that: ScalaVersion): Int = that match { + case AnyScalaVersion => 0 + case _ => -1 + } +} + +/** + * Methods for parsing ScalaVersions + */ +@sharable object ScalaVersion { + private val dot = "\\." + private val dash = "\\-" + private def not(s:String) = s"[^${s}]" + private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r + + def parse(versionString : String): Try[ScalaVersion] = { + def failure = Failure(new NumberFormatException( + s"There was a problem parsing ${versionString}. " + + "Versions should be in the form major[.minor[.revision]] " + + "where each part is a positive number, as in 2.10.1. " + + "The minor and revision parts are optional." 
+ )) + + def toInt(s: String) = s match { + case null | "" => 0 + case _ => s.toInt + } + + def isInt(s: String) = Try(toInt(s)).isSuccess + + import ScalaBuild._ + + def toBuild(s: String) = s match { + case null | "FINAL" => Final + case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2))) + case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1))) + case _ => Development(s) + } + + try versionString match { + case "" | "any" => Success(AnyScalaVersion) + case "none" => Success(NoScalaVersion) + case R(_, majorS, _, minorS, _, revS, _, buildS) => + Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))) + case _ => failure + } + catch { + case e: NumberFormatException => failure + } + } + + /** + * The version of the compiler running now + */ + val current: ScalaVersion = parse(util.Properties.versionNumberString).get +} + +/** + * Represents the data after the dash in major.minor.rev-build + */ +abstract class ScalaBuild extends Ordered[ScalaBuild] { + /** + * Return a version of this build information that can be parsed back into the + * same ScalaBuild + */ + def unparse: String +} + +object ScalaBuild { + + /** A development, test, nightly, snapshot or other "unofficial" build + */ + case class Development(id: String) extends ScalaBuild { + def unparse: String = s"-${id}" + + def compare(that: ScalaBuild): Int = that match { + // sorting two development builds based on id is reasonably valid for two versions created with the same schema + // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions + // this is a pragmatic compromise + case Development(thatId) => id compare thatId + // assume a development build is newer than anything else, that's not really true, but good luck + // mapping development build versions to other build types + case _ => 1 + } + } + + /** A final build + */ + case 
object Final extends ScalaBuild { + def unparse: String = "" + + def compare(that: ScalaBuild): Int = that match { + case Final => 0 + // a final is newer than anything other than a development build or another final + case Development(_) => -1 + case _ => 1 + } + } + + /** A candidate for final release + */ + case class RC(n: Int) extends ScalaBuild { + def unparse: String = s"-RC${n}" + + def compare(that: ScalaBuild): Int = that match { + // compare two rcs based on their RC numbers + case RC(thatN) => n - thatN + // an rc is older than anything other than a milestone or another rc + case Milestone(_) => 1 + case _ => -1 + } + } + + /** An intermediate release + */ + case class Milestone(n: Int) extends ScalaBuild { + def unparse: String = s"-M${n}" + + def compare(that: ScalaBuild): Int = that match { + // compare two milestones based on their milestone numbers + case Milestone(thatN) => n - thatN + // a milestone is older than anything other than another milestone + case _ => -1 + } + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/config/Settings.scala b/tests/pos-with-compiler-cc/dotc/config/Settings.scala new file mode 100644 index 000000000000..277833afbd5d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Settings.scala @@ -0,0 +1,295 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import core.Contexts._ + +import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} + +import annotation.tailrec +import collection.mutable.ArrayBuffer +import reflect.ClassTag +import scala.util.{Success, Failure} + +object Settings: + + val BooleanTag: ClassTag[Boolean] = ClassTag.Boolean + val IntTag: ClassTag[Int] = ClassTag.Int + val StringTag: ClassTag[String] = ClassTag(classOf[String]) + val ListTag: ClassTag[List[?]] = ClassTag(classOf[List[?]]) + val VersionTag: ClassTag[ScalaVersion] = ClassTag(classOf[ScalaVersion]) + val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) + val OutputTag: 
ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) + + class SettingsState(initialValues: Seq[Any]): + private val values = ArrayBuffer(initialValues: _*) + private var _wasRead: Boolean = false + + override def toString: String = s"SettingsState(values: ${values.toList})" + + def value(idx: Int): Any = + _wasRead = true + values(idx) + + def update(idx: Int, x: Any): SettingsState = + if (_wasRead) then SettingsState(values.toSeq).update(idx, x) + else + values(idx) = x + this + end SettingsState + + case class ArgsSummary( + sstate: SettingsState, + arguments: List[String], + errors: List[String], + warnings: List[String]) { + + def fail(msg: String): Settings.ArgsSummary = + ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) + + def warn(msg: String): Settings.ArgsSummary = + ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) + } + + case class Setting[T: ClassTag] private[Settings] ( + name: String, + description: String, + default: T, + helpArg: String = "", + choices: Option[Seq[?]] = None, + prefix: String = "", + aliases: List[String] = Nil, + depends: List[(Setting[?], Any)] = Nil, + propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { + + private var changed: Boolean = false + + def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] + + def updateIn(state: SettingsState, x: Any): SettingsState = x match + case _: T => state.update(idx, x) + case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") + + def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default + + def isMultivalue: Boolean = implicitly[ClassTag[T]] == ListTag + + def legalChoices: String = + choices match { + case Some(xs) if xs.isEmpty => "" + case Some(r: Range) => s"${r.head}..${r.last}" + case Some(xs) => xs.mkString(", ") + case None => "" + } + + def tryToSet(state: ArgsSummary): ArgsSummary = { + val ArgsSummary(sstate, arg :: args, errors, 
warnings) = state: @unchecked + def update(value: Any, args: List[String]): ArgsSummary = + var dangers = warnings + val value1 = + if changed && isMultivalue then + val value0 = value.asInstanceOf[List[String]] + val current = valueIn(sstate).asInstanceOf[List[String]] + value0.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") + current ++ value0 + else + if changed then dangers :+= s"Flag $name set repeatedly" + value + changed = true + ArgsSummary(updateIn(sstate, value1), args, errors, dangers) + end update + + def fail(msg: String, args: List[String]) = + ArgsSummary(sstate, args, errors :+ msg, warnings) + + def missingArg = + fail(s"missing argument for option $name", args) + + def setString(argValue: String, args: List[String]) = + choices match + case Some(xs) if !xs.contains(argValue) => + fail(s"$argValue is not a valid choice for $name", args) + case _ => + update(argValue, args) + + def setInt(argValue: String, args: List[String]) = + try + val x = argValue.toInt + choices match + case Some(r: Range) if x < r.head || r.last < x => + fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) + case Some(xs) if !xs.contains(x) => + fail(s"$argValue is not a valid choice for $name", args) + case _ => + update(x, args) + catch case _: NumberFormatException => + fail(s"$argValue is not an integer argument for $name", args) + + def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { + case (BooleanTag, _) => + update(true, args) + case (OptionTag, _) => + update(Some(propertyClass.get.getConstructor().newInstance()), args) + case (ListTag, _) => + if (argRest.isEmpty) missingArg + else + val strings = argRest.split(",").toList + choices match + case Some(valid) => strings.filterNot(valid.contains) match + case Nil => update(strings, args) + case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) + case _ => update(strings, args) + case 
(StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => + setString(argRest, args) + case (StringTag, arg2 :: args2) => + if (arg2 startsWith "-") missingArg + else setString(arg2, args2) + case (OutputTag, arg :: args) => + val path = Directory(arg) + val isJar = path.extension == "jar" + if (!isJar && !path.isDirectory) + fail(s"'$arg' does not exist or is not a directory or .jar file", args) + else { + val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) + update(output, args) + } + case (IntTag, args) if argRest.nonEmpty => + setInt(argRest, args) + case (IntTag, arg2 :: args2) => + setInt(arg2, args2) + case (VersionTag, _) => + ScalaVersion.parse(argRest) match { + case Success(v) => update(v, args) + case Failure(ex) => fail(ex.getMessage, args) + } + case (_, Nil) => + missingArg + } + + def matches(argName: String) = (name :: aliases).exists(_ == argName) + + if (prefix != "" && arg.startsWith(prefix)) + doSet(arg drop prefix.length) + else if (prefix == "" && matches(arg.takeWhile(_ != ':'))) + doSet(arg.dropWhile(_ != ':').drop(1)) + else + state + } + } + + object Setting: + extension [T](setting: Setting[T]) + def value(using Context): T = setting.valueIn(ctx.settingsState) + def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) + def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) + + class SettingGroup { + + private val _allSettings = new ArrayBuffer[Setting[?]] + def allSettings: Seq[Setting[?]] = _allSettings.toSeq + + def defaultState: SettingsState = new SettingsState(allSettings map (_.default)) + + def userSetSettings(state: SettingsState): Seq[Setting[?]] = + allSettings filterNot (_.isDefaultIn(state)) + + def toConciseString(state: SettingsState): String = + userSetSettings(state).mkString("(", " ", ")") + + private def checkDependencies(state: ArgsSummary): ArgsSummary = + 
userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) + + private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[?]) = + setting.depends.foldLeft(state) { (s, dep) => + val (depSetting, reqValue) = dep + if (depSetting.valueIn(state.sstate) == reqValue) s + else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") + } + + /** Iterates over the arguments applying them to settings where applicable. + * Then verifies setting dependencies are met. + * + * This takes a boolean indicating whether to keep + * processing if an argument is seen which is not a command line option. + * This is an expedience for the moment so that you can say + * + * scalac -d /tmp foo.scala -optimise + * + * while also allowing + * + * scala Program opt opt + * + * to get their arguments. + */ + @tailrec + final def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = + def stateWithArgs(args: List[String]) = ArgsSummary(state.sstate, args, state.errors, state.warnings) + state.arguments match + case Nil => + checkDependencies(stateWithArgs(skipped)) + case "--" :: args => + checkDependencies(stateWithArgs(skipped ++ args)) + case x :: _ if x startsWith "-" => + @tailrec def loop(settings: List[Setting[?]]): ArgsSummary = settings match + case setting :: settings1 => + val state1 = setting.tryToSet(state) + if state1 ne state then state1 + else loop(settings1) + case Nil => + state.warn(s"bad option '$x' was ignored") + processArguments(loop(allSettings.toList), processAll, skipped) + case arg :: args => + if processAll then processArguments(stateWithArgs(args), processAll, skipped :+ arg) + else state + end processArguments + + def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = + processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) + + def publish[T](settingf: Int => Setting[T]): 
Setting[T] = { + val setting = settingf(_allSettings.length) + _allSettings += setting + setting + } + + def BooleanSetting(name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = + publish(Setting(name, descr, initialValue, aliases = aliases)) + + def StringSetting(name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, helpArg, aliases = aliases)) + + def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + + def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = + publish(Setting(name, descr, default, aliases = aliases)) + + def IntChoiceSetting(name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = + publish(Setting(name, descr, default, choices = Some(choices))) + + def MultiStringSetting(name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, aliases = aliases)) + + def OutputSetting(name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = + publish(Setting(name, descr, default, helpArg)) + + def PathSetting(name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, aliases = aliases)) + + def PhasesSetting(name: String, descr: String, default: String = "", aliases: List[String] = 
Nil): Setting[List[String]] = + publish(Setting(name, descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) + + def PrefixSetting(name: String, pre: String, descr: String): Setting[List[String]] = + publish(Setting(name, descr, Nil, prefix = pre)) + + def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] = + publish(Setting(name, descr, default)) + + def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = + publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) + } +end Settings diff --git a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala new file mode 100644 index 000000000000..545e2f2d9b42 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala @@ -0,0 +1,31 @@ +package dotty.tools +package dotc +package config + +import core.Decorators.* +import util.Property + +enum SourceVersion: + case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. + case `3.2-migration`, `3.2` + case `future-migration`, `future` + + val isMigrating: Boolean = toString.endsWith("-migration") + + def stable: SourceVersion = + if isMigrating then SourceVersion.values(ordinal + 1) else this + + def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal + +object SourceVersion extends Property.Key[SourceVersion]: + def defaultSourceVersion = `3.2` + + /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ + val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) + + /** language versions that the compiler recognises. */ + val validSourceVersionNames = values.toList.map(_.toString.toTermName) + + /** All source versions that can be recognised from a language import. e.g. 
`import language.3.1` */ + val allSourceVersionNames = validSourceVersionNames ::: illegalSourceVersionNames +end SourceVersion diff --git a/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala new file mode 100644 index 000000000000..5b79432a97e7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala @@ -0,0 +1,42 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +/** For placing a wrapper function around property functions. + * Motivated by places like google app engine throwing exceptions + * on property lookups. + */ +trait WrappedProperties extends PropertiesTrait { + def wrap[T](body: => T): Option[T] + + protected def propCategory: String = "wrapped" + protected def pickJarBasedOn: Class[?] = this.getClass + + override def propIsSet(name: String): Boolean = wrap(super.propIsSet(name)) exists (x => x) + override def propOrElse(name: String, alt: String): String = wrap(super.propOrElse(name, alt)) getOrElse alt + override def setProp(name: String, value: String): String = wrap(super.setProp(name, value)).orNull + override def clearProp(name: String): String = wrap(super.clearProp(name)).orNull + override def envOrElse(name: String, alt: String): String = wrap(super.envOrElse(name, alt)) getOrElse alt + override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten + + def systemProperties: Iterator[(String, String)] = { + import scala.jdk.CollectionConverters._ + wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty + } +} + +object WrappedProperties { + object AccessControl extends WrappedProperties { + def wrap[T](body: => T): Option[T] = + try Some(body) + catch { + // the actual exception we are concerned with is AccessControlException, + // but that's deprecated on JDK 17, so catching its superclass is a convenient + // way to avoid a deprecation warning + case _: 
SecurityException => + None + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala new file mode 100644 index 000000000000..d33b1d39942e --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala @@ -0,0 +1,273 @@ +package dotty.tools +package dotc +package core + +import Symbols._, Types._, Contexts._, Constants._ +import dotty.tools.dotc.ast.tpd, tpd.* +import util.Spans.Span +import printing.{Showable, Printer} +import printing.Texts.Text +import annotation.internal.sharable +import language.experimental.pureFunctions + +object Annotations { + + def annotClass(tree: Tree)(using Context) = + if (tree.symbol.isConstructor) tree.symbol.owner + else tree.tpe.typeSymbol + + abstract class Annotation extends Showable, caps.Pure { + + def tree(using Context): Tree + + def symbol(using Context): Symbol = annotClass(tree) + + def hasSymbol(sym: Symbol)(using Context) = symbol == sym + + def matches(cls: Symbol)(using Context): Boolean = symbol.derivesFrom(cls) + + def appliesToModule: Boolean = true // for now; see remark in SymDenotations + + def derivedAnnotation(tree: Tree)(using Context): Annotation = + if (tree eq this.tree) this else Annotation(tree) + + /** All arguments to this annotation in a single flat list */ + def arguments(using Context): List[Tree] = tpd.allArguments(tree) + + def argument(i: Int)(using Context): Option[Tree] = { + val args = arguments + if (i < args.length) Some(args(i)) else None + } + def argumentConstant(i: Int)(using Context): Option[Constant] = + for (case ConstantType(c) <- argument(i) map (_.tpe.widenTermRefExpr.normalized)) yield c + + def argumentConstantString(i: Int)(using Context): Option[String] = + for (case Constant(s: String) <- argumentConstant(i)) yield s + + /** The tree evaluation is in progress. */ + def isEvaluating: Boolean = false + + /** The tree evaluation has finished. 
*/ + def isEvaluated: Boolean = true + + /** Normally, applies a type map to all tree nodes of this annotation, but can + * be overridden. Returns EmptyAnnotation if the type map produces a range + * type, since ranges cannot be types of trees. + */ + def mapWith(tm: TypeMap)(using Context) = + val args = arguments + if args.isEmpty then this + else + val findDiff = new TreeAccumulator[Type]: + def apply(x: Type, tree: Tree)(using Context): Type = + if tm.isRange(x) then x + else + val tp1 = tm(tree.tpe) + foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) + val diff = findDiff(NoType, args) + if tm.isRange(diff) then EmptyAnnotation + else if diff.exists then derivedAnnotation(tm.mapOver(tree)) + else this + + /** Does this annotation refer to a parameter of `tl`? */ + def refersToParamOf(tl: TermLambda)(using Context): Boolean = + val args = arguments + if args.isEmpty then false + else tree.existsSubTree { + case id: Ident => id.tpe.stripped match + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false + case _ => false + } + + /** A string representation of the annotation. Overridden in BodyAnnotation. 
+ */ + def toText(printer: Printer): Text = printer.annotText(this) + + def ensureCompleted(using Context): Unit = tree + + def sameAnnotation(that: Annotation)(using Context): Boolean = + symbol == that.symbol && tree.sameTree(that.tree) + + /** Operations for hash-consing, can be overridden */ + def hash: Int = System.identityHashCode(this) + def eql(that: Annotation) = this eq that + } + + case class ConcreteAnnotation(t: Tree) extends Annotation: + def tree(using Context): Tree = t + + abstract class LazyAnnotation extends Annotation { + protected var mySym: Symbol | (Context ?-> Symbol) | Null + override def symbol(using parentCtx: Context): Symbol = + assert(mySym != null) + mySym match { + case symFn: (Context ?-> Symbol) @unchecked => + mySym = null + mySym = atPhaseBeforeTransforms(symFn) + // We should always produce the same annotation tree, no matter when the + // annotation is evaluated. Setting the phase to a pre-transformation phase + // seems to be enough to ensure this (note that after erasure, `ctx.typer` + // will be the Erasure typer, but that doesn't seem to affect the annotation + // trees we create, so we leave it as is) + case sym: Symbol if sym.defRunId != parentCtx.runId => + mySym = sym.denot.current.symbol + case _ => + } + mySym.asInstanceOf[Symbol] + + protected var myTree: Tree | (Context ?-> Tree) | Null + def tree(using Context): Tree = + assert(myTree != null) + myTree match { + case treeFn: (Context ?-> Tree) @unchecked => + myTree = null + myTree = atPhaseBeforeTransforms(treeFn) + case _ => + } + myTree.asInstanceOf[Tree] + + override def isEvaluating: Boolean = myTree == null + override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] + } + + class DeferredSymAndTree(symFn: Context ?-> Symbol, treeFn: Context ?-> Tree) + extends LazyAnnotation: + protected var mySym: Symbol | (Context ?-> Symbol) | Null = ctx ?=> symFn(using ctx) + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) 
+ + /** An annotation indicating the body of a right-hand side, + * typically of an inline method. Treated specially in + * pickling/unpickling and TypeTreeMaps + */ + abstract class BodyAnnotation extends Annotation { + override def symbol(using Context): ClassSymbol = defn.BodyAnnot + override def derivedAnnotation(tree: Tree)(using Context): Annotation = + if (tree eq this.tree) this else ConcreteBodyAnnotation(tree) + override def arguments(using Context): List[Tree] = Nil + override def ensureCompleted(using Context): Unit = () + override def toText(printer: Printer): Text = "@Body" + } + + class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation { + def tree(using Context): Tree = body + } + + abstract class LazyBodyAnnotation extends BodyAnnotation { + // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait + protected var myTree: Tree | (Context ?-> Tree) | Null + def tree(using Context): Tree = + assert(myTree != null) + myTree match { + case treeFn: (Context ?-> Tree) @unchecked => + myTree = null + myTree = atPhaseBeforeTransforms(treeFn) + case _ => + } + myTree.asInstanceOf[Tree] + + override def isEvaluating: Boolean = myTree == null + override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] + } + + object LazyBodyAnnotation { + def apply(bodyFn: Context ?-> Tree): LazyBodyAnnotation = + new LazyBodyAnnotation: + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> bodyFn(using ctx) + } + + object Annotation { + + def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) + + def apply(cls: ClassSymbol)(using Context): Annotation = + apply(cls, Nil) + + def apply(cls: ClassSymbol, arg: Tree)(using Context): Annotation = + apply(cls, arg :: Nil) + + def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(using Context): Annotation = + apply(cls, arg1 :: arg2 :: Nil) + + def apply(cls: ClassSymbol, args: List[Tree])(using Context): Annotation = + apply(cls.typeRef, args) + + def apply(atp: 
Type, arg: Tree)(using Context): Annotation = + apply(atp, arg :: Nil) + + def apply(atp: Type, arg1: Tree, arg2: Tree)(using Context): Annotation = + apply(atp, arg1 :: arg2 :: Nil) + + def apply(atp: Type, args: List[Tree])(using Context): Annotation = + apply(New(atp, args)) + + /** Create an annotation where the tree is computed lazily. */ + def deferred(sym: Symbol)(treeFn: Context ?-> Tree): Annotation = + new LazyAnnotation { + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) + protected var mySym: Symbol | (Context ?-> Symbol) | Null = sym + } + + /** Create an annotation where the symbol and the tree are computed lazily. */ + def deferredSymAndTree(symFn: Context ?-> Symbol)(treeFn: Context ?-> Tree): Annotation = + DeferredSymAndTree(symFn, treeFn) + + /** Extractor for child annotations */ + object Child { + + /** A deferred annotation to the result of a given child computation */ + def later(delayedSym: Context ?-> Symbol, span: Span)(using Context): Annotation = { + def makeChildLater(using Context) = { + val sym = delayedSym + New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) + .withSpan(span) + } + deferred(defn.ChildAnnot)(makeChildLater) + } + + /** A regular, non-deferred Child annotation */ + def apply(sym: Symbol, span: Span)(using Context): Annotation = later(sym, span) + + def unapply(ann: Annotation)(using Context): Option[Symbol] = + if (ann.symbol == defn.ChildAnnot) { + val AppliedType(_, (arg: NamedType) :: Nil) = ann.tree.tpe: @unchecked + Some(arg.symbol) + } + else None + } + + def makeSourceFile(path: String)(using Context): Annotation = + apply(defn.SourceFileAnnot, Literal(Constant(path))) + } + + @sharable val EmptyAnnotation = Annotation(EmptyTree) + + def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { + val tref = cls.typeRef + Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) + } + + /** Extracts the type of the thrown 
exception from an annotation. + * + * Supports both "old-style" `@throws(classOf[Exception])` + * as well as "new-style" `@throws[Exception]("cause")` annotations. + */ + object ThrownException { + def unapply(a: Annotation)(using Context): Option[Type] = + if (a.symbol ne defn.ThrowsAnnot) + None + else a.argumentConstant(0) match { + // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception])) + case Some(Constant(tpe: Type)) => + Some(tpe) + // new-style: @throws[Exception], @throws[Exception]("cause") + case _ => + stripApply(a.tree) match { + case TypeApply(_, List(tpt)) => + Some(tpt.tpe) + case _ => + None + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Atoms.scala b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala new file mode 100644 index 000000000000..bcaaf6794107 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala @@ -0,0 +1,36 @@ +package dotty.tools +package dotc +package core + +import Types._ + +/** Indicates the singleton types that a type must or may consist of. + * @param lo The lower bound: singleton types in this set are guaranteed + * to be in the carrier type. + * @param hi The upper bound: all singleton types in the carrier type are + * guaranteed to be in this set + * If the underlying type of a singleton type is another singleton type, + * only the latter type ends up in the sets. 
+ */ +enum Atoms: + case Range(lo: Set[Type], hi: Set[Type]) + case Unknown + + def & (that: Atoms): Atoms = this match + case Range(lo1, hi1) => + that match + case Range(lo2, hi2) => Range(lo1 & lo2, hi1 & hi2) + case Unknown => Range(Set.empty, hi1) + case Unknown => + that match + case Range(lo2, hi2) => Range(Set.empty, hi2) + case Unknown => Unknown + + def | (that: Atoms): Atoms = this match + case Range(lo1, hi1) => + that match + case Range(lo2, hi2) => Range(lo1 | lo2, hi1 | hi2) + case Unknown => Unknown + case Unknown => Unknown + +end Atoms diff --git a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala new file mode 100644 index 000000000000..a61701eee2d7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala @@ -0,0 +1,215 @@ +package dotty.tools +package dotc +package core + +import Contexts._, Types._, Symbols._, Names._, Flags._ +import Denotations.SingleDenotation +import Decorators._ +import collection.mutable +import config.SourceVersion.future +import config.Feature.sourceVersion + +/** Realizability status */ +object CheckRealizable { + + sealed abstract class Realizability(val msg: String) { + def andAlso(other: => Realizability): Realizability = + if (this == Realizable) other else this + def mapError(f: Realizability => Realizability): Realizability = + if (this == Realizable) this else f(this) + } + + object Realizable extends Realizability("") + + object NotConcrete extends Realizability(" is not a concrete type") + + class NotFinal(sym: Symbol)(using Context) + extends Realizability(i" refers to nonfinal $sym") + + class HasProblemBounds(name: Name, info: Type)(using Context) + extends Realizability(i" has a member $name with possibly conflicting bounds ${info.bounds.lo} <: ... 
<: ${info.bounds.hi}") + + class HasProblemBaseArg(typ: Type, argBounds: TypeBounds)(using Context) + extends Realizability(i" has a base type $typ with possibly conflicting parameter bounds ${argBounds.lo} <: ... <: ${argBounds.hi}") + + class HasProblemBase(base1: Type, base2: Type)(using Context) + extends Realizability(i" has conflicting base types $base1 and $base2") + + class HasProblemField(fld: SingleDenotation, problem: Realizability)(using Context) + extends Realizability(i" has a member $fld which is not a legal path\nsince ${fld.symbol.name}: ${fld.info}${problem.msg}") + + class ProblemInUnderlying(tp: Type, problem: Realizability)(using Context) + extends Realizability(i"s underlying type ${tp}${problem.msg}") { + assert(problem != Realizable) + } + + def realizability(tp: Type)(using Context): Realizability = + new CheckRealizable().realizability(tp) + + def boundsRealizability(tp: Type)(using Context): Realizability = + new CheckRealizable().boundsRealizability(tp) + + private val LateInitializedFlags = Lazy | Erased +} + +/** Compute realizability status. + * + * A type T is realizable iff it is inhabited by non-null values. This ensures that its type members have good bounds + * (in the sense from DOT papers). A type projection T#L is legal if T is realizable, and can be understood as + * Scala 2's `v.L forSome { val v: T }`. + * + * In general, a realizable type can have multiple inhabitants, hence it need not be stable (in the sense of + * Type.isStable). + */ +class CheckRealizable(using Context) { + import CheckRealizable._ + + /** A set of all fields that have already been checked. Used + * to avoid infinite recursions when analyzing recursive types. + */ + private val checkedFields: mutable.Set[Symbol] = mutable.LinkedHashSet[Symbol]() + + /** Is symbol's definitition a lazy or erased val? 
+ * (note we exclude modules here, because their realizability is ensured separately) + */ + private def isLateInitialized(sym: Symbol) = sym.isOneOf(LateInitializedFlags, butNot = Module) + + /** The realizability status of given type `tp`*/ + def realizability(tp: Type): Realizability = tp.dealias match { + /* + * A `TermRef` for a path `p` is realizable if + * - `p`'s type is stable and realizable, or + * - its underlying path is idempotent (that is, *stable*), total, and not null. + * We don't check yet the "not null" clause: that will require null-safety checking. + * + * We assume that stability of tp.prefix is checked elsewhere, since that's necessary for the path to be legal in + * the first place. + */ + case tp: TermRef => + val sym = tp.symbol + lazy val tpInfoRealizable = realizability(tp.info) + if (sym.is(StableRealizable)) realizability(tp.prefix) + else { + val r = + if (sym.isStableMember && !isLateInitialized(sym)) + // it's realizable because we know that a value of type `tp` has been created at run-time + Realizable + else if (!sym.isEffectivelyFinal) + // it's potentially not realizable since it might be overridden with a member of nonrealizable type + new NotFinal(sym) + else + // otherwise we need to look at the info to determine realizability + // roughly: it's realizable if the info does not have bad bounds + tpInfoRealizable.mapError(r => new ProblemInUnderlying(tp, r)) + r andAlso { + if (sym.isStableMember) sym.setFlag(StableRealizable) // it's known to be stable and realizable + realizability(tp.prefix) + } mapError { r => + // A mutable path is in fact stable and realizable if it has a realizable singleton type. 
+ if (tp.info.isStable && tpInfoRealizable == Realizable) { + sym.setFlag(StableRealizable) + Realizable + } + else r + } + } + case _: SingletonType | NoPrefix => + Realizable + case tp => + def isConcrete(tp: Type): Boolean = tp.dealias match { + case tp: TypeRef => tp.symbol.isClass + case tp: TypeParamRef => false + case tp: TypeProxy => isConcrete(tp.underlying) + case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) + case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) + case _ => false + } + if (!isConcrete(tp)) NotConcrete + else boundsRealizability(tp).andAlso(memberRealizability(tp)) + } + + private def refinedNames(tp: Type): Set[Name] = tp.dealias match { + case tp: RefinedType => refinedNames(tp.parent) + tp.refinedName + case tp: AndType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) + case tp: OrType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) + case tp: TypeProxy => refinedNames(tp.superType) + case _ => Set.empty + } + + /** `Realizable` if `tp` has good bounds, a `HasProblem...` instance + * pointing to a bad bounds member otherwise. "Has good bounds" means: + * + * - all type members have good bounds (except for opaque helpers) + * - all refinements of the underlying type have good bounds (except for opaque companions) + * - all base types are class types, and if their arguments are wildcards + * they have good bounds. + * - base types do not appear in multiple instances with different arguments. + * (depending on the simplification scheme for AndTypes employed, this could + * also lead to base types with bad bounds). 
+ */ + private def boundsRealizability(tp: Type) = { + + val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { + for { + mbr <- tp.nonClassTypeMembers + if !(mbr.info.loBound <:< mbr.info.hiBound) + } + yield new HasProblemBounds(mbr.name, mbr.info) + } + + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { + for { + name <- refinedNames(tp) + if (name.isTypeName) + mbr <- tp.member(name).alternatives + if !(mbr.info.loBound <:< mbr.info.hiBound) + } + yield + new HasProblemBounds(name, mbr.info) + } + + def baseTypeProblems(base: Type) = base match { + case AndType(base1, base2) => + new HasProblemBase(base1, base2) :: Nil + case base => + base.argInfos.collect { + case bounds @ TypeBounds(lo, hi) if !(lo <:< hi) => + new HasProblemBaseArg(base, bounds) + } + } + val baseProblems = + tp.baseClasses.map(_.baseTypeOf(tp)).flatMap(baseTypeProblems) + + baseProblems.foldLeft( + refinementProblems.foldLeft( + memberProblems.foldLeft( + Realizable: Realizability)(_ andAlso _))(_ andAlso _))(_ andAlso _) + } + + /** `Realizable` if all of `tp`'s non-strict fields have realizable types, + * a `HasProblemField` instance pointing to a bad field otherwise. + */ + private def memberRealizability(tp: Type) = { + def checkField(sofar: Realizability, fld: SingleDenotation): Realizability = + sofar andAlso { + if (checkedFields.contains(fld.symbol) || fld.symbol.isOneOf(Private | Mutable | LateInitializedFlags)) + // if field is private it cannot be part of a visible path + // if field is mutable it cannot be part of a path + // if field is lazy or erased it does not need to be initialized when the owning object is + // so in all cases the field does not influence realizability of the enclosing object. + Realizable + else { + checkedFields += fld.symbol + realizability(fld.info).mapError(r => new HasProblemField(fld, r)) + } + } + if sourceVersion.isAtLeast(future) then + // check fields only from version 3.x. 
// Reason: An embedded field could well be nullable, which means it + // should not be part of a path and need not be checked; but we cannot recognize + // this situation until we have a typesystem that tracks nullability. + tp.fields.foldLeft(Realizable: Realizability)(checkField) + else + Realizable + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Comments.scala b/tests/pos-with-compiler-cc/dotc/core/Comments.scala new file mode 100644 index 000000000000..1b20b75ad8ac --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Comments.scala @@ -0,0 +1,462 @@ +package dotty.tools +package dotc +package core + +import scala.language.unsafeNulls + +import ast.{ untpd, tpd } +import Symbols._, Contexts._ +import util.{SourceFile, ReadOnlyMap} +import util.Spans._ +import util.CommentParsing._ +import util.Property.Key +import parsing.Parsers.Parser +import reporting.ProperDefinitionNotFound + +object Comments { + val ContextDoc: Key[ContextDocstrings] = new Key[ContextDocstrings] + + /** Decorator for getting docbase out of context */ + given CommentsContext: AnyRef with + extension (c: Context) def docCtx: Option[ContextDocstrings] = c.property(ContextDoc) + + /** Context for Docstrings, contains basic functionality for getting + * docstrings via `Symbol` and expanding templates + */ + class ContextDocstrings { + + private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" + + val templateExpander: CommentExpander = new CommentExpander + + def docstrings: ReadOnlyMap[Symbol, Comment] = _docstrings + + def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) + + def addDocstring(sym: Symbol, doc: Option[Comment]): Unit = + doc.foreach(d => _docstrings.update(sym, d)) + } + + /** + * A `Comment` contains the unformatted docstring, its position and potentially more + * information that is populated when the comment is "cooked". 
+ * + * @param span The position span of this `Comment`. + * @param raw The raw comment, as seen in the source code, without any expansion. + * @param expanded If this comment has been expanded, its expansion, otherwise `None`. + * @param usecases The usecases for this comment. + */ + final case class Comment( + span: Span, + raw: String, + expanded: Option[String], + usecases: List[UseCase], + variables: Map[String, String], + ) { + + /** Has this comment been cooked or expanded? */ + def isExpanded: Boolean = expanded.isDefined + + /** The body of this comment, without the `@usecase` and `@define` sections, after expansion. */ + lazy val expandedBody: Option[String] = + expanded.map(removeSections(_, "@usecase", "@define")) + + val isDocComment: Boolean = Comment.isDocComment(raw) + + /** + * Expands this comment by giving its content to `f`, and then parsing the `@usecase` sections. + * Typically, `f` will take care of expanding the variables. + * + * @param f The expansion function. + * @return The expanded comment, with the `usecases` populated. + */ + def expand(f: String => String)(using Context): Comment = { + val expandedComment = f(raw) + val useCases = Comment.parseUsecases(expandedComment, span) + Comment(span, raw, Some(expandedComment), useCases, Map.empty) + } + } + + object Comment { + + def isDocComment(comment: String): Boolean = comment.startsWith("/**") + + def apply(span: Span, raw: String): Comment = + Comment(span, raw, None, Nil, Map.empty) + + private def parseUsecases(expandedComment: String, span: Span)(using Context): List[UseCase] = + if (!isDocComment(expandedComment)) + Nil + else + tagIndex(expandedComment) + .filter { startsWithTag(expandedComment, _, "@usecase") } + .map { case (start, end) => decomposeUseCase(expandedComment, span, start, end) } + + /** Turns a usecase section into a UseCase, with code changed to: + * {{{ + * // From: + * def foo: A + * // To: + * def foo: A = ??? 
+ * }}} + */ + private def decomposeUseCase(body: String, span: Span, start: Int, end: Int)(using Context): UseCase = { + def subPos(start: Int, end: Int) = + if (span == NoSpan) NoSpan + else { + val start1 = span.start + start + val end1 = span.end + end + span withStart start1 withPoint start1 withEnd end1 + } + + val codeStart = skipWhitespace(body, start + "@usecase".length) + val codeEnd = skipToEol(body, codeStart) + val code = body.substring(codeStart, codeEnd) + " = ???" + val codePos = subPos(codeStart, codeEnd) + + UseCase(code, codePos) + } + } + + final case class UseCase(code: String, codePos: Span, untpdCode: untpd.Tree, tpdCode: Option[tpd.DefDef]) { + def typed(tpdCode: tpd.DefDef): UseCase = copy(tpdCode = Some(tpdCode)) + } + + object UseCase { + def apply(code: String, codePos: Span)(using Context): UseCase = { + val tree = { + val tree = new Parser(SourceFile.virtual("", code)).localDef(codePos.start) + tree match { + case tree: untpd.DefDef => + val newName = ctx.compilationUnit.freshNames.newName(tree.name, NameKinds.DocArtifactName) + untpd.cpy.DefDef(tree)(name = newName) + case _ => + report.error(ProperDefinitionNotFound(), ctx.source.atSpan(codePos)) + tree + } + } + UseCase(code, codePos, tree, None) + } + } + + /** + * Port of DocComment.scala from nsc + * @author Martin Odersky + * @author Felix Mulder + */ + class CommentExpander { + import dotc.config.Printers.scaladoc + import scala.collection.mutable + + def expand(sym: Symbol, site: Symbol)(using Context): String = { + val parent = if (site != NoSymbol) site else sym + defineVariables(parent) + expandedDocComment(sym, parent) + } + + /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing. 
+ * + * @param sym The symbol for which doc comment is returned + * @param site The class for which doc comments are generated + * @throws ExpansionLimitExceeded when more than 10 successive expansions + * of the same string are done, which is + * interpreted as a recursive variable definition. + */ + def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(using Context): String = { + // when parsing a top level class or module, use the (module-)class itself to look up variable definitions + val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym + else site + expandVariables(cookedDocComment(sym, docStr), sym, parent) + } + + private def template(raw: String): String = + removeSections(raw, "@define") + + private def defines(raw: String): List[String] = { + val sections = tagIndex(raw) + val defines = sections filter { startsWithTag(raw, _, "@define") } + val usecases = sections filter { startsWithTag(raw, _, "@usecase") } + val end = startTag(raw, (defines ::: usecases).sortBy(_._1)) + + defines map { case (start, end) => raw.substring(start, end) } + } + + private def replaceInheritDocToInheritdoc(docStr: String): String = + docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") + + /** The cooked doc comment of an overridden symbol */ + protected def superComment(sym: Symbol)(using Context): Option[String] = + allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "") + + private val cookedDocComments = MutableSymbolMap[String]() + + /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by + * missing sections of an inherited doc comment. + * If a symbol does not have a doc comment but some overridden version of it does, + * the doc comment of the overridden version is copied instead. 
+ */ + def cookedDocComment(sym: Symbol, docStr: String = "")(using Context): String = cookedDocComments.getOrElseUpdate(sym, { + var ownComment = + if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("") + else template(docStr) + ownComment = replaceInheritDocToInheritdoc(ownComment) + + superComment(sym) match { + case None => + // SI-8210 - The warning would be false negative when this symbol is a setter + if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter) + scaladoc.println(s"${sym.span}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") + ownComment.replace("@inheritdoc", "") + case Some(sc) => + if (ownComment == "") sc + else expandInheritdoc(sc, merge(sc, ownComment, sym), sym) + } + }) + + private def isMovable(str: String, sec: (Int, Int)): Boolean = + startsWithTag(str, sec, "@param") || + startsWithTag(str, sec, "@tparam") || + startsWithTag(str, sec, "@return") + + def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = { + val srcSections = tagIndex(src) + val dstSections = tagIndex(dst) + val srcParams = paramDocs(src, "@param", srcSections) + val dstParams = paramDocs(dst, "@param", dstSections) + val srcTParams = paramDocs(src, "@tparam", srcSections) + val dstTParams = paramDocs(dst, "@tparam", dstSections) + val out = new StringBuilder + var copied = 0 + var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _))) + + if (copyFirstPara) { + val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment + (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections) + out append src.substring(0, eop).trim + copied = 3 + tocopy = 3 + } + + def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match { + case Some((start, end)) => + if (end > tocopy) tocopy = end + case None => + srcSec match { + case Some((start1, 
end1)) => + out append dst.substring(copied, tocopy).trim + out append "\n" + copied = tocopy + out append src.substring(start1, end1).trim + case None => + } + } + + //TODO: enable this once you know how to get `sym.paramss` + /* + for (params <- sym.paramss; param <- params) + mergeSection(srcParams get param.name.toString, dstParams get param.name.toString) + for (tparam <- sym.typeParams) + mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString) + + mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections)) + mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections)) + */ + + if (out.length == 0) dst + else { + out append dst.substring(copied) + out.toString + } + } + + /** + * Expand inheritdoc tags + * - for the main comment we transform the inheritdoc into the super variable, + * and the variable expansion can expand it further + * - for the param, tparam and throws sections we must replace comments on the spot + * + * This is done separately, for two reasons: + * 1. It takes longer to run compared to merge + * 2. 
The inheritdoc annotation should not be used very often, as building the comment from pieces severely + * impacts performance + * + * @param parent The source (or parent) comment + * @param child The child (overriding member or usecase) comment + * @param sym The child symbol + * @return The child comment with the inheritdoc sections expanded + */ + def expandInheritdoc(parent: String, child: String, sym: Symbol): String = + if (child.indexOf("@inheritdoc") == -1) + child + else { + val parentSections = tagIndex(parent) + val childSections = tagIndex(child) + val parentTagMap = sectionTagMap(parent, parentSections) + val parentNamedParams = Map() + + ("@param" -> paramDocs(parent, "@param", parentSections)) + + ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) + + ("@throws" -> paramDocs(parent, "@throws", parentSections)) + + val out = new StringBuilder + + def replaceInheritdoc(childSection: String, parentSection: => String) = + if (childSection.indexOf("@inheritdoc") == -1) + childSection + else + childSection.replace("@inheritdoc", parentSection) + + def getParentSection(section: (Int, Int)): String = { + + def getSectionHeader = extractSectionTag(child, section) match { + case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section) + case other => other + } + + def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String = + paramMap.get(param) match { + case Some(section) => + // Cleanup the section tag and parameter + val sectionTextBounds = extractSectionText(parent, section) + cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) + case None => + scaladoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + + " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") + "" + } + + child.substring(section._1, section._1 + 7) match { + case param@("@param "|"@tparam"|"@throws") => + 
sectionString(extractSectionParam(child, section), parentNamedParams(param.trim)) + case _ => + sectionString(extractSectionTag(child, section), parentTagMap) + } + } + + def mainComment(str: String, sections: List[(Int, Int)]): String = + if (str.trim.length > 3) + str.trim.substring(3, startTag(str, sections)) + else + "" + + // Append main comment + out.append("/**") + out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections))) + + // Append sections + for (section <- childSections) + out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section))) + + out.append("*/") + out.toString + } + + protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(using Context): String = { + val expandLimit = 10 + + def expandInternal(str: String, depth: Int): String = { + if (depth >= expandLimit) + throw new ExpansionLimitExceeded(str) + + val out = new StringBuilder + var copied, idx = 0 + // excluding variables written as \$foo so we can use them when + // necessary to document things like Symbol#decode + def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\' + while (idx < str.length) + if ((str charAt idx) != '$' || isEscaped) + idx += 1 + else { + val vstart = idx + idx = skipVariable(str, idx + 1) + def replaceWith(repl: String) = { + out append str.substring(copied, vstart) + out append repl + copied = idx + } + variableName(str.substring(vstart + 1, idx)) match { + case "super" => + superComment(sym) foreach { sc => + val superSections = tagIndex(sc) + replaceWith(sc.substring(3, startTag(sc, superSections))) + for (sec @ (start, end) <- superSections) + if (!isMovable(sc, sec)) out append sc.substring(start, end) + } + case "" => idx += 1 + case vname => + lookupVariable(vname, site) match { + case Some(replacement) => replaceWith(replacement) + case None => + scaladoc.println(s"Variable $vname undefined in comment for $sym in $site") + } + } + } + if (out.length == 0) str 
+ else { + out append str.substring(copied) + expandInternal(out.toString, depth + 1) + } + } + + // We suppressed expanding \$ throughout the recursion, and now we + // need to replace \$ with $ so it looks as intended. + expandInternal(initialStr, 0).replace("""\$""", "$") + } + + def defineVariables(sym: Symbol)(using Context): Unit = { + val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r + + val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("") + defs(sym) ++= defines(raw).map { + str => { + val start = skipWhitespace(str, "@define".length) + val (key, value) = str.splitAt(skipVariable(str, start)) + key.drop(start) -> value + } + } map { + case (key, Trim(value)) => + variableName(key) -> value.replaceAll("\\s+\\*+$", "") + } + } + + /** Maps symbols to the variable -> replacement maps that are defined + * in their doc comments + */ + private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map() + + /** Lookup definition of variable. + * + * @param vble The variable for which a definition is searched + * @param site The class for which doc comments are generated + */ + def lookupVariable(vble: String, site: Symbol)(using Context): Option[String] = site match { + case NoSymbol => None + case _ => + val searchList = + if (site.flags.is(Flags.Module)) site :: site.info.baseClasses + else site.info.baseClasses + + searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match { + case Some(str) if str startsWith "$" => lookupVariable(str.tail, site) + case res => res orElse lookupVariable(vble, site.owner) + } + } + + /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing + * If a symbol does not have a doc comment but some overridden version of it does, + * the position of the doc comment of the overridden version is returned instead. 
+ */ + def docCommentPos(sym: Symbol)(using Context): Span = + ctx.docCtx.flatMap(_.docstring(sym).map(_.span)).getOrElse(NoSpan) + + /** A version which doesn't consider self types, as a temporary measure: + * an infinite loop has broken out between superComment and cookedDocComment + * since r23926. + */ + private def allInheritedOverriddenSymbols(sym: Symbol)(using Context): List[Symbol] = + if (!sym.owner.isClass) Nil + else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..` + //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) + + class ExpansionLimitExceeded(str: String) extends Exception + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constants.scala b/tests/pos-with-compiler-cc/dotc/core/Constants.scala new file mode 100644 index 000000000000..f45e9e5217de --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Constants.scala @@ -0,0 +1,261 @@ +package dotty.tools +package dotc +package core + +import Types._, Symbols._, Contexts._ +import printing.Printer +import printing.Texts.Text + +object Constants { + + inline val NoTag = 0 + inline val UnitTag = 1 + inline val BooleanTag = 2 + inline val ByteTag = 3 + inline val ShortTag = 4 + inline val CharTag = 5 + inline val IntTag = 6 + inline val LongTag = 7 + inline val FloatTag = 8 + inline val DoubleTag = 9 + inline val StringTag = 10 + inline val NullTag = 11 + inline val ClazzTag = 12 + + class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] { + import java.lang.Double.doubleToRawLongBits + import java.lang.Float.floatToRawIntBits + + def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue + def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue + def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue + def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag + 
def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag + def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag + def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag + def isNonUnitAnyVal: Boolean = BooleanTag <= tag && tag <= DoubleTag + def isAnyVal: Boolean = UnitTag <= tag && tag <= DoubleTag + + def tpe(using Context): Type = tag match { + case UnitTag => defn.UnitType + case BooleanTag => defn.BooleanType + case ByteTag => defn.ByteType + case ShortTag => defn.ShortType + case CharTag => defn.CharType + case IntTag => defn.IntType + case LongTag => defn.LongType + case FloatTag => defn.FloatType + case DoubleTag => defn.DoubleType + case StringTag => defn.StringType + case NullTag => defn.NullType + case ClazzTag => defn.ClassType(typeValue) + } + + /** We need the equals method to take account of tags as well as values. + */ + override def equals(other: Any): Boolean = other match { + case that: Constant => + this.tag == that.tag && equalHashValue == that.equalHashValue + case _ => false + } + + def isNaN: Boolean = value match { + case f: Float => f.isNaN + case d: Double => d.isNaN + case _ => false + } + + def booleanValue: Boolean = + if (tag == BooleanTag) value.asInstanceOf[Boolean] + else throw new Error("value " + value + " is not a boolean") + + def byteValue: Byte = tag match { + case ByteTag => value.asInstanceOf[Byte] + case ShortTag => value.asInstanceOf[Short].toByte + case CharTag => value.asInstanceOf[Char].toByte + case IntTag => value.asInstanceOf[Int].toByte + case LongTag => value.asInstanceOf[Long].toByte + case FloatTag => value.asInstanceOf[Float].toByte + case DoubleTag => value.asInstanceOf[Double].toByte + case _ => throw new Error("value " + value + " is not a Byte") + } + + def shortValue: Short = tag match { + case ByteTag => value.asInstanceOf[Byte].toShort + case ShortTag => value.asInstanceOf[Short] + case CharTag => value.asInstanceOf[Char].toShort + case IntTag => value.asInstanceOf[Int].toShort + case 
LongTag => value.asInstanceOf[Long].toShort + case FloatTag => value.asInstanceOf[Float].toShort + case DoubleTag => value.asInstanceOf[Double].toShort + case _ => throw new Error("value " + value + " is not a Short") + } + + def charValue: Char = tag match { + case ByteTag => value.asInstanceOf[Byte].toChar + case ShortTag => value.asInstanceOf[Short].toChar + case CharTag => value.asInstanceOf[Char] + case IntTag => value.asInstanceOf[Int].toChar + case LongTag => value.asInstanceOf[Long].toChar + case FloatTag => value.asInstanceOf[Float].toChar + case DoubleTag => value.asInstanceOf[Double].toChar + case _ => throw new Error("value " + value + " is not a Char") + } + + def intValue: Int = tag match { + case ByteTag => value.asInstanceOf[Byte].toInt + case ShortTag => value.asInstanceOf[Short].toInt + case CharTag => value.asInstanceOf[Char].toInt + case IntTag => value.asInstanceOf[Int] + case LongTag => value.asInstanceOf[Long].toInt + case FloatTag => value.asInstanceOf[Float].toInt + case DoubleTag => value.asInstanceOf[Double].toInt + case _ => throw new Error("value " + value + " is not an Int") + } + + def longValue: Long = tag match { + case ByteTag => value.asInstanceOf[Byte].toLong + case ShortTag => value.asInstanceOf[Short].toLong + case CharTag => value.asInstanceOf[Char].toLong + case IntTag => value.asInstanceOf[Int].toLong + case LongTag => value.asInstanceOf[Long] + case FloatTag => value.asInstanceOf[Float].toLong + case DoubleTag => value.asInstanceOf[Double].toLong + case _ => throw new Error("value " + value + " is not a Long") + } + + def floatValue: Float = tag match { + case ByteTag => value.asInstanceOf[Byte].toFloat + case ShortTag => value.asInstanceOf[Short].toFloat + case CharTag => value.asInstanceOf[Char].toFloat + case IntTag => value.asInstanceOf[Int].toFloat + case LongTag => value.asInstanceOf[Long].toFloat + case FloatTag => value.asInstanceOf[Float] + case DoubleTag => value.asInstanceOf[Double].toFloat + case _ => throw new 
Error("value " + value + " is not a Float") + } + + def doubleValue: Double = tag match { + case ByteTag => value.asInstanceOf[Byte].toDouble + case ShortTag => value.asInstanceOf[Short].toDouble + case CharTag => value.asInstanceOf[Char].toDouble + case IntTag => value.asInstanceOf[Int].toDouble + case LongTag => value.asInstanceOf[Long].toDouble + case FloatTag => value.asInstanceOf[Float].toDouble + case DoubleTag => value.asInstanceOf[Double] + case _ => throw new Error("value " + value + " is not a Double") + } + + /** Convert constant value to conform to given type. + */ + def convertTo(pt: Type)(using Context): Constant | Null = { + def classBound(pt: Type): Type = pt.dealias.stripTypeVar match { + case tref: TypeRef if !tref.symbol.isClass && tref.info.exists => + classBound(tref.info.bounds.lo) + case param: TypeParamRef => + ctx.typerState.constraint.entry(param) match { + case TypeBounds(lo, hi) => + if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound + else classBound(lo) + case NoType => classBound(param.binder.paramInfos(param.paramNum).lo) + case inst => classBound(inst) + } + case pt => pt + } + pt match + case ConstantType(value) if value == this => this + case _: SingletonType => null + case _ => + val target = classBound(pt).typeSymbol + if (target == tpe.typeSymbol) + this + else if ((target == defn.ByteClass) && isByteRange) + Constant(byteValue) + else if (target == defn.ShortClass && isShortRange) + Constant(shortValue) + else if (target == defn.CharClass && isCharRange) + Constant(charValue) + else if (target == defn.IntClass && isIntRange) + Constant(intValue) + else if (target == defn.LongClass && isLongRange) + Constant(longValue) + else if (target == defn.FloatClass && isFloatRange) + Constant(floatValue) + else if (target == defn.DoubleClass && isNumeric) + Constant(doubleValue) + else + null + } + + def stringValue: String = value.toString + + def toText(printer: Printer): Text = printer.toText(this) + + 
def typeValue: Type = value.asInstanceOf[Type] + + /** + * Consider two `NaN`s to be identical, despite non-equality + * Consider -0d to be distinct from 0d, despite equality + * + * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`) + * to avoid treating different encodings of `NaN` as the same constant. + * You probably can't express different `NaN` varieties as compile time + * constants in regular Scala code, but it is conceivable that you could + * conjure them with a macro. + */ + private def equalHashValue: Any = value match { + case f: Float => floatToRawIntBits(f) + case d: Double => doubleToRawLongBits(d) + case v => v + } + + override def hashCode: Int = { + import scala.util.hashing.MurmurHash3._ + val seed = 17 + var h = seed + h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. + h = mix(h, equalHashValue.##) + finalizeHash(h, length = 2) + } + + override def toString: String = s"Constant($value)" + def canEqual(x: Any): Boolean = true + def get: Any = value + def isEmpty: Boolean = false + def _1: Any = value + } + + object Constant { + def apply(x: Null): Constant = new Constant(x, NullTag) + def apply(x: Unit): Constant = new Constant(x, UnitTag) + def apply(x: Boolean): Constant = new Constant(x, BooleanTag) + def apply(x: Byte): Constant = new Constant(x, ByteTag) + def apply(x: Short): Constant = new Constant(x, ShortTag) + def apply(x: Int): Constant = new Constant(x, IntTag) + def apply(x: Long): Constant = new Constant(x, LongTag) + def apply(x: Float): Constant = new Constant(x, FloatTag) + def apply(x: Double): Constant = new Constant(x, DoubleTag) + def apply(x: String): Constant = new Constant(x, StringTag) + def apply(x: Char): Constant = new Constant(x, CharTag) + def apply(x: Type): Constant = new Constant(x, ClazzTag) + def apply(value: Any): Constant = + new Constant(value, + value match { + case null => NullTag + case x: Unit => UnitTag + case x: Boolean => BooleanTag + case 
x: Byte => ByteTag + case x: Short => ShortTag + case x: Int => IntTag + case x: Long => LongTag + case x: Float => FloatTag + case x: Double => DoubleTag + case x: String => StringTag + case x: Char => CharTag + case x: Type => ClazzTag + } + ) + + def unapply(c: Constant): Constant = c + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala new file mode 100644 index 000000000000..07b6e71cdcc9 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala @@ -0,0 +1,196 @@ +package dotty.tools +package dotc +package core + +import Types._, Contexts._ +import printing.Showable + +/** Constraint over undetermined type parameters. Constraints are built + * over values of the following types: + * + * - TypeLambda A constraint constrains the type parameters of a set of TypeLambdas + * - TypeParamRef The parameters of the constrained type lambdas + * - TypeVar Every constrained parameter might be associated with a TypeVar + * that has the TypeParamRef as origin. + */ +abstract class Constraint extends Showable { + + type This <: Constraint + + /** Does the constraint's domain contain the type parameters of `tl`? */ + def contains(tl: TypeLambda): Boolean + + /** Does the constraint's domain contain the type parameter `param`? */ + def contains(param: TypeParamRef): Boolean + + /** Does this constraint contain the type variable `tvar` and is it uninstantiated? */ + def contains(tvar: TypeVar): Boolean + + /** The constraint entry for given type parameter `param`, or NoType if `param` is not part of + * the constraint domain. Note: Low level, implementation dependent. + */ + def entry(param: TypeParamRef): Type + + /** The type variable corresponding to parameter `param`, or + * NoType, if `param` is not in constrained or is not paired with a type variable. + */ + def typeVarOfParam(param: TypeParamRef): Type + + /** Is it known that `param1 <:< param2`? 
*/ + def isLess(param1: TypeParamRef, param2: TypeParamRef): Boolean + + /** The parameters that are known to be smaller wrt <: than `param` */ + def lower(param: TypeParamRef): List[TypeParamRef] + + /** The parameters that are known to be greater wrt <: than `param` */ + def upper(param: TypeParamRef): List[TypeParamRef] + + /** The lower dominator set. + * + * This is like `lower`, except that each parameter returned is no smaller than every other returned parameter. + */ + def minLower(param: TypeParamRef): List[TypeParamRef] + + /** The upper dominator set. + * + * This is like `upper`, except that each parameter returned is no greater than every other returned parameter. + */ + def minUpper(param: TypeParamRef): List[TypeParamRef] + + /** lower(param) \ lower(butNot) */ + def exclusiveLower(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] + + /** upper(param) \ upper(butNot) */ + def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] + + /** The constraint bounds for given type parameter `param`. + * Poly params that are known to be smaller or greater than `param` + * are not contained in the return bounds. + * @pre `param` is not part of the constraint domain. + */ + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds + + /** A new constraint which is derived from this constraint by adding + * entries for all type parameters of `poly`. + * @param tvars A list of type variables associated with the params, + * or Nil if the constraint will just be checked for + * satisfiability but will solved to give instances of + * type variables. + */ + def add(poly: TypeLambda, tvars: List[TypeVar])(using Context): This + + /** A new constraint which is derived from this constraint by updating + * the entry for parameter `param` to `tp`. 
+   *  `tp` can be one of the following:
+   *
+   *   - A TypeBounds value, indicating new constraint bounds
+   *   - Another type, indicating a solution for the parameter
+   *
+   *  @pre `this contains param`.
+   */
+  def updateEntry(param: TypeParamRef, tp: Type)(using Context): This
+
+  /** A constraint that includes the relationship `p1 <: p2`.
+   *  `<:` relationships between parameters ("edges") are propagated, but
+   *  non-parameter bounds are left alone.
+   *
+   *  @param direction  Must be set to `KeepParam1` or `KeepParam2` when
+   *                    `p2 <: p1` is already true depending on which parameter
+   *                    the caller intends to keep. This will avoid propagating
+   *                    bounds that will be redundant after `p1` and `p2` are
+   *                    unified.
+   */
+  def addLess(p1: TypeParamRef, p2: TypeParamRef,
+    direction: UnificationDirection = UnificationDirection.NoUnification)(using Context): This
+
+  /** A new constraint which is derived from this constraint by removing
+   *  the type parameter `param` from the domain and replacing all top-level occurrences
+   *  of the parameter elsewhere in the constraint by type `tp`, or a conservative
+   *  approximation of it if that is needed to avoid cycles.
+   *  Occurrences nested inside a refinement or prefix are not affected.
+   */
+  def replace(param: TypeParamRef, tp: Type)(using Context): This
+
+  /** Is entry associated with `tl` removable? This is the case if
+   *  all type parameters of the entry are associated with type variables
+   *  which have their `inst` fields set.
+   */
+  def isRemovable(tl: TypeLambda): Boolean
+
+  /** A new constraint with all entries coming from `tl` removed. */
+  def remove(tl: TypeLambda)(using Context): This
+
+  /** A new constraint with entry `from` replaced with `to`.
+   *  References to `from` from within other constraint bounds are updated to `to`.
+   *  Type variables are left alone.
+   */
+  def subst(from: TypeLambda, to: TypeLambda)(using Context): This
+
+  /** Is `tv` marked as hard in the constraint?
*/ + def isHard(tv: TypeVar): Boolean + + /** The same as this constraint, but with `tv` marked as hard. */ + def withHard(tv: TypeVar)(using Context): This + + /** Gives for each instantiated type var that does not yet have its `inst` field + * set, the instance value stored in the constraint. Storing instances in constraints + * is done only in a temporary way for contexts that may be retracted + * without also retracting the type var as a whole. + */ + def instType(tvar: TypeVar): Type + + /** The given `tl` in case it is not contained in this constraint, + * a fresh copy of `tl` otherwise. + */ + def ensureFresh(tl: TypeLambda)(using Context): TypeLambda + + /** The type lambdas constrained by this constraint */ + def domainLambdas: List[TypeLambda] + + /** The type lambda parameters constrained by this constraint */ + def domainParams: List[TypeParamRef] + + /** Check whether predicate holds for all parameters in constraint */ + def forallParams(p: TypeParamRef => Boolean): Boolean + + /** Perform operation `op` on all typevars that do not have their `inst` field set. */ + def foreachTypeVar(op: TypeVar => Unit): Unit + + /** The uninstantiated typevars of this constraint, which still have a bounds constraint + */ + def uninstVars: collection.Seq[TypeVar] + + /** Whether `tl` is present in both `this` and `that` but is associated with + * different TypeVars there, meaning that the constraints cannot be merged. + */ + def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean + + /** Check that no constrained parameter contains itself as a bound */ + def checkNonCyclic()(using Context): this.type + + /** Does `param` occur at the toplevel in `tp` ? + * Toplevel means: the type itself or a factor in some + * combination of `&` or `|` types. 
+   */
+  def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean
+
+  /** Check that constraint only refers to TypeParamRefs bound by itself */
+  def checkClosed()(using Context): Unit
+
+  /** Check that every typevar in this constraint has as origin a type parameter
+   *  of the type lambda that is associated with the typevar itself.
+   */
+  def checkConsistentVars()(using Context): Unit
+}
+
+/** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up
+ *  unifying one parameter with the other, this enum lets `addLess` know which
+ *  direction the unification will take.
+ */
+enum UnificationDirection:
+  /** Neither p1 nor p2 will be instantiated. */
+  case NoUnification
+  /** `p2 := p1`, p1 left uninstantiated. */
+  case KeepParam1
+  /** `p1 := p2`, p2 left uninstantiated. */
+  case KeepParam2
diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala
new file mode 100644
index 000000000000..a3d8cabba971
--- /dev/null
+++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala
@@ -0,0 +1,879 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._
+import Contexts._
+import Symbols._
+import Decorators._
+import Flags._
+import config.Config
+import config.Printers.typr
+import typer.ProtoTypes.{newTypeVar, representedParamRef}
+import UnificationDirection.*
+import NameKinds.AvoidNameKind
+import util.SimpleIdentitySet
+import NullOpsDecorator.stripNull
+
+/** Methods for adding constraints and solving them.
+ *
+ * What goes into a Constraint as opposed to a ConstraintHandler?
+ *
+ * Constraint code is purely functional: Operations get constraints and produce new ones.
+ * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done
+ * elsewhere.
+ *
+ * By comparison: Constraint handlers are parts of type comparers and can use their functionality.
+ * Constraint handlers update the current constraint as a side effect. + */ +trait ConstraintHandling { + + def constr: config.Printers.Printer = config.Printers.constr + + protected def isSub(tp1: Type, tp2: Type)(using Context): Boolean + protected def isSame(tp1: Type, tp2: Type)(using Context): Boolean + + protected def constraint: Constraint + protected def constraint_=(c: Constraint): Unit + + private var addConstraintInvocations = 0 + + /** If the constraint is frozen we cannot add new bounds to the constraint. */ + protected var frozenConstraint: Boolean = false + + /** Potentially a type lambda that is still instantiatable, even though the constraint + * is generally frozen. + */ + protected var caseLambda: Type = NoType + + /** If set, align arguments `S1`, `S2`when taking the glb + * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. + * Aligning means computing `S1 =:= S2` which may change the current constraint. + * See note in TypeComparer#distributeAnd. + */ + protected var homogenizeArgs: Boolean = false + + /** We are currently comparing type lambdas. Used as a flag for + * optimization: when `false`, no need to do an expensive `pruneLambdaParams` + */ + protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + + /** Used for match type reduction: If false, we don't recognize an abstract type + * to be a subtype type of any of its base classes. This is in place only at the + * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. + */ + protected var canWidenAbstract: Boolean = true + + protected var myNecessaryConstraintsOnly = false + /** When collecting the constraints needed for a particular subtyping + * judgment to be true, we sometimes need to approximate the constraint + * set (see `TypeComparer#either` for example). 
+ * + * Normally, this means adding extra constraints which may not be necessary + * for the subtyping judgment to be true, but if this variable is set to true + * we will instead under-approximate and keep only the constraints that must + * always be present for the subtyping judgment to hold. + * + * This is needed for GADT bounds inference to be sound, but it is also used + * when constraining a method call based on its expected type to avoid adding + * constraints that would later prevent us from typechecking method + * arguments, see or-inf.scala and and-inf.scala for examples. + */ + protected def necessaryConstraintsOnly(using Context): Boolean = + ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly + + /** If `trustBounds = false` we perform comparisons in a pessimistic way as follows: + * Given an abstract type `A >: L <: H`, a subtype comparison of any type + * with `A` will compare against both `L` and `H`. E.g. + * + * T <:< A if T <:< L and T <:< H + * A <:< T if L <:< T and H <:< T + * + * This restricted form makes sure we don't "forget" types when forming + * unions and intersections with abstract types that have bad bounds. E.g. + * the following example from neg/i8900.scala that @smarter came up with: + * We have a type variable X with constraints + * + * X >: 1, X >: x.M + * + * where `x` is a locally nested variable and `x.M` has bad bounds + * + * x.M >: Int | String <: Int & String + * + * If we trust bounds, then the lower bound of `X` is `x.M` since `x.M >: 1`. + * Then even if we correct levels on instantiation to eliminate the local `x`, + * it is alreay too late, we'd get `Int & String` as instance, which does not + * satisfy the original constraint `X >: 1`. + * + * But if `trustBounds` is false, we do not conclude the `x.M >: 1` since + * we compare both bounds and the upper bound `Int & String` is not a supertype + * of `1`. 
So the lower bound is `1 | x.M` and when we level-avoid that we + * get `1 | Int & String`, which simplifies to `Int`. + */ + private var myTrustBounds = true + + inline def withUntrustedBounds(op: => Type): Type = + val saved = myTrustBounds + myTrustBounds = false + try op finally myTrustBounds = saved + + def trustBounds: Boolean = + !Config.checkLevelsOnInstantiation || myTrustBounds + + def checkReset() = + assert(addConstraintInvocations == 0) + assert(frozenConstraint == false) + assert(caseLambda == NoType) + assert(homogenizeArgs == false) + assert(comparedTypeLambdas == Set.empty) + + def nestingLevel(param: TypeParamRef)(using Context) = constraint.typeVarOfParam(param) match + case tv: TypeVar => tv.nestingLevel + case _ => + // This should only happen when reducing match types (in + // TrackingTypeComparer#matchCases) or in uncommitable TyperStates (as + // asserted in ProtoTypes.constrained) and is special-cased in `levelOK` + // below. + Int.MaxValue + + /** Is `level` <= `maxLevel` or legal in the current context? */ + def levelOK(level: Int, maxLevel: Int)(using Context): Boolean = + level <= maxLevel + || ctx.isAfterTyper || !ctx.typerState.isCommittable // Leaks in these cases shouldn't break soundness + || level == Int.MaxValue // See `nestingLevel` above. + || !Config.checkLevelsOnConstraints + + /** If `param` is nested deeper than `maxLevel`, try to instantiate it to a + * fresh type variable of level `maxLevel` and return the new variable. + * If this isn't possible, throw a TypeError. 
+ */ + def atLevel(maxLevel: Int, param: TypeParamRef)(using Context): TypeParamRef = + if levelOK(nestingLevel(param), maxLevel) then + return param + LevelAvoidMap(0, maxLevel)(param) match + case freshVar: TypeVar => freshVar.origin + case _ => throw new TypeError( + i"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") + + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) + + /** The full lower bound of `param` includes both the `nonParamBounds` and the + * params in the constraint known to be `<: param`, except that + * params with a `nestingLevel` higher than `param` will be instantiated + * to a fresh param at a legal level. See the documentation of `TypeVar` + * for details. + */ + def fullLowerBound(param: TypeParamRef)(using Context): Type = + val maxLevel = nestingLevel(param) + var loParams = constraint.minLower(param) + if maxLevel != Int.MaxValue then + loParams = loParams.mapConserve(atLevel(maxLevel, _)) + loParams.foldLeft(nonParamBounds(param).lo)(_ | _) + + /** The full upper bound of `param`, see the documentation of `fullLowerBounds` above. */ + def fullUpperBound(param: TypeParamRef)(using Context): Type = + val maxLevel = nestingLevel(param) + var hiParams = constraint.minUpper(param) + if maxLevel != Int.MaxValue then + hiParams = hiParams.mapConserve(atLevel(maxLevel, _)) + hiParams.foldLeft(nonParamBounds(param).hi)(_ & _) + + /** Full bounds of `param`, including other lower/upper params. + * + * Note that underlying operations perform subtype checks - for this reason, recursing on `fullBounds` + * of some param when comparing types might lead to infinite recursion. Consider `bounds` instead. 
+ */ + def fullBounds(param: TypeParamRef)(using Context): TypeBounds = + nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) + + /** An approximating map that prevents types nested deeper than maxLevel as + * well as WildcardTypes from leaking into the constraint. + */ + class LevelAvoidMap(topLevelVariance: Int, maxLevel: Int)(using Context) extends TypeOps.AvoidMap: + variance = topLevelVariance + + def toAvoid(tp: NamedType): Boolean = + tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel, maxLevel) + + /** Return a (possibly fresh) type variable of a level no greater than `maxLevel` which is: + * - lower-bounded by `tp` if variance >= 0 + * - upper-bounded by `tp` if variance <= 0 + * If this isn't possible, return the empty range. + */ + def legalVar(tp: TypeVar): Type = + val oldParam = tp.origin + val nameKind = + if variance > 0 then AvoidNameKind.UpperBound + else if variance < 0 then AvoidNameKind.LowerBound + else AvoidNameKind.BothBounds + + /** If it exists, return the first param in the list created in a previous call to `legalVar(tp)` + * with the appropriate level and variance. + */ + def findParam(params: List[TypeParamRef]): Option[TypeParamRef] = + params.find(p => + nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && + (p.paramName.is(AvoidNameKind.BothBounds) || + variance != 0 && p.paramName.is(nameKind))) + + // First, check if we can reuse an existing parameter, this is more than an optimization + // since it avoids an infinite loop in tests/pos/i8900-cycle.scala + findParam(constraint.lower(oldParam)).orElse(findParam(constraint.upper(oldParam))) match + case Some(param) => + constraint.typeVarOfParam(param) + case _ => + // Otherwise, try to return a fresh type variable at `maxLevel` with + // the appropriate constraints. 
+ val name = nameKind(oldParam.paramName.toTermName).toTypeName + val freshVar = newTypeVar(TypeBounds.upper(tp.topType), name, + nestingLevel = maxLevel, represents = oldParam) + val ok = + if variance < 0 then + addLess(freshVar.origin, oldParam) + else if variance > 0 then + addLess(oldParam, freshVar.origin) + else + unify(freshVar.origin, oldParam) + if ok then freshVar else emptyRange + end legalVar + + override def apply(tp: Type): Type = tp match + case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel, maxLevel) => + legalVar(tp) + // TypeParamRef can occur in tl bounds + case tp: TypeParamRef => + constraint.typeVarOfParam(tp) match + case tvar: TypeVar => + apply(tvar) + case _ => super.apply(tp) + case _ => + super.apply(tp) + + override def mapWild(t: WildcardType) = + if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) + else + val tvar = newTypeVar(apply(t.effectiveBounds).toBounds, nestingLevel = maxLevel) + tvar + end LevelAvoidMap + + /** Approximate `rawBound` if needed to make it a legal bound of `param` by + * avoiding wildcards and types with a level strictly greater than its + * `nestingLevel`. + * + * Note that level-checking must be performed here and cannot be delayed + * until instantiation because if we allow level-incorrect bounds, then we + * might end up reasoning with bad bounds outside of the scope where they are + * defined. This can lead to level-correct but unsound instantiations as + * demonstrated by tests/neg/i8900.scala. + */ + protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = + // Over-approximate for soundness. + var variance = if isUpper then -1 else 1 + // ...unless we can only infer necessary constraints, in which case we + // flip the variance to under-approximate. 
+ if necessaryConstraintsOnly then variance = -variance + + val approx = new LevelAvoidMap(variance, nestingLevel(param)): + override def legalVar(tp: TypeVar): Type = + // `legalVar` will create a type variable whose bounds depend on + // `variance`, but whether the variance is positive or negative, + // we can still infer necessary constraints since just creating a + // type variable doesn't reduce the set of possible solutions. + // Therefore, we can safely "unflip" the variance flipped above. + // This is necessary for i8900-unflip.scala to typecheck. + val v = if necessaryConstraintsOnly then -this.variance else this.variance + atVariance(v)(super.legalVar(tp)) + approx(rawBound) + end legalBound + + protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + if !constraint.contains(param) then true + else if !isUpper && param.occursIn(rawBound) then + // We don't allow recursive lower bounds when defining a type, + // so we shouldn't allow them as constraints either. + false + else + val bound = legalBound(param, rawBound, isUpper) + val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) + val equalBounds = (if isUpper then lo else hi) eq bound + if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then + // The narrowed bounds are equal and not recursive, + // so we can remove `param` from the constraint. + constraint = constraint.replace(param, bound) + true + else + // Narrow one of the bounds of type parameter `param` + // If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure + // that `param >: bound`. 
+ val narrowedBounds = + val saved = homogenizeArgs + homogenizeArgs = Config.alignArgsInAnd + try + withUntrustedBounds( + if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) + else oldBounds.derivedTypeBounds(lo | bound, hi)) + finally + homogenizeArgs = saved + //println(i"narrow bounds for $param from $oldBounds to $narrowedBounds") + val c1 = constraint.updateEntry(param, narrowedBounds) + (c1 eq constraint) + || { + constraint = c1 + val TypeBounds(lo, hi) = constraint.entry(param): @unchecked + isSub(lo, hi) + } + end addOneBound + + protected def addBoundTransitively(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + + /** Adjust the bound `tp` in the following ways: + * + * 1. Toplevel occurrences of TypeRefs that are instantiated in the current + * constraint are also dereferenced. + * 2. Toplevel occurrences of ExprTypes lead to a `NoType` return, which + * causes the addOneBound operation to fail. + * + * An occurrence is toplevel if it is the bound itself, or a term in some + * combination of `&` or `|` types. + */ + def adjust(tp: Type): Type = tp match + case tp: AndOrType => + val p1 = adjust(tp.tp1) + val p2 = adjust(tp.tp2) + if p1.exists && p2.exists then tp.derivedAndOrType(p1, p2) else NoType + case tp: TypeVar if constraint.contains(tp.origin) => + adjust(tp.underlying) + case tp: ExprType => + // ExprTypes are not value types, so type parameters should not + // be instantiated to ExprTypes. A scenario where such an attempted + // instantiation can happen is if we unify (=> T) => () with A => () + // where A is a TypeParamRef. See the comment on EtaExpansion.etaExpand + // why types such as (=> T) => () can be constructed and i7969.scala + // as a test where this happens. + // Note that scalac by contrast allows such instantiations. But letting + // type variables be ExprTypes has its own problems (e.g. you can't write + // the resulting types down) and is largely unknown terrain. 
+ NoType + case _ => + tp + + def description = i"constraint $param ${if isUpper then "<:" else ":>"} $rawBound to\n$constraint" + constr.println(i"adding $description$location") + if isUpper && rawBound.isRef(defn.NothingClass) && ctx.typerState.isGlobalCommittable then + def msg = i"!!! instantiated to Nothing: $param, constraint = $constraint" + if Config.failOnInstantiationToNothing + then assert(false, msg) + else report.log(msg) + def others = if isUpper then constraint.lower(param) else constraint.upper(param) + val bound = adjust(rawBound) + bound.exists + && addOneBound(param, bound, isUpper) && others.forall(addOneBound(_, bound, isUpper)) + .showing(i"added $description = $result$location", constr) + end addBoundTransitively + + protected def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + def description = i"ordering $p1 <: $p2 to\n$constraint" + val res = + if (constraint.isLess(p2, p1)) unify(p2, p1) + else { + val down1 = p1 :: constraint.exclusiveLower(p1, p2) + val up2 = p2 :: constraint.exclusiveUpper(p2, p1) + val lo1 = constraint.nonParamBounds(p1).lo + val hi2 = constraint.nonParamBounds(p2).hi + constr.println(i"adding $description down1 = $down1, up2 = $up2$location") + constraint = constraint.addLess(p1, p2) + down1.forall(addOneBound(_, hi2, isUpper = true)) && + up2.forall(addOneBound(_, lo1, isUpper = false)) + } + constr.println(i"added $description = $res$location") + res + } + + def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging + + /** Unify p1 with p2: one parameter will be kept in the constraint, the + * other will be removed and its bounds transferred to the remaining one. + * + * If p1 and p2 have different `nestingLevel`, the parameter with the lowest + * level will be kept and the transferred bounds from the other parameter + * will be adjusted for level-correctness. 
+ */ + private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + constr.println(s"unifying $p1 $p2") + if !constraint.isLess(p1, p2) then + constraint = constraint.addLess(p1, p2) + + val level1 = nestingLevel(p1) + val level2 = nestingLevel(p2) + val pKept = if level1 <= level2 then p1 else p2 + val pRemoved = if level1 <= level2 then p2 else p1 + + val down = constraint.exclusiveLower(p2, p1) + val up = constraint.exclusiveUpper(p1, p2) + + constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) + + val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) + var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) + + if level1 != level2 then + boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) + val TypeBounds(lo, hi) = boundRemoved: @unchecked + // After avoidance, the interval might be empty, e.g. in + // tests/pos/i8900-promote.scala: + // >: x.type <: Singleton + // becomes: + // >: Int <: Singleton + // In that case, we can still get a legal constraint + // by replacing the lower-bound to get: + // >: Int & Singleton <: Singleton + if !isSub(lo, hi) then + boundRemoved = TypeBounds(lo & hi, hi) + + val newBounds = (boundKept & boundRemoved).bounds + constraint = constraint.updateEntry(pKept, newBounds).replace(pRemoved, pKept) + + val lo = newBounds.lo + val hi = newBounds.hi + isSub(lo, hi) && + down.forall(addOneBound(_, hi, isUpper = true)) && + up.forall(addOneBound(_, lo, isUpper = false)) + } + + protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(using Context): Boolean = + if (whenFrozen) + isSubTypeWhenFrozen(tp1, tp2) + else + isSub(tp1, tp2) + + inline final def inFrozenConstraint[T](op: => T): T = { + val savedFrozen = frozenConstraint + val savedLambda = caseLambda + frozenConstraint = true + caseLambda = NoType + try op + finally { + frozenConstraint = savedFrozen + caseLambda = savedLambda + } 
+ } + + final def isSubTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSub(tp1, tp2)) + final def isSameTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSame(tp1, tp2)) + + /** Test whether the lower bounds of all parameters in this + * constraint are a solution to the constraint. + */ + protected final def isSatisfiable(using Context): Boolean = + constraint.forallParams { param => + val TypeBounds(lo, hi) = constraint.entry(param): @unchecked + isSub(lo, hi) || { + report.log(i"sub fail $lo <:< $hi") + false + } + } + + /** Fix instance type `tp` by avoidance so that it does not contain references + * to types at level > `maxLevel`. + * @param tp the type to be fixed + * @param fromBelow whether type was obtained from lower bound + * @param maxLevel the maximum level of references allowed + * @param param the parameter that was instantiated + */ + private def fixLevels(tp: Type, fromBelow: Boolean, maxLevel: Int, param: TypeParamRef)(using Context) = + + def needsFix(tp: NamedType) = + (tp.prefix eq NoPrefix) && tp.symbol.nestingLevel > maxLevel + + /** An accumulator that determines whether levels need to be fixed + * and computes on the side sets of nested type variables that need + * to be instantiated. + */ + def needsLeveling = new TypeAccumulator[Boolean]: + if !fromBelow then variance = -1 + + def apply(need: Boolean, tp: Type) = + need || tp.match + case tp: NamedType => + needsFix(tp) + || !stopBecauseStaticOrLocal(tp) && apply(need, tp.prefix) + case tp: TypeVar => + val inst = tp.instanceOpt + if inst.exists then apply(need, inst) + else if tp.nestingLevel > maxLevel then + // Change the nesting level of inner type variable to `maxLevel`. + // This means that the type variable will be instantiated later to a + // less nested type. 
If there are other references to the same type variable + // that do not come from the type undergoing `fixLevels`, this could lead + // to coarser types than intended. An alternative is to instantiate the + // type variable right away, but this also loses information. See + // i15934.scala for a test where the current strategy works but an early instantiation + // of `tp` would fail. + constr.println(i"widening nesting level of type variable $tp from ${tp.nestingLevel} to $maxLevel") + ctx.typerState.setNestingLevel(tp, maxLevel) + true + else false + case _ => + foldOver(need, tp) + end needsLeveling + + def levelAvoid = new TypeOps.AvoidMap: + if !fromBelow then variance = -1 + def toAvoid(tp: NamedType) = needsFix(tp) + + if Config.checkLevelsOnInstantiation && !ctx.isAfterTyper && needsLeveling(false, tp) then + typr.println(i"instance $tp for $param needs leveling to $maxLevel") + levelAvoid(tp) + else tp + end fixLevels + + /** Solve constraint set for given type parameter `param`. + * If `fromBelow` is true the parameter is approximated by its lower bound, + * otherwise it is approximated by its upper bound, unless the upper bound + * contains a reference to the parameter itself (such occurrences can arise + * for F-bounded types, `addOneBound` ensures that they never occur in the + * lower bound). + * The solved type is not allowed to contain references to types nested deeper + * than `maxLevel`. + * Wildcard types in bounds are approximated by their upper or lower bounds. + * The constraint is left unchanged. + * @return the instantiating type + * @pre `param` is in the constraint's domain. 
+ */ + final def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int)(using Context): Type = + constraint.entry(param) match + case entry: TypeBounds => + val useLowerBound = fromBelow || param.occursIn(entry.hi) + val rawInst = withUntrustedBounds( + if useLowerBound then fullLowerBound(param) else fullUpperBound(param)) + val levelInst = fixLevels(rawInst, fromBelow, maxLevel, param) + if levelInst ne rawInst then + typr.println(i"level avoid for $maxLevel: $rawInst --> $levelInst") + typr.println(i"approx $param, from below = $fromBelow, inst = $levelInst") + levelInst + case inst => + assert(inst.exists, i"param = $param\nconstraint = $constraint") + inst + end approximation + + /** If `tp` is an intersection such that some operands are transparent trait instances + * and others are not, replace as many transparent trait instances as possible with Any + * as long as the result is still a subtype of `bound`. But fall back to the + * original type if the resulting widened type is a supertype of all dropped + * types (since in this case the type was not a true intersection of transparent traits + * and other types to start with). 
+ */ + def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = + var kept: Set[Type] = Set() // types to keep since otherwise bound would not fit + var dropped: List[Type] = List() // the types dropped so far, last one on top + + def dropOneTransparentTrait(tp: Type): Type = + val tpd = tp.dealias + if tpd.typeSymbol.isTransparentTrait && !tpd.isLambdaSub && !kept.contains(tpd) then + dropped = tpd :: dropped + defn.AnyType + else tpd match + case AndType(tp1, tp2) => + val tp1w = dropOneTransparentTrait(tp1) + if tp1w ne tp1 then tp1w & tp2 + else + val tp2w = dropOneTransparentTrait(tp2) + if tp2w ne tp2 then tp1 & tp2w + else tpd + case _ => + tp + + def recur(tp: Type): Type = + val tpw = dropOneTransparentTrait(tp) + if tpw eq tp then tp + else if tpw <:< bound then recur(tpw) + else + kept += dropped.head + dropped = dropped.tail + recur(tp) + + val saved = ctx.typerState.snapshot() + val tpw = recur(tp) + if (tpw eq tp) || dropped.forall(_ frozen_<:< tpw) then + // Rollback any constraint change that would lead to `tp` no longer + // being a valid solution. + ctx.typerState.resetTo(saved) + tp + else + tpw + end dropTransparentTraits + + /** If `tp` is an applied match type alias which is also an unreducible application + * of a higher-kinded type to a wildcard argument, widen to the match type's bound, + * in order to avoid an unreducible application of higher-kinded type ... in inferred type" + * error in PostTyper. Fixes #11246. + */ + def widenIrreducible(tp: Type)(using Context): Type = tp match + case tp @ AppliedType(tycon, _) if tycon.isLambdaSub && tp.hasWildcardArg => + tp.superType match + case MatchType(bound, _, _) => bound + case _ => tp + case _ => + tp + + /** Widen inferred type `inst` with upper `bound`, according to the following rules: + * 1. If `inst` is a singleton type, or a union containing some singleton types, + * widen (all) the singleton type(s), provided the result is a subtype of `bound`. + * (i.e. 
`inst.widenSingletons <:< bound` succeeds with satisfiable constraint) + * 2a. If `inst` is a union type and `widenUnions` is true, approximate the union type + * from above by an intersection of all common base types, provided the result + * is a subtype of `bound`. + * 2b. If `inst` is a union type and `widenUnions` is false, turn it into a hard + * union type (except for unions | Null, which are kept in the state they were). + * 3. Widen some irreducible applications of higher-kinded types to wildcard arguments + * (see @widenIrreducible). + * 4. Drop transparent traits from intersections (see @dropTransparentTraits). + * + * Don't do these widenings if `bound` is a subtype of `scala.Singleton`. + * Also, if the result of these widenings is a TypeRef to a module class, + * and this type ref is different from `inst`, replace by a TermRef to + * its source module instead. + * + * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, + * as those could leak the annotation to users (see run/inferred-repeated-result). 
+ */ + def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = + def widenOr(tp: Type) = + if widenUnions then + val tpw = tp.widenUnion + if (tpw ne tp) && (tpw <:< bound) then tpw else tp + else tp.hardenUnions + + def widenSingle(tp: Type) = + val tpw = tp.widenSingletons + if (tpw ne tp) && (tpw <:< bound) then tpw else tp + + def isSingleton(tp: Type): Boolean = tp match + case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) + case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) + + val wideInst = + if isSingleton(bound) then inst + else dropTransparentTraits(widenIrreducible(widenOr(widenSingle(inst))), bound) + wideInst match + case wideInst: TypeRef if wideInst.symbol.is(Module) => + TermRef(wideInst.prefix, wideInst.symbol.sourceModule) + case _ => + wideInst.dropRepeatedAnnot + end widenInferred + + /** Convert all toplevel union types in `tp` to hard unions */ + extension (tp: Type) private def hardenUnions(using Context): Type = tp.widen match + case tp: AndType => + tp.derivedAndType(tp.tp1.hardenUnions, tp.tp2.hardenUnions) + case tp: RefinedType => + tp.derivedRefinedType(tp.parent.hardenUnions, tp.refinedName, tp.refinedInfo) + case tp: RecType => + tp.rebind(tp.parent.hardenUnions) + case tp: HKTypeLambda => + tp.derivedLambdaType(resType = tp.resType.hardenUnions) + case tp: OrType => + val tp1 = tp.stripNull + if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType) + else tp.derivedOrType(tp.tp1.hardenUnions, tp.tp2.hardenUnions, soft = false) + case _ => + tp + + /** The instance type of `param` in the current constraint (which contains `param`). + * If `fromBelow` is true, the instance type is the lub of the parameter's + * lower bounds; otherwise it is the glb of its upper bounds. However, + * a lower bound instantiation can be a singleton type only if the upper bound + * is also a singleton type. 
+ * The instance type is not allowed to contain references to types nested deeper + * than `maxLevel`. + */ + def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + val approx = approximation(param, fromBelow, maxLevel).simplified + if fromBelow then + val widened = widenInferred(approx, param, widenUnions) + // Widening can add extra constraints, in particular the widened type might + // be a type variable which is now instantiated to `param`, and therefore + // cannot be used as an instantiation of `param` without creating a loop. + // If that happens, we run `instanceType` again to find a new instantiation. + // (we do not check for non-toplevel occurrences: those should never occur + // since `addOneBound` disallows recursive lower bounds). + if constraint.occursAtToplevel(param, widened) then + instanceType(param, fromBelow, widenUnions, maxLevel) + else + widened + else + approx + } + + /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have + * for all poly params `p` defined in `c2` as `p >: L2 <: U2`: + * + * c1 defines p with bounds p >: L1 <: U1, and + * L2 <: L1, and + * U1 <: U2 + * + * Both `c1` and `c2` are required to derive from constraint `pre`, without adding + * any new type variables but possibly narrowing already registered ones with further bounds. + */ + protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint)(using Context): Boolean = + if (c2 eq pre) true + else if (c1 eq pre) false + else { + val saved = constraint + try + // We iterate over params of `pre`, instead of `c2` as the documentation may suggest. + // As neither `c1` nor `c2` can have more params than `pre`, this only matters in one edge case. + // Constraint#forallParams only iterates over params that can be directly constrained. 
+ // If `c2` has, compared to `pre`, instantiated a param and we iterated over params of `c2`, + // we could miss that param being instantiated to an incompatible type in `c1`. + pre.forallParams(p => + c1.entry(p).exists + && c2.upper(p).forall(c1.isLess(p, _)) + && isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)) + ) + finally constraint = saved + } + + /** The current bounds of type parameter `param` */ + def bounds(param: TypeParamRef)(using Context): TypeBounds = { + val e = constraint.entry(param) + if (e.exists) e.bounds + else { + // TODO: should we change the type of paramInfos to nullable? + val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos + if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala + else TypeBounds.empty + } + } + + /** Add type lambda `tl`, possibly with type variables `tvars`, to current constraint + * and propagate all bounds. + * @param tvars See Constraint#add + */ + def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = + checkPropagated(i"initialized $tl") { + constraint = constraint.add(tl, tvars) + tl.paramRefs.forall { param => + val lower = constraint.lower(param) + val upper = constraint.upper(param) + constraint.entry(param) match { + case bounds: TypeBounds => + if lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) + || upper.nonEmpty && !bounds.hi.isAny + then constr.println(i"INIT*** $tl") + lower.forall(addOneBound(_, bounds.hi, isUpper = true)) && + upper.forall(addOneBound(_, bounds.lo, isUpper = false)) + case x => + // Happens if param was already solved while processing earlier params of the same TypeLambda. + // See #4720. + true + } + } + } + + /** Can `param` be constrained with new bounds? */ + final def canConstrain(param: TypeParamRef): Boolean = + (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) + + /** Is `param` assumed to be a sub- and super-type of any other type? 
+ * This holds if `TypevarsMissContext` is set unless `param` is a part + * of a MatchType that is currently normalized. + */ + final def assumedTrue(param: TypeParamRef)(using Context): Boolean = + ctx.mode.is(Mode.TypevarsMissContext) && (caseLambda `ne` param.binder) + + /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. + * `bound` is assumed to be in normalized form, as specified in `firstTry` and + * `secondTry` of `TypeComparer`. In particular, it should not be an alias type, + * lazy ref, typevar, wildcard type, error type. In addition, upper bounds may + * not be AndTypes and lower bounds may not be OrTypes. This is assured by the + * way isSubType is organized. + */ + protected def addConstraint(param: TypeParamRef, bound: Type, fromBelow: Boolean)(using Context): Boolean = + if !bound.isValueTypeOrLambda then return false + + /** When comparing lambdas we might get constraints such as + * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter + * and `X0` is a lambda parameter. The constraint for `A` is not allowed + * to refer to such a lambda parameter because the lambda parameter is + * not visible where `A` is defined. Consequently, we need to + * approximate the bound so that the lambda parameter does not appear in it. + * If `tp` is an upper bound, we need to approximate with something smaller, + * otherwise something larger. + * Test case in pos/i94-nada.scala. This test crashes with an illegal instance + * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is + * missing. 
+ */ + def avoidLambdaParams(tp: Type) = + if comparedTypeLambdas.nonEmpty then + val approx = new ApproximatingTypeMap { + if (!fromBelow) variance = -1 + def apply(t: Type): Type = t match { + case t @ TypeParamRef(tl: TypeLambda, n) if comparedTypeLambdas contains tl => + val bounds = tl.paramInfos(n) + range(bounds.lo, bounds.hi) + case tl: TypeLambda => + val saved = comparedTypeLambdas + comparedTypeLambdas -= tl + try mapOver(tl) + finally comparedTypeLambdas = saved + case _ => + mapOver(t) + } + } + approx(tp) + else tp + + def addParamBound(bound: TypeParamRef) = + constraint.entry(param) match { + case _: TypeBounds => + if (fromBelow) addLess(bound, param) else addLess(param, bound) + case tp => + if (fromBelow) isSub(bound, tp) else isSub(tp, bound) + } + + def kindCompatible(tp1: Type, tp2: Type): Boolean = + val tparams1 = tp1.typeParams + val tparams2 = tp2.typeParams + tparams1.corresponds(tparams2)((p1, p2) => kindCompatible(p1.paramInfo, p2.paramInfo)) + && (tparams1.isEmpty || kindCompatible(tp1.hkResult, tp2.hkResult)) + || tp1.hasAnyKind + || tp2.hasAnyKind + + def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint" + + //checkPropagated(s"adding $description")(true) // DEBUG in case following fails + checkPropagated(s"added $description") { + addConstraintInvocations += 1 + val saved = canWidenAbstract + canWidenAbstract = true + try bound match + case bound: TypeParamRef if constraint contains bound => + addParamBound(bound) + case _ => + val pbound = avoidLambdaParams(bound) + kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) + finally + canWidenAbstract = saved + addConstraintInvocations -= 1 + } + end addConstraint + + /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ + def checkPropagated(msg: => String)(result: Boolean)(using Context): Boolean = { + if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) + inFrozenConstraint { + for (p <- constraint.domainParams) { + def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = + assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") + for (u <- constraint.upper(p)) + check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") + for (l <- constraint.lower(p)) { + check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") + check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") + } + } + } + result + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala new file mode 100644 index 000000000000..d2b1246a8149 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala @@ -0,0 +1,23 @@ +package dotty.tools.dotc +package core + +import Contexts._ +import config.Printers.{default, typr} + +trait ConstraintRunInfo { self: Run => + private var maxSize = 0 + private var maxConstraint: Constraint | Null = _ + def recordConstraintSize(c: Constraint, size: Int): Unit = + if (size > maxSize) { + maxSize = size + maxConstraint = c + } + def printMaxConstraint()(using Context): Unit = + if maxSize > 0 then + val printer = if ctx.settings.YdetailedStats.value then default else typr + printer.println(s"max constraint size: $maxSize") + try printer.println(s"max constraint = ${maxConstraint.nn.show}") + catch case ex: StackOverflowError => printer.println("max constraint cannot be printed due to stack overflow") + + protected def reset(): Unit = maxConstraint = null +} diff --git a/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala new file mode 100644 
index 000000000000..aa85f714a8e5 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala @@ -0,0 +1,113 @@ +package dotty.tools.dotc +package core + +import Contexts._, Symbols._, Types._, Flags._ +import Denotations._, SymDenotations._ +import Names.Name, StdNames.nme +import ast.untpd + +/** Extension methods for contexts where we want to keep the ctx. syntax */ +object ContextOps: + + extension (ctx: Context) + + /** Enter symbol into current class, if current class is owner of current context, + * or into current scope, if not. Should always be called instead of scope.enter + * in order to make sure that updates to class members are reflected in + * finger prints. + */ + def enter(sym: Symbol): Symbol = inContext(ctx) { + ctx.owner match + case cls: ClassSymbol => cls.classDenot.enter(sym) + case _ => ctx.scope.openForMutations.enter(sym) + sym + } + + /** The denotation with the given `name` and all `required` flags in current context + */ + def denotNamed(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + inContext(ctx) { + if (ctx.owner.isClass) + if (ctx.outer.owner == ctx.owner) { // inner class scope; check whether we are referring to self + if (ctx.scope.size == 1) { + val elem = ctx.scope.lastEntry.nn + if (elem.name == name) return elem.sym.denot // return self + } + val pre = ctx.owner.thisType + if ctx.isJava then javaFindMember(name, pre, required, excluded) + else pre.findMember(name, pre, required, excluded) + } + else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. + ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) + else + ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) + } + + final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + assert(ctx.isJava) + inContext(ctx) { + + val preSym = pre.typeSymbol + + // 1. 
Try to search in current type and parents. + val directSearch = pre.findMember(name, pre, required, excluded) + + // 2. Try to search in companion class if current is an object. + def searchCompanionClass = if preSym.is(Flags.Module) then + preSym.companionClass.thisType.findMember(name, pre, required, excluded) + else NoDenotation + + // 3. Try to search in companion objects of super classes. + // In Java code, static inner classes, which we model as members of the companion object, + // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. + def searchSuperCompanionObjects = + val toSearch = if preSym.is(Flags.Module) then + if preSym.companionClass.exists then + preSym.companionClass.asClass.baseClasses + else Nil + else + preSym.asClass.baseClasses + + toSearch.iterator.map { bc => + val pre1 = bc.companionModule.namedType + pre1.findMember(name, pre1, required, excluded) + }.find(_.exists).getOrElse(NoDenotation) + + if preSym.isClass then + directSearch orElse searchCompanionClass orElse searchSuperCompanionObjects + else + directSearch + } + + /** A fresh local context with given tree and owner. + * Owner might not exist (can happen for self valdefs), in which case + * no owner is set in result context + */ + def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx) { + val freshCtx = ctx.fresh.setTree(tree) + if owner.exists then freshCtx.setOwner(owner) else freshCtx + } + + /** Context where `sym` is defined, assuming we are in a nested context. 
*/ + def defContext(sym: Symbol): Context = inContext(ctx) { + ctx.outersIterator + .dropWhile(_.owner != sym) + .dropWhile(_.owner == sym) + .next() + } + + /** A new context for the interior of a class */ + def inClassContext(selfInfo: TypeOrSymbol): Context = inContext(ctx) { + val localCtx: Context = ctx.fresh.setNewScope + selfInfo match { + case sym: Symbol if sym.exists && sym.name != nme.WILDCARD => localCtx.scope.openForMutations.enter(sym) + case _ => + } + localCtx + } + + def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { + if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) + else ctx + } +end ContextOps diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala new file mode 100644 index 000000000000..68620c6d3fe7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala @@ -0,0 +1,1000 @@ +package dotty.tools +package dotc +package core + +import interfaces.CompilerCallback +import Decorators._ +import Periods._ +import Names._ +import Phases._ +import Types._ +import Symbols._ +import Scopes._ +import Uniques._ +import ast.Trees._ +import ast.untpd +import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} +import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} +import inlines.Inliner +import Nullables._ +import Implicits.ContextualImplicits +import config.Settings._ +import config.Config +import reporting._ +import io.{AbstractFile, NoAbstractFile, PlainFile, Path} +import scala.io.Codec +import collection.mutable +import printing._ +import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} +import classfile.ReusableDataReader +import StdNames.nme + +import scala.annotation.internal.sharable + +import DenotTransformers.DenotTransformer +import dotty.tools.dotc.profile.Profiler +import util.Property.Key +import util.Store +import xsbti.AnalysisCallback 
+import plugins._ +import java.util.concurrent.atomic.AtomicInteger +import java.nio.file.InvalidPathException +import language.experimental.pureFunctions + +object Contexts { + + private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() + private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() + private val (printerFnLoc, store3) = store2.newLocation[Context -> Printer](new RefinedPrinter(_)) + private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() + private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() + private val (runLoc, store6) = store5.newLocation[Run | Null]() + private val (profilerLoc, store7) = store6.newLocation[Profiler]() + private val (notNullInfosLoc, store8) = store7.newLocation[List[NotNullInfo]]() + private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() + private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) + + private val initialStore = store10 + + /** The current context */ + inline def ctx(using ctx: Context): Context = ctx + + /** Run `op` with given context */ + inline def inContext[T](c: Context)(inline op: Context ?=> T): T = + op(using c) + + /** Execute `op` at given period */ + inline def atPeriod[T](pd: Period)(inline op: Context ?=> T)(using Context): T = + op(using ctx.fresh.setPeriod(pd)) + + /** Execute `op` at given phase id */ + inline def atPhase[T](pid: PhaseId)(inline op: Context ?=> T)(using Context): T = + op(using ctx.withPhase(pid)) + + /** Execute `op` at given phase */ + inline def atPhase[T](phase: Phase)(inline op: Context ?=> T)(using Context): T = + op(using ctx.withPhase(phase)) + + inline def atNextPhase[T](inline op: Context ?=> T)(using Context): T = + atPhase(ctx.phase.next)(op) + + /** Execute `op` at the current phase if it's before the first transform phase, + * otherwise at the last phase before the first transform phase. 
+ * + * Note: this should be used instead of `atPhaseNoLater(ctx.picklerPhase)` + * because the later won't work if the `Pickler` phase is not present (for example, + * when using `QuoteCompiler`). + */ + inline def atPhaseBeforeTransforms[T](inline op: Context ?=> T)(using Context): T = + atPhaseNoLater(firstTransformPhase.prev)(op) + + inline def atPhaseNoLater[T](limit: Phase)(inline op: Context ?=> T)(using Context): T = + op(using if !limit.exists || ctx.phase <= limit then ctx else ctx.withPhase(limit)) + + inline def atPhaseNoEarlier[T](limit: Phase)(inline op: Context ?=> T)(using Context): T = + op(using if !limit.exists || limit <= ctx.phase then ctx else ctx.withPhase(limit)) + + inline def inMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + op(using if mode != ctx.mode then ctx.fresh.setMode(mode) else ctx) + + inline def withMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + inMode(ctx.mode | mode)(op) + + inline def withoutMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + inMode(ctx.mode &~ mode)(op) + + /** A context is passed basically everywhere in dotc. + * This is convenient but carries the risk of captured contexts in + * objects that turn into space leaks. To combat this risk, here are some + * conventions to follow: + * + * - Never let an implicit context be an argument of a class whose instances + * live longer than the context. + * - Classes that need contexts for their initialization take an explicit parameter + * named `initctx`. They pass initctx to all positions where it is needed + * (and these positions should all be part of the intialization sequence of the class). + * - Classes that need contexts that survive initialization are instead passed + * a "condensed context", typically named `cctx` (or they create one). Condensed contexts + * just add some basic information to the context base without the + * risk of capturing complete trees. 
+ * - To make sure these rules are kept, it would be good to do a sanity + * check using bytecode inspection with javap or scalap: Keep track + * of all class fields of type context; allow them only in whitelisted + * classes (which should be short-lived). + */ + abstract class Context(val base: ContextBase) { thiscontext => + + given Context = this + + /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ + def outersIterator: Iterator[Context] = new Iterator[Context] { + var current = thiscontext + def hasNext = current != NoContext + def next = { val c = current; current = current.outer; c } + } + + /** The outer context */ + private var _outer: Context = _ + protected def outer_=(outer: Context): Unit = _outer = outer + final def outer: Context = _outer + + /** The current context */ + private var _period: Period = _ + protected def period_=(period: Period): Unit = { + assert(period.firstPhaseId == period.lastPhaseId, period) + _period = period + } + final def period: Period = _period + + /** The scope nesting level */ + private var _mode: Mode = _ + protected def mode_=(mode: Mode): Unit = _mode = mode + final def mode: Mode = _mode + + /** The current owner symbol */ + private var _owner: Symbol = _ + protected def owner_=(owner: Symbol): Unit = _owner = owner + final def owner: Symbol = _owner + + /** The current tree */ + private var _tree: Tree[? >: Untyped]= _ + protected def tree_=(tree: Tree[? >: Untyped]): Unit = _tree = tree + final def tree: Tree[? 
>: Untyped] = _tree + + /** The current scope */ + private var _scope: Scope = _ + protected def scope_=(scope: Scope): Unit = _scope = scope + final def scope: Scope = _scope + + /** The current typerstate */ + private var _typerState: TyperState = _ + protected def typerState_=(typerState: TyperState): Unit = _typerState = typerState + final def typerState: TyperState = _typerState + + /** The current bounds in force for type parameters appearing in a GADT */ + private var _gadt: GadtConstraint = _ + protected def gadt_=(gadt: GadtConstraint): Unit = _gadt = gadt + final def gadt: GadtConstraint = _gadt + + /** The history of implicit searches that are currently active */ + private var _searchHistory: SearchHistory = _ + protected def searchHistory_= (searchHistory: SearchHistory): Unit = _searchHistory = searchHistory + final def searchHistory: SearchHistory = _searchHistory + + /** The current source file */ + private var _source: SourceFile = _ + protected def source_=(source: SourceFile): Unit = _source = source + final def source: SourceFile = _source + + /** A map in which more contextual properties can be stored + * Typically used for attributes that are read and written only in special situations. + */ + private var _moreProperties: Map[Key[Any], Any] = _ + protected def moreProperties_=(moreProperties: Map[Key[Any], Any]): Unit = _moreProperties = moreProperties + final def moreProperties: Map[Key[Any], Any] = _moreProperties + + def property[T](key: Key[T]): Option[T] = + moreProperties.get(key).asInstanceOf[Option[T]] + + /** A store that can be used by sub-components. + * Typically used for attributes that are defined only once per compilation unit. + * Access to store entries is much faster than access to properties, and only + * slightly slower than a normal field access would be. 
+ */ + private var _store: Store = _ + protected def store_=(store: Store): Unit = _store = store + final def store: Store = _store + + /** The compiler callback implementation, or null if no callback will be called. */ + def compilerCallback: CompilerCallback = store(compilerCallbackLoc) + + /** The sbt callback implementation if we are run from sbt, null otherwise */ + def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) + + /** The current plain printer */ + def printerFn: Context -> Printer = store(printerFnLoc) + + /** A function creating a printer */ + def printer: Printer = + val pr = printerFn(this) + if this.settings.YplainPrinter.value then pr.plain else pr + + /** The current settings values */ + def settingsState: SettingsState = store(settingsStateLoc) + + /** The current compilation unit */ + def compilationUnit: CompilationUnit = store(compilationUnitLoc) + + /** The current compiler-run */ + def run: Run | Null = store(runLoc) + + /** The current compiler-run profiler */ + def profiler: Profiler = store(profilerLoc) + + /** The paths currently known to be not null */ + def notNullInfos: List[NotNullInfo] = store(notNullInfosLoc) + + /** The currently active import info */ + def importInfo: ImportInfo | Null = store(importInfoLoc) + + /** The current type assigner or typer */ + def typeAssigner: TypeAssigner = store(typeAssignerLoc) + + /** The new implicit references that are introduced by this scope */ + protected var implicitsCache: ContextualImplicits | Null = null + def implicits: ContextualImplicits = { + if (implicitsCache == null) + implicitsCache = { + val implicitRefs: List[ImplicitRef] = + if (isClassDefContext) + try owner.thisType.implicitMembers + catch { + case ex: CyclicReference => Nil + } + else if (isImportContext) importInfo.nn.importedImplicits + else if (isNonEmptyScopeContext) scope.implicitDecls + else Nil + val outerImplicits = + if (isImportContext && importInfo.nn.unimported.exists) + outer.implicits exclude 
importInfo.nn.unimported + else + outer.implicits + if (implicitRefs.isEmpty) outerImplicits + else new ContextualImplicits(implicitRefs, outerImplicits, isImportContext)(this) + } + implicitsCache.nn + } + + /** Either the current scope, or, if the current context owner is a class, + * the declarations of the current class. + */ + def effectiveScope(using Context): Scope = + val myOwner: Symbol | Null = owner + if myOwner != null && myOwner.isClass then myOwner.asClass.unforcedDecls + else scope + + def nestingLevel: Int = effectiveScope.nestingLevel + + /** Sourcefile corresponding to given abstract file, memoized */ + def getSource(file: AbstractFile, codec: -> Codec = Codec(settings.encoding.value)) = { + util.Stats.record("Context.getSource") + base.sources.getOrElseUpdate(file, SourceFile(file, codec)) + } + + /** SourceFile with given path name, memoized */ + def getSource(path: TermName): SourceFile = getFile(path) match + case NoAbstractFile => NoSource + case file => getSource(file) + + /** SourceFile with given path, memoized */ + def getSource(path: String): SourceFile = getSource(path.toTermName) + + /** AbstraFile with given path name, memoized */ + def getFile(name: TermName): AbstractFile = base.files.get(name) match + case Some(file) => + file + case None => + try + val file = new PlainFile(Path(name.toString)) + base.files(name) = file + file + catch + case ex: InvalidPathException => + report.error(s"invalid file path: ${ex.getMessage}") + NoAbstractFile + + /** AbstractFile with given path, memoized */ + def getFile(name: String): AbstractFile = getFile(name.toTermName) + + + private var related: SimpleIdentityMap[Phase | SourceFile, Context] | Null = null + + private def lookup(key: Phase | SourceFile): Context | Null = + util.Stats.record("Context.related.lookup") + if related == null then + related = SimpleIdentityMap.empty + null + else + related.nn(key) + + private def withPhase(phase: Phase, pid: PhaseId): Context = + 
util.Stats.record("Context.withPhase") + val curId = phaseId + if curId == pid then + this + else + var ctx1 = lookup(phase) + if ctx1 == null then + util.Stats.record("Context.withPhase.new") + ctx1 = fresh.setPhase(pid) + related = related.nn.updated(phase, ctx1) + ctx1 + + final def withPhase(phase: Phase): Context = withPhase(phase, phase.id) + final def withPhase(pid: PhaseId): Context = withPhase(base.phases(pid), pid) + + final def withSource(source: SourceFile): Context = + util.Stats.record("Context.withSource") + if this.source eq source then + this + else + var ctx1 = lookup(source) + if ctx1 == null then + util.Stats.record("Context.withSource.new") + val ctx2 = fresh.setSource(source) + if ctx2.compilationUnit eq NoCompilationUnit then + // `source` might correspond to a file not necessarily + // in the current project (e.g. when inlining library code), + // so set `mustExist` to false. + ctx2.setCompilationUnit(CompilationUnit(source, mustExist = false)) + ctx1 = ctx2 + related = related.nn.updated(source, ctx2) + ctx1 + + // `creationTrace`-related code. To enable, uncomment the code below and the + // call to `setCreationTrace()` in this file. + /* + /** If -Ydebug is on, the top of the stack trace where this context + * was created, otherwise `null`. 
+ */ + private var creationTrace: Array[StackTraceElement] = _ + + private def setCreationTrace() = + creationTrace = (new Throwable).getStackTrace().take(20) + + /** Print all enclosing context's creation stacktraces */ + def printCreationTraces() = { + println("=== context creation trace =======") + for (ctx <- outersIterator) { + println(s">>>>>>>>> $ctx") + if (ctx.creationTrace != null) println(ctx.creationTrace.mkString("\n")) + } + println("=== end context creation trace ===") + } + */ + + /** The current reporter */ + def reporter: Reporter = typerState.reporter + + final def phase: Phase = base.phases(period.firstPhaseId) + final def runId = period.runId + final def phaseId = period.phaseId + + final def lastPhaseId = base.phases.length - 1 + + /** Does current phase use an erased types interpretation? */ + final def erasedTypes = phase.erasedTypes + + /** Are we in a Java compilation unit? */ + final def isJava: Boolean = compilationUnit.isJava + + /** Is current phase after TyperPhase? */ + final def isAfterTyper = base.isAfterTyper(phase) + final def isTyper = base.isTyper(phase) + + /** Is this a context for the members of a class definition? */ + def isClassDefContext: Boolean = + owner.isClass && (owner ne outer.owner) + + /** Is this a context that introduces an import clause? */ + def isImportContext: Boolean = + (this ne NoContext) + && (outer ne NoContext) + && (this.importInfo nen outer.importInfo) + + /** Is this a context that introduces a non-empty scope? */ + def isNonEmptyScopeContext: Boolean = + (this.scope ne outer.scope) && !this.scope.isEmpty + + /** Is this a context for typechecking an inlined body? 
*/ + def isInlineContext: Boolean = + typer.isInstanceOf[Inliner#InlineTyper] + + /** The next outer context whose tree is a template or package definition + * Note: Currently unused + def enclTemplate: Context = { + var c = this + while (c != NoContext && !c.tree.isInstanceOf[Template[?]] && !c.tree.isInstanceOf[PackageDef[?]]) + c = c.outer + c + }*/ + + /** The context for a supercall. This context is used for elaborating + * the parents of a class and their arguments. + * The context is computed from the current class context. It has + * + * - as owner: The primary constructor of the class + * - as outer context: The context enclosing the class context + * - as scope: The parameter accessors in the class context + * + * The reasons for this peculiar choice of attributes are as follows: + * + * - The constructor must be the owner, because that's where any local methods or closures + * should go. + * - The context may not see any class members (inherited or defined), and should + * instead see definitions defined in the outer context which might be shadowed by + * such class members. That's why the outer context must be the outer context of the class. + * - At the same time the context should see the parameter accessors of the current class, + * that's why they get added to the local scope. An alternative would have been to have the + * context see the constructor parameters instead, but then we'd need a final substitution step + * from constructor parameters to class parameter accessors. + */ + def superCallContext: Context = { + val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*) + superOrThisCallContext(owner.primaryConstructor, locals) + } + + /** The context for the arguments of a this(...) constructor call. + * The context is computed from the local auxiliary constructor context. 
+ * It has + * + * - as owner: The auxiliary constructor + * - as outer context: The context enclosing the enclosing class context + * - as scope: The parameters of the auxiliary constructor. + */ + def thisCallArgContext: Context = { + val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next() + superOrThisCallContext(owner, constrCtx.scope) + .setTyperState(typerState) + .setGadt(gadt) + .fresh + .setScope(this.scope) + } + + /** The super- or this-call context with given owner and locals. */ + private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { + var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next() + classCtx.outer.fresh.setOwner(owner) + .setScope(locals) + .setMode(classCtx.mode) + } + + /** The context of expression `expr` seen as a member of a statement sequence */ + def exprContext(stat: Tree[? >: Untyped], exprOwner: Symbol): Context = + if (exprOwner == this.owner) this + else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext + else fresh.setOwner(exprOwner) + + /** A new context that summarizes an import statement */ + def importContext(imp: Import[?], sym: Symbol): FreshContext = + fresh.setImportInfo(ImportInfo(sym, imp.selectors, imp.expr)) + + /** Is the debug option set? */ + def debug: Boolean = base.settings.Ydebug.value + + /** Is the verbose option set? */ + def verbose: Boolean = base.settings.verbose.value + + /** Should use colors when printing? */ + def useColors: Boolean = + base.settings.color.value == "always" + + /** Is the explicit nulls option set? 
*/ + def explicitNulls: Boolean = base.settings.YexplicitNulls.value + + /** Initialize all context fields, except typerState, which has to be set separately + * @param outer The outer context + * @param origin The context from which fields are copied + */ + private[Contexts] def init(outer: Context, origin: Context): this.type = { + _outer = outer + _period = origin.period + _mode = origin.mode + _owner = origin.owner + _tree = origin.tree + _scope = origin.scope + _gadt = origin.gadt + _searchHistory = origin.searchHistory + _source = origin.source + _moreProperties = origin.moreProperties + _store = origin.store + this + } + + def reuseIn(outer: Context): this.type = + implicitsCache = null + related = null + init(outer, outer) + + /** A fresh clone of this context embedded in this context. */ + def fresh: FreshContext = freshOver(this) + + /** A fresh clone of this context embedded in the specified `outer` context. */ + def freshOver(outer: Context): FreshContext = + util.Stats.record("Context.fresh") + FreshContext(base).init(outer, this).setTyperState(this.typerState) + + final def withOwner(owner: Symbol): Context = + if (owner ne this.owner) fresh.setOwner(owner) else this + + final def withTyperState(typerState: TyperState): Context = + if typerState ne this.typerState then fresh.setTyperState(typerState) else this + + final def withUncommittedTyperState: Context = + withTyperState(typerState.uncommittedAncestor) + + final def withProperty[T](key: Key[T], value: Option[T]): Context = + if (property(key) == value) this + else value match { + case Some(v) => fresh.setProperty(key, v) + case None => fresh.dropProperty(key) + } + + def typer: Typer = this.typeAssigner match { + case typer: Typer => typer + case _ => new Typer + } + + override def toString: String = + def iinfo(using Context) = + val info = ctx.importInfo + if (info == null) "" else i"${info.selectors}%, %" + def cinfo(using Context) = + val core = s" owner = ${ctx.owner}, scope = ${ctx.scope}, 
import = $iinfo" + if (ctx ne NoContext) && (ctx.implicits ne ctx.outer.implicits) then + s"$core, implicits = ${ctx.implicits}" + else + core + s"""Context( + |${outersIterator.map(ctx => cinfo(using ctx)).mkString("\n\n")})""".stripMargin + + def settings: ScalaSettings = base.settings + def definitions: Definitions = base.definitions + def platform: Platform = base.platform + def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying + def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes + def uniques: util.WeakHashSet[Type] = base.uniques + + def initialize()(using Context): Unit = base.initialize() + } + + /** A condensed context provides only a small memory footprint over + * a Context base, and therefore can be stored without problems in + * long-lived objects. + abstract class CondensedContext extends Context { + override def condensed = this + } + */ + + /** A fresh context allows selective modification + * of its attributes using the with... methods. + */ + class FreshContext(base: ContextBase) extends Context(base) { + def setPeriod(period: Period): this.type = + util.Stats.record("Context.setPeriod") + this.period = period + this + def setMode(mode: Mode): this.type = + util.Stats.record("Context.setMode") + this.mode = mode + this + def setOwner(owner: Symbol): this.type = + util.Stats.record("Context.setOwner") + assert(owner != NoSymbol) + this.owner = owner + this + def setTree(tree: Tree[? 
>: Untyped]): this.type = + util.Stats.record("Context.setTree") + this.tree = tree + this + def setScope(scope: Scope): this.type = { this.scope = scope; this } + def setNewScope: this.type = + util.Stats.record("Context.setScope") + this.scope = newScope + this + def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this } + def setNewTyperState(): this.type = setTyperState(typerState.fresh(committable = true)) + def setExploreTyperState(): this.type = setTyperState(typerState.fresh(committable = false)) + def setReporter(reporter: Reporter): this.type = setTyperState(typerState.fresh().setReporter(reporter)) + def setTyper(typer: Typer): this.type = { this.scope = typer.scope; setTypeAssigner(typer) } + def setGadt(gadt: GadtConstraint): this.type = + util.Stats.record("Context.setGadt") + this.gadt = gadt + this + def setFreshGADTBounds: this.type = setGadt(gadt.fresh) + def setSearchHistory(searchHistory: SearchHistory): this.type = + util.Stats.record("Context.setSearchHistory") + this.searchHistory = searchHistory + this + def setSource(source: SourceFile): this.type = + util.Stats.record("Context.setSource") + this.source = source + this + private def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = + util.Stats.record("Context.setMoreProperties") + this.moreProperties = moreProperties + this + private def setStore(store: Store): this.type = + util.Stats.record("Context.setStore") + this.store = store + this + def setImplicits(implicits: ContextualImplicits): this.type = { this.implicitsCache = implicits; this } + + def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { + setSource(compilationUnit.source) + updateStore(compilationUnitLoc, compilationUnit) + } + + def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) + def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) + def 
setPrinterFn(printer: Context -> Printer): this.type = updateStore(printerFnLoc, printer) + def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, settingsState) + def setRun(run: Run | Null): this.type = updateStore(runLoc, run) + def setProfiler(profiler: Profiler): this.type = updateStore(profilerLoc, profiler) + def setNotNullInfos(notNullInfos: List[NotNullInfo]): this.type = updateStore(notNullInfosLoc, notNullInfos) + def setImportInfo(importInfo: ImportInfo): this.type = + importInfo.mentionsFeature(nme.unsafeNulls) match + case Some(true) => + setMode(this.mode &~ Mode.SafeNulls) + case Some(false) if ctx.settings.YexplicitNulls.value => + setMode(this.mode | Mode.SafeNulls) + case _ => + updateStore(importInfoLoc, importInfo) + def setTypeAssigner(typeAssigner: TypeAssigner): this.type = updateStore(typeAssignerLoc, typeAssigner) + + def setProperty[T](key: Key[T], value: T): this.type = + setMoreProperties(moreProperties.updated(key, value)) + + def dropProperty(key: Key[?]): this.type = + setMoreProperties(moreProperties - key) + + def addLocation[T](initial: T): Store.Location[T] = { + val (loc, store1) = store.newLocation(initial) + setStore(store1) + loc + } + + def addLocation[T](): Store.Location[T] = { + val (loc, store1) = store.newLocation[T]() + setStore(store1) + loc + } + + def updateStore[T](loc: Store.Location[T], value: T): this.type = + setStore(store.updated(loc, value)) + + def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid)) + def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end)) + + def setSetting[T](setting: Setting[T], value: T): this.type = + setSettings(setting.updateIn(settingsState, value)) + + def setDebug: this.type = setSetting(base.settings.Ydebug, true) + } + + given ops: AnyRef with + extension (c: Context) + def addNotNullInfo(info: NotNullInfo) = + c.withNotNullInfos(c.notNullInfos.extendWith(info)) + + def addNotNullRefs(refs: 
Set[TermRef]) = + c.addNotNullInfo(NotNullInfo(refs, Set())) + + def withNotNullInfos(infos: List[NotNullInfo]): Context = + if c.notNullInfos eq infos then c else c.fresh.setNotNullInfos(infos) + + def relaxedOverrideContext: Context = + c.withModeBits(c.mode &~ Mode.SafeNulls | Mode.RelaxedOverriding) + end ops + + // TODO: Fix issue when converting ModeChanges and FreshModeChanges to extension givens + extension (c: Context) { + final def withModeBits(mode: Mode): Context = + if (mode != c.mode) c.fresh.setMode(mode) else c + + final def addMode(mode: Mode): Context = withModeBits(c.mode | mode) + final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode) + } + + extension (c: FreshContext) { + final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode) + final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) + } + + private def exploreCtx(using Context): FreshContext = + util.Stats.record("explore") + val base = ctx.base + import base._ + val nestedCtx = + if exploresInUse < exploreContexts.size then + exploreContexts(exploresInUse).reuseIn(ctx) + else + val ts = TyperState() + .setReporter(ExploringReporter()) + .setCommittable(false) + val c = FreshContext(ctx.base).init(ctx, ctx).setTyperState(ts) + exploreContexts += c + c + exploresInUse += 1 + val nestedTS = nestedCtx.typerState + nestedTS.init(ctx.typerState, ctx.typerState.constraint) + nestedCtx + + private def wrapUpExplore(ectx: Context) = + ectx.reporter.asInstanceOf[ExploringReporter].reset() + ectx.base.exploresInUse -= 1 + + inline def explore[T](inline op: Context ?=> T)(using Context): T = + val ectx = exploreCtx + try op(using ectx) finally wrapUpExplore(ectx) + + inline def exploreInFreshCtx[T](inline op: FreshContext ?=> T)(using Context): T = + val ectx = exploreCtx + try op(using ectx) finally wrapUpExplore(ectx) + + private def changeOwnerCtx(owner: Symbol)(using Context): Context = + val base = ctx.base + import base._ + val nestedCtx = + if 
changeOwnersInUse < changeOwnerContexts.size then + changeOwnerContexts(changeOwnersInUse).reuseIn(ctx) + else + val c = FreshContext(ctx.base).init(ctx, ctx) + changeOwnerContexts += c + c + changeOwnersInUse += 1 + nestedCtx.setOwner(owner).setTyperState(ctx.typerState) + + /** Run `op` in current context, with a mode is temporarily set as specified. + */ + inline def runWithOwner[T](owner: Symbol)(inline op: Context ?=> T)(using Context): T = + if Config.reuseOwnerContexts then + try op(using changeOwnerCtx(owner)) + finally ctx.base.changeOwnersInUse -= 1 + else + op(using ctx.fresh.setOwner(owner)) + + /** The type comparer of the kind created by `maker` to be used. + * This is the currently active type comparer CMP if + * - CMP is associated with the current context, and + * - CMP is of the kind created by maker or maker creates a plain type comparer. + * Note: plain TypeComparers always take on the kind of the outer comparer if they are in the same context. + * In other words: tracking or explaining is a sticky property in the same context. + */ + private def comparer(using Context): TypeComparer = + util.Stats.record("comparing") + val base = ctx.base + if base.comparersInUse > 0 + && (base.comparers(base.comparersInUse - 1).comparerContext eq ctx) + then + base.comparers(base.comparersInUse - 1).currentInstance + else + val result = + if base.comparersInUse < base.comparers.size then + base.comparers(base.comparersInUse) + else + val result = TypeComparer(ctx) + base.comparers += result + result + base.comparersInUse += 1 + result.init(ctx) + result + + inline def comparing[T](inline op: TypeComparer => T)(using Context): T = + util.Stats.record("comparing") + val saved = ctx.base.comparersInUse + try op(comparer) + finally ctx.base.comparersInUse = saved + end comparing + + /** A class defining the initial context with given context base + * and set of possible settings. 
+ */ + private class InitialContext(base: ContextBase, settingsGroup: SettingGroup) extends FreshContext(base) { + outer = NoContext + period = InitialPeriod + mode = Mode.None + typerState = TyperState.initialState() + owner = NoSymbol + tree = untpd.EmptyTree + moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) + scope = EmptyScope + source = NoSource + store = initialStore + .updated(settingsStateLoc, settingsGroup.defaultState) + .updated(notNullInfosLoc, Nil) + .updated(compilationUnitLoc, NoCompilationUnit) + searchHistory = new SearchRoot + gadt = GadtConstraint.empty + } + + @sharable object NoContext extends Context((null: ContextBase | Null).uncheckedNN) { + source = NoSource + override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(this: @unchecked) + } + + /** A context base defines state and associated methods that exist once per + * compiler run. + */ + class ContextBase extends ContextState + with Phases.PhasesBase + with Plugins { + + /** The applicable settings */ + val settings: ScalaSettings = new ScalaSettings + + /** The initial context */ + val initialCtx: Context = new InitialContext(this, settings) + + /** The platform, initialized by `initPlatform()`. */ + private var _platform: Platform | Null = _ + + /** The platform */ + def platform: Platform = { + val p = _platform + if p == null then + throw new IllegalStateException( + "initialize() must be called before accessing platform") + p + } + + protected def newPlatform(using Context): Platform = + if (settings.scalajs.value) new SJSPlatform + else new JavaPlatform + + /** The loader that loads the members of _root_ */ + def rootLoader(root: TermSymbol)(using Context): SymbolLoader = platform.rootLoader(root) + + /** The standard definitions */ + val definitions: Definitions = new Definitions + + // Set up some phases to get started */ + usePhases(List(SomePhase)) + + /** Initializes the `ContextBase` with a starting context. 
+ * This initializes the `platform` and the `definitions`. + */ + def initialize()(using Context): Unit = { + _platform = newPlatform + definitions.init() + } + + def fusedContaining(p: Phase): Phase = + allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) + } + + /** The essential mutable state of a context base, collected into a common class */ + class ContextState { + // Symbols state + + /** Counter for unique symbol ids */ + private var _nextSymId: Int = 0 + def nextSymId: Int = { _nextSymId += 1; _nextSymId } + + /** Sources and Files that were loaded */ + val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() + val files: util.HashMap[TermName, AbstractFile] = util.HashMap() + + // Types state + /** A table for hash consing unique types */ + private[core] val uniques: Uniques = Uniques() + + /** A table for hash consing unique applied types */ + private[dotc] val uniqueAppliedTypes: AppliedUniques = AppliedUniques() + + /** A table for hash consing unique named types */ + private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques() + + var emptyTypeBounds: TypeBounds | Null = null + var emptyWildcardBounds: WildcardType | Null = null + + /** Number of findMember calls on stack */ + private[core] var findMemberCount: Int = 0 + + /** List of names which have a findMemberCall on stack, + * after Config.LogPendingFindMemberThreshold is reached. + */ + private[core] var pendingMemberSearches: List[Name] = Nil + + /** The number of recursive invocation of underlying on a NamedType + * during a controlled operation. + */ + private[core] var underlyingRecursions: Int = 0 + + /** The set of named types on which a currently active invocation + * of underlying during a controlled operation exists. */ + private[core] val pendingUnderlying: util.HashSet[Type] = util.HashSet[Type]() + + /** A map from ErrorType to associated message. 
We use this map + * instead of storing messages directly in ErrorTypes in order + * to avoid space leaks - the message usually captures a context. + */ + private[core] val errorTypeMsg: mutable.Map[Types.ErrorType, Message] = mutable.Map() + + // Phases state + + private[core] var phasesPlan: List[List[Phase]] = _ + + /** Phases by id */ + private[dotc] var phases: Array[Phase] = _ + + /** Phases with consecutive Transforms grouped into a single phase, Empty array if fusion is disabled */ + private[core] var fusedPhases: Array[Phase] = Array.empty[Phase] + + /** Next denotation transformer id */ + private[core] var nextDenotTransformerId: Array[Int] = _ + + private[core] var denotTransformers: Array[DenotTransformer] = _ + + /** Flag to suppress inlining, set after overflow */ + private[dotc] var stopInlining: Boolean = false + + /** A variable that records that some error was reported in a globally committable context. + * The error will not necessarlily be emitted, since it could still be that + * the enclosing context will be aborted. The variable is used as a smoke test + * to turn off assertions that might be wrong if the program is erroneous. To + * just test for `ctx.reporter.errorsReported` is not always enough, since it + * could be that the context in which the assertion is tested is a completer context + * that's different from the context where the error was reported. See i13218.scala + * for a test. 
+ */ + private[dotc] var errorsToBeReported = false + + // Reporters state + private[dotc] var indent: Int = 0 + + protected[dotc] val indentTab: String = " " + + private[Contexts] val exploreContexts = new mutable.ArrayBuffer[FreshContext] + private[Contexts] var exploresInUse: Int = 0 + + private[Contexts] val changeOwnerContexts = new mutable.ArrayBuffer[FreshContext] + private[Contexts] var changeOwnersInUse: Int = 0 + + private[Contexts] val comparers = new mutable.ArrayBuffer[TypeComparer] + private[Contexts] var comparersInUse: Int = 0 + + private var charArray = new Array[Char](256) + + private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) + + private[dotc] var wConfCache: (List[String], WConf) = _ + + def sharedCharArray(len: Int): Array[Char] = + while len > charArray.length do + charArray = new Array[Char](charArray.length * 2) + charArray + + def reset(): Unit = + uniques.clear() + uniqueAppliedTypes.clear() + uniqueNamedTypes.clear() + emptyTypeBounds = null + emptyWildcardBounds = null + errorsToBeReported = false + errorTypeMsg.clear() + sources.clear() + files.clear() + comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer + + // Test that access is single threaded + + /** The thread on which `checkSingleThreaded was invoked last */ + @sharable private var thread: Thread | Null = null + + /** Check that we are on the same thread as before */ + def checkSingleThreaded(): Unit = + if (thread == null) thread = Thread.currentThread() + else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase") + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala new file mode 100644 index 000000000000..679e22b48c9e --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala @@ -0,0 +1,312 @@ +package dotty.tools +package dotc +package core + +import scala.annotation.tailrec +import 
scala.collection.mutable.ListBuffer +import scala.util.control.NonFatal + +import Contexts._, Names._, Phases._, Symbols._ +import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ +import transform.MegaPhase +import reporting.{Message, NoExplanation} +import language.experimental.pureFunctions + +/** This object provides useful implicit decorators for types defined elsewhere */ +object Decorators { + + /** Extension methods for toType/TermName methods on PreNames. + */ + extension (pn: PreName) + def toTermName: TermName = pn match + case s: String => termName(s) + case n: Name => n.toTermName + def toTypeName: TypeName = pn match + case s: String => typeName(s) + case n: Name => n.toTypeName + + extension (s: String) + def splitWhere(f: Char => Boolean, doDropIndex: Boolean): Option[(String, String)] = + def splitAt(idx: Int, doDropIndex: Boolean): Option[(String, String)] = + if (idx == -1) None + else Some((s.take(idx), s.drop(if (doDropIndex) idx + 1 else idx))) + splitAt(s.indexWhere(f), doDropIndex) + + /** Create a term name from a string slice, using a common buffer. 
+ * This avoids some allocation relative to `termName(s)` + */ + def sliceToTermName(start: Int, end: Int)(using Context): SimpleName = + val len = end - start + val chars = ctx.base.sharedCharArray(len) + s.getChars(start, end, chars, 0) + termName(chars, 0, len) + + def sliceToTypeName(start: Int, end: Int)(using Context): TypeName = + sliceToTermName(start, end).toTypeName + + def concat(name: Name)(using Context): SimpleName = name match + case name: SimpleName => + val len = s.length + name.length + var chars = ctx.base.sharedCharArray(len) + s.getChars(0, s.length, chars, 0) + if name.length != 0 then name.getChars(0, name.length, chars, s.length) + termName(chars, 0, len) + case name: TypeName => s.concat(name.toTermName) + case _ => termName(s.concat(name.toString).nn) + + def indented(width: Int): String = + val padding = " " * width + padding + s.replace("\n", "\n" + padding) + end extension + + extension (str: -> String) + def toMessage: Message = reporting.NoExplanation(str) + + /** Implements a findSymbol method on iterators of Symbols that + * works like find but avoids Option, replacing None with NoSymbol. + */ + extension (it: Iterator[Symbol]) + final def findSymbol(p: Symbol => Boolean): Symbol = { + while (it.hasNext) { + val sym = it.next() + if (p(sym)) return sym + } + NoSymbol + } + + inline val MaxFilterRecursions = 10 + + /** Implements filterConserve, zipWithConserve methods + * on lists that avoid duplication of list nodes where feasible. 
+ */ + implicit class ListDecorator[T](val xs: List[T]) extends AnyVal { + + final def mapconserve[U](f: T => U): List[U] = { + @tailrec + def loop(mapped: ListBuffer[U] | Null, unchanged: List[U], pending: List[T]): List[U] = + if (pending.isEmpty) + if (mapped == null) unchanged + else mapped.prependToList(unchanged) + else { + val head0 = pending.head + val head1 = f(head0) + + if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef]) + loop(mapped, unchanged, pending.tail) + else { + val b = if (mapped == null) new ListBuffer[U] else mapped + var xc = unchanged + while (xc ne pending) { + b += xc.head + xc = xc.tail + } + b += head1 + val tail0 = pending.tail + loop(b, tail0.asInstanceOf[List[U]], tail0) + } + } + loop(null, xs.asInstanceOf[List[U]], xs) + } + + /** Like `xs filter p` but returns list `xs` itself - instead of a copy - + * if `p` is true for all elements. + */ + def filterConserve(p: T => Boolean): List[T] = + + def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = + if from eq until then buf else addAll(buf += from.head, from.tail, until) + + def loopWithBuffer(buf: ListBuffer[T], xs: List[T]): List[T] = xs match + case x :: xs1 => + if p(x) then buf += x + loopWithBuffer(buf, xs1) + case nil => buf.toList + + def loop(keep: List[T], explore: List[T], keepCount: Int, recCount: Int): List[T] = + explore match + case x :: rest => + if p(x) then + loop(keep, rest, keepCount + 1, recCount) + else if keepCount <= 3 && recCount <= MaxFilterRecursions then + val rest1 = loop(rest, rest, 0, recCount + 1) + keepCount match + case 0 => rest1 + case 1 => keep.head :: rest1 + case 2 => keep.head :: keep.tail.head :: rest1 + case 3 => val tl = keep.tail; keep.head :: tl.head :: tl.tail.head :: rest1 + else + loopWithBuffer(addAll(new ListBuffer[T], keep, explore), rest) + case nil => + keep + + loop(xs, xs, 0, 0) + end filterConserve + + /** Like `xs.lazyZip(ys).map(f)`, but returns list `xs` itself + * - instead of a copy - if 
function `f` maps all elements of + * `xs` to themselves. Also, it is required that `ys` is at least + * as long as `xs`. + */ + def zipWithConserve[U, V <: T](ys: List[U])(f: (T, U) => V): List[V] = + if (xs.isEmpty || ys.isEmpty) Nil + else { + val x1 = f(xs.head, ys.head) + val xs1 = xs.tail.zipWithConserve(ys.tail)(f) + if (x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) && (xs1 eq xs.tail) + then xs.asInstanceOf[List[V]] + else x1 :: xs1 + } + + /** Like `xs.lazyZip(xs.indices).map(f)`, but returns list `xs` itself + * - instead of a copy - if function `f` maps all elements of + * `xs` to themselves. + */ + def mapWithIndexConserve[U <: T](f: (T, Int) => U): List[U] = + + @tailrec + def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = + if from eq until then buf else addAll(buf += from.head, from.tail, until) + + @tailrec + def loopWithBuffer(buf: ListBuffer[U], explore: List[T], idx: Int): List[U] = explore match + case Nil => buf.toList + case t :: rest => loopWithBuffer(buf += f(t, idx), rest, idx + 1) + + @tailrec + def loop(keep: List[T], explore: List[T], idx: Int): List[U] = explore match + case Nil => keep.asInstanceOf[List[U]] + case t :: rest => + val u = f(t, idx) + if u.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef] then + loop(keep, rest, idx + 1) + else + val buf = addAll(new ListBuffer[T], keep, explore).asInstanceOf[ListBuffer[U]] + loopWithBuffer(buf += u, rest, idx + 1) + + loop(xs, xs, 0) + end mapWithIndexConserve + + /** True if two lists have the same length. Since calling length on linear sequences + * is Θ(n), it is an inadvisable way to test length equality. This method is Θ(n min m). 
+ */ + final def hasSameLengthAs[U](ys: List[U]): Boolean = { + @tailrec def loop(xs: List[T], ys: List[U]): Boolean = + if (xs.isEmpty) ys.isEmpty + else ys.nonEmpty && loop(xs.tail, ys.tail) + loop(xs, ys) + } + + @tailrec final def eqElements(ys: List[AnyRef]): Boolean = xs match { + case x :: _ => + ys match { + case y :: _ => + x.asInstanceOf[AnyRef].eq(y) && + xs.tail.eqElements(ys.tail) + case _ => false + } + case nil => ys.isEmpty + } + + /** Union on lists seen as sets */ + def | (ys: List[T]): List[T] = xs ::: (ys filterNot (xs contains _)) + + /** Intersection on lists seen as sets */ + def & (ys: List[T]): List[T] = xs filter (ys contains _) + } + + extension [T, U](xss: List[List[T]]) + def nestedMap(f: T => U): List[List[U]] = xss match + case xs :: xss1 => xs.map(f) :: xss1.nestedMap(f) + case nil => Nil + def nestedMapConserve(f: T => U): List[List[U]] = + xss.mapconserve(_.mapconserve(f)) + def nestedZipWithConserve(yss: List[List[U]])(f: (T, U) => T): List[List[T]] = + xss.zipWithConserve(yss)((xs, ys) => xs.zipWithConserve(ys)(f)) + def nestedExists(p: T => Boolean): Boolean = xss match + case xs :: xss1 => xs.exists(p) || xss1.nestedExists(p) + case nil => false + end extension + + extension (text: Text) + def show(using Context): String = text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) + + /** Test whether a list of strings representing phases contains + * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the + * exact meaning of "contains" here. 
+ */ + extension (names: List[String]) + def containsPhase(phase: Phase): Boolean = + names.nonEmpty && { + phase match { + case phase: MegaPhase => phase.miniPhases.exists(x => names.containsPhase(x)) + case _ => + names exists { name => + name == "all" || { + val strippedName = name.stripSuffix("+") + val logNextPhase = name != strippedName + phase.phaseName.startsWith(strippedName) || + (logNextPhase && phase.prev.phaseName.startsWith(strippedName)) + } + } + } + } + + extension [T](x: T) + def showing[U]( + op: WrappedResult[U] ?=> String, + printer: config.Printers.Printer = config.Printers.default)(using c: Conversion[T, U] | Null = null): T = { + // either the use of `$result` was driven by the expected type of `Shown` + // which led to the summoning of `Conversion[T, Shown]` (which we'll invoke) + // or no such conversion was found so we'll consume the result as it is instead + val obj = if c == null then x.asInstanceOf[U] else c(x) + printer.println(op(using WrappedResult(obj))) + x + } + + /** Instead of `toString` call `show` on `Showable` values, falling back to `toString` if an exception is raised. */ + def tryToShow(using Context): String = x match + case x: Showable => + try x.show + catch + case ex: CyclicReference => "... (caught cyclic reference) ..." 
+ case NonFatal(ex) + if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => + val msg = ex match { case te: TypeError => te.toMessage case _ => ex.getMessage } + s"[cannot display due to $msg, raw string = $x]" + case _ => String.valueOf(x).nn + + extension [T](x: T) + def assertingErrorsReported(using Context): T = { + assert(ctx.reporter.errorsReported) + x + } + def assertingErrorsReported(msg: => String)(using Context): T = { + assert(ctx.reporter.errorsReported, msg) + x + } + + extension [T <: AnyRef](xs: ::[T]) + def derivedCons(x1: T, xs1: List[T]) = + if (xs.head eq x1) && (xs.tail eq xs1) then xs else x1 :: xs1 + + extension (sc: StringContext) + /** General purpose string formatting */ + def i(args: Shown*)(using Context): String = + new StringFormatter(sc).assemble(args) + + /** Formatting for error messages: Like `i` but suppress follow-on + * error messages after the first one if some of their arguments are "non-sensical". + */ + def em(args: Shown*)(using Context): String = + forErrorMessages(new StringFormatter(sc).assemble(args)) + + /** Formatting with added explanations: Like `em`, but add explanations to + * give more info about type variables and to disambiguate where needed. 
+ */ + def ex(args: Shown*)(using Context): String = + explained(new StringFormatter(sc).assemble(args)) + + extension [T <: AnyRef](arr: Array[T]) + def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) + +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala new file mode 100644 index 000000000000..e68dc9102961 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala @@ -0,0 +1,2379 @@ +package dotty.tools +package dotc +package core + +import scala.annotation.{threadUnsafe => tu} +import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._ +import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._ +import unpickleScala2.Scala2Unpickler.ensureConstructor +import scala.collection.mutable +import collection.mutable +import Denotations.{SingleDenotation, staticRef} +import util.{SimpleIdentityMap, SourceFile, NoSource} +import typer.ImportInfo.RootRef +import Comments.CommentsContext +import Comments.Comment +import util.Spans.NoSpan +import config.Feature +import Symbols.requiredModuleRef +import cc.{CapturingType, CaptureSet, EventuallyCapturingType} + +import scala.annotation.tailrec +import language.experimental.pureFunctions + +object Definitions { + + /** The maximum number of elements in a tuple or product. + * This should be removed once we go to hlists. + */ + val MaxTupleArity: Int = 22 + + /** The maximum arity N of a function type that's implemented + * as a trait `scala.FunctionN`. Functions of higher arity are possible, + * but are mapped in erasure to functions taking a single parameter of type + * Object[]. + * The limit 22 is chosen for Scala2x interop. It could be something + * else without affecting the set of programs that can be compiled. 
+ */ + val MaxImplementedFunctionArity: Int = MaxTupleArity +} + +/** A class defining symbols and types of standard definitions + * + */ +class Definitions { + import Definitions._ + + private var initCtx: Context = _ + private given currentContext[Dummy_so_its_a_def]: Context = initCtx + + private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) = + newSymbol(owner, name, flags | Permanent, info) + + private def newPermanentClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) = + newClassSymbol(owner, name, flags | Permanent | NoInits | Open, infoFn) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef]): ClassSymbol = + enterCompleteClassSymbol(owner, name, flags, parents, newScope(owner.nestingLevel + 1)) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = + newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered + + private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) + + private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) + + private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = + enterTypeParam(cls, suffix.toTypeName, paramFlags, scope) + + // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only + // implemented in Dotty and not in Scala 2. + // See . 
+ private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: -> Seq[Type]): ClassSymbol = { + val completer = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val cls = denot.asClass.classSymbol + val paramDecls = newScope + val typeParam = enterSyntheticTypeParam(cls, paramFlags, paramDecls) + def instantiate(tpe: Type) = + if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef) + else tpe + val parents = parentConstrs.toList map instantiate + denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parents, paramDecls) + } + } + newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered + } + + /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N + * @param name The name of the trait to be created + * + * FunctionN traits follow this template: + * + * trait FunctionN[-T0,...-T{N-1}, +R] extends Object { + * def apply($x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * That is, they follow the template given for Function2..Function22 in the + * standard library, but without `tupled` and `curried` methods and without + * a `toString`. + * + * ContextFunctionN traits follow this template: + * + * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedFunctionN traits follow this template: + * + * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedContextFunctionN traits follow this template: + * + * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedFunctionN and ErasedContextFunctionN erase to Function0. 
+   *
+   *  ImpureXYZFunctionN follow this template:
+   *
+   *      type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R]
+   */
+  private def newFunctionNType(name: TypeName): Symbol = {
+    // Impure variants are type aliases over the underlying function trait,
+    // capturing the universal capture set; all other variants are synthesized
+    // traits with a single deferred `apply`.
+    val impure = name.startsWith("Impure")
+    val completer = new LazyType {
+      def complete(denot: SymDenotation)(using Context): Unit = {
+        val arity = name.functionArity
+        if impure then
+          val argParamNames = List.tabulate(arity)(tpnme.syntheticTypeParamName)
+          val argVariances = List.fill(arity)(Contravariant)
+          // Drop the "Impure" prefix (6 characters) to find the underlying trait.
+          val underlyingName = name.asSimpleName.drop(6)
+          val underlyingClass = ScalaPackageVal.requiredClass(underlyingName)
+          // Alias: [-T0,...,-T{N-1}, +R] =>> {*} Underlying[T0,...,T{N-1}, R]
+          denot.info = TypeAlias(
+            HKTypeLambda(argParamNames :+ "R".toTypeName, argVariances :+ Covariant)(
+              tl => List.fill(arity + 1)(TypeBounds.empty),
+              tl => CapturingType(underlyingClass.typeRef.appliedTo(tl.paramRefs),
+                CaptureSet.universal)
+            ))
+        else
+          val cls = denot.asClass.classSymbol
+          val decls = newScope
+          // Expanded prefix keeps the synthesized type params unique per trait.
+          val paramNamePrefix = tpnme.scala ++ str.NAME_JOIN ++ name ++ str.EXPAND_SEPARATOR
+          val argParamRefs = List.tabulate(arity) { i =>
+            enterTypeParam(cls, paramNamePrefix ++ "T" ++ (i + 1).toString, Contravariant, decls).typeRef
+          }
+          val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef
+          // Pick contextual / erased method kind from the trait's name.
+          val methodType = MethodType.companion(
+            isContextual = name.isContextFunction,
+            isImplicit = false,
+            isErased = name.isErasedFunction)
+          decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred))
+          denot.info =
+            ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls)
+      }
+    }
+    if impure then
+      newPermanentSymbol(ScalaPackageClass, name, EmptyFlags, completer)
+    else
+      newPermanentClassSymbol(ScalaPackageClass, name, Trait | NoInits, completer)
+  }
+
+  /** A permanent method symbol (not entered into any scope). */
+  private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol =
+    newPermanentSymbol(cls, name, flags | Method, info).asTerm
+
+  private def enterMethod(cls: ClassSymbol,
name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol =
+    newMethod(cls, name, info, flags).entered
+
+  /** Create a permanent symbol owned by the scala package and enter it into the
+   *  package's current declaration scope.
+   */
+  private def enterPermanentSymbol(name: Name, info: Type, flags: FlagSet = EmptyFlags): Symbol =
+    val sym = newPermanentSymbol(ScalaPackageClass, name, flags, info)
+    ScalaPackageClass.currentPackageDecls.enter(sym)
+    sym
+
+  /** Enter a type alias `name = tpe` in the scala package. */
+  private def enterAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol =
+    enterPermanentSymbol(name, TypeAlias(tpe), flags).asType
+
+  /** Enter a binary type alias `name[X, Y] = op(X, Y)` (used for `&` and `|`). */
+  private def enterBinaryAlias(name: TypeName, op: (Type, Type) => Type): TypeSymbol =
+    enterAliasType(name,
+      HKTypeLambda(TypeBounds.empty :: TypeBounds.empty :: Nil)(
+        tl => op(tl.paramRefs(0), tl.paramRefs(1))))
+
+  /** Enter a polymorphic method with `typeParamCount` synthetic type params, all
+   *  sharing `bounds`; with `useCompleter` the PolyType is built lazily on first
+   *  access instead of eagerly.
+   */
+  private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int,
+      resultTypeFn: PolyType -> Type,
+      flags: FlagSet = EmptyFlags,
+      bounds: TypeBounds = TypeBounds.empty,
+      useCompleter: Boolean = false) = {
+    val tparamNames = PolyType.syntheticParamNames(typeParamCount)
+    val tparamInfos = tparamNames map (_ => bounds)
+    def ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn)
+    val info =
+      if (useCompleter)
+        new LazyType {
+          def complete(denot: SymDenotation)(using Context): Unit =
+            denot.info = ptype
+        }
+      else ptype
+    enterMethod(cls, name, info, flags)
+  }
+
+  /** Shorthand: a one-type-parameter method with no value parameter lists. */
+  private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType -> Type, flags: FlagSet) =
+    enterPolyMethod(cls, name, 1, resultTypeFn, flags)
+
+  /** Array of class refs `name$i` for i in [countFrom, arity]; slots below
+   *  `countFrom` stay null.
+   */
+  private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = {
+    val arr = new Array[TypeRef | Null](arity + 1)
+    for (i <- countFrom to arity) arr(i) = requiredClassRef(name + i)
+    arr
+  }
+
+  /** Finish a synthesized class: optionally give it a constructor, and mark any
+   *  linked (companion) class absent so it is not re-completed from a classfile.
+   */
+  private def completeClass(cls: ClassSymbol, ensureCtor: Boolean = true): ClassSymbol = {
+    if (ensureCtor) ensureConstructor(cls, cls.denot.asClass, EmptyScope)
+    if (cls.linkedClass.exists) cls.linkedClass.markAbsent()
+    cls
+  }
+
+  @tu lazy val RootClass:
ClassSymbol = newPackageSymbol( + NoSymbol, nme.ROOT, (root, rootcls) => ctx.base.rootLoader(root)).moduleClass.asClass + @tu lazy val RootPackage: TermSymbol = newSymbol( + NoSymbol, nme.ROOTPKG, PackageCreationFlags, TypeRef(NoPrefix, RootClass)) + + @tu lazy val EmptyPackageVal: TermSymbol = newPackageSymbol( + RootClass, nme.EMPTY_PACKAGE, (emptypkg, emptycls) => ctx.base.rootLoader(emptypkg)).entered + @tu lazy val EmptyPackageClass: ClassSymbol = EmptyPackageVal.moduleClass.asClass + + /** A package in which we can place all methods and types that are interpreted specially by the compiler */ + @tu lazy val OpsPackageVal: TermSymbol = newCompletePackageSymbol(RootClass, nme.OPS_PACKAGE).entered + @tu lazy val OpsPackageClass: ClassSymbol = OpsPackageVal.moduleClass.asClass + + @tu lazy val ScalaPackageVal: TermSymbol = requiredPackage(nme.scala) + @tu lazy val ScalaMathPackageVal: TermSymbol = requiredPackage("scala.math") + @tu lazy val ScalaPackageClass: ClassSymbol = { + val cls = ScalaPackageVal.moduleClass.asClass + cls.info.decls.openForMutations.useSynthesizer( + name => + if (name.isTypeName && name.isSyntheticFunction) newFunctionNType(name.asTypeName) + else NoSymbol) + cls + } + @tu lazy val ScalaPackageObject: Symbol = requiredModule("scala.package") + @tu lazy val ScalaRuntimePackageVal: TermSymbol = requiredPackage("scala.runtime") + @tu lazy val ScalaRuntimePackageClass: ClassSymbol = ScalaRuntimePackageVal.moduleClass.asClass + @tu lazy val JavaPackageVal: TermSymbol = requiredPackage(nme.java) + @tu lazy val JavaPackageClass: ClassSymbol = JavaPackageVal.moduleClass.asClass + @tu lazy val JavaLangPackageVal: TermSymbol = requiredPackage(jnme.JavaLang) + @tu lazy val JavaLangPackageClass: ClassSymbol = JavaLangPackageVal.moduleClass.asClass + + // fundamental modules + @tu lazy val SysPackage : Symbol = requiredModule("scala.sys.package") + @tu lazy val Sys_error: Symbol = SysPackage.moduleClass.requiredMethod(nme.error) + + @tu lazy val 
ScalaXmlPackageClass: Symbol = getPackageClassIfDefined("scala.xml") + + @tu lazy val CompiletimePackageClass: Symbol = requiredPackage("scala.compiletime").moduleClass + @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") + @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") + @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) + @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") + @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") + @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") + @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") + @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") + @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") + @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") + @tu lazy val CompiletimeTesting_typeCheckErrors: Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") + @tu lazy val CompiletimeTesting_ErrorClass: ClassSymbol = requiredClass("scala.compiletime.testing.Error") + @tu lazy val CompiletimeTesting_Error: Symbol = requiredModule("scala.compiletime.testing.Error") + @tu lazy val CompiletimeTesting_Error_apply = CompiletimeTesting_Error.requiredMethod(nme.apply) + @tu lazy val CompiletimeTesting_ErrorKind: Symbol = requiredModule("scala.compiletime.testing.ErrorKind") + @tu lazy val CompiletimeTesting_ErrorKind_Parser: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Parser") + @tu lazy val CompiletimeTesting_ErrorKind_Typer: 
Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Typer") + @tu lazy val CompiletimeOpsPackage: Symbol = requiredPackage("scala.compiletime.ops") + @tu lazy val CompiletimeOpsAnyModuleClass: Symbol = requiredModule("scala.compiletime.ops.any").moduleClass + @tu lazy val CompiletimeOpsIntModuleClass: Symbol = requiredModule("scala.compiletime.ops.int").moduleClass + @tu lazy val CompiletimeOpsLongModuleClass: Symbol = requiredModule("scala.compiletime.ops.long").moduleClass + @tu lazy val CompiletimeOpsFloatModuleClass: Symbol = requiredModule("scala.compiletime.ops.float").moduleClass + @tu lazy val CompiletimeOpsDoubleModuleClass: Symbol = requiredModule("scala.compiletime.ops.double").moduleClass + @tu lazy val CompiletimeOpsStringModuleClass: Symbol = requiredModule("scala.compiletime.ops.string").moduleClass + @tu lazy val CompiletimeOpsBooleanModuleClass: Symbol = requiredModule("scala.compiletime.ops.boolean").moduleClass + + /** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter) + * because after erasure the Any and AnyVal references get remapped to the Object methods + * which would result in a double binding assertion failure. + * Instead we do the following: + * + * - Have some methods exist only in Any, and remap them with the Erasure denotation + * transformer to be owned by Object. + * - Have other methods exist only in Object. + * To achieve this, we synthesize all Any and Object methods; Object methods no longer get + * loaded from a classfile. 
+   */
+  @tu lazy val AnyClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil), ensureCtor = false)
+  def AnyType: TypeRef = AnyClass.typeRef
+  @tu lazy val MatchableClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Matchable, Trait, AnyType :: Nil), ensureCtor = false)
+  def MatchableType: TypeRef = MatchableClass.typeRef
+  @tu lazy val AnyValClass: ClassSymbol =
+    val res = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyType, MatchableType)))
+    // Mark companion as absent, so that class does not get re-completed
+    val companion = ScalaPackageVal.info.decl(nme.AnyVal).symbol
+    companion.moduleClass.markAbsent()
+    companion.markAbsent()
+    res
+
+  def AnyValType: TypeRef = AnyValClass.typeRef
+
+  // Synthesized members of Any (see the explanatory comment above: Any/Object
+  // methods are created here rather than loaded from classfiles).
+  @tu lazy val Any_== : TermSymbol = enterMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final)
+  @tu lazy val Any_!= : TermSymbol = enterMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final)
+  @tu lazy val Any_equals: TermSymbol = enterMethod(AnyClass, nme.equals_, methOfAny(BooleanType))
+  @tu lazy val Any_hashCode: TermSymbol = enterMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType))
+  @tu lazy val Any_toString: TermSymbol = enterMethod(AnyClass, nme.toString_, MethodType(Nil, StringType))
+  // `##` is parameterless (ExprType), unlike hashCode's empty parameter list.
+  @tu lazy val Any_## : TermSymbol = enterMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final)
+  @tu lazy val Any_isInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final)
+  @tu lazy val Any_asInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, _.paramRefs(0), Final)
+  // Pattern-matcher variants of the two methods above, flagged SyntheticArtifact;
+  // Any_typeCast is additionally StableRealizable so it may occur in paths.
+  @tu lazy val Any_typeTest: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOfPM, _ => BooleanType, Final | SyntheticArtifact)
+  @tu lazy val Any_typeCast: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOfPM, _.paramRefs(0), Final | SyntheticArtifact | StableRealizable)
+  //
generated by pattern matcher and explicit nulls, eliminated by erasure + + /** def getClass[A >: this.type](): Class[? <: A] */ + @tu lazy val Any_getClass: TermSymbol = + enterPolyMethod( + AnyClass, nme.getClass_, 1, + pt => MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.upper(pt.paramRefs(0)))), + Final, + bounds = TypeBounds.lower(AnyClass.thisType)) + + def AnyMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, + Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf, Any_typeTest, Any_typeCast) + + @tu lazy val ObjectClass: ClassSymbol = { + val cls = requiredClass("java.lang.Object") + assert(!cls.isCompleted, "race for completing java.lang.Object") + cls.info = ClassInfo(cls.owner.thisType, cls, List(AnyType, MatchableType), newScope) + cls.setFlag(NoInits | JavaDefined) + + ensureConstructor(cls, cls.denot.asClass, EmptyScope) + val companion = JavaLangPackageVal.info.decl(nme.Object).symbol.asTerm + NamerOps.makeConstructorCompanion(companion, cls) + cls + } + def ObjectType: TypeRef = ObjectClass.typeRef + + /** A type alias of Object used to represent any reference to Object in a Java + * signature, the secret sauce is that subtype checking treats it specially: + * + * tp <:< FromJavaObject + * + * is equivalent to: + * + * tp <:< Any + * + * This is useful to avoid usability problems when interacting with Java + * code where Object is the top type. This is safe because this type will + * only appear in signatures of Java definitions in positions where `Object` + * might appear, let's enumerate all possible cases this gives us: + * + * 1. At the top level: + * + * // A.java + * void meth1(Object arg) {} + * void meth2(T arg) {} // T implicitly extends Object + * + * // B.scala + * meth1(1) // OK + * meth2(1) // OK + * + * This is safe even though Int is not a subtype of Object, because Erasure + * will detect the mismatch and box the value type. + * + * 2. 
In a class type parameter: + * + * // A.java + * void meth3(scala.List arg) {} + * void meth4(scala.List arg) {} + * + * // B.scala + * meth3(List[Int](1)) // OK + * meth4(List[Int](1)) // OK + * + * At erasure, type parameters are removed and value types are boxed. + * + * 3. As the type parameter of an array: + * + * // A.java + * void meth5(Object[] arg) {} + * void meth6(T[] arg) {} + * + * // B.scala + * meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + * meth6(Array[Int](1)) // error: Array[Int] is not a subtype of Array[T & Object] + * + * + * This is a bit more subtle: at erasure, Arrays keep their type parameter, + * and primitive Arrays are not subtypes of reference Arrays on the JVM, + * so we can't pass an Array of Int where a reference Array is expected. + * Array is invariant in Scala, so `meth5` is safe even if we use `FromJavaObject`, + * but generic Arrays are treated specially: we always add `& Object` (and here + * we mean the normal java.lang.Object type) to these types when they come from + * Java signatures (see `translateJavaArrayElementType`), this ensure that `meth6` + * is safe to use. + * + * 4. As the repeated argument of a varargs method: + * + * // A.java + * void meth7(Object... args) {} + * void meth8(T... args) {} + * + * // B.scala + * meth7(1) // OK (creates a reference array) + * meth8(1) // OK (creates a primitive array and copies it into a reference array at Erasure) + * val ai = Array[Int](1) + * meth7(ai: _*) // OK (will copy the array at Erasure) + * meth8(ai: _*) // OK (will copy the array at Erasure) + * + * Java repeated arguments are erased to arrays, so it would be safe to treat + * them in the same way: add an `& Object` to the parameter type to disallow + * passing primitives, but that would be very inconvenient as it is common to + * want to pass a primitive to an Object repeated argument (e.g. + * `String.format("foo: %d", 1)`). 
So instead we type them _without_ adding the + * `& Object` and let `ElimRepeated` and `Erasure` take care of doing any necessary adaptation + * (note that adapting a primitive array to a reference array requires + * copying the whole array, so this transformation only preserves semantics + * if the callee does not try to mutate the varargs array which is a reasonable + * assumption to make). + * + * + * This mechanism is similar to `ObjectTpeJavaRef` in Scala 2, except that we + * create a new symbol with its own name, this is needed because this type + * can show up in inferred types and therefore needs to be preserved when + * pickling so that unpickled trees pass `-Ycheck`. + * + * Note that by default we pretty-print `FromJavaObject` as `Object` or simply omit it + * if it's the sole upper-bound of a type parameter, use `-Yprint-debug` to explicitly + * display it. + */ + @tu lazy val FromJavaObjectSymbol: TypeSymbol = + newPermanentSymbol(OpsPackageClass, tpnme.FromJavaObject, JavaDefined, TypeAlias(ObjectType)).entered + def FromJavaObjectType: TypeRef = FromJavaObjectSymbol.typeRef + + @tu lazy val AnyRefAlias: TypeSymbol = enterAliasType(tpnme.AnyRef, ObjectType) + def AnyRefType: TypeRef = AnyRefAlias.typeRef + + @tu lazy val Object_eq: TermSymbol = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final) + @tu lazy val Object_ne: TermSymbol = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final) + @tu lazy val Object_synchronized: TermSymbol = enterPolyMethod(ObjectClass, nme.synchronized_, 1, + pt => MethodType(List(pt.paramRefs(0)), pt.paramRefs(0)), Final) + @tu lazy val Object_clone: TermSymbol = enterMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected) + @tu lazy val Object_finalize: TermSymbol = enterMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected) + @tu lazy val Object_notify: TermSymbol = enterMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType), Final) + @tu lazy val 
Object_notifyAll: TermSymbol = enterMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType), Final) + @tu lazy val Object_wait: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType), Final) + @tu lazy val Object_waitL: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType), Final) + @tu lazy val Object_waitLI: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType), Final) + + def ObjectMethods: List[TermSymbol] = List(Object_eq, Object_ne, Object_synchronized, Object_clone, + Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI) + + /** Methods in Object and Any that do not have a side effect */ + @tu lazy val pureMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, + Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) + + @tu lazy val AnyKindClass: ClassSymbol = { + val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) + if (!ctx.settings.YnoKindPolymorphism.value) + // Enable kind-polymorphism by exposing scala.AnyKind + cls.entered + cls + } + def AnyKindType: TypeRef = AnyKindClass.typeRef + + @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) + @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + + /** Method representing a throw */ + @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, + MethodType(List(ThrowableType), NothingType)) + + @tu lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyType)) + def NothingType: TypeRef = NothingClass.typeRef + @tu lazy val NullClass: ClassSymbol = { + // When explicit-nulls is enabled, Null becomes a direct subtype of Any and Matchable + val parents = if ctx.explicitNulls then AnyType :: MatchableType :: 
Nil else ObjectType :: Nil + enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parents) + } + def NullType: TypeRef = NullClass.typeRef + + @tu lazy val InvokerModule = requiredModule("scala.runtime.coverage.Invoker") + @tu lazy val InvokedMethodRef = InvokerModule.requiredMethodRef("invoked") + + @tu lazy val ImplicitScrutineeTypeSym = + newPermanentSymbol(ScalaPackageClass, tpnme.IMPLICITkw, EmptyFlags, TypeBounds.empty).entered + def ImplicitScrutineeTypeRef: TypeRef = ImplicitScrutineeTypeSym.typeRef + + @tu lazy val ScalaPredefModule: Symbol = requiredModule("scala.Predef") + @tu lazy val Predef_conforms : Symbol = ScalaPredefModule.requiredMethod(nme.conforms_) + @tu lazy val Predef_classOf : Symbol = ScalaPredefModule.requiredMethod(nme.classOf) + @tu lazy val Predef_identity : Symbol = ScalaPredefModule.requiredMethod(nme.identity) + @tu lazy val Predef_undefined: Symbol = ScalaPredefModule.requiredMethod(nme.???) + @tu lazy val ScalaPredefModuleClass: ClassSymbol = ScalaPredefModule.moduleClass.asClass + + @tu lazy val SubTypeClass: ClassSymbol = requiredClass("scala.<:<") + @tu lazy val SubType_refl: Symbol = SubTypeClass.companionModule.requiredMethod(nme.refl) + + @tu lazy val DummyImplicitClass: ClassSymbol = requiredClass("scala.DummyImplicit") + + @tu lazy val ScalaRuntimeModule: Symbol = requiredModule("scala.runtime.ScalaRunTime") + def runtimeMethodRef(name: PreName): TermRef = ScalaRuntimeModule.requiredMethodRef(name) + def ScalaRuntime_drop: Symbol = runtimeMethodRef(nme.drop).symbol + @tu lazy val ScalaRuntime__hashCode: Symbol = ScalaRuntimeModule.requiredMethod(nme._hashCode_) + @tu lazy val ScalaRuntime_toArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toArray) + @tu lazy val ScalaRuntime_toObjectArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toObjectArray) + + @tu lazy val BoxesRunTimeModule: Symbol = requiredModule("scala.runtime.BoxesRunTime") + @tu lazy val BoxesRunTimeModule_externalEquals: Symbol = 
BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol + @tu lazy val ScalaStaticsModule: Symbol = requiredModule("scala.runtime.Statics") + def staticsMethodRef(name: PreName): TermRef = ScalaStaticsModule.requiredMethodRef(name) + def staticsMethod(name: PreName): TermSymbol = ScalaStaticsModule.requiredMethod(name) + + @tu lazy val DottyArraysModule: Symbol = requiredModule("scala.runtime.Arrays") + def newGenericArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") + def newArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") + + def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule + + // The set of all wrap{X, Ref}Array methods, where X is a value type + val WrapArrayMethods: PerRun[collection.Set[Symbol]] = new PerRun({ + val methodNames = ScalaValueTypes.map(ast.tpd.wrapArrayMethodName) `union` Set(nme.wrapRefArray) + methodNames.map(getWrapVarargsArrayModule.requiredMethod(_)) + }) + + @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") + @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") + @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") + @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") + @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + + @tu lazy val SingletonClass: ClassSymbol = + // needed as a synthetic class because Scala 2.x refers to it in classfiles + // but does not define it as an explicit class. 
+ enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, + List(AnyType), EmptyScope) + @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef + + @tu lazy val CollectionSeqType: TypeRef = requiredClassRef("scala.collection.Seq") + @tu lazy val SeqType: TypeRef = requiredClassRef("scala.collection.immutable.Seq") + def SeqClass(using Context): ClassSymbol = SeqType.symbol.asClass + @tu lazy val Seq_apply : Symbol = SeqClass.requiredMethod(nme.apply) + @tu lazy val Seq_head : Symbol = SeqClass.requiredMethod(nme.head) + @tu lazy val Seq_drop : Symbol = SeqClass.requiredMethod(nme.drop) + @tu lazy val Seq_lengthCompare: Symbol = SeqClass.requiredMethod(nme.lengthCompare, List(IntType)) + @tu lazy val Seq_length : Symbol = SeqClass.requiredMethod(nme.length) + @tu lazy val Seq_toSeq : Symbol = SeqClass.requiredMethod(nme.toSeq) + @tu lazy val SeqModule: Symbol = requiredModule("scala.collection.immutable.Seq") + + + @tu lazy val StringOps: Symbol = requiredClass("scala.collection.StringOps") + @tu lazy val StringOps_format: Symbol = StringOps.requiredMethod(nme.format) + + @tu lazy val ArrayType: TypeRef = requiredClassRef("scala.Array") + def ArrayClass(using Context): ClassSymbol = ArrayType.symbol.asClass + @tu lazy val Array_apply : Symbol = ArrayClass.requiredMethod(nme.apply) + @tu lazy val Array_update : Symbol = ArrayClass.requiredMethod(nme.update) + @tu lazy val Array_length : Symbol = ArrayClass.requiredMethod(nme.length) + @tu lazy val Array_clone : Symbol = ArrayClass.requiredMethod(nme.clone_) + @tu lazy val ArrayConstructor: Symbol = ArrayClass.requiredMethod(nme.CONSTRUCTOR) + + @tu lazy val ArrayModule: Symbol = requiredModule("scala.Array") + def ArrayModuleClass: Symbol = ArrayModule.moduleClass + + @tu lazy val IArrayModule: Symbol = requiredModule("scala.IArray") + def IArrayModuleClass: Symbol = IArrayModule.moduleClass + + @tu lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", 
java.lang.Void.TYPE, UnitEnc, nme.specializedTypeNames.Void) + def UnitClass(using Context): ClassSymbol = UnitType.symbol.asClass + def UnitModuleClass(using Context): Symbol = UnitType.symbol.asClass.linkedClass + @tu lazy val BooleanType: TypeRef = valueTypeRef("scala.Boolean", java.lang.Boolean.TYPE, BooleanEnc, nme.specializedTypeNames.Boolean) + def BooleanClass(using Context): ClassSymbol = BooleanType.symbol.asClass + @tu lazy val Boolean_! : Symbol = BooleanClass.requiredMethod(nme.UNARY_!) + @tu lazy val Boolean_&& : Symbol = BooleanClass.requiredMethod(nme.ZAND) // ### harmonize required... calls + @tu lazy val Boolean_|| : Symbol = BooleanClass.requiredMethod(nme.ZOR) + @tu lazy val Boolean_== : Symbol = + BooleanClass.info.member(nme.EQ).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isRef(BooleanClass) + case _ => false + }).symbol + @tu lazy val Boolean_!= : Symbol = + BooleanClass.info.member(nme.NE).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isRef(BooleanClass) + case _ => false + }).symbol + + @tu lazy val ByteType: TypeRef = valueTypeRef("scala.Byte", java.lang.Byte.TYPE, ByteEnc, nme.specializedTypeNames.Byte) + def ByteClass(using Context): ClassSymbol = ByteType.symbol.asClass + @tu lazy val ShortType: TypeRef = valueTypeRef("scala.Short", java.lang.Short.TYPE, ShortEnc, nme.specializedTypeNames.Short) + def ShortClass(using Context): ClassSymbol = ShortType.symbol.asClass + @tu lazy val CharType: TypeRef = valueTypeRef("scala.Char", java.lang.Character.TYPE, CharEnc, nme.specializedTypeNames.Char) + def CharClass(using Context): ClassSymbol = CharType.symbol.asClass + @tu lazy val IntType: TypeRef = valueTypeRef("scala.Int", java.lang.Integer.TYPE, IntEnc, nme.specializedTypeNames.Int) + def IntClass(using Context): ClassSymbol = IntType.symbol.asClass + @tu lazy val Int_- : Symbol = IntClass.requiredMethod(nme.MINUS, List(IntType)) + @tu lazy val Int_+ : Symbol = IntClass.requiredMethod(nme.PLUS, 
List(IntType)) + @tu lazy val Int_/ : Symbol = IntClass.requiredMethod(nme.DIV, List(IntType)) + @tu lazy val Int_* : Symbol = IntClass.requiredMethod(nme.MUL, List(IntType)) + @tu lazy val Int_== : Symbol = IntClass.requiredMethod(nme.EQ, List(IntType)) + @tu lazy val Int_>= : Symbol = IntClass.requiredMethod(nme.GE, List(IntType)) + @tu lazy val Int_<= : Symbol = IntClass.requiredMethod(nme.LE, List(IntType)) + @tu lazy val LongType: TypeRef = valueTypeRef("scala.Long", java.lang.Long.TYPE, LongEnc, nme.specializedTypeNames.Long) + def LongClass(using Context): ClassSymbol = LongType.symbol.asClass + @tu lazy val Long_+ : Symbol = LongClass.requiredMethod(nme.PLUS, List(LongType)) + @tu lazy val Long_* : Symbol = LongClass.requiredMethod(nme.MUL, List(LongType)) + @tu lazy val Long_/ : Symbol = LongClass.requiredMethod(nme.DIV, List(LongType)) + + @tu lazy val FloatType: TypeRef = valueTypeRef("scala.Float", java.lang.Float.TYPE, FloatEnc, nme.specializedTypeNames.Float) + def FloatClass(using Context): ClassSymbol = FloatType.symbol.asClass + @tu lazy val DoubleType: TypeRef = valueTypeRef("scala.Double", java.lang.Double.TYPE, DoubleEnc, nme.specializedTypeNames.Double) + def DoubleClass(using Context): ClassSymbol = DoubleType.symbol.asClass + + @tu lazy val BoxedUnitClass: ClassSymbol = requiredClass("scala.runtime.BoxedUnit") + def BoxedUnit_UNIT(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("UNIT") + def BoxedUnit_TYPE(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("TYPE") + + @tu lazy val BoxedBooleanClass: ClassSymbol = requiredClass("java.lang.Boolean") + @tu lazy val BoxedByteClass : ClassSymbol = requiredClass("java.lang.Byte") + @tu lazy val BoxedShortClass : ClassSymbol = requiredClass("java.lang.Short") + @tu lazy val BoxedCharClass : ClassSymbol = requiredClass("java.lang.Character") + @tu lazy val BoxedIntClass : ClassSymbol = requiredClass("java.lang.Integer") + @tu lazy val BoxedLongClass : 
ClassSymbol = requiredClass("java.lang.Long") + @tu lazy val BoxedFloatClass : ClassSymbol = requiredClass("java.lang.Float") + @tu lazy val BoxedDoubleClass : ClassSymbol = requiredClass("java.lang.Double") + + @tu lazy val BoxedBooleanModule: TermSymbol = requiredModule("java.lang.Boolean") + @tu lazy val BoxedByteModule : TermSymbol = requiredModule("java.lang.Byte") + @tu lazy val BoxedShortModule : TermSymbol = requiredModule("java.lang.Short") + @tu lazy val BoxedCharModule : TermSymbol = requiredModule("java.lang.Character") + @tu lazy val BoxedIntModule : TermSymbol = requiredModule("java.lang.Integer") + @tu lazy val BoxedLongModule : TermSymbol = requiredModule("java.lang.Long") + @tu lazy val BoxedFloatModule : TermSymbol = requiredModule("java.lang.Float") + @tu lazy val BoxedDoubleModule : TermSymbol = requiredModule("java.lang.Double") + @tu lazy val BoxedUnitModule : TermSymbol = requiredModule("java.lang.Void") + + @tu lazy val ByNameParamClass2x: ClassSymbol = enterSpecialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType)) + + @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) + + // fundamental classes + @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") + def StringType: Type = StringClass.typeRef + @tu lazy val StringModule: Symbol = StringClass.linkedClass + @tu lazy val String_+ : TermSymbol = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final) + @tu lazy val String_valueOf_Object: Symbol = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isAny || pt.stripNull.isAnyRef + case _ => false + }).symbol + + @tu lazy val JavaCloneableClass: ClassSymbol = requiredClass("java.lang.Cloneable") + @tu lazy val NullPointerExceptionClass: ClassSymbol = requiredClass("java.lang.NullPointerException") + @tu lazy val IndexOutOfBoundsException: ClassSymbol = 
requiredClass("java.lang.IndexOutOfBoundsException") + @tu lazy val ClassClass: ClassSymbol = requiredClass("java.lang.Class") + @tu lazy val BoxedNumberClass: ClassSymbol = requiredClass("java.lang.Number") + @tu lazy val ClassCastExceptionClass: ClassSymbol = requiredClass("java.lang.ClassCastException") + @tu lazy val ClassCastExceptionClass_stringConstructor: TermSymbol = ClassCastExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { + case List(pt) => + pt.stripNull.isRef(StringClass) + case _ => false + }).symbol.asTerm + @tu lazy val ArithmeticExceptionClass: ClassSymbol = requiredClass("java.lang.ArithmeticException") + @tu lazy val ArithmeticExceptionClass_stringConstructor: TermSymbol = ArithmeticExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { + case List(pt) => + pt.stripNull.isRef(StringClass) + case _ => false + }).symbol.asTerm + + @tu lazy val JavaSerializableClass: ClassSymbol = requiredClass("java.io.Serializable") + + @tu lazy val ComparableClass: ClassSymbol = requiredClass("java.lang.Comparable") + + @tu lazy val SystemClass: ClassSymbol = requiredClass("java.lang.System") + @tu lazy val SystemModule: Symbol = SystemClass.linkedClass + + @tu lazy val NoSuchElementExceptionClass = requiredClass("java.util.NoSuchElementException") + def NoSuchElementExceptionType = NoSuchElementExceptionClass.typeRef + @tu lazy val IllegalArgumentExceptionClass = requiredClass("java.lang.IllegalArgumentException") + def IllegalArgumentExceptionType = IllegalArgumentExceptionClass.typeRef + + // in scalac modified to have Any as parent + + @tu lazy val ThrowableType: TypeRef = requiredClassRef("java.lang.Throwable") + def ThrowableClass(using Context): ClassSymbol = ThrowableType.symbol.asClass + @tu lazy val ExceptionClass: ClassSymbol = requiredClass("java.lang.Exception") + @tu lazy val RuntimeExceptionClass: ClassSymbol = requiredClass("java.lang.RuntimeException") + + @tu lazy val 
SerializableType: TypeRef = JavaSerializableClass.typeRef + def SerializableClass(using Context): ClassSymbol = SerializableType.symbol.asClass + + @tu lazy val JavaBigIntegerClass: ClassSymbol = requiredClass("java.math.BigInteger") + @tu lazy val JavaBigDecimalClass: ClassSymbol = requiredClass("java.math.BigDecimal") + @tu lazy val JavaCalendarClass: ClassSymbol = requiredClass("java.util.Calendar") + @tu lazy val JavaDateClass: ClassSymbol = requiredClass("java.util.Date") + @tu lazy val JavaFormattableClass: ClassSymbol = requiredClass("java.util.Formattable") + + @tu lazy val JavaEnumClass: ClassSymbol = { + val cls = requiredClass("java.lang.Enum") + // jl.Enum has a single constructor protected(name: String, ordinal: Int). + // We remove the arguments from the primary constructor, and enter + // a new constructor symbol with 2 arguments, so that both + // `X extends jl.Enum[X]` and `X extends jl.Enum[X](name, ordinal)` + // pass typer and go through jl.Enum-specific checks in RefChecks. 
+ cls.infoOrCompleter match { + case completer: ClassfileLoader => + cls.info = new ClassfileLoader(completer.classfile) { + override def complete(root: SymDenotation)(using Context): Unit = { + super.complete(root) + val constr = cls.primaryConstructor + val noArgInfo = constr.info match { + case info: PolyType => + info.resType match { + case meth: MethodType => + info.derivedLambdaType( + resType = meth.derivedLambdaType( + paramNames = Nil, paramInfos = Nil)) + } + } + val argConstr = constr.copy().entered + constr.info = noArgInfo + constr.termRef.recomputeDenot() + } + } + cls + } + } + def JavaEnumType = JavaEnumClass.typeRef + + @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") + @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") + @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass + + @tu lazy val StringAddClass : ClassSymbol = requiredClass("scala.runtime.StringAdd") + @tu lazy val StringAdd_+ : Symbol = StringAddClass.requiredMethod(nme.raw.PLUS) + + @tu lazy val StringContextClass: ClassSymbol = requiredClass("scala.StringContext") + @tu lazy val StringContext_s : Symbol = StringContextClass.requiredMethod(nme.s) + @tu lazy val StringContext_raw: Symbol = StringContextClass.requiredMethod(nme.raw_) + @tu lazy val StringContext_f : Symbol = StringContextClass.requiredMethod(nme.f) + @tu lazy val StringContext_parts: Symbol = StringContextClass.requiredMethod(nme.parts) + @tu lazy val StringContextModule: Symbol = StringContextClass.companionModule + @tu lazy val StringContextModule_apply: Symbol = StringContextModule.requiredMethod(nme.apply) + @tu lazy val StringContextModule_standardInterpolator: Symbol = StringContextModule.requiredMethod(nme.standardInterpolator) + @tu lazy val StringContextModule_processEscapes: Symbol = StringContextModule.requiredMethod(nme.processEscapes) + + @tu lazy val PartialFunctionClass: 
ClassSymbol = requiredClass("scala.PartialFunction") + @tu lazy val PartialFunction_isDefinedAt: Symbol = PartialFunctionClass.requiredMethod(nme.isDefinedAt) + @tu lazy val PartialFunction_applyOrElse: Symbol = PartialFunctionClass.requiredMethod(nme.applyOrElse) + + @tu lazy val AbstractPartialFunctionClass: ClassSymbol = requiredClass("scala.runtime.AbstractPartialFunction") + @tu lazy val FunctionXXLClass: ClassSymbol = requiredClass("scala.runtime.FunctionXXL") + @tu lazy val ScalaSymbolClass: ClassSymbol = requiredClass("scala.Symbol") + @tu lazy val DynamicClass: ClassSymbol = requiredClass("scala.Dynamic") + @tu lazy val OptionClass: ClassSymbol = requiredClass("scala.Option") + @tu lazy val SomeClass: ClassSymbol = requiredClass("scala.Some") + @tu lazy val NoneModule: Symbol = requiredModule("scala.None") + + @tu lazy val EnumClass: ClassSymbol = requiredClass("scala.reflect.Enum") + @tu lazy val Enum_ordinal: Symbol = EnumClass.requiredMethod(nme.ordinal) + + @tu lazy val EnumValueSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.EnumValueSerializationProxy") + @tu lazy val EnumValueSerializationProxyConstructor: TermSymbol = + EnumValueSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty), IntType)) + + @tu lazy val ProductClass: ClassSymbol = requiredClass("scala.Product") + @tu lazy val Product_canEqual : Symbol = ProductClass.requiredMethod(nme.canEqual_) + @tu lazy val Product_productArity : Symbol = ProductClass.requiredMethod(nme.productArity) + @tu lazy val Product_productElement : Symbol = ProductClass.requiredMethod(nme.productElement) + @tu lazy val Product_productElementName: Symbol = ProductClass.requiredMethod(nme.productElementName) + @tu lazy val Product_productPrefix : Symbol = ProductClass.requiredMethod(nme.productPrefix) + + @tu lazy val IteratorClass: ClassSymbol = requiredClass("scala.collection.Iterator") + def IteratorModule(using Context): Symbol = IteratorClass.companionModule 
+ + @tu lazy val ModuleSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.ModuleSerializationProxy") + @tu lazy val ModuleSerializationProxyConstructor: TermSymbol = + ModuleSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty))) + + @tu lazy val MirrorClass: ClassSymbol = requiredClass("scala.deriving.Mirror") + @tu lazy val Mirror_ProductClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Product") + @tu lazy val Mirror_Product_fromProduct: Symbol = Mirror_ProductClass.requiredMethod(nme.fromProduct) + @tu lazy val Mirror_SumClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Sum") + @tu lazy val Mirror_SingletonClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Singleton") + @tu lazy val Mirror_SingletonProxyClass: ClassSymbol = requiredClass("scala.deriving.Mirror.SingletonProxy") + + @tu lazy val LanguageModule: Symbol = requiredModule("scala.language") + @tu lazy val LanguageModuleClass: Symbol = LanguageModule.moduleClass.asClass + @tu lazy val LanguageExperimentalModule: Symbol = requiredModule("scala.language.experimental") + @tu lazy val LanguageDeprecatedModule: Symbol = requiredModule("scala.language.deprecated") + @tu lazy val NonLocalReturnControlClass: ClassSymbol = requiredClass("scala.runtime.NonLocalReturnControl") + @tu lazy val SelectableClass: ClassSymbol = requiredClass("scala.Selectable") + @tu lazy val WithoutPreciseParameterTypesClass: Symbol = requiredClass("scala.Selectable.WithoutPreciseParameterTypes") + + @tu lazy val ManifestClass: ClassSymbol = requiredClass("scala.reflect.Manifest") + @tu lazy val ManifestFactoryModule: Symbol = requiredModule("scala.reflect.ManifestFactory") + @tu lazy val ClassManifestFactoryModule: Symbol = requiredModule("scala.reflect.ClassManifestFactory") + @tu lazy val OptManifestClass: ClassSymbol = requiredClass("scala.reflect.OptManifest") + @tu lazy val NoManifestModule: Symbol = requiredModule("scala.reflect.NoManifest") + + @tu lazy 
val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass + @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") + @tu lazy val ClassTagModule: Symbol = ClassTagClass.companionModule + @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) + + @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") + @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) + @tu lazy val TypeTestModule_identity: Symbol = TypeTestClass.companionModule.requiredMethod(nme.identity) + + @tu lazy val QuotedExprClass: ClassSymbol = requiredClass("scala.quoted.Expr") + + @tu lazy val QuotesClass: ClassSymbol = requiredClass("scala.quoted.Quotes") + @tu lazy val Quotes_reflect: Symbol = QuotesClass.requiredValue("reflect") + @tu lazy val Quotes_reflect_asTerm: Symbol = Quotes_reflect.requiredMethod("asTerm") + @tu lazy val Quotes_reflect_Apply: Symbol = Quotes_reflect.requiredValue("Apply") + @tu lazy val Quotes_reflect_Apply_apply: Symbol = Quotes_reflect_Apply.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_TypeApply: Symbol = Quotes_reflect.requiredValue("TypeApply") + @tu lazy val Quotes_reflect_TypeApply_apply: Symbol = Quotes_reflect_TypeApply.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Assign: Symbol = Quotes_reflect.requiredValue("Assign") + @tu lazy val Quotes_reflect_Assign_apply: Symbol = Quotes_reflect_Assign.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Inferred: Symbol = Quotes_reflect.requiredValue("Inferred") + @tu lazy val Quotes_reflect_Inferred_apply: Symbol = Quotes_reflect_Inferred.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Literal: Symbol = Quotes_reflect.requiredValue("Literal") + @tu lazy val Quotes_reflect_Literal_apply: Symbol = Quotes_reflect_Literal.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_TreeMethods: Symbol = Quotes_reflect.requiredMethod("TreeMethods") + @tu lazy 
val Quotes_reflect_TreeMethods_asExpr: Symbol = Quotes_reflect_TreeMethods.requiredMethod("asExpr") + @tu lazy val Quotes_reflect_TypeRepr: Symbol = Quotes_reflect.requiredValue("TypeRepr") + @tu lazy val Quotes_reflect_TypeRepr_of: Symbol = Quotes_reflect_TypeRepr.requiredMethod("of") + @tu lazy val Quotes_reflect_TypeRepr_typeConstructorOf: Symbol = Quotes_reflect_TypeRepr.requiredMethod("typeConstructorOf") + @tu lazy val Quotes_reflect_TypeReprMethods: Symbol = Quotes_reflect.requiredValue("TypeReprMethods") + @tu lazy val Quotes_reflect_TypeReprMethods_asType: Symbol = Quotes_reflect_TypeReprMethods.requiredMethod("asType") + @tu lazy val Quotes_reflect_TypeTreeType: Symbol = Quotes_reflect.requiredType("TypeTree") + @tu lazy val Quotes_reflect_TermType: Symbol = Quotes_reflect.requiredType("Term") + @tu lazy val Quotes_reflect_BooleanConstant: Symbol = Quotes_reflect.requiredValue("BooleanConstant") + @tu lazy val Quotes_reflect_ByteConstant: Symbol = Quotes_reflect.requiredValue("ByteConstant") + @tu lazy val Quotes_reflect_ShortConstant: Symbol = Quotes_reflect.requiredValue("ShortConstant") + @tu lazy val Quotes_reflect_IntConstant: Symbol = Quotes_reflect.requiredValue("IntConstant") + @tu lazy val Quotes_reflect_LongConstant: Symbol = Quotes_reflect.requiredValue("LongConstant") + @tu lazy val Quotes_reflect_FloatConstant: Symbol = Quotes_reflect.requiredValue("FloatConstant") + @tu lazy val Quotes_reflect_DoubleConstant: Symbol = Quotes_reflect.requiredValue("DoubleConstant") + @tu lazy val Quotes_reflect_CharConstant: Symbol = Quotes_reflect.requiredValue("CharConstant") + @tu lazy val Quotes_reflect_StringConstant: Symbol = Quotes_reflect.requiredValue("StringConstant") + @tu lazy val Quotes_reflect_UnitConstant: Symbol = Quotes_reflect.requiredValue("UnitConstant") + @tu lazy val Quotes_reflect_NullConstant: Symbol = Quotes_reflect.requiredValue("NullConstant") + @tu lazy val Quotes_reflect_ClassOfConstant: Symbol = 
Quotes_reflect.requiredValue("ClassOfConstant") + + + @tu lazy val QuoteUnpicklerClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteUnpickler") + @tu lazy val QuoteUnpickler_unpickleExprV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleExprV2") + @tu lazy val QuoteUnpickler_unpickleTypeV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleTypeV2") + + @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") + @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") + @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") + + @tu lazy val ToExprModule: Symbol = requiredModule("scala.quoted.ToExpr") + @tu lazy val ToExprModule_BooleanToExpr: Symbol = ToExprModule.requiredMethod("BooleanToExpr") + @tu lazy val ToExprModule_ByteToExpr: Symbol = ToExprModule.requiredMethod("ByteToExpr") + @tu lazy val ToExprModule_ShortToExpr: Symbol = ToExprModule.requiredMethod("ShortToExpr") + @tu lazy val ToExprModule_IntToExpr: Symbol = ToExprModule.requiredMethod("IntToExpr") + @tu lazy val ToExprModule_LongToExpr: Symbol = ToExprModule.requiredMethod("LongToExpr") + @tu lazy val ToExprModule_FloatToExpr: Symbol = ToExprModule.requiredMethod("FloatToExpr") + @tu lazy val ToExprModule_DoubleToExpr: Symbol = ToExprModule.requiredMethod("DoubleToExpr") + @tu lazy val ToExprModule_CharToExpr: Symbol = ToExprModule.requiredMethod("CharToExpr") + @tu lazy val ToExprModule_StringToExpr: Symbol = ToExprModule.requiredMethod("StringToExpr") + + @tu lazy val QuotedRuntimeModule: Symbol = requiredModule("scala.quoted.runtime.Expr") + @tu lazy val QuotedRuntime_exprQuote : Symbol = QuotedRuntimeModule.requiredMethod("quote") + @tu lazy val QuotedRuntime_exprSplice : Symbol = QuotedRuntimeModule.requiredMethod("splice") + @tu lazy val QuotedRuntime_exprNestedSplice : Symbol = QuotedRuntimeModule.requiredMethod("nestedSplice") + + @tu lazy val 
QuotedRuntime_SplicedTypeAnnot: ClassSymbol = requiredClass("scala.quoted.runtime.SplicedType") + + @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") + @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") + @tu lazy val QuotedRuntimePatterns_patternHigherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHigherOrderHole") + @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") + @tu lazy val QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") + @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") + + @tu lazy val QuotedTypeClass: ClassSymbol = requiredClass("scala.quoted.Type") + @tu lazy val QuotedType_splice: Symbol = QuotedTypeClass.requiredType(tpnme.Underlying) + + @tu lazy val QuotedTypeModule: Symbol = QuotedTypeClass.companionModule + @tu lazy val QuotedTypeModule_of: Symbol = QuotedTypeModule.requiredMethod("of") + + @tu lazy val CanEqualClass: ClassSymbol = getClassIfDefined("scala.Eql").orElse(requiredClass("scala.CanEqual")).asClass + def CanEqual_canEqualAny(using Context): TermSymbol = + val methodName = if CanEqualClass.name == tpnme.Eql then nme.eqlAny else nme.canEqualAny + CanEqualClass.companionModule.requiredMethod(methodName) + + @tu lazy val CanThrowClass: ClassSymbol = requiredClass("scala.CanThrow") + @tu lazy val throwsAlias: Symbol = ScalaRuntimePackageVal.requiredType(tpnme.THROWS) + + @tu lazy val TypeBoxClass: ClassSymbol = requiredClass("scala.runtime.TypeBox") + @tu lazy val TypeBox_CAP: TypeSymbol = TypeBoxClass.requiredType(tpnme.CAP) + + @tu lazy val MatchCaseClass: ClassSymbol = requiredClass("scala.runtime.MatchCase") + @tu lazy val NotGivenClass: ClassSymbol = requiredClass("scala.util.NotGiven") + @tu lazy val NotGiven_value: Symbol = 
NotGivenClass.companionModule.requiredMethod(nme.value) + + @tu lazy val ValueOfClass: ClassSymbol = requiredClass("scala.ValueOf") + + @tu lazy val FromDigitsClass: ClassSymbol = requiredClass("scala.util.FromDigits") + @tu lazy val FromDigits_WithRadixClass: ClassSymbol = requiredClass("scala.util.FromDigits.WithRadix") + @tu lazy val FromDigits_DecimalClass: ClassSymbol = requiredClass("scala.util.FromDigits.Decimal") + @tu lazy val FromDigits_FloatingClass: ClassSymbol = requiredClass("scala.util.FromDigits.Floating") + + @tu lazy val XMLTopScopeModule: Symbol = requiredModule("scala.xml.TopScope") + + @tu lazy val MainAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MainAnnotation") + @tu lazy val MainAnnotationInfo: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Info") + @tu lazy val MainAnnotationParameter: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Parameter") + @tu lazy val MainAnnotationParameterAnnotation: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.ParameterAnnotation") + @tu lazy val MainAnnotationCommand: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Command") + + @tu lazy val CommandLineParserModule: Symbol = requiredModule("scala.util.CommandLineParser") + @tu lazy val CLP_ParseError: ClassSymbol = CommandLineParserModule.requiredClass("ParseError").typeRef.symbol.asClass + @tu lazy val CLP_parseArgument: Symbol = CommandLineParserModule.requiredMethod("parseArgument") + @tu lazy val CLP_parseRemainingArguments: Symbol = CommandLineParserModule.requiredMethod("parseRemainingArguments") + @tu lazy val CLP_showError: Symbol = CommandLineParserModule.requiredMethod("showError") + + @tu lazy val TupleTypeRef: TypeRef = requiredClassRef("scala.Tuple") + def TupleClass(using Context): ClassSymbol = TupleTypeRef.symbol.asClass + @tu lazy val Tuple_cons: Symbol = TupleClass.requiredMethod("*:") + @tu lazy val EmptyTupleModule: Symbol = requiredModule("scala.EmptyTuple") + @tu lazy 
val NonEmptyTupleTypeRef: TypeRef = requiredClassRef("scala.NonEmptyTuple") + def NonEmptyTupleClass(using Context): ClassSymbol = NonEmptyTupleTypeRef.symbol.asClass + lazy val NonEmptyTuple_tail: Symbol = NonEmptyTupleClass.requiredMethod("tail") + @tu lazy val PairClass: ClassSymbol = requiredClass("scala.*:") + + @tu lazy val TupleXXLClass: ClassSymbol = requiredClass("scala.runtime.TupleXXL") + def TupleXXLModule(using Context): Symbol = TupleXXLClass.companionModule + + def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") + + @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") + + @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") + @tu lazy val RuntimeTuplesModuleClass: Symbol = RuntimeTuplesModule.moduleClass + @tu lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") + @tu lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") + @tu lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") + @tu lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") + @tu lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") + @tu lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") + @tu lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") + @tu lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") + @tu lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") + @tu lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") + @tu lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") + @tu lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = 
RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") + + @tu lazy val TupledFunctionTypeRef: TypeRef = requiredClassRef("scala.util.TupledFunction") + def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass + def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") + + @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") + @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") + @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") + @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + + // Annotation base classes + @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") + @tu lazy val ClassfileAnnotationClass: ClassSymbol = requiredClass("scala.annotation.ClassfileAnnotation") + @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") + @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") + + // Annotation classes + @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") + @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") + @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") + @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") + @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") + @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") + @tu lazy val ContextResultCountAnnot: ClassSymbol = 
requiredClass("scala.annotation.internal.ContextResultCount") + @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") + @tu lazy val DeprecatedAnnot: ClassSymbol = requiredClass("scala.deprecated") + @tu lazy val DeprecatedOverridingAnnot: ClassSymbol = requiredClass("scala.deprecatedOverriding") + @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") + @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") + @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") + @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") + @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") + @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") + @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") + @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") + @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") + @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") + @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") + @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") + @tu lazy val SourceFileAnnot: ClassSymbol = requiredClass("scala.annotation.internal.SourceFile") + @tu lazy val ScalaSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaSignature") + @tu lazy val ScalaLongSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaLongSignature") + @tu lazy val ScalaStrictFPAnnot: ClassSymbol = requiredClass("scala.annotation.strictfp") + @tu lazy val ScalaStaticAnnot: ClassSymbol = requiredClass("scala.annotation.static") + @tu 
lazy val SerialVersionUIDAnnot: ClassSymbol = requiredClass("scala.SerialVersionUID") + @tu lazy val TailrecAnnot: ClassSymbol = requiredClass("scala.annotation.tailrec") + @tu lazy val ThreadUnsafeAnnot: ClassSymbol = requiredClass("scala.annotation.threadUnsafe") + @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") + @tu lazy val CompileTimeOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.compileTimeOnly") + @tu lazy val SwitchAnnot: ClassSymbol = requiredClass("scala.annotation.switch") + @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") + @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") + @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") + @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") + @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") + @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") + @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") + @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") + @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") + @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") + @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") + @tu lazy val SetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.setter") + @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") + @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") + @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") + @tu lazy val VarargsAnnot: ClassSymbol = 
requiredClass("scala.annotation.varargs") + @tu lazy val SinceAnnot: ClassSymbol = requiredClass("scala.annotation.since") + @tu lazy val RequiresCapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.internal.requiresCapability") + @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") + @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") + + @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") + + // A list of meta-annotations that are relevant for fields and accessors + @tu lazy val FieldAccessorMetaAnnots: Set[Symbol] = + Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) + + // A list of annotations that are commonly used to indicate that a field/method argument or return + // type is not null. These annotations are used by the nullification logic in JavaNullInterop to + // improve the precision of type nullification. + // We don't require that any of these annotations be present in the class path, but we want to + // create Symbols for the ones that are present, so they can be checked during nullification. 
+ @tu lazy val NotNullAnnots: List[ClassSymbol] = getClassesIfDefined( + "javax.annotation.Nonnull" :: + "javax.validation.constraints.NotNull" :: + "androidx.annotation.NonNull" :: + "android.support.annotation.NonNull" :: + "android.annotation.NonNull" :: + "com.android.annotations.NonNull" :: + "org.eclipse.jdt.annotation.NonNull" :: + "edu.umd.cs.findbugs.annotations.NonNull" :: + "org.checkerframework.checker.nullness.qual.NonNull" :: + "org.checkerframework.checker.nullness.compatqual.NonNullDecl" :: + "org.jetbrains.annotations.NotNull" :: + "org.springframework.lang.NonNull" :: + "org.springframework.lang.NonNullApi" :: + "org.springframework.lang.NonNullFields" :: + "lombok.NonNull" :: + "reactor.util.annotation.NonNull" :: + "reactor.util.annotation.NonNullApi" :: + "io.reactivex.annotations.NonNull" :: Nil) + + // convenient one-parameter method types + def methOfAny(tp: Type): MethodType = MethodType(List(AnyType), tp) + def methOfAnyVal(tp: Type): MethodType = MethodType(List(AnyValType), tp) + def methOfAnyRef(tp: Type): MethodType = MethodType(List(ObjectType), tp) + + // Derived types + + def RepeatedParamType: TypeRef = RepeatedParamClass.typeRef + + def ClassType(arg: Type)(using Context): Type = { + val ctype = ClassClass.typeRef + if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg) + } + + /** The enumeration type, given a value of the enumeration */ + def EnumType(sym: Symbol)(using Context): TypeRef = + // given (in java): "class A { enum E { VAL1 } }" + // - sym: the symbol of the actual enumeration value (VAL1) + // - .owner: the ModuleClassSymbol of the enumeration (object E) + // - .linkedClass: the ClassSymbol of the enumeration (class E) + sym.owner.linkedClass.typeRef + + object FunctionOf { + def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type = + FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil) + def unapply(ft: 
Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = { + val tsym = ft.typeSymbol + if isFunctionClass(tsym) && ft.isRef(tsym) then + val targs = ft.dealias.argInfos + if (targs.isEmpty) None + else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction) + else None + } + } + + object PartialFunctionOf { + def apply(arg: Type, result: Type)(using Context): Type = + PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil) + def unapply(pft: Type)(using Context): Option[(Type, List[Type])] = + if (pft.isRef(PartialFunctionClass)) { + val targs = pft.dealias.argInfos + if (targs.length == 2) Some((targs.head, targs.tail)) else None + } + else None + } + + object ArrayOf { + def apply(elem: Type)(using Context): Type = + if (ctx.erasedTypes) JavaArrayType(elem) + else ArrayType.appliedTo(elem :: Nil) + def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match { + case AppliedType(at, arg :: Nil) if at.isRef(ArrayType.symbol) => Some(arg) + case JavaArrayType(tp) if ctx.erasedTypes => Some(tp) + case _ => None + } + } + + object MatchCase { + def apply(pat: Type, body: Type)(using Context): Type = + MatchCaseClass.typeRef.appliedTo(pat, body) + def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match { + case AppliedType(tycon, pat :: body :: Nil) if tycon.isRef(MatchCaseClass) => + Some((pat, body)) + case _ => + None + } + def isInstance(tp: Type)(using Context): Boolean = tp match { + case AppliedType(tycon: TypeRef, _) => + tycon.name == tpnme.MatchCase && // necessary pre-filter to avoid forcing symbols + tycon.isRef(MatchCaseClass) + case _ => false + } + } + + /** An extractor for multi-dimensional arrays. + * Note that this will also extract the high bound if an + * element type is a wildcard upper-bounded by an array. E.g. + * + * Array[? <: Array[? 
<: Number]] + * + * would match + * + * MultiArrayOf(, 2) + */ + object MultiArrayOf { + def apply(elem: Type, ndims: Int)(using Context): Type = + if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1)) + def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match { + case ArrayOf(elemtp) => + def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match { + case tp @ TypeBounds(lo, hi @ MultiArrayOf(finalElemTp, n)) => + Some(finalElemTp, n) + case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1) + case _ => Some(elemtp, 1) + } + recur(elemtp) + case _ => + None + } + } + + /** Extractor for context function types representing by-name parameters, of the form + * `() ?=> T`. + * Under purefunctions, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. + */ + object ByNameFunction: + def apply(tp: Type)(using Context): Type = tp match + case tp @ EventuallyCapturingType(tp1, refs) if tp.annot.symbol == RetainsByNameAnnot => + CapturingType(apply(tp1), refs) + case _ => + defn.ContextFunction0.typeRef.appliedTo(tp :: Nil) + def unapply(tp: Type)(using Context): Option[Type] = tp match + case tp @ AppliedType(tycon, arg :: Nil) if defn.isByNameFunctionClass(tycon.typeSymbol) => + Some(arg) + case tp @ AnnotatedType(parent, _) => + unapply(parent) + case _ => + None + + final def isByNameFunctionClass(sym: Symbol): Boolean = + sym eq ContextFunction0 + + def isByNameFunction(tp: Type)(using Context): Boolean = tp match + case ByNameFunction(_) => true + case _ => false + + final def isCompiletime_S(sym: Symbol)(using Context): Boolean = + sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass + + private val compiletimePackageAnyTypes: Set[Name] = Set( + tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString + ) + private val compiletimePackageNumericTypes: Set[Name] = Set( + tpnme.Plus, tpnme.Minus, tpnme.Times, tpnme.Div, tpnme.Mod, + tpnme.Lt, tpnme.Gt, tpnme.Ge, tpnme.Le, + tpnme.Abs, tpnme.Negate, tpnme.Min, tpnme.Max + ) 
+ private val compiletimePackageIntTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToString, // ToString is moved to ops.any and deprecated for ops.int + tpnme.NumberOfLeadingZeros, tpnme.ToLong, tpnme.ToFloat, tpnme.ToDouble, + tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR + ) + private val compiletimePackageLongTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.NumberOfLeadingZeros, tpnme.ToInt, tpnme.ToFloat, tpnme.ToDouble, + tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR + ) + private val compiletimePackageFloatTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToInt, tpnme.ToLong, tpnme.ToDouble + ) + private val compiletimePackageDoubleTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToInt, tpnme.ToLong, tpnme.ToFloat + ) + private val compiletimePackageBooleanTypes: Set[Name] = Set(tpnme.Not, tpnme.Xor, tpnme.And, tpnme.Or) + private val compiletimePackageStringTypes: Set[Name] = Set( + tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt + ) + private val compiletimePackageOpTypes: Set[Name] = + Set(tpnme.S) + ++ compiletimePackageAnyTypes + ++ compiletimePackageIntTypes + ++ compiletimePackageLongTypes + ++ compiletimePackageFloatTypes + ++ compiletimePackageDoubleTypes + ++ compiletimePackageBooleanTypes + ++ compiletimePackageStringTypes + + final def isCompiletimeAppliedType(sym: Symbol)(using Context): Boolean = + compiletimePackageOpTypes.contains(sym.name) + && ( + isCompiletime_S(sym) + || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) + || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) + || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) + || sym.owner == CompiletimeOpsFloatModuleClass && compiletimePackageFloatTypes.contains(sym.name) + || sym.owner 
== CompiletimeOpsDoubleModuleClass && compiletimePackageDoubleTypes.contains(sym.name) + || sym.owner == CompiletimeOpsBooleanModuleClass && compiletimePackageBooleanTypes.contains(sym.name) + || sym.owner == CompiletimeOpsStringModuleClass && compiletimePackageStringTypes.contains(sym.name) + ) + + // ----- Scala-2 library patches -------------------------------------- + + /** The `scala.runtime.stdLibPatches` package contains objects + * that contain definitions that get added as members to standard library + * objects with the same name. + */ + @tu lazy val StdLibPatchesPackage: TermSymbol = requiredPackage("scala.runtime.stdLibPatches") + @tu private lazy val ScalaPredefModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.Predef").moduleClass + @tu private lazy val LanguageModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.language").moduleClass + + /** If `sym` is a patched library class, the source file of its patch class, + * otherwise `NoSource` + */ + def patchSource(sym: Symbol)(using Context): SourceFile = + if sym == ScalaPredefModuleClass then ScalaPredefModuleClassPatch.source + else if sym == LanguageModuleClass then LanguageModuleClassPatch.source + else NoSource + + /** A finalizer that patches standard library classes. + * It copies all non-private, non-synthetic definitions from `patchCls` + * to `denot` while changing their owners to `denot`. Before that it deletes + * any definitions of `denot` that have the same name as one of the copied + * definitions. + * + * If an object is present in both the original class and the patch class, + * it is not overwritten. Instead its members are copied recursively. + * + * To avoid running into cycles on bootstrap, patching happens only if `patchCls` + * is read from a classfile. 
+ */ + def patchStdLibClass(denot: ClassDenotation)(using Context): Unit = + def patch2(denot: ClassDenotation, patchCls: Symbol): Unit = + val scope = denot.info.decls.openForMutations + + def recurse(patch: Symbol) = patch.is(Module) && scope.lookup(patch.name).exists + + def makeClassSymbol(patch: Symbol, parents: List[Type], selfInfo: TypeOrSymbol) = + newClassSymbol( + owner = denot.symbol, + name = patch.name.asTypeName, + flags = patch.flags, + // need to rebuild a fresh ClassInfo + infoFn = cls => ClassInfo( + prefix = denot.symbol.thisType, + cls = cls, + declaredParents = parents, // assume parents in patch don't refer to symbols in the patch + decls = newScope, + selfInfo = + if patch.is(Module) + then TermRef(denot.symbol.thisType, patch.name.sourceModuleName) + else selfInfo // assume patch self type annotation does not refer to symbols in the patch + ), + privateWithin = patch.privateWithin, + coord = denot.symbol.coord, + assocFile = denot.symbol.associatedFile + ) + + def makeNonClassSymbol(patch: Symbol) = + if patch.is(Inline) then + // Inline symbols contain trees in annotations, which is coupled + // with the underlying symbol. + // Changing owner for inline symbols is a simple workaround. 
+ patch.denot = patch.denot.copySymDenotation(owner = denot.symbol) + patch + else + // change `info` which might contain reference to the patch + patch.copy( + owner = denot.symbol, + info = + if patch.is(Module) + then TypeRef(denot.symbol.thisType, patch.name.moduleClassName) + else patch.info // assume non-object info does not refer to symbols in the patch + ) + + if patchCls.exists then + val patches = patchCls.info.decls.filter(patch => + !patch.isConstructor && !patch.isOneOf(PrivateOrSynthetic)) + for patch <- patches if !recurse(patch) do + val e = scope.lookupEntry(patch.name) + if e != null then scope.unlink(e) + for patch <- patches do + patch.ensureCompleted() + if !recurse(patch) then + val sym = + patch.info match + case ClassInfo(_, _, parents, _, selfInfo) => + makeClassSymbol(patch, parents, selfInfo) + case _ => + makeNonClassSymbol(patch) + end match + sym.annotations = patch.annotations + scope.enter(sym) + if patch.isClass then + patch2(scope.lookup(patch.name).asClass, patch) + + def patchWith(patchCls: Symbol) = + denot.sourceModule.info = denot.typeRef // we run into a cyclic reference when patching if this line is omitted + patch2(denot, patchCls) + + if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then + patchWith(ScalaPredefModuleClassPatch) + else if denot.name == tpnme.language.moduleClassName && denot.symbol == LanguageModuleClass then + patchWith(LanguageModuleClassPatch) + end patchStdLibClass + + // ----- Symbol sets --------------------------------------------------- + + @tu lazy val topClasses: Set[Symbol] = Set(AnyClass, MatchableClass, ObjectClass, AnyValClass) + + @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] + val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new 
PerRun(AbstractFunctionType.map(_.symbol.asClass)) + def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) + + @tu lazy val caseClassSynthesized: List[Symbol] = List( + Any_hashCode, Any_equals, Any_toString, Product_canEqual, Product_productArity, + Product_productPrefix, Product_productElement, Product_productElementName) + + val LazyHolder: PerRun[Map[Symbol, Symbol]] = new PerRun({ + def holderImpl(holderType: String) = requiredClass("scala.runtime." + holderType) + Map[Symbol, Symbol]( + IntClass -> holderImpl("LazyInt"), + LongClass -> holderImpl("LazyLong"), + BooleanClass -> holderImpl("LazyBoolean"), + FloatClass -> holderImpl("LazyFloat"), + DoubleClass -> holderImpl("LazyDouble"), + ByteClass -> holderImpl("LazyByte"), + CharClass -> holderImpl("LazyChar"), + ShortClass -> holderImpl("LazyShort") + ) + .withDefaultValue(holderImpl("LazyRef")) + }) + + @tu lazy val TupleType: Array[TypeRef | Null] = mkArityArray("scala.Tuple", MaxTupleArity, 1) + + def isSpecializedTuple(cls: Symbol)(using Context): Boolean = + cls.isClass && TupleSpecializedClasses.exists(tupleCls => cls.name.isSpecializedNameOf(tupleCls.name)) + + def SpecializedTuple(base: Symbol, args: List[Type])(using Context): Symbol = + base.owner.requiredClass(base.name.specializedName(args)) + + /** Cached function types of arbitrary arities. + * Function types are created on demand with newFunctionNTrait, which is + * called from a synthesizer installed in ScalaPackageClass. 
+ */ + private class FunType(prefix: String): + private var classRefs: Array[TypeRef | Null] = new Array(22) + def apply(n: Int): TypeRef = + while n >= classRefs.length do + val classRefs1 = new Array[TypeRef | Null](classRefs.length * 2) + Array.copy(classRefs, 0, classRefs1, 0, classRefs.length) + classRefs = classRefs1 + val funName = s"scala.$prefix$n" + if classRefs(n) == null then + classRefs(n) = + if prefix.startsWith("Impure") + then staticRef(funName.toTypeName).symbol.typeRef + else requiredClassRef(funName) + classRefs(n).nn + end FunType + + private def funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int = + (if isContextual then 1 else 0) + + (if isErased then 2 else 0) + + (if isImpure then 4 else 0) + + private val funTypeArray: IArray[FunType] = + val arr = Array.ofDim[FunType](8) + val choices = List(false, true) + for contxt <- choices; erasd <- choices; impure <- choices do + var str = "Function" + if contxt then str = "Context" + str + if erasd then str = "Erased" + str + if impure then str = "Impure" + str + arr(funTypeIdx(contxt, erasd, impure)) = FunType(str) + IArray.unsafeFromArray(arr) + + def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = + funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol + + @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) + @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) + + @tu lazy val Function0: Symbol = FunctionSymbol(0) + @tu lazy val Function1: Symbol = FunctionSymbol(1) + @tu lazy val Function2: Symbol = FunctionSymbol(2) + @tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true) + + def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = + FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, 
isImpure).typeRef + + lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") + def PolyFunctionType = PolyFunctionClass.typeRef + + /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ + def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match + case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => + clsd.name.asInstanceOf[TypeName] + case _ => + EmptyTypeName + + /** If type `ref` refers to a class in the scala package, its name, otherwise EmptyTypeName */ + def scalaClassName(ref: Type)(using Context): TypeName = scalaClassName(ref.classSymbol) + + private def isVarArityClass(cls: Symbol, prefix: String) = + cls.isClass + && cls.owner.eq(ScalaPackageClass) + && cls.name.testSimple(name => + name.startsWith(prefix) + && name.length > prefix.length + && digitsOnlyAfter(name, prefix.length)) + + private def digitsOnlyAfter(name: SimpleName, idx: Int): Boolean = + idx == name.length || name(idx).isDigit && digitsOnlyAfter(name, idx + 1) + + def isBottomClass(cls: Symbol): Boolean = + if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes + then cls == NothingClass + else isBottomClassAfterErasure(cls) + + def isBottomClassAfterErasure(cls: Symbol): Boolean = cls == NothingClass || cls == NullClass + + /** Is any function class where + * - FunctionXXL + * - FunctionN for N >= 0 + * - ContextFunctionN for N >= 0 + * - ErasedFunctionN for N > 0 + * - ErasedContextFunctionN for N > 0 + */ + def isFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isFunction + + /** Is a function class, or an impure function type alias */ + def isFunctionSymbol(sym: Symbol): Boolean = + sym.isType && (sym.owner eq ScalaPackageClass) && sym.name.isFunction + + /** Is a function class where + * - FunctionN for N >= 0 and N != XXL + */ + def isPlainFunctionClass(cls: Symbol) = isVarArityClass(cls, str.Function) + + /** Is an context function class. 
+ * - ContextFunctionN for N >= 0 + * - ErasedContextFunctionN for N > 0 + */ + def isContextFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isContextFunction + + /** Is an erased function class. + * - ErasedFunctionN for N > 0 + * - ErasedContextFunctionN for N > 0 + */ + def isErasedFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isErasedFunction + + /** Is either FunctionXXL or a class that will be erased to FunctionXXL + * - FunctionXXL + * - FunctionN for N >= 22 + * - ContextFunctionN for N >= 22 + */ + def isXXLFunctionClass(cls: Symbol): Boolean = { + val name = scalaClassName(cls) + (name eq tpnme.FunctionXXL) || name.functionArity > MaxImplementedFunctionArity + } + + /** Is a synthetic function class + * - FunctionN for N > 22 + * - ContextFunctionN for N >= 0 + * - ErasedFunctionN for N > 0 + * - ErasedContextFunctionN for N > 0 + */ + def isSyntheticFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isSyntheticFunction + + def isAbstractFunctionClass(cls: Symbol): Boolean = isVarArityClass(cls, str.AbstractFunction) + def isTupleClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Tuple) + def isProductClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Product) + + def isBoxedUnitClass(cls: Symbol): Boolean = + cls.isClass && (cls.owner eq ScalaRuntimePackageClass) && cls.name == tpnme.BoxedUnit + + /** Returns the erased type of the function class `cls` + * - FunctionN for N > 22 becomes FunctionXXL + * - FunctionN for 22 > N >= 0 remains as FunctionN + * - ContextFunctionN for N > 22 becomes FunctionXXL + * - ContextFunctionN for N <= 22 becomes FunctionN + * - ErasedFunctionN becomes Function0 + * - ImplicitErasedFunctionN becomes Function0 + * - anything else becomes a NoType + */ + def functionTypeErasure(cls: Symbol): Type = + val arity = scalaClassName(cls).functionArity + if cls.name.isErasedFunction then FunctionType(0) + else if arity > 22 then FunctionXXLClass.typeRef + else if arity >= 0 then 
FunctionType(arity) + else NoType + + private val JavaImportFns: List[RootRef] = List( + RootRef(() => JavaLangPackageVal.termRef) + ) + + private val ScalaImportFns: List[RootRef] = + JavaImportFns :+ + RootRef(() => ScalaPackageVal.termRef) + + private val PredefImportFns: RootRef = + RootRef(() => ScalaPredefModule.termRef, isPredef=true) + + @tu private lazy val JavaRootImportFns: List[RootRef] = + if ctx.settings.YnoImports.value then Nil + else JavaImportFns + + @tu private lazy val ScalaRootImportFns: List[RootRef] = + if ctx.settings.YnoImports.value then Nil + else if ctx.settings.YnoPredef.value then ScalaImportFns + else ScalaImportFns :+ PredefImportFns + + @tu private lazy val JavaRootImportTypes: List[TermRef] = JavaRootImportFns.map(_.refFn()) + @tu private lazy val ScalaRootImportTypes: List[TermRef] = ScalaRootImportFns.map(_.refFn()) + @tu private lazy val JavaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(JavaRootImportTypes) + @tu private lazy val ScalaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(ScalaRootImportTypes) + + /** Are we compiling a java source file? 
*/ + private def isJavaContext(using Context): Boolean = + ctx.compilationUnit.isJava + + private def unqualifiedTypes(refs: List[TermRef]) = + val types = refs.toSet[NamedType] + types ++ types.map(_.symbol.moduleClass.typeRef) + + /** Lazy references to the root imports */ + def rootImportFns(using Context): List[RootRef] = + if isJavaContext then JavaRootImportFns + else ScalaRootImportFns + + /** Root types imported by default */ + def rootImportTypes(using Context): List[TermRef] = + if isJavaContext then JavaRootImportTypes + else ScalaRootImportTypes + + /** Modules whose members are in the default namespace and their module classes */ + def unqualifiedOwnerTypes(using Context): Set[NamedType] = + if isJavaContext then JavaUnqualifiedOwnerTypes + else ScalaUnqualifiedOwnerTypes + + /** Names of the root import symbols that can be hidden by other imports */ + @tu lazy val ShadowableImportNames: Set[TermName] = Set("Predef".toTermName) + + /** Class symbols for which no class exist at runtime */ + @tu lazy val NotRuntimeClasses: Set[Symbol] = Set(AnyClass, MatchableClass, AnyValClass, NullClass, NothingClass) + + @tu lazy val SpecialClassTagClasses: Set[Symbol] = Set(UnitClass, AnyClass, AnyValClass) + + @tu lazy val SpecialManifestClasses: Set[Symbol] = Set(AnyClass, AnyValClass, ObjectClass, NullClass, NothingClass) + + /** Classes that are known not to have an initializer irrespective of + * whether NoInits is set. Note: FunctionXXLClass is in this set + * because if it is compiled by Scala2, it does not get a NoInit flag. + * But since it is introduced only at erasure, there's no chance + * for augmentScala2Traits to do anything on a class that inherits it. So + * it also misses an implementation class, which means that the usual scheme + * of calling a superclass init in the implementation class of a Scala2 + * trait gets screwed up. Therefore, it is mandatory that FunctionXXL + * is treated as a NoInit trait. 
+ */ + @tu lazy val NoInitClasses: Set[Symbol] = NotRuntimeClasses + FunctionXXLClass + + def isPolymorphicAfterErasure(sym: Symbol): Boolean = + (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized) + + /** Is this type a `TupleN` type? + * + * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` + */ + def isTupleNType(tp: Type)(using Context): Boolean = { + val tp1 = tp.dealias + val arity = tp1.argInfos.length + arity <= MaxTupleArity && { + val tupletp = TupleType(arity) + tupletp != null && tp1.isRef(tupletp.symbol) + } + } + + def tupleType(elems: List[Type]): Type = { + val arity = elems.length + if 0 < arity && arity <= MaxTupleArity then + val tupletp = TupleType(arity) + if tupletp != null then tupletp.appliedTo(elems) + else TypeOps.nestedPairs(elems) + else TypeOps.nestedPairs(elems) + } + + def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = { + @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match { + case _ if bound < 0 => Some(acc.reverse) + case tp: AppliedType if PairClass == tp.classSymbol => rec(tp.args(1), tp.args.head :: acc, bound - 1) + case tp: AppliedType if isTupleNType(tp) => Some(acc.reverse ::: tp.args) + case tp: TermRef if tp.symbol == defn.EmptyTupleModule => Some(acc.reverse) + case _ => None + } + rec(tp.stripTypeVar, Nil, bound) + } + + def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) + + /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN + * instance? 
+ */ + def isNonRefinedFunction(tp: Type)(using Context): Boolean = + val arity = functionArity(tp) + val sym = tp.dealias.typeSymbol + + arity >= 0 + && isFunctionClass(sym) + && tp.isRef( + FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol, + skipRefined = false) + end isNonRefinedFunction + + /** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */ + def isFunctionType(tp: Type)(using Context): Boolean = + isNonRefinedFunction(tp.dropDependentRefinement) + + def isFunctionOrPolyType(tp: Type)(using Context): Boolean = + isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass) + + private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = + for base <- bases; tp <- paramTypes do + cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp))) + cls + + @tu lazy val Tuple1: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple1"), List(nme._1), Tuple1SpecializedParamTypes) + @tu lazy val Tuple2: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple2"), List(nme._1, nme._2), Tuple2SpecializedParamTypes) + + @tu lazy val TupleSpecializedClasses: Set[Symbol] = Set(Tuple1, Tuple2) + @tu lazy val Tuple1SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType) + @tu lazy val Tuple2SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType, CharType, BooleanType) + @tu lazy val Tuple1SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple1SpecializedParamTypes.map(_.symbol)) + @tu lazy val Tuple2SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple2SpecializedParamTypes.map(_.symbol)) + + // Specialized type parameters defined for scala.Function{0,1,2}. 
+ @tu lazy val Function1SpecializedParamTypes: collection.Set[TypeRef] = + Set(IntType, LongType, FloatType, DoubleType) + @tu lazy val Function2SpecializedParamTypes: collection.Set[TypeRef] = + Set(IntType, LongType, DoubleType) + @tu lazy val Function0SpecializedReturnTypes: collection.Set[TypeRef] = + ScalaNumericValueTypeList.toSet + UnitType + BooleanType + @tu lazy val Function1SpecializedReturnTypes: collection.Set[TypeRef] = + Set(UnitType, BooleanType, IntType, FloatType, LongType, DoubleType) + @tu lazy val Function2SpecializedReturnTypes: collection.Set[TypeRef] = + Function1SpecializedReturnTypes + + @tu lazy val Function1SpecializedParamClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function1SpecializedParamTypes.map(_.symbol)) + @tu lazy val Function2SpecializedParamClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function2SpecializedParamTypes.map(_.symbol)) + @tu lazy val Function0SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function0SpecializedReturnTypes.map(_.symbol)) + @tu lazy val Function1SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function1SpecializedReturnTypes.map(_.symbol)) + @tu lazy val Function2SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function2SpecializedReturnTypes.map(_.symbol)) + + def isSpecializableTuple(base: Symbol, args: List[Type])(using Context): Boolean = + args.length <= 2 && base.isClass && TupleSpecializedClasses.exists(base.asClass.derivesFrom) && args.match + case List(x) => Tuple1SpecializedParamClasses().contains(x.classSymbol) + case List(x, y) => Tuple2SpecializedParamClasses().contains(x.classSymbol) && Tuple2SpecializedParamClasses().contains(y.classSymbol) + case _ => false + && base.owner.denot.info.member(base.name.specializedName(args)).exists // when dotc compiles the stdlib there are no specialised classes + + def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): 
Boolean = + paramTypes.length <= 2 + && (cls.derivesFrom(FunctionSymbol(paramTypes.length)) || isByNameFunctionClass(cls)) + && isSpecializableFunctionSAM(paramTypes, retType) + + /** If the Single Abstract Method of a Function class has this type, is it specializable? */ + def isSpecializableFunctionSAM(paramTypes: List[Type], retType: Type)(using Context): Boolean = + paramTypes.length <= 2 && (paramTypes match { + case Nil => + Function0SpecializedReturnClasses().contains(retType.typeSymbol) + case List(paramType0) => + Function1SpecializedParamClasses().contains(paramType0.typeSymbol) && + Function1SpecializedReturnClasses().contains(retType.typeSymbol) + case List(paramType0, paramType1) => + Function2SpecializedParamClasses().contains(paramType0.typeSymbol) && + Function2SpecializedParamClasses().contains(paramType1.typeSymbol) && + Function2SpecializedReturnClasses().contains(retType.typeSymbol) + case _ => + false + }) + + @tu lazy val Function0SpecializedApplyNames: collection.Set[TermName] = + for r <- Function0SpecializedReturnTypes + yield nme.apply.specializedFunction(r, Nil).asTermName + + @tu lazy val Function1SpecializedApplyNames: collection.Set[TermName] = + for + r <- Function1SpecializedReturnTypes + t1 <- Function1SpecializedParamTypes + yield + nme.apply.specializedFunction(r, List(t1)).asTermName + + @tu lazy val Function2SpecializedApplyNames: collection.Set[TermName] = + for + r <- Function2SpecializedReturnTypes + t1 <- Function2SpecializedParamTypes + t2 <- Function2SpecializedParamTypes + yield + nme.apply.specializedFunction(r, List(t1, t2)).asTermName + + @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = + Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames + + def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 + + /** Return underlying context function type (i.e. 
instance of a ContextFunctionN class) + * or NoType if none exists. The following types are considered as underlying types: + * - the alias of an alias type + * - the instance or origin of a TypeVar (i.e. the result of a stripTypeVar) + * - the upper bound of a TypeParamRef in the current constraint + */ + def asContextFunctionType(tp: Type)(using Context): Type = + tp.stripTypeVar.dealias match + case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => + asContextFunctionType(TypeComparer.bounds(tp1).hiBound) + case tp1 => + if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 + else NoType + + /** Is `tp` a context function type? */ + def isContextFunctionType(tp: Type)(using Context): Boolean = + asContextFunctionType(tp).exists + + /** An extractor for context function types `As ?=> B`, possibly with + * dependent refinements. Optionally returns a triple consisting of the argument + * types `As`, the result type `B` and whether the type is an erased context function. 
+ */ + object ContextFunctionType: + def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = + if ctx.erasedTypes then + atPhase(erasurePhase)(unapply(tp)) + else + val tp1 = asContextFunctionType(tp) + if tp1.exists then + val args = tp1.dropDependentRefinement.argInfos + Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) + else None + + def isErasedFunctionType(tp: Type)(using Context): Boolean = + tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) + + /** A whitelist of Scala-2 classes that are known to be pure */ + def isAssuredNoInits(sym: Symbol): Boolean = + (sym `eq` SomeClass) || isTupleClass(sym) + + /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ + def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { + if !isTupleClass(cls) then parents + else if tparams.isEmpty then parents :+ TupleTypeRef + else + assert(parents.head.typeSymbol == ObjectClass) + TypeOps.nestedPairs(tparams.map(_.typeRef)) :: parents.tail + } + + /** If it is BoxedUnit, remove `java.io.Serializable` from `parents`. */ + def adjustForBoxedUnit(cls: ClassSymbol, parents: List[Type]): List[Type] = + if (isBoxedUnitClass(cls)) parents.filter(_.typeSymbol != JavaSerializableClass) + else parents + + private val HasProblematicGetClass: Set[Name] = Set( + tpnme.AnyVal, tpnme.Byte, tpnme.Short, tpnme.Char, tpnme.Int, tpnme.Long, tpnme.Float, tpnme.Double, + tpnme.Unit, tpnme.Boolean) + + /** When typing a primitive value class or AnyVal, we ignore the `getClass` + * member: it's supposed to be an override of the `getClass` defined on `Any`, + * but in dotty `Any#getClass` is polymorphic so it ends up being an overload. 
+ * This is especially problematic because it means that when writing: + * + * 1.asInstanceOf[Int & AnyRef].getClass + * + * the `getClass` that returns `Class[Int]` defined in Int can be selected, + * but this call is specified to return `classOf[Integer]`, see + * tests/run/t5568.scala. + * + * FIXME: remove all the `getClass` methods defined in the standard library + * so we don't have to hot-patch it like this. + */ + def hasProblematicGetClass(className: Name): Boolean = + HasProblematicGetClass.contains(className) + + /** Is synthesized symbol with alphanumeric name allowed to be used as an infix operator? */ + def isInfix(sym: Symbol)(using Context): Boolean = + (sym eq Object_eq) || (sym eq Object_ne) + + @tu lazy val assumedTransparentTraits = + Set[Symbol](ComparableClass, ProductClass, SerializableClass, + // add these for now, until we had a chance to retrofit 2.13 stdlib + // we should do a more through sweep through it then. + requiredClass("scala.collection.SortedOps"), + requiredClass("scala.collection.StrictOptimizedSortedSetOps"), + requiredClass("scala.collection.generic.DefaultSerializable"), + requiredClass("scala.collection.generic.IsIterable"), + requiredClass("scala.collection.generic.IsIterableOnce"), + requiredClass("scala.collection.generic.IsMap"), + requiredClass("scala.collection.generic.IsSeq"), + requiredClass("scala.collection.generic.Subtractable"), + requiredClass("scala.collection.immutable.StrictOptimizedSeqOps") + ) + + // ----- primitive value class machinery ------------------------------------------ + + class PerRun[T](generate: Context ?=> T) { + private var current: RunId = NoRunId + private var cached: T = _ + def apply()(using Context): T = { + if (current != ctx.runId) { + cached = generate + current = ctx.runId + } + cached + } + } + + @tu lazy val ScalaNumericValueTypeList: List[TypeRef] = List( + ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType) + + @tu private lazy val ScalaNumericValueTypes: 
collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet + @tu private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes `union` Set(UnitType, BooleanType) + + val ScalaNumericValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaNumericValueTypes.map(_.symbol)) + val ScalaValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaValueTypes.map(_.symbol)) + + val ScalaBoxedClasses: PerRun[collection.Set[Symbol]] = new PerRun( + Set(BoxedByteClass, BoxedShortClass, BoxedCharClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass, BoxedUnitClass, BoxedBooleanClass) + ) + + private val valueTypeEnc = mutable.Map[TypeName, PrimitiveClassEnc]() + private val typeTags = mutable.Map[TypeName, Name]().withDefaultValue(nme.specializedTypeNames.Object) + +// private val unboxedTypeRef = mutable.Map[TypeName, TypeRef]() +// private val javaTypeToValueTypeRef = mutable.Map[Class[?], TypeRef]() +// private val valueTypeNamesToJavaType = mutable.Map[TypeName, Class[?]]() + + private def valueTypeRef(name: String, jtype: Class[?], enc: Int, tag: Name): TypeRef = { + val vcls = requiredClassRef(name) + valueTypeEnc(vcls.name) = enc + typeTags(vcls.name) = tag +// unboxedTypeRef(boxed.name) = vcls +// javaTypeToValueTypeRef(jtype) = vcls +// valueTypeNamesToJavaType(vcls.name) = jtype + vcls + } + + /** The type of the boxed class corresponding to primitive value type `tp`. 
*/ + def boxedType(tp: Type)(using Context): TypeRef = { + val cls = tp.classSymbol + if (cls eq ByteClass) BoxedByteClass + else if (cls eq ShortClass) BoxedShortClass + else if (cls eq CharClass) BoxedCharClass + else if (cls eq IntClass) BoxedIntClass + else if (cls eq LongClass) BoxedLongClass + else if (cls eq FloatClass) BoxedFloatClass + else if (cls eq DoubleClass) BoxedDoubleClass + else if (cls eq UnitClass) BoxedUnitClass + else if (cls eq BooleanClass) BoxedBooleanClass + else sys.error(s"Not a primitive value type: $tp") + }.typeRef + + def unboxedType(tp: Type)(using Context): TypeRef = { + val cls = tp.classSymbol + if (cls eq BoxedByteClass) ByteType + else if (cls eq BoxedShortClass) ShortType + else if (cls eq BoxedCharClass) CharType + else if (cls eq BoxedIntClass) IntType + else if (cls eq BoxedLongClass) LongType + else if (cls eq BoxedFloatClass) FloatType + else if (cls eq BoxedDoubleClass) DoubleType + else if (cls eq BoxedUnitClass) UnitType + else if (cls eq BoxedBooleanClass) BooleanType + else sys.error(s"Not a boxed primitive value type: $tp") + } + + /** The JVM tag for `tp` if it's a primitive, `java.lang.Object` otherwise. 
*/ + def typeTag(tp: Type)(using Context): Name = typeTags(scalaClassName(tp)) + +// /** The `Class[?]` of a primitive value type name */ +// def valueTypeNameToJavaType(name: TypeName)(using Context): Option[Class[?]] = +// valueTypeNamesToJavaType.get(if (name.firstPart eq nme.scala) name.lastPart.toTypeName else name) + + type PrimitiveClassEnc = Int + + val ByteEnc: Int = 2 + val ShortEnc: Int = ByteEnc * 3 + val CharEnc: Int = 5 + val IntEnc: Int = ShortEnc * CharEnc + val LongEnc: Int = IntEnc * 7 + val FloatEnc: Int = LongEnc * 11 + val DoubleEnc: Int = FloatEnc * 13 + val BooleanEnc: Int = 17 + val UnitEnc: Int = 19 + + def isValueSubType(tref1: TypeRef, tref2: TypeRef)(using Context): Boolean = + valueTypeEnc(tref2.name) % valueTypeEnc(tref1.name) == 0 + def isValueSubClass(sym1: Symbol, sym2: Symbol): Boolean = + valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 + + @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = + SimpleIdentityMap.empty[Symbol] + .updated(AnyClass, ObjectClass) + .updated(MatchableClass, ObjectClass) + .updated(AnyValClass, ObjectClass) + .updated(SingletonClass, ObjectClass) + .updated(TupleClass, ProductClass) + .updated(NonEmptyTupleClass, ProductClass) + .updated(PairClass, ObjectClass) + + // ----- Initialization --------------------------------------------------- + + /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ + @tu lazy val syntheticScalaClasses: List[TypeSymbol] = + List( + AnyClass, + MatchableClass, + AnyRefAlias, + AnyKindClass, + andType, + orType, + RepeatedParamClass, + ByNameParamClass2x, + AnyValClass, + NullClass, + NothingClass, + SingletonClass) + + @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( + EmptyPackageVal, + OpsPackageClass) + + /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ + @tu 
lazy val syntheticCoreMethods: List[TermSymbol] = + AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod) + + @tu lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet + + private var isInitialized = false + + def init()(using Context): Unit = { + this.initCtx = ctx + if (!isInitialized) { + // force initialization of every symbol that is synthesized or hijacked by the compiler + val forced = + syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass + isInitialized = true + } + addSyntheticSymbolsComments + } + + def addSyntheticSymbolsComments(using Context): Unit = + def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) + + add(AnyClass, + """/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala + | * execution environment inherits directly or indirectly from this class. + | * + | * Starting with Scala 2.10 it is possible to directly extend `Any` using ''universal traits''. + | * A ''universal trait'' is a trait that extends `Any`, only has `def`s as members, and does no initialization. + | * + | * The main use case for universal traits is to allow basic inheritance of methods for [[scala.AnyVal value classes]]. + | * For example, + | * + | * {{{ + | * trait Printable extends Any { + | * def print(): Unit = println(this) + | * } + | * class Wrapper(val underlying: Int) extends AnyVal with Printable + | * + | * val w = new Wrapper(3) + | * w.print() + | * }}} + | * + | * See the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]] for more + | * details on the interplay of universal traits and value classes. + | */ + """.stripMargin) + + add(Any_==, + """/** Test two objects for equality. + | * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`. + | * + | * @param that the object to compare against this object for equality. 
+ | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. + | */ + """.stripMargin) + + add(Any_!=, + """/** Test two objects for inequality. + | * + | * @param that the object to compare against this object for equality. + | * @return `true` if !(this == that), `false` otherwise. + | */ + """.stripMargin) + + add(Any_equals, + """/** Compares the receiver object (`this`) with the argument object (`that`) for equivalence. + | * + | * Any implementation of this method should be an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]: + | * + | * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`. + | * - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and + | * only if `y.equals(x)` returns `true`. + | * - It is transitive: for any instances `x`, `y`, and `z` of type `Any` if `x.equals(y)` returns `true` and + | * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`. + | * + | * If you override this method, you should verify that your implementation remains an equivalence relation. + | * Additionally, when overriding this method it is usually necessary to override `hashCode` to ensure that + | * objects which are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]]. + | * (`o1.hashCode.equals(o2.hashCode)`). + | * + | * @param that the object to compare against this object for equality. + | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. + | */ + """.stripMargin) + + add(Any_hashCode, + """/** Calculate a hash code value for the object. + | * + | * The default hashing algorithm is platform dependent. + | * + | * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet + | * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`. 
+ | * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) that they have + | * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure + | * to verify that the behavior is consistent with the `equals` method. + | * + | * @return the hash code value for this object. + | */ + """.stripMargin) + + add(Any_toString, + """/** Returns a string representation of the object. + | * + | * The default representation is platform dependent. + | * + | * @return a string representation of the object. + | */ + """.stripMargin) + + add(Any_##, + """/** Equivalent to `x.hashCode` except for boxed numeric types and `null`. + | * For numerics, it returns a hash value which is consistent + | * with value equality: if two value type instances compare + | * as true, then ## will produce the same hash value for each + | * of them. + | * For `null` returns a hashcode where `null.hashCode` throws a + | * `NullPointerException`. + | * + | * @return a hash value consistent with == + | */ + """.stripMargin) + + add(Any_isInstanceOf, + """/** Test whether the dynamic type of the receiver object is `T0`. + | * + | * Note that the result of the test is modulo Scala's erasure semantics. + | * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the + | * expression `List(1).isInstanceOf[List[String]]` will return `true`. + | * In the latter example, because the type argument is erased as part of compilation it is + | * not possible to check whether the contents of the list are of the specified type. + | * + | * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. + | */ + """.stripMargin) + + add(Any_asInstanceOf, + """/** Cast the receiver object to be of type `T0`. + | * + | * Note that the success of a cast at runtime is modulo Scala's erasure semantics. 
+ | * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at + | * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not. + | * In the latter example, because the type argument is erased as part of compilation it is + | * not possible to check whether the contents of the list are of the requested type. + | * + | * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`. + | * @return the receiver object. + | */ + """.stripMargin) + + add(Any_getClass, + """/** Returns the runtime class representation of the object. + | * + | * @return a class object corresponding to the runtime type of the receiver. + | */ + """.stripMargin) + + add(MatchableClass, + """/** The base trait of types that can be safely pattern matched against. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. + | */ + """.stripMargin) + + add(AnyRefAlias, + """/** Class `AnyRef` is the root class of all ''reference types''. + | * All types except the value types descend from this class. + | */ + """.stripMargin) + + add(Object_eq, + """/** Tests whether the argument (`that`) is a reference to the receiver object (`this`). + | * + | * The `eq` method implements an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on + | * non-null instances of `AnyRef`, and has three additional properties: + | * + | * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of + | * `x.eq(y)` consistently returns `true` or consistently returns `false`. + | * - For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` returns `false`. + | * - `null.eq(null)` returns `true`. + | * + | * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is + | * consistent with reference equality. 
Therefore, if two objects are references to each other (`o1 eq o2`), they + | * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`). + | * + | * @param that the object to compare against this object for reference equality. + | * @return `true` if the argument is a reference to the receiver object; `false` otherwise. + | */ + """.stripMargin) + + add(Object_ne, + """/** Equivalent to `!(this eq that)`. + | * + | * @param that the object to compare against this object for reference equality. + | * @return `true` if the argument is not a reference to the receiver object; `false` otherwise. + | */ + """.stripMargin) + + add(Object_synchronized, + """/** Executes the code in `body` with an exclusive lock on `this`. + | * + | * @param body the code to execute + | * @return the result of `body` + | */ + """.stripMargin) + + add(Object_clone, + """/** Create a copy of the receiver object. + | * + | * The default implementation of the `clone` method is platform dependent. + | * + | * @note not specified by SLS as a member of AnyRef + | * @return a copy of the receiver object. + | */ + """.stripMargin) + + add(Object_finalize, + """/** Called by the garbage collector on the receiver object when there + | * are no more references to the object. + | * + | * The details of when and if the `finalize` method is invoked, as + | * well as the interaction between `finalize` and non-local returns + | * and exceptions, are all platform dependent. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_notify, + """/** Wakes up a single thread that is waiting on the receiver object's monitor. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_notifyAll, + """/** Wakes up all threads that are waiting on the receiver object's monitor. 
+ | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_wait, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait--]]. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_waitL, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-]]. + | * + | * @param timeout the maximum time to wait in milliseconds. + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_waitLI, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-int-]] + | * + | * @param timeout the maximum time to wait in milliseconds. + | * @param nanos additional time, in nanoseconds range 0-999999. + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(AnyKindClass, + """/** The super-type of all types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. + | */ + """.stripMargin) + + add(andType, + """/** The intersection of two types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. + | */ + """.stripMargin) + + add(orType, + """/** The union of two types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. + | */ + """.stripMargin) + + add(AnyValClass, + """/** `AnyVal` is the root class of all ''value types'', which describe values + | * not implemented as objects in the underlying host system. Value classes + | * are specified in Scala Language Specification, section 12.2. + | * + | * The standard implementation includes nine `AnyVal` subtypes: + | * + | * [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], + | * [[scala.Short]], and [[scala.Byte]] are the ''numeric value types''. 
+ | * + | * [[scala.Unit]] and [[scala.Boolean]] are the ''non-numeric value types''. + | * + | * Other groupings: + | * + | * - The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]]. + | * - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]]. + | * - The ''floating point types'' are [[scala.Float]] and [[scala.Double]]. + | * + | * Prior to Scala 2.10, `AnyVal` was a sealed trait. Beginning with Scala 2.10, + | * however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class'' + | * which is treated specially by the compiler. Properly-defined user value classes provide a way + | * to improve performance on user-defined types by avoiding object allocation at runtime, and by + | * replacing virtual method invocations with static method invocations. + | * + | * User-defined value classes which avoid object allocation... + | * + | * - must have a single `val` parameter that is the underlying runtime representation. + | * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s. + | * - typically extend no other trait apart from `AnyVal`. + | * - cannot be used in type tests or pattern matching. + | * - may not override `equals` or `hashCode` methods. + | * + | * A minimal example: + | * {{{ + | * class Wrapper(val underlying: Int) extends AnyVal { + | * def foo: Wrapper = new Wrapper(underlying * 19) + | * } + | * }}} + | * + | * It's important to note that user-defined value classes are limited, and in some circumstances, + | * still must allocate a value class instance at runtime. These limitations and circumstances are + | * explained in greater detail in the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. + | */ + """.stripMargin) + + add(NullClass, + """/** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy. 
+ | * + | * `Null` is the type of the `null` literal. It is a subtype of every type + | * except those of value classes. Value classes are subclasses of [[AnyVal]], which includes + | * primitive types such as [[Int]], [[Boolean]], and user-defined value classes. + | * + | * Since `Null` is not a subtype of value types, `null` is not a member of any such type. + | * For instance, it is not possible to assign `null` to a variable of type [[scala.Int]]. + | */ + """.stripMargin) + + add(NothingClass, + """/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy. + | * + | * `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist + | * ''no instances'' of this type. Although type `Nothing` is uninhabited, it is + | * nevertheless useful in several ways. For instance, the Scala library defines a value + | * [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala, + | * this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`. + | * + | * Another usage for Nothing is the return type for methods which never return normally. + | * One example is method error in [[scala.sys]], which always throws an exception. + | */ + """.stripMargin) + + add(SingletonClass, + """/** `Singleton` is used by the compiler as a supertype for singleton types. This includes literal types, + | * as they are also singleton types. + | * + | * {{{ + | * scala> object A { val x = 42 } + | * defined object A + | * + | * scala> implicitly[A.type <:< Singleton] + | * res12: A.type <:< Singleton = generalized constraint + | * + | * scala> implicitly[A.x.type <:< Singleton] + | * res13: A.x.type <:< Singleton = generalized constraint + | * + | * scala> implicitly[42 <:< Singleton] + | * res14: 42 <:< Singleton = generalized constraint + | * + | * scala> implicitly[Int <:< Singleton] + | * ^ + | * error: Cannot prove that Int <:< Singleton. 
+ | * }}} + | * + | * `Singleton` has a special meaning when it appears as an upper bound on a formal type + | * parameter. Normally, type inference in Scala widens singleton types to the underlying + | * non-singleton type. When a type parameter has an explicit upper bound of `Singleton`, + | * the compiler infers a singleton type. + | * + | * {{{ + | * scala> def check42[T](x: T)(implicit ev: T =:= 42): T = x + | * check42: [T](x: T)(implicit ev: T =:= 42)T + | * + | * scala> val x1 = check42(42) + | * ^ + | * error: Cannot prove that Int =:= 42. + | * + | * scala> def singleCheck42[T <: Singleton](x: T)(implicit ev: T =:= 42): T = x + | * singleCheck42: [T <: Singleton](x: T)(implicit ev: T =:= 42)T + | * + | * scala> val x2 = singleCheck42(42) + | * x2: Int = 42 + | * }}} + | * + | * See also [[https://docs.scala-lang.org/sips/42.type.html SIP-23 about Literal-based Singleton Types]]. + | */ + """.stripMargin) +} diff --git a/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala new file mode 100644 index 000000000000..6690cae3a142 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package core + +import Periods._ +import SymDenotations._ +import Contexts._ +import Types._ +import Symbols._ +import Denotations._ +import Phases._ + +object DenotTransformers { + + /** A transformer group contains a sequence of transformers, + * ordered by the phase where they apply. Transformers are added + * to a group via `install`. 
+ */ + + /** A transformer transforms denotations at a given phase */ + trait DenotTransformer extends Phase { + + /** The last phase during which the transformed denotations are valid */ + def lastPhaseId(using Context): Int = ctx.base.nextDenotTransformerId(id + 1) + + /** The validity period of the transformed denotations in the given context */ + def validFor(using Context): Period = + Period(ctx.runId, id + 1, lastPhaseId) + + /** The transformation method */ + def transform(ref: SingleDenotation)(using Context): SingleDenotation + } + + /** A transformer that only transforms the info field of denotations */ + trait InfoTransformer extends DenotTransformer { + + def transformInfo(tp: Type, sym: Symbol)(using Context): Type + + def transform(ref: SingleDenotation)(using Context): SingleDenotation = { + val sym = ref.symbol + if (sym.exists && !infoMayChange(sym)) ref + else { + val info1 = transformInfo(ref.info, ref.symbol) + if (info1 eq ref.info) ref + else ref match { + case ref: SymDenotation => + ref.copySymDenotation(info = info1).copyCaches(ref, ctx.phase.next) + case _ => + ref.derivedSingleDenotation(ref.symbol, info1) + } + } + } + + /** Denotations with a symbol where `infoMayChange` is false are guaranteed to be + * unaffected by this transform, so `transformInfo` need not be run. This + * can save time, and more importantly, can help avoid forcing symbol completers. + */ + protected def infoMayChange(sym: Symbol)(using Context): Boolean = true + } + + /** A transformer that only transforms SymDenotations. + * Note: Infos of non-sym denotations are left as is. So the transformer should + * be used before erasure only if this is not a problem. After erasure, all + * denotations are SymDenotations, so SymTransformers can be used freely. 
+ */ + trait SymTransformer extends DenotTransformer { + + def transformSym(sym: SymDenotation)(using Context): SymDenotation + + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { + case ref: SymDenotation => transformSym(ref) + case _ => ref + } + } + + /** A `DenotTransformer` trait that has the identity as its `transform` method. + * You might want to inherit from this trait so that new denotations can be + * installed using `installAfter` and `enteredAfter` at the end of the phase. + */ + trait IdentityDenotTransformer extends DenotTransformer { + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala new file mode 100644 index 000000000000..5d99118e56af --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala @@ -0,0 +1,1375 @@ +package dotty.tools +package dotc +package core + +import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } +import Contexts._ +import Names._ +import NameKinds._ +import StdNames._ +import Symbols.NoSymbol +import Symbols._ +import Types._ +import Periods._ +import Flags._ +import DenotTransformers._ +import Decorators._ +import Signature.MatchDegree._ +import printing.Texts._ +import printing.Printer +import io.AbstractFile +import config.Config +import config.Printers.overload +import util.common._ +import typer.ProtoTypes.NoViewsAllowed +import collection.mutable.ListBuffer +import language.experimental.pureFunctions + +/** Denotations represent the meaning of symbols and named types. + * The following diagram shows how the principal types of denotations + * and their denoting entities relate to each other. Lines ending in + * a down-arrow `v` are member methods. The two methods shown in the diagram are + * "symbol" and "deref". 
Both methods are parameterized by the current context, + * and are effectively indexed by current period. + * + * Lines ending in a horizontal line mean subtyping (right is a subtype of left). + * + * NamedType + * | Symbol---------ClassSymbol + * | | | + * | denot | denot | denot + * v v v + * Denotation-+-----SingleDenotation-+------SymDenotation-+----ClassDenotation + * | | + * +-----MultiDenotation | + * | + * +--UniqueRefDenotation + * +--JointRefDenotation + * + * Here's a short summary of the classes in this diagram. + * + * NamedType A type consisting of a prefix type and a name, with fields + * prefix: Type + * name: Name + * It has two subtypes: TermRef and TypeRef + * Symbol A label for a definition or declaration in one compiler run + * ClassSymbol A symbol representing a class + * Denotation The meaning of a named type or symbol during a period + * MultiDenotation A denotation representing several overloaded members + * SingleDenotation A denotation representing a non-overloaded member or definition, with main fields + * symbol: Symbol + * info: Type + * UniqueRefDenotation A denotation referring to a single definition with some member type + * JointRefDenotation A denotation referring to a member that could resolve to several definitions + * SymDenotation A denotation representing a single definition with its original type, with main fields + * name: Name + * owner: Symbol + * flags: Flags + * privateWithin: Symbol + * annotations: List[Annotation] + * ClassDenotation A denotation representing a single class definition. + */ +object Denotations { + + implicit def eqDenotation: CanEqual[Denotation, Denotation] = CanEqual.derived + + /** A PreDenotation represents a group of single denotations or a single multi-denotation + * It is used as an optimization to avoid forming MultiDenotations too eagerly. 
+ */ + abstract class PreDenotation extends caps.Pure { + + /** A denotation in the group exists */ + def exists: Boolean + + /** First/last denotation in the group */ + def first: Denotation + def last: Denotation + + /** Convert to full denotation by &-ing all elements */ + def toDenot(pre: Type)(using Context): Denotation + + /** Group contains a denotation that refers to given symbol */ + def containsSym(sym: Symbol): Boolean + + /** Group contains a denotation with the same signature as `other` */ + def matches(other: SingleDenotation)(using Context): Boolean + + /** Keep only those denotations in this group which satisfy predicate `p`. */ + def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation + + /** Keep only those denotations in this group which have a signature + * that's not already defined by `denots`. + */ + def filterDisjoint(denots: PreDenotation)(using Context): PreDenotation + + /** Keep only those inherited members M of this predenotation for which the following is true + * - M is not marked Private + * - If M has a unique symbol, it does not appear in `prevDenots`. + * - M's signature as seen from prefix `pre` does not appear in `ownDenots` + * Return the denotation as seen from `pre`. + * Called from SymDenotations.computeMember. There, `ownDenots` are the denotations found in + * the base class, which shadow any inherited denotations with the same signature. + * `prevDenots` are the denotations that are defined in the class or inherited from + * a base type which comes earlier in the linearization. + */ + def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): PreDenotation + + /** Keep only those denotations in this group that have all of the flags in `required`, + * but none of the flags in `excluded`. + */ + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation + + /** Map `f` over all single denotations and aggregate the results with `g`. 
*/ + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T + + private var cachedPrefix: Type = _ + private var cachedAsSeenFrom: AsSeenFromResult = _ + private var validAsSeenFrom: Period = Nowhere + + type AsSeenFromResult <: PreDenotation + + /** The denotation with info(s) as seen from prefix type */ + def asSeenFrom(pre: Type)(using Context): AsSeenFromResult = + if (Config.cacheAsSeenFrom) { + if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) { + cachedAsSeenFrom = computeAsSeenFrom(pre) + cachedPrefix = pre + validAsSeenFrom = if (pre.isProvisional) Nowhere else ctx.period + } + cachedAsSeenFrom + } + else computeAsSeenFrom(pre) + + protected def computeAsSeenFrom(pre: Type)(using Context): AsSeenFromResult + + /** The union of two groups. */ + def union(that: PreDenotation): PreDenotation = + if (!this.exists) that + else if (!that.exists) this + else DenotUnion(this, that) + } + + /** A denotation is the result of resolving + * a name (either simple identifier or select) during a given period. + * + * Denotations can be combined with `&` and `|`. + * & is conjunction, | is disjunction. + * + * `&` will create an overloaded denotation from two + * non-overloaded denotations if their signatures differ. + * Analogously `|` of two denotations with different signatures will give + * an empty denotation `NoDenotation`. + * + * A denotation might refer to `NoSymbol`. This is the case if the denotation + * was produced from a disjunction of two denotations with different symbols + * and there was no common symbol in a superclass that could substitute for + * both symbols. Here is an example: + * + * Say, we have: + * + * class A { def f: A } + * class B { def f: B } + * val x: A | B = if (test) new A else new B + * val y = x.f + * + * Then the denotation of `y` is `SingleDenotation(NoSymbol, A | B)`. 
+ | * + * @param symbol The referencing symbol, or NoSymbol if none exists + */ + abstract class Denotation(val symbol: Symbol, protected var myInfo: Type) extends PreDenotation with printing.Showable { + type AsSeenFromResult <: Denotation + + /** The type info. + * The info is an instance of TypeType iff this is a type denotation + * Uncompleted denotations set myInfo to a LazyType. + */ + final def info(using Context): Type = { + def completeInfo = { // Written this way so that `info` is small enough to be inlined + this.asInstanceOf[SymDenotation].completeFrom(myInfo.asInstanceOf[LazyType]); info + } + if (myInfo.isInstanceOf[LazyType]) completeInfo else myInfo + } + + /** The type info, or, if this is a SymDenotation where the symbol + * is not yet completed, the completer + */ + def infoOrCompleter: Type + + /** The period during which this denotation is valid. */ + def validFor: Period + + /** Is this a reference to a type symbol? */ + def isType: Boolean + + /** Is this a reference to a term symbol? */ + def isTerm: Boolean = !isType + + /** Is this denotation overloaded? */ + final def isOverloaded: Boolean = isInstanceOf[MultiDenotation] + + /** Denotation points to unique symbol; false for overloaded denotations + * and JointRef denotations. + */ + def hasUniqueSym: Boolean + + /** The name of the denotation */ + def name(using Context): Name + + /** The signature of the denotation. */ + def signature(using Context): Signature + + /** Resolve overloaded denotation to pick the ones with the given signature + * when seen from prefix `site`. + * @param relaxed When true, consider only parameter signatures for a match. + */ + def atSignature(sig: Signature, targetName: Name, site: Type = NoPrefix, relaxed: Boolean = false)(using Context): Denotation + + /** The variant of this denotation that's current in the given context. + * If no such denotation exists, returns the denotation with each alternative + * at its first point of definition. 
 + */ + def current(using Context): Denotation + + /** Is this denotation different from NoDenotation or an ErrorDenotation? */ + def exists: Boolean = true + + /** A denotation with the info of this denotation transformed using `f` */ + def mapInfo(f: Type => Type)(using Context): Denotation + + /** If this denotation does not exist, fallback to alternative */ + inline def orElse(inline that: Denotation): Denotation = if (this.exists) this else that + + /** The set of alternative single-denotations making up this denotation */ + final def alternatives: List[SingleDenotation] = altsWith(alwaysTrue) + + /** The alternatives of this denotation that satisfy the predicate `p`. */ + def altsWith(p: Symbol => Boolean): List[SingleDenotation] + + /** The unique alternative of this denotation that satisfies the predicate `p`, + * or NoDenotation if no satisfying alternative exists. + * @throws TypeError if there is more than one alternative that satisfies `p`. + */ + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation + + override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation + + /** If this is a SingleDenotation, return it, otherwise throw a TypeError */ + def checkUnique(using Context): SingleDenotation = suchThat(alwaysTrue) + + /** Does this denotation have an alternative that satisfies the predicate `p`? */ + def hasAltWith(p: SingleDenotation => Boolean): Boolean + + /** The denotation made up from the alternatives of this denotation that + * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists. + */ + def accessibleFrom(pre: Type, superAccess: Boolean = false)(using Context): Denotation + + /** Find member of this denotation with given `name`, all `required` + * flags and no `excluded` flag, and produce a denotation that contains the type of the member + * as seen from given prefix `pre`. 
+ */ + def findMember(name: Name, pre: Type, required: FlagSet, excluded: FlagSet)(using Context): Denotation = + info.findMember(name, pre, required, excluded) + + /** If this denotation is overloaded, filter with given predicate. + * If result is still overloaded throw a TypeError. + * Note: disambiguate is slightly different from suchThat in that + * single-denotations that do not satisfy the predicate are left alone + * (whereas suchThat would map them to NoDenotation). + */ + inline def disambiguate(inline p: Symbol => Boolean)(using Context): SingleDenotation = this match { + case sdenot: SingleDenotation => sdenot + case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives) + } + + /** Return symbol in this denotation that satisfies the given predicate. + * if generateStubs is specified, return a stubsymbol if denotation is a missing ref. + * Throw a `TypeError` if predicate fails to disambiguate symbol or no alternative matches. + */ + def requiredSymbol(kind: String, + name: Name, + site: Denotation = NoDenotation, + args: List[Type] = Nil, + source: AbstractFile | Null = null, + generateStubs: Boolean = true) + (p: Symbol => Boolean) + (using Context): Symbol = + disambiguate(p) match { + case m @ MissingRef(ownerd, name) if generateStubs => + if ctx.settings.YdebugMissingRefs.value then m.ex.printStackTrace() + newStubSymbol(ownerd.symbol, name, source) + case NoDenotation | _: NoQualifyingRef | _: MissingRef => + def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" + val msg = + if (site.exists) i"$site does not have a member $kind $name$argStr" + else i"missing: $kind $name$argStr" + throw new TypeError(msg) + case denot => + denot.symbol + } + + def requiredMethod(pname: PreName)(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("method", name, this)(_.is(Method)).asTerm + } + def requiredMethodRef(name: PreName)(using Context): TermRef = + requiredMethod(name).termRef + + def 
requiredMethod(pname: PreName, argTypes: List[Type])(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("method", name, this, argTypes) { x => + x.is(Method) && { + x.info.paramInfoss match { + case paramInfos :: Nil => paramInfos.corresponds(argTypes)(_ =:= _) + case _ => false + } + } + }.asTerm + } + def requiredMethodRef(name: PreName, argTypes: List[Type])(using Context): TermRef = + requiredMethod(name, argTypes).termRef + + def requiredValue(pname: PreName)(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("field or getter", name, this)(_.info.isParameterless).asTerm + } + def requiredValueRef(name: PreName)(using Context): TermRef = + requiredValue(name).termRef + + def requiredClass(pname: PreName)(using Context): ClassSymbol = { + val name = pname.toTypeName + info.member(name).requiredSymbol("class", name, this)(_.isClass).asClass + } + + def requiredType(pname: PreName)(using Context): TypeSymbol = { + val name = pname.toTypeName + info.member(name).requiredSymbol("type", name, this)(_.isType).asType + } + + /** The alternative of this denotation that has a type matching `targetType` when seen + * as a member of type `site` and that has a target name matching `targetName`, or + * `NoDenotation` if none exists. + */ + def matchingDenotation(site: Type, targetType: Type, targetName: Name)(using Context): SingleDenotation = { + def qualifies(sym: Symbol) = + site.memberInfo(sym).matchesLoosely(targetType) && sym.hasTargetName(targetName) + if (isOverloaded) + atSignature(targetType.signature, targetName, site, relaxed = true) match { + case sd: SingleDenotation => sd.matchingDenotation(site, targetType, targetName) + case md => md.suchThat(qualifies(_)) + } + else if (exists && !qualifies(symbol)) NoDenotation + else asSingleDenotation + } + + /** Form a denotation by conjoining with denotation `that`. + * + * NoDenotations are dropped. 
MultiDenotations are handled by merging + * parts with same signatures. SingleDenotations with equal signatures + * are joined by following this sequence of steps: + * + * 1. If exactly one of the denotations has an inaccessible symbol, pick the other one. + * 2. Otherwise, if one of the infos overrides the other one, and the associated + * symbol does not score strictly lower than the other one, + * pick the associated denotation. + * 3. Otherwise, if the two infos can be combined with `infoMeet`, pick that as + * result info, and pick the symbol that scores higher as result symbol, + * or pick `sym1` as a tie breaker. The picked info and symbol are combined + * in a JointDenotation. + * 4. Otherwise, if one of the two symbols scores strongly higher than the + * other one, pick the associated denotation. + * 5. Otherwise return a multi-denotation consisting of both denotations. + * + * Symbol scoring is determined according to the following ranking + * where earlier criteria trump later ones. Cases marked with (*) + * give a strong score advantage, the others a weak one. + * + * 1. The symbol exists, and the other one does not. (*) + * 2. The symbol is not a bridge, but the other one is. (*) + * 3. The symbol is concrete, and the other one is deferred + * 4. The symbol appears before the other in the linearization of `pre` + * 5. The symbol's visibility is strictly greater than the other one's. + * 6. The symbol is a method, but the other one is not. + */ + def meet(that: Denotation, pre: Type, safeIntersection: Boolean = false)(using Context): Denotation = { + /** Try to merge denot1 and denot2 without adding a new signature. 
*/ + def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match { + case denot1 @ MultiDenotation(denot11, denot12) => + val d1 = mergeDenot(denot11, denot2) + if (d1.exists) denot1.derivedUnionDenotation(d1, denot12) + else { + val d2 = mergeDenot(denot12, denot2) + if (d2.exists) denot1.derivedUnionDenotation(denot11, d2) + else NoDenotation + } + case denot1: SingleDenotation => + if (denot1 eq denot2) denot1 + else if denot1.matches(denot2) then mergeSingleDenot(denot1, denot2) + else NoDenotation + } + + /** Try to merge single-denotations. */ + def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation = + val info1 = denot1.info + val info2 = denot2.info + val sym1 = denot1.symbol + val sym2 = denot2.symbol + + /** Does `owner1` come before `owner2` in the linearization of `pre`? */ + def linearScore(owner1: Symbol, owner2: Symbol): Int = + + def searchBaseClasses(bcs: List[ClassSymbol]): Int = bcs match + case bc :: bcs1 => + if bc eq owner1 then 1 + else if bc eq owner2 then -1 + else searchBaseClasses(bcs1) + case Nil => 0 + + if owner1 eq owner2 then 0 + else if owner1.derivesFrom(owner2) then 1 + else if owner2.derivesFrom(owner1) then -1 + else searchBaseClasses(pre.baseClasses) + end linearScore + + /** Similar to SymDenotation#accessBoundary, but without the special cases. */ + def accessBoundary(sym: Symbol) = + if (sym.is(Private)) sym.owner + else sym.privateWithin.orElse( + if (sym.is(Protected)) sym.owner.enclosingPackageClass + else defn.RootClass) + + def isHidden(sym: Symbol) = sym.exists && !sym.isAccessibleFrom(pre) + // In typer phase filter out denotations with symbols that are not + // accessible. After typer, this is not possible since we cannot guarantee + // that the current owner is set correctly. See pos/14660.scala. 
+ val hidden1 = isHidden(sym1) && ctx.isTyper + val hidden2 = isHidden(sym2) && ctx.isTyper + if hidden1 && !hidden2 then denot2 + else if hidden2 && !hidden1 then denot1 + else + // The score that determines which symbol to pick for the result denotation. + // A value > 0 means pick `sym1`, < 0 means pick `sym2`. + // A value of +/- 2 means pick one of the denotations as a tie-breaker + // if a common info does not exist. + val symScore: Int = + if !sym1.exists then -2 + else if !sym2.exists then 2 + else if sym1.is(Bridge) && !sym2.is(Bridge) then -2 + else if sym2.is(Bridge) && !sym1.is(Bridge) then 2 + else if !sym1.isAsConcrete(sym2) then -1 + else if !sym2.isAsConcrete(sym1) then 1 + else + val linScore = linearScore(sym1.owner, sym2.owner) + if linScore != 0 then linScore + else + val boundary1 = accessBoundary(sym1) + val boundary2 = accessBoundary(sym2) + if boundary1.isProperlyContainedIn(boundary2) then -1 + else if boundary2.isProperlyContainedIn(boundary1) then 1 + else if sym2.is(Method) && !sym1.is(Method) then -1 + else if sym1.is(Method) && !sym2.is(Method) then 1 + else 0 + + val relaxedOverriding = ctx.explicitNulls && (sym1.is(JavaDefined) || sym2.is(JavaDefined)) + val matchLoosely = sym1.matchNullaryLoosely || sym2.matchNullaryLoosely + + if symScore <= 0 && info2.overrides(info1, relaxedOverriding, matchLoosely, checkClassInfo = false) then + denot2 + else if symScore >= 0 && info1.overrides(info2, relaxedOverriding, matchLoosely, checkClassInfo = false) then + denot1 + else + val jointInfo = infoMeet(info1, info2, safeIntersection) + if jointInfo.exists then + val sym = if symScore >= 0 then sym1 else sym2 + JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor, pre, denot1.isRefinedMethod || denot2.isRefinedMethod) + else if symScore == 2 then denot1 + else if symScore == -2 then denot2 + else + overload.println(i"overloaded with same signature: ${sym1.showLocated}: $info1 / ${sym2.showLocated}: $info2, info = 
${info1.getClass}, ${info2.getClass}, $jointInfo") + MultiDenotation(denot1, denot2) + end mergeSingleDenot + + if (this eq that) this + else if (!this.exists) that + else if (!that.exists) this + else that match { + case that: SingleDenotation => + val r = mergeDenot(this, that) + if (r.exists) r else MultiDenotation(this, that) + case that @ MultiDenotation(denot1, denot2) => + this.meet(denot1, pre).meet(denot2, pre) + } + } + + final def asSingleDenotation: SingleDenotation = asInstanceOf[SingleDenotation] + final def asSymDenotation: SymDenotation = asInstanceOf[SymDenotation] + + def toText(printer: Printer): Text = printer.toText(this) + + // ------ PreDenotation ops ---------------------------------------------- + + final def toDenot(pre: Type)(using Context): Denotation = this + final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym) + } + + // ------ Info meets ---------------------------------------------------- + + /** Merge parameter names of lambda types. If names in corresponding positions match, keep them, + * otherwise generate new synthetic names. + */ + private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = + (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) + yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList + + /** Normally, `tp1 & tp2`, with extra care taken to return `tp1` or `tp2` directly if that's + * a valid answer. Special cases for matching methods and classes, with + * the possibility of returning NoType. Special handling of ExprTypes, where mixed + * intersections widen the ExprType away. 
+ */ + def infoMeet(tp1: Type, tp2: Type, safeIntersection: Boolean)(using Context): Type = + if tp1 eq tp2 then tp1 + else tp1 match + case tp1: TypeBounds => + tp2 match + case tp2: TypeBounds => if safeIntersection then tp1 safe_& tp2 else tp1 & tp2 + case tp2: ClassInfo => tp2 + case _ => NoType + case tp1: ClassInfo => + tp2 match + case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix) + case tp2: TypeBounds => tp1 + case _ => NoType + case tp1: MethodType => + tp2 match + case tp2: MethodType + if TypeComparer.matchingMethodParams(tp1, tp2) + && tp1.isImplicitMethod == tp2.isImplicitMethod + && tp1.isErasedMethod == tp2.isErasedMethod => + val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) + if resType.exists then + tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) + else NoType + case _ => NoType + case tp1: PolyType => + tp2 match + case tp2: PolyType if tp1.paramNames.hasSameLengthAs(tp2.paramNames) => + val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) + if resType.exists then + tp1.derivedLambdaType( + mergeParamNames(tp1, tp2), + tp1.paramInfos.zipWithConserve(tp2.paramInfos)( _ & _ ), + resType) + else NoType + case _ => NoType + case ExprType(rtp1) => + tp2 match + case ExprType(rtp2) => ExprType(rtp1 & rtp2) + case _ => infoMeet(rtp1, tp2, safeIntersection) + case _ => + tp2 match + case _: MethodType | _: PolyType => NoType + case _ => tp1 & tp2.widenExpr + end infoMeet + + /** A non-overloaded denotation */ + abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { + protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation + + final def name(using Context): Name = symbol.name + + /** For SymDenotation, this is NoPrefix. For other denotations this is the prefix + * under which the denotation was constructed. 
+ * + * Note that `asSeenFrom` might return a `SymDenotation` and therefore in + * general one cannot rely on `prefix` being set, see + * `Config.reuseSymDenotations` for details. + */ + def prefix: Type = NoPrefix + + /** True if the info of this denotation comes from a refinement. */ + def isRefinedMethod: Boolean = false + + /** For SymDenotations, the language-specific signature of the info, depending on + * where the symbol is defined. For non-SymDenotations, the Scala 3 + * signature. + * + * Invariants: + * - Before erasure, the signature of a denotation is always equal to the + * signature of its corresponding initial denotation. + * - Two distinct overloads will have SymDenotations with distinct + * signatures (the SELECTin tag in Tasty relies on this to refer to an + * overload unambiguously). Note that this only applies to + * SymDenotations, in general we cannot assume that distinct + * SingleDenotations will have distinct signatures (cf #9050). + */ + final def signature(using Context): Signature = + signature(sourceLanguage = if isType || !this.isInstanceOf[SymDenotation] then SourceLanguage.Scala3 else SourceLanguage(symbol)) + + /** Overload of `signature` which lets the caller pick the language used + * to compute the signature of the info. Useful to match denotations defined in + * different classes (see `matchesLoosely`). + */ + def signature(sourceLanguage: SourceLanguage)(using Context): Signature = + if (isType) Signature.NotAMethod // don't force info if this is a type denotation + else info match { + case info: MethodOrPoly => + try info.signature(sourceLanguage) + catch { // !!! 
DEBUG + case scala.util.control.NonFatal(ex) => + report.echo(s"cannot take signature of $info") + throw ex + } + case _ => Signature.NotAMethod + } + + def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = + if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this + else newLikeThis(symbol, info, pre, isRefinedMethod) + + def mapInfo(f: Type => Type)(using Context): SingleDenotation = + derivedSingleDenotation(symbol, f(info)) + + inline def orElse(inline that: SingleDenotation): SingleDenotation = if (this.exists) this else that + + def altsWith(p: Symbol => Boolean): List[SingleDenotation] = + if (exists && p(symbol)) this :: Nil else Nil + + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = + if (exists && p(symbol)) this else NoDenotation + + def hasAltWith(p: SingleDenotation => Boolean): Boolean = + exists && p(this) + + def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = + if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation + + def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): SingleDenotation = + val situated = if site == NoPrefix then this else asSeenFrom(site) + val sigMatches = sig.matchDegree(situated.signature) match + case FullMatch => + true + case MethodNotAMethodMatch => + // See comment in `matches` + relaxed && !symbol.is(JavaDefined) + case ParamMatch => + relaxed + case noMatch => + false + if sigMatches && symbol.hasTargetName(targetName) then this else NoDenotation + + def matchesImportBound(bound: Type)(using Context): Boolean = + if bound.isRef(defn.NothingClass) then false + else if bound.isAny then true + else NoViewsAllowed.normalizedCompatible(info, bound, keepConstraint = false) + + // ------ Transformations 
----------------------------------------- + + private var myValidFor: Period = Nowhere + + def validFor: Period = myValidFor + def validFor_=(p: Period): Unit = { + myValidFor = p + symbol.invalidateDenotCache() + } + + /** The next SingleDenotation in this run, with wrap-around from last to first. + * + * There may be several `SingleDenotation`s with different validity + * representing the same underlying definition at different phases. + * These are called a "flock". Flock members are generated by + * @See current. Flock members are connected in a ring + * with their `nextInRun` fields. + * + * There are the following invariants concerning flock members + * + * 1) validity periods are non-overlapping + * 2) the union of all validity periods is a contiguous + * interval. + */ + protected var nextInRun: SingleDenotation = this + + /** The version of this SingleDenotation that was valid in the first phase + * of this run. + */ + def initial: SingleDenotation = + if (validFor.firstPhaseId <= 1) this + else { + var current = nextInRun + while (current.validFor.code > this.myValidFor.code) current = current.nextInRun + current + } + + def history: List[SingleDenotation] = { + val b = new ListBuffer[SingleDenotation] + var current = initial + while ({ + b += (current) + current = current.nextInRun + current ne initial + }) + () + b.toList + } + + /** Invalidate all caches and fields that depend on base classes and their contents */ + def invalidateInheritedInfo(): Unit = () + + private def updateValidity()(using Context): this.type = { + assert( + ctx.runId >= validFor.runId + || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time + || ctx.mode.is(Mode.Printing) // no use to be picky when printing error messages + || symbol.isOneOf(ValidForeverFlags), + s"denotation $this invalid in run ${ctx.runId}. 
ValidFor: $validFor") + var d: SingleDenotation = this + while ({ + d.validFor = Period(ctx.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId) + d.invalidateInheritedInfo() + d = d.nextInRun + d ne this + }) + () + this + } + + /** Move validity period of this denotation to a new run. Throw a StaleSymbol error + * if denotation is no longer valid. + * However, StaleSymbol error is not thrown in the following situations: + * + * - If acceptStale returns true (e.g. because we are in the IDE), + * update the symbol to the new version if it exists, or return + * the old version otherwise. + * - If the symbol did not have a denotation that was defined at the current phase + * return a NoDenotation instead. + */ + private def bringForward()(using Context): SingleDenotation = { + this match { + case symd: SymDenotation => + if (stillValid(symd)) return updateValidity() + if acceptStale(symd) && symd.initial.validFor.firstPhaseId <= ctx.lastPhaseId then + // New run might have fewer phases than old, so symbol might no longer be + // visible at all. TabCompleteTests have examples where this happens. + return symd.currentSymbol.denot.orElse(symd).updateValidity() + case _ => + } + if (!symbol.exists) return updateValidity() + if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation + if (ctx.debug) traceInvalid(this) + staleSymbolError + } + + /** The next defined denotation (following `nextInRun`) or an arbitrary + * undefined denotation, if all denotations in a `nextInRun` cycle are + * undefined. + */ + private def nextDefined: SingleDenotation = { + var p1 = this + var p2 = nextInRun + while (p1.validFor == Nowhere && (p1 ne p2)) { + p1 = p1.nextInRun + p2 = p2.nextInRun.nextInRun + } + p1 + } + + /** Skip any denotations that have been removed by an installAfter or that + * are otherwise undefined. 
+ */ + def skipRemoved(using Context): SingleDenotation = + if (myValidFor.code <= 0) nextDefined else this + + /** Produce a denotation that is valid for the given context. + * Usually called when !(validFor contains ctx.period) + * (even though this is not a precondition). + * If the runId of the context is the same as runId of this denotation, + * the right flock member is located, or, if it does not exist yet, + * created by invoking a transformer (@See Transformers). + * If the runId's differ, but this denotation is a SymDenotation + * and its toplevel owner class or module + * is still a member of its enclosing package, then the whole flock + * is brought forward to be valid in the new runId. Otherwise + * the symbol is stale, which constitutes an internal error. + */ + def current(using Context): SingleDenotation = + util.Stats.record("current") + val currentPeriod = ctx.period + val valid = myValidFor + + def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match + case d: ClassDenotation => + assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") + case _ => + + def escapeToNext = nextDefined.ensuring(_.validFor != Nowhere) + + def toNewRun = + util.Stats.record("current.bringForward") + if exists then initial.bringForward().current else this + + def goForward = + var cur = this + // search for containing period as long as nextInRun increases. 
+ var next = nextInRun + while next.validFor.code > valid.code && !(next.validFor contains currentPeriod) do + cur = next + next = next.nextInRun + if next.validFor.code > valid.code then + // in this case, next.validFor contains currentPeriod + cur = next + cur + else + //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod") + // not found, cur points to highest existing variant + val nextTransformerId = ctx.base.nextDenotTransformerId(cur.validFor.lastPhaseId) + if currentPeriod.lastPhaseId <= nextTransformerId then + cur.validFor = Period(currentPeriod.runId, cur.validFor.firstPhaseId, nextTransformerId) + else + var startPid = nextTransformerId + 1 + val transformer = ctx.base.denotTransformers(nextTransformerId) + //println(s"transforming $this with $transformer") + val savedPeriod = ctx.period + val mutCtx = ctx.asInstanceOf[FreshContext] + try + mutCtx.setPhase(transformer) + next = transformer.transform(cur) + // We temporarily update the context with the new phase instead of creating a + // new one. This is done for performance. We cut down on about 30% of context + // creations that way, and also avoid phase caches in contexts to get large. + // To work correctly, we need to demand that the context with the new phase + // is not retained in the result. 
+ catch case ex: CyclicReference => + // println(s"error while transforming $this") + throw ex + finally + mutCtx.setPeriod(savedPeriod) + if next eq cur then + startPid = cur.validFor.firstPhaseId + else + assertNotPackage(next, transformer) + next.insertAfter(cur) + cur = next + cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId) + //printPeriods(cur) + //println(s"new denot: $cur, valid for ${cur.validFor}") + cur.current // multiple transformations could be required + end goForward + + def goBack: SingleDenotation = + // currentPeriod < end of valid; in this case a version must exist + // but to be defensive we check for infinite loop anyway + var cur = this + var cnt = 0 + while !(cur.validFor contains currentPeriod) do + //println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}") + cur = cur.nextInRun + // Note: One might be tempted to add a `prev` field to get to the new denotation + // more directly here. I tried that, but it degrades rather than improves + // performance: Test setup: Compile everything in dotc and immediate subdirectories + // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without. + cnt += 1 + if cnt > MaxPossiblePhaseId then + return atPhase(coveredInterval.firstPhaseId)(current) + cur + end goBack + + if valid.code <= 0 then + // can happen if we sit on a stale denotation which has been replaced + // wholesale by an installAfter; in this case, proceed to the next + // denotation and try again. + escapeToNext + else if valid.runId != currentPeriod.runId then + toNewRun + else if currentPeriod.code > valid.code then + goForward + else + goBack + end current + + private def demandOutsideDefinedMsg(using Context): String = + s"demanding denotation of $this at phase ${ctx.phase}(${ctx.phaseId}) outside defined interval: defined periods are${definedPeriodsString}" + + /** Install this denotation to be the result of the given denotation transformer. 
+ * This is the implementation of the same-named method in SymDenotations. + * It's placed here because it needs access to private fields of SingleDenotation. + * @pre Can only be called in `phase.next`. + */ + protected def installAfter(phase: DenotTransformer)(using Context): Unit = { + val targetId = phase.next.id + if (ctx.phaseId != targetId) atPhase(phase.next)(installAfter(phase)) + else { + val current = symbol.current + // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}") + // printPeriods(current) + this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId) + if (current.validFor.firstPhaseId >= targetId) + current.replaceWith(this) + else { + current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) + insertAfter(current) + } + } + // printPeriods(this) + } + + /** Apply a transformation `f` to all denotations in this group that start at or after + * given phase. Denotations are replaced while keeping the same validity periods. + */ + protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(using Context): Unit = { + var current = symbol.current + while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code)) + current = current.nextInRun + var hasNext = true + while ((current.validFor.firstPhaseId >= phase.id) && hasNext) { + val current1: SingleDenotation = f(current.asSymDenotation) + if (current1 ne current) { + current1.validFor = current.validFor + current.replaceWith(current1) + } + hasNext = current1.nextInRun.validFor.code > current1.validFor.code + current = current1.nextInRun + } + } + + /** Insert this denotation so that it follows `prev`. */ + private def insertAfter(prev: SingleDenotation) = { + this.nextInRun = prev.nextInRun + prev.nextInRun = this + } + + /** Insert this denotation instead of `old`. 
+ * Also ensure that `old` refers with `nextInRun` to this denotation + * and set its `validFor` field to `Nowhere`. This is necessary so that + * references to the old denotation can be brought forward via `current` + * to a valid denotation. + * + * The code to achieve this is subtle in that it works correctly + * whether the replaced denotation is the only one in its cycle or not. + */ + private[dotc] def replaceWith(newd: SingleDenotation): Unit = { + var prev = this + while (prev.nextInRun ne this) prev = prev.nextInRun + // order of next two assignments is important! + prev.nextInRun = newd + newd.nextInRun = nextInRun + validFor = Nowhere + nextInRun = newd + } + + def staleSymbolError(using Context): Nothing = + throw new StaleSymbol(staleSymbolMsg) + + def staleSymbolMsg(using Context): String = { + def ownerMsg = this match { + case denot: SymDenotation => s"in ${denot.owner}" + case _ => "" + } + s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}" + } + + /** The period (interval of phases) for which there exists + * a valid denotation in this flock. + */ + def coveredInterval(using Context): Period = { + var cur = this + var cnt = 0 + var interval = validFor + while ({ + cur = cur.nextInRun + cnt += 1 + assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg) + interval |= cur.validFor + cur ne this + }) + () + interval + } + + /** Show declaration string; useful for showing declarations + * as seen from subclasses. 
+ */ + def showDcl(using Context): String = ctx.printer.dclText(this).show + + override def toString: String = + if (symbol == NoSymbol) symbol.toString + else s"" + + def definedPeriodsString: String = { + var sb = new StringBuilder() + var cur = this + var cnt = 0 + while ({ + sb.append(" " + cur.validFor) + cur = cur.nextInRun + cnt += 1 + if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this } + cur ne this + }) + () + sb.toString + } + + // ------ PreDenotation ops ---------------------------------------------- + + final def first: SingleDenotation = this + final def last: SingleDenotation = this + + def matches(other: SingleDenotation)(using Context): Boolean = + symbol.hasTargetName(other.symbol.targetName) + && matchesLoosely(other) + + /** `matches` without a target name check. + * + * For definitions coming from different languages, we pick a common + * language to compute their signatures. This allows us for example to + * override some Java definitions from Scala even if they have a different + * erasure (see i8615b, i9109b), Erasure takes care of adding any necessary + * bridge to make this work at runtime. + */ + def matchesLoosely(other: SingleDenotation, alwaysCompareTypes: Boolean = false)(using Context): Boolean = + if isType then true + else + val thisLanguage = SourceLanguage(symbol) + val otherLanguage = SourceLanguage(other.symbol) + val commonLanguage = SourceLanguage.commonLanguage(thisLanguage, otherLanguage) + val sig = signature(commonLanguage) + val otherSig = other.signature(commonLanguage) + sig.matchDegree(otherSig) match + case FullMatch => + !alwaysCompareTypes || info.matches(other.info) + case MethodNotAMethodMatch => + !ctx.erasedTypes && { + // A Scala zero-parameter method and a Scala non-method always match. + if !thisLanguage.isJava && !otherLanguage.isJava then + true + // Java allows defining both a field and a zero-parameter method with the same name, + // so they must not match. 
+ else if thisLanguage.isJava && otherLanguage.isJava then + false + // A Java field never matches a Scala method. + else if thisLanguage.isJava then + symbol.is(Method) + else // otherLanguage.isJava + other.symbol.is(Method) + } + case ParamMatch => + // The signatures do not tell us enough to be sure about matching + !ctx.erasedTypes && info.matches(other.info) + case noMatch => + false + + def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): SingleDenotation = + if hasUniqueSym && prevDenots.containsSym(symbol) then NoDenotation + else if isType then filterDisjoint(ownDenots).asSeenFrom(pre) + else asSeenFrom(pre).filterDisjoint(ownDenots) + + def filterWithPredicate(p: SingleDenotation => Boolean): SingleDenotation = + if (p(this)) this else NoDenotation + def filterDisjoint(denots: PreDenotation)(using Context): SingleDenotation = + if (denots.exists && denots.matches(this)) NoDenotation else this + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): SingleDenotation = + val realExcluded = if ctx.isAfterTyper then excluded else excluded | Invisible + def symd: SymDenotation = this match + case symd: SymDenotation => symd + case _ => symbol.denot + if !required.isEmpty && !symd.isAllOf(required) + || symd.isOneOf(realExcluded) then NoDenotation + else this + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = f(this) + + type AsSeenFromResult = SingleDenotation + + protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = { + val symbol = this.symbol + val owner = this match { + case thisd: SymDenotation => thisd.owner + case _ => if (symbol.exists) symbol.owner else NoSymbol + } + + /** The derived denotation with the given `info` transformed with `asSeenFrom`. + * + * As a performance hack, we might reuse an existing SymDenotation, + * instead of creating a new denotation with a given `prefix`, + * see `Config.reuseSymDenotations`. 
+ */ + def derived(info: Type) = + /** Do we need to return a denotation with a prefix set? */ + def needsPrefix = + // For opaque types, the prefix is used in `ElimOpaques#transform`, + // without this i7159.scala would fail when compiled from tasty. + symbol.is(Opaque) + + val derivedInfo = info.asSeenFrom(pre, owner) + if Config.reuseSymDenotations && this.isInstanceOf[SymDenotation] + && (derivedInfo eq info) && !needsPrefix then + this + else + derivedSingleDenotation(symbol, derivedInfo, pre) + end derived + + // It could happen that we see the symbol with prefix `this` as a member of a different class + // through a self type and that it then has a different info. In this case we have to go + // through the asSeenFrom to switch the type back. Test case is pos/i9352.scala. + def hasOriginalInfo: Boolean = this match + case sd: SymDenotation => true + case _ => info eq symbol.info + + def ownerIsPrefix = pre match + case pre: ThisType => pre.sameThis(owner.thisType) + case _ => false + + if !owner.membersNeedAsSeenFrom(pre) && (!ownerIsPrefix || hasOriginalInfo) + || symbol.is(NonMember) + then this + else if symbol.isAllOf(ClassTypeParam) then + val arg = symbol.typeRef.argForParam(pre, widenAbstract = true) + if arg.exists + then derivedSingleDenotation(symbol, normalizedArgBounds(arg.bounds), pre) + else derived(symbol.info) + else derived(symbol.info) + } + + /** The argument bounds, possibly intersected with the parameter's info TypeBounds, + * if the latter is not F-bounded and does not refer to other type parameters + * of the same class, and the intersection is provably nonempty. 
+ */ + private def normalizedArgBounds(argBounds: TypeBounds)(using Context): TypeBounds = + if symbol.isCompleted && !hasBoundsDependingOnParamsOf(symbol.owner) then + val combined @ TypeBounds(lo, hi) = symbol.info.bounds & argBounds + if (lo frozen_<:< hi) then combined + else argBounds + else argBounds + + private def hasBoundsDependingOnParamsOf(cls: Symbol)(using Context): Boolean = + val acc = new TypeAccumulator[Boolean]: + def apply(x: Boolean, tp: Type): Boolean = tp match + case _: LazyRef => true + case tp: TypeRef + if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => true + case _ => foldOver(x, tp) + acc(false, symbol.info) + } + + abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) extends SingleDenotation(symbol, initInfo) { + def infoOrCompleter: Type = initInfo + def isType: Boolean = infoOrCompleter.isInstanceOf[TypeType] + } + + class UniqueRefDenotation( + symbol: Symbol, + initInfo: Type, + initValidFor: Period, + prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + validFor = initValidFor + override def hasUniqueSym: Boolean = true + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + if isRefinedMethod then + new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + else + new UniqueRefDenotation(s, i, validFor, pre) + } + + class JointRefDenotation( + symbol: Symbol, + initInfo: Type, + initValidFor: Period, + prefix: Type, + override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + validFor = initValidFor + override def hasUniqueSym: Boolean = false + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + } + + class ErrorDenotation(using Context) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { + override def exists: Boolean = false + 
override def hasUniqueSym: Boolean = false + validFor = Period.allInRun(ctx.runId) + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + this + } + + /** An error denotation that provides more info about the missing reference. + * Produced by staticRef, consumed by requiredSymbol. + */ + case class MissingRef(val owner: SingleDenotation, name: Name)(using Context) extends ErrorDenotation { + val ex: Exception = new Exception // DEBUG + } + + /** An error denotation that provides more info about alternatives + * that were found but that do not qualify. + * Produced by staticRef, consumed by requiredSymbol. + */ + case class NoQualifyingRef(alts: List[SingleDenotation])(using Context) extends ErrorDenotation + + /** A double definition + */ + def isDoubleDef(sym1: Symbol, sym2: Symbol)(using Context): Boolean = + (sym1.exists && sym2.exists && + (sym1 `ne` sym2) && (sym1.effectiveOwner `eq` sym2.effectiveOwner) && + !sym1.is(Bridge) && !sym2.is(Bridge)) + + // --- Overloaded denotations and predenotations ------------------------------------------------- + + trait MultiPreDenotation extends PreDenotation { + def denot1: PreDenotation + def denot2: PreDenotation + + assert(denot1.exists && denot2.exists, s"Union of non-existing denotations ($denot1) and ($denot2)") + def first: Denotation = denot1.first + def last: Denotation = denot2.last + def matches(other: SingleDenotation)(using Context): Boolean = + denot1.matches(other) || denot2.matches(other) + def mapInherited(owndenot: PreDenotation, prevdenot: PreDenotation, pre: Type)(using Context): PreDenotation = + derivedUnion(denot1.mapInherited(owndenot, prevdenot, pre), denot2.mapInherited(owndenot, prevdenot, pre)) + def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation = + derivedUnion(denot1 filterWithPredicate p, denot2 filterWithPredicate p) + def filterDisjoint(denot: PreDenotation)(using Context): PreDenotation = + derivedUnion(denot1 
filterDisjoint denot, denot2 filterDisjoint denot) + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation = + derivedUnion(denot1.filterWithFlags(required, excluded), denot2.filterWithFlags(required, excluded)) + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = + g(denot1.aggregate(f, g), denot2.aggregate(f, g)) + protected def derivedUnion(denot1: PreDenotation, denot2: PreDenotation) = + if ((denot1 eq this.denot1) && (denot2 eq this.denot2)) this + else denot1 union denot2 + } + + final case class DenotUnion(denot1: PreDenotation, denot2: PreDenotation) extends MultiPreDenotation { + def exists: Boolean = true + def toDenot(pre: Type)(using Context): Denotation = + denot1.toDenot(pre).meet(denot2.toDenot(pre), pre) + def containsSym(sym: Symbol): Boolean = + (denot1 containsSym sym) || (denot2 containsSym sym) + type AsSeenFromResult = PreDenotation + def computeAsSeenFrom(pre: Type)(using Context): PreDenotation = + derivedUnion(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) + } + + /** An overloaded denotation consisting of the alternatives of both given denotations. 
+ */ + case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType) with MultiPreDenotation { + final def infoOrCompleter: Type = multiHasNot("info") + final def validFor: Period = denot1.validFor & denot2.validFor + final def isType: Boolean = false + final def hasUniqueSym: Boolean = false + final def name(using Context): Name = denot1.name + final def signature(using Context): Signature = Signature.OverloadedSignature + def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): Denotation = + if (sig eq Signature.OverloadedSignature) this + else derivedUnionDenotation( + denot1.atSignature(sig, targetName, site, relaxed), + denot2.atSignature(sig, targetName, site, relaxed)) + def current(using Context): Denotation = + derivedUnionDenotation(denot1.current, denot2.current) + def altsWith(p: Symbol => Boolean): List[SingleDenotation] = + denot1.altsWith(p) ++ denot2.altsWith(p) + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = { + val sd1 = denot1.suchThat(p) + val sd2 = denot2.suchThat(p) + if sd1.exists then + if sd2.exists then + throw TypeError( + em"""Failure to disambiguate overloaded reference with + | ${denot1.symbol.showLocated}: ${denot1.info} and + | ${denot2.symbol.showLocated}: ${denot2.info}""") + else sd1 + else sd2 + } + override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation = + derivedUnionDenotation(denot1.filterWithPredicate(p), denot2.filterWithPredicate(p)) + def hasAltWith(p: SingleDenotation => Boolean): Boolean = + denot1.hasAltWith(p) || denot2.hasAltWith(p) + def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { + val d1 = denot1 accessibleFrom (pre, superAccess) + val d2 = denot2 accessibleFrom (pre, superAccess) + if (!d1.exists) d2 + else if (!d2.exists) d1 + else derivedUnionDenotation(d1, d2) + } + def mapInfo(f: Type => Type)(using Context): Denotation = + 
derivedUnionDenotation(denot1.mapInfo(f), denot2.mapInfo(f)) + def derivedUnionDenotation(d1: Denotation, d2: Denotation): Denotation = + if ((d1 eq denot1) && (d2 eq denot2)) this + else if (!d1.exists) d2 + else if (!d2.exists) d1 + else MultiDenotation(d1, d2) + type AsSeenFromResult = Denotation + def computeAsSeenFrom(pre: Type)(using Context): Denotation = + derivedUnionDenotation(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) + override def toString: String = alternatives.mkString(" ") + + private def multiHasNot(op: String): Nothing = + throw new UnsupportedOperationException( + s"multi-denotation with alternatives $alternatives does not implement operation $op") + } + + /** The current denotation of the static reference given by path, + * or a MissingRef or NoQualifyingRef instance, if it does not exist. + * if generateStubs is set, generates stubs for missing top-level symbols + */ + def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(using Context): Denotation = { + def select(prefix: Denotation, selector: Name): Denotation = { + val owner = prefix.disambiguate(_.info.isParameterless) + def isPackageFromCoreLibMissing: Boolean = + // if the scala package is missing, the stdlib must be missing + owner.symbol == defn.RootClass && selector == nme.scala + if (owner.exists) { + val result = if (isPackage) owner.info.decl(selector) else owner.info.member(selector) + if (result.exists) result + else if (isPackageFromCoreLibMissing) throw new MissingCoreLibraryException(selector.toString) + else { + val alt = + if (generateStubs) missingHook(owner.symbol.moduleClass, selector) + else NoSymbol + if (alt.exists) alt.denot + else MissingRef(owner, selector) + } + } + else owner + } + def recur( + path: Name, + wrap: TermName -> Name = identity[Name] // !cc! 
default argument needs to be instantiated, error if [Name] is dropped + ): Denotation = path match { + case path: TypeName => + recur(path.toTermName, n => n.toTypeName) + case ModuleClassName(underlying) => + recur(underlying, n => wrap(ModuleClassName(n))) + case QualifiedName(prefix, selector) => + select(recur(prefix), wrap(selector)) + case qn @ AnyQualifiedName(prefix, _) => + recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) + case path: SimpleName => + def recurSimple(len: Int, wrap: TermName -> Name): Denotation = { + val point = path.lastIndexOf('.', len - 1) + val selector = wrap(path.slice(point + 1, len).asTermName) + val prefix = + if (point > 0) recurSimple(point, identity) + else if (selector.isTermName) defn.RootClass.denot + else defn.EmptyPackageClass.denot + select(prefix, selector) + } + recurSimple(path.length, wrap) + } + + val run = ctx.run + if run == null then recur(path) + else run.staticRefs.getOrElseUpdate(path, recur(path)) + } + + /** If we are looking for a non-existing term name in a package, + * assume it is a package for which we do not have a directory and + * enter it. + */ + def missingHook(owner: Symbol, name: Name)(using Context): Symbol = + if (owner.is(Package) && name.isTermName) + newCompletePackageSymbol(owner, name.asTermName).entered + else + NoSymbol + + /** An exception for accessing symbols that are no longer valid in current run */ + class StaleSymbol(msg: -> String) extends Exception { + util.Stats.record("stale symbol") + override def getMessage(): String = msg + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Flags.scala b/tests/pos-with-compiler-cc/dotc/core/Flags.scala new file mode 100644 index 000000000000..8bf65ed8288f --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Flags.scala @@ -0,0 +1,613 @@ +package dotty.tools.dotc +package core + +object Flags { + + object opaques { + + /** A FlagSet represents a set of flags. 
Flags are encoded as follows: + * The first two bits indicate whether a flag set applies to terms, + * to types, or to both. Bits 2..63 are available for properties + * and can be doubly used for terms and types. + */ + opaque type FlagSet = Long + def FlagSet(bits: Long): FlagSet = bits + def toBits(fs: FlagSet): Long = fs + + /** A flag set consisting of a single flag */ + opaque type Flag <: FlagSet = Long + private[Flags] def Flag(bits: Long): Flag = bits + } + export opaques.FlagSet + + type Flag = opaques.Flag + + extension (x: FlagSet) { + + inline def bits: Long = opaques.toBits(x) + + /** The union of the given flag sets. + * Combining two FlagSets with `|` will give a FlagSet + * that has the intersection of the applicability to terms/types + * of the two flag sets. It is checked that the intersection is not empty. + */ + def | (y: FlagSet): FlagSet = + if (x.bits == 0) y + else if (y.bits == 0) x + else { + val tbits = x.bits & y.bits & KINDFLAGS + if (tbits == 0) + assert(false, s"illegal flagset combination: ${x.flagsString} and ${y.flagsString}") + FlagSet(tbits | ((x.bits | y.bits) & ~KINDFLAGS)) + } + + /** The intersection of the given flag sets */ + def & (y: FlagSet): FlagSet = FlagSet(x.bits & y.bits) + + /** The intersection of a flag set with the complement of another flag set */ + def &~ (y: FlagSet): FlagSet = { + val tbits = x.bits & KINDFLAGS + if ((tbits & y.bits) == 0) x + else FlagSet(tbits | ((x.bits & ~y.bits) & ~KINDFLAGS)) + } + + def ^ (y: FlagSet) = + FlagSet((x.bits | y.bits) & KINDFLAGS | (x.bits ^ y.bits) & ~KINDFLAGS) + + /** Does the given flag set contain the given flag? + * This means that both the kind flags and the carrier bits have non-empty intersection. + */ + def is (flag: Flag): Boolean = { + val fs = x.bits & flag.bits + (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 + } + + /** Does the given flag set contain the given flag + * and at the same time contain none of the flags in the `butNot` set? 
+ */
+ def is (flag: Flag, butNot: FlagSet): Boolean = x.is(flag) && !x.isOneOf(butNot)
+
+ /** Does the given flag set have a non-empty intersection with another flag set?
+ * This means that both the kind flags and the carrier bits have non-empty intersection.
+ */
+ def isOneOf (flags: FlagSet): Boolean = {
+ val fs = x.bits & flags.bits
+ (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0
+ }
+
+ /** Does the given flag set have a non-empty intersection with another flag set,
+ * and at the same time contain none of the flags in the `butNot` set?
+ */
+ def isOneOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isOneOf(flags) && !x.isOneOf(butNot)
+
+ /** Does a given flag set have all of the flags of another flag set?
+ * Pre: The intersection of the term/type flags of both sets must be non-empty.
+ */
+ def isAllOf (flags: FlagSet): Boolean = {
+ val fs = x.bits & flags.bits
+ ((fs & KINDFLAGS) != 0 || flags.bits == 0) &&
+ (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT)
+ }
+
+ /** Does a given flag set have all of the flags in another flag set
+ * and at the same time contain none of the flags in the `butNot` set?
+ * Pre: The intersection of the term/type flags of both sets must be non-empty.
+ */
+ def isAllOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isAllOf(flags) && !x.isOneOf(butNot)
+
+ def isEmpty: Boolean = (x.bits & ~KINDFLAGS) == 0
+
+ /** Is a given flag set a subset of another flag set? */
+ def <= (y: FlagSet): Boolean = (x.bits & y.bits) == x.bits
+
+ /** Does the given flag set apply to terms? */
+ def isTermFlags: Boolean = (x.bits & TERMS) != 0
+
+ /** Does the given flag set apply to types?
*/ + def isTypeFlags: Boolean = (x.bits & TYPES) != 0 + + /** The given flag set with all flags transposed to be type flags */ + def toTypeFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TYPES) + + /** The given flag set with all flags transposed to be term flags */ + def toTermFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TERMS) + + /** The given flag set with all flags transposed to be common flags */ + def toCommonFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits | KINDFLAGS) + + /** The number of non-kind flags in the given flag set */ + def numFlags: Int = java.lang.Long.bitCount(x.bits & ~KINDFLAGS) + + /** The lowest non-kind bit set in the given flag set */ + def firstBit: Int = java.lang.Long.numberOfTrailingZeros(x.bits & ~KINDFLAGS) + + /** The list of non-empty names of flags with given index idx that are set in the given flag set */ + private def flagString(idx: Int): List[String] = + if ((x.bits & (1L << idx)) == 0) Nil + else { + def halfString(kind: Int) = + if ((x.bits & (1L << kind)) != 0) flagName(idx)(kind) else "" + val termFS = halfString(TERMindex) + val typeFS = halfString(TYPEindex) + val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil) + strs filter (_.nonEmpty) + } + + /** The list of non-empty names of flags that are set in the given flag set */ + def flagStrings(privateWithin: String = ""): Seq[String] = { + var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) + if (!privateWithin.isEmpty && !x.is(Protected)) + rawStrings = rawStrings :+ "private" + val scopeStr = if (x.is(Local)) "this" else privateWithin + if (scopeStr != "") + rawStrings.filter(_ != "").map { + case "private" => s"private[$scopeStr]" + case "protected" => s"protected[$scopeStr]" + case str => str + } + else rawStrings + } + + /** The string representation of the given flag set */ + def flagsString: String = x.flagStrings("").mkString(" ") + } + + // 
Temporary while extension names are in flux + def or(x1: FlagSet, x2: FlagSet) = x1 | x2 + def and(x1: FlagSet, x2: FlagSet) = x1 & x2 + + def termFlagSet(x: Long) = FlagSet(TERMS | x) + + private inline val TYPESHIFT = 2 + private inline val TERMindex = 0 + private inline val TYPEindex = 1 + private inline val TERMS = 1 << TERMindex + private inline val TYPES = 1 << TYPEindex + private inline val KINDFLAGS = TERMS | TYPES + + private inline val FirstFlag = 2 + private inline val FirstNotPickledFlag = 48 + private inline val MaxFlag = 63 + + private val flagName = Array.fill(64, 2)("") + + private def isDefinedAsFlag(idx: Int) = flagName(idx).exists(_.nonEmpty) + + /** The flag set containing all defined flags of either kind whose bits + * lie in the given range + */ + private def flagRange(start: Int, end: Int) = + FlagSet((start until end).foldLeft(KINDFLAGS.toLong) ((bits, idx) => + if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits)) + + /** The union of all flags in given flag set */ + def union(flagss: FlagSet*): FlagSet = { + var flag = EmptyFlags + for (f <- flagss) + flag |= f + flag + } + + def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*) + + /** The empty flag set */ + val EmptyFlags: FlagSet = FlagSet(0) + + /** The undefined flag set */ + val UndefinedFlags: FlagSet = FlagSet(~KINDFLAGS) + + /** Three flags with given index between 2 and 63. + * The first applies to both terms and types. the second is a term flag, and + * the third is a type flag. Installs given name(s) as the name(s) of the flags. + * @param name The name to be used for the term flag + * @param typeName The name to be used for the type flag, if it is different from `name`. 
+ */ + private def newFlags(index: Int, name: String, typeName: String = ""): (Flag, Flag, Flag) = { + flagName(index)(TERMindex) = name + flagName(index)(TYPEindex) = if (typeName.isEmpty) name else typeName + val bits = 1L << index + (opaques.Flag(KINDFLAGS | bits), opaques.Flag(TERMS | bits), opaques.Flag(TYPES | bits)) + } + + // ----------------- Available flags ----------------------------------------------------- + + /** Labeled with `private` modifier */ + val (Private @ _, PrivateTerm @ _, PrivateType @ _) = newFlags(2, "private") + + /** Labeled with `protected` modifier */ + val (Protected @ _, _, _) = newFlags(3, "protected") + + /** Labeled with `override` modifier */ + val (Override @ _, _, _) = newFlags(4, "override") + + /** A declared, but not defined member */ + val (Deferred @ _, DeferredTerm @ _, DeferredType @ _) = newFlags(5, "") + + /** Labeled with `final` modifier */ + val (Final @ _, _, _) = newFlags(6, "final") + + /** A method symbol / a super trait */ + val (_, Method @ _, _) = newFlags(7, "") + + /** A (term or type) parameter to a class or method */ + val (Param @ _, TermParam @ _, TypeParam @ _) = newFlags(8, "") + + /** Labeled with `implicit` modifier (implicit value) */ + val (Implicit @ _, ImplicitVal @ _, _) = newFlags(9, "implicit") + + /** Labeled with `lazy` (a lazy val) / a trait */ + val (LazyOrTrait @ _, Lazy @ _, Trait @ _) = newFlags(10, "lazy", "") + + /** A value or variable accessor (getter or setter) */ + val (AccessorOrSealed @ _, Accessor @ _, Sealed @ _) = newFlags(11, "", "sealed") + + /** A mutable var, an open class */ + val (MutableOrOpen @ __, Mutable @ _, Open @ _) = newFlags(12, "mutable", "open") + + /** Symbol is local to current class (i.e. private[this] or protected[this] + * pre: Private or Protected are also set + */ + val (Local @ _, _, _) = newFlags(13, "") + + /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not), + * or an accessor of such a field. 
+ */ + val (_, ParamAccessor @ _, _) = newFlags(14, "") + + /** A value or class implementing a module */ + val (Module @ _, ModuleVal @ _, ModuleClass @ _) = newFlags(15, "module") + + /** A value or class representing a package */ + val (Package @ _, PackageVal @ _, PackageClass @ _) = newFlags(16, "") + + /** A case class or its companion object + * Note: Case is also used to indicate that a symbol is bound by a pattern. + */ + val (Case @ _, CaseVal @ _, CaseClass @ _) = newFlags(17, "case") + + /** A compiler-generated symbol, which is visible for type-checking + * (compare with artifact) + */ + val (Synthetic @ _, _, _) = newFlags(18, "") + + /** Labelled with `inline` modifier */ + val (Inline @ _, _, _) = newFlags(19, "inline") + + /** An outer accessor / a covariant type variable */ + val (OuterOrCovariant @ _, OuterAccessor @ _, Covariant @ _) = newFlags(20, "", "") + + /** The label of a labeled block / a contravariant type variable */ + val (LabelOrContravariant @ _, Label @ _, Contravariant @ _) = newFlags(21, "