diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala
index 85db025812be..924a9162bdb9 100644
--- a/compiler/src/dotty/tools/dotc/Driver.scala
+++ b/compiler/src/dotty/tools/dotc/Driver.scala
@@ -59,16 +59,15 @@ class Driver {
     MacroClassLoader.init(ctx)
     Positioned.updateDebugPos(ctx)
-    if (!ctx.settings.YdropComments.value(ctx) || ctx.mode.is(Mode.ReadComments)) {
+    if (!ctx.settings.YdropComments.value(ctx) || ctx.mode.is(Mode.ReadComments))
       ctx.setProperty(ContextDoc, new ContextDocstrings)
-    }
     val fileNames = CompilerCommand.checkUsage(summary, sourcesRequired)(ctx)
     fromTastySetup(fileNames, ctx)
   }
   /** Setup extra classpath and figure out class names for tasty file inputs */
-  protected def fromTastySetup(fileNames0: List[String], ctx0: Context): (List[String], Context) = {
+  protected def fromTastySetup(fileNames0: List[String], ctx0: Context): (List[String], Context) =
     if (ctx0.settings.fromTasty.value(ctx0)) {
       // Resolve classpath and class names of tasty files
       val (classPaths, classNames) = fileNames0.flatMap { name =>
@@ -98,8 +97,8 @@ class Driver {
       val fullClassPath = (classPaths1 :+ ctx1.settings.classpath.value(ctx1)).mkString(java.io.File.pathSeparator)
       ctx1.setSetting(ctx1.settings.classpath, fullClassPath)
       (classNames, ctx1)
-    } else (fileNames0, ctx0)
-  }
+    }
+    else (fileNames0, ctx0)
   /** Entry point to the compiler that can be conveniently used with Java reflection.
    *
diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala
index 09bae4f1dc5b..ce71dcdaeadc 100644
--- a/compiler/src/dotty/tools/dotc/Run.scala
+++ b/compiler/src/dotty/tools/dotc/Run.scala
@@ -102,7 +102,8 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
   def compile(fileNames: List[String]): Unit = try {
     val sources = fileNames.map(ctx.getSource(_))
     compileSources(sources)
-  } catch {
+  }
+  catch {
     case NonFatal(ex) =>
       ctx.echo(i"exception occurred while compiling $units%, %")
       throw ex
@@ -114,11 +115,12 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
    * or we need to assemble phases on each run, and take -Yskip, -Ystop into
    * account. I think the latter would be preferable.
    */
-  def compileSources(sources: List[SourceFile]): Unit =
+  def compileSources(sources: List[SourceFile]): Unit = {
     if (sources forall (_.exists)) {
       units = sources.map(CompilationUnit(_))
       compileUnits()
     }
+  }
   def compileUnits(us: List[CompilationUnit]): Unit = {
     units = us
@@ -150,24 +152,23 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
     var lastPrintedTree: PrintedTree = NoPrintedTree
     val profiler = ctx.profiler
-    for (phase <- ctx.base.allPhases)
+    for (phase <- ctx.base.allPhases) {
       if (phase.isRunnable)
         Stats.trackTime(s"$phase ms ") {
           val start = System.currentTimeMillis
           val profileBefore = profiler.beforePhase(phase)
           units = phase.runOn(units)
           profiler.afterPhase(phase, profileBefore)
-          if (ctx.settings.Xprint.value.containsPhase(phase)) {
-            for (unit <- units) {
+          if (ctx.settings.Xprint.value.containsPhase(phase))
+            for (unit <- units)
               lastPrintedTree = printTree(lastPrintedTree)(ctx.fresh.setPhase(phase.next).setCompilationUnit(unit))
-            }
-          }
           ctx.informTime(s"$phase ", start)
           Stats.record(s"total trees at end of $phase", ast.Trees.ntrees)
           for (unit <- units)
             Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize)
         }
+    }
     profiler.finished()
   }
@@ -189,7 +190,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
    * If `typeCheck = true`, also run typer on the compilation unit, and set
    * `rootTreeOrProvider`.
    */
-  def lateCompile(file: AbstractFile, typeCheck: Boolean)(implicit ctx: Context): Unit =
+  def lateCompile(file: AbstractFile, typeCheck: Boolean)(implicit ctx: Context): Unit = {
     if (!files.contains(file) && !lateFiles.contains(file)) {
       lateFiles += file
       val unit = CompilationUnit(ctx.getSource(file.path))
@@ -208,6 +209,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
       }
       process()(runContext.fresh.setCompilationUnit(unit))
     }
+  }
   private sealed trait PrintedTree
   private /*final*/ case class SomePrintedTree(phase: String, tree: String) extends PrintedTree
diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
index f678d8e0cfe9..1f43503cc514 100644
--- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
@@ -308,12 +308,12 @@ object desugar {
   * case '{ @patternBindHole def `$a`(...) = ...; ... `$a`() ... } => a
   * ```
   */
-  def transformQuotedPatternName(tree: ValOrDefDef)(implicit ctx: Context): ValOrDefDef = {
+  def transformQuotedPatternName(tree: ValOrDefDef)(implicit ctx: Context): ValOrDefDef =
    if (ctx.mode.is(Mode.QuotedPattern) && !isBackquoted(tree) && tree.name != nme.ANON_FUN && tree.name.startsWith("$")) {
      val mods = tree.mods.withAddedAnnotation(New(ref(defn.InternalQuoted_patternBindHoleAnnot.typeRef)).withSpan(tree.span))
      tree.withMods(mods)
-    } else tree
-  }
+    }
+    else tree
   /** Add an explicit ascription to the `expectedTpt` to every tail splice.
    *
@@ -926,7 +926,8 @@ object desugar {
       val patternBindHoleAnnot = New(ref(defn.InternalQuoted_patternBindHoleAnnot.typeRef)).withSpan(tree.span)
       val mods = tree.mods.withAddedAnnotation(patternBindHoleAnnot)
       tree.withMods(mods)
-    } else tree
+    }
+    else tree
   }
   /** The normalized name of `mdef`. This means
@@ -953,7 +954,8 @@ object desugar {
     impl.body.find {
       case dd: DefDef if dd.mods.is(Extension) => true
       case _ => false
-    } match {
+    }
+    match {
       case Some(DefDef(name, _, (vparam :: _) :: _, _, _)) =>
         s"${name}_of_${inventTypeName(vparam.tpt)}"
       case _ =>
@@ -1007,7 +1009,7 @@ object desugar {
        case id: Ident =>
          expandSimpleEnumCase(id.name.asTermName, mods,
            Span(id.span.start, id.span.end, id.span.start))
-      }
+      }
      else {
        val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt))
        pats1 map (makePatDef(pdef, mods, _, rhs))
      }
@@ -1068,7 +1070,7 @@ object desugar {
      if (tupleOptimizable) // include `_`
        pat match {
          case Tuple(pats) =>
-           pats.map { case id: Ident => id -> TypeTree() }
+           pats.map { case id: Ident => id -> TypeTree() }
        }
      else getVariables(pat) // no `_`
@@ -1127,11 +1129,12 @@ object desugar {
      case tree: MemberDef =>
        var tested: MemberDef = tree
        def fail(msg: String) = ctx.error(msg, tree.sourcePos)
-        def checkApplicable(flag: Flag, test: MemberDefTest): Unit =
+        def checkApplicable(flag: Flag, test: MemberDefTest): Unit = {
          if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) {
            fail(i"modifier `${flag.flagsString}` is not allowed for this definition")
            tested = tested.withMods(tested.mods.withoutFlags(flag))
          }
+        }
        checkApplicable(Opaque, legalOpaque)
        tested
      case _ =>
@@ -1296,7 +1299,7 @@ object desugar {
        if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n)))
        else Select(refOfDef(param), nme.selectorName(n))
      val vdefs =
-        params.zipWithIndex.map{
+        params.zipWithIndex.map {
          case (param, idx) => DefDef(param.name, Nil, Nil, TypeTree(), selector(idx)).withSpan(param.span)
        }
@@ -1338,7 +1341,7 @@ object desugar {
        .withSpan(original.span.withPoint(named.span.start))
      val mayNeedSetter = valDef(vdef)
      mayNeedSetter
-    }
+    }
  private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit src: SourceFile) =
    DefDef(named.name.asTermName, Nil, Nil, tpt, rhs)
@@ -1551,7 +1554,8 @@ object desugar {
      RefinedTypeTree(polyFunctionTpt, List(
        DefDef(nme.apply, applyTParams, List(applyVParams), res, EmptyTree)
      ))
-    } else {
+    }
+    else {
      // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body
      // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N) = body }
@@ -1606,7 +1610,8 @@ object desugar {
      Annotated(
        AppliedTypeTree(ref(seqType), t),
        New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil))
-    } else {
+    }
+    else {
      assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode)
      Select(t, op.name)
    }
@@ -1692,8 +1697,9 @@ object desugar {
  private def getVariables(tree: Tree)(implicit ctx: Context): List[VarInfo] = {
    val buf = ListBuffer[VarInfo]()
    def seenName(name: Name) = buf exists (_._1.name == name)
-    def add(named: NameTree, t: Tree): Unit =
+    def add(named: NameTree, t: Tree): Unit = {
      if (!seenName(named.name) && named.name.isTermName) buf += ((named, t))
+    }
    def collect(tree: Tree): Unit = tree match {
      case Bind(nme.WILDCARD, tree1) => collect(tree1)
diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
index e9d44a564412..dbed71918b29 100644
--- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
+++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
@@ -45,7 +45,7 @@ object MainProxies {
    def pos = mainFun.sourcePos
    val argsRef = Ident(nme.args)
-    def addArgs(call: untpd.Tree, mt:
MethodType, idx: Int): untpd.Tree = if (mt.isImplicitMethod) { ctx.error(s"@main method cannot have implicit parameters", pos) call @@ -71,7 +71,6 @@ object MainProxies { call1 } } - } var result: List[TypeDef] = Nil if (!mainFun.owner.isStaticOwner) diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index 85add107119d..5446a1447c05 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -97,7 +97,8 @@ object NavigateAST { case _ => } childPath(p.productIterator, p :: path) - } else path + } + else path singlePath(from, Nil) } } diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala index a794bf7bae7e..3ff320449f95 100644 --- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala +++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala @@ -38,9 +38,8 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Pro /** The span part of the item's position */ def span: Span = mySpan - def span_=(span: Span): Unit = { + def span_=(span: Span): Unit = mySpan = span - } uniqueId = src.nextId span = envelope(src) @@ -58,9 +57,8 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Pro else { val newpd: this.type = if (mySpan.isSynthetic) { - if (!mySpan.exists && span.exists) { + if (!mySpan.exists && span.exists) envelope(source, span.startPos) // fill in children spans - } this } else cloneIn(source) @@ -109,12 +107,11 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Pro else if (span1.start == MaxOffset) // No positioned child was found NoSpan - else { + else ///println(s"revisit $uniqueId with $span1") // We have some children left whose span could not be assigned. // Go through it again with the known start position. includeChildren(span1.startPos, 0) - } span2.toSynthetic } @@ -145,7 +142,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Pro found = isParent(productElement(n)) } found - } + } } /** A hook that can be overridden if overlap checking in `checkPos` should be @@ -232,7 +229,8 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Pro n += 1 } } - } catch { + } + catch { case ex: AssertionError => println(i"error while checking $this") throw ex @@ -242,7 +240,6 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Pro object Positioned { @sharable private[Positioned] var debugId = Int.MinValue - def updateDebugPos(implicit ctx: Context): Unit = { + def updateDebugPos(implicit ctx: Context): Unit = debugId = ctx.settings.YdebugTreeWithId.value - } } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 27c8f1b786f9..e23e899a91b9 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -401,7 +401,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => || fn.symbol.isPrimaryConstructor && fn.symbol.owner.isNoInitsClass) // TODO: include in isStable? 
minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure else if (fn.symbol.is(Erased)) Pure - else if (fn.symbol.isStableMember /* && fn.symbol.is(Lazy) */) + else if (fn.symbol.isStableMember) /* && fn.symbol.is(Lazy) */ minOf(exprPurity(fn), args.map(exprPurity)) `min` Idempotent else Impure case Typed(expr, _) => @@ -686,7 +686,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => def defPath(sym: Symbol, root: Tree)(implicit ctx: Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})") { require(sym.span.exists, sym) object accum extends TreeAccumulator[List[Tree]] { - def apply(x: List[Tree], tree: Tree)(implicit ctx: Context): List[Tree] = { + def apply(x: List[Tree], tree: Tree)(implicit ctx: Context): List[Tree] = if (tree.span.contains(sym.span)) if (definedSym(tree) == sym) tree :: x else { @@ -694,7 +694,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => if (x1 ne x) tree :: x1 else x1 } else x - } } accum(Nil, root) } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala index 154729a877c8..87a5012ba6b4 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala @@ -28,7 +28,7 @@ class TreeMapWithImplicits extends tpd.TreeMap { @tailrec def traverse(curStats: List[Tree])(implicit ctx: Context): List[Tree] = { - def recur(stats: List[Tree], changed: Tree, rest: List[Tree])(implicit ctx: Context): List[Tree] = { + def recur(stats: List[Tree], changed: Tree, rest: List[Tree])(implicit ctx: Context): List[Tree] = if (stats eq curStats) { val rest1 = transformStats(rest, exprOwner) changed match { @@ -37,7 +37,6 @@ class TreeMapWithImplicits extends tpd.TreeMap { } } else stats.head :: recur(stats.tail, changed, rest) - } curStats match { case stat :: rest => @@ -123,5 +122,5 @@ class TreeMapWithImplicits extends tpd.TreeMap { tree } } - } + diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 7ef2d8a30f6a..4c79a36b6abf 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -189,9 +189,8 @@ class TreeTypeMap( val origDcls = cls.info.decls.toList val mappedDcls = ctx.mapSymbols(origDcls, tmap) val tmap1 = tmap.withMappedSyms(origDcls, mappedDcls) - if (symsChanged) { + if (symsChanged) origDcls.lazyZip(mappedDcls).foreach(cls.asClass.replace) - } tmap1 } if (symsChanged || (fullMap eq substMap)) fullMap diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 2b689c7ef9ab..01559e463b55 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -118,7 +118,7 @@ object Trees { * - the child tree is an identifier, or * - errors were reported */ - private def checkChildrenTyped(it: Iterator[Any])(implicit ctx: Context): Unit = + private def checkChildrenTyped(it: Iterator[Any])(implicit ctx: Context): Unit = { if (!this.isInstanceOf[Import[_]]) while (it.hasNext) it.next() match { @@ -129,6 +129,7 @@ object Trees { case xs: List[_] => checkChildrenTyped(xs.iterator) case _ => } + } def withTypeUnchecked(tpe: Type): ThisTree[Type] = { val tree = @@ -374,7 +375,7 @@ object Trees { if (idx >= 0) idx else point // use `point` anyway. 
This is important if no source exists so scanning fails } - Span(point, point + realName.length, point) + Span(point, point + realName.length, point) } } else span @@ -421,14 +422,13 @@ object Trees { extends DenotingTree[T] with TermTree[T] { type ThisTree[-T >: Untyped] = This[T] // Denotation of a This tree is always the underlying class; needs correction for modules. - override def denot(implicit ctx: Context): Denotation = { + override def denot(implicit ctx: Context): Denotation = typeOpt match { case tpe @ TermRef(pre, _) if tpe.symbol.is(Module) => tpe.symbol.moduleClass.denot.asSeenFrom(pre) case _ => super.denot } - } } /** C.super[mix], where qual = C.this */ diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 79afa05783a5..6029fce79e11 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -281,8 +281,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) New(firstParent, constr.symbol.asTerm, superArgs) } - ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) - } + ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) + } def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(implicit ctx: Context): TypeDef = { val selfType = @@ -436,7 +436,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { if (!ctx.erasedTypes) { assert(!TypeErasure.isGeneric(elemTpe), elemTpe) //needs to be done during typer. See Applications.convertNewGenericArray newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) - } else // after erasure + } + else // after erasure newArr.appliedToArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) } @@ -451,12 +452,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** A tree representing a `wrapXYZArray(tree)` operation of the right * kind for the given element type in `elemTpe`. */ - def wrapArray(tree: Tree, elemtp: Type)(implicit ctx: Context): Tree = { + def wrapArray(tree: Tree, elemtp: Type)(implicit ctx: Context): Tree = ref(defn.getWrapVarargsArrayModule) .select(wrapArrayMethodName(elemtp)) .appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil) .appliedTo(tree) - } // ------ Creating typed equivalents of trees that exist only in untyped form ------- @@ -534,7 +534,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { if (owner.isLocalDummy && owner.owner == cls) owner else if (owner == cls) foldOver(sym, tree) else sym - } else foldOver(sym, tree) + } + else foldOver(sym, tree) } override val cpy: TypedTreeCopier = // Type ascription needed to pick up any new members in TreeCopier (currently there are none) @@ -761,14 +762,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * owner to `to`, and continue until a non-weak owner is reached. 
*/ def changeOwner(from: Symbol, to: Symbol)(implicit ctx: Context): ThisTree = { - @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = { + @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = if (from.isWeakOwner && !from.owner.isClass) loop(from.owner, from :: froms, to :: tos) - else { + else //println(i"change owner ${from :: froms}%, % ==> $tos of $tree") TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree) - } - } if (from == to) tree else loop(from, Nil, to :: Nil) } @@ -936,11 +935,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** `tree ne null` (might need a cast to be type correct) */ def testNotNull(implicit ctx: Context): Tree = { - val receiver = if (defn.isBottomType(tree.tpe)) { + val receiver = if (defn.isBottomType(tree.tpe)) // If the receiver is of type `Nothing` or `Null`, add an ascription so that the selection // succeeds: e.g. `null.ne(null)` doesn't type, but `(null: AnyRef).ne(null)` does. Typed(tree, TypeTree(defn.AnyRefType)) - } else tree.ensureConforms(defn.ObjectType) receiver.select(defn.Object_ne).appliedTo(nullLiteral).withSpan(tree.span) } @@ -1066,10 +1064,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { protected def skipLocal(sym: Symbol): Boolean = true /** Is this a symbol that of a local val or parameterless def for which we could get the rhs */ - private def isBinding(sym: Symbol)(implicit ctx: Context): Boolean = { + private def isBinding(sym: Symbol)(implicit ctx: Context): Boolean = sym.isTerm && !sym.is(Param) && !sym.owner.isClass && !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless - } } implicit class ListOfTreeDecorator(val xs: List[tpd.Tree]) extends AnyVal { @@ -1090,7 +1087,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { if (ctx.settings.YretainTrees.value) { if (myTrees == null) myTrees = computeRootTrees myTrees - } else computeRootTrees + } + else computeRootTrees /** Get first tree defined by this provider, or EmptyTree if none exists */ def tree(implicit ctx: Context): Tree = @@ -1128,7 +1126,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case tpnme.Double => TYPE(defn.BoxedDoubleModule) case tpnme.Unit => TYPE(defn.BoxedUnitModule) case _ => - if(ctx.erasedTypes || !tp.derivesFrom(defn.ArrayClass)) + if (ctx.erasedTypes || !tp.derivesFrom(defn.ArrayClass)) Literal(Constant(TypeErasure.erasure(tp))) else Literal(Constant(tp)) } @@ -1166,11 +1164,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } @tailrec - def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = { + def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = if (trees.isEmpty) trees.isEmpty else if (trees1.isEmpty) trees.isEmpty else (trees.head.tpe eq trees1.head.tpe) && sameTypes(trees.tail, trees1.tail) - } /** If `tree`'s purity level is less than `level`, let-bind it so that it gets evaluated * only once. I.e. 
produce a @@ -1181,21 +1178,19 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * * ~within('tree) */ - def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(implicit ctx: Context): Tree = { + def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(implicit ctx: Context): Tree = if (exprPurity(tree) >= level) within(tree) else { val vdef = SyntheticValDef(TempResultName.fresh(), tree) Block(vdef :: Nil, within(Ident(vdef.namedType))) } - } /** Let bind `tree` unless `tree` is at least idempotent */ def evalOnce(tree: Tree)(within: Tree => Tree)(implicit ctx: Context): Tree = letBindUnless(TreeInfo.Idempotent, tree)(within) - def runtimeCall(name: TermName, args: List[Tree])(implicit ctx: Context): Tree = { + def runtimeCall(name: TermName, args: List[Tree])(implicit ctx: Context): Tree = Ident(defn.ScalaRuntimeModule.requiredMethod(name).termRef).appliedToArgs(args) - } /** An extractor that pulls out type arguments */ object MaybePoly { @@ -1294,7 +1289,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { */ def importedSymbols(imp: Import, selectorPredicate: untpd.Tree => Boolean = util.common.alwaysTrue) - (implicit ctx: Context): List[Symbol] = { + (implicit ctx: Context): List[Symbol] = imp.selectors.find(selectorPredicate) match { case Some(id: untpd.Ident) => importedSymbols(imp.expr, id.name) @@ -1303,7 +1298,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => Nil } - } /** * The list of select trees that resolve to the same symbols as the ones that are imported @@ -1356,9 +1350,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** Replaces all positions in `tree` with zero-extent positions */ private def focusPositions(tree: Tree)(implicit ctx: Context): Tree = { val transformer = new tpd.TreeMap { - override def transform(tree: Tree)(implicit ctx: Context): Tree = { + override def transform(tree: Tree)(implicit ctx: Context): Tree = super.transform(tree).withSpan(tree.span.focus) - } } transformer.transform(tree) } diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 6dcb63fa3eb8..743b0f14cf88 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -208,7 +208,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** Modifiers with given list of Mods. It is checked that * all modifiers are already accounted for in `flags` and `privateWithin`. 
*/ - def withMods(ms: List[Mod]): Modifiers = { + def withMods(ms: List[Mod]): Modifiers = if (mods eq ms) this else { if (ms.nonEmpty) @@ -218,7 +218,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { s"unaccounted modifier: $m in $this when adding $ms") copy(mods = ms) } - } def withAddedAnnotation(annot: Tree): Modifiers = if (annotations.exists(_ eq annot)) this diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 03e212fd7dc8..c6c0f9e0981b 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -31,12 +31,11 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { override def findClass(className: String): Option[ClassRepresentation] = { val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - def findEntry(isSource: Boolean): Option[ClassRepresentation] = { + def findEntry(isSource: Boolean): Option[ClassRepresentation] = aggregatesForPackage(pkg).iterator.map(_.findClass(className)).collectFirst { case Some(s: SourceFileEntry) if isSource => s case Some(s: ClassFileEntry) if !isSource => s } - } val classEntry = findEntry(isSource = false) val sourceEntry = findEntry(isSource = true) @@ -68,9 +67,9 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { override private[dotty] def hasPackage(pkg: String): Boolean = aggregates.exists(_.hasPackage(pkg)) override private[dotty] def list(inPackage: String): ClassPathEntries = { val (packages, classesAndSources) = aggregates.map { cp => - try { + try cp.list(inPackage).toTuple - } catch { + catch { case ex: java.io.IOException => val e = new FatalError(ex.getMessage) e.initCause(ex) @@ -96,7 +95,8 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { for { partOfEntries <- entries entry <- partOfEntries - } { + } + { val name = entry.name if (indices contains name) { val index = indices(name) @@ -122,7 +122,8 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { for { cp <- aggregates entry <- getEntries(cp) if !seenNames.contains(entry.name) - } { + } + { entriesBuffer += entry seenNames += entry.name } diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index 3ade2fcf2f13..305dc88d4e0a 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -25,7 +25,8 @@ class ClassPathFactory { for { file <- expandPath(path, expandStar = false) dir <- Option(AbstractFile getDirectory file) - } yield createSourcePath(dir) + } + yield createSourcePath(dir) def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar) @@ -37,7 +38,8 @@ class ClassPathFactory { dir <- expandPath(path, expandStar = false) name <- expandDir(dir) entry <- Option(AbstractFile.getDirectory(name)) - } yield newClassPath(entry) + } + yield newClassPath(entry) def classesInExpandedPath(path: String)(implicit ctx: Context): IndexedSeq[ClassPath] = classesInPathImpl(path, expand = true).toIndexedSeq @@ -56,7 +58,8 @@ class ClassPathFactory { def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None Option(AbstractFile.getDirectory(file)).orElse(asImage) } - } yield newClassPath(dir) + 
} + yield newClassPath(dir) private def createSourcePath(file: AbstractFile)(implicit ctx: Context): ClassPath = if (file.isJarOrZip) diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index be5727c43bfe..46ae68c1ca62 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -33,14 +33,13 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath { protected def createFileEntry(file: AbstractFile): FileEntryType protected def isMatchingFile(f: F): Boolean - private def getDirectory(forPackage: String): Option[F] = { - if (forPackage == ClassPath.RootPackage) { + private def getDirectory(forPackage: String): Option[F] = + if (forPackage == ClassPath.RootPackage) Some(dir) - } else { + else { val packageDirName = FileUtils.dirPath(forPackage) getSubDir(packageDirName) } - } override private[dotty] def hasPackage(pkg: String): Boolean = getDirectory(pkg).isDefined @@ -113,7 +112,8 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo def compare(o1: JFile, o2: JFile) = o1.getName.compareTo(o2.getName) }) listing - } else Array() + } + else Array() } protected def getName(f: JFile): String = f.getName protected def toAbstractFile(f: JFile): AbstractFile = new PlainFile(new dotty.tools.io.File(f.toPath)) @@ -127,15 +127,15 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI - def apply(): Option[ClassPath] = { + def apply(): Option[ClassPath] = try { val fs = FileSystems.getFileSystem(URI.create("jrt:/")) Some(new JrtClassPath(fs)) - } catch { + } + catch { case _: ProviderNotFoundException | _: FileSystemNotFoundException => None } - } } /** @@ -154,9 +154,8 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No // e.g. "java.lang" -> Seq("/modules/java.base") private val packageToModuleBases: Map[String, Seq[Path]] = { val ps = Files.newDirectoryStream(dir).iterator().asScala - def lookup(pack: Path): Seq[Path] = { + def lookup(pack: Path): Seq[Path] = Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList - } ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap } @@ -170,14 +169,12 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No else inPackage == "" packageToModuleBases.keysIterator.filter(matches).map(PackageEntryImpl(_)).toVector } - private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = { + private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = if (inPackage == "") Nil - else { + else packageToModuleBases.getOrElse(inPackage, Nil).flatMap(x => Files.list(x.resolve(FileUtils.dirPath(inPackage))).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x => ClassFileEntryImpl(new PlainFile(new dotty.tools.io.File(x)))).toVector - } - } override private[dotty] def list(inPackage: String): ClassPathEntries = if (inPackage == "") ClassPathEntries(packages(inPackage), Nil) @@ -188,7 +185,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No // java models them as entries in the new "module path", we'll probably need to follow this. 
def asClassPathStrings: Seq[String] = Nil - def findClassFile(className: String): Option[AbstractFile] = { + def findClassFile(className: String): Option[AbstractFile] = if (!className.contains(".")) None else { val inPackage = packageOf(className) @@ -197,7 +194,6 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No if (Files.exists(file)) new PlainFile(new dotty.tools.io.File(file)) :: Nil else Nil }.take(1).toList.headOption } - } private def packageOf(dottedClassName: String): String = dottedClassName.substring(0, dottedClassName.lastIndexOf(".")) } @@ -212,7 +208,8 @@ case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFile val wrappedClassFile = new dotty.tools.io.File(classFile.toPath) val abstractClassFile = new PlainFile(wrappedClassFile) Some(abstractClassFile) - } else None + } + else None } protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index a8628be6db3c..9046b8861c0a 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -37,11 +37,10 @@ object FileUtils { // FIXME: drop last condition when we stop being compatible with Scala 2.11 } - def stripSourceExtension(fileName: String): String = { + def stripSourceExtension(fileName: String): String = if (endsScala(fileName)) stripClassExtension(fileName) else if (endsJava(fileName)) stripJavaExtension(fileName) else throw new FatalError("Unexpected source file ending: " + fileName) - } def dirPath(forPackage: String): String = forPackage.replace('.', JFile.separatorChar) diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 7dd9a3cd2393..d80170528894 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -21,10 +21,9 @@ import FileUtils._ sealed trait ZipAndJarFileLookupFactory { private val cache = new FileBasedCache[ClassPath] - def create(zipFile: AbstractFile)(implicit ctx: Context): ClassPath = { + def create(zipFile: AbstractFile)(implicit ctx: Context): ClassPath = if (ctx.settings.YdisableFlatCpCaching.value || zipFile.file == null) createForZipFile(zipFile) else createUsingCache(zipFile) - } protected def createForZipFile(zipFile: AbstractFile): ClassPath diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index e42414da9578..5901010c3fce 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -30,21 +30,24 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPa for { dirEntry <- findDirEntry(inPackage).toSeq entry <- dirEntry.iterator if entry.isPackage - } yield PackageEntryImpl(prefix + entry.name) + } + yield PackageEntryImpl(prefix + entry.name) } protected def files(inPackage: String): Seq[FileEntryType] = for { dirEntry <- findDirEntry(inPackage).toSeq entry <- dirEntry.iterator if isRequiredFileType(entry) - } yield createFileEntry(entry) + } + yield createFileEntry(entry) protected def file(inPackage: String, name: String): Option[FileEntryType] = for { dirEntry <- 
findDirEntry(inPackage) entry <- Option(dirEntry.lookupName(name, directory = false)) if isRequiredFileType(entry) - } yield createFileEntry(entry) + } + yield createFileEntry(entry) override private[dotty] def hasPackage(pkg: String): Boolean = findDirEntry(pkg).isDefined override private[dotty] def list(inPackage: String): ClassPathEntries = { diff --git a/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala b/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala index c8304010e1b1..3f4e84337d35 100644 --- a/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala +++ b/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala @@ -26,7 +26,8 @@ object CommandLineParser { // require(next.isEmpty || !escaped) if (next startsWith del) Some((quoted, next substring 1)) else None - } else None + } + else None } } private object DoubleQuoted extends QuotedExtractor('"') @@ -59,10 +60,9 @@ object CommandLineParser { class ParseException(msg: String) extends RuntimeException(msg) def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) - def tokenize(line: String, errorFn: String => Unit): List[String] = { + def tokenize(line: String, errorFn: String => Unit): List[String] = commandLine(line) match { case Right((args, _)) => args case Left(msg) => errorFn(msg) ; Nil } - } } diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala index c99724b4d024..c2e7056824e7 100644 --- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala @@ -80,13 +80,12 @@ object CompilerCommand { // For example 'false' for the version command. "" } - def formatSetting(name: String, value: String) = { + def formatSetting(name: String, value: String) = if (value.nonEmpty) // the format here is helping to make empty padding and put the additional information exactly under the description. s"\n${format("")} $name: $value." else "" - } s"${format(s.name)} ${s.description}${formatSetting("Default", defaultValue)}${formatSetting("Choices", s.legalChoices)}" } ss map helpStr mkString "\n" @@ -140,7 +139,8 @@ object CompilerCommand { else if (shouldStopWithInfo) { ctx.echo(infoMessage) Nil - } else { + } + else { if (sourcesRequired && summary.arguments.isEmpty) ctx.echo(usageMessage) summary.arguments } diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 4366ac45170f..157ed40604cb 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -157,6 +157,9 @@ object Config { */ final val simplifyApplications = true + /** Always assume -indent */ + final val allowIndent = true + /** If set, prints a trace of all symbol completions */ final val showCompletions = false diff --git a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala index 7f158b0e3230..4a0f7550be00 100644 --- a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala +++ b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala @@ -49,9 +49,8 @@ class OutputDirs { /** Set the single output directory. From now on, all files will * be dumped in there, regardless of previous calls to 'add'. 
*/ - def setSingleOutput(dir: AbstractFile): Unit = { + def setSingleOutput(dir: AbstractFile): Unit = singleOutDir = Some(dir) - } def add(src: AbstractFile, dst: AbstractFile): Unit = { singleOutDir = None diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala index 6e62661962ac..b22bc79bf525 100644 --- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala +++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala @@ -137,7 +137,7 @@ object PathResolver { * Otherwise, show values in Calculated as if those options had been given * to a scala runner. */ - def main(args: Array[String]): Unit = { + def main(args: Array[String]): Unit = if (args.isEmpty) { println(Environment) println(Defaults) @@ -156,7 +156,6 @@ object PathResolver { println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") } } - } } import PathResolver.{Defaults, ppcp} @@ -203,10 +202,9 @@ class PathResolver(implicit ctx: Context) { */ def sourcePath: String = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) - def userClassPath: String = { + def userClassPath: String = if (!settings.classpath.isDefault) settings.classpath.value else sys.env.getOrElse("CLASSPATH", ".") - } import classPathFactory._ @@ -265,5 +263,5 @@ class PathResolver(implicit ctx: Context) { } def asURLs: Seq[java.net.URL] = result.asURLs - } + diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index 155f6984a7e9..6a0549501f3d 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -27,6 +27,7 @@ object Printers { val hk: Printer = noPrinter val implicits: Printer = noPrinter val implicitsDetailed: Printer = noPrinter + val lexical: Printer = noPrinter val inlining: Printer = noPrinter val interactiv: Printer = noPrinter val overload: Printer = noPrinter diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index c025685d1801..08384222bd0f 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -36,10 +36,9 @@ trait PropertiesTrait { private def quietlyDispose(action: => Unit, disposal: => Unit) = try { action } - finally { + finally try { disposal } catch { case _: IOException => } - } def propIsSet(name: String): Boolean = System.getProperty(name) != null def propIsSetTo(name: String, value: String): Boolean = propOrNull(name) == value @@ -70,9 +69,9 @@ trait PropertiesTrait { val versionString: String = { val v = scalaPropOrElse("version.number", "(unknown)") "version " + scalaPropOrElse("version.number", "(unknown)") + { - if (v.contains("SNAPSHOT") || v.contains("NIGHTLY")) { + if (v.contains("SNAPSHOT") || v.contains("NIGHTLY")) "-git-" + scalaPropOrElse("git.hash", "(unknown)") - } else "" + else "" } } diff --git a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala index 3ec8049aeb5b..883ce31ca741 100644 --- a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala +++ b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala @@ -14,5 +14,5 @@ class SJSPlatform()(implicit ctx: Context) extends JavaPlatform { /** Is the SAMType `cls` also a SAM under the rules of the Scala.js back-end? 
*/ override def isSam(cls: ClassSymbol)(implicit ctx: Context): Boolean = defn.isFunctionClass(cls) || jsDefinitions.isJSFunctionClass(cls) - } + diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 80a6c19b41ab..fc50873a4e68 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -50,6 +50,8 @@ class ScalaSettings extends Settings.SettingGroup { val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions") val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions") + val indent: Setting[Boolean] = BooleanSetting("-indent", "allow significant indentation") + val noindent: Setting[Boolean] = BooleanSetting("-noindent", "require classical {...} syntax, indentation is not significant") /** Decompiler settings */ val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.") withAbbreviation "--print-tasty" diff --git a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala index 8ba3dce8306a..61fb630a8edb 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala @@ -102,7 +102,8 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu case R(_, majorS, _, minorS, _, revS, _, buildS) => Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))) case _ => failure - } catch { + } + catch { case e: NumberFormatException => failure } } @@ -179,7 +180,7 @@ object ScalaBuild { case Milestone(thatN) => n - thatN // a milestone is older than anything other than another milestone case _ => -1 - } } } + diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 84407b55d3ee..765b092f953f 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -116,14 +116,13 @@ object Settings { def tryToSet(state: ArgsSummary): ArgsSummary = { val ArgsSummary(sstate, arg :: args, errors, warnings) = state - def update(value: Any, args: List[String]) = { - if (changed) { + def update(value: Any, args: List[String]) = + if (changed) ArgsSummary(updateIn(sstate, value), args, errors, warnings :+ s"Flag $name set repeatedly") - } else { + else { changed = true ArgsSummary(updateIn(sstate, value), args, errors, warnings) } - } def fail(msg: String, args: List[String]) = ArgsSummary(sstate, args, errors :+ msg, warnings) def missingArg = @@ -161,7 +160,8 @@ object Settings { case _ => update(x, args2) } - } catch { + } + catch { case _: NumberFormatException => fail(s"$arg2 is not an integer argument for $name", args2) } diff --git a/compiler/src/dotty/tools/dotc/consumetasty/TastyConsumerPhase.scala b/compiler/src/dotty/tools/dotc/consumetasty/TastyConsumerPhase.scala index ac5cd6038281..938043ffd7ab 100644 --- a/compiler/src/dotty/tools/dotc/consumetasty/TastyConsumerPhase.scala +++ b/compiler/src/dotty/tools/dotc/consumetasty/TastyConsumerPhase.scala @@ -14,5 +14,5 @@ class TastyConsumerPhase(consumer: TastyConsumer) extends Phase { val reflect = ReflectionImpl(ctx) consumer(reflect)(ctx.compilationUnit.tpdTree.asInstanceOf[reflect.Tree]) } - } + diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala 
b/compiler/src/dotty/tools/dotc/core/Annotations.scala index a8925b3ca257..493e6a6da487 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -207,18 +207,24 @@ object Annotations { sym.hasAnnotation(defn.DeprecatedAnnot) def deprecationMessage(implicit ctx: Context): Option[String] = - for (annot <- sym.getAnnotation(defn.DeprecatedAnnot); - arg <- annot.argumentConstant(0)) + for { + annot <- sym.getAnnotation(defn.DeprecatedAnnot) + arg <- annot.argumentConstant(0) + } yield arg.stringValue def migrationVersion(implicit ctx: Context): Option[Try[ScalaVersion]] = - for (annot <- sym.getAnnotation(defn.MigrationAnnot); - arg <- annot.argumentConstant(1)) + for { + annot <- sym.getAnnotation(defn.MigrationAnnot) + arg <- annot.argumentConstant(1) + } yield ScalaVersion.parse(arg.stringValue) def migrationMessage(implicit ctx: Context): Option[Try[ScalaVersion]] = - for (annot <- sym.getAnnotation(defn.MigrationAnnot); - arg <- annot.argumentConstant(0)) + for { + annot <- sym.getAnnotation(defn.MigrationAnnot) + arg <- annot.argumentConstant(0) + } yield ScalaVersion.parse(arg.stringValue) } } diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 300009312e36..0195da7a4f6c 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -149,7 +149,8 @@ class CheckRealizable(implicit ctx: Context) { for { mbr <- tp.nonClassTypeMembers if !(mbr.info.loBound <:< mbr.info.hiBound) - } yield new HasProblemBounds(mbr.name, mbr.info) + } + yield new HasProblemBounds(mbr.name, mbr.info) val refinementProblems = for { @@ -157,7 +158,8 @@ class CheckRealizable(implicit ctx: Context) { if (name.isTypeName) mbr <- tp.member(name).alternatives if !(mbr.info.loBound <:< mbr.info.hiBound) - } yield new HasProblemBounds(name, mbr.info) + } + yield new HasProblemBounds(name, mbr.info) def baseTypeProblems(base: Type) = base match { case AndType(base1, base2) => diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala index 16f899eb939c..d14fcb34ab7f 100644 --- a/compiler/src/dotty/tools/dotc/core/Comments.scala +++ b/compiler/src/dotty/tools/dotc/core/Comments.scala @@ -78,13 +78,12 @@ object Comments { Comment(span, raw, None, Nil) private def parseUsecases(expandedComment: String, span: Span)(implicit ctx: Context): List[UseCase] = - if (!isDocComment(expandedComment)) { + if (!isDocComment(expandedComment)) Nil - } else { + else tagIndex(expandedComment) .filter { startsWithTag(expandedComment, _, "@usecase") } .map { case (start, end) => decomposeUseCase(expandedComment, span, start, end) } - } /** Turns a usecase section into a UseCase, with code changed to: * {{{ @@ -236,12 +235,11 @@ object Comments { if (end > tocopy) tocopy = end case None => srcSec match { - case Some((start1, end1)) => { + case Some((start1, end1)) => out append dst.substring(copied, tocopy).trim out append "\n" copied = tocopy out append src.substring(start1, end1).trim - } case None => } } @@ -357,7 +355,7 @@ object Comments { // excluding variables written as \$foo so we can use them when // necessary to document things like Symbol#decode def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\' - while (idx < str.length) { + while (idx < str.length) if ((str charAt idx) != '$' || isEscaped) idx += 1 else { @@ -373,8 +371,9 @@ object Comments { 
superComment(sym) foreach { sc => val superSections = tagIndex(sc) replaceWith(sc.substring(3, startTag(sc, superSections))) - for (sec @ (start, end) <- superSections) + for (sec @ (start, end) <- superSections) { if (!isMovable(sc, sec)) out append sc.substring(start, end) + } } case "" => idx += 1 case vname => @@ -383,9 +382,8 @@ object Comments { case None => dottydoc.println(s"Variable $vname undefined in comment for $sym in $site") } - } + } } - } if (out.length == 0) str else { out append str.substring(copied) @@ -448,11 +446,10 @@ object Comments { * an infinite loop has broken out between superComment and cookedDocComment * since r23926. */ - private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = { + private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = if (!sym.owner.isClass) Nil else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..` //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) - } class ExpansionLimitExceeded(str: String) extends Exception } diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1fe82638e01d..be88ea2b8b8a 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -199,12 +199,11 @@ trait ConstraintHandling[AbstractContext] { } - protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(implicit actx: AbstractContext): Boolean = { + protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(implicit actx: AbstractContext): Boolean = if (whenFrozen) isSubTypeWhenFrozen(tp1, tp2) else isSubType(tp1, tp2) - } @forceInline final def inFrozenConstraint[T](op: => T): T = { val savedFrozen = frozenConstraint @@ -527,7 +526,7 @@ trait ConstraintHandling[AbstractContext] { /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ def checkPropagated(msg: => String)(result: Boolean)(implicit actx: AbstractContext): Boolean = { - if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) { + if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) inFrozenConstraint { for (p <- constraint.domainParams) { def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = @@ -540,7 +539,6 @@ trait ConstraintHandling[AbstractContext] { } } } - } result } } diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala index e6caae72bdde..3322b385cc48 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala @@ -7,11 +7,12 @@ import config.Printers.{default, typr} trait ConstraintRunInfo { self: Run => private[this] var maxSize = 0 private[this] var maxConstraint: Constraint = _ - def recordConstraintSize(c: Constraint, size: Int): Unit = + def recordConstraintSize(c: Constraint, size: Int): Unit = { if (size > maxSize) { maxSize = size maxConstraint = c } + } def printMaxConstraint()(implicit ctx: Context): Unit = { val printer = if (ctx.settings.YdetailedStats.value) default else typr if (maxSize > 0) printer.println(s"max constraint = ${maxConstraint.show}") diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 1cfb94b10c83..44336a50d358 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -210,7 +210,7 @@ object Contexts { /** The new implicit references that are introduced by this scope */ protected var implicitsCache: ContextualImplicits = null def implicits: ContextualImplicits = { - if (implicitsCache == null ) + if (implicitsCache == null) implicitsCache = { val implicitRefs: List[ImplicitRef] = if (isClassDefContext) @@ -643,10 +643,9 @@ object Contexts { /** The platform */ def platform: Platform = { - if (_platform == null) { + if (_platform == null) throw new IllegalStateException( "initialize() must be called before accessing platform") - } _platform } @@ -671,9 +670,8 @@ object Contexts { definitions.init() } - def squashed(p: Phase): Phase = { + def squashed(p: Phase): Phase = allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) - } } /** The essential mutable state of a context base, collected into a common class */ diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index fda2534a2479..770d81e3caf5 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -55,10 +55,10 @@ object Decorators { final def mapconserve[U](f: T => U): List[U] = { @tailrec def loop(mapped: ListBuffer[U], unchanged: List[U], pending: List[T]): List[U] = - if (pending.isEmpty) { + if (pending.isEmpty) if (mapped eq null) unchanged else mapped.prependToList(unchanged) - } else { + else { val head0 = pending.head val head1 = f(head0) @@ -93,7 +93,8 @@ object Decorators { if (ys1 eq xs1) xs else x :: ys1 else ys1 - } else xs filter p + } + else xs filter p } loop(xs, 0) } diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 811ae808e668..f1982974ea36 100644 --- 
a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -160,9 +160,8 @@ class Definitions { val info = if (useCompleter) new LazyType { - def complete(denot: SymDenotation)(implicit ctx: Context): Unit = { + def complete(denot: SymDenotation)(implicit ctx: Context): Unit = denot.info = ptype - } } else ptype enterMethod(cls, name, info, flags) @@ -311,10 +310,9 @@ class Definitions { @tu lazy val AnyKindClass: ClassSymbol = { val cls = ctx.newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil) - if (!ctx.settings.YnoKindPolymorphism.value) { + if (!ctx.settings.YnoKindPolymorphism.value) // Enable kind-polymorphism by exposing scala.AnyKind cls.entered - } cls } def AnyKindType: TypeRef = AnyKindClass.typeRef @@ -801,13 +799,12 @@ class Definitions { object PartialFunctionOf { def apply(arg: Type, result: Type)(implicit ctx: Context): Type = PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil) - def unapply(pft: Type)(implicit ctx: Context): Option[(Type, List[Type])] = { + def unapply(pft: Type)(implicit ctx: Context): Option[(Type, List[Type])] = if (pft.isRef(PartialFunctionClass)) { val targs = pft.dealias.argInfos if (targs.length == 2) Some((targs.head, targs.tail)) else None } else None - } } object ArrayOf { @@ -903,7 +900,7 @@ class Definitions { else ctx.requiredClass("scala.Function" + n.toString) - @tu lazy val Function0_apply: Symbol = ImplementedFunctionType(0).symbol.requiredMethod(nme.apply) + @tu lazy val Function0_apply: Symbol = ImplementedFunctionType(0).symbol.requiredMethod(nme.apply) def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false)(implicit ctx: Context): TypeRef = if (n <= MaxImplementedFunctionArity && (!isContextual || ctx.erasedTypes) && !isErased) ImplementedFunctionType(n) diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index f8f1d1f84a34..2ee53398004a 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -130,7 +130,8 @@ object Denotations { validAsSeenFrom = if (pre.isProvisional) Nowhere else ctx.period } cachedAsSeenFrom - } else computeAsSeenFrom(pre) + } + else computeAsSeenFrom(pre) protected def computeAsSeenFrom(pre: Type)(implicit ctx: Context): AsSeenFromResult @@ -346,12 +347,11 @@ object Denotations { */ def matchingDenotation(site: Type, targetType: Type)(implicit ctx: Context): SingleDenotation = { def qualifies(sym: Symbol) = site.memberInfo(sym).matchesLoosely(targetType) - if (isOverloaded) { + if (isOverloaded) atSignature(targetType.signature, site, relaxed = true) match { case sd: SingleDenotation => sd.matchingDenotation(site, targetType) case md => md.suchThat(qualifies(_)) } - } else if (exists && !qualifies(symbol)) NoDenotation else asSingleDenotation } @@ -577,144 +577,143 @@ object Denotations { // ------ Info meets and joins --------------------------------------------- - /** Handle merge conflict by throwing a `MergeError` exception */ - private def mergeConflict(sym1: Symbol, sym2: Symbol, tp1: Type, tp2: Type)(implicit ctx: Context): Type = - throw new MergeError(sym1, sym2, tp1, tp2, NoPrefix) - - /** Merge parameter names of lambda types. If names in corresponding positions match, keep them, - * otherwise generate new synthetic names. 
- */ - private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = - (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) - yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList - - /** Normally, `tp1 & tp2`. - * Special cases for matching methods and classes, with - * the possibility of raising a merge error. - * Special handling of ExprTypes, where mixed intersections widen the ExprType away. - */ - def infoMeet(tp1: Type, tp2: Type, sym1: Symbol, sym2: Symbol, safeIntersection: Boolean)(implicit ctx: Context): Type = { - if (tp1 eq tp2) tp1 - else tp1 match { - case tp1: TypeBounds => - tp2 match { - case tp2: TypeBounds => if (safeIntersection) tp1 safe_& tp2 else tp1 & tp2 - case tp2: ClassInfo if tp1 contains tp2 => tp2 - case _ => mergeConflict(sym1, sym2, tp1, tp2) - } - case tp1: ClassInfo => - tp2 match { - case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix) - case tp2: TypeBounds if tp2 contains tp1 => tp1 - case _ => mergeConflict(sym1, sym2, tp1, tp2) - } - - // Two remedial strategies: - // - // 1. Prefer method types over poly types. This is necessary to handle - // overloaded definitions like the following - // - // def ++ [B >: A](xs: C[B]): D[B] - // def ++ (xs: C[A]): D[A] - // - // (Code like this is found in the collection strawman) - // - // 2. In the case of two method types or two polytypes with matching - // parameters and implicit status, merge corresponding parameter - // and result types. - case tp1: MethodType => - tp2 match { - case tp2: PolyType => - tp1 - case tp2: MethodType - if ctx.typeComparer.matchingMethodParams(tp1, tp2) && (tp1.companion eq tp2.companion) => - tp1.derivedLambdaType( - mergeParamNames(tp1, tp2), - tp1.paramInfos, - infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2, safeIntersection)) - case _ => - mergeConflict(sym1, sym2, tp1, tp2) - } - case tp1: PolyType => - tp2 match { - case tp2: MethodType => - tp2 - case tp2: PolyType if ctx.typeComparer.matchingPolyParams(tp1, tp2) => - tp1.derivedLambdaType( - mergeParamNames(tp1, tp2), - tp1.paramInfos.zipWithConserve(tp2.paramInfos) { (p1, p2) => - infoMeet(p1, p2.subst(tp2, tp1), sym1, sym2, safeIntersection).bounds - }, - infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2, safeIntersection)) - case _ => - mergeConflict(sym1, sym2, tp1, tp2) - } - case ExprType(rtp1) => - tp2 match { - case ExprType(rtp2) => ExprType(rtp1 & rtp2) - case _ => rtp1 & tp2 - } - case _ => - try tp1 & tp2.widenExpr - catch { - case ex: Throwable => - println(i"error for meet: $tp1 &&& $tp2, ${tp1.getClass}, ${tp2.getClass}") - throw ex - } - } - } - - /** Normally, `tp1 | tp2`. - * Special cases for matching methods and classes, with - * the possibility of raising a merge error. - * Special handling of ExprTypes, where mixed unions widen the ExprType away. - */ - def infoJoin(tp1: Type, tp2: Type, sym1: Symbol, sym2: Symbol)(implicit ctx: Context): Type = tp1 match { + /** Handle merge conflict by throwing a `MergeError` exception */ + private def mergeConflict(sym1: Symbol, sym2: Symbol, tp1: Type, tp2: Type)(implicit ctx: Context): Type = + throw new MergeError(sym1, sym2, tp1, tp2, NoPrefix) + + /** Merge parameter names of lambda types. If names in corresponding positions match, keep them, + * otherwise generate new synthetic names. 
+ */ + private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = + (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) + yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList + + /** Normally, `tp1 & tp2`. + * Special cases for matching methods and classes, with + * the possibility of raising a merge error. + * Special handling of ExprTypes, where mixed intersections widen the ExprType away. + */ + def infoMeet(tp1: Type, tp2: Type, sym1: Symbol, sym2: Symbol, safeIntersection: Boolean)(implicit ctx: Context): Type = + if (tp1 eq tp2) tp1 + else tp1 match { case tp1: TypeBounds => tp2 match { - case tp2: TypeBounds => tp1 | tp2 - case tp2: ClassInfo if tp1 contains tp2 => tp1 + case tp2: TypeBounds => if (safeIntersection) tp1 safe_& tp2 else tp1 & tp2 + case tp2: ClassInfo if tp1 contains tp2 => tp2 case _ => mergeConflict(sym1, sym2, tp1, tp2) } case tp1: ClassInfo => tp2 match { - case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix | tp2.prefix) - case tp2: TypeBounds if tp2 contains tp1 => tp2 + case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix) + case tp2: TypeBounds if tp2 contains tp1 => tp1 case _ => mergeConflict(sym1, sym2, tp1, tp2) } + + // Two remedial strategies: + // + // 1. Prefer method types over poly types. This is necessary to handle + // overloaded definitions like the following + // + // def ++ [B >: A](xs: C[B]): D[B] + // def ++ (xs: C[A]): D[A] + // + // (Code like this is found in the collection strawman) + // + // 2. In the case of two method types or two polytypes with matching + // parameters and implicit status, merge corresponding parameter + // and result types. case tp1: MethodType => tp2 match { + case tp2: PolyType => + tp1 case tp2: MethodType if ctx.typeComparer.matchingMethodParams(tp1, tp2) && (tp1.companion eq tp2.companion) => tp1.derivedLambdaType( mergeParamNames(tp1, tp2), tp1.paramInfos, - infoJoin(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2)) + infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2, safeIntersection)) case _ => mergeConflict(sym1, sym2, tp1, tp2) } case tp1: PolyType => tp2 match { - case tp2: PolyType - if ctx.typeComparer.matchingPolyParams(tp1, tp2) => + case tp2: MethodType => + tp2 + case tp2: PolyType if ctx.typeComparer.matchingPolyParams(tp1, tp2) => tp1.derivedLambdaType( mergeParamNames(tp1, tp2), tp1.paramInfos.zipWithConserve(tp2.paramInfos) { (p1, p2) => - infoJoin(p1, p2.subst(tp2, tp1), sym1, sym2).bounds + infoMeet(p1, p2.subst(tp2, tp1), sym1, sym2, safeIntersection).bounds }, - infoJoin(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2)) + infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2, safeIntersection)) case _ => mergeConflict(sym1, sym2, tp1, tp2) } case ExprType(rtp1) => tp2 match { - case ExprType(rtp2) => ExprType(rtp1 | rtp2) - case _ => rtp1 | tp2 + case ExprType(rtp2) => ExprType(rtp1 & rtp2) + case _ => rtp1 & tp2 } case _ => - tp1 | tp2.widenExpr + try tp1 & tp2.widenExpr + catch { + case ex: Throwable => + println(i"error for meet: $tp1 &&& $tp2, ${tp1.getClass}, ${tp2.getClass}") + throw ex + } } + /** Normally, `tp1 | tp2`. + * Special cases for matching methods and classes, with + * the possibility of raising a merge error. + * Special handling of ExprTypes, where mixed unions widen the ExprType away. 
+ */ + def infoJoin(tp1: Type, tp2: Type, sym1: Symbol, sym2: Symbol)(implicit ctx: Context): Type = tp1 match { + case tp1: TypeBounds => + tp2 match { + case tp2: TypeBounds => tp1 | tp2 + case tp2: ClassInfo if tp1 contains tp2 => tp1 + case _ => mergeConflict(sym1, sym2, tp1, tp2) + } + case tp1: ClassInfo => + tp2 match { + case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix | tp2.prefix) + case tp2: TypeBounds if tp2 contains tp1 => tp2 + case _ => mergeConflict(sym1, sym2, tp1, tp2) + } + case tp1: MethodType => + tp2 match { + case tp2: MethodType + if ctx.typeComparer.matchingMethodParams(tp1, tp2) && (tp1.companion eq tp2.companion) => + tp1.derivedLambdaType( + mergeParamNames(tp1, tp2), + tp1.paramInfos, + infoJoin(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2)) + case _ => + mergeConflict(sym1, sym2, tp1, tp2) + } + case tp1: PolyType => + tp2 match { + case tp2: PolyType + if ctx.typeComparer.matchingPolyParams(tp1, tp2) => + tp1.derivedLambdaType( + mergeParamNames(tp1, tp2), + tp1.paramInfos.zipWithConserve(tp2.paramInfos) { (p1, p2) => + infoJoin(p1, p2.subst(tp2, tp1), sym1, sym2).bounds + }, + infoJoin(tp1.resultType, tp2.resultType.subst(tp2, tp1), sym1, sym2)) + case _ => + mergeConflict(sym1, sym2, tp1, tp2) + } + case ExprType(rtp1) => + tp2 match { + case ExprType(rtp2) => ExprType(rtp1 | rtp2) + case _ => rtp1 | tp2 + } + case _ => + tp1 | tp2.widenExpr + } + /** A non-overloaded denotation */ abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { protected def newLikeThis(symbol: Symbol, info: Type): SingleDenotation @@ -802,11 +801,12 @@ object Denotations { def history: List[SingleDenotation] = { val b = new ListBuffer[SingleDenotation] var current = initial - while { + while ({ b += (current) current = current.nextInRun current ne initial - } do () + }) + () b.toList } @@ -820,12 +820,13 @@ object Denotations { symbol.is(Permanent), // Permanent symbols are valid in all runIds s"denotation $this invalid in run ${ctx.runId}. 
ValidFor: $validFor") var d: SingleDenotation = this - while { + while ({ d.validFor = Period(ctx.period.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId) d.invalidateInheritedInfo() d = d.nextInRun d ne this - } do () + }) + () this } @@ -912,7 +913,8 @@ object Denotations { // in this case, next.validFor contains currentPeriod cur = next cur - } else { + } + else { //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod") // not found, cur points to highest existing variant val nextTransformerId = ctx.base.nextDenotTransformerId(cur.validFor.lastPhaseId) @@ -922,9 +924,9 @@ object Denotations { var startPid = nextTransformerId + 1 val transformer = ctx.base.denotTransformers(nextTransformerId) //println(s"transforming $this with $transformer") - try { + try next = transformer.transform(cur)(ctx.withPhase(transformer)) - } catch { + catch { case ex: CyclicReference => println(s"error while transforming $this") // DEBUG throw ex @@ -946,7 +948,8 @@ object Denotations { } cur.current // multiple transformations could be required } - } else { + } + else { // currentPeriod < end of valid; in this case a version must exist // but to be defensive we check for infinite loop anyway var cnt = 0 @@ -988,8 +991,8 @@ object Denotations { current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) insertAfter(current) } - // printPeriods(this) } + // printPeriods(this) } /** Apply a transformation `f` to all denotations in this group that start at or after @@ -1054,13 +1057,14 @@ object Denotations { var cur = this var cnt = 0 var interval = validFor - while { + while ({ cur = cur.nextInRun cnt += 1 assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg) interval |= cur.validFor cur ne this - } do () + }) + () interval } @@ -1077,13 +1081,14 @@ object Denotations { var sb = new StringBuilder() var cur = this var cnt = 0 - while { + while ({ sb.append(" " + cur.validFor) cur = cur.nextInRun cnt += 1 if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this } cur ne this - } do () + }) + () sb.toString } @@ -1219,7 +1224,7 @@ object Denotations { |they are both defined in ${sym1.effectiveOwner} but have matching signatures | ${denot1.info} and | ${denot2.info}${super.addendum}""" - } + } else throw new MergeError(sym1, sym2, denot1.info, denot2.info, pre) } @@ -1323,13 +1328,12 @@ object Denotations { def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(implicit ctx: Context): Denotation = { def select(prefix: Denotation, selector: Name): Denotation = { val owner = prefix.disambiguate(_.info.isParameterless) - def isPackageFromCoreLibMissing: Boolean = { + def isPackageFromCoreLibMissing: Boolean = owner.symbol == defn.RootClass && ( selector == nme.scala_ || // if the scala package is missing, the stdlib must be missing selector == nme.scalaShadowing // if the scalaShadowing package is missing, the dotty library must be missing ) - } if (owner.exists) { val result = if (isPackage) owner.info.decl(selector) else owner.info.member(selector) if (result.exists) result diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index c2be7fa11c72..a4aa72347ac2 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -75,10 +75,10 @@ object Flags { (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 } - /** Does the given flag set have a non-empty intersection with another flag set, - * and at the same time 
contain none of the flags in the `butNot` set? - */ - def (x: FlagSet) isOneOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isOneOf(flags) && !x.isOneOf(butNot) + /** Does the given flag set have a non-empty intersection with another flag set, + * and at the same time contain none of the flags in the `butNot` set? + */ + def (x: FlagSet) isOneOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isOneOf(flags) && !x.isOneOf(butNot) /** Does a given flag set have all of the flags of another flag set? * Pre: The intersection of the term/type flags of both sets must be non-empty. diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 962773aea0b0..90a971969b9c 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -194,7 +194,7 @@ final class ProperGadtConstraint private( .ensuring(containsNoInternalTypes(_)) } - override def bounds(sym: Symbol)(implicit ctx: Context): TypeBounds = { + override def bounds(sym: Symbol)(implicit ctx: Context): TypeBounds = mapping(sym) match { case null => null case tv => @@ -208,7 +208,6 @@ final class ProperGadtConstraint private( //.reporting(i"gadt bounds $sym: $result", gadts) //.ensuring(containsNoInternalTypes(_)) } - } override def contains(sym: Symbol)(implicit ctx: Context): Boolean = mapping(sym) ne null diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 9d2742e4e124..7594b3a749dc 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -187,17 +187,15 @@ object NameOps { /** Is an implicit function name, i.e one of ImplicitFunctionN for N >= 0 or ErasedImplicitFunctionN for N > 0 */ - def isImplicitFunction: Boolean = { + def isImplicitFunction: Boolean = functionArityFor(str.ImplicitFunction) >= 0 || functionArityFor(str.ErasedImplicitFunction) > 0 - } /** Is an erased function name, i.e. one of ErasedFunctionN, ErasedImplicitFunctionN for N > 0 */ - def isErasedFunction: Boolean = { + def isErasedFunction: Boolean = functionArityFor(str.ErasedFunction) > 0 || functionArityFor(str.ErasedImplicitFunction) > 0 - } /** Is a synthetic function name, i.e. 
one of * - FunctionN for N > 22 @@ -205,22 +203,21 @@ object NameOps { * - ErasedFunctionN for N > 0 * - ErasedImplicitFunctionN for N > 0 */ - def isSyntheticFunction: Boolean = { + def isSyntheticFunction: Boolean = functionArityFor(str.Function) > MaxImplementedFunctionArity || functionArityFor(str.ImplicitFunction) >= 0 || isErasedFunction - } /** Parsed function arity for function with some specific prefix */ - private def functionArityFor(prefix: String): Int = { + private def functionArityFor(prefix: String): Int = if (name.startsWith(prefix)) { val suffix = name.toString.substring(prefix.length) if (suffix.matches("\\d+")) suffix.toInt else -1 - } else -1 - } + } + else -1 /** The name of the generic runtime operation corresponding to an array operation */ def genericArrayOp: TermName = name match { @@ -287,13 +284,12 @@ object NameOps { else n) def fieldName: TermName = - if (name.isSetterName) { + if (name.isSetterName) if (name.is(TraitSetterName)) { val TraitSetterName(_, original) = name original.fieldName } else getterName.fieldName - } else FieldName(name) def stripScala2LocalSuffix: TermName = diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index ff7173657089..481a6838d2ac 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -573,12 +573,13 @@ object Names { val h = hashValue(cs, offset, len) & (table.length - 1) /** Make sure the capacity of the character array is at least `n` */ - def ensureCapacity(n: Int) = + def ensureCapacity(n: Int) = { if (n > chrs.length) { val newchrs = new Array[Char](chrs.length * 2) chrs.copyToArray(newchrs) chrs = newchrs } + } /** Enter characters into chrs array. */ def enterChars(): Unit = { @@ -592,7 +593,7 @@ object Names { } /** Rehash chain of names */ - def rehash(name: SimpleName): Unit = + def rehash(name: SimpleName): Unit = { if (name != null) { val oldNext = name.next val h = hashValue(chrs, name.start, name.length) & (table.size - 1) @@ -600,6 +601,7 @@ object Names { table(h) = name rehash(oldNext) } + } /** Make sure the hash table is large enough for the given load factor */ def incTableSize() = { @@ -681,12 +683,11 @@ object Names { val until = x.length min y.length var i = 0 while (i < until && x(i) == y(i)) i = i + 1 - if (i < until) { + if (i < until) if (x(i) < y(i)) -1 else /*(x(i) > y(i))*/ 1 - } else { + else x.length - y.length - } } private def compareTermNames(x: TermName, y: TermName): Int = x match { case x: SimpleName => @@ -702,11 +703,10 @@ object Names { case _ => 1 } } - def compare(x: Name, y: Name): Int = { + def compare(x: Name, y: Name): Int = if (x.isTermName && y.isTypeName) 1 else if (x.isTypeName && y.isTermName) -1 else if (x eq y) 0 else compareTermNames(x.toTermName, y.toTermName) - } } } diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index a47b13e61113..b4b4a539a8de 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -469,7 +469,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, (poly, entries) <- boundsMap.toList n <- 0 until paramCount(entries) if entries(n).exists - } yield poly.paramRefs(n) + } + yield poly.paramRefs(n) def forallParams(p: TypeParamRef => Boolean): Boolean = boundsMap.forallBinding { (poly, entries) => @@ -587,9 +588,10 @@ class OrderingConstraint(private val 
boundsMap: ParamBounds, case TypeParamRef(binder: TypeLambda, _) => !contains(binder) case _ => false } - def checkClosedType(tp: Type, where: String) = + def checkClosedType(tp: Type, where: String) = { if (tp != null) assert(!tp.existsPart(isFreeTypeParamRef), i"unclosed constraint: $this refers to $tp in $where") + } boundsMap.foreachBinding((_, tps) => tps.foreach(checkClosedType(_, "bounds"))) lowerMap.foreachBinding((_, paramss) => paramss.foreach(_.foreach(checkClosedType(_, "lower")))) upperMap.foreachBinding((_, paramss) => paramss.foreach(_.foreach(checkClosedType(_, "upper")))) @@ -602,12 +604,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, if (myUninstVars == null || myUninstVars.exists(_.inst.exists)) { myUninstVars = new mutable.ArrayBuffer[TypeVar] boundsMap.foreachBinding { (poly, entries) => - for (i <- 0 until paramCount(entries)) { + for (i <- 0 until paramCount(entries)) typeVar(entries, i) match { case tv: TypeVar if !tv.inst.exists && isBounds(entries(i)) => myUninstVars += tv case _ => } - } } } myUninstVars diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala index 73298d80287f..2d7fc358a0ff 100644 --- a/compiler/src/dotty/tools/dotc/core/Periods.scala +++ b/compiler/src/dotty/tools/dotc/core/Periods.scala @@ -32,9 +32,8 @@ abstract class Periods { self: Context => def stablePeriod: Period = { var first = phaseId val nxTrans = ctx.base.nextDenotTransformerId(first) - while (first - 1 > NoPhaseId && (ctx.base.nextDenotTransformerId(first - 1) == nxTrans)) { + while (first - 1 > NoPhaseId && (ctx.base.nextDenotTransformerId(first - 1) == nxTrans)) first -= 1 - } Period(runId, first, nxTrans) } @@ -131,19 +130,16 @@ object Periods { object Period { /** The single-phase period consisting of given run id and phase id */ - def apply(rid: RunId, pid: PhaseId): Period = { + def apply(rid: RunId, pid: PhaseId): Period = new Period(((rid << PhaseWidth) | pid) << PhaseWidth) - } /** The period consisting of given run id, and lo/hi phase ids */ - def apply(rid: RunId, loPid: PhaseId, hiPid: PhaseId): Period = { + def apply(rid: RunId, loPid: PhaseId, hiPid: PhaseId): Period = new Period(((rid << PhaseWidth) | hiPid) << PhaseWidth | (hiPid - loPid)) - } /** The interval consisting of all periods of given run id */ - def allInRun(rid: RunId): Period = { + def allInRun(rid: RunId): Period = apply(rid, 0, PhaseMask) - } } final val Nowhere: Period = new Period(0) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index a9ebb3e65470..ed4d5cce3723 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -107,7 +107,7 @@ object Phases { val filteredPhaseBlock = filteredPhases(i) val phaseToAdd = if (filteredPhaseBlock.length > 1) { - for (phase <- filteredPhaseBlock) { + for (phase <- filteredPhaseBlock) phase match { case p: MiniPhase => val unmetRequirements = p.runsAfterGroupsOf &~ prevPhases @@ -117,11 +117,11 @@ object Phases { case _ => assert(false, s"Only tree transforms can be squashed, ${phase.phaseName} can not be squashed") } - } val superPhase = new MegaPhase(filteredPhaseBlock.asInstanceOf[List[MiniPhase]].toArray) prevPhases ++= filteredPhaseBlock.map(_.phaseName) superPhase - } else { // block of a single phase, no squashing + } + else { // block of a single phase, no squashing val phase = filteredPhaseBlock.head prevPhases += phase.phaseName phase @@ -169,8 +169,8 @@ 
object Phases { assert(unmetPrecedeRequirements.isEmpty, s"phase ${p} has unmet requirement: ${unmetPrecedeRequirements.mkString(", ")} should precede this phase") phasesAfter += p.phaseName - } + var i = 0 while (i < phasess.length) { @@ -206,11 +206,10 @@ object Phases { nextDenotTransformerId(i) = lastTransformerId } - if (squash) { + if (squash) this.squashedPhases = (NoPhase :: phasess).toArray - } else { + else this.squashedPhases = this.phases - } config.println(s"Phases = ${phases.toList}") config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.toList}") diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index f74d783dab34..5e89a969085a 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -158,11 +158,12 @@ object Scopes { */ final def filteredScope(p: Symbol => Boolean)(implicit ctx: Context): Scope = { var result: MutableScope = null - for (sym <- iterator) + for (sym <- iterator) { if (!p(sym)) { if (result == null) result = cloneScope result.unlink(sym) } + } if (result == null) this else result } @@ -265,10 +266,9 @@ object Scopes { /** enter a symbol in this scope. */ final def enter[T <: Symbol](sym: T)(implicit ctx: Context): T = { - if (sym.isType && ctx.phaseId <= ctx.typerPhase.id) { + if (sym.isType && ctx.phaseId <= ctx.typerPhase.id) assert(lookup(sym.name) == NoSymbol, s"duplicate ${sym.debugString}; previous was ${lookup(sym.name).debugString}") // !!! DEBUG - } newScopeEntry(sym) sym } @@ -279,8 +279,9 @@ object Scopes { enter(sym) } - private def ensureCapacity(tableSize: Int)(implicit ctx: Context): Unit = + private def ensureCapacity(tableSize: Int)(implicit ctx: Context): Unit = { if (size >= tableSize * FillFactor) createHash(tableSize * 2) + } private def createHash(tableSize: Int)(implicit ctx: Context): Unit = if (size > tableSize * FillFactor) createHash(tableSize * 2) @@ -291,11 +292,12 @@ object Scopes { } private def enterAllInHash(e: ScopeEntry, n: Int = 0)(implicit ctx: Context): Unit = { - if (e ne null) { + if (e ne null) if (n < MaxRecursions) { enterAllInHash(e.prev, n + 1) enterInHash(e) - } else { + } + else { var entries: List[ScopeEntry] = List() var ee = e while (ee ne null) { @@ -304,14 +306,13 @@ object Scopes { } entries foreach enterInHash } - } } /** Remove entry from this scope (which is required to be present) */ final def unlink(e: ScopeEntry)(implicit ctx: Context): Unit = { - if (lastEntry == e) { + if (lastEntry == e) lastEntry = e.prev - } else { + else { var e1 = lastEntry while (e1.prev != e) e1 = e1.prev e1.prev = e.prev @@ -362,14 +363,13 @@ object Scopes { var e: ScopeEntry = null if (hashTable ne null) { e = hashTable(name.hashCode & (hashTable.length - 1)) - while ((e ne null) && e.name != name) { + while ((e ne null) && e.name != name) e = e.tail - } - } else { + } + else { e = lastEntry - while ((e ne null) && e.name != name) { + while ((e ne null) && e.name != name) e = e.prev - } } if ((e eq null) && (synthesize != null)) { val sym = synthesize(name)(ctx) diff --git a/compiler/src/dotty/tools/dotc/core/StagingContext.scala b/compiler/src/dotty/tools/dotc/core/StagingContext.scala index a6d0dd5dc47c..b3f85b1ae589 100644 --- a/compiler/src/dotty/tools/dotc/core/StagingContext.scala +++ b/compiler/src/dotty/tools/dotc/core/StagingContext.scala @@ -21,5 +21,5 @@ object StagingContext { /** Context with a decremented quotation level. 
*/ def spliceContext(implicit ctx: Context): Context = ctx.fresh.setProperty(QuotationLevel, level - 1) - } + diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index a25996e7d9aa..03da138f04a5 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -421,6 +421,7 @@ object StdNames { val elem: N = "elem" val elems: N = "elems" val emptyValDef: N = "emptyValDef" + val end: N = "end" val ensureAccessible : N = "ensureAccessible" val eq: N = "eq" val eqInstance: N = "eqInstance" @@ -864,7 +865,7 @@ object StdNames { final val BeanProperty: N = "scala.beans.BeanProperty" final val BooleanBeanProperty: N = "scala.beans.BooleanBeanProperty" final val JavaSerializable: N = "java.io.Serializable" - } + } class JavaTermNames extends JavaNames[TermName] { protected def fromString(s: String): TermName = termName(s) diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index f90cdf72baa4..bd70216ba390 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -21,7 +21,7 @@ trait Substituters { this: Context => .mapOver(tp) } - final def subst1(tp: Type, from: Symbol, to: Type, theMap: Subst1Map): Type = { + final def subst1(tp: Type, from: Symbol, to: Type, theMap: Subst1Map): Type = tp match { case tp: NamedType => val sym = tp.symbol @@ -34,9 +34,8 @@ trait Substituters { this: Context => (if (theMap != null) theMap else new Subst1Map(from, to)) .mapOver(tp) } - } - final def subst2(tp: Type, from1: Symbol, to1: Type, from2: Symbol, to2: Type, theMap: Subst2Map): Type = { + final def subst2(tp: Type, from1: Symbol, to1: Type, from2: Symbol, to2: Type, theMap: Subst2Map): Type = tp match { case tp: NamedType => val sym = tp.symbol @@ -50,9 +49,8 @@ trait Substituters { this: Context => (if (theMap != null) theMap else new Subst2Map(from1, to1, from2, to2)) .mapOver(tp) } - } - final def subst(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstMap): Type = { + final def subst(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstMap): Type = tp match { case tp: NamedType => val sym = tp.symbol @@ -71,7 +69,6 @@ trait Substituters { this: Context => (if (theMap != null) theMap else new SubstMap(from, to)) .mapOver(tp) } - } final def substSym(tp: Type, from: List[Symbol], to: List[Symbol], theMap: SubstSymMap): Type = tp match { diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index d1d53494b1ff..7d24c90bc213 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -67,7 +67,8 @@ trait SymDenotations { this: Context => || (owner.unforcedDecls.lookupAll(denot.name) contains denot.symbol) || denot.isSelfSym || denot.isLocalDummy) - } catch { + } + catch { case ex: StaleSymbol => false } @@ -433,7 +434,7 @@ object SymDenotations { case tp: HKTypeLambda => tp.derivedLambdaType(resType = abstractRHS(tp.resType)) case _ => defn.AnyType } - if (isOpaqueAlias) { + if (isOpaqueAlias) info match { case TypeAlias(alias) => val (refiningAlias, bounds) = alias match { @@ -457,7 +458,6 @@ object SymDenotations { typeRef.recomputeDenot() case _ => } - } } // ------ Names ---------------------------------------------- @@ -486,7 +486,7 @@ object SymDenotations { * followed by the 
separator implied by `kind` and the given `name`. * Drops package objects. Represents each term in the owner chain by a simple `_$`. */ - def fullNameSeparated(prefixKind: QualifiedNameKind, kind: QualifiedNameKind, name: Name)(implicit ctx: Context): Name = + def fullNameSeparated(prefixKind: QualifiedNameKind, kind: QualifiedNameKind, name: Name)(implicit ctx: Context): Name = if (symbol == NoSymbol || isEffectiveRoot || kind == FlatName && is(PackageClass)) name else { @@ -830,8 +830,8 @@ object SymDenotations { i""" | Access to protected $this not permitted because enclosing ${ctx.owner.enclosingClass.showLocated} | is not a subclass of ${owner.showLocated} where target is defined""") - else if ( - !( isType // allow accesses to types from arbitrary subclasses fixes #4737 + else if + (!( isType // allow accesses to types from arbitrary subclasses fixes #4737 || pre.derivesFrom(cls) || isConstructor || owner.is(ModuleClass) // don't perform this check for static members @@ -1080,10 +1080,9 @@ object SymDenotations { * except for a toplevel module, where its module class is returned. */ final def topLevelClass(implicit ctx: Context): Symbol = { - @tailrec def topLevel(d: SymDenotation): Symbol = { + @tailrec def topLevel(d: SymDenotation): Symbol = if (d.isTopLevelClass) d.symbol else topLevel(d.owner) - } val sym = topLevel(this) if (sym.isClass) sym else sym.moduleClass @@ -1391,9 +1390,10 @@ object SymDenotations { case _ => false } - def assertNoSkolems(tp: Type): Unit = + def assertNoSkolems(tp: Type): Unit = { if (!this.isSkolem) assert(!hasSkolems(tp), s"assigning type $tp containing skolems to $this") + } // ----- copies and transforms ---------------------------------------- @@ -1407,8 +1407,8 @@ object SymDenotations { initFlags: FlagSet = UndefinedFlags, info: Type = null, privateWithin: Symbol = null, - annotations: List[Annotation] = null)(implicit ctx: Context): SymDenotation = - { // simulate default parameters, while also passing implicit context ctx to the default values + annotations: List[Annotation] = null)(implicit ctx: Context): SymDenotation = { + // simulate default parameters, while also passing implicit context ctx to the default values val initFlags1 = (if (initFlags != UndefinedFlags) initFlags else this.flags) val info1 = if (info != null) info else this.info if (ctx.isAfterTyper && changedClassParents(info, info1, completersMatter = false)) @@ -1709,9 +1709,8 @@ object SymDenotations { if (proceedWithEnter(sym, mscope)) { enterNoReplace(sym, mscope) val nxt = this.nextInRun - if (nxt.validFor.code > this.validFor.code) { + if (nxt.validFor.code > this.validFor.code) this.nextInRun.asSymDenotation.asClass.enter(sym) - } if (defn.isScalaShadowingPackageClass(sym.owner)) defn.ScalaPackageClass.enter(sym) // ScalaShadowing members are mirrored in ScalaPackage } @@ -1779,12 +1778,14 @@ object SymDenotations { if (denots == null) { denots = computeNPMembersNamed(name) memberCache.enter(name, denots) - } else if (Config.checkCacheMembersNamed) { + } + else if (Config.checkCacheMembersNamed) { val denots1 = computeNPMembersNamed(name) assert(denots.exists == denots1.exists, s"cache inconsistency: cached: $denots, computed $denots1, name = $name, owner = $this") } denots - } else computeNPMembersNamed(name) + } + else computeNPMembersNamed(name) } private[core] def computeNPMembersNamed(name: Name)(implicit ctx: Context): PreDenotation = { @@ -1895,8 +1896,8 @@ object SymDenotations { def computeApplied = { btrCache.put(tp, NoPrefix) val baseTp = - if 
(tycon.typeSymbol eq symbol) tp - else (tycon.typeParams: @unchecked) match { + if (tycon.typeSymbol eq symbol) tp + else (tycon.typeParams: @unchecked) match { case LambdaParam(_, _) :: _ => recur(tp.superType) case tparams: List[Symbol @unchecked] => @@ -1959,14 +1960,14 @@ object SymDenotations { } - /*>|>*/ trace.onDebug(s"$tp.baseType($this)") /*<|<*/ { + trace.onDebug(s"$tp.baseType($this)") { Stats.record("baseTypeOf") recur(tp) } } def memberNames(keepOnly: NameFilter)(implicit onBehalf: MemberNames, ctx: Context): Set[Name] = - if (this.is(PackageClass) || !Config.cacheMemberNames) + if (this.is(PackageClass) || !Config.cacheMemberNames) computeMemberNames(keepOnly) // don't cache package member names; they might change else { if (!memberNamesCache.isValid) memberNamesCache = MemberNames.newCache() @@ -2039,9 +2040,10 @@ object SymDenotations { private[this] var myCompanion: Symbol = NoSymbol /** Register companion class */ - override def registerCompanion(companion: Symbol)(implicit ctx: Context) = + override def registerCompanion(companion: Symbol)(implicit ctx: Context) = { if (companion.isClass && !isAbsent(canForce = false) && !companion.isAbsent(canForce = false)) myCompanion = companion + } override def registeredCompanion(implicit ctx: Context) = { ensureCompleted(); myCompanion } override def registeredCompanion_=(c: Symbol) = { myCompanion = c } @@ -2368,13 +2370,14 @@ object SymDenotations { * the cache itself. In that case we should cancel invalidation and * proceed as usual. However, all cache entries should be cleared. */ - def invalidate(): Unit = + def invalidate(): Unit = { if (cache != null) if (locked) cache = SimpleIdentityMap.Empty else { cache = null invalidateDependents() } + } def apply(keepOnly: NameFilter, clsd: ClassDenotation)(implicit onBehalf: MemberNames, ctx: Context) = { assert(isValid) @@ -2404,19 +2407,20 @@ object SymDenotations { final def isValid(implicit ctx: Context): Boolean = valid && isValidAt(ctx.phase) - def invalidate(): Unit = + def invalidate(): Unit = { if (valid && !locked) { cache = null valid = false invalidateDependents() } + } def signalProvisional() = provisional = true def apply(clsd: ClassDenotation)(implicit onBehalf: BaseData, ctx: Context) : (List[ClassSymbol], BaseClassSet) = { assert(isValid) - try { + try if (cache != null) cache else { if (locked) throw CyclicReference(clsd) @@ -2429,7 +2433,6 @@ object SymDenotations { else onBehalf.signalProvisional() computed } - } finally addDependent(onBehalf) } diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 04459cd1674d..3e79458ccb36 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -65,7 +65,7 @@ object SymbolLoaders { */ def enterPackage(owner: Symbol, pname: TermName, completer: (TermSymbol, ClassSymbol) => PackageLoader)(implicit ctx: Context): Symbol = { val preExisting = owner.info.decls lookup pname - if (preExisting != NoSymbol) { + if (preExisting != NoSymbol) // Some jars (often, obfuscated ones) include a package and // object with the same name. Rather than render them unusable, // offer a setting to resolve the conflict one way or the other. @@ -75,7 +75,8 @@ object SymbolLoaders { ctx.warning( s"Resolving package/object name conflict in favor of package ${preExisting.fullName}. 
The object will be inaccessible.") owner.asClass.delete(preExisting) - } else if (ctx.settings.YtermConflict.value == "object") { + } + else if (ctx.settings.YtermConflict.value == "object") { ctx.warning( s"Resolving package/object name conflict in favor of object ${preExisting.fullName}. The package will be inaccessible.") return NoSymbol @@ -84,7 +85,6 @@ object SymbolLoaders { throw new TypeError( i"""$owner contains object and package with same name: $pname |one of them needs to be removed from classpath""") - } ctx.newModuleSymbol(owner, pname, PackageCreationFlags, PackageCreationFlags, completer).entered } @@ -180,7 +180,7 @@ object SymbolLoaders { /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep` */ - def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation)(implicit ctx: Context): Unit = { + def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation)(implicit ctx: Context): Unit = ((classRep.binary, classRep.source): @unchecked) match { case (Some(bin), Some(src)) if needCompile(bin, src) && !binaryOnly(owner, classRep.name) => if (ctx.settings.verbose.value) ctx.inform("[symloader] picked up newer source file for " + src.path) @@ -191,7 +191,6 @@ object SymbolLoaders { case (Some(bin), _) => enterClassAndModule(owner, classRep.name, ctx.platform.newClassLoader(bin)) } - } def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = src.lastModified >= bin.lastModified @@ -261,14 +260,16 @@ object SymbolLoaders { if (!root.isRoot) { val classReps = classPath.list(packageName).classesAndSources - for (classRep <- classReps) + for (classRep <- classReps) { if (!maybeModuleClass(classRep) && hasFlatName(classRep) == flat && (!flat || isAbsent(classRep))) // on 2nd enter of flat names, check that the name has not been entered before initializeFromClassPath(root.symbol, classRep) - for (classRep <- classReps) + } + for (classRep <- classReps) { if (maybeModuleClass(classRep) && hasFlatName(classRep) == flat && isAbsent(classRep)) initializeFromClassPath(root.symbol, classRep) + } } } @@ -339,7 +340,8 @@ abstract class SymbolLoader extends LazyType { self => else doComplete(root) ctx.informTime("loaded " + description, start) - } catch { + } + catch { case ex: IOException => signalError(ex) case NonFatal(ex: TypeError) => @@ -348,11 +350,13 @@ abstract class SymbolLoader extends LazyType { self => case NonFatal(ex) => println(s"exception caught when loading $root: $ex") throw ex - } finally { - def postProcess(denot: SymDenotation) = + } + finally { + def postProcess(denot: SymDenotation) = { if (!denot.isCompleted && !denot.completer.isInstanceOf[SymbolLoaders.SecondCompleter]) denot.markAbsent() + } postProcess(root) if (!root.isRoot) postProcess(root.scalacLinkedClass.denot) @@ -396,14 +400,13 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { val (classRoot, moduleRoot) = rootDenots(root.asClass) val classfileParser = new ClassfileParser(classfile, classRoot, moduleRoot)(ctx) val result = classfileParser.run() - if (mayLoadTreesFromTasty) { + if (mayLoadTreesFromTasty) result match { case Some(unpickler: tasty.DottyUnpickler) => classRoot.classSymbol.rootTreeOrProvider = unpickler moduleRoot.classSymbol.rootTreeOrProvider = unpickler case _ => } - } } private def mayLoadTreesFromTasty(implicit ctx: Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 17c221243a8b..36ad56eeca17 100644 --- 
a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -444,8 +444,9 @@ object Symbols { if (myDefTree == null) tpd.EmptyTree else myDefTree /** Set defining tree if this symbol retains its definition tree */ - def defTree_=(tree: Tree)(implicit ctx: Context): Unit = + def defTree_=(tree: Tree)(implicit ctx: Context): Unit = { if (retainsDefTree) myDefTree = tree + } /** Does this symbol retain its definition tree? * A good policy for this needs to balance costs and benefits, where @@ -802,7 +803,7 @@ object Symbols { } sourceFromTopLevel(ctx.withPhaseNoLater(ctx.flattenPhase)) } - }//.reporting(i"source of $this # $id in ${denot.owner} = $result") + } mySource } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c8d7a0de975b..790c8b95b3a8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -221,9 +221,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w pendingSubTypes += p firstTry } - finally { + finally pendingSubTypes -= p - } } } @@ -498,26 +497,26 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w case tp2: TypeParamRef => def compareTypeParamRef = assumedTrue(tp2) || { - val alwaysTrue = - // The following condition is carefully formulated to catch all cases - // where the subtype relation is true without needing to add a constraint - // It's tricky because we might need to either approximate tp2 by its - // lower bound or else widen tp1 and check that the result is a subtype of tp2. - // So if the constraint is not yet frozen, we do the same comparison again - // with a frozen constraint, which means that we get a chance to do the - // widening in `fourthTry` before adding to the constraint. - if (frozenConstraint) recur(tp1, bounds(tp2).lo) - else isSubTypeWhenFrozen(tp1, tp2) - alwaysTrue || - frozenConstraint && (tp1 match { - case tp1: TypeParamRef => constraint.isLess(tp1, tp2) - case _ => false - }) || { - if (canConstrain(tp2) && !approx.low) - addConstraint(tp2, tp1.widenExpr, fromBelow = true) - else fourthTry + val alwaysTrue = + // The following condition is carefully formulated to catch all cases + // where the subtype relation is true without needing to add a constraint + // It's tricky because we might need to either approximate tp2 by its + // lower bound or else widen tp1 and check that the result is a subtype of tp2. + // So if the constraint is not yet frozen, we do the same comparison again + // with a frozen constraint, which means that we get a chance to do the + // widening in `fourthTry` before adding to the constraint. + if (frozenConstraint) recur(tp1, bounds(tp2).lo) + else isSubTypeWhenFrozen(tp1, tp2) + alwaysTrue || + frozenConstraint && (tp1 match { + case tp1: TypeParamRef => constraint.isLess(tp1, tp2) + case _ => false + }) || { + if (canConstrain(tp2) && !approx.low) + addConstraint(tp2, tp1.widenExpr, fromBelow = true) + else fourthTry + } } - } compareTypeParamRef case tp2: RefinedType => def compareRefinedSlow: Boolean = { @@ -1041,10 +1040,10 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w * the case was covered previously during subtyping. 
*/ def isNewSubType(tp1: Type): Boolean = - if (isCovered(tp1) && isCovered(tp2)) { + if (isCovered(tp1) && isCovered(tp2)) //println(s"useless subtype: $tp1 <:< $tp2") false - } else isSubType(tp1, tp2, approx.addLow) + else isSubType(tp1, tp2, approx.addLow) def isSubApproxHi(tp1: Type, tp2: Type): Boolean = tp1.eq(tp2) || tp2.ne(NothingType) && isSubType(tp1, tp2, approx.addHigh) @@ -1077,7 +1076,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w } if (Stats.monitored) recordStatistics(result, savedSuccessCount) result - } catch { + } + catch { case NonFatal(ex) => if (ex.isInstanceOf[AssertionError]) showGoal(tp1, tp2) recCount -= 1 @@ -1403,38 +1403,41 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w private def necessaryEither(op1: => Boolean, op2: => Boolean): Boolean = { val preConstraint = constraint - val preGadt = ctx.gadt.fresh - // if GADTflexible mode is on, we expect to always have a ProperGadtConstraint - val pre = preGadt.asInstanceOf[ProperGadtConstraint] - if (op1) { - val leftConstraint = constraint - val leftGadt = ctx.gadt.fresh - constraint = preConstraint - ctx.gadt.restore(preGadt) - if (op2) { - if (pre.subsumes(leftGadt, ctx.gadt, preGadt) && subsumes(leftConstraint, constraint, preConstraint)) { - gadts.println(i"GADT CUT - prefer ${ctx.gadt} over $leftGadt") - constr.println(i"CUT - prefer $constraint over $leftConstraint") - true - } else if (pre.subsumes(ctx.gadt, leftGadt, preGadt) && subsumes(constraint, leftConstraint, preConstraint)) { - gadts.println(i"GADT CUT - prefer $leftGadt over ${ctx.gadt}") - constr.println(i"CUT - prefer $leftConstraint over $constraint") - constraint = leftConstraint - ctx.gadt.restore(leftGadt) - true - } else { - gadts.println(i"GADT CUT - no constraint is preferable, reverting to $preGadt") - constr.println(i"CUT - no constraint is preferable, reverting to $preConstraint") - constraint = preConstraint - ctx.gadt.restore(preGadt) - true - } - } else { + val preGadt = ctx.gadt.fresh + // if GADTflexible mode is on, we expect to always have a ProperGadtConstraint + val pre = preGadt.asInstanceOf[ProperGadtConstraint] + if (op1) { + val leftConstraint = constraint + val leftGadt = ctx.gadt.fresh + constraint = preConstraint + ctx.gadt.restore(preGadt) + if (op2) + if (pre.subsumes(leftGadt, ctx.gadt, preGadt) && subsumes(leftConstraint, constraint, preConstraint)) { + gadts.println(i"GADT CUT - prefer ${ctx.gadt} over $leftGadt") + constr.println(i"CUT - prefer $constraint over $leftConstraint") + true + } + else if (pre.subsumes(ctx.gadt, leftGadt, preGadt) && subsumes(constraint, leftConstraint, preConstraint)) { + gadts.println(i"GADT CUT - prefer $leftGadt over ${ctx.gadt}") + constr.println(i"CUT - prefer $leftConstraint over $constraint") constraint = leftConstraint ctx.gadt.restore(leftGadt) true } - } else op2 + else { + gadts.println(i"GADT CUT - no constraint is preferable, reverting to $preGadt") + constr.println(i"CUT - no constraint is preferable, reverting to $preConstraint") + constraint = preConstraint + ctx.gadt.restore(preGadt) + true + } + else { + constraint = leftConstraint + ctx.gadt.restore(leftGadt) + true + } + } + else op2 } /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of @@ -1443,7 +1446,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w * rebase both itself and the member info of `tp` on a freshly created skolem type. 
*/ protected def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = - /*>|>*/ trace(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}), mbr: ${tp1.member(name).info}", subtyping) /*<|<*/ { + trace(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}), mbr: ${tp1.member(name).info}", subtyping) { val rinfo2 = tp2.refinedInfo // If the member is an abstract type and the prefix is a path, compare the member itself @@ -1511,14 +1514,13 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w * The precondition is established by `skipMatching`. */ private def isSubRefinements(tp1: RefinedType, tp2: RefinedType, limit: Type): Boolean = { - def hasSubRefinement(tp1: RefinedType, refine2: Type): Boolean = { + def hasSubRefinement(tp1: RefinedType, refine2: Type): Boolean = isSubType(tp1.refinedInfo, refine2) || { // last effort: try to adapt variances of higher-kinded types if this is sound. // TODO: Move this to eta-expansion? val adapted2 = refine2.adaptHkVariances(tp1.parent.member(tp1.refinedName).symbol.info) adapted2.ne(refine2) && hasSubRefinement(tp1, adapted2) } - } hasSubRefinement(tp1, tp2.refinedInfo) && ( (tp2.parent eq limit) || isSubRefinements( @@ -1529,7 +1531,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w * is some combination of TypeRefs that point to classes, where the * combiners are AppliedTypes, RefinedTypes, RecTypes, And/Or-Types or AnnotatedTypes. */ - private def isCovered(tp: Type): Boolean = tp.dealiasKeepRefiningAnnots.stripTypeVar match { + private def isCovered(tp: Type): Boolean = tp.dealiasKeepRefiningAnnots.stripTypeVar match { case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass case tp: AppliedType => isCovered(tp.tycon) case tp: RefinedOrRecType => isCovered(tp.parent) @@ -1719,7 +1721,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w } case _ => andType(tp1, tp2) } - } + } } } } @@ -1988,10 +1990,10 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w case _ => NoType } - // opportunistically merge same-named refinements - // this does not change anything semantically (i.e. merging or not merging - // gives =:= types), but it keeps the type smaller. case tp1: RefinedType => + // opportunistically merge same-named refinements + // this does not change anything semantically (i.e. merging or not merging + // gives =:= types), but it keeps the type smaller. 
tp2 match { case tp2: RefinedType if tp1.refinedName == tp2.refinedName => try { @@ -2094,11 +2096,10 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w private def traceInfo(tp1: Type, tp2: Type) = s"${tp1.show} <:< ${tp2.show}" + { - if (ctx.settings.verbose.value || Config.verboseExplainSubtype) { + if (ctx.settings.verbose.value || Config.verboseExplainSubtype) s" ${tp1.getClass}, ${tp2.getClass}" + (if (frozenConstraint) " frozen" else "") + (if (ctx.mode is Mode.TypevarsMissContext) " tvars-miss-ctx" else "") - } else "" } @@ -2168,9 +2169,9 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w case (tp1: TypeRef, tp2: TypeRef) if tp1.symbol.isClass && tp2.symbol.isClass => val cls1 = tp1.classSymbol val cls2 = tp2.classSymbol - if (cls1.derivesFrom(cls2) || cls2.derivesFrom(cls1)) { + if (cls1.derivesFrom(cls2) || cls2.derivesFrom(cls1)) false - } else { + else if (cls1.is(Final) || cls2.is(Final)) // One of these types is final and they are not mutually // subtype, so they must be unrelated. @@ -2186,9 +2187,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w decompose(cls2, tp2).forall(x => disjoint(x, tp1)) else false - } case (AppliedType(tycon1, args1), AppliedType(tycon2, args2)) if tycon1 == tycon2 => - def covariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = { + def covariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = disjoint(tp1, tp2) && { // We still need to proof that `Nothing` is not a valid // instantiation of this type parameter. We have two ways @@ -2209,7 +2209,6 @@ class TypeComparer(initctx: Context) extends ConstraintHandling[AbsentContext] w } !lowerBoundedByNothing || typeUsedAsField } - } args1.lazyZip(args2).lazyZip(tycon1.typeParams).exists { (arg1, arg2, tparam) => diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index fc324ffdabb5..70ecc04a6e22 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -118,7 +118,8 @@ object TypeErasure { semiEraseVCs <- List(false, true) isConstructor <- List(false, true) wildcardOK <- List(false, true) - } erasures(erasureIdx(isJava, semiEraseVCs, isConstructor, wildcardOK)) = + } + erasures(erasureIdx(isJava, semiEraseVCs, isConstructor, wildcardOK)) = new TypeErasure(isJava, semiEraseVCs, isConstructor, wildcardOK) /** Produces an erasure function. See the documentation of the class [[TypeErasure]] @@ -196,7 +197,7 @@ object TypeErasure { MethodType(Nil, defn.BoxedUnitClass.typeRef) else if (sym.isAnonymousFunction && einfo.paramInfos.length > MaxImplementedFunctionArity) MethodType(nme.ALLARGS :: Nil, JavaArrayType(defn.ObjectType) :: Nil, einfo.resultType) - else if (sym.name == nme.apply && sym.owner.derivesFrom(defn.PolyFunctionClass)) { + else if (sym.name == nme.apply && sym.owner.derivesFrom(defn.PolyFunctionClass)) // The erasure of `apply` in subclasses of PolyFunction has to match // the erasure of FunctionN#apply, since after `ElimPolyFunction` we replace // a `PolyFunction` parent by a `FunctionN` parent. 
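// --- Editorial aside, not part of the patch ---------------------------------
// The comment in the hunk above explains why `apply` in PolyFunction
// subclasses must erase like FunctionN#apply. A minimal, hedged sketch of
// what that means, assuming current Scala 3 polymorphic function syntax;
// the demo object below is illustrative only and does not appear in the
// compiler sources.
object PolyFunctionErasureDemo:
  // [T] => T => T is sugar for a refinement of scala.PolyFunction
  // (PolyFunction { def apply[T](x: T): T }). After ElimPolyFunction the
  // parent becomes Function1, so `apply` must erase with Object parameter
  // and result types, matching Function1#apply.
  val id: [T] => T => T = [T] => (x: T) => x

  def main(args: Array[String]): Unit =
    println(id(42))        // at runtime this dispatches through the erased apply(Object): Object
    println(id("hello"))
// -----------------------------------------------------------------------------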
@@ -204,7 +205,6 @@ object TypeErasure { paramInfos = einfo.paramInfos.map(_ => defn.ObjectType), resType = defn.ObjectType ) - } else einfo case einfo => @@ -213,9 +213,9 @@ object TypeErasure { // PolyFunction arguments if (sym.is(TermParam) && sym.owner.name == nme.apply && sym.owner.owner.derivesFrom(defn.PolyFunctionClass) - && !(tp <:< defn.PolyFunctionType)) { + && !(tp <:< defn.PolyFunctionType)) defn.ObjectType - } else + else einfo } } @@ -290,11 +290,11 @@ object TypeErasure { import dotty.tools.dotc.transform.TypeUtils._ tp2 match { case JavaArrayType(elem2) => - if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType) { + if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType) if (elem1.classSymbol eq elem2.classSymbol) // same primitive JavaArrayType(elem1) else defn.ObjectType - } else JavaArrayType(erasedLub(elem1, elem2)) + else JavaArrayType(erasedLub(elem1, elem2)) case _ => defn.ObjectType } case _ => @@ -573,13 +573,12 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean /** The name of the type as it is used in `Signature`s. * Need to ensure correspondence with erasure! */ - private def sigName(tp: Type)(implicit ctx: Context): TypeName = try { + private def sigName(tp: Type)(implicit ctx: Context): TypeName = try tp match { case tp: TypeRef => - if (!tp.denot.exists) { + if (!tp.denot.exists) // println(i"missing: ${tp.toString} ${tp.denot} / ${tp.prefix.member(tp.name)}") throw new MissingType(tp.prefix, tp.name) - } val sym = tp.symbol if (!sym.isClass) { val info = tp.translucentSuperType @@ -627,7 +626,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean assert(erasedTp ne tp, tp) sigName(erasedTp) } - } catch { + catch { case ex: AssertionError => println(s"no sig for $tp because of ${ex.printStackTrace()}") throw ex diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index b7c205a0c6cf..e4ae285ce3c1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -52,8 +52,8 @@ class RecursionOverflow(val op: String, details: => String, val previous: Throwa result += ro loop(ro.previous) case _ => result.toList - } + loop(this) } @@ -86,10 +86,10 @@ class RecursionOverflow(val op: String, details: => String, val previous: Throwa // Beware: Since this object is only used when handling a StackOverflow, this code // cannot consume significant amounts of stack. object handleRecursive { - def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(implicit ctx: Context): Nothing = { - if (ctx.settings.YnoDecodeStacktraces.value) { + def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(implicit ctx: Context): Nothing = + if (ctx.settings.YnoDecodeStacktraces.value) throw exc - } else { + else exc match { case _: RecursionOverflow => throw new RecursionOverflow(op, details, exc, weight) @@ -99,8 +99,6 @@ object handleRecursive { if (e != null) throw new RecursionOverflow(op, details, e, weight) else throw exc } - } - } } /** @@ -125,8 +123,8 @@ class CyclicReference private (val denot: SymDenotation) extends TypeError { * Mode.InferringReturnType for the innermost member without type * annotations (!tree.tpt.typeOpt.exists). 
*/ - def errorMsg(cx: Context): Message = { - if (cx.mode is Mode.InferringReturnType) { + def errorMsg(cx: Context): Message = + if (cx.mode is Mode.InferringReturnType) cx.tree match { case tree: untpd.ValOrDefDef if !tree.tpt.typeOpt.exists => if (inImplicitSearch) @@ -140,13 +138,12 @@ class CyclicReference private (val denot: SymDenotation) extends TypeError { case _ => errorMsg(cx.outer) } - } + // Give up and give generic errors. else if (cycleSym.isOneOf(GivenOrImplicit, butNot = Method) && cycleSym.owner.isTerm) CyclicReferenceInvolvingImplicit(cycleSym) else CyclicReferenceInvolving(denot) - } errorMsg(ctx) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index ef2a19c6848b..db3a9632ad91 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -89,7 +89,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. } } - /*>|>*/ trace.conditionally(TypeOps.track, s"asSeen ${tp.show} from (${pre.show}, ${cls.show})", show = true) /*<|<*/ { // !!! DEBUG + trace.conditionally(TypeOps.track, s"asSeen ${tp.show} from (${pre.show}, ${cls.show})", show = true) { // !!! DEBUG // All cases except for ThisType are the same as in Map. Inlined for performance // TODO: generalize the inlining trick? tp match { @@ -649,16 +649,14 @@ trait TypeOps { this: Context => // TODO: Make standalone object. parent.argInfos.nonEmpty && minTypeMap.apply(parent) <:< maxTypeMap.apply(tp2) } - if (protoTp1 <:< tp2) { + if (protoTp1 <:< tp2) if (isFullyDefined(protoTp1, force)) protoTp1 else instUndetMap.apply(protoTp1) - } else { val protoTp2 = maxTypeMap.apply(tp2) - if (protoTp1 <:< protoTp2 || parentQualify) { + if (protoTp1 <:< protoTp2 || parentQualify) if (isFullyDefined(AndType(protoTp1, protoTp2), force)) protoTp1 else instUndetMap.apply(protoTp1) - } else { typr.println(s"$protoTp1 <:< $protoTp2 = false") NoType diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index d32352932d1d..95de69388ed4 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -106,11 +106,10 @@ class TyperState(private val previous: TyperState /* | Null */) { val savedCommitted = isCommitted myIsCommittable = false myReporter = { - if (testReporter == null || testReporter.inUse) { + if (testReporter == null || testReporter.inUse) testReporter = new TestReporter(reporter) - } else { + else testReporter.reset() - } testReporter.inUse = true testReporter } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index f88696789c90..b092e41f3668 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -508,9 +508,8 @@ object Types { } /** A denotation containing the non-private declaration(s) in this type with the given name */ - final def nonPrivateDecl(name: Name)(implicit ctx: Context): Denotation = { + final def nonPrivateDecl(name: Name)(implicit ctx: Context): Denotation = findDecl(name, Private) - } /** A denotation containing the declaration(s) in this type with the given * name, as seen from prefix type `pre`. 
Declarations that have a flag @@ -528,7 +527,7 @@ object Types { } /** The member of this type with the given name */ - final def member(name: Name)(implicit ctx: Context): Denotation = /*>|>*/ { + final def member(name: Name)(implicit ctx: Context): Denotation = { record("member") memberBasedOnFlags(name, required = EmptyFlags, excluded = EmptyFlags) } @@ -643,7 +642,8 @@ object Types { if (tp.opened) { // defensive copy tp.openedTwice = true RecType(rt => tp.parent.substRecThis(tp, rt.recThis)) - } else tp + } + else tp rt.opened = true try go(rt.parent).mapInfo(_.substRecThis(rt, pre)) finally { @@ -660,12 +660,12 @@ object Types { if (ctx.base.pendingMemberSearches.contains(name)) pinfo safe_& rinfo else pinfo recoverable_& rinfo pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo) - } else { + } + else pdenot & ( new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId)), pre, safeIntersection = ctx.base.pendingMemberSearches.contains(name)) - } } def goApplied(tp: AppliedType, tycon: HKTypeLambda) = @@ -705,9 +705,8 @@ object Types { case d => d } - def goAnd(l: Type, r: Type) = { + def goAnd(l: Type, r: Type) = go(l) & (go(r), pre, safeIntersection = ctx.base.pendingMemberSearches.contains(name)) - } val recCount = ctx.base.findMemberCount if (recCount >= Config.LogPendingFindMemberThreshold) @@ -1298,10 +1297,9 @@ object Types { } case pre: RecType => val candidate = pre.parent.lookupRefined(name) - if (candidate.exists && !pre.isReferredToBy(candidate)) { + if (candidate.exists && !pre.isReferredToBy(candidate)) //println(s"lookupRefined ${this.toString} . $name, pre: $pre ---> $candidate / ${candidate.toString}") candidate - } else NoType case SkolemType(tp) => loop(tp) @@ -1619,8 +1617,9 @@ object Types { /** Is the `hash` of this type the same for all possible sequences of enclosing binders? */ def stableHash: Boolean = true + } - } // end Type + // end Type // ----- Type categories ---------------------------------------------- @@ -1870,10 +1869,9 @@ object Types { * Assumes that symbols do not change between periods in the same run. * Used to get the class underlying a ThisType. */ - private[Types] def stableInRunSymbol(implicit ctx: Context): Symbol = { + private[Types] def stableInRunSymbol(implicit ctx: Context): Symbol = if (checkedPeriod.runId == ctx.runId) lastSymbol else symbol - } def info(implicit ctx: Context): Type = denot.info @@ -2009,9 +2007,10 @@ object Types { setDenot(memberDenot(name, allowPrivate = !symbol.exists || symbol.is(Private))) private def setDenot(denot: Denotation)(implicit ctx: Context): Unit = { - if (Config.checkNoDoubleBindings) + if (Config.checkNoDoubleBindings) { if (ctx.settings.YnoDoubleBindings.value) checkSymAssign(denot.symbol) + } lastDenotation = denot lastSymbol = denot.symbol @@ -2108,9 +2107,9 @@ object Types { } finally { ctx.pendingUnderlying -= this } - } finally { - ctx.base.underlyingRecursions -= 1 } + finally + ctx.base.underlyingRecursions -= 1 /** The argument corresponding to class type parameter `tparam` as seen from * prefix `pre`. 
Can produce a TypeBounds type in case prefix is an & or | type @@ -2347,9 +2346,8 @@ object Types { } /** Hook that can be called from creation methods in TermRef and TypeRef */ - def validated(implicit ctx: Context): this.type = { + def validated(implicit ctx: Context): this.type = this - } } final class CachedTermRef(prefix: Type, designator: Designator, hc: Int) extends TermRef(prefix, designator) { @@ -2363,8 +2361,9 @@ object Types { } /** Assert current phase does not have erasure semantics */ - private def assertUnerased()(implicit ctx: Context) = + private def assertUnerased()(implicit ctx: Context) = { if (Config.checkUnerased) assert(!ctx.phase.erasedTypes) + } /** The designator to be used for a named type creation with given prefix, name, and denotation. * This is the denotation's symbol, if it exists and the prefix is not the this type @@ -2575,6 +2574,8 @@ object Types { case _ => false } + // equals comes from case class; no matching override is needed + override def iso(that: Any, bs: BinderPairs): Boolean = that match { case that: RefinedType => refinedName.eq(that.refinedName) && @@ -2582,7 +2583,6 @@ object Types { parent.equals(that.parent, bs) case _ => false } - // equals comes from case class; no matching override is needed } class CachedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type) @@ -2844,7 +2844,7 @@ object Types { private[this] var myAtoms: Set[Type] = _ private[this] var myWidened: Type = _ - private def ensureAtomsComputed()(implicit ctx: Context): Unit = + private def ensureAtomsComputed()(implicit ctx: Context): Unit = { if (atomsRunId != ctx.runId) { val atoms1 = tp1.atoms val atoms2 = tp2.atoms @@ -2854,6 +2854,7 @@ object Types { myWidened = if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else tp1w | tp2w atomsRunId = ctx.runId } + } override def atoms(implicit ctx: Context): Set[Type] = { ensureAtomsComputed() @@ -2944,11 +2945,12 @@ object Types { case _ => false } + // equals comes from case class; no matching override is needed + override def iso(that: Any, bs: BinderPairs): Boolean = that match { case that: ExprType => resType.equals(that.resType, bs) case _ => false } - // equals comes from case class; no matching override is needed } final class CachedExprType(resultType: Type) extends ExprType(resultType) @@ -3137,19 +3139,18 @@ object Types { * def f(x: C)(y: x.T) // dependencyStatus = FalseDeps, i.e. * // dependency can be eliminated by dealiasing. */ - private def dependencyStatus(implicit ctx: Context): DependencyStatus = { + private def dependencyStatus(implicit ctx: Context): DependencyStatus = if (myDependencyStatus != Unknown) myDependencyStatus else { val result = depStatus(NoDeps, resType) if ((result & Provisional) == 0) myDependencyStatus = result (result & StatusMask).toByte } - } /** The parameter dependency status of this method. Analogous to `dependencyStatus`, * but tracking dependencies in same parameter list. */ - private def paramDependencyStatus(implicit ctx: Context): DependencyStatus = { + private def paramDependencyStatus(implicit ctx: Context): DependencyStatus = if (myParamDependencyStatus != Unknown) myParamDependencyStatus else { val result = @@ -3158,7 +3159,6 @@ object Types { if ((result & Provisional) == 0) myParamDependencyStatus = result (result & StatusMask).toByte } - } /** Does result type contain references to parameters of this method type, * which cannot be eliminated by de-aliasing? 
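Note on the rewrite pattern: the hunks above and below apply the same mechanical change used throughout this patch — a `def` whose body is a single expression or `if`/`else` loses its enclosing braces, and `else` (like `catch` and `finally` elsewhere in the patch) starts on its own line. A minimal sketch of the before/after shape, using a hypothetical `classify` method rather than code taken from this patch:

object BraceStyleExample {
  // Before: braces wrap a body that is just one if/else, with `else` cuddled to `}`.
  def classifyOld(n: Int): String = {
    if (n > 0) {
      "positive"
    } else {
      "non-positive"
    }
  }

  // After: the style this patch converts to — no braces around a single-expression
  // body, and `else` begins its own line.
  def classifyNew(n: Int): String =
    if (n > 0)
      "positive"
    else
      "non-positive"
}

Both forms compile to the same code; the change is purely syntactic.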
@@ -3305,7 +3305,7 @@ object Types { } object MethodType extends MethodTypeCompanion("MethodType") { - def companion(isJava: Boolean = false, isContextual: Boolean = false, isImplicit: Boolean = false, isErased: Boolean = false): MethodTypeCompanion = { + def companion(isJava: Boolean = false, isContextual: Boolean = false, isImplicit: Boolean = false, isErased: Boolean = false): MethodTypeCompanion = if (isJava) { assert(!isImplicit) assert(!isErased) @@ -3318,7 +3318,6 @@ object Types { if (isErased) ErasedImplicitMethodType else ImplicitMethodType else if (isErased) ErasedMethodType else MethodType - } } object JavaMethodType extends MethodTypeCompanion("JavaMethodType") object ErasedMethodType extends MethodTypeCompanion("ErasedMethodType") @@ -3432,9 +3431,8 @@ object Types { object HKTypeLambda extends TypeLambdaCompanion[HKTypeLambda] { def apply(paramNames: List[TypeName])( paramInfosExp: HKTypeLambda => List[TypeBounds], - resultTypeExp: HKTypeLambda => Type)(implicit ctx: Context): HKTypeLambda = { + resultTypeExp: HKTypeLambda => Type)(implicit ctx: Context): HKTypeLambda = unique(new HKTypeLambda(paramNames)(paramInfosExp, resultTypeExp)) - } def unapply(tl: HKTypeLambda): Some[(List[LambdaParam], Type)] = Some((tl.typeParams, tl.resType)) @@ -3469,9 +3467,8 @@ object Types { object PolyType extends TypeLambdaCompanion[PolyType] { def apply(paramNames: List[TypeName])( paramInfosExp: PolyType => List[TypeBounds], - resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = { + resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = unique(new PolyType(paramNames)(paramInfosExp, resultTypeExp)) - } def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] = Some((tl.typeParams, tl.resType)) @@ -3556,14 +3553,13 @@ object Types { case _ => NoType } - if (defn.isCompiletime_S(tycon.symbol) && args.length == 1) { + if (defn.isCompiletime_S(tycon.symbol) && args.length == 1) trace(i"normalize S $this", typr, show = true) { args.head.normalized match { case ConstantType(Constant(n: Int)) => ConstantType(Constant(n + 1)) case none => tryMatchAlias } } - } else tryMatchAlias case _ => NoType @@ -3603,11 +3599,12 @@ object Types { override def eql(that: Type): Boolean = this `eq` that // safe because applied types are hash-consed separately + // equals comes from case class; no matching override is needed + final override def iso(that: Any, bs: BinderPairs): Boolean = that match { case that: AppliedType => tycon.equals(that.tycon, bs) && args.equalElements(that.args, bs) case _ => false } - // equals comes from case class; no matching override is needed } final class CachedAppliedType(tycon: Type, args: List[Type], hc: Int) extends AppliedType(tycon, args) { @@ -4154,6 +4151,7 @@ object Types { case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && alias.equals(that.alias, bs) case _ => false } + // equals comes from case class; no matching override is needed override def eql(that: Type): Boolean = that match { @@ -4227,11 +4225,12 @@ object Types { isRefiningCache } + // equals comes from case class; no matching override is needed + override def iso(that: Any, bs: BinderPairs): Boolean = that match { case that: AnnotatedType => parent.equals(that.parent, bs) && (annot `eq` that.annot) case _ => false } - // equals comes from case class; no matching override is needed } object AnnotatedType { @@ -4317,11 +4316,12 @@ object Types { case _ => false } + // equals comes from case class; no matching override is needed + override def iso(that: Any, bs: 
BinderPairs): Boolean = that match { case that: WildcardType => optBounds.equals(that.optBounds, bs) case _ => false } - // equals comes from case class; no matching override is needed } final class CachedWildcardType(optBounds: Type) extends WildcardType(optBounds) @@ -4404,7 +4404,7 @@ object Types { mapOver(info.alias) case TypeBounds(lo, hi) => range(atVariance(-variance)(apply(lo)), apply(hi)) - case _ => + case _ => range(defn.NothingType, defn.AnyType) // should happen only in error cases } case _ => @@ -4720,7 +4720,7 @@ object Types { * If the expansion is a wildcard parameter reference, convert its * underlying bounds to a range, otherwise return the expansion. */ - def expandParam(tp: NamedType, pre: Type): Type = { + def expandParam(tp: NamedType, pre: Type): Type = tp.argForParam(pre) match { case arg @ TypeRef(pre, _) if pre.isArgPrefixOf(arg.symbol) => arg.info match { @@ -4730,7 +4730,6 @@ object Types { case arg: TypeBounds => expandBounds(arg) case arg => reapply(arg) } - } /** Derived selection. * @pre the (upper bound of) prefix `pre` has a member named `tp.name`. @@ -4856,9 +4855,8 @@ object Types { if (underlying.isBottomType) underlying else tp.derivedAnnotatedType(underlying, annot) } - override protected def derivedWildcardType(tp: WildcardType, bounds: Type): WildcardType = { + override protected def derivedWildcardType(tp: WildcardType, bounds: Type): WildcardType = tp.derivedWildcardType(rangeToBounds(bounds)) - } override protected def derivedSkolemType(tp: SkolemType, info: Type): Type = info match { case Range(lo, hi) => @@ -5083,23 +5081,23 @@ object Types { else { seen.put(tp, tp) tp match { - case tp: AppliedType => - foldOver(n + 1, tp) - case tp: RefinedType => - foldOver(n + 1, tp) - case tp: TypeRef if tp.info.isTypeAlias => - apply(n, tp.superType) - case tp: TypeParamRef => - apply(n, ctx.typeComparer.bounds(tp)) - case _ => - foldOver(n, tp) + case tp: AppliedType => + foldOver(n + 1, tp) + case tp: RefinedType => + foldOver(n + 1, tp) + case tp: TypeRef if tp.info.isTypeAlias => + apply(n, tp.superType) + case tp: TypeParamRef => + apply(n, ctx.typeComparer.bounds(tp)) + case _ => + foldOver(n, tp) + } } - } } class CoveringSetAccumulator(implicit ctx: Context) extends TypeAccumulator[Set[Symbol]] { val seen = new java.util.IdentityHashMap[Type, Type] - def apply(cs: Set[Symbol], tp: Type): Set[Symbol] = { + def apply(cs: Set[Symbol], tp: Type): Set[Symbol] = if (seen.get(tp) != null) cs else { seen.put(tp, tp) @@ -5123,7 +5121,6 @@ object Types { foldOver(cs, tp) } } - } } // ----- Name Filters -------------------------------------------------- diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala index 1bb6db5baeec..37c5313316b7 100644 --- a/compiler/src/dotty/tools/dotc/core/Uniques.scala +++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala @@ -18,7 +18,8 @@ object Uniques { if (h == NotCached) { record("uncached-types") record(s"uncached: $clazz") - } else { + } + else { record("cached-types") record(s"cached: $clazz") } @@ -31,8 +32,10 @@ object Uniques { val result = ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T] if (ctx.uniques.size > size) record(s"fresh unique ${tp.getClass}") result - } else ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T] - } /* !!! DEBUG + } + else ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T] + } + /* !!! 
DEBUG ensuring ( result => tp.toString == result.toString || { println(s"cache mismatch; tp = $tp, cached = $result") diff --git a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala index cad3a4132393..4ade0fa44903 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala @@ -84,5 +84,5 @@ class AbstractFileReader(val file: AbstractFile) { /** skip next 'n' bytes */ def skip(n: Int): Unit = { bp += n } - } + diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala index badd9e5603b3..acd7982f4fd3 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala @@ -26,7 +26,8 @@ object ByteCodecs { dst(j) = (0xc0).toByte dst(j + 1) = (0x80).toByte j += 2 - } else { + } + else { dst(j) = (in + 1).toByte j += 1 } @@ -44,10 +45,12 @@ object ByteCodecs { if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) { src(j) = 0x7f i += 2 - } else if (in == 0) { + } + else if (in == 0) { src(j) = 0x7f i += 1 - } else { + } + else { src(j) = (in - 1).toByte i += 1 } diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 483d06be6dd1..16c73f5df420 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -85,7 +85,8 @@ class ClassfileParser( parseHeader() this.pool = new ConstantPool parseClass() - } catch { + } + catch { case e: RuntimeException => if (ctx.debug) e.printStackTrace() throw new IOException( @@ -188,16 +189,15 @@ class ClassfileParser( setClassInfo(classRoot, classInfo, fromScala2 = false) setClassInfo(moduleRoot, staticInfo, fromScala2 = false) - } else if (result == Some(NoEmbedded)) { + } + else if (result == Some(NoEmbedded)) for (sym <- List(moduleRoot.sourceModule, moduleRoot.symbol, classRoot.symbol)) { classRoot.owner.asClass.delete(sym) - if (classRoot.owner == defn.ScalaShadowingPackage.moduleClass) { + if (classRoot.owner == defn.ScalaShadowingPackage.moduleClass) // Symbols in scalaShadowing are also added to scala defn.ScalaPackageClass.delete(sym) - } sym.markAbsent() } - } // eager load java enum definitions for exhaustivity check of pattern match if (isEnum) { @@ -214,9 +214,8 @@ class ClassfileParser( def addEnclosingTParams()(implicit ctx: Context): Unit = { var sym = classRoot.owner while (sym.isClass && !sym.is(Flags.ModuleClass)) { - for (tparam <- sym.typeParams) { + for (tparam <- sym.typeParams) classTParams = classTParams.updated(tparam.name, tparam) - } sym = sym.owner } } @@ -298,9 +297,9 @@ class ClassfileParser( enumClass.addAnnotation(Annotation.Child(sym, NoSpan)) } } - } finally { - in.bp = oldbp } + finally + in.bp = oldbp } } @@ -366,7 +365,8 @@ class ClassfileParser( } accept('>') if (skiptvs) tp else tp.appliedTo(argsBuf.toList) - } else tp + } + else tp case tp => assert(sig(index) != '<', tp) tp @@ -391,9 +391,8 @@ class ClassfileParser( // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail // see also RestrictJavaArraysMap (when compiling java sources 
directly) - if (elemtp.typeSymbol.isAbstractOrParamType && !(elemtp.derivesFrom(defn.ObjectClass))) { + if (elemtp.typeSymbol.isAbstractOrParamType && !(elemtp.derivesFrom(defn.ObjectClass))) elemtp = AndType(elemtp, defn.ObjectType) - } defn.ArrayOf(elemtp) case '(' => // we need a method symbol. given in line 486 by calling getType(methodSym, ..) @@ -412,7 +411,8 @@ class ClassfileParser( //assert(tparams contains n, s"classTparams = $classTParams, tparams = $tparams, key = $n") if (skiptvs) defn.AnyType else tparams(n).typeRef } - } // sig2type(tparams, skiptvs) + } + // sig2type(tparams, skiptvs) def sig2typeBounds(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean)(implicit ctx: Context): Type = { val ts = new ListBuffer[Type] @@ -436,9 +436,9 @@ class ClassfileParser( denot.symbol, sig2typeBounds(tparams, skiptvs = false), reportErrors = false) - } finally { - index = savedIndex } + finally + index = savedIndex } } @@ -466,13 +466,13 @@ class ClassfileParser( else { classTParams = tparams val parents = new ListBuffer[Type]() - while (index < end) { + while (index < end) parents += sig2type(tparams, skiptvs = false) // here the variance doesnt'matter - } TempClassInfoType(parents.toList, instanceScope, owner) } if (ownTypeParams.isEmpty) tpe else TempPolyType(ownTypeParams, tpe) - } // sigToType + } + // sigToType def parseAnnotArg(skip: Boolean = false)(implicit ctx: Context): Option[Tree] = { val tag = in.nextByte.toChar @@ -491,11 +491,11 @@ class ClassfileParser( val n = pool.getName(in.nextChar) val module = t.typeSymbol.companionModule val s = module.info.decls.lookup(n) - if (skip) { + if (skip) None - } else if (s != NoSymbol) { + else if (s != NoSymbol) Some(Literal(Constant(s))) - } else { + else { ctx.warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).""") None } @@ -538,7 +538,8 @@ class ClassfileParser( } if (hasError || skip) None else Some(Annotation.deferredResolve(attrType, argbuf.toList)) - } catch { + } + catch { case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found case NonFatal(ex) => // We want to be robust when annotations are unavailable, so the very least @@ -554,12 +555,11 @@ class ClassfileParser( } def parseAttributes(sym: Symbol, symtype: Type)(implicit ctx: Context): Type = { - def convertTo(c: Constant, pt: Type): Constant = { + def convertTo(c: Constant, pt: Type): Constant = if (pt == defn.BooleanType && c.tag == IntTag) Constant(c.value != 0) else c convertTo pt - } var newType = symtype def parseAttribute(): Unit = { @@ -629,7 +629,7 @@ class ClassfileParser( /** Parse a sequence of annotations and attaches them to the * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. 
*/ - def parseAnnotations(len: Int): Unit = { + def parseAnnotations(len: Int): Unit = { val nAttr = in.nextChar for (n <- 0 until nAttr) parseAnnotation(in.nextChar) match { @@ -640,9 +640,8 @@ class ClassfileParser( } // begin parseAttributes - for (i <- 0 until in.nextChar) { + for (i <- 0 until in.nextChar) parseAttribute() - } cook.apply(newType) } @@ -676,14 +675,13 @@ class ClassfileParser( name1.drop(name1.lastIndexOf('.') + 1) } - def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = { + def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = SymbolLoaders.enterClassAndModule( getOwner(jflags), entry.originalName, new ClassfileLoader(file), classTranslation.flags(jflags), getScope(jflags)) - } for (entry <- innerClasses.values) { // create a new class member for immediate inner classes @@ -791,7 +789,8 @@ class ClassfileParser( } tastyOutStream.flush() tastyOutStream.toByteArray - } else { + } + else { ctx.error(s"Could not find $path in $jar") Array.empty } @@ -815,7 +814,7 @@ class ClassfileParser( else return unpickleTASTY(bytes) } - if (scan(tpnme.ScalaATTR) && !scalaUnpickleWhitelist.contains(classRoot.name)) { + if (scan(tpnme.ScalaATTR) && !scalaUnpickleWhitelist.contains(classRoot.name)) // To understand the situation, it's helpful to know that: // - Scalac emits the `ScalaSig` attribute for classfiles with pickled information // and the `Scala` attribute for everything else. @@ -825,7 +824,6 @@ class ClassfileParser( // Therefore, if the `Scala` attribute is present but the `TASTY` // attribute isn't, this classfile is a compilation artifact. return Some(NoEmbedded) - } if (scan(tpnme.RuntimeVisibleAnnotationATTR) || scan(tpnme.RuntimeInvisibleAnnotationATTR)) { val attrLen = in.nextInt @@ -837,7 +835,7 @@ class ClassfileParser( var j = 0 while (j < nArgs) { val argName = pool.getName(in.nextChar) - if (argName == nme.bytes) + if (argName == nme.bytes) { if (attrClass == defn.ScalaSignatureAnnot) return unpickleScala(parseScalaSigBytes) else if (attrClass == defn.ScalaLongSignatureAnnot) @@ -846,6 +844,7 @@ class ClassfileParser( return unpickleTASTY(parseScalaSigBytes) else if (attrClass == defn.TASTYLongSignatureAnnot) return unpickleTASTY(parseScalaLongSigBytes) + } parseAnnotArg(skip = true) j += 1 } @@ -868,7 +867,8 @@ class ClassfileParser( } } None - } finally in.bp = oldbp + } + finally in.bp = oldbp } /** An entry in the InnerClasses attribute of this class file. 
*/ @@ -891,7 +891,8 @@ class ClassfileParser( while (isDefinedAt(entry.outerName)) entry = this(entry.outerName) entry.outerName - } else + } + else name classNameToSymbol(tlName) } @@ -901,7 +902,7 @@ class ClassfileParser( */ def classSymbol(entry: InnerClassEntry)(implicit ctx: Context): Symbol = { def getMember(sym: Symbol, name: Name)(implicit ctx: Context): Symbol = - if (isStatic(entry.jflags)) { + if (isStatic(entry.jflags)) if (sym == classRoot.symbol) staticScope.lookup(name) else { @@ -910,7 +911,6 @@ class ClassfileParser( module = sym.scalacLinkedClass module.info.member(name).symbol } - } else if (sym == classRoot.symbol) instanceScope.lookup(name) else @@ -931,7 +931,7 @@ class ClassfileParser( } } - def skipAttributes(): Unit = { + def skipAttributes(): Unit = { val attrCount = in.nextChar for (i <- 0 until attrCount) { in.skip(2); in.skip(in.nextInt) @@ -1068,12 +1068,14 @@ class ClassfileParser( if (name.firstPart(0) == ARRAY_TAG) { c = sigToType(name) values(index) = c - } else { + } + else { val sym = classNameToSymbol(name) values(index) = sym c = sym.typeRef } - } else c = value match { + } + else c = value match { case tp: Type => tp case cls: Symbol => cls.typeRef } diff --git a/compiler/src/dotty/tools/dotc/core/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/core/quoted/PickledQuotes.scala index 75e8fdb7116d..425163447e1c 100644 --- a/compiler/src/dotty/tools/dotc/core/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/core/quoted/PickledQuotes.scala @@ -26,14 +26,13 @@ object PickledQuotes { import tpd._ /** Pickle the tree of the quote into strings */ - def pickleQuote(tree: Tree)(implicit ctx: Context): PickledQuote = { + def pickleQuote(tree: Tree)(implicit ctx: Context): PickledQuote = if (ctx.reporter.hasErrors) Nil else { assert(!tree.isInstanceOf[Hole]) // Should not be pickled as it represents `'{$x}` which should be optimized to `x` val pickled = pickle(tree) TastyString.pickle(pickled) } - } /** Transform the expression into its fully spliced Tree */ def quotedExprToTree[T](expr: quoted.Expr[T])(implicit ctx: Context): Tree = { @@ -133,13 +132,12 @@ object PickledQuotes { /** Make sure that the owner of this tree is `ctx.owner` */ private def healOwner(tree: Tree)(implicit ctx: Context): Tree = { val getCurrentOwner = new TreeAccumulator[Option[Symbol]] { - def apply(x: Option[Symbol], tree: tpd.Tree)(implicit ctx: Context): Option[Symbol] = { + def apply(x: Option[Symbol], tree: tpd.Tree)(implicit ctx: Context): Option[Symbol] = if (x.isDefined) x else tree match { case tree: DefTree => Some(tree.symbol.owner) case _ => foldOver(x, tree) } - } } getCurrentOwner(None, tree) match { case Some(owner) if owner != ctx.owner => tree.changeOwner(owner, ctx.owner) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala index f6d7fb097bee..ea46ff469b84 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala @@ -39,5 +39,5 @@ class CommentPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Addr)(implic traverseChildren(tree) } } - } + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala index f3d016921a4a..316a77d13fbf 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala @@ -28,5 +28,5 @@ 
class CommentUnpickler(reader: TastyReader) { def commentAt(addr: Addr): Option[Comment] = comments.get(addr) - } + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index cccf6411652a..4745fd51871d 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -51,9 +51,8 @@ class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLe def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit = treeUnpickler.enter(roots) - protected def treeSectionUnpickler(posUnpicklerOpt: Option[PositionUnpickler], commentUnpicklerOpt: Option[CommentUnpickler]): TreeSectionUnpickler = { + protected def treeSectionUnpickler(posUnpicklerOpt: Option[PositionUnpickler], commentUnpicklerOpt: Option[CommentUnpickler]): TreeSectionUnpickler = new TreeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt) - } protected def computeRootTrees(implicit ctx: Context): List[Tree] = treeUnpickler.unpickle(mode) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala index 30ed9ac5aeae..388de191823e 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala @@ -42,7 +42,7 @@ class NameBuffer extends TastyBuffer(10000) { val ref = NameRef(nameRefs.size) nameRefs(name1) = ref ref - } + } } private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index bfaf60ef2ad2..cb8e6cda23bd 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -103,8 +103,7 @@ class PositionPickler(pickler: TastyPickler, addrOfTree: untpd.Tree => Addr) { traverse(x.tree, current) case _ => } - for (root <- roots) { + for (root <- roots) traverse(root, NoSource) - } } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala index ca283576886d..52194c6d2a4f 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala @@ -17,7 +17,7 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { private var mySourcePaths: mutable.HashMap[Addr, String] = _ private var isDefined = false - def ensureDefined(): Unit = + def ensureDefined(): Unit = { if (!isDefined) { mySpans = new mutable.HashMap[Addr, Span] mySourcePaths = new mutable.HashMap[Addr, String] @@ -42,10 +42,11 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { mySpans(Addr(curIndex)) = if (hasPoint) Span(curStart, curEnd, curStart + readInt()) else Span(curStart, curEnd) - } + } } isDefined = true } + } private[tasty] def spans: Map[Addr, Span] = { ensureDefined() diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala index d8a793715a08..8d188bb64cd7 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala @@ -126,7 +126,7 @@ class TastyBuffer(initialSize: Int) { def putNat(at: Addr, x: Int, width: Int): Unit = { var y = x var w = 
width - if(at.index + w >= bytes.length) + if (at.index + w >= bytes.length) bytes = dble(bytes) var digit = y & 0x7f | 0x80 while (w > 0) { @@ -149,12 +149,13 @@ class TastyBuffer(initialSize: Int) { var b = 0L var x = 0L var idx = at.index - while { + while ({ b = bytes(idx) x = (x << 7) | (b & 0x7f) idx += 1 (b & 0x80) == 0 - } do () + }) + () x } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyHash.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyHash.scala index 6845f36fcf06..4ffc1965202e 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyHash.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyHash.scala @@ -19,5 +19,5 @@ object TastyHash { } h } - } + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyHeaderUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyHeaderUnpickler.scala index d06436e5af77..5fd56c18af59 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyHeaderUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyHeaderUnpickler.scala @@ -26,6 +26,7 @@ class TastyHeaderUnpickler(reader: TastyReader) { def isAtEnd: Boolean = reader.isAtEnd - private def check(cond: Boolean, msg: => String): Unit = + private def check(cond: Boolean, msg: => String): Unit = { if (!cond) throw new UnpickleException(msg) + } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index 97d6d0077bb9..a0ead8d38dbe 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -77,5 +77,5 @@ class TastyPickler(val rootCls: ClassSymbol) { var addrOfSym: Symbol => Option[Addr] = (_ => None) val treePkl: TreePickler = new TreePickler(this) - } + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala index f2defc3ffe51..4bf0cd89031a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala @@ -75,12 +75,13 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = def readLongNat(): Long = { var b = 0L var x = 0L - while { + while ({ b = bytes(bp) x = (x << 7) | (b & 0x7f) bp += 1 (b & 0x80) == 0 - } do () + }) + () x } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala index d484514b3521..cef20a5223e6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyString.scala @@ -24,5 +24,5 @@ object TastyString { strings.foreach(string.append) Base64.getDecoder().decode(string.result().getBytes(UTF_8)) } - } + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala index 5bd1c48add8c..da7d79e6ea4f 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala @@ -98,16 +98,16 @@ class TreeBuffer extends TastyBuffer(50000) { assert(len1 == len2, s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") len1 - } else adjusted(original) + } + else adjusted(original) } /** Adjust all offsets according to previously computed deltas */ - private def adjustOffsets(): Unit = { + private def adjustOffsets(): Unit = for (i <- 0 until numOffsets) { val corrected = adjustedOffset(i) fillAddr(offset(i), 
corrected) } - } /** Adjust deltas to also take account references that will shrink (and thereby * generate additional zeroes that can be skipped) due to previously @@ -175,11 +175,12 @@ class TreeBuffer extends TastyBuffer(50000) { //println(s"offsets: ${offsets.take(numOffsets).deep}") //println(s"deltas: ${delta.take(numOffsets).deep}") var saved = 0 - while { + while ({ saved = adjustDeltas() pickling.println(s"adjusting deltas, saved = $saved") saved > 0 && length / saved < 100 - } do () + }) + () adjustOffsets() adjustTreeAddrs() val wasted = compress() diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index cbac6f01bf38..f7207919d3bb 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -45,9 +45,8 @@ class TreePickler(pickler: TastyPickler) { fillRef(lengthAddr, currentAddr, relative = true) } - def addrOfSym(sym: Symbol): Option[Addr] = { + def addrOfSym(sym: Symbol): Option[Addr] = symRefs.get(sym) - } def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match { case tree: MemberDef => @@ -144,7 +143,8 @@ class TreePickler(pickler: TastyPickler) { writeByte(SHAREDtype) writeRef(prev.asInstanceOf[Addr]) } - } catch { + } + catch { case ex: AssertionError => println(i"error when pickling type $tpe") throw ex @@ -290,8 +290,9 @@ class TreePickler(pickler: TastyPickler) { def pickleTpt(tpt: Tree)(implicit ctx: Context): Unit = pickleTree(tpt) - def pickleTreeUnlessEmpty(tree: Tree)(implicit ctx: Context): Unit = + def pickleTreeUnlessEmpty(tree: Tree)(implicit ctx: Context): Unit = { if (!tree.isEmpty) pickleTree(tree) + } def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(implicit ctx: Context): Unit = { assert(symRefs(sym) == NoAddr, sym) @@ -373,7 +374,8 @@ class TreePickler(pickler: TastyPickler) { if (fun.symbol eq defn.throwMethod) { writeByte(THROW) pickleTree(args.head) - } else { + } + else { writeByte(APPLY) withLength { pickleTree(fun) @@ -437,10 +439,9 @@ class TreePickler(pickler: TastyPickler) { case tree @ Match(selector, cases) => writeByte(MATCH) withLength { - if (tree.isInline) { + if (tree.isInline) if (selector.isEmpty) writeByte(IMPLICIT) else { writeByte(INLINE); pickleTree(selector) } - } else pickleTree(selector) tree.cases.foreach(pickleTree) } @@ -661,7 +662,8 @@ class TreePickler(pickler: TastyPickler) { if (flags.is(ParamAccessor)) writeModTag(PARAMsetter) if (flags.is(Exported)) writeModTag(EXPORTED) assert(!(flags.is(Label))) - } else { + } + else { if (flags.is(Sealed)) writeModTag(SEALED) if (flags.is(Abstract)) writeModTag(ABSTRACT) if (flags.is(Trait)) writeModTag(TRAIT) @@ -681,11 +683,12 @@ class TreePickler(pickler: TastyPickler) { ann.symbol == defn.BodyAnnot // inline bodies are reconstituted automatically when unpickling } - def pickleAnnotation(owner: Symbol, ann: Annotation)(implicit ctx: Context): Unit = + def pickleAnnotation(owner: Symbol, ann: Annotation)(implicit ctx: Context): Unit = { if (!isUnpicklable(owner, ann)) { writeByte(ANNOTATION) withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) } } + } // ---- main entry points --------------------------------------- diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 0c2654d3c42d..475f696ccfc5 100644 --- 
a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -110,11 +110,10 @@ class TreeUnpickler(reader: TastyReader, import reader._ val owner = ctx.owner val source = ctx.source - def complete(denot: SymDenotation)(implicit ctx: Context): Unit = { + def complete(denot: SymDenotation)(implicit ctx: Context): Unit = treeAtAddr(currentAddr) = new TreeReader(reader).readIndexedDef()( ctx.withPhaseNoLater(ctx.picklerPhase).withOwner(owner).withSource(source)) - } } class TreeReader(val reader: TastyReader) { @@ -123,11 +122,12 @@ class TreeUnpickler(reader: TastyReader, def forkAt(start: Addr): TreeReader = new TreeReader(subReader(start, endAddr)) def fork: TreeReader = forkAt(currentAddr) - def skipTree(tag: Int): Unit = + def skipTree(tag: Int): Unit = { if (tag >= firstLengthTreeTag) goto(readEnd()) else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() } else if (tag >= firstASTTreeTag) skipTree() else if (tag >= firstNatTreeTag) readNat() + } def skipTree(): Unit = skipTree(readByte()) def skipParams(): Unit = @@ -167,10 +167,9 @@ class TreeUnpickler(reader: TastyReader, } else { for (i <- 0 until nrefs) readNat() - if (tag == BIND) { + if (tag == BIND) // a Bind is never the owner of anything, so we set `end = start` buf += new OwnerTree(start, tag, fork, end = start) - } scanTrees(buf, end) } @@ -668,7 +667,7 @@ class TreeUnpickler(reader: TastyReader, */ def indexStats(end: Addr)(implicit ctx: Context): FlagSet = { var initsFlags = NoInitsInterface - while (currentAddr.index < end.index) { + while (currentAddr.index < end.index) nextByte match { case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM => val sym = symbolAtCurrent() @@ -686,7 +685,6 @@ class TreeUnpickler(reader: TastyReader, skipTree() initsFlags = EmptyFlags } - } assert(currentAddr.index == end.index) initsFlags } @@ -748,13 +746,12 @@ class TreeUnpickler(reader: TastyReader, val tag = readByte() val end = readEnd() - def readParamss(implicit ctx: Context): List[List[ValDef]] = { + def readParamss(implicit ctx: Context): List[List[ValDef]] = collectWhile(nextByte == PARAMS) { readByte() readEnd() readParams[ValDef](PARAM) } - } val localCtx = localContext(sym) @@ -812,7 +809,8 @@ class TreeUnpickler(reader: TastyReader, if (companion.exists && isCodefined) sym.registerCompanion(companion) TypeDef(readTemplate(localCtx)) - } else { + } + else { sym.info = TypeBounds.empty // needed to avoid cyclic references when unpickling rhs, see i3816.scala sym.setFlag(Provisional) val rhs = readTpt()(localCtx) @@ -842,9 +840,8 @@ class TreeUnpickler(reader: TastyReader, } goto(end) setSpan(start, tree) - if (!sym.isType) { // Only terms might have leaky aliases, see the documentation of `checkNoPrivateLeaks` + if (!sym.isType) // Only terms might have leaky aliases, see the documentation of `checkNoPrivateLeaks` sym.info = ta.avoidPrivateLeaks(sym) - } if (ctx.mode.is(Mode.ReadComments)) { assert(ctx.docCtx.isDefined, "Mode is `ReadComments`, but no `docCtx` is set.") @@ -919,14 +916,13 @@ class TreeUnpickler(reader: TastyReader, nextByte == IMPORT || nextByte == PACKAGE def readTopLevel()(implicit ctx: Context): List[Tree] = { - @tailrec def read(acc: ListBuffer[Tree]): List[Tree] = { + @tailrec def read(acc: ListBuffer[Tree]): List[Tree] = if (isTopLevel) { acc += readIndexedStat(NoSymbol) if (!isAtEnd) read(acc) else acc.toList } else // top-level trees which are not imports or packages are not part of tree acc.toList - } read(new ListBuffer[tpd.Tree]) 
} @@ -1281,7 +1277,8 @@ class TreeUnpickler(reader: TastyReader, val filled = if (isType) { val quotedType = splice.asInstanceOf[Seq[Any] => quoted.Type[_]](reifiedArgs) PickledQuotes.quotedTypeToTree(quotedType) - } else { + } + else { val splice1 = splice.asInstanceOf[Seq[Any] => given scala.quoted.QuoteContext => quoted.Expr[_]] val quotedExpr = splice1(reifiedArgs) given dotty.tools.dotc.quoted.QuoteContext() PickledQuotes.quotedExprToTree(quotedExpr) @@ -1302,14 +1299,14 @@ class TreeUnpickler(reader: TastyReader, /** Pickled span for `addr`. */ def spanAt(addr: Addr)(implicit ctx: Context): Span = - if (ctx.mode.is(Mode.ReadPositions)) { + if (ctx.mode.is(Mode.ReadPositions)) posUnpicklerOpt match { case Some(posUnpickler) => posUnpickler.spanAt(addr) case _ => NoSpan } - } else NoSpan + else NoSpan /** Coordinate for the symbol at `addr`. */ def coordAt(addr: Addr)(implicit ctx: Context): Coord = { @@ -1322,14 +1319,14 @@ class TreeUnpickler(reader: TastyReader, /** Pickled source path at `addr`. */ def sourcePathAt(addr: Addr)(implicit ctx: Context): String = - if (ctx.mode.is(Mode.ReadPositions)) { + if (ctx.mode.is(Mode.ReadPositions)) posUnpicklerOpt match { case Some(posUnpickler) => posUnpickler.sourcePathAt(addr) case _ => "" } - } else "" + else "" /** If currentAddr carries a source path, the current context with * the source of that path, otherwise the current context itself. @@ -1394,23 +1391,23 @@ class TreeUnpickler(reader: TastyReader, def findOwner(addr: Addr)(implicit ctx: Context): Symbol = { def search(cs: List[OwnerTree], current: Symbol): Symbol = try cs match { - case ot :: cs1 => - if (ot.addr.index == addr.index) { - assert(current.exists, i"no symbol at $addr") - current - } - else if (ot.addr.index < addr.index && addr.index < ot.end.index) - search(ot.children, reader.symbolAt(ot.addr)) - else - search(cs1, current) - case Nil => - throw new TreeWithoutOwner - } - catch { - case ex: TreeWithoutOwner => - pickling.println(i"no owner for $addr among $cs%, %") - throw ex - } + case ot :: cs1 => + if (ot.addr.index == addr.index) { + assert(current.exists, i"no symbol at $addr") + current + } + else if (ot.addr.index < addr.index && addr.index < ot.end.index) + search(ot.children, reader.symbolAt(ot.addr)) + else + search(cs1, current) + case Nil => + throw new TreeWithoutOwner + } + catch { + case ex: TreeWithoutOwner => + pickling.println(i"no owner for $addr among $cs%, %") + throw ex + } try search(children, NoSymbol) catch { case ex: TreeWithoutOwner => diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala index 821501d79bef..6d06a74188af 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala @@ -111,7 +111,8 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { b = readByte() x = (x << 7) + (b & 0x7f) (b & 0x80) != 0L - }) () + }) + () x } diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala index 634faeeee0d4..a9a64a9bb7ee 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala @@ -218,10 +218,10 @@ object PickleFormat { final val firstSymTag = NONEsym final val lastSymTag = VALsym final val lastExtSymTag = EXTMODCLASSref +} //The following two are no 
longer accurate, because ANNOTATEDtpe, //SUPERtpe, ... are not in the same range as the other types //final val firstTypeTag = NOtpe //final val lastTypeTag = POLYtpe -} diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 3e9c2635cad8..7fec2fd5343e 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -88,12 +88,13 @@ object Scala2Unpickler { tp.derivedLambdaType(tp.paramNames, tp.paramInfos, arrayToRepeated(tp.resultType)) } - def ensureConstructor(cls: ClassSymbol, scope: Scope)(implicit ctx: Context): Unit = + def ensureConstructor(cls: ClassSymbol, scope: Scope)(implicit ctx: Context): Unit = { if (scope.lookup(nme.CONSTRUCTOR) == NoSymbol) { val constr = ctx.newDefaultConstructor(cls) addConstructorTypeParams(constr) cls.enter(constr, scope) } + } def setClassInfo(denot: ClassDenotation, info: Type, fromScala2: Boolean, selfInfo: Type = NoType)(implicit ctx: Context): Unit = { val cls = denot.classSymbol @@ -149,12 +150,11 @@ object Scala2Unpickler { class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot: ClassDenotation)(ictx: Context) extends PickleBuffer(bytes, 0, -1) with ClassfileParser.Embedded { - def showPickled(): Unit = { + def showPickled(): Unit = atReadPos(0, () => { println(s"classRoot = ${classRoot.debugString}, moduleClassRoot = ${moduleClassRoot.debugString}") util.ShowPickled.printFile(this) }) - } // print("unpickling "); showPickled() // !!! DEBUG @@ -215,7 +215,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas readIndex = index(i) readSymbolAnnotation() readIndex = savedIndex - } else if (isChildrenEntry(i)) { + } + else if (isChildrenEntry(i)) { val savedIndex = readIndex readIndex = index(i) readChildren() @@ -224,7 +225,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } i += 1 } - } catch { + } + catch { case ex: RuntimeException => handleRuntimeException(ex) } @@ -386,7 +388,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas def slowSearch(name: Name): Symbol = owner.info.decls.find(_.name == name) - def nestedObjectSymbol: Symbol = { + def nestedObjectSymbol: Symbol = // If the owner is overloaded (i.e. a method), it's not possible to select the // right member, so return NoSymbol. This can only happen when unpickling a tree. // the "case Apply" in readTree() takes care of selecting the correct alternative @@ -398,14 +400,14 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val module = owner.info.decl(name.toTermName).suchThat(_.is(Module)) module.info // force it, as completer does not yet point to module class. module.symbol.moduleClass + } /* was: val moduleVar = owner.info.decl(name.toTermName.moduleVarName).symbol if (moduleVar.isLazyAccessor) return moduleVar.lazyAccessor.lazyAccessor */ - } else NoSymbol - } + else NoSymbol // println(s"read ext symbol $name from ${owner.denot.debugString} in ${classRoot.debugString}") // !!! 
DEBUG @@ -455,11 +457,10 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } name = name.adjustIfModuleClass(flags) - if (flags.is(Method)) { + if (flags.is(Method)) name = if (name == nme.TRAIT_CONSTRUCTOR) nme.CONSTRUCTOR else name.asTermName.unmangle(Scala2MethodNameKinds) - } if ((flags.is(Scala2ExpandedName))) { name = name.unmangle(ExpandedName) flags = flags &~ Scala2ExpandedName @@ -496,9 +497,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } def finishSym(sym: Symbol): Symbol = { - if (sym.isClass) { + if (sym.isClass) sym.setFlag(Scala2x) - } if (!(isRefinementClass(sym) || isUnpickleRoot(sym) || sym.is(Scala2Existential))) { val owner = sym.owner if (owner.isClass) @@ -589,15 +589,14 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else if (!denot.is(Param)) tp1.underlyingIfRepeated(isJava = false) else tp1 if (denot.isConstructor) addConstructorTypeParams(denot) - if (atEnd) { + if (atEnd) assert(!denot.isSuperAccessor, denot) - } else { + else { assert(denot.is(ParamAccessor) || denot.isSuperAccessor, denot) - def disambiguate(alt: Symbol) = { // !!! DEBUG + def disambiguate(alt: Symbol) = // !!! DEBUG trace.onDebug(s"disambiguating ${denot.info} =:= ${denot.owner.thisType.memberInfo(alt)} ${denot.owner}") { denot.info matches denot.owner.thisType.memberInfo(alt) } - } val alias = readDisambiguatedSymbolRef(disambiguate).asTerm denot.addAnnotation(Annotation.makeAlias(alias)) } @@ -607,7 +606,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas atReadPos(startCoord(denot).toIndex, () => parseToCompletion(denot)( ctx.addMode(Mode.Scala2Unpickling).withPhaseNoLater(ctx.picklerPhase))) - } catch { + } + catch { case ex: RuntimeException => handleRuntimeException(ex) } } @@ -621,7 +621,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas if (tag == POLYtpe) { val unusedRestpeRef = readNat() until(end, () => readSymbolRef()(ctx)).asInstanceOf[List[TypeSymbol]] - } else Nil + } + else Nil } private def loadTypeParams(implicit ctx: Context) = atReadPos(index(infoRef), () => readTypeParams()(ctx)) @@ -651,7 +652,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas // Need to be careful not to run into cyclic references here (observed when // compiling t247.scala). That's why we avoid taking `symbol` of a TypeRef // unless names match up. 
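The hunk that follows moves the opening brace of the `isBound` function literal so that the parameter sits inside the block (`{ (tp: Type) => ... }` rather than `(tp: Type) => { ... }`). The two forms are equivalent; the block-first form reads better when the body spans several lines. A small sketch with a hypothetical predicate, not code from this patch:

object ClosureStyleExample {
  // Parameter list outside the block: the literal's body is a block expression.
  val isEvenOld: Int => Boolean = (n: Int) => {
    val remainder = n % 2
    remainder == 0
  }

  // Parameter list inside the block, as rewritten below: the whole literal is one block.
  val isEvenNew: Int => Boolean = { (n: Int) =>
    val remainder = n % 2
    remainder == 0
  }
}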
- val isBound = (tp: Type) => { + val isBound = { (tp: Type) => def refersTo(tp: Type, sym: Symbol): Boolean = tp match { case tp: TypeRef => sym.name == tp.name && sym == tp.symbol case tp: TypeVar => refersTo(tp.underlying, sym) @@ -701,7 +702,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes) ctx.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms)) tp2 - } else tp1 + } + else tp1 } /** Read type ref, mapping a TypeRef to a package to the package's ThisType @@ -813,7 +815,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas if (tag == POLYtpe) { val unusedRestperef = readNat() until(end, () => readSymbolRef()) - } else Nil + } + else Nil } def noSuchTypeTag(tag: Int, end: Int)(implicit ctx: Context): Type = @@ -935,7 +938,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val name = at(argref, () => readName()) val arg = readClassfileAnnotArg(readNat()) NamedArg(name.asTermName, arg) - } else readAnnotArg(argref) + } + else readAnnotArg(argref) } } t.toList @@ -1006,9 +1010,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas name = readNameRef() } /** Read a Symbol */ - def setSym(): Unit = { + def setSym(): Unit = symbol = readSymbolRef() - } implicit val span: Span = NoSpan @@ -1297,5 +1300,5 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case other => errorBadSignature("expected an TypeDef (" + other + ")") } - } + diff --git a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala index b526480e088b..c83990509372 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala @@ -19,7 +19,7 @@ class DecompilationPrinter extends Phase { override def phaseName: String = "decompilationPrinter" - override def run(implicit ctx: Context): Unit = { + override def run(implicit ctx: Context): Unit = if (ctx.settings.outputDir.isDefault) printToOutput(System.out) else { val outputDir = ctx.settings.outputDir.value @@ -29,18 +29,18 @@ class DecompilationPrinter extends Phase { os = File(outputDir.fileNamed("decompiled.scala").path)(Codec.UTF8).outputStream(append = true) ps = new PrintStream(os, /* autoFlush = */ false, "UTF-8") printToOutput(ps) - } finally { + } + finally { if (os ne null) os.close() if (ps ne null) ps.close() } } - } private def printToOutput(out: PrintStream)(implicit ctx: Context): Unit = { val unit = ctx.compilationUnit - if (ctx.settings.printTasty.value) { + if (ctx.settings.printTasty.value) println(new TastyPrinter(unit.pickled.head._2).printContents()) - } else { + else { val unitFile = unit.source.toString.replace("\\", "/").replace(".class", ".tasty") out.println(s"/** Decompiled from $unitFile */") out.println(ReflectionImpl.showTree(unit.tpdTree)) diff --git a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala index f2d9adca1e50..1cd6602cd40f 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala @@ -43,12 +43,11 @@ object Debug { source.substring(fromSourcesOut.toString.length + 1, source.length - x.extension.length - 1).replace('/', '.') }.toList - val fromTastyArgs = { + val fromTastyArgs = "-from-tasty" :: "-d" :: 
fromTastyOut.toString :: insertClasspathInArgs(args.filterNot(_.endsWith(".scala")).toList, fromSourcesOut.toString) ::: classes - } println("Compiling TASTY from .class sources") val compilation2 = dotc.Main.process(fromTastyArgs.toArray) diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala index d67175137925..a8f4dfce7302 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala @@ -36,7 +36,7 @@ object TastyFileUtil { val classpath = path.toString.replace(classInPath, "") (classpath, fullName) } - } - } + + diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index f1de66c5045d..7ffdc53b8dae 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -56,7 +56,7 @@ object Completion { * * Otherwise, provide no completion suggestion. */ - private def completionMode(path: List[Tree], pos: SourcePosition): Mode = { + private def completionMode(path: List[Tree], pos: SourcePosition): Mode = path match { case (ref: RefTree) :: _ => if (ref.name.isTermName) Mode.Term @@ -73,13 +73,12 @@ object Completion { case _ => Mode.None } - } /** * Inspect `path` to determine the completion prefix. Only symbols whose name start with the * returned prefix should be considered. */ - private def completionPrefix(path: List[Tree], pos: SourcePosition): String = { + private def completionPrefix(path: List[Tree], pos: SourcePosition): String = path match { case Thicket(name :: _ :: Nil) :: (_: Import) :: _ => completionPrefix(name :: Nil, pos) @@ -96,15 +95,13 @@ object Completion { case _ => "" } - } /** Inspect `path` to determine the offset where the completion result should be inserted. */ - private def completionOffset(path: List[Tree]): Int = { + private def completionOffset(path: List[Tree]): Int = path match { case (ref: RefTree) :: _ => ref.span.point case _ => 0 } - } /** Create a new `CompletionBuffer` for completing at `pos`. */ private def completionBuffer(path: List[Tree], pos: SourcePosition): CompletionBuffer = { @@ -118,14 +115,13 @@ object Completion { val offset = completionOffset(path) val buffer = completionBuffer(path, pos) - if (buffer.mode != Mode.None) { + if (buffer.mode != Mode.None) path match { case Select(qual, _) :: _ => buffer.addMemberCompletions(qual) case Import(_, expr, _) :: _ => buffer.addMemberCompletions(expr) // TODO: distinguish given from plain imports case (_: Thicket) :: Import(_, expr, _) :: _ => buffer.addMemberCompletions(expr) case _ => buffer.addScopeCompletions } - } val completionList = buffer.getCompletions @@ -164,7 +160,7 @@ object Completion { * * When there are multiple symbols, show their kinds. 
*/ - private def description(symbols: List[Symbol])(implicit ctx: Context): String = { + private def description(symbols: List[Symbol])(implicit ctx: Context): String = symbols match { case sym :: Nil => if (sym.isType) sym.showFullName @@ -176,7 +172,6 @@ object Completion { case Nil => "" } - } /** * Add symbols that are currently in scope to `info`: the members of the current class and the @@ -211,12 +206,11 @@ object Completion { def addMemberCompletions(qual: Tree)(implicit ctx: Context): Unit = { if (!qual.tpe.widenDealias.isBottomType) { addAccessibleMembers(qual.tpe) - if (!mode.is(Mode.Import) && !qual.tpe.isRef(defn.NullClass)) { + if (!mode.is(Mode.Import) && !qual.tpe.isRef(defn.NullClass)) // Implicit conversions do not kick in when importing // and for `NullClass` they produce unapplicable completions (for unclear reasons) implicitConversionTargets(qual)(ctx.fresh.setExploreTyperState()) .foreach(addAccessibleMembers) - } } } @@ -224,19 +218,19 @@ object Completion { * If `sym` exists, no symbol with the same name is already included, and it satisfies the * inclusion filter, then add it to the completions. */ - private def add(sym: Symbol, nameInScope: Name)(implicit ctx: Context) = + private def add(sym: Symbol, nameInScope: Name)(implicit ctx: Context) = { if (sym.exists && completionsFilter(NoType, nameInScope) && !completions.lookup(nameInScope).exists && - include(sym, nameInScope)) { + include(sym, nameInScope)) completions.enter(sym, nameInScope) - } + } /** Lookup members `name` from `site`, and try to add them to the completion list. */ - private def addMember(site: Type, name: Name, nameInScope: Name)(implicit ctx: Context) = - if (!completions.lookup(nameInScope).exists) { + private def addMember(site: Type, name: Name, nameInScope: Name)(implicit ctx: Context) = { + if (!completions.lookup(nameInScope).exists) for (alt <- site.member(name).alternatives) add(alt.symbol, nameInScope) - } + } /** Include in completion sets only symbols that * 1. start with given name prefix, and @@ -299,9 +293,8 @@ object Completion { addMember(imp.site, name.toTypeName, nameInScope.toTypeName) } imp.reverseMapping.foreachBinding { (nameInScope, original) => - if (original != nameInScope || !imp.excluded.contains(original)) { + if (original != nameInScope || !imp.excluded.contains(original)) addImport(original, nameInScope) - } } if (imp.isWildcardImport) for (mbr <- accessibleMembers(imp.site) if !imp.excluded.contains(mbr.name.toTermName)) @@ -325,11 +318,10 @@ object Completion { } /** Filter for names that should appear when looking for completions. */ - private[this] object completionsFilter extends NameFilter { - def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = - !name.isConstructorName && name.toTermName.info.kind == SimpleNameKind - } - + private[this] object completionsFilter extends NameFilter { + def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = + !name.isConstructorName && name.toTermName.info.kind == SimpleNameKind + } } /** @@ -354,23 +346,23 @@ object Completion { val Import: Mode = new Mode(4) | Term | Type } - /** A scope that tracks renames of the entered symbols. - * Useful for providing completions for renamed symbols - * in the REPL and the IDE. - */ - private class RenameAwareScope extends Scopes.MutableScope { - private[this] val nameToSymbols: mutable.Map[TermName, List[Symbol]] = mutable.Map.empty - - /** Enter the symbol `sym` in this scope, recording a potential renaming. 
*/ - def enter[T <: Symbol](sym: T, name: Name)(implicit ctx: Context): T = { - val termName = name.stripModuleClassSuffix.toTermName - nameToSymbols += termName -> (sym :: nameToSymbols.getOrElse(termName, Nil)) - newScopeEntry(name, sym) - sym - } - - /** Get the names that are known in this scope, along with the list of symbols they refer to. */ - def mappings: Map[TermName, List[Symbol]] = nameToSymbols.toMap - } + /** A scope that tracks renames of the entered symbols. + * Useful for providing completions for renamed symbols + * in the REPL and the IDE. + */ + private class RenameAwareScope extends Scopes.MutableScope { + private[this] val nameToSymbols: mutable.Map[TermName, List[Symbol]] = mutable.Map.empty + + /** Enter the symbol `sym` in this scope, recording a potential renaming. */ + def enter[T <: Symbol](sym: T, name: Name)(implicit ctx: Context): T = { + val termName = name.stripModuleClassSuffix.toTermName + nameToSymbols += termName -> (sym :: nameToSymbols.getOrElse(termName, Nil)) + newScopeEntry(name, sym) + sym + } + /** Get the names that are known in this scope, along with the list of symbols they refer to. */ + def mappings: Map[TermName, List[Symbol]] = nameToSymbols.toMap + } } + diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala index 46e482f693bc..d88e0248b05f 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala @@ -104,15 +104,16 @@ object Interactive { val funSym = fn.symbol if (funSym.name == StdNames.nme.copy && funSym.is(Synthetic) - && funSym.owner.is(CaseClass)) { + && funSym.owner.is(CaseClass)) List(funSym.owner.info.member(name).symbol) - } else { + else { val classTree = funSym.topLevelClass.asClass.rootTree val paramSymbol = for { DefDef(_, _, paramss, _, _) <- tpd.defPath(funSym, classTree).lastOption param <- paramss.flatten.find(_.name == name) - } yield param.symbol + } + yield param.symbol List(paramSymbol.getOrElse(fn.symbol)) } @@ -179,7 +180,7 @@ object Interactive { )(implicit ctx: Context): List[SourceTree] = safely { val buf = new mutable.ListBuffer[SourceTree] - def traverser(source: SourceFile) = { + def traverser(source: SourceFile) = new untpd.TreeTraverser { private def handle(utree: untpd.NameTree): Unit = { val tree = utree.asInstanceOf[tpd.NameTree] @@ -192,7 +193,7 @@ object Interactive { && treePredicate(tree)) buf += SourceTree(tree, source) } - override def traverse(tree: untpd.Tree)(implicit ctx: Context) = { + override def traverse(tree: untpd.Tree)(implicit ctx: Context) = tree match { case imp: untpd.Import if include.isImports && tree.hasType => val tree = imp.asInstanceOf[tpd.Import] @@ -210,9 +211,7 @@ object Interactive { case _ => traverseChildren(tree) } - } } - } trees.foreach(t => traverser(t.source).traverse(t.tree)) @@ -404,20 +403,20 @@ object Interactive { * @param sym The symbol whose implementations to find. * @return A function that determines whether a `NameTree` is an implementation of `sym`. 
*/ - def implementationFilter(sym: Symbol)(implicit ctx: Context): NameTree => Boolean = { + def implementationFilter(sym: Symbol)(implicit ctx: Context): NameTree => Boolean = if (sym.isClass) { case td: TypeDef => val treeSym = td.symbol (treeSym != sym || !treeSym.isOneOf(AbstractOrTrait)) && treeSym.derivesFrom(sym) case _ => false - } else { + } + else { case md: MemberDef => matchSymbol(md, sym, Include.overriding) && !md.symbol.is(Deferred) case _ => false } - } /** * Is this tree using a renaming introduced by an import statement or an alias for `this`? @@ -432,11 +431,10 @@ object Interactive { } /** Are the two names the same? */ - def sameName(n0: Name, n1: Name): Boolean = { + def sameName(n0: Name, n1: Name): Boolean = n0.stripModuleClassSuffix.toTermName eq n1.stripModuleClassSuffix.toTermName - } private[interactive] def safely[T](op: => List[T]): List[T] = try op catch { case ex: TypeError => Nil } - } + diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala index 7d623b296dc2..179d21bafdf8 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala @@ -105,11 +105,10 @@ class InteractiveDriver(val settings: List[String]) extends Driver { val fromCompilationOutput = { val classNames = new mutable.ListBuffer[TypeName] val output = ctx.settings.outputDir.value - if (output.isDirectory) { + if (output.isDirectory) classesFromDir(output.jpath, classNames) - } else { + else classesFromZip(output.file, classNames) - } classNames.flatMap { cls => treesFromClassName(cls, id) } @@ -228,7 +227,7 @@ class InteractiveDriver(val settings: List[String]) extends Driver { } /** Adds the names of the classes that are defined in `dir` to `buffer`. 
*/ - private def classesFromDir(dir: Path, buffer: mutable.ListBuffer[TypeName]): Unit = { + private def classesFromDir(dir: Path, buffer: mutable.ListBuffer[TypeName]): Unit = try Files.walkFileTree(dir, new SimpleFileVisitor[Path] { override def visitFile(path: Path, attrs: BasicFileAttributes) = { @@ -237,7 +236,8 @@ class InteractiveDriver(val settings: List[String]) extends Driver { for { tastySuffix <- tastySuffixes if name.endsWith(tastySuffix) - } { + } + { buffer += dir.relativize(path).toString.replace("/", ".").stripSuffix(tastySuffix).toTypeName } } @@ -247,7 +247,6 @@ class InteractiveDriver(val settings: List[String]) extends Driver { catch { case _: NoSuchFileException => } - } private def topLevelTrees(topTree: Tree, source: SourceFile): List[SourceTree] = { val trees = new mutable.ListBuffer[SourceTree] @@ -317,22 +316,22 @@ class InteractiveDriver(val settings: List[String]) extends Driver { myCtx = run.runContext run.compileUnits(Nil, myCtx) } - } + object InteractiveDriver { def toUriOption(file: AbstractFile): Option[URI] = if (!file.exists) None else - try { + try // We don't use file.file here since it'll be null // for the VirtualFiles created by InteractiveDriver#toSource // TODO: To avoid these round trip conversions, we could add an // AbstractFile#toUri method and implement it by returning a constant // passed as a parameter to a constructor of VirtualFile Some(Paths.get(file.path).toUri) - } catch { + catch { case e: InvalidPathException => None } diff --git a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala index 6782b458293d..a86b84e108fa 100644 --- a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala +++ b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala @@ -51,7 +51,7 @@ case class SourceTree(tree: tpd.Import | tpd.NameTree, source: SourceFile) { } object SourceTree { - def fromSymbol(sym: ClassSymbol, id: String = "")(implicit ctx: Context): List[SourceTree] = { + def fromSymbol(sym: ClassSymbol, id: String = "")(implicit ctx: Context): List[SourceTree] = if (sym == defn.SourceFileAnnot || // FIXME: No SourceFile annotation on SourceFile itself !sym.source.exists) // FIXME: We cannot deal with external projects yet Nil @@ -78,5 +78,4 @@ object SourceTree { case None => Nil } } - } } diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala index 8e4d524dda8b..c5d0e35a4362 100644 --- a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala +++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala @@ -27,9 +27,6 @@ abstract class CharArrayReader { self => /** The start offset of the current line */ var lineStartOffset: Int = startFrom - /** The start offset of the line before the current one */ - var lastLineStartOffset: Int = startFrom - private[this] var lastUnicodeOffset = -1 /** Is last character a unicode escape \\uxxxx? 
*/ @@ -40,9 +37,9 @@ abstract class CharArrayReader { self => val idx = charOffset lastCharOffset = idx charOffset = idx + 1 - if (idx >= buf.length) { + if (idx >= buf.length) ch = SU - } else { + else { val c = buf(idx) ch = c if (c == '\\') potentialUnicode() @@ -60,9 +57,9 @@ abstract class CharArrayReader { self => val idx = charOffset lastCharOffset = idx charOffset = idx + 1 - if (idx >= buf.length) { + if (idx >= buf.length) ch = SU - } else { + else { val c = buf(idx) ch = c if (c == '\\') potentialUnicode() @@ -76,7 +73,7 @@ abstract class CharArrayReader { self => while (p >= 0 && buf(p) == '\\') p -= 1 (charOffset - p) % 2 == 0 } - def udigit: Int = { + def udigit: Int = if (charOffset >= buf.length) { // Since the positioning code is very insistent about throwing exceptions, // we have to decrement the position so our error message can be seen, since @@ -90,12 +87,12 @@ abstract class CharArrayReader { self => else error("error in unicode escape", charOffset) d } - } if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) { - while { + while ({ charOffset += 1 charOffset < buf.length && buf(charOffset) == 'u' - } do () + }) + () val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit lastUnicodeOffset = charOffset ch = code.toChar @@ -104,19 +101,17 @@ abstract class CharArrayReader { self => /** replace CR;LF by LF */ private def skipCR(): Unit = { - if (ch == CR) + if (ch == CR) { if (charOffset < buf.length && buf(charOffset) == LF) { charOffset += 1 ch = LF } + } } /** Handle line ends */ private def potentialLineEnd(): Unit = { - if (ch == LF || ch == FF) { - lastLineStartOffset = lineStartOffset - lineStartOffset = charOffset - } + if (ch == LF || ch == FF) lineStartOffset = charOffset } def isAtEnd: Boolean = charOffset >= buf.length diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index d8648fd84f39..1f7280ecad94 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -73,9 +73,8 @@ object JavaParsers { } } - def syntaxError(msg: String, skipIt: Boolean): Unit = { + def syntaxError(msg: String, skipIt: Boolean): Unit = syntaxError(in.offset, msg, skipIt) - } def syntaxError(offset: Int, msg: String, skipIt: Boolean): Unit = { if (offset > lastErrorOffset) { @@ -158,16 +157,15 @@ object JavaParsers { case _ => } in.token != EOF && (nparens > 0 || nbraces > 0) - }) () + }) + () } - def skipTo(tokens: Int*): Unit = { - while (!(tokens contains in.token) && in.token != EOF) { + def skipTo(tokens: Int*): Unit = + while (!(tokens contains in.token) && in.token != EOF) if (in.token == LBRACE) { skipAhead(); accept(RBRACE) } else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) } else in.nextToken() - } - } /** Consume one token of the specified type, or * signal an error if it is not there. 
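// A minimal sketch of the balanced-skip recovery used by `skipAhead`/`skipTo` above,
// assuming a made-up `Tok` enum in place of the scanner's integer token codes:
// tokens are dropped until the requested delimiter, and nested { ... } regions are
// skipped as a whole so recovery does not stop inside them.
object SkipSketch {
  enum Tok { case LBrace, RBrace, Semi, Other }
  import Tok._

  /** Drop tokens up to and including the first `Semi` that is not nested in braces. */
  def skipToSemi(ts: List[Tok], depth: Int = 0): List[Tok] = ts match {
    case Nil                        => Nil
    case Semi :: rest if depth == 0 => rest
    case LBrace :: rest             => skipToSemi(rest, depth + 1)
    case RBrace :: rest             => skipToSemi(rest, math.max(depth - 1, 0))
    case _ :: rest                  => skipToSemi(rest, depth)
  }
}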
@@ -206,7 +204,8 @@ object JavaParsers { val name = in.name in.nextToken() name - } else { + } + else { accept(IDENTIFIER) nme.ERROR } @@ -257,7 +256,8 @@ object JavaParsers { in.nextToken() accept(RBRACKET) optArrayBrackets(tpt1) - } else tpt + } + else tpt def basicType(): Tree = atSpan(in.offset) { @@ -290,9 +290,9 @@ object JavaParsers { t = typeArgs(atSpan(t.span.start, in.offset)(typeSelect(t, ident()))) } convertToTypeId(t) - } else { - basicType() } + else + basicType() } def typeArgs(t: Tree): Tree = { @@ -313,9 +313,9 @@ object JavaParsers { */ TypeBoundsTree(lo, hi) } - } else { - typ() } + else + typ() if (in.token == LT) { in.nextToken() val t1 = convertToTypeId(t) @@ -324,7 +324,8 @@ object JavaParsers { atSpan(t1.span.start) { AppliedTypeTree(t1, args) } - } else t + } + else t } def annotations(): List[Tree] = { @@ -355,7 +356,7 @@ object JavaParsers { New(TypeTree(sym.typeRef)) } - while (true) { + while (true) in.token match { case AT if (in.lookaheadToken != INTERFACE) => in.nextToken() @@ -397,7 +398,6 @@ object JavaParsers { return Modifiers(flags, privateWithin) withAnnotations annots } - } assert(false, "should not be here") throw new RuntimeException } @@ -408,7 +408,8 @@ object JavaParsers { val tparams = repsep(() => typeParam(flags), COMMA) acceptClosingAngle() tparams - } else List() + } + else List() def typeParam(flags: FlagSet): TypeDef = atSpan(in.offset) { @@ -493,7 +494,8 @@ object JavaParsers { List(vparams), TypeTree(), methodBody()).withMods(mods) } } - } else { + } + else { var mods1 = mods if (mods.is(Flags.Abstract)) mods1 = mods &~ Flags.Abstract nameOffset = in.offset @@ -505,9 +507,9 @@ object JavaParsers { optThrows() val bodyOk = !inInterface || (mods.is(Flags.DefaultMethod)) val body = - if (bodyOk && in.token == LBRACE) { + if (bodyOk && in.token == LBRACE) methodBody() - } else { + else if (parentToken == AT && in.token == DEFAULT) { val annot = atSpan(nameOffset) { @@ -518,18 +520,19 @@ object JavaParsers { skipTo(SEMI) accept(SEMI) unimplemented - } else { + } + else { accept(SEMI) EmptyTree } - } //if (inInterface) mods1 |= Flags.Deferred List { atSpan(start, nameOffset) { DefDef(name.toTermName, tparams, List(vparams), rtpt, body).withMods(mods1 | Flags.Method) } } - } else { + } + else { if (inInterface) mods1 |= Flags.Final | Flags.JavaStatic val result = fieldDecls(start, nameOffset, mods1, rtpt, name) accept(SEMI) @@ -554,28 +557,29 @@ object JavaParsers { while (in.token == COMMA) { in.nextToken() if (in.token == IDENTIFIER) { // if there's an ident after the comma ... - val nextNameOffset = in.offset - val name = ident() + val nextNameOffset = in.offset + val name = ident() if (in.token == EQUALS || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition buf ++= maybe buf += atSpan(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) } maybe.clear() - } else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not. + } + else if (in.token == COMMA) // ... if there's a comma after the ident, it could be a real vardef or not. maybe += atSpan(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) } - } else { // ... if there's something else we were still in the initializer of the + else { // ... if there's something else we were still in the initializer of the // previous var def; skip to next comma or semicolon. skipTo(COMMA, SEMI) maybe.clear() } - } else { // ... 
if there's no ident following the comma we were still in the initializer of the + } + else { // ... if there's no ident following the comma we were still in the initializer of the // previous var def; skip to next comma or semicolon. skipTo(COMMA, SEMI) maybe.clear() } } - if (in.token == SEMI) { + if (in.token == SEMI) buf ++= maybe // every potential vardef that survived until here is real. - } buf.toList } @@ -632,21 +636,22 @@ object JavaParsers { val start = in.offset accept(IMPORT) val buf = new ListBuffer[Name] - def collectIdents() : Int = { + def collectIdents() : Int = if (in.token == ASTERISK) { val starOffset = in.offset in.nextToken() buf += nme.WILDCARD starOffset - } else { + } + else { val nameOffset = in.offset buf += ident() if (in.token == DOT) { in.nextToken() collectIdents() - } else nameOffset + } + else nameOffset } - } if (in.token == STATIC) in.nextToken() else buf += nme.ROOTPKG val lastnameOffset = collectIdents() @@ -655,7 +660,8 @@ object JavaParsers { if (names.length < 2) { syntaxError(start, "illegal import", skipIt = false) List() - } else { + } + else { val qual = names.tail.init.foldLeft(Ident(names.head): Tree)(Select(_, _)) val lastname = names.last val ident = Ident(lastname).withSpan(Span(lastnameOffset)) @@ -672,9 +678,9 @@ object JavaParsers { if (in.token == IMPLEMENTS) { in.nextToken() repsep(() => typ(), COMMA) - } else { - List() } + else + List() def classDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(CLASS) @@ -685,9 +691,9 @@ object JavaParsers { if (in.token == EXTENDS) { in.nextToken() typ() - } else { - javaLangObject() } + else + javaLangObject() val interfaces = interfacesOpt() val (statics, body) = typeBody(CLASS, name, tparams) val cls = atSpan(start, nameOffset) { @@ -705,9 +711,9 @@ object JavaParsers { if (in.token == EXTENDS) { in.nextToken() repsep(() => typ(), COMMA) - } else { - List(javaLangObject()) } + else + List(javaLangObject()) val (statics, body) = typeBody(INTERFACE, name, tparams) val iface = atSpan(start, nameOffset) { TypeDef( @@ -734,9 +740,10 @@ object JavaParsers { if (in.token == LBRACE) { skipAhead() // skip init block, we just assume we have seen only static accept(RBRACE) - } else if (in.token == SEMI) { + } + else if (in.token == SEMI) in.nextToken() - } else { + else { if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.JavaStatic val decls = memberDecl(start, mods, parentToken, parentTParams) (if (mods.is(Flags.JavaStatic) || inInterface && !(decls exists (_.isInstanceOf[DefDef]))) @@ -803,9 +810,9 @@ object JavaParsers { if (in.token == SEMI) { in.nextToken() typeBodyDecls(ENUM, name, List()) - } else { - (List(), List()) } + else + (List(), List()) val predefs = List( DefDef( nme.values, List(), @@ -868,9 +875,9 @@ object JavaParsers { val pkg = qualId() accept(SEMI) pkg - } else { - Ident(nme.EMPTY_PACKAGE) } + else + Ident(nme.EMPTY_PACKAGE) thisPackageName = convertToTypeName(pkg) match { case Some(t) => t.name.toTypeName case _ => tpnme.EMPTY diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index 81c8ff9b0d6f..7a8e17cbadf5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -28,7 +28,7 @@ object JavaScanners { // Get next token ------------------------------------------------------------ - def nextToken(): Unit = { + def nextToken(): Unit = if (next.token == EMPTY) { lastOffset = lastCharOffset fetchToken() @@ 
-37,7 +37,6 @@ object JavaScanners { this copyFrom next next.token = EMPTY } - } def lookaheadToken: Int = { prev copyFrom this @@ -80,9 +79,9 @@ object JavaScanners { if (ch == 'x' || ch == 'X') { nextChar() base = 16 - } else { - base = 8 } + else + base = 8 getNumber() case '1' | '2' | '3' | '4' | @@ -92,16 +91,15 @@ object JavaScanners { case '\"' => nextChar() - while (ch != '\"' && (isUnicodeEscape || ch != CR && ch != LF && ch != SU)) { + while (ch != '\"' && (isUnicodeEscape || ch != CR && ch != LF && ch != SU)) getlitch() - } if (ch == '\"') { token = STRINGLIT setStrVal() nextChar() - } else { - error("unclosed string literal") } + else + error("unclosed string literal") case '\'' => nextChar() @@ -110,9 +108,9 @@ object JavaScanners { nextChar() token = CHARLIT setStrVal() - } else { - error("unclosed character literal") } + else + error("unclosed character literal") case '=' => token = EQUALS @@ -128,13 +126,15 @@ object JavaScanners { if (ch == '=') { token = GTEQ nextChar() - } else if (ch == '>') { + } + else if (ch == '>') { token = GTGT nextChar() if (ch == '=') { token = GTGTEQ nextChar() - } else if (ch == '>') { + } + else if (ch == '>') { token = GTGTGT nextChar() if (ch == '=') { @@ -150,7 +150,8 @@ object JavaScanners { if (ch == '=') { token = LTEQ nextChar() - } else if (ch == '<') { + } + else if (ch == '<') { token = LTLT nextChar() if (ch == '=') { @@ -189,7 +190,8 @@ object JavaScanners { if (ch == '&') { token = AMPAMP nextChar() - } else if (ch == '=') { + } + else if (ch == '=') { token = AMPEQ nextChar() } @@ -200,7 +202,8 @@ object JavaScanners { if (ch == '|') { token = BARBAR nextChar() - } else if (ch == '=') { + } + else if (ch == '=') { token = BAREQ nextChar() } @@ -211,7 +214,8 @@ object JavaScanners { if (ch == '+') { token = PLUSPLUS nextChar() - } else if (ch == '=') { + } + else if (ch == '=') { token = PLUSEQ nextChar() } @@ -222,7 +226,8 @@ object JavaScanners { if (ch == '-') { token = MINUSMINUS nextChar() - } else if (ch == '=') { + } + else if (ch == '=') { token = MINUSEQ nextChar() } @@ -244,7 +249,8 @@ object JavaScanners { token = SLASHEQ nextChar() } - } else fetchToken() + } + else fetchToken() case '^' => token = HAT @@ -268,12 +274,14 @@ object JavaScanners { if ('0' <= ch && ch <= '9') { putChar('.'); getFraction() - } else if (ch == '.') { + } + else if (ch == '.') { nextChar() if (ch == '.') { nextChar() token = DOTDOTDOT - } else error("`.' character expected") + } + else error("`.' 
character expected") } case ';' => @@ -320,7 +328,8 @@ object JavaScanners { putChar(ch) nextChar() getIdentRest() - } else { + } + else { error("illegal character: " + ch.toInt) nextChar() } @@ -347,8 +356,8 @@ object JavaScanners { // Identifiers --------------------------------------------------------------- - private def getIdentRest(): Unit = { - while (true) { + private def getIdentRest(): Unit = + while (true) (ch: @switch) match { case 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | @@ -379,13 +388,12 @@ object JavaScanners { if (Character.isUnicodeIdentifierPart(ch)) { putChar(ch) nextChar() - } else { + } + else { finishNamed() return } } - } - } // Literals ----------------------------------------------------------------- @@ -407,7 +415,8 @@ object JavaScanners { } } putChar(oct.asInstanceOf[Char]) - } else { + } + else { ch match { case 'b' => putChar('\b') case 't' => putChar('\t') @@ -423,7 +432,8 @@ object JavaScanners { } nextChar() } - } else { + } + else { putChar(ch) nextChar() } @@ -440,9 +450,8 @@ object JavaScanners { if (ch == 'e' || ch == 'E') { val lookahead = lookaheadReader() lookahead.nextChar() - if (lookahead.ch == '+' || lookahead.ch == '-') { + if (lookahead.ch == '+' || lookahead.ch == '-') lookahead.nextChar() - } if ('0' <= lookahead.ch && lookahead.ch <= '9') { putChar(ch) nextChar() @@ -461,7 +470,8 @@ object JavaScanners { putChar(ch) nextChar() token = DOUBLELIT - } else if (ch == 'f' || ch == 'F') { + } + else if (ch == 'f' || ch == 'F') { putChar(ch) nextChar() token = FLOATLIT @@ -497,9 +507,8 @@ object JavaScanners { if (base <= 10 && (ch == 'e' || ch == 'E' || ch == 'f' || ch == 'F' || - ch == 'd' || ch == 'D')) { + ch == 'd' || ch == 'D')) return getFraction() - } setStrVal() if (ch == 'l' || ch == 'L') { nextChar() diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 81803ce86b9c..c4969b4e6b02 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -24,7 +24,7 @@ import ScriptParsers._ import Decorators._ import scala.internal.Chars import scala.annotation.{tailrec, switch} -import rewrites.Rewrites.patch +import rewrites.Rewrites.{patch, overlapsPatch} object Parsers { @@ -130,12 +130,13 @@ object Parsers { /** Issue an error at given offset if beyond last error offset * and update lastErrorOffset. */ - def syntaxError(msg: => Message, offset: Int = in.offset): Unit = + def syntaxError(msg: => Message, offset: Int = in.offset): Unit = { if (offset > lastErrorOffset) { val length = if (offset == in.offset && in.name != null) in.name.show.length else 0 syntaxError(msg, Span(offset, offset + length)) lastErrorOffset = in.offset } + } /** Unconditionally issue an error at given span, without * updating lastErrorOffset. 
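// The Parsers changes below extend brace handling to the new INDENT/OUTDENT tokens:
// during error recovery and in statement sequences, an indentation region is treated
// like a braced block. A minimal sketch of the two equivalent source forms this is
// meant to support (names are made up):
object IndentVsBraces {
  // braced form: the body is delimited by LBRACE ... RBRACE
  def abs1(x: Int): Int = {
    if (x > 0) x
    else -x
  }
  // indentation form: the same body is delimited by INDENT ... OUTDENT
  def abs2(x: Int): Int =
    if (x > 0) x
    else -x
}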
@@ -152,15 +153,14 @@ object Parsers { def skipBracesHook(): Option[Tree] def skipBraces(): Unit = { - accept(LBRACE) + accept(if (in.token == INDENT) INDENT else LBRACE) var openBraces = 1 - while (in.token != EOF && openBraces > 0) { + while (in.token != EOF && openBraces > 0) skipBracesHook() getOrElse { - if (in.token == LBRACE) openBraces += 1 - else if (in.token == RBRACE) openBraces -= 1 + if (in.token == LBRACE || in.token == INDENT) openBraces += 1 + else if (in.token == RBRACE || in.token == OUTDENT) openBraces -= 1 in.nextToken() } - } } } @@ -181,15 +181,15 @@ object Parsers { /* -------------- TOKEN CLASSES ------------------------------------------- */ - def isIdent: Boolean = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT - def isIdent(name: Name): Boolean = in.token == IDENTIFIER && in.name == name - def isSimpleLiteral: Boolean = simpleLiteralTokens contains in.token - def isLiteral: Boolean = literalTokens contains in.token - def isNumericLit: Boolean = numericLitTokens contains in.token - def isTemplateIntro: Boolean = templateIntroTokens contains in.token - def isDclIntro: Boolean = dclIntroTokens contains in.token - def isStatSeqEnd: Boolean = in.token == RBRACE || in.token == EOF - def mustStartStat: Boolean = mustStartStatTokens contains in.token + def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT + def isIdent(name: Name) = in.token == IDENTIFIER && in.name == name + def isSimpleLiteral = simpleLiteralTokens contains in.token + def isLiteral = literalTokens contains in.token + def isNumericLit = numericLitTokens contains in.token + def isTemplateIntro = templateIntroTokens contains in.token + def isDclIntro = dclIntroTokens contains in.token + def isStatSeqEnd = in.isNestedEnd || in.token == EOF + def mustStartStat = mustStartStatTokens contains in.token /** Is current token a hard or soft modifier (in modifier position or not)? */ def isModifier: Boolean = modifierTokens.contains(in.token) || in.isSoftModifier @@ -232,9 +232,10 @@ object Parsers { */ private[this] var lastStatOffset = -1 - def setLastStatOffset(): Unit = - if (mustStartStat && in.isAfterLineEnd()) + def setLastStatOffset(): Unit = { + if (mustStartStat && in.isAfterLineEnd) lastStatOffset = in.offset + } /** Is offset1 less or equally indented than offset2? 
* This is the case if the characters between the preceding end-of-line and offset1 @@ -276,15 +277,21 @@ object Parsers { if (openParens.count(LBRACKET) > 0 && skippedParens.nonePositive) return skippedParens.change(LBRACKET, -1) + case OUTDENT => + if (openParens.count(INDENT) > 0 && skippedParens.count(INDENT) == 0) + return + skippedParens.change(INDENT, -1) case LBRACE => - skippedParens.change(LBRACE, + 1) + skippedParens.change(LBRACE, +1) case LPAREN => - skippedParens.change(LPAREN, + 1) + skippedParens.change(LPAREN, +1) case LBRACKET=> - skippedParens.change(LBRACKET, + 1) + skippedParens.change(LBRACKET, +1) + case INDENT => + skippedParens.change(INDENT, +1) case _ => if (mustStartStat && - in.isAfterLineEnd() && + in.isAfterLineEnd && isLeqIndented(in.offset, lastStatOffset max 0)) return } @@ -323,9 +330,8 @@ object Parsers { */ def accept(token: Int): Int = { val offset = in.offset - if (in.token != token) { + if (in.token != token) syntaxErrorOrIncomplete(ExpectedTokenButFound(token, in.token)) - } if (in.token == token) in.nextToken() offset } @@ -338,7 +344,7 @@ object Parsers { case _ => accept(SEMI) } - def acceptStatSepUnlessAtEnd(altEnd: Token = EOF): Unit = + def acceptStatSepUnlessAtEnd(altEnd: Token = EOF): Unit = { if (!isStatSeqEnd) in.token match { case EOF => @@ -346,19 +352,19 @@ object Parsers { case NEWLINE | NEWLINES => in.nextToken() case SEMI => in.nextToken() case _ => - syntaxError("end of statement expected") + syntaxError(i"end of statement expected but $in found") in.nextToken() // needed to ensure progress; otherwise we might cycle forever accept(SEMI) } + } def rewriteNotice(additionalOption: String = "") = { val optionStr = if (additionalOption.isEmpty) "" else " " ++ additionalOption i"\nThis construct can be rewritten automatically under$optionStr -rewrite." } - def syntaxVersionError(option: String, span: Span) = { + def syntaxVersionError(option: String, span: Span) = syntaxError(em"""This construct is not allowed under $option.${rewriteNotice(option)}""", span) - } def rewriteToNewSyntax(span: Span = Span(in.offset)): Boolean = { if (in.newSyntax) { @@ -384,7 +390,8 @@ object Parsers { try { inFunReturnType = true body - } finally inFunReturnType = saved + } + finally inFunReturnType = saved } /** A flag indicating we are parsing in the annotations of a primary @@ -531,9 +538,21 @@ object Parsers { def inBraces[T](body: => T): T = enclosed(LBRACE, body) def inBrackets[T](body: => T): T = enclosed(LBRACKET, body) + def inBracesOrIndented[T](body: => T): T = + if (in.token == INDENT) { + val rewriteToBraces = + in.rewriteNoIndent && + !testChars(in.lastOffset - 3, " =>") // braces are always optional after `=>` so none should be inserted + if (rewriteToBraces) indentedToBraces(body) + else enclosed(INDENT, body) + } + else + if (in.rewriteToIndent) bracesToIndented(body) + else inBraces(body) + def inDefScopeBraces[T](body: => T): T = { val saved = lastStatOffset - try inBraces(body) + try inBracesOrIndented(body) finally lastStatOffset = saved } @@ -557,6 +576,11 @@ object Parsers { /* -------- REWRITES ----------------------------------------------------------- */ + /** The last offset where a colon at the end of line would be required if a subsequent { ... } + * block would be converted to an indentation region. + */ + var possibleColonOffset: Int = -1 + /** A list of pending patches, to be issued if we can rewrite all enclosing braces to * indentation regions. 
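// The rewrite machinery below patches the source text in place. `bracesToIndented`
// removes a brace pair whose `{` ends its line and whose `}` starts its line,
// inserting a `:` at the end of the opening line only where one is required, while
// `indentedToBraces` performs the inverse rewrite; both are invoked from
// `inBracesOrIndented` above, depending on the scanner's `rewriteToIndent` and
// `rewriteNoIndent` flags. A minimal sketch of the before/after shapes, with
// made-up names:
object RewriteSketch {
  // shape eligible for `bracesToIndented`: `{` at end of line, `}` at start of line
  def sum1(xs: List[Int]): Int = {
    var acc = 0
    for (x <- xs) acc += x
    acc
  }
  // what the rewrite leaves behind: the same body as an indentation region
  def sum2(xs: List[Int]): Int =
    var acc = 0
    for (x <- xs) acc += x
    acc
}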
*/ @@ -580,28 +604,144 @@ object Parsers { if (testChar(idx, c => c == ' ' || c == '\t' || c == Chars.CR)) skipBlanks(idx + step, step) else idx - def skipLineCommentsRightOf(idx: Int, column: Int): Int = { - val j = skipBlanks(idx) - if (testChar(j, '/') && testChar(j + 1, '/') && source.column(j) > column) - skipLineCommentsRightOf(source.nextLine(j), column) - else idx + /** Parse indentation region `body` and rewrite it to be in braces instead */ + def indentedToBraces[T](body: => T): T = { + val indentWidth = in.indent.enclosing.width + val followsColon = testChar(in.lastOffset - 1, ':') + val startOpening = + if (followsColon) + if (testChar(in.lastOffset - 2, ' ')) in.lastOffset - 2 + else in.lastOffset - 1 + else in.lastOffset + val endOpening = in.lastOffset + + val t = enclosed(INDENT, body) + + /** Is `expr` a tree that lacks a final `else`? Put such trees in `{...}` to make + * sure we don't accidentally merge them with a following `else`. + */ + def isPartialIf(expr: Tree): Boolean = expr match { + case If(_, _, EmptyTree) => true + case If(_, _, e) => isPartialIf(e) + case _ => false + } + + /** Is `expr` a (possibly curried) function that has a multi-statement block + * as body? Put such trees in `{...}` since we don't enclose statements following + * a `=>` in braces. + */ + def isBlockFunction[T](expr: T): Boolean = expr match { + case Function(_, body) => isBlockFunction(body) + case Block(stats, expr) => stats.nonEmpty || isBlockFunction(expr) + case _ => false + } + + /** Start of first line after in.lastOffset that does not have a comment + * at indent width greater than the indent width of the closing brace. + */ + def closingOffset(lineStart: Offset): Offset = + if (lineStart >= in.lineOffset) in.lineOffset + else { + val candidate = source.nextLine(lineStart) + val commentStart = skipBlanks(lineStart) + if (testChar(commentStart, '/') && indentWidth < in.indentWidth(commentStart)) + closingOffset(source.nextLine(lineStart)) + else + lineStart + } + + val needsBraces = t match { + case Block(Nil, expr) => followsColon || isPartialIf(expr) || isBlockFunction(expr) + case _ => true + } + if (needsBraces) { + patch(source, Span(startOpening, endOpening), " {") + patch(source, Span(closingOffset(source.nextLine(in.lastOffset))), indentWidth.toPrefix ++ "}\n") + } + t } - /** The region to eliminate when replacing a closing `)` or `}` that starts - * a new line + /** The region to eliminate when replacing an opening `(` or `{` that ends a line. + * The `(` or `{` is at in.offset. + */ + def startingElimRegion(colonRequired: Boolean): (Offset, Offset) = { + val skipped = skipBlanks(in.offset + 1) + if (in.isAfterLineEnd) + if (testChar(skipped, Chars.LF) && !colonRequired) + (in.lineOffset, skipped + 1) // skip the whole line + else + (in.offset, skipped) + else if (testChar(in.offset - 1, ' ')) (in.offset - 1, in.offset + 1) + else (in.offset, in.offset + 1) + } + + /** The region to eliminate when replacing a closing `)` or `}` that starts a new line + * The `)` or `}` precedes in.lastOffset. 
*/ def closingElimRegion(): (Offset, Offset) = { val skipped = skipBlanks(in.lastOffset) - if (testChar(skipped, Chars.LF)) // if `}` is on a line by itself + if (testChar(skipped, Chars.LF)) // if `)` or `}` is on a line by itself (source.startOfLine(in.lastOffset), skipped + 1) // skip the whole line else // else - (in.lastOffset - 1, skipped) // move the following text up to where the `}` was + (in.lastOffset - 1, skipped) // move the following text up to where the `)` or `}` was + } + + /** Parse brace-enclosed `body` and rewrite it to be an indentation region instead, if possible. + * If possible means: + * 1. not inside (...), [...], case ... => + * 2. opening brace `{` is at end of line + * 3. closing brace `}` is at start of line + * 4. there is at least one token between the braces + * 5. the closing brace is also at the end of the line, or it is followed by one of + * `then`, `else`, `do`, `catch`, `finally`, `yield`, or `match`. + * 6. the opening brace does not follow a `=>`. The reason for this condition is that + * rewriting back to braces does not work after `=>` (since in most cases braces are omitted + * after a `=>` it would be annoying if braces were inserted). + */ + def bracesToIndented[T](body: => T): T = { + val colonRequired = possibleColonOffset == in.lastOffset + val (startOpening, endOpening) = startingElimRegion(colonRequired) + val isOuterMost = in.sepRegions.isEmpty + val savedPending = pendingPatches + var canRewrite = + in.sepRegions.forall(token => token == RBRACE || token == OUTDENT) && // test (1) + !testChars(in.lastOffset - 3, " =>") // test(6) + val t = enclosed(LBRACE, { + canRewrite &= in.isAfterLineEnd // test (2) + val curOffset = in.offset + try body + finally { + canRewrite &= in.isAfterLineEnd && in.offset != curOffset // test (3)(4) + } + }) + canRewrite &= (in.isAfterLineEnd || statCtdTokens.contains(in.token)) // test (5) + if (canRewrite) { + val openingPatchStr = + if (!colonRequired) "" + else if (testChar(startOpening - 1, Chars.isOperatorPart(_))) " :" + else ":" + val (startClosing, endClosing) = closingElimRegion() + val applyPatch = () => { + patch(source, Span(startOpening, endOpening), openingPatchStr) + patch(source, Span(startClosing, endClosing), "") + } + pendingPatches = applyPatch :: pendingPatches + if (isOuterMost) { + pendingPatches.reverse.foreach(_()) + pendingPatches = Nil + } + } + else pendingPatches = savedPending // can't rewrite, cancel all nested patches. + t } /** Drop (...) or { ... }, replacing the closing element with `endStr` */ def dropParensOrBraces(start: Offset, endStr: String): Unit = { - patch(source, Span(start, start + 1), - if (testChar(start - 1, Chars.isIdentifierPart)) " " else "") + if (testChar(start + 1, Chars.isLineBreakChar)) + patch(source, Span(if (testChar(start - 1, ' ')) start - 1 else start, start + 1), "") + else + patch(source, Span(start, start + 1), + if (testChar(start - 1, Chars.isIdentifierPart)) " " else "") val closingStartsLine = testChar(skipBlanks(in.lastOffset - 2, -1), Chars.LF) val preFill = if (closingStartsLine || endStr.isEmpty) "" else " " val postFill = if (in.lastOffset == in.offset) " " else "" @@ -611,26 +751,40 @@ object Parsers { patch(source, Span(startClosing, endClosing), s"$preFill$endStr$postFill") } + /** If all other characters on the same line as `span` are blanks, widen to + * the whole line. 
+ */ + def widenIfWholeLine(span: Span): Span = { + val start = skipBlanks(span.start - 1, -1) + val end = skipBlanks(span.end, 1) + if (testChar(start, Chars.LF) && testChar(end, Chars.LF)) Span(start, end) + else span + } + /** Drop current token, which is assumed to be `then` or `do`. */ def dropTerminator(): Unit = { var startOffset = in.offset var endOffset = in.lastCharOffset - if (in.isAfterLineEnd()) { - if (testChar(endOffset, ' ')) endOffset += 1 + if (in.isAfterLineEnd) { + if (testChar(endOffset, ' ')) + endOffset += 1 } else { - if (testChar(startOffset - 1, ' ')) startOffset -= 1 + if (testChar(startOffset - 1, ' ') && + !overlapsPatch(source, Span(startOffset - 1, endOffset))) + startOffset -= 1 } - patch(source, Span(startOffset, endOffset), "") + patch(source, widenIfWholeLine(Span(startOffset, endOffset)), "") } /** rewrite code with (...) around the source code of `t` */ - def revertToParens(t: Tree): Unit = + def revertToParens(t: Tree): Unit = { if (t.span.exists) { patch(source, t.span.startPos, "(") patch(source, t.span.endPos, ")") dropTerminator() } + } /** In the tokens following the current one, does `query` precede any of the tokens that * - must start a statement, or @@ -644,13 +798,13 @@ object Parsers { val token = lookahead.token if (braces == 0) { if (token == query) return true - if (stopScanTokens.contains(token) || lookahead.token == RBRACE) return false + if (stopScanTokens.contains(token) || lookahead.isNestedEnd) return false } else if (token == EOF) return false - else if (lookahead.token == RBRACE) + else if (lookahead.isNestedEnd) braces -= 1 - if (lookahead.token == LBRACE) braces += 1 + if (lookahead.isNestedStart) braces += 1 lookahead.nextToken() } false @@ -679,14 +833,15 @@ object Parsers { var opStack: List[OpInfo] = Nil - def checkAssoc(offset: Token, op1: Name, op2: Name, op2LeftAssoc: Boolean): Unit = + def checkAssoc(offset: Token, op1: Name, op2: Name, op2LeftAssoc: Boolean): Unit = { if (isLeftAssoc(op1) != op2LeftAssoc) syntaxError(MixedLeftAndRightAssociativeOps(op1, op2, op2LeftAssoc), offset) + } def reduceStack(base: List[OpInfo], top: Tree, prec: Int, leftAssoc: Boolean, op2: Name, isType: Boolean): Tree = { if (opStack != base && precedence(opStack.head.operator.name) == prec) checkAssoc(opStack.head.offset, opStack.head.operator.name, op2, leftAssoc) - def recur(top: Tree): Tree = { + def recur(top: Tree): Tree = if (opStack == base) top else { val opInfo = opStack.head @@ -701,7 +856,6 @@ object Parsers { } else top } - } recur(top) } @@ -723,6 +877,7 @@ object Parsers { val op = if (isType) typeIdent() else termIdent() val top1 = reduceStack(base, top, precedence(op.name), isLeftAssoc(op.name), op.name, isType) opStack = OpInfo(top1, op, in.offset) :: opStack + colonAtEOLOpt() newLineOptWhenFollowing(canStartOperand) if (maybePostfix && !canStartOperand(in.token)) { val topInfo = opStack.head @@ -759,7 +914,8 @@ object Parsers { val name = in.name in.nextToken() name - } else { + } + else { syntaxErrorOrIncomplete(ExpectedTokenButFound(IDENTIFIER, in.token)) nme.ERROR } @@ -806,7 +962,7 @@ object Parsers { * @param finish An alternative parse in case the token following a `.' is not an identifier. * If the alternative does not apply, its tree argument is returned unchanged. 
*/ - def dotSelectors(t: Tree, finish: Tree => Tree = id): Tree = + def dotSelectors(t: Tree, finish: Tree => Tree = id): Tree = if (in.token == DOT) { in.nextToken(); selectors(t, finish) } else t @@ -907,7 +1063,7 @@ object Parsers { t } else atSpan(negOffset) { - if (in.token == QUOTEID) { + if (in.token == QUOTEID) if ((staged & StageKind.Spliced) != 0 && Chars.isIdentifierStart(in.name(0))) { val t = atSpan(in.offset + 1) { val tok = in.toToken(in.name) @@ -930,7 +1086,6 @@ object Parsers { } atSpan(in.skipToken()) { SymbolLit(in.strVal) } } - } else if (in.token == INTERPOLATIONID) interpolatedString(inPattern) else { val t = literalOf(in.token) @@ -1002,6 +1157,28 @@ object Parsers { if (in.token == NEWLINE && p(in.next.token)) newLineOpt() } + def colonAtEOLOpt(): Unit = { + possibleColonOffset = in.lastOffset + if (in.token == COLONEOL) in.nextToken() + } + + def possibleBracesStart(): Unit = { + colonAtEOLOpt() + newLineOptWhenFollowedBy(LBRACE) + } + + def indentRegion[T](tag: EndMarkerTag)(op: => T): T = { + val iw = in.indent.width + val t = op + in.consumeEndMarker(tag, iw) + t + } + + def indentRegion[T](pid: Tree)(op: => T): T = pid match { + case pid: RefTree => indentRegion(pid.name.toTermName)(op) + case _ => op + } + /* ------------- TYPES ------------------------------------------------------ */ /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and * returns a tree for type `Any` instead. @@ -1069,7 +1246,7 @@ object Parsers { functionRest(ts) else { val ts1 = - for (t <- ts) yield { + for (t <- ts) yield t match { case t@ByNameTypeTree(t1) => syntaxError(ByNameParameterNotSupported(t), t.span) @@ -1077,7 +1254,6 @@ object Parsers { case _ => t } - } val tuple = atSpan(start) { makeTupleOrParens(ts1) } infixTypeRest( refinedTypeRest( @@ -1103,7 +1279,8 @@ object Parsers { Ident(nme.ERROR.toTypeName) } } - } else { accept(TLARROW); typ() } + } + else { accept(TLARROW); typ() } } else infixType() @@ -1145,8 +1322,9 @@ object Parsers { val refinedType: () => Tree = () => refinedTypeRest(withType()) def refinedTypeRest(t: Tree): Tree = { - newLineOptWhenFollowedBy(LBRACE) - if (in.token == LBRACE) refinedTypeRest(atSpan(startOffset(t)) { RefinedTypeTree(rejectWildcardType(t), refinement()) }) + possibleBracesStart() + if (in.isNestedStart) + refinedTypeRest(atSpan(startOffset(t)) { RefinedTypeTree(rejectWildcardType(t), refinement()) }) else t } @@ -1235,7 +1413,7 @@ object Parsers { typeBounds().withSpan(Span(start, in.lastOffset, start)) } else if (isIdent(nme.*) && ctx.settings.YkindProjector.value) { - syntaxError("`*` placeholders are not implemented yet") + syntaxError("`*` placeholders are not implemented yet") typeIdent() } else if (isSplice) @@ -1250,7 +1428,8 @@ object Parsers { if (in.token == TYPE) { in.nextToken() atSpan(startOffset(t)) { SingletonTypeTree(t) } - } else t + } + else t private def simpleTypeRest(t: Tree): Tree = in.token match { case HASH => simpleTypeRest(typeProjection(t)) @@ -1321,7 +1500,8 @@ object Parsers { if (isIdent(nme.raw.STAR)) { in.nextToken() atSpan(startOffset(t)) { PostfixOp(t, Ident(tpnme.raw.STAR)) } - } else t + } + else t } /** TypeArgs ::= `[' Type {`,' Type} `]' @@ -1331,7 +1511,7 @@ object Parsers { /** Refinement ::= `{' RefineStatSeq `}' */ - def refinement(): List[Tree] = inBraces(refineStatSeq()) + def refinement(): List[Tree] = inBracesOrIndented(refineStatSeq()) /** TypeBounds ::= [`>:' Type] [`<:' Type] */ @@ -1365,9 +1545,16 @@ object Parsers { Nil } - def typedOpt(): Tree = + def 
typedOpt(): Tree = { + if (in.token == COLONEOL) in.token = COLON + // a hack to allow + // + // def f(): + // T + // if (in.token == COLON) { in.nextToken(); toplevelTyp() } else TypeTree().withSpan(Span(in.lastOffset)) + } def typeDependingOn(location: Location.Value): Tree = if (location == Location.InParens) typ() @@ -1383,7 +1570,7 @@ object Parsers { expr() } - def condExpr(altToken: Token): Tree = { + def condExpr(altToken: Token): Tree = if (in.token == LPAREN) { var t: Tree = atSpan(in.offset) { Parens(inParens(exprInParens())) } if (in.token != altToken && followedByToken(altToken)) @@ -1398,13 +1585,17 @@ object Parsers { else if (rewriteToNewSyntax(t.span)) dropParensOrBraces(t.span.start, s"${tokenString(altToken)}") t - } else { - val t = inSepRegion(LPAREN, RPAREN)(expr()) + } + else { + val t = + if (in.isNestedStart) + try expr() finally newLinesOpt() + else + inSepRegion(LPAREN, RPAREN)(expr()) if (rewriteToOldSyntax(t.span.startPos)) revertToParens(t) accept(altToken) t } - } /** Expr ::= [ClosureMods] FunParams =>' Expr * | Expr1 @@ -1419,7 +1610,6 @@ object Parsers { * | [‘inline’] `if' Expr `then' Expr [[semi] else Expr] * | `while' `(' Expr `)' {nl} Expr * | `while' Expr `do' Expr - * | `do' Expr [semi] `while' Expr * | `try' Expr Catches [`finally' Expr] * | `try' Expr [`finally' Expr] * | `throw' Expr @@ -1482,13 +1672,15 @@ object Parsers { def expr1(location: Location.Value = Location.ElseWhere): Tree = in.token match { case IF => - ifExpr(in.offset, If) + indentRegion(IF) { ifExpr(in.offset, If) } case WHILE => - atSpan(in.skipToken()) { - val cond = condExpr(DO) - newLinesOpt() - val body = expr() - WhileDo(cond, body) + indentRegion(WHILE) { + atSpan(in.skipToken()) { + val cond = condExpr(DO) + newLinesOpt() + val body = expr() + WhileDo(cond, body) + } } case DO => in.errorOrMigrationWarning( @@ -1517,41 +1709,47 @@ object Parsers { WhileDo(Block(body, cond), Literal(Constant(()))) } case TRY => - val tryOffset = in.offset - atSpan(in.skipToken()) { - val body = expr() - val (handler, handlerStart) = - if (in.token == CATCH) { - val span = in.offset - in.nextToken() - (expr(), span) - } else (EmptyTree, -1) - - handler match { - case Block(Nil, EmptyTree) => - assert(handlerStart != -1) - syntaxError( - EmptyCatchBlock(body), - Span(handlerStart, endOffset(handler)) - ) - case _ => - } - - val finalizer = - if (in.token == FINALLY) { in.nextToken(); expr() } - else { - if (handler.isEmpty) warning( - EmptyCatchAndFinallyBlock(body), - source.atSpan(Span(tryOffset, endOffset(body))) - ) - EmptyTree + indentRegion(TRY) { + val tryOffset = in.offset + atSpan(in.skipToken()) { + val body = expr() + val (handler, handlerStart) = + if (in.token == CATCH) { + val span = in.offset + in.nextToken() + (expr(), span) + } + else (EmptyTree, -1) + + handler match { + case Block(Nil, EmptyTree) => + assert(handlerStart != -1) + syntaxError( + EmptyCatchBlock(body), + Span(handlerStart, endOffset(handler)) + ) + case _ => } - ParsedTry(body, handler, finalizer) + + val finalizer = + if (in.token == FINALLY) { in.nextToken(); expr() } + else { + if (handler.isEmpty) warning( + EmptyCatchAndFinallyBlock(body), + source.atSpan(Span(tryOffset, endOffset(body))) + ) + EmptyTree + } + ParsedTry(body, handler, finalizer) + } } case THROW => atSpan(in.skipToken()) { Throw(expr()) } case RETURN => - atSpan(in.skipToken()) { Return(if (isExprIntro) expr() else EmptyTree, EmptyTree) } + atSpan(in.skipToken()) { + colonAtEOLOpt() + Return(if (isExprIntro) expr() else EmptyTree, 
EmptyTree) + } case FOR => forExpr() case LBRACKET => @@ -1611,7 +1809,8 @@ object Parsers { in.nextToken() if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), uscoreStart) Typed(t, atSpan(uscoreStart) { Ident(tpnme.WILDCARD_STAR) }) - } else { + } + else { syntaxErrorOrIncomplete(IncorrectRepeatedParameterSyntax()) t } @@ -1644,8 +1843,10 @@ object Parsers { /** `match' { CaseClauses } */ def matchExpr(t: Tree, start: Offset, mkMatch: (Tree, List[CaseDef]) => Match) = - atSpan(start, in.skipToken()) { - inBraces(mkMatch(t, caseClauses(caseClause))) + indentRegion(MATCH) { + atSpan(start, in.skipToken()) { + inBracesOrIndented(mkMatch(t, caseClauses(caseClause))) + } } /** `match' { ImplicitCaseClauses } @@ -1676,7 +1877,7 @@ object Parsers { */ def matchType(t: Tree): MatchTypeTree = atSpan(t.span.start, accept(MATCH)) { - inBraces(MatchTypeTree(EmptyTree, t, caseClauses(typeCaseClause))) + inBracesOrIndented(MatchTypeTree(EmptyTree, t, caseClauses(typeCaseClause))) } /** FunParams ::= Bindings @@ -1790,7 +1991,7 @@ object Parsers { atSpan(start) { Ident(pname) } case LPAREN => atSpan(in.offset) { makeTupleOrParens(inParens(exprsInParensOpt())) } - case LBRACE => + case LBRACE | INDENT => canApply = false blockExpr() case QUOTE => @@ -1816,6 +2017,10 @@ object Parsers { "Scala 2 macros are not supported, see http://dotty.epfl.ch/docs/reference/dropped-features/macros.html", start) unimplementedExpr + case COLONEOL => + syntaxError("':' not allowed here") + in.nextToken() + simpleExpr() case _ => if (isLiteral) literal() else { @@ -1827,7 +2032,7 @@ object Parsers { } def simpleExprRest(t: Tree, canApply: Boolean = true): Tree = { - if (canApply) newLineOptWhenFollowedBy(LBRACE) + if (canApply) possibleBracesStart() in.token match { case DOT => in.nextToken() @@ -1835,7 +2040,7 @@ object Parsers { case LBRACKET => val tapp = atSpan(startOffset(t), in.offset) { TypeApply(t, typeArgs(namedOK = true, wildOK = false)) } simpleExprRest(tapp, canApply = true) - case LPAREN | LBRACE if canApply => + case LPAREN | LBRACE | INDENT if canApply => val app = atSpan(startOffset(t), in.offset) { Apply(t, argumentExprs()) } simpleExprRest(app, canApply = true) case USCORE => @@ -1847,30 +2052,31 @@ object Parsers { /** SimpleExpr ::= ‘new’ (ConstrApp {`with` ConstrApp} [TemplateBody] | TemplateBody) */ - def newExpr(): Tree = { - val start = in.skipToken() - def reposition(t: Tree) = t.withSpan(Span(start, in.lastOffset)) - newLineOptWhenFollowedBy(LBRACE) - val parents = - if (in.token == LBRACE) Nil - else constrApp() :: { - if (in.token == WITH) { - // Enable this for 3.1, when we drop `with` for inheritance: - // in.errorUnlessInScala2Mode( - // "anonymous class with multiple parents is no longer supported; use a named class instead") - in.nextToken() - tokenSeparated(WITH, constrApp) + def newExpr(): Tree = + indentRegion(NEW) { + val start = in.skipToken() + def reposition(t: Tree) = t.withSpan(Span(start, in.lastOffset)) + possibleBracesStart() + val parents = + if (in.isNestedStart) Nil + else constrApp() :: { + if (in.token == WITH) { + // Enable this for 3.1, when we drop `with` for inheritance: + // in.errorUnlessInScala2Mode( + // "anonymous class with multiple parents is no longer supported; use a named class instead") + in.nextToken() + tokenSeparated(WITH, constrApp) + } + else Nil } - else Nil + possibleBracesStart() + parents match { + case parent :: Nil if !in.isNestedStart => + reposition(if (parent.isType) ensureApplied(wrapNew(parent)) else parent) + case _ => + 
New(reposition(templateBodyOpt(emptyConstructor, parents, Nil))) } - newLineOptWhenFollowedBy(LBRACE) - parents match { - case parent :: Nil if in.token != LBRACE => - reposition(if (parent.isType) ensureApplied(wrapNew(parent)) else parent) - case _ => - New(reposition(templateBodyOpt(emptyConstructor, parents, Nil))) } - } /** ExprsInParens ::= ExprInParens {`,' ExprInParens} */ @@ -1887,19 +2093,18 @@ object Parsers { * | [nl] BlockExpr */ def argumentExprs(): List[Tree] = - if (in.token == LBRACE) blockExpr() :: Nil else parArgumentExprs() + if (in.isNestedStart) blockExpr() :: Nil else parArgumentExprs() val argumentExpr: () => Tree = () => exprInParens() match { case arg @ Assign(Ident(id), rhs) => cpy.NamedArg(arg)(id, rhs) case arg => arg } - /** ArgumentExprss ::= {ArgumentExprs} */ def argumentExprss(fn: Tree): Tree = { - newLineOptWhenFollowedBy(LBRACE) - if (in.token == LPAREN || in.token == LBRACE) argumentExprss(Apply(fn, argumentExprs())) + possibleBracesStart() + if (in.token == LPAREN || in.isNestedStart) argumentExprss(Apply(fn, argumentExprs())) else fn } @@ -2004,77 +2209,81 @@ object Parsers { * {nl} [`yield'] Expr * | `for' Enumerators (`do' Expr | `yield' Expr) */ - def forExpr(): Tree = atSpan(in.skipToken()) { - var wrappedEnums = true - val start = in.offset - val forEnd = in.lastOffset - val leading = in.token - val enums = - if (leading == LBRACE || leading == LPAREN && parensEncloseGenerators) { - in.nextToken() - openParens.change(leading, 1) - val res = - if (leading == LBRACE || in.token == CASE) - enumerators() - else { - val pats = patternsOpt() - val pat = - if (in.token == RPAREN || pats.length > 1) { - wrappedEnums = false - accept(RPAREN) - openParens.change(LPAREN, -1) - atSpan(start) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer. + def forExpr(): Tree = indentRegion(FOR) { + atSpan(in.skipToken()) { + var wrappedEnums = true + val start = in.offset + val forEnd = in.lastOffset + val leading = in.token + val enums = + if (leading == LBRACE || leading == LPAREN && parensEncloseGenerators) { + in.nextToken() + openParens.change(leading, 1) + val res = + if (leading == LBRACE || in.token == CASE) + enumerators() + else { + val pats = patternsOpt() + val pat = + if (in.token == RPAREN || pats.length > 1) { + wrappedEnums = false + accept(RPAREN) + openParens.change(LPAREN, -1) + atSpan(start) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer. 
+ } + else pats.head + generatorRest(pat, casePat = false) :: enumeratorsRest() + } + if (wrappedEnums) { + val closingOnNewLine = in.isAfterLineEnd + accept(leading + 1) + openParens.change(leading, -1) + def hasMultiLineEnum = + res.exists { t => + val pos = t.sourcePos + pos.startLine < pos.endLine } - else pats.head - generatorRest(pat, casePat = false) :: enumeratorsRest() - } - if (wrappedEnums) { - val closingOnNewLine = in.isAfterLineEnd() - accept(leading + 1) - openParens.change(leading, -1) - def hasMultiLineEnum = - res.exists { t => - val pos = t.sourcePos - pos.startLine < pos.endLine + if (rewriteToNewSyntax(Span(start)) && (leading == LBRACE || !hasMultiLineEnum)) { + // Don't rewrite if that could change meaning of newlines + newLinesOpt() + dropParensOrBraces(start, if (in.token == YIELD || in.token == DO) "" else "do") } - if (rewriteToNewSyntax(Span(start)) && (leading == LBRACE || !hasMultiLineEnum)) { - // Don't rewrite if that could change meaning of newlines - newLinesOpt() - dropParensOrBraces(start, if (in.token == YIELD || in.token == DO) "" else "do") } + res } - res - } - else { - wrappedEnums = false - - /*if (in.token == INDENT) inBracesOrIndented(enumerators()) else*/ - val ts = inSepRegion(LBRACE, RBRACE)(enumerators()) - if (rewriteToOldSyntax(Span(start)) && ts.nonEmpty) { - if (ts.length > 1 && ts.head.sourcePos.startLine != ts.last.sourcePos.startLine) { - patch(source, Span(forEnd), " {") - patch(source, Span(in.offset), "} ") - } + else { + wrappedEnums = false + + if (in.token == INDENT) + inBracesOrIndented(enumerators()) else { - patch(source, ts.head.span.startPos, "(") - patch(source, ts.last.span.endPos, ")") + val ts = inSepRegion(LBRACE, RBRACE)(enumerators()) + if (rewriteToOldSyntax(Span(start)) && ts.nonEmpty) + if (ts.head.sourcePos.startLine != ts.last.sourcePos.startLine) { + patch(source, Span(forEnd), " {") + patch(source, Span(in.offset), "} ") + } + else { + patch(source, ts.head.span.startPos, "(") + patch(source, ts.last.span.endPos, ")") + } + ts } } - ts + newLinesOpt() + if (in.token == YIELD) { + in.nextToken() + ForYield(enums, expr()) + } + else if (in.token == DO) { + if (rewriteToOldSyntax()) dropTerminator() + in.nextToken() + ForDo(enums, expr()) + } + else { + if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension()) + ForDo(enums, expr()) } - newLinesOpt() - if (in.token == YIELD) { - in.nextToken() - ForYield(enums, expr()) - } - else if (in.token == DO) { - if (rewriteToOldSyntax()) dropTerminator() - in.nextToken() - ForDo(enums, expr()) - } - else { - if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension()) - ForDo(enums, expr()) } } @@ -2190,7 +2399,8 @@ object Parsers { in.nextToken() if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), wildIndent.span) atSpan(wildIndent.span) { Ident(tpnme.WILDCARD_STAR) } - } else wildIndent + } + else wildIndent case LPAREN => atSpan(in.offset) { makeTupleOrParens(inParens(patternsOpt())) } case QUOTE => @@ -2301,7 +2511,8 @@ object Parsers { if (in.token == THIS) { in.nextToken(); mods | Local } else mods.withPrivateWithin(ident().toTypeName) } - } else mods + } + else mods /** {Annotation} {Modifier} * Modifiers ::= {Modifier} @@ -2315,20 +2526,20 @@ object Parsers { */ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec - def loop(mods: Modifiers): Modifiers = { + def loop(mods: Modifiers): Modifiers = if (allowed.contains(in.token) || in.isSoftModifier && 
localModifierTokens.subsetOf(allowed)) { // soft modifiers are admissible everywhere local modifiers are val isAccessMod = accessModifierTokens contains in.token val mods1 = addModifier(mods) loop(if (isAccessMod) accessQualifierOpt(mods1) else mods1) - } else if (in.token == NEWLINE && (mods.hasFlags || mods.hasAnnotations)) { + } + else if (in.token == NEWLINE && (mods.hasFlags || mods.hasAnnotations)) { in.nextToken() loop(mods) - } else { - mods } - } + else + mods normalize(loop(start)) } @@ -2475,9 +2686,8 @@ object Parsers { val default = if (in.token == EQUALS) { in.nextToken(); expr() } else EmptyTree - if (impliedMods.mods.nonEmpty) { + if (impliedMods.mods.nonEmpty) impliedMods = impliedMods.withMods(Nil) // keep only flags, so that parameter positions don't overlap - } ValDef(name, tpt, default).withMods(mods) } } @@ -2520,7 +2730,7 @@ object Parsers { ofCaseClass: Boolean = false, ofInstance: Boolean = false): List[List[ValDef]] = { - def followingIsParamClause: Boolean = { + def followingIsParamClause: Boolean = { val lookahead = in.lookaheadScanner lookahead.nextToken() paramIntroTokens.contains(lookahead.token) && { @@ -2780,15 +2990,16 @@ object Parsers { } else emptyType val rhs = - if (tpt.isEmpty || in.token == EQUALS) { - accept(EQUALS) - if (in.token == USCORE && !tpt.isEmpty && mods.is(Mutable) && - (lhs.toList forall (_.isInstanceOf[Ident]))) { - wildcardIdent() - } else { - expr() + if (tpt.isEmpty || in.token == EQUALS) + indentRegion(first) { + accept(EQUALS) + if (in.token == USCORE && !tpt.isEmpty && mods.is(Mutable) && + (lhs.toList forall (_.isInstanceOf[Ident]))) + wildcardIdent() + else + expr() } - } else EmptyTree + else EmptyTree lhs match { case (id @ Ident(name: TermName)) :: Nil if name != nme.WILDCARD => val vdef = ValDef(name, tpt, rhs) @@ -2829,7 +3040,8 @@ object Parsers { atSpan(in.offset) { constrExpr() } } makeConstructor(Nil, vparamss, rhs).withMods(mods).setComment(in.getDocComment(start)) - } else { + } + else { val (leadingParamss, flags) = if (in.token == LPAREN) try (paramClause(prefix = true) :: Nil, Method | Extension) @@ -2838,6 +3050,7 @@ object Parsers { (Nil, Method) val mods1 = addFlag(mods, flags) val ident = termIdent() + val name = ident.name.asTermName val tparams = typeParamClauseOpt(ParamOwner.Def) val vparamss = paramClauses() match { case rparams :: rparamss if leadingParamss.nonEmpty && !isLeftAssoc(ident.name) => @@ -2854,10 +3067,11 @@ object Parsers { } if (in.isScala2Mode) newLineOptWhenFollowedBy(LBRACE) val rhs = - if (in.token == EQUALS) { - in.nextToken() - expr() - } + if (in.token == EQUALS) + indentRegion(name) { + in.nextToken() + expr() + } else if (!tpt.isEmpty) EmptyTree else if (scala2ProcedureSyntax(": Unit")) { @@ -2871,7 +3085,7 @@ object Parsers { expr() } - val ddef = DefDef(ident.name.asTermName, tparams, vparamss, tpt, rhs) + val ddef = DefDef(name, tparams, vparamss, tpt, rhs) if (isBackquoted(ident)) ddef.pushAttachment(Backquoted, ()) finalizeDef(ddef, mods1, start) } @@ -2881,29 +3095,25 @@ object Parsers { * | `{' SelfInvocation {semi BlockStat} `}' */ def constrExpr(): Tree = - if (in.token == LBRACE) constrBlock() + if (in.isNestedStart) + atSpan(in.offset) { + inBracesOrIndented { + val stats = selfInvocation() :: ( + if (isStatSep) { in.nextToken(); blockStatSeq() } + else Nil) + Block(stats, Literal(Constant(()))) + } + } else Block(selfInvocation() :: Nil, Literal(Constant(()))) /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs} */ def selfInvocation(): Tree = atSpan(accept(THIS)) { 
- newLineOptWhenFollowedBy(LBRACE) + possibleBracesStart() argumentExprss(Apply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}' - */ - def constrBlock(): Tree = - atSpan(in.skipToken()) { - val stats = selfInvocation() :: { - if (isStatSep) { in.nextToken(); blockStatSeq() } - else Nil - } - accept(RBRACE) - Block(stats, Literal(Constant(()))) - } - /** TypeDcl ::= id [TypeParamClause] TypeBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { @@ -2948,7 +3158,7 @@ object Parsers { makeTypeDef(rhs) } else makeTypeDef(bounds) - case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF => + case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => makeTypeDef(typeBounds()) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) @@ -2962,7 +3172,7 @@ object Parsers { * | ‘enum’ EnumDef * | ‘given’ GivenDef */ - def tmplDef(start: Int, mods: Modifiers): Tree = { + def tmplDef(start: Int, mods: Modifiers): Tree = in.token match { case TRAIT => classDef(start, posMods(start, addFlag(mods, Trait))) @@ -2982,7 +3192,6 @@ object Parsers { syntaxErrorOrIncomplete(ExpectedStartOfTopLevelDefinition()) EmptyTree } - } /** ClassDef ::= id ClassConstr TemplateOpt */ @@ -2990,11 +3199,12 @@ object Parsers { classDefRest(start, mods, ident().toTypeName) } - def classDefRest(start: Offset, mods: Modifiers, name: TypeName): TypeDef = { - val constr = classConstr(isCaseClass = mods.is(Case)) - val templ = templateOpt(constr) - finalizeDef(TypeDef(name, templ), mods, start) - } + def classDefRest(start: Offset, mods: Modifiers, name: TypeName): TypeDef = + indentRegion(name.toTermName) { + val constr = classConstr(isCaseClass = mods.is(Case)) + val templ = templateOpt(constr) + finalizeDef(TypeDef(name, templ), mods, start) + } /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses */ @@ -3013,22 +3223,23 @@ object Parsers { /** ObjectDef ::= id TemplateOpt */ def objectDef(start: Offset, mods: Modifiers): ModuleDef = atSpan(start, nameStart) { - objectDefRest(start, mods, ident()) - } - - def objectDefRest(start: Offset, mods: Modifiers, name: TermName): ModuleDef = { - val templ = templateOpt(emptyConstructor) - finalizeDef(ModuleDef(name, templ), mods, start) + val name = ident() + indentRegion(name) { + val templ = templateOpt(emptyConstructor) + finalizeDef(ModuleDef(name, templ), mods, start) + } } /** EnumDef ::= id ClassConstr InheritClauses EnumBody */ def enumDef(start: Offset, mods: Modifiers): TypeDef = atSpan(start, nameStart) { - val modName = ident() - val clsName = modName.toTypeName - val constr = classConstr() - val templ = template(constr, isEnum = true) - finalizeDef(TypeDef(clsName, templ), mods, start) + val modulName = ident() + indentRegion(modulName) { + val clsName = modulName.toTypeName + val constr = classConstr() + val templ = template(constr, isEnum = true) + finalizeDef(TypeDef(clsName, templ), mods, start) + } } /** EnumCase = `case' (id ClassConstr [`extends' ConstrApps] | ids) @@ -3077,42 +3288,44 @@ object Parsers { def instanceDef(newStyle: Boolean, start: Offset, mods: Modifiers, instanceMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, instanceMod) val name = if (isIdent && !(newStyle && isIdent(nme.as))) ident() else EmptyTermName - val tparams = typeParamClauseOpt(ParamOwner.Def) - var leadingParamss = - if (in.token == LPAREN) - try paramClause(prefix = true) :: Nil - finally { - newLineOptWhenFollowedBy(LBRACE) - if (in.token != LBRACE) 
syntaxErrorOrIncomplete("`{' expected") + indentRegion(name) { + val tparams = typeParamClauseOpt(ParamOwner.Def) + var leadingParamss = + if (in.token == LPAREN) + try paramClause(prefix = true) :: Nil + finally { + possibleBracesStart() + if (!in.isNestedStart) syntaxErrorOrIncomplete("`{' expected") + } + else Nil + val parents = + if (!newStyle && in.token == FOR || isIdent(nme.as)) { // for the moment, accept both `given for` and `given as` + in.nextToken() + tokenSeparated(COMMA, constrApp) } - else Nil - val parents = - if (!newStyle && in.token == FOR || isIdent(nme.as)) { // for the moment, accept both `given for` and `given as` - in.nextToken() - tokenSeparated(COMMA, constrApp) - } - else Nil - val vparamss = paramClauses(ofInstance = true) - val instDef = - if (in.token == EQUALS && parents.length == 1 && parents.head.isType) { - in.nextToken() - mods1 |= Final - DefDef(name, tparams, vparamss, parents.head, expr()) - } - else { - newLineOptWhenFollowedBy(LBRACE) - val (tparams1, vparamss1) = - if (leadingParamss.nonEmpty) - (tparams, leadingParamss) - else - (tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal)), - vparamss.map(_.map(vparam => - vparam.withMods(vparam.mods &~ Param | ParamAccessor | PrivateLocal)))) - val templ = templateBodyOpt(makeConstructor(tparams1, vparamss1), parents, Nil) - if (tparams.isEmpty && vparamss1.isEmpty || leadingParamss.nonEmpty) ModuleDef(name, templ) - else TypeDef(name.toTypeName, templ) - } - finalizeDef(instDef, mods1, start) + else Nil + val vparamss = paramClauses(ofInstance = true) + val instDef = + if (in.token == EQUALS && parents.length == 1 && parents.head.isType) { + in.nextToken() + mods1 |= Final + DefDef(name, tparams, vparamss, parents.head, expr()) + } + else { + possibleBracesStart() + val (tparams1, vparamss1) = + if (leadingParamss.nonEmpty) + (tparams, leadingParamss) + else + (tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal)), + vparamss.map(_.map(vparam => + vparam.withMods(vparam.mods &~ Param | ParamAccessor | PrivateLocal)))) + val templ = templateBodyOpt(makeConstructor(tparams1, vparamss1), parents, Nil) + if (tparams.isEmpty && vparamss1.isEmpty || leadingParamss.nonEmpty) ModuleDef(name, templ) + else TypeDef(name.toTypeName, templ) + } + finalizeDef(instDef, mods1, start) + } } /* -------- TEMPLATES ------------------------------------------- */ @@ -3180,7 +3393,7 @@ object Parsers { val extended = if (in.token == EXTENDS) { in.nextToken() - if (in.token == LBRACE) { + if (in.token == LBRACE || in.token == COLONEOL) { in.errorOrMigrationWarning("`extends' must be followed by at least one parent") Nil } @@ -3200,7 +3413,7 @@ object Parsers { */ def template(constr: DefDef, isEnum: Boolean = false): Template = { val (parents, derived) = inheritClauses() - newLineOptWhenFollowedBy(LBRACE) + possibleBracesStart() if (isEnum) { val (self, stats) = withinEnum(templateBody()) Template(constr, parents, derived, self, stats) @@ -3211,8 +3424,8 @@ object Parsers { /** TemplateOpt = [Template] */ def templateOpt(constr: DefDef): Template = { - newLineOptWhenFollowedBy(LBRACE) - if (in.token == EXTENDS || isIdent(nme.derives) || in.token == LBRACE) + possibleBracesStart() + if (in.token == EXTENDS || isIdent(nme.derives) || in.isNestedStart) template(constr) else Template(constr, Nil, Nil, EmptyValDef, Nil) @@ -3222,7 +3435,7 @@ object Parsers { */ def templateBodyOpt(constr: DefDef, parents: List[Tree], derived: List[Tree]): Template = { val (self, stats) = - if (in.token == LBRACE) 
templateBody() else (EmptyValDef, Nil) + if (in.isNestedStart) templateBody() else (EmptyValDef, Nil) Template(constr, parents, derived, self, stats) } @@ -3247,9 +3460,11 @@ object Parsers { */ def packaging(start: Int): Tree = { val pkg = qualId() - newLineOptWhenFollowedBy(LBRACE) - val stats = inDefScopeBraces(topStatSeq()) - makePackaging(start, pkg, stats) + indentRegion(pkg) { + possibleBracesStart() + val stats = inDefScopeBraces(topStatSeq()) + makePackaging(start, pkg, stats) + } } /** TopStatSeq ::= TopStat {semi TopStat} @@ -3278,12 +3493,11 @@ object Parsers { stats ++= importClause(EXPORT, Export.apply) else if (in.token == AT || isDefIntro(modifierTokens)) stats +++= defOrDcl(in.offset, defAnnotsMods(modifierTokens)) - else if (!isStatSep) { + else if (!isStatSep) if (in.token == CASE) syntaxErrorOrIncomplete(OnlyCaseClassOrCaseObjectAllowed()) else syntaxErrorOrIncomplete(ExpectedToplevelDef()) - } acceptStatSepUnlessAtEnd() } stats.toList @@ -3313,8 +3527,10 @@ object Parsers { if (name != nme.ERROR) self = makeSelfDef(name, tpt).withSpan(first.span) } + in.token = EMPTY // hack to suppress INDENT insertion after `=>` in.nextToken() - } else { + } + else { stats += first acceptStatSepUnlessAtEnd() } @@ -3376,12 +3592,11 @@ object Parsers { def localDef(start: Int, implicitMods: Modifiers = EmptyModifiers): Tree = { var mods = defAnnotsMods(localModifierTokens) for (imod <- implicitMods.mods) mods = addMod(mods, imod) - if (mods.is(Final)) { + if (mods.is(Final)) // A final modifier means the local definition is "class-like". // FIXME: Deal with modifiers separately tmplDef(start, mods) - } else { + else defOrDcl(start, mods) - } } /** BlockStatSeq ::= { BlockStat semi } [Expr] @@ -3420,9 +3635,9 @@ object Parsers { stats += impliedMatch(start, imods) else stats +++= localDef(start, imods) - } else { - stats +++= localDef(in.offset) } + else + stats +++= localDef(in.offset) else if (!isStatSep && (in.token != CASE)) { exitOnError = mustStartStat syntaxErrorOrIncomplete(IllegalStartOfStatement(isModifier)) @@ -3448,19 +3663,22 @@ object Parsers { acceptStatSep() ts ++= topStatSeq() } - } else { + } + else { val pkg = qualId() - newLineOptWhenFollowedBy(LBRACE) - if (in.token == EOF) - ts += makePackaging(start, pkg, List()) - else if (in.token == LBRACE) { - ts += inDefScopeBraces(makePackaging(start, pkg, topStatSeq())) - acceptStatSepUnlessAtEnd() - ts ++= topStatSeq() - } - else { - acceptStatSep() - ts += makePackaging(start, pkg, topstats()) + indentRegion(pkg) { + possibleBracesStart() + if (in.token == EOF) + ts += makePackaging(start, pkg, List()) + else if (in.isNestedStart) { + ts += inDefScopeBraces(makePackaging(start, pkg, topStatSeq())) + acceptStatSepUnlessAtEnd() + ts ++= topStatSeq() + } + else { + acceptStatSep() + ts += makePackaging(start, pkg, topstats()) + } } } } diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index b5a8c7686597..b8ce2ef189b6 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -9,11 +9,13 @@ import java.lang.Character.isDigit import scala.internal.Chars._ import util.NameTransformer.avoidIllegalChars import util.Spans.Span +import config.Config import Tokens._ import scala.annotation.{ switch, tailrec } import scala.collection.mutable import scala.collection.immutable.{SortedMap, BitSet} import rewrites.Rewrites.patch +import config.Printers.lexical object Scanners { @@ -36,6 +38,11 @@ 
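// Editorial sketch (not from the patch): the self-type pattern that the
// EMPTY-token hack above guards. The trailing `=>` ends its line, and ARROW is
// in canStartIndentTokens, so without the hack the scanner could open an
// unwanted indentation region before the members that follow it.
trait Component { self =>
  def id: String
  def describe: String = s"component $id"
}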
object Scanners { /** the offset of the character following the token preceding this one */ var lastOffset: Offset = 0 + /** the offset of the newline immediately preceding the token, or -1 if + * token is not preceded by a newline. + */ + var lineOffset: Offset = -1 + /** the name of an identifier */ var name: SimpleName = null @@ -49,6 +56,7 @@ object Scanners { this.token = td.token this.offset = td.offset this.lastOffset = td.lastOffset + this.lineOffset = td.lineOffset this.name = td.name this.strVal = td.strVal this.base = td.base @@ -72,9 +80,8 @@ object Scanners { errOffset = off } - def errorButContinue(msg: String, off: Offset = offset): Unit = { + def errorButContinue(msg: String, off: Offset = offset): Unit = ctx.error(msg, source atSpan Span(off)) - } /** signal an error where the input ended in the middle of a token */ def incompleteInputError(msg: String): Unit = { @@ -122,10 +129,10 @@ object Scanners { /** Convert current strVal, base to long value * This is tricky because of max negative value. */ - def intVal(negated: Boolean): Long = { - if (token == CHARLIT && !negated) { + def intVal(negated: Boolean): Long = + if (token == CHARLIT && !negated) charVal - } else { + else { var value: Long = 0 val divider = if (base == 10) 1 else 2 val limit: Long = @@ -153,7 +160,6 @@ object Scanners { } if (negated) -value else value } - } def intVal: Long = intVal(false) @@ -172,7 +178,8 @@ object Scanners { if (value == 0.0f && !zeroFloat.pattern.matcher(text).matches) errorButContinue("floating point number too small") if (negated) -value else value - } catch { + } + catch { case _: NumberFormatException => error("malformed floating point number") 0.0f @@ -194,7 +201,8 @@ object Scanners { if (value == 0.0d && !zeroFloat.pattern.matcher(text).matches) errorButContinue("double precision floating point number too small") if (negated) -value else value - } catch { + } + catch { case _: NumberFormatException => error("malformed floating point number") 0.0 @@ -213,11 +221,10 @@ object Scanners { if (isNumberSeparator(litBuf.last)) errorButContinue("trailing separator is not allowed", offset + litBuf.length - 1) } - } class Scanner(source: SourceFile, override val startFrom: Offset = 0)(implicit ctx: Context) extends ScannerCommon(source)(ctx) { - val keepComments: Boolean = !ctx.settings.YdropComments.value + val keepComments = !ctx.settings.YdropComments.value /** A switch whether operators at the start of lines can be infix operators */ private var allowLeadingInfixOperators = true @@ -226,6 +233,14 @@ object Scanners { val oldSyntax = ctx.settings.oldSyntax.value val newSyntax = ctx.settings.newSyntax.value + val noindentSyntax = ctx.settings.noindent.value + val indentSyntax = Config.allowIndent || ctx.settings.indent.value || noindentSyntax && rewrite + val rewriteToIndent = ctx.settings.indent.value && rewrite + val rewriteNoIndent = noindentSyntax && rewrite + + if (rewrite && oldSyntax & noindentSyntax) + error("-rewrite cannot be used with both -old-syntax and -noindent; -noindent must come first") + /** All doc comments kept by their end position in a `Map` */ private[this] var docstringMap: SortedMap[Int, Comment] = SortedMap.empty @@ -290,7 +305,13 @@ object Scanners { * (the STRINGLIT appears twice in succession on the stack iff the * expression is a multiline string literal). 
*/ - var sepRegions: List[Token] = List() + var sepRegions: List[Token] = Nil + + /** Indentation widths, innermost to outermost */ + var indent: IndentRegion = IndentRegion(IndentWidth.Zero, Set(), EMPTY, null) + + /** The end marker that was skipped last */ + val endMarkers = new mutable.ListBuffer[EndMarker] // Scala 2 compatibility @@ -377,112 +398,281 @@ object Scanners { // Read a token or copy it from `next` tokenData if (next.token == EMPTY) { lastOffset = lastCharOffset - if (inStringInterpolation) fetchStringPart() - else fetchToken() + if (inStringInterpolation) fetchStringPart() else fetchToken() if (token == ERROR) adjustSepRegions(STRINGLIT) - } else { - this copyFrom next + } + else { + this.copyFrom(next) next.token = EMPTY } - def insertNL(nl: Token): Unit = { - next.copyFrom(this) - // todo: make offset line-end of previous line? - offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset - token = nl - } + if (isAfterLineEnd) handleNewLine(lastToken) + postProcessToken() + //printState() + } + + protected def printState() = + print("[" + show + "]") + /** Insert `token` at assumed `offset` in front of current one. */ + def insert(token: Token, offset: Int) = { + next.copyFrom(this) + this.offset = offset + this.token = token + } + + /** If this token and the next constitute an end marker, skip them and append a new EndMarker + * value at the end of the endMarkers queue. + */ + private def handleEndMarkers(width: IndentWidth): Unit = { + if (next.token == IDENTIFIER && next.name == nme.end && width == indent.width) { + val lookahead = lookaheadScanner + lookahead.nextToken() // skip the `end` - /** A leading symbolic or backquoted identifier is treated as an infix operator - * if it is followed by at least one ' ' and a token on the same line - * that can start an expression. - */ - def isLeadingInfixOperator = - allowLeadingInfixOperators && - (token == BACKQUOTED_IDENT || - token == IDENTIFIER && isOperatorPart(name(name.length - 1))) && - (ch == ' ') && { - val lookahead = lookaheadScanner - lookahead.allowLeadingInfixOperators = false - // force a NEWLINE a after current token if it is on its own line + def handle(tag: EndMarkerTag) = { + val skipTo = lookahead.charOffset lookahead.nextToken() - canStartExpressionTokens.contains(lookahead.token) + if (lookahead.isAfterLineEnd || lookahead.token == EOF) { + lexical.println(i"produce end marker $tag $width") + endMarkers += EndMarker(tag, width, offset) + next.token = EMPTY + while (charOffset < skipTo) nextChar() + } } - /** Insert NEWLINE or NEWLINES if - * - we are after a newline - * - we are within a { ... } or on toplevel (wrt sepRegions) - * - the current token can start a statement and the one before can end it - * insert NEWLINES if we are past a blank line, NEWLINE otherwise - */ - if (isAfterLineEnd() && - (canEndStatTokens contains lastToken) && - (canStartStatTokens contains token) && - (sepRegions.isEmpty || sepRegions.head == RBRACE || - sepRegions.head == ARROW && token == CASE)) { - if (pastBlankLine()) - insertNL(NEWLINES) - else if (!isLeadingInfixOperator) - insertNL(NEWLINE) - else if (isScala2Mode || oldSyntax) + lookahead.token match { + case IDENTIFIER | BACKQUOTED_IDENT => handle(lookahead.name) + case IF | WHILE | FOR | MATCH | TRY | NEW => handle(lookahead.token) + case _ => + } + } + } + + /** Consume and cancel the head of the end markers queue if it has the given `tag` and width. + * Flag end markers with higher indent widths as errors. 
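// Editorial sketch (not from the patch): the `end <name>` markers that
// handleEndMarkers queues up and consumeEndMarker later checks. The marker is
// expected at the indentation width of the definition it closes; the name and
// body below are invented for illustration.
def average(xs: List[Double]): Double =
  val sum = xs.foldLeft(0.0)(_ + _)
  sum / xs.length
end average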
+ */ + def consumeEndMarker(tag: EndMarkerTag, width: IndentWidth): Unit = { + lexical.println(i"consume end marker $tag $width") + if (endMarkers.nonEmpty) { + val em = endMarkers.head + if (width <= em.width) { + if (em.tag != tag || em.width != width) { + lexical.println(i"misaligned end marker ${em.tag}, ${em.width} at ${width}") + errorButContinue("misaligned end marker", em.offset) + } + endMarkers.trimStart(1) + } + } + } + + /** A leading symbolic or backquoted identifier is treated as an infix operator if + * - it does not follow a blank line, and + * - it is followed on the same line by at least one ' ' + * and a token that can start an expression. + * If a leading infix operator is found and -language:Scala2 or -old-syntax is set, + * emit a change warning. + */ + def isLeadingInfixOperator() = ( + allowLeadingInfixOperators + && ( token == BACKQUOTED_IDENT + || token == IDENTIFIER && isOperatorPart(name(name.length - 1))) + && ch == ' ' + && !pastBlankLine + && { + val lookahead = lookaheadScanner + lookahead.allowLeadingInfixOperators = false + // force a NEWLINE a after current token if it is on its own line + lookahead.nextToken() + canStartExpressionTokens.contains(lookahead.token) + } + && { + if (isScala2Mode || oldSyntax && !rewrite) ctx.warning(em"""Line starts with an operator; |it is now treated as a continuation of the expression on the previous line, |not as a separate statement.""", source.atSpan(Span(offset))) + true } + ) - postProcessToken() - // print("[" + this +"]") + /** The indentation width of the given offset. + * It is assumed that only blank characters are between the start of the line and the offset. + */ + def indentWidth(offset: Offset): IndentWidth = { + import IndentWidth.{Run, Conc} + def recur(idx: Int, ch: Char, n: Int): IndentWidth = + if (idx < 0) Run(ch, n) + else { + val nextChar = buf(idx) + if (nextChar == ' ' || nextChar == '\t') + if (nextChar == ch) + recur(idx - 1, ch, n + 1) + else { + val prefix = recur(idx - 1, nextChar, 1) + if (n == 0) prefix else Conc(prefix, Run(ch, n)) + } + else Run(ch, n) + } + recur(offset - 1, ' ', 0) } + /** Handle newlines, possibly inserting an INDENT, OUTDENT, NEWLINE, or NEWLINES token + * in front of the current token. This depends on whether indentation is significant or not. + * + * Indentation is _significant_ if indentSyntax is set, and we are not inside a + * {...}, [...], (...), case ... => pair, nor in a if/while condition + * (i.e. sepRegions is empty). + * + * There are three rules: + * + * 1. Insert NEWLINE or NEWLINES if + * + * - the closest enclosing sepRegion is { ... } or for ... do/yield, + * or we are on the toplevel, i.e. sepRegions is empty, and + * - the previous token can end a statement, and + * - the current token can start a statement, and + * - the current token is not a leading infix operator, and + * - if indentation is significant then the current token starts at the current + * indentation width or to the right of it. + * + * The inserted token is NEWLINES if the current token is preceded by a + * whitespace line, or NEWLINE otherwise. + * + * 2. Insert INDENT if + * + * - indentation is significant, and + * - the last token can start an indentation region. + * - the indentation of the current token is strictly greater than the previous + * indentation width, or the two widths are the same and the current token is + * one of `:` or `match`. 
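// Editorial sketch (not from the patch): the leading-infix-operator rule of
// isLeadingInfixOperator above in action. Each `+` line starts with an
// operator followed by a space and a token that can start an expression, and
// does not follow a blank line, so no NEWLINE is inserted and the lines
// continue the previous expression instead of starting new statements.
def total(a: Int, b: Int, c: Int): Int =
  a
  + b
  + c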
+ * + * The following tokens can start an indentation region: + * + * : = => <- if then else while do try catch finally for yield match + * + * Inserting an INDENT starts a new indentation region with the indentation of the current + * token as indentation width. + * + * 3. Insert OUTDENT if + * + * - indentation is significant, and + * - the indentation of the current token is strictly less than the + * previous indentation width, + * - the current token is not a leading infix operator. + * + * Inserting an OUTDENT closes an indentation region. In this case, issue an error if + * the indentation of the current token does not match the indentation of some previous + * line in an enclosing indentation region. + * + * If a token is inserted and consumed, the original source token is still considered to + * start a new line, so the process that inserts an OUTDENT might repeat several times. + * + * Indentation widths are strings consisting of spaces and tabs, ordered by the prefix relation. + * I.e. `a <= b` iff `b.startsWith(a)`. If indentation is significant it is considered an error + * if the current indentation width and the indentation of the current token are incomparable. + */ + def handleNewLine(lastToken: Token) = { + val indentIsSignificant = indentSyntax && sepRegions.isEmpty + val newlineIsSeparating = ( + sepRegions.isEmpty + || sepRegions.head == RBRACE + || sepRegions.head == ARROW && token == CASE + ) + val curWidth = indentWidth(offset) + val lastWidth = indent.width + if (newlineIsSeparating && + canEndStatTokens.contains(lastToken)&& + canStartStatTokens.contains(token) && + (!indentIsSignificant || lastWidth <= curWidth) && + !isLeadingInfixOperator()) + insert(if (pastBlankLine) NEWLINES else NEWLINE, lineOffset) + else if (indentIsSignificant) { + if (lastWidth < curWidth || + lastWidth == curWidth && (lastToken == MATCH || lastToken == CATCH) && token == CASE) { + if (canStartIndentTokens.contains(lastToken)) { + indent = IndentRegion(curWidth, Set(), lastToken, indent) + insert(INDENT, offset) + } + } + else if (curWidth < lastWidth || + curWidth == lastWidth && (indent.token == MATCH || indent.token == CATCH) && token != CASE) { + if (!isLeadingInfixOperator()) { + indent = indent.enclosing + insert(OUTDENT, offset) + handleEndMarkers(curWidth) + } + } + else if (lastWidth != curWidth) + errorButContinue( + i"""Incompatible combinations of tabs and spaces in indentation prefixes. + |Previous indent : $lastWidth + |Latest indent : $curWidth""") + } + if (indentIsSignificant && indent.width < curWidth && !indent.others.contains(curWidth)) + if (token == OUTDENT) + errorButContinue( + i"""The start of this line does not match any of the previous indentation widths. 
+ |Indentation width of current line : $curWidth + |This falls between previous widths: ${indent.width} and $lastWidth""") + else + indent = IndentRegion(indent.width, indent.others + curWidth, indent.token, indent.outer) + } + + /** - Join CASE + CLASS => CASECLASS, CASE + OBJECT => CASEOBJECT, SEMI + ELSE => ELSE, COLON + => COLONEOL + * - Insert missing OUTDENTs at EOF + */ def postProcessToken(): Unit = { - // Join CASE + CLASS => CASECLASS, CASE + OBJECT => CASEOBJECT, SEMI + ELSE => ELSE def lookahead() = { - prev copyFrom this + prev.copyFrom(this) + lastOffset = lastCharOffset fetchToken() } - def reset(nextLastOffset: Offset) = { - lastOffset = nextLastOffset - next copyFrom this - this copyFrom prev + def reset() = { + next.copyFrom(this) + this.copyFrom(prev) } def fuse(tok: Int) = { token = tok offset = prev.offset lastOffset = prev.lastOffset + lineOffset = prev.lineOffset } - if (token == CASE) { - val nextLastOffset = lastCharOffset - lookahead() - if (token == CLASS) fuse(CASECLASS) - else if (token == OBJECT) fuse(CASEOBJECT) - else reset(nextLastOffset) - } else if (token == SEMI) { - val nextLastOffset = lastCharOffset - lookahead() - if (token != ELSE) reset(nextLastOffset) - } else if (token == COMMA){ - val nextLastOffset = lastCharOffset - lookahead() - if (isAfterLineEnd() && (token == RPAREN || token == RBRACKET || token == RBRACE)) { - /* skip the trailing comma */ - } else if (token == EOF) { // e.g. when the REPL is parsing "val List(x, y, _*," - /* skip the trailing comma */ - } else reset(nextLastOffset) + token match { + case CASE => + lookahead() + if (token == CLASS) fuse(CASECLASS) + else if (token == OBJECT) fuse(CASEOBJECT) + else reset() + case SEMI => + lookahead() + if (token != ELSE) reset() + case COMMA => + lookahead() + if (isAfterLineEnd && (token == RPAREN || token == RBRACKET || token == RBRACE || token == OUTDENT)) { + /* skip the trailing comma */ + } else if (token == EOF) { // e.g. when the REPL is parsing "val List(x, y, _*," + /* skip the trailing comma */ + } else reset() + case COLON => + lookahead() + val atEOL = isAfterLineEnd + reset() + if (atEOL) token = COLONEOL + case EOF if !indent.isOutermost => + insert(OUTDENT, offset) + indent = indent.outer + case _ => } - } /** Is current token first one after a newline? */ - def isAfterLineEnd(): Boolean = - lastOffset < lineStartOffset && - (lineStartOffset <= offset || - lastOffset < lastLineStartOffset && lastLineStartOffset <= offset) + def isAfterLineEnd: Boolean = lineOffset >= 0 /** Is there a blank line between the current token and the last one? + * A blank line consists only of characters <= ' '. * @pre afterLineEnd(). */ - private def pastBlankLine(): Boolean = { + private def pastBlankLine: Boolean = { val end = offset def recur(idx: Offset, isBlank: Boolean): Boolean = idx < end && { @@ -497,6 +687,7 @@ object Scanners { */ protected final def fetchToken(): Unit = { offset = charOffset - 1 + lineOffset = if (lastOffset < lineStartOffset) lineStartOffset else -1 name = null (ch: @switch) match { case ' ' | '\t' | CR | LF | FF => @@ -541,9 +732,9 @@ object Scanners { nextChar() getOperatorRest() case '/' => - if (skipComment()) { + if (skipComment()) fetchToken() - } else { + else { putChar('/') getOperatorRest() } @@ -556,7 +747,8 @@ object Scanners { base = 16 if (isNumberSeparator(ch)) errorButContinue("leading separator is not allowed", offset + 2) - } else { + } + else { /** * What should leading 0 be in the future? 
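// Editorial sketch (not from the patch): trailing commas of the kind that the
// COMMA case of postProcessToken above silently skips when the token after the
// line break is a closing `)`, `]`, `}` or an OUTDENT.
val point = (
  1,
  2,
)

val labels = List(
  "north",
  "south",
)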
It is potentially dangerous * to let it be base-10 because of history. Should it be an error? Is @@ -575,7 +767,7 @@ object Scanners { case '`' => getBackquotedIdent() case '\"' => - def fetchDoubleQuote() = { + def fetchDoubleQuote() = if (token == INTERPOLATIONID) { nextRawChar() if (ch == '\"') { @@ -585,30 +777,33 @@ object Scanners { getStringPart(multiLine = true) sepRegions = STRINGPART :: sepRegions // indicate string part sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part - } else { + } + else { token = STRINGLIT strVal = "" } - } else { + } + else { getStringPart(multiLine = false) sepRegions = STRINGLIT :: sepRegions // indicate single line string part } - } else { + } + else { nextChar() if (ch == '\"') { nextChar() if (ch == '\"') { nextRawChar() getRawStringLit() - } else { + } + else { token = STRINGLIT strVal = "" } - } else { - getStringLit() } + else + getStringLit() } - } fetchDoubleQuote() case '\'' => def fetchSingleQuote() = { @@ -631,9 +826,9 @@ object Scanners { nextChar() if ('0' <= ch && ch <= '9') { putChar('.'); getFraction(); setStrVal() - } else { - token = DOT } + else + token = DOT case ';' => nextChar(); token = SEMI case ',' => @@ -657,32 +852,36 @@ object Scanners { nextChar() } case _ => - def fetchOther() = { + def fetchOther() = if (ch == '\u21D2') { nextChar(); token = ARROW - } else if (ch == '\u2190') { + } + else if (ch == '\u2190') { nextChar(); token = LARROW - } else if (Character.isUnicodeIdentifierStart(ch)) { + } + else if (Character.isUnicodeIdentifierStart(ch)) { putChar(ch) nextChar() getIdentRest() - } else if (isSpecial(ch)) { + } + else if (isSpecial(ch)) { putChar(ch) nextChar() getOperatorRest() - } else { + } + else { // FIXME: Dotty deviation: f"" interpolator is not supported (#1814) error("illegal character '\\u%04x'".format(ch: Int)) nextChar() } - } fetchOther() } } private def skipComment(): Boolean = { - def appendToComment(ch: Char) = + def appendToComment(ch: Char) = { if (keepComments) commentBuf.append(ch) + } def nextChar() = { appendToComment(ch) Scanner.this.nextChar() @@ -692,7 +891,7 @@ object Scanners { if ((ch != CR) && (ch != LF) && (ch != SU)) skipLine() } @tailrec - def skipComment(): Unit = { + def skipComment(): Unit = if (ch == '/') { nextChar() if (ch == '*') nestedComment() @@ -705,7 +904,6 @@ object Scanners { } else if (ch == SU) incompleteInputError("unclosed comment") else { nextChar(); skipComment() } - } def nestedComment() = { nextChar(); skipComment() } val start = lastCharOffset def finishComment(): Boolean = { @@ -714,9 +912,8 @@ object Scanners { val comment = Comment(pos, flushBuf(commentBuf)) commentPosBuf += pos - if (comment.isDocComment) { + if (comment.isDocComment) addComment(comment) - } } true @@ -733,28 +930,36 @@ object Scanners { // Lookahead --------------------------------------------------------------- - /** A new Scanner that starts at the current token offset */ - def lookaheadScanner: Scanner = new Scanner(source, offset) - - /** Is the token following the current one in `tokens`? 
*/ - def lookaheadIn(tokens: BitSet): Boolean = { - val lookahead = lookaheadScanner - while ({ - lookahead.nextToken() - lookahead.token == NEWLINE || lookahead.token == NEWLINES - }) () - tokens.contains(lookahead.token) - } + /** A new Scanner that starts at the current token offset */ + def lookaheadScanner: Scanner = new Scanner(source, offset) { + override val indentSyntax = false + override protected def printState() = { + print("la:") + super.printState() + } + } - /** Is the current token in a position where a modifier is allowed? */ - def inModifierPosition(): Boolean = { - val lookahead = lookaheadScanner - while ({ - lookahead.nextToken() - lookahead.token == NEWLINE || lookahead.token == NEWLINES || lookahead.isSoftModifier - }) () - modifierFollowers.contains(lookahead.token) - } + /** Is the token following the current one in `tokens`? */ + def lookaheadIn(tokens: BitSet): Boolean = { + val lookahead = lookaheadScanner + while ({ + lookahead.nextToken() + lookahead.token == NEWLINE || lookahead.token == NEWLINES + }) + () + tokens.contains(lookahead.token) + } + + /** Is the current token in a position where a modifier is allowed? */ + def inModifierPosition(): Boolean = { + val lookahead = lookaheadScanner + while ({ + lookahead.nextToken() + lookahead.token == NEWLINE || lookahead.token == NEWLINES || lookahead.isSoftModifier + }) + () + modifierFollowers.contains(lookahead.token) + } // Identifiers --------------------------------------------------------------- @@ -802,9 +1007,9 @@ object Scanners { putChar(ch) nextChar() getIdentRest() - } else { - finishNamed() } + else + finishNamed() } private def getOperatorRest(): Unit = (ch: @switch) match { @@ -821,7 +1026,7 @@ object Scanners { else finishNamed() } - private def getIdentOrOperatorRest(): Unit = { + private def getIdentOrOperatorRest(): Unit = if (isIdentifierPart(ch)) getIdentRest() else ch match { @@ -834,7 +1039,6 @@ object Scanners { if (isSpecial(ch)) getOperatorRest() else finishNamed() } - } def isSoftModifier: Boolean = token == IDENTIFIER && softModifierNames.contains(name) @@ -845,6 +1049,9 @@ object Scanners { def isSoftModifierInParamModifierPosition: Boolean = isSoftModifier && !lookaheadIn(BitSet(COLON)) + def isNestedStart = token == LBRACE || token == INDENT + def isNestedEnd = token == RBRACE || token == OUTDENT + // Literals ----------------------------------------------------------------- private def getStringLit() = { @@ -853,25 +1060,27 @@ object Scanners { setStrVal() nextChar() token = STRINGLIT - } else error("unclosed string literal") + } + else error("unclosed string literal") } - private def getRawStringLit(): Unit = { + private def getRawStringLit(): Unit = if (ch == '\"') { nextRawChar() if (isTripleQuote()) { setStrVal() token = STRINGLIT - } else + } + else getRawStringLit() - } else if (ch == SU) { + } + else if (ch == SU) incompleteInputError("unclosed multi-line string literal") - } else { + else { putChar(ch) nextRawChar() getRawStringLit() } - } @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = { def finishStringPart() = { @@ -880,48 +1089,53 @@ object Scanners { next.lastOffset = charOffset - 1 next.offset = charOffset - 1 } - if (ch == '"') { + if (ch == '"') if (multiLine) { nextRawChar() if (isTripleQuote()) { setStrVal() token = STRINGLIT - } else + } + else getStringPart(multiLine) - } else { + } + else { nextChar() setStrVal() token = STRINGLIT } - } else if (ch == '$') { + else if (ch == '$') { nextRawChar() if (ch == '$') { putChar(ch) 
nextRawChar() getStringPart(multiLine) - } else if (ch == '{') { + } + else if (ch == '{') { finishStringPart() nextRawChar() next.token = LBRACE - } else if (Character.isUnicodeIdentifierStart(ch) || ch == '_') { + } + else if (Character.isUnicodeIdentifierStart(ch) || ch == '_') { finishStringPart() while ({ putChar(ch) nextRawChar() ch != SU && Character.isUnicodeIdentifierPart(ch) - }) () + }) + () finishNamed(target = next) - } else { - error("invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected") } - } else { + else + error("invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected") + } + else { val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF))) - if (isUnclosedLiteral) { + if (isUnclosedLiteral) if (multiLine) incompleteInputError("unclosed multi-line string literal") else error("unclosed string literal") - } else { putChar(ch) nextRawChar() @@ -945,12 +1159,14 @@ object Scanners { nextChar() } true - } else { + } + else { putChar('"') putChar('"') false } - } else { + } + else { putChar('"') false } @@ -974,7 +1190,8 @@ object Scanners { } } putChar(oct.toChar) - } else { + } + else { ch match { case 'b' => putChar('\b') case 't' => putChar('\t') @@ -988,7 +1205,8 @@ object Scanners { } nextChar() } - } else { + } + else { putChar(ch) nextChar() } @@ -998,10 +1216,9 @@ object Scanners { putChar(ch) } - private def getLitChars(delimiter: Char) = { + private def getLitChars(delimiter: Char) = while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape)) getLitChar() - } /** read fractional part and exponent of floating point number * if one is present. @@ -1016,9 +1233,8 @@ object Scanners { if (ch == 'e' || ch == 'E') { val lookahead = lookaheadReader() lookahead.nextChar() - if (lookahead.ch == '+' || lookahead.ch == '-') { + if (lookahead.ch == '+' || lookahead.ch == '-') lookahead.nextChar() - } if ('0' <= lookahead.ch && lookahead.ch <= '9' || isNumberSeparator(ch)) { putChar(ch) nextChar() @@ -1038,7 +1254,8 @@ object Scanners { putChar(ch) nextChar() token = DOUBLELIT - } else if (ch == 'f' || ch == 'F') { + } + else if (ch == 'f' || ch == 'F') { putChar(ch) nextChar() token = FLOATLIT @@ -1066,7 +1283,8 @@ object Scanners { nextChar() getFraction() } - } else (ch: @switch) match { + } + else (ch: @switch) match { case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' => if (base == 10) getFraction() case 'l' | 'L' => @@ -1124,8 +1342,8 @@ object Scanners { } /* Resume normal scanning after XML */ - def resume(lastToken: Token): Unit = { - token = lastToken + def resume(lastTokenData: TokenData): Unit = { + this.copyFrom(lastTokenData) if (next.token != EMPTY && !ctx.reporter.hasErrors) error("unexpected end of input: possible missing '}' in XML block") @@ -1135,7 +1353,76 @@ object Scanners { /* Initialization: read first char, then first token */ nextChar() nextToken() - } // end Scanner + } + // end Scanner + + /** A class describing an indentation region. 
+ * @param width The principal indendation width + * @param others Other indendation widths > width of lines in the same region + */ + class IndentRegion(val width: IndentWidth, val others: Set[IndentWidth], val token: Token, val outer: IndentRegion | Null) { + def enclosing: IndentRegion = outer.asInstanceOf[IndentRegion] + def isOutermost = outer == null + } + + enum IndentWidth { + case Run(ch: Char, n: Int) + case Conc(l: IndentWidth, r: Run) + + def <= (that: IndentWidth): Boolean = this match { + case Run(ch1, n1) => + that match { + case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) + case Conc(l, r) => this <= l + } + case Conc(l1, r1) => + that match { + case Conc(l2, r2) => l1 == l2 && r1 <= r2 + case _ => false + } + } + + def < (that: IndentWidth): Boolean = this <= that && !(that <= this) + + def toPrefix: String = this match { + case Run(ch, n) => ch.toString * n + case Conc(l, r) => l.toPrefix ++ r.toPrefix + } + + override def toString: String = { + def kind(ch: Char) = ch match { + case ' ' => "space" + case '\t' => "tab" + case _ => s"'$ch'-character" + } + this match { + case Run(ch, n) => s"$n ${kind(ch)}${if (n == 1) "" else "s"}" + case Conc(l, r) => s"$l, $r" + } + } + } + object IndentWidth { + private inline val MaxCached = 40 + private val spaces = Array.tabulate(MaxCached + 1)(new Run(' ', _)) + private val tabs = Array.tabulate(MaxCached + 1)(new Run('\t', _)) + + def Run(ch: Char, n: Int): Run = + if (n <= MaxCached && ch == ' ') spaces(n) + else if (n <= MaxCached && ch == '\t') tabs(n) + else new Run(ch, n) + + val Zero = Run(' ', 0) + } + + /** What can be referred to in an end marker */ + type EndMarkerTag = TermName | Token + + /** A processed end marker + * @param tag The name or token referred to in the marker + * @param width The indentation width where the marker occurred + * @param offset The offset of the `end` + */ + case class EndMarker(tag: EndMarkerTag, width: IndentWidth, offset: Int) // ------------- keyword configuration ----------------------------------- diff --git a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala index afa7fefab860..4d28b21e45ac 100644 --- a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala @@ -55,7 +55,11 @@ object ScriptParsers { * by compilationUnit(). */ override def parse(): Tree = unsupported("parse") + } +} + /* TODO: reinstantiate + val stmts = templateStatSeq(false)._2 accept(EOF) @@ -141,5 +145,3 @@ object ScriptParsers { // package { ... 
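// Editorial sketch (not the compiler's IndentWidth): a standalone model of the
// prefix ordering implemented by IndentWidth.<= above. Width a <= width b iff
// the whitespace prefix of b starts with that of a; mixed tabs and spaces can
// make two widths incomparable, which the scanner reports as an error.
object IndentPrefixModel {
  def leq(a: String, b: String): Boolean = b.startsWith(a)
  def comparable(a: String, b: String): Boolean = leq(a, b) || leq(b, a)

  def main(args: Array[String]): Unit = {
    assert(leq("  ", "    "))        // two spaces <= four spaces
    assert(!leq("    ", "  "))       // but not the other way around
    assert(!comparable("\t", "  "))  // a tab vs. two spaces: incomparable
  }
}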
} makePackaging(0, emptyPkg, List(moduleDef)) }*/ - } -} diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index 4f458cf9d58c..96acb0557791 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -125,9 +125,11 @@ abstract class TokensCommon { final val RBRACKET = 93; enter(RBRACKET, "']'") final val LBRACE = 94; enter(LBRACE, "'{'") final val RBRACE = 95; enter(RBRACE, "'}'") + final val INDENT = 96; enter(INDENT, "indent") + final val OUTDENT = 97; enter(OUTDENT, "unindent") final val firstParen = LPAREN - final val lastParen = RBRACE + final val lastParen = OUTDENT def buildKeywordArray(keywords: TokenSet): (Int, Array[Int]) = { def start(tok: Token) = tokenString(tok).toTermName.asSimpleName.start @@ -186,6 +188,7 @@ object Tokens extends TokensCommon { /** special symbols */ final val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line") final val NEWLINES = 79; enter(NEWLINES, "end of statement", "new lines") + final val COLONEOL = 88; enter(COLONEOL, ":", ": at eol") /** special keywords */ final val USCORE = 73; enter(USCORE, "_") @@ -200,7 +203,7 @@ object Tokens extends TokensCommon { final val QUOTE = 86; enter(QUOTE, "'") /** XML mode */ - final val XMLSTART = 96; enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate + final val XMLSTART = 98; enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate final val alphaKeywords: TokenSet = tokenRange(IF, MACRO) final val symbolicKeywords: TokenSet = tokenRange(USCORE, TLARROW) @@ -216,7 +219,7 @@ object Tokens extends TokensCommon { USCORE, NULL, THIS, SUPER, TRUE, FALSE, RETURN, QUOTEID, XMLSTART) final val canStartExpressionTokens: TokenSet = atomicExprTokens | BitSet( - LBRACE, LPAREN, QUOTE, IF, DO, WHILE, FOR, NEW, TRY, THROW, IMPLIED, GIVEN) + LBRACE, LPAREN, INDENT, QUOTE, IF, DO, WHILE, FOR, NEW, TRY, THROW, IMPLIED, GIVEN) final val canStartTypeTokens: TokenSet = literalTokens | identifierTokens | BitSet( THIS, SUPER, USCORE, LPAREN, AT) @@ -249,7 +252,7 @@ object Tokens extends TokensCommon { AT, CASE) final val canEndStatTokens: TokenSet = atomicExprTokens | BitSet( - TYPE, RPAREN, RBRACE, RBRACKET) + TYPE, RPAREN, RBRACE, RBRACKET, OUTDENT) /** Tokens that stop a lookahead scan search for a `<-`, `then`, or `do`. * Used for disambiguating between old and new syntax. 
@@ -259,6 +262,12 @@ object Tokens extends TokensCommon { final val numericLitTokens: TokenSet = BitSet(INTLIT, LONGLIT, FLOATLIT, DOUBLELIT) + final val statCtdTokens: BitSet = BitSet(THEN, ELSE, DO, CATCH, FINALLY, YIELD, MATCH) + + final val canStartIndentTokens: BitSet = + statCtdTokens | BitSet(COLONEOL, EQUALS, ARROW, LARROW, WHILE, TRY, FOR) + // `if` is excluded because it often comes after `else` which makes for awkward indentation rules + final val scala3keywords = BitSet(ENUM, ERASED, GIVEN, IMPLIED) final val softModifierNames = Set(nme.inline, nme.opaque) diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala index e039e1bf67fb..5b7ceaa9659c 100644 --- a/compiler/src/dotty/tools/dotc/parsing/package.scala +++ b/compiler/src/dotty/tools/dotc/parsing/package.scala @@ -36,5 +36,5 @@ package object parsing { def minPrec: Int = 0 def minInfixPrec: Int = 1 def maxPrec: Int = 11 - } + diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 9279d0b72416..35d77e2c149a 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -318,6 +318,9 @@ object MarkupParsers { /** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */ @forceInline private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = { + assert(parser.in.token == Tokens.XMLSTART) + val saved = parser.in.newTokenData + saved.copyFrom(parser.in) var output: Tree = null.asInstanceOf[Tree] try output = f() catch { @@ -328,7 +331,7 @@ object MarkupParsers { case _: ArrayIndexOutOfBoundsException => parser.syntaxError("missing end tag in XML literal for <%s>" format debugLastElem, debugLastPos) } - finally parser.in resume Tokens.XMLSTART + finally parser.in.resume(saved) if (output == null) parser.errorTermTree @@ -396,7 +399,12 @@ object MarkupParsers { def escapeToScala[A](op: => A, kind: String): A = { xEmbeddedBlock = false val res = saving[List[Int], A](parser.in.sepRegions, parser.in.sepRegions = _) { - parser.in resume LBRACE + val lbrace = parser.in.newTokenData + lbrace.token = LBRACE + lbrace.offset = parser.in.charOffset - 1 + lbrace.lastOffset = parser.in.lastOffset + lbrace.lineOffset = parser.in.lineOffset + parser.in.resume(lbrace) op } if (parser.in.token != RBRACE) diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index ad9c110e739d..47a2d7a6602d 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -83,9 +83,9 @@ object Plugin { */ def load(classname: String, loader: ClassLoader): Try[AnyClass] = { import scala.util.control.NonFatal - try { + try Success[AnyClass](loader loadClass classname) - } catch { + catch { case NonFatal(e) => Failure(new PluginLoadException(classname, s"Error: unable to load class $classname: ${e.getMessage}")) case e: NoClassDefFoundError => @@ -103,8 +103,7 @@ object Plugin { def loadAllFrom( paths: List[List[Path]], dirs: List[Path], - ignoring: List[String]): List[Try[Plugin]] = - { + ignoring: List[String]): List[Try[Plugin]] = { def fromFile(inputStream: InputStream, path: Path): String = { val props = new Properties diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 734208ce51be..01fd0b6d4e6f 100644 --- 
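// Editorial sketch (not from the patch): the source shape that the
// canStartIndentTokens set above is meant to enable, with indentation regions
// opening after `=`, `match`, and `=>`. As the comment above notes, `if`
// itself is deliberately excluded because it often follows `else`. Whether
// this exact snapshot already accepts every form below is not shown here.
def classify(n: Int): String =
  n match
    case 0 =>
      "zero"
    case _ =>
      if (n > 0) "positive"
      else "negative"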
a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -58,8 +58,7 @@ trait Plugins { // remove any with conflicting names or subcomponent names def pick( plugins: List[Plugin], - plugNames: Set[String]): List[Plugin] = - { + plugNames: Set[String]): List[Plugin] = { if (plugins.isEmpty) return Nil // early return val plug :: tail = plugins @@ -89,7 +88,8 @@ trait Plugins { for { opt <- ctx.settings.pluginOptions.value if !(plugs exists (opt startsWith _.name + ":")) - } ctx.error("bad option: -P:" + opt) + } + ctx.error("bad option: -P:" + opt) plugs } diff --git a/compiler/src/dotty/tools/dotc/printing/DecompilerPrinter.scala b/compiler/src/dotty/tools/dotc/printing/DecompilerPrinter.scala index 7ce03efbadff..e45e34c7f097 100644 --- a/compiler/src/dotty/tools/dotc/printing/DecompilerPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/DecompilerPrinter.scala @@ -73,9 +73,8 @@ class DecompilerPrinter(_ctx: Context) extends RefinedPrinter(_ctx) { else super.toTextTemplate(untpd.cpy.Template(impl)(parents = parents, body = body), ofNew) } - override protected def typeApplyText[T >: Untyped](tree: TypeApply[T]): Text = { + override protected def typeApplyText[T >: Untyped](tree: TypeApply[T]): Text = if (tree.symbol eq defn.InternalQuoted_exprQuote) "'" else if (tree.symbol eq defn.InternalQuoted_typeQuote) "'[" ~ toTextGlobal(tree.args, ", ") ~ "]" else super.typeApplyText(tree) - } } diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index ef444da7182e..88e414fe21b7 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -27,9 +27,9 @@ class PlainPrinter(_ctx: Context) extends Printer { try { ctx.base.toTextRecursions += 1 op - } finally { - ctx.base.toTextRecursions -= 1 } + finally + ctx.base.toTextRecursions -= 1 else { if (ctx.base.toTextRecursions >= maxToTextRecursions) recursionLimitExceeded() @@ -404,7 +404,7 @@ class PlainPrinter(_ctx: Context) extends Printer { } protected def privateWithinString(sym: Symbol): String = - if (sym.exists && sym.privateWithin.exists) + if (sym.exists && sym.privateWithin.exists) nameString(sym.privateWithin.name.stripModuleClassSuffix) else "" @@ -520,11 +520,10 @@ class PlainPrinter(_ctx: Context) extends Printer { nodeName ~ "(" ~ elems ~ tpSuffix ~ ")" ~ (Str(tree.sourcePos.toString) provided printDebug) }.close // todo: override in refined printer - def toText(pos: SourcePosition): Text = { + def toText(pos: SourcePosition): Text = if (!pos.exists) "" else if (pos.source.exists) s"${pos.source.file.name}:${pos.line + 1}" else s"(no source file, offset = ${pos.span.point})" - } def toText(result: SearchResult): Text = result match { case result: SearchSuccess => @@ -537,7 +536,7 @@ class PlainPrinter(_ctx: Context) extends Printer { "Ambiguous Implicit: " ~ toText(result.alt1.ref) ~ " and " ~ toText(result.alt2.ref) case _ => "Search Failure: " ~ toText(result.tree) - } + } } def toText(importInfo: ImportInfo): Text = { diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 8ce095f96de8..9e4ab644b43d 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -564,8 +564,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => "(" ~ 
Text(args map argToText, ", ") ~ ")" } changePrec(GlobalPrec) { - (keywordText("given ") provided contextual) ~ - (keywordText("erased ") provided isErased) ~ + (keywordText("given ") provided contextual) ~ + (keywordText("erased ") provided isErased) ~ argsText ~ " => " ~ toText(body) } case PolyFunction(targs, body) => @@ -711,14 +711,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def optAscription[T >: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) - private def idText(tree: untpd.Tree): Text = { + private def idText(tree: untpd.Tree): Text = if ((ctx.settings.uniqid.value || Printer.debugPrintUnique) && tree.hasType && tree.symbol.exists) s"#${tree.symbol.id}" else "" - } private def useSymbol(tree: untpd.Tree) = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value - protected def nameIdText[T >: Untyped](tree: NameTree[T]): Text = { + protected def nameIdText[T >: Untyped](tree: NameTree[T]): Text = if (tree.hasType && tree.symbol.exists) { val str: Text = nameString(tree.symbol) tree match { @@ -728,7 +727,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } else toText(tree.name) ~ idText(tree) - } private def toTextOwner(tree: Tree[_]) = "[owner = " ~ tree.symbol.maybeOwner.show ~ "]" provided ctx.settings.YprintDebugOwners.value @@ -801,7 +799,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => false } params ::: rest - } else impl.body + } + else impl.body val bodyText = " {" ~~ selfText ~ toTextGlobal(primaryConstrs ::: body, "\n") ~ "}" @@ -936,11 +935,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override def plain: PlainPrinter = new PlainPrinter(_ctx) - private def withPos(txt: Text, pos: SourcePosition): Text = { + private def withPos(txt: Text, pos: SourcePosition): Text = if (!printLines || !pos.exists) txt else txt match { case Str(s, _) => Str(s, LineRange(pos.line, pos.endLine)) case _ => txt } - } } diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala index f97cbfdabae5..03669ce47c20 100644 --- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala @@ -35,7 +35,7 @@ class ReplPrinter(_ctx: Context) extends DecompilerPrinter(_ctx) { else if (const.tag == Constants.StringTag) Str(s"${qSc}${const.value}$qSc") else Str(const.value.toString) - override def dclText(sym: Symbol): Text = if (debugPrint) super.dclText(sym) else { + override def dclText(sym: Symbol): Text = if (debugPrint) super.dclText(sym) else ("lazy": Text).provided(sym.is(Lazy)) ~~ toText(sym) ~ { if (sym.is(Method)) toText(sym.info) @@ -43,7 +43,6 @@ class ReplPrinter(_ctx: Context) extends DecompilerPrinter(_ctx) { else if (sym.isType || sym.isClass) "" else ":" ~~ toText(sym.info) } - } override def toTextSingleton(tp: SingletonType): Text = if (debugPrint) diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala index 96f091c392b3..fe298c0cf046 100644 --- a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala @@ -39,16 +39,17 @@ object SyntaxHighlighting { def highlightRange(from: Int, to: Int, color: String) = Arrays.fill(colorAt.asInstanceOf[Array[AnyRef]], from, to, color) - def highlightPosition(span: Span, color: String) = if (span.exists) { + def 
highlightPosition(span: Span, color: String) = if (span.exists) if (span.start < 0 || span.end > in.length) { if (debug) println(s"Trying to highlight erroneous position $span. Input size: ${in.length}") } else highlightRange(span.start, span.end, color) - } - val scanner = new Scanner(source) + val scanner = new Scanner(source) { + override protected def printState() = () + } while (scanner.token != EOF) { val start = scanner.offset val token = scanner.token diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index ea64ae7ead1f..56a4218b0f87 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -56,7 +56,7 @@ object Texts { private def appendIndented(that: Text)(width: Int): Text = Vertical(that.layout(width - indentMargin).indented :: this.relems) - private def append(width: Int)(that: Text): Text = { + private def append(width: Int)(that: Text): Text = if (this.isEmpty) that.layout(width) else if (that.isEmpty) this else if (that.isVertical) appendIndented(that)(width) @@ -64,7 +64,6 @@ object Texts { else if (that.remaining(width - lengthWithoutAnsi(lastLine)) >= 0) appendToLastLine(that) else if (that.isSplittable) that.relems.reverse.foldLeft(this)(_.append(width)(_)) else appendIndented(that)(width) - } private def lengthWithoutAnsi(str: String): Int = str.replaceAll("\u001b\\[\\d+m", "").length @@ -158,14 +157,13 @@ object Texts { /** A concatenation of elements in `xs` and interspersed with * separator strings `sep`. */ - def apply(xs: Traversable[Text], sep: String = " "): Text = { + def apply(xs: Traversable[Text], sep: String = " "): Text = if (sep == "\n") lines(xs) else { val ys = xs filterNot (_.isEmpty) if (ys.isEmpty) Str("") else ys reduce (_ ~ sep ~ _) } - } /** The given texts `xs`, each on a separate line */ def lines(xs: Traversable[Text]): Vertical = Vertical(xs.toList.reverse) diff --git a/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala b/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala index f0f5bbc84441..857519b9d31b 100644 --- a/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala +++ b/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala @@ -15,9 +15,9 @@ sealed trait AsyncHelper { def newBoundedQueueFixedThreadPool (nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor - } + object AsyncHelper { def apply(phase: Phase)(implicit ctx: Context): AsyncHelper = ctx.profiler match { case NoOpProfiler => new BasicAsyncHelper(phase) @@ -78,7 +78,8 @@ object AsyncHelper { new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) } - override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = () => { + override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = { + () => val data = new ThreadProfileData localData.set(data) @@ -133,7 +134,7 @@ object AsyncHelper { super.afterExecute(r, t) } - } } } + diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 69d493b78f3f..b3a2016bc6fb 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -29,9 +29,8 @@ case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEn case 
class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, allocatedBytes:Long, heapBytes:Long) { - def updateHeap(heapBytes:Long): ProfileSnap = { + def updateHeap(heapBytes:Long): ProfileSnap = copy(heapBytes = heapBytes) - } } case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpose:String, taskCount:Int, thread:Thread) { def allocatedBytes: Long = end.allocatedBytes - start.allocatedBytes @@ -92,9 +91,8 @@ private [profile] object RealProfiler { } private [profile] class RealProfiler(reporter : ProfileReporter)(implicit ctx: Context) extends Profiler with NotificationListener { - def completeBackground(threadRange: ProfileRange): Unit = { + def completeBackground(threadRange: ProfileRange): Unit = reporter.reportBackground(this, threadRange) - } def outDir: AbstractFile = ctx.settings.outputDir.value @@ -173,7 +171,8 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(implicit ctx: C val finalSnap = if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) { doGC initialSnap.updateHeap(readHeapUsage()) - } else initialSnap + } + else initialSnap reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } @@ -188,9 +187,9 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(implicit ctx: C } snapThread(0) } - } + case class EventType(name: String) object EventType { //main thread with other tasks @@ -223,13 +222,11 @@ object ConsoleProfileReporter extends ProfileReporter { override def close(profiler: RealProfiler): Unit = () - override def header(profiler: RealProfiler): Unit = { + override def header(profiler: RealProfiler): Unit = println(s"Profiler start (${profiler.id}) ${profiler.outDir}") - } - override def reportGc(data: GcEventData): Unit = { + override def reportGc(data: GcEventData): Unit = println(s"Profiler GC reported ${data.gcEndMillis - data.gcStartMillis}ms") - } } class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { @@ -239,15 +236,12 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"header(GC),startNs,endNs,startMs,endMs,name,action,cause,threads") } - override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = { + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = reportCommon(EventType.BACKGROUND, profiler, threadRange) - } - override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = { + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = reportCommon(EventType.MAIN, profiler, threadRange) - } - private def reportCommon(tpe:EventType, profiler: RealProfiler, threadRange: ProfileRange): Unit = { + private def reportCommon(tpe:EventType, profiler: RealProfiler, threadRange: ProfileRange): Unit = out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.phaseName.replace(',', ' ')},${threadRange.purpose},${threadRange.taskCount},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${threadRange.end.heapBytes} ") - } override def reportGc(data: GcEventData): Unit = { val duration = TimeUnit.MILLISECONDS.toNanos(data.gcEndMillis - data.gcStartMillis + 1) diff --git 
a/compiler/src/dotty/tools/dotc/quoted/QuoteCompiler.scala b/compiler/src/dotty/tools/dotc/quoted/QuoteCompiler.scala index 3fe8488e15ab..9fb46836f539 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuoteCompiler.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuoteCompiler.scala @@ -51,7 +51,7 @@ class QuoteCompiler extends Compiler { def phaseName: String = "quotedFrontend" - override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = { + override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = units.flatMap { case exprUnit: ExprCompilationUnit => val pos = Span(0) @@ -80,7 +80,6 @@ class QuoteCompiler extends Compiler { Some(CompilationUnit(source, tree, forceTrees = true)) } } - } /** Get the literal value if this tree only contains a literal tree */ @tailrec private def getLiteral(tree: Tree): Option[Any] = tree match { diff --git a/compiler/src/dotty/tools/dotc/quoted/QuoteContext.scala b/compiler/src/dotty/tools/dotc/quoted/QuoteContext.scala index f241828453d2..6ac6372acfd8 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuoteContext.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuoteContext.scala @@ -7,5 +7,5 @@ object QuoteContext { def apply() given Context: scala.quoted.QuoteContext = new scala.quoted.QuoteContext(ReflectionImpl(the[Context])) - } + diff --git a/compiler/src/dotty/tools/dotc/quoted/QuoteDriver.scala b/compiler/src/dotty/tools/dotc/quoted/QuoteDriver.scala index c4d29d421351..3ef28808a3f0 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuoteDriver.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuoteDriver.scala @@ -78,5 +78,5 @@ object QuoteDriver { case _ => classpath0 } } - } + diff --git a/compiler/src/dotty/tools/dotc/quoted/ToolboxImpl.scala b/compiler/src/dotty/tools/dotc/quoted/ToolboxImpl.scala index 17671b9d93f3..3da705bc44af 100644 --- a/compiler/src/dotty/tools/dotc/quoted/ToolboxImpl.scala +++ b/compiler/src/dotty/tools/dotc/quoted/ToolboxImpl.scala @@ -25,9 +25,9 @@ object ToolboxImpl { throw new scala.quoted.staging.RunScopeException() running = true driver.run(exprBuilder, settings) - } finally { - running = false } + finally + running = false } } @@ -42,5 +42,5 @@ object ToolboxImpl { // This id can only differentiate scope extrusion from one compiler instance to another. 
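// Illustrative sketch (editor's aside, hypothetical names, not from this patch): the
// try/catch/finally layout the ToolboxImpl change above settles on -- `catch` and
// `finally` begin their own line, and a single-statement finalizer needs no braces.
object RunGuard {
  private var running = false
  def runOnce[T](body: => T): T = {
    if (running) throw new IllegalStateException("already running")
    running = true
    try body
    catch {
      case ex: Exception =>
        println(s"run failed: ${ex.getMessage}")
        throw ex
    }
    finally
      running = false
  }
}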
private[dotty] def scopeId given Context: ScopeId = the[Context].outersIterator.toList.last.hashCode() - } + diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index bf0d24feffab..251b396ba224 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -32,10 +32,10 @@ trait MessageRendering { * @return a list of strings with inline locations */ def outer(pos: SourcePosition, prefix: String)(implicit ctx: Context): List[String] = - if (pos.outer.exists) { + if (pos.outer.exists) i"$prefix| This location is in code that was inlined at ${pos.outer}" :: outer(pos.outer, prefix) - } else Nil + else Nil /** Get the sourcelines before and after the position, as well as the offset * for rendering line numbers @@ -153,7 +153,8 @@ trait MessageRendering { val marker = columnMarker(pos, offset, diagnosticLevel) val err = errorMsg(pos, msg.msg, offset) sb.append((srcBefore ::: marker :: err :: outer(pos, " " * (offset - 1)) ::: srcAfter).mkString(EOL)) - } else sb.append(msg.msg) + } + else sb.append(msg.msg) sb.toString } diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 41713e53c42d..1d0d2496f23f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -69,14 +69,15 @@ object Reporter { trait Reporting { this: Context => /** For sending messages that are printed only if -verbose is set */ - def inform(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = + def inform(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = { if (this.settings.verbose.value) this.echo(msg, pos) + } def echo(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = reporter.report(new Info(msg, pos)) - def reportWarning(warning: Warning): Unit = - if (!this.settings.silentWarnings.value) { + def reportWarning(warning: Warning): Unit = { + if (!this.settings.silentWarnings.value) if (this.settings.XfatalWarnings.value) warning match { case warning: ConditionalWarning if !warning.enablingOption.value => @@ -85,7 +86,7 @@ trait Reporting { this: Context => reporter.report(warning.toError) } else reporter.report(warning) - } + } def deprecationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit = reportWarning(new DeprecationWarning(msg, pos)) @@ -104,7 +105,7 @@ trait Reporting { this: Context => val req = if (required) "needs to" else "should" val fqname = s"scala.language.$feature" - val explain = { + val explain = if (reporter.isReportedFeatureUseSite(featureUseSite)) "" else { reporter.reportNewFeatureUseSite(featureUseSite) @@ -114,7 +115,6 @@ trait Reporting { this: Context => |See the Scala docs for value $fqname for a discussion |why the feature $req be explicitly enabled.""".stripMargin } - } val msg = s"$featureDescription $req be enabled\nby making the implicit value $fqname visible.$explain" if (required) error(msg, pos) @@ -160,12 +160,14 @@ trait Reporting { this: Context => * See [[config.CompilerCommand#explainAdvanced]] for the exact meaning of * "contains" here. 
*/ - def log(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = + def log(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = { if (this.settings.Ylog.value.containsPhase(phase)) echo(s"[log ${ctx.phasesStack.reverse.mkString(" -> ")}] $msg", pos) + } - def debuglog(msg: => String): Unit = + def debuglog(msg: => String): Unit = { if (ctx.debug) log(msg) + } def informTime(msg: => String, start: Long): Unit = { def elapsed = s" in ${currentTimeMillis - start}ms" @@ -180,8 +182,9 @@ trait Reporting { this: Context => value } - def debugwarn(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = + def debugwarn(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = { if (this.settings.Ydebug.value) warning(msg, pos) + } private def addInlineds(pos: SourcePosition)(implicit ctx: Context) = { def recur(pos: SourcePosition, inlineds: List[Trees.Tree[_]]): SourcePosition = inlineds match { @@ -269,7 +272,7 @@ abstract class Reporter extends interfaces.ReporterResult { var unreportedWarnings: Map[String, Int] = Map.empty - def report(m: MessageContainer)(implicit ctx: Context): Unit = + def report(m: MessageContainer)(implicit ctx: Context): Unit = { if (!isHidden(m)) { doReport(m)(ctx.addMode(Mode.Printing)) m match { @@ -285,6 +288,7 @@ abstract class Reporter extends interfaces.ReporterResult { // match error if d is something else } } + } def incomplete(m: MessageContainer)(implicit ctx: Context): Unit = incompleteHandler(m, ctx) diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala index 4a69f4f6fd56..2ed2ce347fcd 100644 --- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala +++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala @@ -20,12 +20,11 @@ trait UniqueMessagePositions extends Reporter { super.isHidden(m) || { m.pos.exists && !ctx.settings.YshowSuppressedErrors.value && { var shouldHide = false - for (pos <- m.pos.start to m.pos.end) { + for (pos <- m.pos.start to m.pos.end) positions get (ctx.source, pos) match { case Some(level) if level >= m.level => shouldHide = true case _ => positions((ctx.source, pos)) = m.level } - } shouldHide } } diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index 821f9ac6c85a..1993e985622b 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -35,7 +35,8 @@ abstract class TraceSyntax { def op1 = op if (cond) apply[TC](question, Printers.default, show)(op1) else op1 - } else op + } + else op @forceInline def apply[T](question: => String, printer: Printers.Printer, showOp: Any => String)(op: => T)(implicit ctx: Context): T = @@ -97,22 +98,24 @@ abstract class TraceSyntax { if (ctx.mode.is(Mode.Printing)) op else { var finalized = false - def finalize(result: Any, note: String) = + def finalize(result: Any, note: String) = { if (!finalized) { ctx.base.indent -= 1 log(s"${ctx.base.indentTab * ctx.base.indent}${trailing(result)}$note") finalized = true } - try { - log(s"${ctx.base.indentTab * ctx.base.indent}$leading") - ctx.base.indent += 1 - val res = op - finalize(res, "") - res - } catch { - case ex: Throwable => - finalize("", s" (with exception $ex)") - throw ex + } + try { + log(s"${ctx.base.indentTab * ctx.base.indent}$leading") + ctx.base.indent += 1 + val res = op + finalize(res, "") + res + } + catch { + case ex: Throwable => + 
finalize("", s" (with exception $ex)") + throw ex + } } - } } diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala index d640544151ca..7c35cd7c0210 100644 --- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala +++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala @@ -17,7 +17,7 @@ object Rewrites { } private class Patches(source: SourceFile) { - private val pbuf = new mutable.ListBuffer[Patch]() + private[Rewrites] val pbuf = new mutable.ListBuffer[Patch]() def addPatch(span: Span, replacement: String): Unit = pbuf += Patch(span, replacement) @@ -27,7 +27,7 @@ object Rewrites { val patches = pbuf.toList.sortBy(_.span.start) if (patches.nonEmpty) patches reduceLeft {(p1, p2) => - assert(p1.span.end <= p2.span.start, s"overlapping patches: $p1 and $p2") + assert(p1.span.end <= p2.span.start, s"overlapping patches in $source: $p1 and $p2") p2 } val ds = new Array[Char](cs.length + delta) @@ -63,17 +63,24 @@ object Rewrites { /** If -rewrite is set, record a patch that replaces the range * given by `span` in `source` by `replacement` */ - def patch(source: SourceFile, span: Span, replacement: String)(implicit ctx: Context): Unit = + def patch(source: SourceFile, span: Span, replacement: String)(implicit ctx: Context): Unit = { if (ctx.reporter != Reporter.NoReporter) // NoReporter is used for syntax highlighting for (rewrites <- ctx.settings.rewrite.value) rewrites.patched .getOrElseUpdate(source, new Patches(source)) .addPatch(span, replacement) + } /** Patch position in `ctx.compilationUnit.source`. */ def patch(span: Span, replacement: String)(implicit ctx: Context): Unit = patch(ctx.compilationUnit.source, span, replacement) + /** Does `span` overlap with a patch region of `source`? */ + def overlapsPatch(source: SourceFile, span: Span) given (ctx: Context): Boolean = + ctx.settings.rewrite.value.exists(rewrites => + rewrites.patched.get(source).exists(patches => + patches.pbuf.exists(patch => patch.span.overlaps(span)))) + /** If -rewrite is set, apply all patches and overwrite patched source files. 
*/ def writeBack()(implicit ctx: Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/MacroExpansion.scala b/compiler/src/dotty/tools/dotc/tastyreflect/MacroExpansion.scala index 4c2297273b8c..7bd7cf079062 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/MacroExpansion.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/MacroExpansion.scala @@ -14,5 +14,5 @@ object MacroExpansion { def context(inlinedFrom: tpd.Tree)(implicit ctx: Context): Context = ctx.fresh.setProperty(MacroExpansionPosition, SourcePosition(inlinedFrom.source, inlinedFrom.span)).withSource(inlinedFrom.source) - } + diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala b/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala index 83c5c6f6cf11..f26f18f2ea0d 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionCompilerInterface.scala @@ -173,10 +173,9 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend def PackageDef_owner(self: PackageDef) given Context: PackageDef = packageDefFromSym(self.symbol.owner) - def PackageDef_members(self: PackageDef) given Context: List[Statement] = { + def PackageDef_members(self: PackageDef) given Context: List[Statement] = if (self.symbol.is(core.Flags.JavaDefined)) Nil // FIXME should also support java packages else self.symbol.info.decls.iterator.map(definitionFromSym).toList - } def PackageDef_symbol(self: PackageDef) given Context: PackageDefSymbol = self.symbol @@ -481,7 +480,8 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend val stats1 = normalizeInnerLoops(block.stats.init) val normalLoop = tpd.Block(block.stats.last :: Nil, block.expr) tpd.Block(stats1, normalLoop) - } else { + } + else { val stats1 = normalizeInnerLoops(block.stats) tpd.cpy.Block(block)(stats1, block.expr) } @@ -1030,12 +1030,11 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend type WildcardPattern = tpd.Ident - def matchPattern_WildcardPattern(pattern: Pattern) given Context: Option[WildcardPattern] = { + def matchPattern_WildcardPattern(pattern: Pattern) given Context: Option[WildcardPattern] = pattern match { case pattern: tpd.Ident if tpd.isWildcardArg(pattern) => Some(pattern) case _ => None } - } def Pattern_WildcardPattern_module_apply(tpe: TypeOrBounds) given Context: WildcardPattern = untpd.Ident(nme.WILDCARD).withType(tpe) @@ -1069,8 +1068,8 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend case _ => Some(x) } - def Type_apply(clazz: Class[_]) given (ctx: Context): Type = { - if (clazz.isPrimitive) { + def Type_apply(clazz: Class[_]) given (ctx: Context): Type = + if (clazz.isPrimitive) if (clazz == classOf[Boolean]) defn.BooleanType else if (clazz == classOf[Byte]) defn.ByteType else if (clazz == classOf[Char]) defn.CharType @@ -1080,17 +1079,16 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend else if (clazz == classOf[Float]) defn.FloatType else if (clazz == classOf[Double]) defn.DoubleType else defn.UnitType - } else if (clazz.isArray) { + else if (clazz.isArray) defn.ArrayType.appliedTo(Type_apply(clazz.getComponentType)) - } else if (clazz.isMemberClass) { + else if (clazz.isMemberClass) { val name = clazz.getSimpleName.toTypeName val enclosing = Type_apply(clazz.getEnclosingClass) if (enclosing.member(name).exists) enclosing.select(name) - else { + 
else enclosing.classSymbol.companionModule.termRef.select(name) - } - } else ctx.getClassIfDefined(clazz.getCanonicalName).typeRef - } + } + else ctx.getClassIfDefined(clazz.getCanonicalName).typeRef def `Type_=:=`(self: Type)(that: Type) given Context: Boolean = self =:= that @@ -1503,10 +1501,9 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend def Symbol_pos(self: Symbol) given Context: Position = self.sourcePos - def Symbol_localContext(self: Symbol) given (ctx: Context): Context = { + def Symbol_localContext(self: Symbol) given (ctx: Context): Context = if (self.exists) ctx.withOwner(self) else ctx - } def Symbol_comment(self: Symbol) given (ctx: Context): Option[Comment] = { import dotty.tools.dotc.core.Comments.CommentsContext @@ -1517,12 +1514,11 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend } docCtx.docstring(self) } - def Symbol_annots(self: Symbol) given Context: List[Term] = { + def Symbol_annots(self: Symbol) given Context: List[Term] = self.annotations.flatMap { case _: core.Annotations.LazyBodyAnnotation => Nil case annot => annot.tree :: Nil } - } def Symbol_isDefinedInCurrentRun(self: Symbol) given Context: Boolean = self.topLevelClass.asClass.isDefinedInCurrentRun @@ -1564,41 +1560,36 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend if (sym.exists && isField(sym)) Some(sym) else None } - def ClassDefSymbol_classMethod(self: Symbol)(name: String) given Context: List[DefDefSymbol] = { + def ClassDefSymbol_classMethod(self: Symbol)(name: String) given Context: List[DefDefSymbol] = self.typeRef.decls.iterator.collect { case sym if isMethod(sym) && sym.name.toString == name => sym.asTerm }.toList - } - def ClassDefSymbol_classMethods(self: Symbol) given Context: List[DefDefSymbol] = { + def ClassDefSymbol_classMethods(self: Symbol) given Context: List[DefDefSymbol] = self.typeRef.decls.iterator.collect { case sym if isMethod(sym) => sym.asTerm }.toList - } private def appliedTypeRef(sym: Symbol): Type = sym.typeRef.appliedTo(sym.typeParams.map(_.typeRef)) - def ClassDefSymbol_method(self: Symbol)(name: String) given Context: List[DefDefSymbol] = { + def ClassDefSymbol_method(self: Symbol)(name: String) given Context: List[DefDefSymbol] = appliedTypeRef(self).allMembers.iterator.map(_.symbol).collect { case sym if isMethod(sym) && sym.name.toString == name => sym.asTerm }.toList - } - def ClassDefSymbol_methods(self: Symbol) given Context: List[DefDefSymbol] = { + def ClassDefSymbol_methods(self: Symbol) given Context: List[DefDefSymbol] = appliedTypeRef(self).allMembers.iterator.map(_.symbol).collect { case sym if isMethod(sym) => sym.asTerm }.toList - } private def isMethod(sym: Symbol) given Context: Boolean = sym.isTerm && sym.is(Flags.Method) && !sym.isConstructor - def ClassDefSymbol_caseFields(self: Symbol) given Context: List[ValDefSymbol] = { + def ClassDefSymbol_caseFields(self: Symbol) given Context: List[ValDefSymbol] = if (!self.isClass) Nil else self.asClass.paramAccessors.collect { case sym if sym.is(Flags.CaseAccessor) => sym.asTerm } - } def ClassDefSymbol_companionClass(self: Symbol) given Context: Option[ClassDefSymbol] = { val sym = self.companionModule.companionClass @@ -1766,15 +1757,14 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend def QuotedExpr_cast[U](self: scala.quoted.Expr[_]) given (tp: scala.quoted.Type[U], ctx: Context): scala.quoted.Expr[U] = { val tree = QuotedExpr_unseal(self) val expectedType = 
QuotedType_unseal(tp).tpe - if (tree.tpe <:< expectedType) { + if (tree.tpe <:< expectedType) self.asInstanceOf[scala.quoted.Expr[U]] - } else { + else throw new scala.tasty.reflect.ExprCastError( s"""Expr: ${tree.show} |did not conform to type: ${expectedType.show} |""".stripMargin ) - } } /** Convert `Type` to an `quoted.Type[_]` */ @@ -1942,5 +1932,5 @@ class ReflectionCompilerInterface(val rootContext: core.Contexts.Context) extend (fn given ctx.withSource(rootPosition.source)).withSpan(rootPosition.span) private def compilerId: Int = rootContext.outersIterator.toList.last.hashCode() - } + diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionImpl.scala b/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionImpl.scala index 296974ffe27c..decd795b46a5 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionImpl.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/ReflectionImpl.scala @@ -20,5 +20,5 @@ object ReflectionImpl { else SyntaxHighlight.plain new refl.SourceCodePrinter(syntaxHighlight).showTree(reflTree) given reflCtx } - } + diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/package.scala b/compiler/src/dotty/tools/dotc/tastyreflect/package.scala index 5fc55927136d..86d9a962c3df 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/package.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/package.scala @@ -18,5 +18,5 @@ package object tastyreflect { override def denot(implicit ctx: Context): SymDenotation = sym.denot } - } + diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala index 67e369a2b4a0..7cd2964b543e 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala @@ -22,8 +22,8 @@ class ArrayApply extends MiniPhase { override def phaseName: String = "arrayApply" - override def transformApply(tree: tpd.Apply)(implicit ctx: Context): tpd.Tree = { - if (tree.symbol.name == nme.apply && tree.symbol.owner == defn.ArrayModule.moduleClass) { // Is `Array.apply` + override def transformApply(tree: tpd.Apply)(implicit ctx: Context): tpd.Tree = + if (tree.symbol.name == nme.apply && tree.symbol.owner == defn.ArrayModule.moduleClass) // Is `Array.apply` tree.args match { case StripAscription(Apply(wrapRefArrayMeth, (seqLit: tpd.JavaSeqLiteral) :: Nil)) :: ct :: Nil if defn.WrapArrayMethods().contains(wrapRefArrayMeth.symbol) && elideClassTag(ct) => @@ -37,8 +37,7 @@ class ArrayApply extends MiniPhase { tree } - } else tree - } + else tree /** Only optimize when classtag if it is one of * - `ClassTag.apply(classOf[XYZ])` diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala index 67a8f66d7326..301817366a22 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala @@ -31,7 +31,8 @@ class ArrayConstructors extends MiniPhase { if (tree.fun.symbol eq defn.ArrayConstructor) { val TypeApply(tycon, targ :: Nil) = tree.fun expand(targ.tpe, tree.args) - } else if ((tree.fun.symbol.maybeOwner eq defn.ArrayModule.moduleClass) && (tree.fun.symbol.name eq nme.ofDim) && !tree.tpe.isInstanceOf[MethodicType]) { + } + else if ((tree.fun.symbol.maybeOwner eq defn.ArrayModule.moduleClass) && (tree.fun.symbol.name eq nme.ofDim) && !tree.tpe.isInstanceOf[MethodicType]) { val Apply(Apply(TypeApply(_, List(tp)), _), _) = tree val cs = 
tp.tpe.widen.classSymbol tree.fun match { @@ -43,7 +44,8 @@ class ArrayConstructors extends MiniPhase { Block(t.qualifier :: Nil, expand(targ.head.tpe, dims)) case _ => tree } + } - } else tree + else tree } } diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index 9b950869f7d5..d0f36a185150 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -59,18 +59,20 @@ class CheckReentrant extends MiniPhase { if (!seen.contains(cls) && !isIgnored(cls)) { seen += cls scanning(cls) { - for (sym <- cls.classInfo.decls) - if (sym.isTerm && !sym.isSetter && !isIgnored(sym)) + for (sym <- cls.classInfo.decls) { + if (sym.isTerm && !sym.isSetter && !isIgnored(sym)) { if (sym.is(Mutable)) { ctx.error( i"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info} | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""") shared += sym - } else if (!sym.is(Method) || sym.isOneOf(Accessor | ParamAccessor)) { + } + else if (!sym.is(Method) || sym.isOneOf(Accessor | ParamAccessor)) scanning(sym) { sym.info.widenExpr.classSymbols.foreach(addVars) } - } + } + } for (parent <- cls.classInfo.classParents) addVars(parent.classSymbol.asClass) } diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala index bd109c251b52..00cf1638f602 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala @@ -30,52 +30,48 @@ class CheckStatic extends MiniPhase { override def transformTemplate(tree: tpd.Template)(implicit ctx: Context): tpd.Tree = { val defns = tree.body.collect{case t: ValOrDefDef => t} var hadNonStaticField = false - for(defn <- defns) { + for (defn <- defns) if (defn.symbol.hasAnnotation(ctx.definitions.ScalaStaticAnnot)) { - if(!ctx.owner.is(Module)) { + if (!ctx.owner.is(Module)) ctx.error(StaticFieldsOnlyAllowedInObjects(defn.symbol), defn.sourcePos) - } - if (defn.isInstanceOf[ValDef] && hadNonStaticField) { + if (defn.isInstanceOf[ValDef] && hadNonStaticField) ctx.error(StaticFieldsShouldPrecedeNonStatic(defn.symbol, defns), defn.sourcePos) - } val companion = ctx.owner.companionClass def clashes = companion.asClass.membersNamed(defn.name) - if (!companion.exists) { + if (!companion.exists) ctx.error(MissingCompanionForStatic(defn.symbol), defn.sourcePos) - } else if (clashes.exists) { + else if (clashes.exists) ctx.error(MemberWithSameNameAsStatic(), defn.sourcePos) - } else if (defn.symbol.is(Flags.Mutable) && companion.is(Flags.Trait)) { + else if (defn.symbol.is(Flags.Mutable) && companion.is(Flags.Trait)) ctx.error(TraitCompanionWithMutableStatic(), defn.sourcePos) - } else if (defn.symbol.is(Flags.Lazy)) { + else if (defn.symbol.is(Flags.Lazy)) ctx.error(LazyStaticField(), defn.sourcePos) - } else if (defn.symbol.allOverriddenSymbols.nonEmpty) { + else if (defn.symbol.allOverriddenSymbols.nonEmpty) ctx.error(StaticOverridingNonStaticMembers(), defn.sourcePos) - } - } else hadNonStaticField = hadNonStaticField || defn.isInstanceOf[ValDef] + } + else hadNonStaticField = hadNonStaticField || defn.isInstanceOf[ValDef] - } tree } - override def transformSelect(tree: tpd.Select)(implicit ctx: Context): tpd.Tree = { + override def transformSelect(tree: tpd.Select)(implicit ctx: Context): tpd.Tree = if 
(tree.symbol.hasAnnotation(defn.ScalaStaticAnnot)) { val symbolWhitelist = tree.symbol.ownersIterator.flatMap(x => if (x.is(Flags.Module)) List(x, x.companionModule) else List(x)).toSet - def isSafeQual(t: Tree): Boolean = { // follow the desugared paths created by typer + def isSafeQual(t: Tree): Boolean = // follow the desugared paths created by typer t match { case t: This => true case t: Select => isSafeQual(t.qualifier) && symbolWhitelist.contains(t.symbol) case t: Ident => symbolWhitelist.contains(t.symbol) case t: Block => t.stats.forall(tpd.isPureExpr) && isSafeQual(t.expr) } - } if (isSafeQual(tree.qualifier)) ref(tree.symbol) else tree - } else tree - } + } + else tree } object CheckStatic { diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala index 6679074bf0e3..eccd1405840d 100644 --- a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala +++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala @@ -29,11 +29,10 @@ class CollectEntryPoints extends MiniPhase { override def phaseName: String = "collectEntryPoints" override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context): tpd.Tree = { - if (tree.symbol.owner.isClass && isJavaEntryPoint(tree.symbol)) { + if (tree.symbol.owner.isClass && isJavaEntryPoint(tree.symbol)) // collecting symbols for entry points here (as opposed to GenBCode where they are used) // has the advantage of saving an additional pass over all ClassDefs. entryPoints += tree.symbol - } tree } @@ -49,9 +48,8 @@ class CollectEntryPoints extends MiniPhase { ) false } - def failNoForwarder(msg: String) = { + def failNoForwarder(msg: String) = fail(s"$msg, which means no static forwarder can be generated.\n") - } val possibles = if (sym.flags is Flags.Module) (sym.info nonPrivateMember nme.main).alternatives else Nil val hasApproximate = possibles exists { m => @@ -90,17 +88,13 @@ class CollectEntryPoints extends MiniPhase { } } - // At this point it's a module with a main-looking method, so either succeed or warn that it isn't. - hasApproximate && precise(ctx.withPhase(ctx.erasurePhase)) + // At this point it's a module with a main-looking method, so either succeed or warn that it isn't. + hasApproximate && precise(ctx.withPhase(ctx.erasurePhase)) // Before erasure so we can identify generic mains. 
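// Illustrative sketch (editor's aside, hypothetical method, not from this patch): the
// mechanical rewrite applied throughout these files -- braces around a single-expression
// body are dropped and `else` moves to its own line, as in `failNoForwarder` above.
object BeforeAfter {
  // before
  def classify0(n: Int): String = {
    if (n == 0) {
      "zero"
    } else if (n > 0) {
      "positive"
    } else {
      "negative"
    }
  }
  // after: same behaviour, no redundant braces
  def classify(n: Int): String =
    if (n == 0) "zero"
    else if (n > 0) "positive"
    else "negative"
}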
- - -} - + } } class SymbolOrdering(implicit ctx: Context) extends Ordering[Symbol] { - override def compare(x: Symbol, y: Symbol): Int = { + override def compare(x: Symbol, y: Symbol): Int = x.fullName.toString.compareTo(y.fullName.toString) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala index a78f19d878e6..0c493a519629 100644 --- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala +++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala @@ -22,7 +22,7 @@ class CookComments extends MegaPhase.MiniPhase { } tree - } - } + + diff --git a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala index b7ac1a688be4..abb4611479c5 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala @@ -22,8 +22,8 @@ class ElimStaticThis extends MiniPhase { } else tree - override def transformIdent(tree: tpd.Ident)(implicit ctx: Context): tpd.Tree = { - if (ctx.owner.enclosingMethod.is(JavaStatic)) { + override def transformIdent(tree: tpd.Ident)(implicit ctx: Context): tpd.Tree = + if (ctx.owner.enclosingMethod.is(JavaStatic)) tree.tpe match { case TermRef(thiz: ThisType, _) if thiz.cls.is(ModuleClass, JavaDefined) => ref(thiz.cls.sourceModule).select(tree.symbol) @@ -32,7 +32,5 @@ class ElimStaticThis extends MiniPhase { tree case _ => tree } - } else tree - } } diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 77d2eb7199a3..4297c5c04bce 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -54,8 +54,9 @@ class Erasure extends Phase with DenotTransformer { // After erasure, all former Any members are now Object members val ClassInfo(pre, _, ps, decls, selfInfo) = ref.info val extendedScope = decls.cloneScope - for (decl <- defn.AnyClass.classInfo.decls) + for (decl <- defn.AnyClass.classInfo.decls) { if (!decl.isConstructor) extendedScope.enter(decl) + } ref.copySymDenotation( info = transformInfo(ref.symbol, ClassInfo(pre, defn.ObjectClass, ps, extendedScope, selfInfo)) @@ -66,7 +67,7 @@ class Erasure extends Phase with DenotTransformer { val newSymbol = if ((oldSymbol.owner eq defn.AnyClass) && oldSymbol.isConstructor) defn.ObjectClass.primaryConstructor - else oldSymbol + else oldSymbol val oldOwner = ref.owner val newOwner = if (oldOwner eq defn.AnyClass) defn.ObjectClass else oldOwner val oldName = ref.name @@ -229,7 +230,8 @@ object Erasure { unboxedNull, unboxedTree(t)) } - } else unboxedTree(tree) + } + else unboxedTree(tree) cast(tree1, pt) case _ => @@ -401,9 +403,8 @@ object Erasure { else super.typedLiteral(tree) - override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context): Tree = { + override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context): Tree = checkValue(checkNotErased(super.typedIdent(tree, pt)), pt) - } /** Type check select nodes, applying the following rewritings exhaustively * on selections `e.m`, where `OT` is the type of the owner of `m` and `ET` @@ -443,7 +444,8 @@ object Erasure { if (defn.specialErasure.contains(owner)) { assert(sym.isConstructor, s"${sym.showLocated}") defn.specialErasure(owner) - } else if (defn.isSyntheticFunctionClass(owner)) + } + else if (defn.isSyntheticFunctionClass(owner)) defn.erasedFunctionClass(owner) else 
owner @@ -584,13 +586,11 @@ object Erasure { override def typedTry(tree: untpd.Try, pt: Type)(implicit ctx: Context): Try = super.typedTry(tree, adaptProto(tree, pt)) - private def adaptProto(tree: untpd.Tree, pt: Type)(implicit ctx: Context) = { - if (pt.isValueType) pt else { + private def adaptProto(tree: untpd.Tree, pt: Type)(implicit ctx: Context) = + if (pt.isValueType) pt else if (tree.typeOpt.derivesFrom(ctx.definitions.UnitClass)) tree.typeOpt else valueErasure(tree.typeOpt) - } - } override def typedInlined(tree: untpd.Inlined, pt: Type)(implicit ctx: Context): Tree = super.typedInlined(tree, pt) match { @@ -713,10 +713,10 @@ object Erasure { if (paramAdaptationNeeded || resultAdaptationNeeded) { val bridgeType = - if (paramAdaptationNeeded) { + if (paramAdaptationNeeded) if (resultAdaptationNeeded) sam else implType.derivedLambdaType(paramInfos = samParamTypes) - } else implType.derivedLambdaType(resType = samResultType) + else implType.derivedLambdaType(resType = samResultType) val bridge = ctx.newSymbol(ctx.owner, AdaptedClosureName(meth.symbol.name.asTermName), Flags.Synthetic | Flags.Method, bridgeType) val bridgeCtx = ctx.withOwner(bridge) Closure(bridge, bridgeParamss => { @@ -726,8 +726,10 @@ object Erasure { val rhs = Apply(meth, bridgeParams.lazyZip(implParamTypes).map(adapt(_, _))) adapt(rhs, bridgeType.resultType) }, targetType = implClosure.tpt.tpe) - } else implClosure - } else implClosure + } + else implClosure + } + else implClosure case _ => implClosure } diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index 90f04e016ee9..51a7e089e2bb 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -176,5 +176,5 @@ class ExpandSAMs extends MiniPhase { case tpe => tpe } - } + diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index 1b57ccec8d50..d9b275a3c914 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -355,12 +355,13 @@ object ExplicitOuter { val mt @ MethodTpe(pnames, ptypes, restpe) = tp mt.derivedLambdaType( nme.OUTER :: pnames, outerClass(cls).typeRef :: ptypes, restpe) - } else tp + } + else tp /** If function in an apply node is a constructor that needs to be passed an * outer argument, the singleton list with the argument, otherwise Nil. */ - def args(fun: Tree): List[Tree] = { + def args(fun: Tree): List[Tree] = if (fun.symbol.isConstructor) { val cls = fun.symbol.owner.asClass def outerArg(receiver: Tree): Tree = receiver match { @@ -376,8 +377,8 @@ object ExplicitOuter { case Select(receiver, _) => outerArg(receiver).withSpan(fun.span) :: Nil } else Nil - } else Nil - } + } + else Nil /** A path of outer accessors starting from node `start`. `start` defaults to the * context owner's this node. 
There are two alternative conditions that determine @@ -405,7 +406,8 @@ object ExplicitOuter { } ctx.log(i"computing outerpath to $toCls from ${ctx.outersIterator.map(_.owner).toList}") loop(start, count) - } catch { + } + catch { case ex: ClassCastException => throw new ClassCastException(i"no path exists from ${ctx.owner.enclosingClass} to $toCls") } diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala index 941e7ca146f0..f2e86c30eff5 100644 --- a/compiler/src/dotty/tools/dotc/transform/Flatten.scala +++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala @@ -28,14 +28,12 @@ class Flatten extends MiniPhase with SymTransformer { override def initContext(ctx: FreshContext): Unit = LiftedDefs = ctx.addLocation[mutable.ListBuffer[Tree]](null) - def transformSym(ref: SymDenotation)(implicit ctx: Context): SymDenotation = { - if (ref.isClass && !ref.is(Package) && !ref.owner.is(Package)) { + def transformSym(ref: SymDenotation)(implicit ctx: Context): SymDenotation = + if (ref.isClass && !ref.is(Package) && !ref.owner.is(Package)) ref.copySymDenotation( name = ref.flatName, owner = ref.enclosingPackageClass) - } else ref - } override def prepareForPackageDef(tree: PackageDef)(implicit ctx: Context): FreshContext = ctx.fresh.updateStore(LiftedDefs, new mutable.ListBuffer[Tree]) diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala index 993d12a0f974..018ebdc196c6 100644 --- a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala +++ b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala @@ -29,7 +29,7 @@ class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer { override def transformTemplate(impl: Template)(implicit ctx: Context): Template = { - def forwarderRhs(receiver: Tree, xsTree: Tree): Tree = { + def forwarderRhs(receiver: Tree, xsTree: Tree): Tree = { val argsApply = ref(xsTree.symbol).select(nme.apply) var idx = -1 val argss = receiver.tpe.widenDealias.paramInfoss.map(_.map { param => @@ -45,7 +45,8 @@ class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer { if tree.symbol.is(Method) && tree.symbol.name == nme.apply && tree.symbol.signature.paramsSig.size > MaxImplementedFunctionArity && tree.symbol.allOverriddenSymbols.exists(sym => defn.isXXLFunctionClass(sym.owner)) - } yield { + } + yield { val xsType = defn.ArrayType.appliedTo(List(defn.ObjectType)) val methType = MethodType(List(nme.args))(_ => List(xsType), _ => defn.ObjectType) val meth = ctx.newSymbol(tree.symbol.owner, nme.apply, Synthetic | Method, methType) @@ -54,5 +55,5 @@ class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer { cpy.Template(impl)(body = forwarders ::: impl.body) } - } + diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala b/compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala index f8d530a8a904..617fdb20ecfb 100644 --- a/compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala +++ b/compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala @@ -42,6 +42,7 @@ class FunctionalInterfaces extends MiniPhase { val interface = ctx.withPhase(ctx.typerPhase).requiredClass(functionPackage ++ interfaceName) val tpt = tpd.TypeTree(interface.asType.appliedRef) tpd.Closure(tree.env, tree.meth, tpt) - } else tree + } + else tree } } diff --git 
a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index c030a9ff6e11..1b15b4adfe97 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -29,11 +29,10 @@ object GenericSignatures { * @param info The type of the symbol * @return The signature if it could be generated, `None` otherwise. */ - def javaSig(sym0: Symbol, info: Type)(implicit ctx: Context): Option[String] = { + def javaSig(sym0: Symbol, info: Type)(implicit ctx: Context): Option[String] = // Avoid generating a signature for local symbols. if (sym0.isLocal) None else javaSig0(sym0, info)(ctx.withPhase(ctx.erasurePhase)) - } @noinline private final def javaSig0(sym0: Symbol, info: Type)(implicit ctx: Context): Option[String] = { @@ -80,12 +79,13 @@ object GenericSignatures { boundsSig(hiBounds(param.paramInfo.bounds)) } - def polyParamSig(tparams: List[LambdaParam]): Unit = + def polyParamSig(tparams: List[LambdaParam]): Unit = { if (tparams.nonEmpty) { builder.append('<') tparams.foreach(paramSig) builder.append('>') } + } def typeParamSig(name: Name): Unit = { builder.append(ClassfileConstants.TVAR_TAG) @@ -96,11 +96,10 @@ object GenericSignatures { def methodResultSig(restpe: Type): Unit = { val finalType = restpe.finalResultType val sym = finalType.typeSymbol - if (sym == defn.UnitClass || sym == defn.BoxedUnitModule || sym0.isConstructor) { + if (sym == defn.UnitClass || sym == defn.BoxedUnitModule || sym0.isConstructor) builder.append(ClassfileConstants.VOID_TAG) - } else { + else jsig(finalType) - } } // This will reject any name that has characters that cannot appear in @@ -108,11 +107,10 @@ object GenericSignatures { // dont need to generate signatures for them. def sanitizeName(name: Name): String = { val nameString = name.mangledString - if (nameString.forall(c => c == '.' || Character.isJavaIdentifierPart(c))) { + if (nameString.forall(c => c == '.' || Character.isJavaIdentifierPart(c))) nameString - } else { + else throw new UnknownSig - } } // Anything which could conceivably be a module (i.e. isn't known to be @@ -165,9 +163,12 @@ object GenericSignatures { // TODO revisit this. Does it align with javac for code that can be expressed in both languages? val delimiter = if (builder.charAt(builder.length() - 1) == '>') '.' 
else '$' builder.append(delimiter).append(sanitizeName(sym.name.asSimpleName)) - } else fullNameInSig(sym) - } else fullNameInSig(sym) - } else fullNameInSig(sym) + } + else fullNameInSig(sym) + } + else fullNameInSig(sym) + } + else fullNameInSig(sym) if (args.nonEmpty) { builder.append('<') @@ -188,13 +189,12 @@ object GenericSignatures { case RefOrAppliedType(sym, pre, args) => // If args isEmpty, Array is being used as a type constructor - if (sym == defn.ArrayClass && args.nonEmpty) { + if (sym == defn.ArrayClass && args.nonEmpty) if (unboundedGenericArrayLevel(tp) == 1) jsig(defn.ObjectType) else { builder.append(ClassfileConstants.ARRAY_TAG) args.foreach(jsig(_)) } - } else if (sym == defn.PairClass && tp.tupleArity > Definitions.MaxTupleArity) jsig(defn.TupleXXLClass.typeRef) else if (isTypeParameterInSig(sym, sym0)) { @@ -209,11 +209,10 @@ object GenericSignatures { jsig(defn.RuntimeNothingModuleRef) else if (sym == defn.NullClass) jsig(defn.RuntimeNullModuleRef) - else if (sym.isPrimitiveValueClass) { + else if (sym.isPrimitiveValueClass) if (!primitiveOK) jsig(defn.ObjectType) else if (sym == defn.UnitClass) jsig(defn.BoxedUnitClass.typeRef) else builder.append(defn.typeTag(sym.info)) - } else if (ValueClasses.isDerivedValueClass(sym)) { val unboxed = ValueClasses.valueClassUnbox(sym.asClass).info.finalResultType val unboxedSeen = tp.memberInfo(ValueClasses.valueClassUnbox(sym.asClass)).finalResultType @@ -271,7 +270,7 @@ object GenericSignatures { jsig(intersectionDominator(tp1 :: tp2 :: Nil), primitiveOK = primitiveOK) case ci: ClassInfo => - def polyParamSig(tparams: List[TypeParamInfo]): Unit = + def polyParamSig(tparams: List[TypeParamInfo]): Unit = { if (tparams.nonEmpty) { builder.append('<') tparams.foreach { tp => @@ -280,6 +279,7 @@ object GenericSignatures { } builder.append('>') } + } val tParams = tp.typeParams if (toplevel) polyParamSig(tParams) superSig(ci.typeSymbol, ci.parents) @@ -297,7 +297,7 @@ object GenericSignatures { } } val throwsArgs = sym0.annotations flatMap ThrownException.unapply - if (needsJavaSig(info, throwsArgs)) { + if (needsJavaSig(info, throwsArgs)) try { jsig(info, toplevel = true) throwsArgs.foreach { t => @@ -307,7 +307,6 @@ object GenericSignatures { Some(builder.toString) } catch { case _: UnknownSig => None } - } else None } @@ -325,14 +324,14 @@ object GenericSignatures { * not Object, the dominator is Tc. <--- @PP: "which is not Object" not in spec. * - Otherwise, the dominator is the first element of the span. */ - private def intersectionDominator(parents: List[Type])(implicit ctx: Context): Type = { + private def intersectionDominator(parents: List[Type])(implicit ctx: Context): Type = if (parents.isEmpty) defn.ObjectType else { val psyms = parents map (_.typeSymbol) - if (psyms contains defn.ArrayClass) { + if (psyms contains defn.ArrayClass) // treat arrays specially defn.ArrayType.appliedTo(intersectionDominator(parents.filter(_.typeSymbol == defn.ArrayClass).map(t => t.argInfos.head))) - } else { + else { // implement new spec for erasure of refined types. def isUnshadowed(psym: Symbol) = !(psyms exists (qsym => (psym ne qsym) && (qsym isSubClass psym))) @@ -344,7 +343,6 @@ object GenericSignatures { (if (cs.hasNext) cs else parents.iterator.filter(p => isUnshadowed(p.classSymbol))).next() } } - } /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents. * This is important on Android because there is otherwise an interface explosion. 
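// Illustrative sketch (editor's aside, not from this patch): javaSig above emits the
// JVM generic signatures that java.lang.reflect later decodes. One quick way to see
// what such a signature carries, using only the standard library:
object GenericSigDemo {
  def main(args: Array[String]): Unit =
    classOf[java.util.Collections]
      .getMethods
      .filter(_.getName == "unmodifiableList")
      .foreach(m => println(m.toGenericString))
  // expected output is along the lines of:
  // public static <T> java.util.List<T> java.util.Collections.unmodifiableList(java.util.List<? extends T>)
}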
@@ -392,13 +390,12 @@ object GenericSignatures { // * higher-order type parameters // * type parameters appearing in method parameters // * type members not visible in an enclosing template - private def isTypeParameterInSig(sym: Symbol, initialSymbol: Symbol)(implicit ctx: Context) = { + private def isTypeParameterInSig(sym: Symbol, initialSymbol: Symbol)(implicit ctx: Context) = !sym.maybeOwner.isTypeParam && sym.isTypeParam && ( sym.isContainedIn(initialSymbol.topLevelClass) || (initialSymbol.is(Method) && initialSymbol.typeParams.contains(sym)) ) - } /** Extracts the type of the thrown exception from an AnnotationInfo. * @@ -406,14 +403,13 @@ object GenericSignatures { * as well as “new-style” `@throws[Exception]("cause")` annotations. */ private object ThrownException { - def unapply(ann: Annotation)(implicit ctx: Context): Option[Type] = { + def unapply(ann: Annotation)(implicit ctx: Context): Option[Type] = ann.tree match { case Apply(TypeApply(fun, List(tpe)), _) if tpe.isType && fun.symbol.owner == defn.ThrowsAnnot && fun.symbol.isConstructor => Some(tpe.typeOpt) case _ => None } - } } // @M #2585 when generating a java generic signature that includes @@ -474,9 +470,9 @@ object GenericSignatures { case _ => None } - } + private object RefOrAppliedType { def unapply(tp: Type)(implicit ctx: Context): Option[(Symbol, Type, List[Type])] = tp match { case TypeParamRef(_, _) => @@ -500,7 +496,7 @@ object GenericSignatures { private class NeedsSigCollector(implicit ctx: Context) extends TypeAccumulator[Boolean] { override def apply(x: Boolean, tp: Type): Boolean = - if (!x) { + if (!x) tp match { case RefinedType(parent, refinedName, refinedInfo) => val sym = parent.typeSymbol @@ -522,6 +518,6 @@ object GenericSignatures { case _ => foldOver(x, tp) } - } else x + else x } } diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index fee742c27af6..4fc444982858 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -73,10 +73,9 @@ class Getters extends MiniPhase with SymTransformer { else d // Drop the Local flag from all private[this] and protected[this] members. 
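// Illustrative sketch (editor's aside, hypothetical class, not from this patch):
// the object-private members whose Local flag is dropped above. `private[this]`
// restricts access to the defining instance, unlike plain `private`.
class Counter {
  private[this] var hits: Int = 0    // object-private: only *this* instance can touch it
  private var total: Int = 0         // class-private: other Counter instances may touch it
  def bump(other: Counter): Int = {
    hits += 1
    total += other.total             // compiles: `total` is class-private
    // hits += other.hits            // would not compile: `hits` is object-private
    hits + total
  }
}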
- if (d1.is(Local)) { + if (d1.is(Local)) if (d1 ne d) d1.resetFlag(Local) else d1 = d1.copySymDenotation(initFlags = d1.flags &~ Local) - } d1 } private val NoGetterNeededFlags = Method | Param | JavaDefined | JavaStatic diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala index 2cafd96a3c55..5b3ba45e1850 100644 --- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala @@ -35,7 +35,7 @@ class InterceptedMethods extends MiniPhase { override def transformIdent(tree: tpd.Ident)(implicit ctx: Context): Tree = transformRefTree(tree) - private def transformRefTree(tree: RefTree)(implicit ctx: Context): Tree = { + private def transformRefTree(tree: RefTree)(implicit ctx: Context): Tree = if (tree.symbol.isTerm && (defn.Any_## eq tree.symbol)) { val qual = tree match { case id: Ident => tpd.desugarIdentPrefix(id) @@ -46,7 +46,6 @@ class InterceptedMethods extends MiniPhase { rewritten } else tree - } // TODO: add missing cases from scalac private def poundPoundValue(tree: Tree)(implicit ctx: Context) = { @@ -77,8 +76,8 @@ class InterceptedMethods extends MiniPhase { case TermRef(prefix: ThisType, _) => tpd.This(prefix.cls) } - } + val Any_!= = defn.Any_!= val rewritten: Tree = tree.fun.symbol match { case Any_!= => diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala index 1dbb0b51dd97..910086b23712 100644 --- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala +++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala @@ -92,14 +92,15 @@ object LambdaLift { /** Set `liftedOwner(sym)` to `owner` if `owner` is more deeply nested * than the previous value of `liftedowner(sym)`. */ - def narrowLiftedOwner(sym: Symbol, owner: Symbol)(implicit ctx: Context): Unit = + def narrowLiftedOwner(sym: Symbol, owner: Symbol)(implicit ctx: Context): Unit = { if (sym.maybeOwner.isTerm && owner.isProperlyContainedIn(liftedOwner(sym)) && owner != sym) { ctx.log(i"narrow lifted $sym to $owner") changedLiftedOwner = true liftedOwner(sym) = owner - } + } + } /** Mark symbol `sym` as being free in `enclosure`, unless `sym` is defined * in `enclosure` or there is an intermediate class properly containing `enclosure` @@ -159,11 +160,12 @@ object LambdaLift { // Constructors and methods nested inside traits get the free variables // of the enclosing trait or class. // Conversely, local traits do not get free variables. 
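// Illustrative sketch (editor's aside, hypothetical names, not from this patch):
// the situation markFree records. `delta` is defined outside `inc` but used inside
// it, so it is a free variable of `inc`; lifting turns it into an extra parameter.
object LambdaLiftSketch {
  // before lifting: `inc` closes over the free variable `delta`
  def before(xs: List[Int], delta: Int): List[Int] = {
    def inc(x: Int): Int = x + delta
    xs.map(inc)
  }
  // after lifting (rewritten by hand for illustration): no capture remains
  private def incLifted(x: Int, delta: Int): Int = x + delta
  def after(xs: List[Int], delta: Int): List[Int] =
    xs.map(x => incLifted(x, delta))
}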
- if (!enclosure.is(Trait)) + if (!enclosure.is(Trait)) { if (symSet(free, enclosure).add(sym)) { changedFreeVars = true ctx.log(i"$sym is free in $enclosure") } + } } if (intermediate.isRealClass) intermediate else if (enclosure.isRealClass) enclosure @@ -171,7 +173,8 @@ object LambdaLift { else if (enclosure.isClass) enclosure else NoSymbol } - } catch { + } + catch { case ex: NoPath => println(i"error lambda lifting ${ctx.compilationUnit}: $sym is not visible from $enclosure") throw ex @@ -204,13 +207,12 @@ object LambdaLift { if (sym is Method) markCalled(sym, enclosure) else if (sym.isTerm) markFree(sym, enclosure) } - def captureImplicitThis(x: Type): Unit = { + def captureImplicitThis(x: Type): Unit = x match { case tr@TermRef(x, _) if (!tr.termSymbol.isStatic) => captureImplicitThis(x) case x: ThisType if (!x.tref.typeSymbol.isStaticOwner) => narrowTo(x.tref.typeSymbol.asClass) case _ => } - } captureImplicitThis(tree.tpe) case tree: Select => if (sym.is(Method) && isLocal(sym)) markCalled(sym, enclosure) @@ -243,7 +245,8 @@ object LambdaLift { case _ => } traverseChildren(tree) - } catch { //debug + } + catch { //debug case ex: Exception => println(i"$ex while traversing $tree") throw ex @@ -259,9 +262,11 @@ object LambdaLift { callee <- called(caller) fvs <- free get callee fv <- fvs - } markFree(fv, caller) + } + markFree(fv, caller) changedFreeVars - }) () + }) + () /** Compute final liftedOwner map by closing over caller dependencies */ private def computeLiftedOwners()(implicit ctx: Context): Unit = @@ -270,7 +275,8 @@ object LambdaLift { for { caller <- called.keys callee <- called(caller) - } { + } + { val normalizedCallee = callee.skipConstructor val calleeOwner = normalizedCallee.owner if (calleeOwner.isTerm) narrowLiftedOwner(caller, liftedOwner(normalizedCallee)) @@ -284,7 +290,8 @@ object LambdaLift { } } changedLiftedOwner - }) () + }) + () private def newName(sym: Symbol)(implicit ctx: Context): Name = if (sym.isAnonymousFunction && sym.owner.is(Method)) @@ -321,7 +328,7 @@ object LambdaLift { private def liftLocals()(implicit ctx: Context): Unit = { for ((local, lOwner) <- liftedOwner) { val (newOwner, maybeStatic) = - if (lOwner is Package) { + if (lOwner is Package) { val encClass = local.enclosingClass val topClass = local.topLevelClass val preferEncClass = @@ -338,7 +345,7 @@ object LambdaLift { else (lOwner, EmptyFlags) // Drop Module because class is no longer a singleton in the lifted context. var initFlags = local.flags &~ Module | Private | Lifted | maybeStatic - if (local is Method) { + if (local is Method) if (newOwner is Trait) // Drop Final when a method is lifted into a trait. // According to the JVM specification, a method declared inside interface cannot have the final flag. @@ -348,16 +355,16 @@ object LambdaLift { else // Add Final when a method is lifted into a class. 
initFlags = initFlags | Final - } local.copySymDenotation( owner = newOwner, name = newName(local), initFlags = initFlags, info = liftedInfo(local)).installAfter(thisPhase) } - for (local <- free.keys) + for (local <- free.keys) { if (!liftedOwner.contains(local)) local.copySymDenotation(info = liftedInfo(local)).installAfter(thisPhase) + } } // initialization diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index 1dc889712ee9..2802bae99fb3 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -61,9 +61,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { transformLazyVal(tree) - override def transformValDef(tree: ValDef)(implicit ctx: Context): Tree = { + override def transformValDef(tree: ValDef)(implicit ctx: Context): Tree = transformLazyVal(tree) - } def transformLazyVal(tree: ValOrDefDef)(implicit ctx: Context): Tree = { val sym = tree.symbol @@ -73,20 +72,18 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { tree else { val isField = sym.owner.isClass - if (isField) { + if (isField) if (sym.isAllOf(SyntheticModule)) transformSyntheticModule(tree) - else if (sym.isThreadUnsafe || ctx.settings.scalajs.value) { + else if (sym.isThreadUnsafe || ctx.settings.scalajs.value) if (sym.is(Module) && !ctx.settings.scalajs.value) { ctx.error(em"@threadUnsafe is only supported on lazy vals", sym.sourcePos) transformMemberDefThreadSafe(tree) } else transformMemberDefThreadUnsafe(tree) - } else transformMemberDefThreadSafe(tree) - } else transformLocalDef(tree) } } @@ -103,9 +100,9 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { data.defs.foreach(_.symbol.addAnnotation(Annotation(defn.ScalaStaticAnnot))) cpy.Template(template)(body = addInFront(data.defs, template.body)) } - } + private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match { case first :: rest if isSuperConstrCall(first) => first :: prefix ::: rest case _ => prefix ::: stats @@ -253,10 +250,9 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { ).enteredAfter(this) val containerTree = ValDef(containerSymbol, defaultValue(tpe)) - if (x.tpe.isNotNull && tpe <:< defn.ObjectType) { + if (x.tpe.isNotNull && tpe <:< defn.ObjectType) // can use 'null' value instead of flag Thicket(containerTree, mkDefThreadUnsafeNonNullable(x.symbol, containerSymbol, x.rhs)) - } else { val flagName = LazyBitMapName.fresh(x.name.asTermName) val flagSymbol = ctx.newSymbol(x.symbol.owner, flagName, containerFlags | Private, defn.BooleanType).enteredAfter(this) @@ -387,11 +383,11 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { ord = info.ord % flagsPerLong val id = info.ord / flagsPerLong val offsetById = offsetName(id) - if (ord != 0) { // there are unused bits in already existing flag + if (ord != 0) // there are unused bits in already existing flag offsetSymbol = claz.info.decl(offsetById) .suchThat(sym => sym.is(Synthetic) && sym.isTerm) .symbol.asTerm - } else { // need to create a new flag + else { // need to create a new flag offsetSymbol = ctx.newSymbol(claz, offsetById, Synthetic, defn.LongType).enteredAfter(this) offsetSymbol.addAnnotation(Annotation(defn.ScalaStaticAnnot)) val flagName = s"${StdNames.nme.BITMAP_PREFIX}$id".toTermName diff --git a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala index 06541170e6fa..62338d744b5c 100644 
--- a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala +++ b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala @@ -63,16 +63,15 @@ class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(implicit ctx: Cont isCurrent(meth) } - private def needsJUnit4Fix(meth: Symbol): Boolean = { + private def needsJUnit4Fix(meth: Symbol): Boolean = meth.annotations.nonEmpty && JUnit4Annotations.exists(annot => meth.hasAnnotation(annot)) && ctx.settings.mixinForwarderChoices.isAtLeastJunit - } final val PrivateOrAccessor: FlagSet = Private | Accessor final val PrivateOrAccessorOrDeferred: FlagSet = Private | Accessor | Deferred - def forwarderRhsFn(target: Symbol): List[Type] => List[List[Tree]] => Tree = - targs => vrefss => { + def forwarderRhsFn(target: Symbol): List[Type] => List[List[Tree]] => Tree = { + targs => vrefss => val tapp = superRef(target).appliedToTypes(targs) vrefss match { case Nil | List(Nil) => @@ -82,12 +81,11 @@ class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(implicit ctx: Cont case _ => tapp.appliedToArgss(vrefss) } - } + } - private def competingMethodsIterator(meth: Symbol): Iterator[Symbol] = { + private def competingMethodsIterator(meth: Symbol): Iterator[Symbol] = cls.baseClasses.iterator .filter(_ ne meth.owner) .map(base => meth.overriddenSymbol(base, cls)) .filter(_.exists) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala index e9e9cf6aeed4..8b978a895f5f 100644 --- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala +++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala @@ -23,7 +23,7 @@ class MoveStatics extends MiniPhase with SymTransformer { import tpd._ override def phaseName: String = MoveStatics.name - def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation = { + def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation = if (sym.hasAnnotation(defn.ScalaStaticAnnot) && sym.owner.is(Flags.Module) && sym.owner.companionClass.exists && (sym.is(Flags.Method) || !(sym.is(Flags.Mutable) && sym.owner.companionClass.is(Flags.Trait)))) { sym.owner.asClass.delete(sym.symbol) @@ -31,9 +31,8 @@ class MoveStatics extends MiniPhase with SymTransformer { sym.copySymDenotation(owner = sym.owner.companionClass) } else sym - } - override def transformStats(trees: List[Tree])(implicit ctx: Context): List[Tree] = { + override def transformStats(trees: List[Tree])(implicit ctx: Context): List[Tree] = if (ctx.owner.is(Flags.Package)) { val (classes, others) = trees.partition(x => x.isInstanceOf[TypeDef] && x.symbol.isClass) val pairs = classes.groupBy(_.symbol.name.stripModuleClassSuffix).asInstanceOf[Map[Name, List[TypeDef]]] @@ -51,7 +50,8 @@ class MoveStatics extends MiniPhase with SymTransformer { val staticAssigns = staticFields.map(x => Assign(ref(x.symbol), x.rhs.changeOwner(x.symbol, staticCostructor))) tpd.DefDef(staticCostructor, Block(staticAssigns, tpd.unitLiteral)) :: newBody - } else newBody + } + else newBody val oldTemplate = orig.rhs.asInstanceOf[Template] cpy.TypeDef(orig)(rhs = cpy.Template(oldTemplate)(body = newBodyWithStaticConstr)) @@ -62,7 +62,7 @@ class MoveStatics extends MiniPhase with SymTransformer { if (!module.symbol.is(Flags.Module)) move(companion, module) else { val allMembers = - (if(companion != null) {companion.rhs.asInstanceOf[Template].body} else Nil) ++ + (if (companion != null) {companion.rhs.asInstanceOf[Template].body} else Nil) ++ module.rhs.asInstanceOf[Template].body val 
(newModuleBody, newCompanionBody) = allMembers.partition(x => {assert(x.symbol.exists); x.symbol.owner == module.symbol}) Trees.flatten(rebuild(companion, newCompanionBody) :: rebuild(module, newModuleBody) :: Nil) @@ -76,6 +76,6 @@ class MoveStatics extends MiniPhase with SymTransformer { else List(rebuild(classes.head, classes.head.rhs.asInstanceOf[Template].body)) else move(classes.head, classes.tail.head) Trees.flatten(newPairs.toList.flatten ++ others) - } else trees - } + } + else trees } diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index 11a0bfbde1ea..59bec73819be 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -91,5 +91,6 @@ class NonLocalReturns extends MiniPhase { if (!ctx.scala2Mode) ctx.strictWarning("Non local returns are deprecated; use scala.util.control.NonLocalReturns instead", tree.sourcePos) nonLocalReturnThrow(tree.expr, tree.from.symbol).withSpan(tree.span) - } else tree + } + else tree } diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index 4fc4355752e3..b89fef2aad1d 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -102,7 +102,7 @@ object OverridingPairs { * overriding = curEntry.sym */ private def nextOverriding(): Unit = { - @tailrec def loop(): Unit = + @tailrec def loop(): Unit = { if (curEntry ne null) { overriding = curEntry.sym if (visited.contains(overriding)) { @@ -110,6 +110,7 @@ object OverridingPairs { loop() } } + } loop() nextEntry = curEntry } @@ -119,10 +120,10 @@ object OverridingPairs { * overriding = overriding member of the pair, provided hasNext is true * overridden = overridden member of the pair, provided hasNext is true */ - @tailrec final def next(): Unit = + @tailrec final def next(): Unit = { if (nextEntry ne null) { nextEntry = decls.lookupNextEntry(nextEntry) - if (nextEntry ne null) { + if (nextEntry ne null) try { overridden = nextEntry.sym if (overriding.owner != overridden.owner && matches(overriding, overridden)) { @@ -136,12 +137,13 @@ object OverridingPairs { // The root cause in this example is an illegal "override" of an inner trait ctx.error(ex, base.sourcePos) } - } else { + else { curEntry = curEntry.prev nextOverriding() } next() } + } nextOverriding() next() diff --git a/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala b/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala index 58a48632aa47..2c95b0ee72d1 100644 --- a/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala +++ b/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala @@ -33,7 +33,7 @@ import scala.annotation.constructorOnly class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages(ictx) { import tpd._ - override def transform(tree: Tree)(implicit ctx: Context): Tree = { + override def transform(tree: Tree)(implicit ctx: Context): Tree = if (tree.source != ctx.source && tree.source.exists) transform(tree)(ctx.withSource(tree.source)) else tree match { @@ -44,7 +44,6 @@ class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages( checkLevel(super.transform(tree)) case _ => checkLevel(super.transform(tree)) } - } /** Transform quoted trees while maintaining phase correctness */ override protected def 
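// Illustrative sketch (not from this patch): what NonLocalReturns above rewrites a
// `return` from inside a closure into. The return becomes a thrown control exception
// keyed to the enclosing method and caught at that method's boundary.
import scala.runtime.NonLocalReturnControl

def firstEven(xs: List[Int]): Option[Int] = {
  val key = new AnyRef
  try {
    xs.foreach(x => if (x % 2 == 0) throw new NonLocalReturnControl(key, Some(x): Option[Int]))
    None
  }
  catch {
    case ex: NonLocalReturnControl[_] if ex.key eq key =>
      ex.value.asInstanceOf[Option[Int]]
  }
}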
transformQuotation(body: Tree, quote: Tree)(implicit ctx: Context): Tree = { @@ -87,14 +86,13 @@ class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages( assert(checkSymLevel(tree.symbol, tree.tpe, tree.sourcePos).isEmpty) tree case Ident(name) => - if (name == nme.WILDCARD) { + if (name == nme.WILDCARD) untpd.Ident(name).withType(checkType(tree.sourcePos).apply(tree.tpe)).withSpan(tree.span) - } else { + else checkSymLevel(tree.symbol, tree.tpe, tree.sourcePos) match { case Some(tpRef) => tpRef case _ => tree } - } case _: TypeTree | _: AppliedTypeTree | _: Apply | _: TypeApply | _: UnApply | Select(_, OuterSelectName(_, _)) => tree.withType(checkTp(tree.tpe)) case _: ValOrDefDef | _: Bind => @@ -181,13 +179,14 @@ class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages( * @return Some(msg) if unsuccessful where `msg` is a potentially empty error message * to be added to the "inconsistent phase" message. */ - protected def tryHeal(sym: Symbol, tp: Type, pos: SourcePosition)(implicit ctx: Context): Option[Tree] = { + protected def tryHeal(sym: Symbol, tp: Type, pos: SourcePosition)(implicit ctx: Context): Option[Tree] = tp match { case tp: TypeRef => if (level == -1) { assert(ctx.inInlineMethod) None - } else { + } + else { val reqType = defn.QuotedTypeClass.typeRef.appliedTo(tp) val tag = ctx.typer.inferImplicitArg(reqType, pos.span) tag.tpe match { @@ -209,7 +208,6 @@ class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages( case _ => levelError(sym, tp, pos, "") } - } private def levelError(sym: Symbol, tp: Type, pos: SourcePosition, errMsg: String) given Context = { def symStr = @@ -222,5 +220,5 @@ class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages( | - but the access is at level $level.$errMsg""", pos) None } - } + diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 4d11ca143a7b..589a9c523120 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -315,7 +315,7 @@ object PatternMatcher { matchArgsPlan(caseAccessors.map(ref(scrutinee).select(_)), args, onSuccess) else if (unapp.tpe.widenSingleton.isRef(defn.BooleanClass)) TestPlan(GuardTest, unapp, unapp.span, onSuccess) - else { + else letAbstract(unapp) { unappResult => val isUnapplySeq = unapp.symbol.name == nme.unapplySeq if (isProductMatch(unapp.tpe.widen, args.length) && !isUnapplySeq) { @@ -348,7 +348,6 @@ object PatternMatcher { TestPlan(NonEmptyTest, unappResult, unapp.span, argsPlan) } } - } } // begin patternPlan @@ -365,11 +364,11 @@ object PatternMatcher { patternPlan(casted, pat, onSuccess) }) case UnApply(extractor, implicits, args) => - val unappPlan = if (defn.isBottomType(scrutinee.info)) { + val unappPlan = if (defn.isBottomType(scrutinee.info)) // Generate a throwaway but type-correct plan. // This plan will never execute because it'll be guarded by a `NonNullTest`. 
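// Illustrative sketch (not from this patch): the situation tryHeal in PCPCheckAndHeal
// above repairs. A type `T` bound outside a quote is referenced one level deeper inside
// it, which is only phase-consistent if a quoted type tag for `T` is in scope; the healer
// searches for that implicit. Sketched against the user-facing scala.quoted API.
import scala.quoted._

def makeSingletonList[T](x: Expr[T])(using Type[T], Quotes): Expr[List[T]] =
  '{ List[T]($x) }   // the reference to T inside the quote heals via the Type[T] evidence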
ResultPlan(tpd.Throw(tpd.nullLiteral)) - } else { + else { val mt @ MethodType(_) = extractor.tpe.widen var unapp = extractor.appliedTo(ref(scrutinee).ensureConforms(mt.paramInfos.head)) if (implicits.nonEmpty) unapp = unapp.appliedToArgs(implicits) @@ -412,14 +411,13 @@ object PatternMatcher { patternPlan(scrutinee, cdef.pat, onSuccess) } - private def matchPlan(tree: Match): Plan = { + private def matchPlan(tree: Match): Plan = letAbstract(tree.selector) { scrutinee => val matchError: Plan = ResultPlan(Throw(New(defn.MatchErrorClass.typeRef, ref(scrutinee) :: Nil))) tree.cases.foldRight(matchError) { (cdef, next) => SeqPlan(caseDefPlan(scrutinee, cdef), next) } } - } // ----- Optimizing plans --------------- @@ -639,20 +637,18 @@ object PatternMatcher { super.transform(tree) } } - override def apply(plan: LetPlan): Plan = { + override def apply(plan: LetPlan): Plan = if (toDrop(plan.sym)) apply(plan.body) else { initializer(plan.sym) = apply(initializer(plan.sym)) plan.body = apply(plan.body) plan } - } override def apply(plan: SeqPlan): Plan = { val newHead = apply(plan.head) - if (!canFallThrough(newHead)) { + if (!canFallThrough(newHead)) // If the head cannot fall through, the tail is dead code newHead - } else { plan.head = newHead plan.tail = apply(plan.tail) @@ -696,7 +692,7 @@ object PatternMatcher { val expectedTp = tpt.tpe // An outer test is needed in a situation like `case x: y.Inner => ...` - def outerTestNeeded: Boolean = { + def outerTestNeeded: Boolean = // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest` // generates an outer test based on `patType.prefix` with automatically dealises. expectedTp.dealias match { @@ -706,7 +702,6 @@ object PatternMatcher { case _ => false } - } def outerTest: Tree = thisPhase.transformFollowingDeep { val expectedOuter = singleton(expectedTp.normalizedPrefix) @@ -760,7 +755,7 @@ object PatternMatcher { // An extractor to recover the shape of plans that can become alternatives object AlternativesPlan { - def unapply(plan: LabeledPlan): Option[(List[Tree], Plan)] = { + def unapply(plan: LabeledPlan): Option[(List[Tree], Plan)] = plan.expr match { case SeqPlan(LabeledPlan(innerLabel, innerPlan), ons) if !canFallThrough(ons) => val outerLabel = plan.sym @@ -783,7 +778,6 @@ object PatternMatcher { case _ => None } - } } def recur(plan: Plan): List[(List[Tree], Plan)] = plan match { @@ -800,7 +794,7 @@ object PatternMatcher { else Nil } - private def hasEnoughSwitchCases(cases: List[(List[Tree], Plan)], required: Int): Boolean = { + private def hasEnoughSwitchCases(cases: List[(List[Tree], Plan)], required: Int): Boolean = // 1 because of the default case required <= 1 || { cases match { @@ -808,7 +802,6 @@ object PatternMatcher { case _ => false } } - } /** Emit cases of a switch */ private def emitSwitchCases(cases: List[(List[Tree], Plan)]): List[CaseDef] = (cases: @unchecked) match { @@ -910,7 +903,7 @@ object PatternMatcher { case TypeTest(tpt, trusted) => i"TypeTest($tpt, trusted=$trusted)" case _ => test.toString } - def showPlan(plan: Plan): Unit = + def showPlan(plan: Plan): Unit = { if (!seen.contains(plan.id)) { seen += plan.id sb append s"\n${plan.id}: " @@ -936,6 +929,7 @@ object PatternMatcher { sb.append(tree.show) } } + } showPlan(plan) sb.toString } diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index a3c2a3dbb064..954d63d36c68 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ 
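// Illustrative sketch (not from this patch): a hand-written analogue of the plan the
// pattern matcher above builds for
//   x match { case Some(v) if v > 0 => v; case _ => -1 }
// Each test is tried in sequence; a failed test falls through to the next alternative.
def positiveOrMinusOne(x: Option[Int]): Int = {
  if (x.isInstanceOf[Some[_]]) {             // TypeTest plan
    val v = x.asInstanceOf[Some[Int]].value  // Let plan binding the case variable
    if (v > 0) return v                      // GuardTest plan, then the ResultPlan
  }
  -1                                         // fallthrough: the default case
}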
b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -49,8 +49,11 @@ class Pickler extends Phase { val unit = ctx.compilationUnit pickling.println(i"unpickling in run ${ctx.runId}") - for { cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) - tree <- sliceTopLevel(unit.tpdTree, cls) } { + for { + cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) + tree <- sliceTopLevel(unit.tpdTree, cls) + } + { val pickler = new TastyPickler(cls) if (ctx.settings.YtestPickler.value) { beforePickling(cls) = tree.show @@ -75,6 +78,7 @@ class Pickler extends Phase { pickled.iterator.grouped(10).toList.zipWithIndex.map { case (row, i) => s"${i}0: ${row.mkString(" ")}" } + // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG if (pickling ne noPrinter) { println(i"**** pickled info of $cls") @@ -112,7 +116,7 @@ class Pickler extends Phase { } } - private def testSame(unpickled: String, previous: String, cls: ClassSymbol)(implicit ctx: Context) = + private def testSame(unpickled: String, previous: String, cls: ClassSymbol)(implicit ctx: Context) = { if (previous != unpickled) { output("before-pickling.txt", previous) output("after-pickling.txt", unpickled) @@ -120,4 +124,5 @@ class Pickler extends Phase { | | diff before-pickling.txt after-pickling.txt""".stripMargin) } + } } diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 59e38f232d6e..9e7d128901ac 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -18,11 +18,10 @@ object ProtectedAccessors { val name: String = "protectedAccessors" /** Is the current context's owner inside the access boundary established by `sym`? */ - def insideBoundaryOf(sym: Symbol)(implicit ctx: Context): Boolean = { - if (sym.is(JavaDefined)) { + def insideBoundaryOf(sym: Symbol)(implicit ctx: Context): Boolean = + if (sym.is(JavaDefined)) sym.is(JavaStatic) || // Java's static protected definitions are treated as public ctx.owner.enclosingPackageClass == sym.enclosingPackageClass - } else { // For Scala-defined symbols we currently allow private and protected accesses // from inner packages, and compensate by widening accessibility of such symbols to public. @@ -30,7 +29,6 @@ object ProtectedAccessors { val boundary = sym.accessBoundary(sym.enclosingPackageClass) ctx.owner.isContainedIn(boundary) || ctx.owner.isContainedIn(boundary.linkedClass) } - } /** Do we need a protected accessor if the current context's owner * is not in a subclass or subtrait of `sym`? @@ -40,8 +38,8 @@ object ProtectedAccessors { !sym.owner.is(Trait) && // trait methods need to be handled specially, are currently always public !insideBoundaryOf(sym) - /** Do we need a protected accessor for accessing sym from the current context's owner? */ - def needsAccessor(sym: Symbol)(implicit ctx: Context): Boolean = + /** Do we need a protected accessor for accessing sym from the current context's owner? 
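// Illustrative sketch (not from this patch): the access pattern ProtectedAccessors above
// exists for. On the JVM a protected member may only be called from an actual subclass,
// and trait bodies compile separately from the classes that mix them in, so a call like
// the one below goes through a generated accessor in the mixing-in class.
abstract class Base { protected def secret: Int = 41 }
trait Helper { self: Base =>
  def revealed: Int = secret + 1   // compiled via an accessor rather than a direct call
}
class Impl extends Base with Helper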
*/ + def needsAccessor(sym: Symbol)(implicit ctx: Context): Boolean = needsAccessorIfNotInSubclass(sym) && !ctx.owner.enclosingClass.derivesFrom(sym.owner) } diff --git a/compiler/src/dotty/tools/dotc/transform/ReifyQuotes.scala b/compiler/src/dotty/tools/dotc/transform/ReifyQuotes.scala index e05b1a54a174..bd1f48e1a45d 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifyQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifyQuotes.scala @@ -74,7 +74,7 @@ class ReifyQuotes extends MacroTransform { override def allowsImplicitSearch: Boolean = true - override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = { + override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match { case tree: RefTree if !ctx.inInlineMethod => assert(!tree.symbol.isQuote) @@ -84,10 +84,10 @@ class ReifyQuotes extends MacroTransform { s"${tree.symbol} should have been removed by PickledQuotes because it has a @quoteTypeTag") case _ => } - } - override def run(implicit ctx: Context): Unit = + override def run(implicit ctx: Context): Unit = { if (ctx.compilationUnit.needsStaging) super.run(freshStagingContext) + } protected def newTransformer(implicit ctx: Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = @@ -452,12 +452,11 @@ object ReifyQuotes { } /** Type used for the hole that will replace this splice */ - def getHoleType(body: tpd.Tree, splice: tpd.Tree)(implicit ctx: Context): Type = { + def getHoleType(body: tpd.Tree, splice: tpd.Tree)(implicit ctx: Context): Type = // For most expressions the splice.tpe but there are some types that are lost by lifting // that can be recoverd from the original tree. Currently the cases are: // * Method types: the splice represents a method reference map.get(body.symbol).map(_.tpe.widen).getOrElse(splice.tpe) - } def isLiftedSymbol(sym: Symbol)(implicit ctx: Context): Boolean = map.contains(sym) @@ -465,6 +464,6 @@ object ReifyQuotes { def getTrees: List[tpd.Tree] = trees.toList override def toString: String = s"Embedded($trees, $map)" - } } + diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala index 396023b248fd..a0a6f26508ca 100644 --- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala @@ -47,15 +47,12 @@ class SelectStatic extends MiniPhase with IdentityDenotTransformer { case _ => t } - override def transformApply(tree: tpd.Apply)(implicit ctx: Context): tpd.Tree = { + override def transformApply(tree: tpd.Apply)(implicit ctx: Context): tpd.Tree = normalize(tree) - } - override def transformTypeApply(tree: tpd.TypeApply)(implicit ctx: Context): tpd.Tree = { + override def transformTypeApply(tree: tpd.TypeApply)(implicit ctx: Context): tpd.Tree = normalize(tree) - } - override def transformClosure(tree: tpd.Closure)(implicit ctx: Context): tpd.Tree = { + override def transformClosure(tree: tpd.Closure)(implicit ctx: Context): tpd.Tree = normalize(tree) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala index 9a32eadf54a9..d774844c788d 100644 --- a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala +++ b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala @@ -21,7 +21,7 @@ class SetRootTree extends Phase { } private def traverser = new tpd.TreeTraverser { - override def traverse(tree: tpd.Tree)(implicit ctx: 
Context): Unit = { + override def traverse(tree: tpd.Tree)(implicit ctx: Context): Unit = tree match { case pkg: tpd.PackageDef => traverseChildren(pkg) @@ -38,7 +38,6 @@ class SetRootTree extends Phase { case _ => () } - } } } diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 1fffeaaad92f..d767b61cbc17 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -163,7 +163,7 @@ object Splicer { /** Returns the interpreted result of interpreting the code a call to the symbol with default arguments. * Return Some of the result or None if some error happen during the interpretation. */ - def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = { + def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = interpretTree(tree)(Map.empty) match { case obj: T => Some(obj) case obj => @@ -171,7 +171,6 @@ object Splicer { ctx.error(s"Interpreted tree returned a result of an unexpected type. Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) None } - } def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { case Apply(Select(Apply(TypeApply(fn, _), quoted :: Nil), nme.apply), _) if fn.symbol == defn.InternalQuoted_exprQuote => @@ -195,27 +194,27 @@ object Splicer { // TODO disallow interpreted method calls as arguments case Call(fn, args) => - if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) { + if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) interpretNew(fn.symbol, args.flatten.map(interpretTree)) - } else if (fn.symbol.is(Module)) { + else if (fn.symbol.is(Module)) interpretModuleAccess(fn.symbol) - } else if (fn.symbol.isStatic) { + else if (fn.symbol.isStatic) { val staticMethodCall = interpretedStaticMethodCall(fn.symbol.owner, fn.symbol) staticMethodCall(args.flatten.map(interpretTree)) - } else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) { - if (fn.name == nme.asInstanceOfPM) { + } + else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) + if (fn.name == nme.asInstanceOfPM) interpretModuleAccess(fn.qualifier.symbol) - } else { + else { val staticMethodCall = interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol) staticMethodCall(args.flatten.map(interpretTree)) } - } else if (env.contains(fn.symbol)) { + else if (env.contains(fn.symbol)) env(fn.symbol) - } else if (tree.symbol.is(InlineProxy)) { + else if (tree.symbol.is(InlineProxy)) interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) - } else { + else unexpectedTree(tree) - } case closureDef((ddef @ DefDef(_, _, (arg :: Nil) :: Nil, _, _))) => (obj: AnyRef) => interpretTree(ddef.rhs) given env.updated(arg.symbol, obj) @@ -267,9 +266,9 @@ object Splicer { private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = { val (inst, clazz) = - if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) { + if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) (null, loadReplLineClass(moduleClass)) - } else { + else { val inst = loadModule(moduleClass) (inst, inst.getClass) } @@ -297,12 +296,13 @@ object Splicer { private def unexpectedTree(tree: Tree)(implicit env: Env): Object = throw new StopInterpretation("Unexpected tree could not be interpreted: " + tree, tree.sourcePos) - private def loadModule(sym: Symbol): Object = { + private def loadModule(sym: Symbol): Object = if 
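// Illustrative sketch (not from this patch): a minimal macro whose right-hand-side splice
// is the kind of tree Splicer.interpret above evaluates at compile time, sketched against
// the user-facing scala.quoted API.
import scala.quoted._

inline def repeatTwice(inline s: String): String = ${ repeatTwiceImpl('s) }

def repeatTwiceImpl(s: Expr[String])(using Quotes): Expr[String] =
  '{ $s + $s }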
(sym.owner.is(Package)) { // is top level object val moduleClass = loadClass(sym.fullName.toString) moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) - } else { + } + else { // nested object in an object val className = { val pack = sym.topLevelClass.owner @@ -312,34 +312,31 @@ object Splicer { val clazz = loadClass(className) clazz.getConstructor().newInstance().asInstanceOf[Object] } - } private def loadReplLineClass(moduleClass: Symbol)(implicit env: Env): Class[_] = { val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) lineClassloader.loadClass(moduleClass.name.firstPart.toString) } - private def loadClass(name: String): Class[_] = { + private def loadClass(name: String): Class[_] = try classLoader.loadClass(name) catch { case _: ClassNotFoundException => val msg = s"Could not find class $name in classpath$extraMsg" throw new StopInterpretation(msg, pos) } - } - private def getMethod(clazz: Class[_], name: Name, paramClasses: List[Class[_]]): Method = { + private def getMethod(clazz: Class[_], name: Name, paramClasses: List[Class[_]]): Method = try clazz.getMethod(name.toString, paramClasses: _*) catch { case _: NoSuchMethodException => val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)$extraMsg" throw new StopInterpretation(msg, pos) } - } private def extraMsg = ". The most common reason for that is that you apply macros in the compilation run that defines them" - private def stopIfRuntimeException[T](thunk: => T, method: Method): T = { + private def stopIfRuntimeException[T](thunk: => T, method: Method): T = try thunk catch { case ex: RuntimeException => @@ -365,7 +362,6 @@ object Splicer { sw.write("\n") throw new StopInterpretation(sw.toString, pos) } - } /** List of classes of the parameters of the signature of `sym` */ private def paramsSig(sym: Symbol): List[Class[_]] = { @@ -422,9 +418,9 @@ object Splicer { } allParams.map(paramClass) } + } - } /** Exception that stops interpretation if some issue is found */ private class StopInterpretation(val msg: String, val pos: SourcePosition) extends Exception @@ -449,5 +445,5 @@ object Splicer { } } } - } + diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala index d55a26b33b5a..6b9cc00ff23e 100644 --- a/compiler/src/dotty/tools/dotc/transform/Staging.scala +++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala @@ -73,16 +73,17 @@ class Staging extends MacroTransform { } } - override def run(implicit ctx: Context): Unit = + override def run(implicit ctx: Context): Unit = { if (ctx.compilationUnit.needsStaging) super.run(freshStagingContext) + } protected def newTransformer(implicit ctx: Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = new PCPCheckAndHeal(ctx).transform(tree) } - } + object Staging { val name: String = "staging" } diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index ff2de86f5cc6..f25f2e94843f 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -34,185 +34,182 @@ class SuperAccessors(thisPhase: DenotTransformer) { import tpd._ - /** Some parts of trees will get a new owner in subsequent phases. - * These are value class methods, which will become extension methods. 
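// Illustrative sketch (not from this patch): the reflective access pattern loadModule
// above relies on for a top-level object. The object compiles to a class named
// `<fully.qualified.Name>$` whose singleton instance sits in a static MODULE$ field.
object Greeting { val text: String = "hi" }

val greetingInstance: AnyRef =
  Class.forName("Greeting$")       // assumes Greeting lives in the empty package
    .getField("MODULE$")
    .get(null)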
- * (By-name arguments used to be included also, but these - * don't get a new class anymore, they are just wrapped in a new method). - * - * These regions will have to be treated specially for the purpose - * of adding accessors. For instance, super calls from these regions - * always have to go through an accessor. - * - * The `invalidEnclClass` field, if different from NoSymbol, - * contains the symbol that is not a valid owner. - */ - private[this] var invalidEnclClass: Symbol = NoSymbol - - private def withInvalidCurrentClass[A](trans: => A)(implicit ctx: Context): A = { - val saved = invalidEnclClass - invalidEnclClass = ctx.owner - try trans - finally invalidEnclClass = saved + /** Some parts of trees will get a new owner in subsequent phases. + * These are value class methods, which will become extension methods. + * (By-name arguments used to be included also, but these + * don't get a new class anymore, they are just wrapped in a new method). + * + * These regions will have to be treated specially for the purpose + * of adding accessors. For instance, super calls from these regions + * always have to go through an accessor. + * + * The `invalidEnclClass` field, if different from NoSymbol, + * contains the symbol that is not a valid owner. + */ + private[this] var invalidEnclClass: Symbol = NoSymbol + + private def withInvalidCurrentClass[A](trans: => A)(implicit ctx: Context): A = { + val saved = invalidEnclClass + invalidEnclClass = ctx.owner + try trans + finally invalidEnclClass = saved + } + + private def validCurrentClass(implicit ctx: Context): Boolean = + ctx.owner.enclosingClass != invalidEnclClass + + /** List buffers for new accessor definitions, indexed by class */ + private val accDefs = newMutableSymbolMap[mutable.ListBuffer[Tree]] + + /** A super accessor call corresponding to `sel` */ + private def superAccessorCall(sel: Select)(implicit ctx: Context) = { + val Select(qual, name) = sel + val sym = sel.symbol + val clazz = qual.symbol.asClass + var superName = SuperAccessorName(name.asTermName) + if (clazz.is(Trait)) superName = superName.expandedName(clazz) + val superInfo = sel.tpe.widenSingleton.ensureMethodic + + val accRange = sel.span.focus + val superAcc = clazz.info.decl(superName) + .suchThat(_.signature == superInfo.signature).symbol + .orElse { + ctx.debuglog(s"add super acc ${sym.showLocated} to $clazz") + val maybeDeferred = if (clazz.is(Trait)) Deferred else EmptyFlags + val acc = ctx.newSymbol( + clazz, superName, Artifact | Method | maybeDeferred, + superInfo, coord = accRange).enteredAfter(thisPhase) + // Diagnostic for SI-7091 + if (!accDefs.contains(clazz)) + ctx.error( + s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.is(Package)}. 
Accessor required for ${sel} (${sel.show})", + sel.sourcePos) + else accDefs(clazz) += DefDef(acc, EmptyTree).withSpan(accRange) + acc } - private def validCurrentClass(implicit ctx: Context): Boolean = - ctx.owner.enclosingClass != invalidEnclClass - - /** List buffers for new accessor definitions, indexed by class */ - private val accDefs = newMutableSymbolMap[mutable.ListBuffer[Tree]] - - /** A super accessor call corresponding to `sel` */ - private def superAccessorCall(sel: Select)(implicit ctx: Context) = { - val Select(qual, name) = sel - val sym = sel.symbol - val clazz = qual.symbol.asClass - var superName = SuperAccessorName(name.asTermName) - if (clazz.is(Trait)) superName = superName.expandedName(clazz) - val superInfo = sel.tpe.widenSingleton.ensureMethodic - - val accRange = sel.span.focus - val superAcc = clazz.info.decl(superName) - .suchThat(_.signature == superInfo.signature).symbol - .orElse { - ctx.debuglog(s"add super acc ${sym.showLocated} to $clazz") - val maybeDeferred = if (clazz.is(Trait)) Deferred else EmptyFlags - val acc = ctx.newSymbol( - clazz, superName, Artifact | Method | maybeDeferred, - superInfo, coord = accRange).enteredAfter(thisPhase) - // Diagnostic for SI-7091 - if (!accDefs.contains(clazz)) - ctx.error( - s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.is(Package)}. Accessor required for ${sel} (${sel.show})", - sel.sourcePos) - else accDefs(clazz) += DefDef(acc, EmptyTree).withSpan(accRange) - acc - } - - This(clazz).select(superAcc).withSpan(sel.span) + This(clazz).select(superAcc).withSpan(sel.span) + } + + /** Check selection `super.f` for conforming to rules. If necessary, + * replace by a super accessor call. + */ + private def transformSuperSelect(sel: Select)(implicit ctx: Context): Tree = { + val Select(sup @ Super(_, mix), name) = sel + val sym = sel.symbol + assert(sup.symbol.exists, s"missing symbol in $sel: ${sup.tpe}") + val clazz = sup.symbol + + if (sym.isTerm && !sym.is(Method, butNot = Accessor) && !ctx.owner.isAllOf(ParamForwarder)) + // ParamForwaders as installed ParamForwarding.scala do use super calls to vals + ctx.error(s"super may be not be used on ${sym.underlyingSymbol}", sel.sourcePos) + else if (isDisallowed(sym)) + ctx.error(s"super not allowed here: use this.${sel.name} instead", sel.sourcePos) + else if (sym.is(Deferred)) { + val member = sym.overridingSymbol(clazz.asClass) + if (!mix.name.isEmpty || + !member.exists || + !(member.is(AbsOverride) && member.isIncompleteIn(clazz))) + ctx.error( + i"${sym.showLocated} is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'", + sel.sourcePos) + else ctx.log(i"ok super $sel ${sym.showLocated} $member $clazz ${member.isIncompleteIn(clazz)}") } - - /** Check selection `super.f` for conforming to rules. If necessary, - * replace by a super accessor call. 
- */ - private def transformSuperSelect(sel: Select)(implicit ctx: Context): Tree = { - val Select(sup @ Super(_, mix), name) = sel - val sym = sel.symbol - assert(sup.symbol.exists, s"missing symbol in $sel: ${sup.tpe}") - val clazz = sup.symbol - - if (sym.isTerm && !sym.is(Method, butNot = Accessor) && !ctx.owner.isAllOf(ParamForwarder)) - // ParamForwaders as installed ParamForwarding.scala do use super calls to vals - ctx.error(s"super may be not be used on ${sym.underlyingSymbol}", sel.sourcePos) - else if (isDisallowed(sym)) - ctx.error(s"super not allowed here: use this.${sel.name} instead", sel.sourcePos) - else if (sym.is(Deferred)) { - val member = sym.overridingSymbol(clazz.asClass) - if (!mix.name.isEmpty || - !member.exists || - !(member.is(AbsOverride) && member.isIncompleteIn(clazz))) - ctx.error( - i"${sym.showLocated} is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'", - sel.sourcePos) - else ctx.log(i"ok super $sel ${sym.showLocated} $member $clazz ${member.isIncompleteIn(clazz)}") - } - else { - val owner = sym.owner - if (!owner.is(Trait)) { - if (mix.name.isEmpty) { - // scala/bug#4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract. - for (intermediateClass <- clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)) { - val overriding = sym.overridingSymbol(intermediateClass) - if (overriding.is(Deferred, butNot = AbsOverride) && !overriding.owner.is(Trait)) - ctx.error( - s"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract", - sel.sourcePos) - } - } else { - // scala/scala-dev#143: - // a call `super[T].m` that resolves to `A.m` cannot be translated to correct bytecode if - // `A` is a class (not a trait / interface), but not the direct superclass. Invokespecial - // would select an overriding method in the direct superclass, rather than `A.m`. - // We allow this if there are statically no intervening overrides. - def hasClassOverride(member: Symbol, subCls: ClassSymbol): Boolean = { - if (subCls == defn.ObjectClass || subCls == member.owner) false - else if (member.overridingSymbol(subCls).exists) true - else hasClassOverride(member, subCls.superClass.asClass) - } - val superCls = clazz.asClass.superClass.asClass - if (owner != superCls && hasClassOverride(sym, superCls)) { + else { + val owner = sym.owner + if (!owner.is(Trait)) + if (mix.name.isEmpty) + // scala/bug#4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract. + for (intermediateClass <- clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)) { + val overriding = sym.overridingSymbol(intermediateClass) + if (overriding.is(Deferred, butNot = AbsOverride) && !overriding.owner.is(Trait)) ctx.error( - em"""Super call cannot be emitted: the selected $sym is declared in $owner, which is not the direct superclass of $clazz. - |An unqualified super call (super.${sym.name}) would be allowed.""", + s"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract", sel.sourcePos) - } } + else { + // scala/scala-dev#143: + // a call `super[T].m` that resolves to `A.m` cannot be translated to correct bytecode if + // `A` is a class (not a trait / interface), but not the direct superclass. Invokespecial + // would select an overriding method in the direct superclass, rather than `A.m`. 
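// Illustrative sketch (not from this patch): the scala/scala-dev#143 restriction spelled
// out in the comment above. A qualified super call that statically resolves to a class
// method not defined in the direct superclass cannot be compiled faithfully, because
// invokespecial would pick the direct superclass's override instead.
class A { def m: Int = 1 }
class B extends A { override def m: Int = 2 }
trait T extends A
class C extends B with T {
  def viaPlainSuper: Int = super.m   // fine: unqualified super call
  // def viaT: Int = super[T].m      // rejected: A.m is not in the direct superclass B
}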
+ // We allow this if there are statically no intervening overrides. + def hasClassOverride(member: Symbol, subCls: ClassSymbol): Boolean = + if (subCls == defn.ObjectClass || subCls == member.owner) false + else if (member.overridingSymbol(subCls).exists) true + else hasClassOverride(member, subCls.superClass.asClass) + val superCls = clazz.asClass.superClass.asClass + if (owner != superCls && hasClassOverride(sym, superCls)) + ctx.error( + em"""Super call cannot be emitted: the selected $sym is declared in $owner, which is not the direct superclass of $clazz. + |An unqualified super call (super.${sym.name}) would be allowed.""", + sel.sourcePos) } - } - if (name.isTermName && mix.name.isEmpty && - (clazz.is(Trait) || clazz != ctx.owner.enclosingClass || !validCurrentClass)) - superAccessorCall(sel)(ctx.withPhase(thisPhase.next)) - else sel } - - /** Disallow some super.XX calls targeting Any methods which would - * otherwise lead to either a compiler crash or runtime failure. - */ - private def isDisallowed(sym: Symbol)(implicit ctx: Context) = - sym.isTypeTestOrCast || - (sym eq defn.Any_==) || - (sym eq defn.Any_!=) || - (sym eq defn.Any_##) - - /** Transform select node, adding super and protected accessors as needed */ - def transformSelect(tree: Tree, targs: List[Tree])(implicit ctx: Context): Tree = { - val sel @ Select(qual, name) = tree - val sym = sel.symbol - - /** If an accesses to protected member of a class comes from a trait, - * or would need a protected accessor placed in a trait, we cannot - * perform the access to the protected member directly since jvm access - * restrictions require the call site to be in an actual subclass and - * traits don't count as subclasses in this respect. In this case - * we generate a super accessor instead. See SI-2296. - */ - def needsSuperAccessor = - ProtectedAccessors.needsAccessorIfNotInSubclass(sym) && - AccessProxies.hostForAccessorOf(sym).is(Trait) - qual match { - case _: This if needsSuperAccessor => - /* - * A trait which extends a class and accesses a protected member - * of that class cannot implement the necessary accessor method - * because jvm access restrictions require the call site to be in - * an actual subclass and traits don't count as subclasses in this - * respect. We generate a super accessor itself, which will be fixed - * by the implementing class. See SI-2296. - */ - superAccessorCall(sel) - case Super(_, mix) => - transformSuperSelect(sel) - case _ => - sel - } + if (name.isTermName && mix.name.isEmpty && + (clazz.is(Trait) || clazz != ctx.owner.enclosingClass || !validCurrentClass)) + superAccessorCall(sel)(ctx.withPhase(thisPhase.next)) + else sel + } + + /** Disallow some super.XX calls targeting Any methods which would + * otherwise lead to either a compiler crash or runtime failure. + */ + private def isDisallowed(sym: Symbol)(implicit ctx: Context) = + sym.isTypeTestOrCast || + (sym eq defn.Any_==) || + (sym eq defn.Any_!=) || + (sym eq defn.Any_##) + + /** Transform select node, adding super and protected accessors as needed */ + def transformSelect(tree: Tree, targs: List[Tree])(implicit ctx: Context): Tree = { + val sel @ Select(qual, name) = tree + val sym = sel.symbol + + /** If an accesses to protected member of a class comes from a trait, + * or would need a protected accessor placed in a trait, we cannot + * perform the access to the protected member directly since jvm access + * restrictions require the call site to be in an actual subclass and + * traits don't count as subclasses in this respect. 
In this case + * we generate a super accessor instead. See SI-2296. + */ + def needsSuperAccessor = + ProtectedAccessors.needsAccessorIfNotInSubclass(sym) && + AccessProxies.hostForAccessorOf(sym).is(Trait) + qual match { + case _: This if needsSuperAccessor => + /* + * A trait which extends a class and accesses a protected member + * of that class cannot implement the necessary accessor method + * because jvm access restrictions require the call site to be in + * an actual subclass and traits don't count as subclasses in this + * respect. We generate a super accessor itself, which will be fixed + * by the implementing class. See SI-2296. + */ + superAccessorCall(sel) + case Super(_, mix) => + transformSuperSelect(sel) + case _ => + sel } - - /** Wrap template to template transform `op` with needed initialization and finalization */ - def wrapTemplate(tree: Template)(op: Template => Template)(implicit ctx: Context): Template = { - accDefs(currentClass) = new mutable.ListBuffer[Tree] - val impl = op(tree) - val accessors = accDefs.remove(currentClass).get - if (accessors.isEmpty) impl - else { - val (params, rest) = impl.body span { - case td: TypeDef => !td.isClassDef - case vd: ValOrDefDef => vd.symbol.flags.is(ParamAccessor) - case _ => false - } - cpy.Template(impl)(body = params ++ accessors ++ rest) + } + + /** Wrap template to template transform `op` with needed initialization and finalization */ + def wrapTemplate(tree: Template)(op: Template => Template)(implicit ctx: Context): Template = { + accDefs(currentClass) = new mutable.ListBuffer[Tree] + val impl = op(tree) + val accessors = accDefs.remove(currentClass).get + if (accessors.isEmpty) impl + else { + val (params, rest) = impl.body span { + case td: TypeDef => !td.isClassDef + case vd: ValOrDefDef => vd.symbol.flags.is(ParamAccessor) + case _ => false } + cpy.Template(impl)(body = params ++ accessors ++ rest) } + } - /** Wrap `DefDef` producing operation `op`, potentially setting `invalidClass` info */ - def wrapDefDef(ddef: DefDef)(op: => DefDef)(implicit ctx: Context): DefDef = - if (isMethodWithExtension(ddef.symbol)) withInvalidCurrentClass(op) else op + /** Wrap `DefDef` producing operation `op`, potentially setting `invalidClass` info */ + def wrapDefDef(ddef: DefDef)(op: => DefDef)(implicit ctx: Context): DefDef = + if (isMethodWithExtension(ddef.symbol)) withInvalidCurrentClass(op) else op } diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index 824ec7db8b01..3f5222c16cd3 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -40,10 +40,9 @@ class SymUtils(val self: Symbol) extends AnyVal { /** All traits implemented by a class, except for those inherited through the superclass. * The empty list if `self` is a trait. */ - def mixins(implicit ctx: Context): List[ClassSymbol] = { + def mixins(implicit ctx: Context): List[ClassSymbol] = if (self.is(Trait)) Nil else directlyInheritedTraits - } def isTypeTest(implicit ctx: Context): Boolean = self == defn.Any_isInstanceOf || self == defn.Any_typeTest @@ -122,9 +121,8 @@ class SymUtils(val self: Symbol) extends AnyVal { if (self.isConstructor) self.owner else self /** The closest properly enclosing method or class of this symbol. 
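// Illustrative sketch (not from this patch): the distinction SymUtils.mixins above draws,
// per its doc comment. Traits already reachable through the superclass are not counted.
trait Ta
trait Tb
class Base extends Ta
class Sub extends Base with Ta with Tb   // mixins of Sub: Tb only; Ta arrives via Base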
*/ - final def enclosure(implicit ctx: Context): Symbol = { + final def enclosure(implicit ctx: Context): Symbol = self.owner.enclosingMethodOrClass - } /** The closest enclosing method or class of this symbol */ @tailrec final def enclosingMethodOrClass(implicit ctx: Context): Symbol = diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 964713673497..4f6d03f5e8db 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -210,7 +210,7 @@ class TailRec extends MiniPhase { /** The replacement `var`s for the params in `rewrittenParamSyms`. */ var varsForRewrittenParamSyms: List[Symbol] = Nil - private def getVarForRewrittenThis()(implicit ctx: Context): Symbol = { + private def getVarForRewrittenThis()(implicit ctx: Context): Symbol = varForRewrittenThis match { case Some(sym) => sym case none => @@ -221,9 +221,8 @@ class TailRec extends MiniPhase { varForRewrittenThis = Some(sym) sym } - } - private def getVarForRewrittenParam(param: Symbol)(implicit ctx: Context): Symbol = { + private def getVarForRewrittenParam(param: Symbol)(implicit ctx: Context): Symbol = rewrittenParamSyms.indexOf(param) match { case -1 => val sym = ctx.newSymbol(method, TailLocalName.fresh(param.name.toTermName), Synthetic | Mutable, param.info) @@ -232,7 +231,6 @@ class TailRec extends MiniPhase { sym case index => varsForRewrittenParamSyms(index) } - } /** Symbols of Labeled blocks that are in tail position. */ private val tailPositionLabeledSyms = new mutable.HashSet[Symbol]() @@ -240,7 +238,7 @@ class TailRec extends MiniPhase { private[this] var inTailPosition = true /** Rewrite this tree to contain no tail recursive calls */ - def transform(tree: Tree, tailPosition: Boolean)(implicit ctx: Context): Tree = { + def transform(tree: Tree, tailPosition: Boolean)(implicit ctx: Context): Tree = if (inTailPosition == tailPosition) transform(tree) else { val saved = inTailPosition @@ -248,7 +246,6 @@ class TailRec extends MiniPhase { try transform(tree) finally inTailPosition = saved } - } def yesTailTransform(tree: Tree)(implicit ctx: Context): Tree = transform(tree, tailPosition = true) @@ -301,7 +298,7 @@ class TailRec extends MiniPhase { method.matches(calledMethod) && enclosingClass.appliedRef.widen <:< prefix.tpe.widenDealias - if (isRecursiveCall) { + if (isRecursiveCall) if (inTailPosition) { tailrec.println("Rewriting tail recursive call: " + tree.span) rewrote = true @@ -312,17 +309,15 @@ class TailRec extends MiniPhase { case arg: Ident => arg.symbol != param case _ => true }) - } yield { - (getVarForRewrittenParam(param), arg) } + yield + (getVarForRewrittenParam(param), arg) - val assignThisAndParamPairs = { + val assignThisAndParamPairs = if (prefix eq EmptyTree) assignParamPairs - else { + else // TODO Opt: also avoid assigning `this` if the prefix is `this.` (getVarForRewrittenThis(), noTailTransform(prefix)) :: assignParamPairs - } - } val assignments = assignThisAndParamPairs match { case (lhs, rhs) :: Nil => @@ -345,7 +340,6 @@ class TailRec extends MiniPhase { seq(assignments, Typed(Return(unitLiteral.withSpan(tree.span), continueLabel), tpt)) } else fail("it is not in tail position") - } else if (isRecursiveSuperCall) fail("it targets a supertype") else @@ -387,10 +381,9 @@ class TailRec extends MiniPhase { case tree: Try => val expr = noTailTransform(tree.expr) - if (tree.finalizer eq EmptyTree) { + if (tree.finalizer eq EmptyTree) // SI-1672 Catches 
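// Illustrative sketch (not from this patch): the effect of the TailRec rewrite above,
// written by hand. Parameters become local vars, and the recursive tail call becomes
// assignments followed by a jump back to the start (modelled here with a while loop).
def sumTailRecStyle(xs0: List[Int], acc0: Int): Int = {
  var xs = xs0
  var acc = acc0
  while (true) {
    xs match {
      case Nil => return acc
      case h :: t =>          // was: return sum(t, acc + h)
        val newAcc = acc + h  // arguments evaluated before any assignment, as in assignParamPairs
        xs = t
        acc = newAcc
    }
  }
  acc // not reached
}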
are in tail position when there is no finalizer cpy.Try(tree)(expr, transformSub(tree.cases), EmptyTree) - } else cpy.Try(tree)( expr, noTailTransforms(tree.cases), diff --git a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala index 316aff6cac5e..3bffd8fb75a4 100644 --- a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala +++ b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala @@ -16,15 +16,13 @@ class TransformWildcards extends MiniPhase with IdentityDenotTransformer { override def phaseName: String = "transformWildcards" - override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = { + override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match { case vDef: ValDef => assert(!tpd.isWildcardArg(vDef.rhs)) case _ => } - } - override def transformValDef(tree: ValDef)(implicit ctx: Context): Tree = { + override def transformValDef(tree: ValDef)(implicit ctx: Context): Tree = if (ctx.owner.isClass) tree else cpy.ValDef(tree)(rhs = tree.rhs.wildcardToDefault) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 57b9b1690c69..5a7ef446310b 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -154,14 +154,13 @@ class TreeChecker extends Phase with SymTransformer { } assert(!nowDefinedSyms.contains(sym), i"doubly defined symbol: ${sym.fullName} in $tree") - if (ctx.settings.YcheckMods.value) { + if (ctx.settings.YcheckMods.value) tree match { case t: untpd.MemberDef => if (t.name ne sym.name) ctx.warning(s"symbol ${sym.fullName} name doesn't correspond to AST: ${t}") // todo: compare trees inside annotations case _ => } - } locally = sym :: locally nowDefinedSyms += sym case _ => @@ -357,8 +356,7 @@ class TreeChecker extends Phase with SymTransformer { } if (sym.exists && !sym.is(Private) && !symIsFixed && - !tree.name.is(OuterSelectName) // outer selects have effectively fixed symbols - ) { + !tree.name.is(OuterSelectName)) { // outer selects have effectively fixed symbols val qualTpe = tree.qualifier.typeOpt val member = if (sym.is(Private)) qualTpe.member(tree.name) @@ -437,11 +435,10 @@ class TreeChecker extends Phase with SymTransformer { } } - override def typedCase(tree: untpd.CaseDef, selType: Type, pt: Type)(implicit ctx: Context): CaseDef = { + override def typedCase(tree: untpd.CaseDef, selType: Type, pt: Type)(implicit ctx: Context): CaseDef = withPatSyms(tpd.patVars(tree.pat.asInstanceOf[tpd.Tree])) { super.typedCase(tree, selType, pt) } - } override def typedClosure(tree: untpd.Closure, pt: Type)(implicit ctx: Context): Tree = { if (!ctx.phase.lambdaLifted) nestingBlock match { @@ -522,7 +519,7 @@ class TreeChecker extends Phase with SymTransformer { /** * Checks that `New` nodes are always wrapped inside `Select` nodes. 
*/ - def assertSelectWrapsNew(tree: Tree)(implicit ctx: Context): Unit = { + def assertSelectWrapsNew(tree: Tree)(implicit ctx: Context): Unit = (new TreeAccumulator[tpd.Tree] { override def apply(parent: Tree, tree: Tree)(implicit ctx: Context): Tree = { tree match { @@ -537,7 +534,6 @@ class TreeChecker extends Phase with SymTransformer { parent // return the old parent so that my siblings see it } })(tpd.EmptyTree, tree) - } } object TreeChecker { diff --git a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala index 38dd28be2f16..37570fd3d182 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala @@ -37,9 +37,9 @@ object TreeExtractors { def unapply(t: Tree)(implicit ctx: Context): Option[Tree] = t match { case Apply(sel @ Select(ref, _), Nil) => val sym = ref.tpe.widenDealias.typeSymbol - if (isDerivedValueClass(sym) && (sel.symbol eq valueClassUnbox(sym.asClass))) { + if (isDerivedValueClass(sym) && (sel.symbol eq valueClassUnbox(sym.asClass))) Some(ref) - } else + else None case _ => None diff --git a/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala index 85adf9adb023..ccc272ccaf0c 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala @@ -59,17 +59,16 @@ abstract class TreeMapWithStages(@constructorOnly ictx: Context) extends TreeMap } /** Transform the quote `quote` which contains the quoted `body`. */ - protected def transformQuotation(body: Tree, quote: Tree)(implicit ctx: Context): Tree = { + protected def transformQuotation(body: Tree, quote: Tree)(implicit ctx: Context): Tree = quote match { case quote: Apply => cpy.Apply(quote)(quote.fun, body :: Nil) case quote: TypeApply => cpy.TypeApply(quote)(quote.fun, body :: Nil) } - } /** Transform the splice `splice` which contains the spliced `body`. 
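// Illustrative sketch (not from this patch): the shape the ValueClassUnbox extractor
// above recognizes, i.e. selecting the single underlying field of a derived value class.
class Meters(val underlying: Double) extends AnyVal
val distance = new Meters(3.0)
val raw: Double = distance.underlying   // the unbox selection ValueClassUnbox matches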
*/ protected def transformSplice(body: Tree, splice: Tree)(implicit ctx: Context): Tree - override def transform(tree: Tree)(implicit ctx: Context): Tree = { + override def transform(tree: Tree)(implicit ctx: Context): Tree = if (tree.source != ctx.source && tree.source.exists) transform(tree)(ctx.withSource(tree.source)) else reporting.trace(i"StagingTransformer.transform $tree at $level", staging, show = true) { @@ -127,7 +126,6 @@ abstract class TreeMapWithStages(@constructorOnly ictx: Context) extends TreeMap mapOverTree(enteredSyms) } } - } } diff --git a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala index b4cf235168e0..996df0dc5c8b 100644 --- a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala @@ -82,7 +82,7 @@ class TryCatchPatterns extends MiniPhase { } private def mkFallbackPatterMatchCase(patternMatchCases: List[CaseDef], span: Span)( - implicit ctx: Context): Option[CaseDef] = { + implicit ctx: Context): Option[CaseDef] = if (patternMatchCases.isEmpty) None else { val exName = ExceptionBinderName.fresh() @@ -96,6 +96,5 @@ class TryCatchPatterns extends MiniPhase { transformFollowing(Match(sel, patternMatchCases ::: rethrow :: Nil))) ) } - } - } + diff --git a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala index 149ad24c031b..252c6e56e90b 100644 --- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala +++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala @@ -23,7 +23,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { def phaseName: String = "genericTuples" - override def transformApply(tree: tpd.Apply)(implicit ctx: Context): tpd.Tree = { + override def transformApply(tree: tpd.Apply)(implicit ctx: Context): tpd.Tree = if (!tree.symbol.exists || tree.symbol.owner != defn.DynamicTupleModuleClass) tree else if (tree.symbol == defn.DynamicTuple_dynamicCons) transformTupleCons(tree) else if (tree.symbol == defn.DynamicTuple_dynamicTail) transformTupleTail(tree) @@ -32,7 +32,6 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { else if (tree.symbol == defn.DynamicTuple_dynamicApply) transformTupleApply(tree) else if (tree.symbol == defn.DynamicTuple_dynamicToArray) transformTupleToArray(tree) else tree - } private def transformTupleCons(tree: tpd.Apply)(implicit ctx: Context): Tree = { val head :: tail :: Nil = tree.args @@ -40,14 +39,14 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { case Some(tpes) => // Generate a the tuple directly with TupleN+1.apply val size = tpes.size - if (size <= 5) { + if (size <= 5) // val t = tail // TupleN+1(head, t._1, ..., t._n) evalOnce(Typed(tail, TypeTree(defn.tupleType(tpes.tail)))) { tup => val elements = head :: tupleSelectors(tup, size - 1) knownTupleFromElements(tpes, elements) } - } else { + else { // val it = Iterator.single(head) ++ tail.asInstanceOf[Product].productIterator // TupleN+1(it.next(), ..., it.next()) val fullIterator = ref(defn.DynamicTuple_consIterator).appliedToArgs(head :: tail :: Nil) @@ -69,18 +68,17 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { // Generate a the tuple directly with TupleN-1.apply val size = tpes.size assert(size > 0) - if (size == 1) { + if (size == 1) // () Literal(Constant(())) - } - else if (size <= 5) { + else if 
(size <= 5) // val t = tup.asInstanceOf[TupleN[...]] // TupleN-1(t._2, ..., t._n) evalOnce(Typed(tup, TypeTree(defn.tupleType(tpes)))) { tup => val elements = tupleSelectors(tup, size).tail knownTupleFromElements(tpes.tail, elements) } - } else if (size <= MaxTupleArity + 1) { + else if (size <= MaxTupleArity + 1) // val it = this.asInstanceOf[Product].productIterator // it.next() // TupleN-1(it.next(), ..., it.next()) @@ -90,10 +88,9 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { knownTupleFromIterator(size - 1, it).asInstance(tree.tpe) ) } - } else { + else // tup.asInstanceOf[TupleXXL].tailXXL tup.asInstance(defn.TupleXXLClass.typeRef).select("tailXXL".toTermName) - } case None => // No optimization, keep: // DynamicTuple.dynamicTail(tup) @@ -101,12 +98,11 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { } } - private def transformTupleSize(tree: tpd.Apply)(implicit ctx: Context): Tree = { + private def transformTupleSize(tree: tpd.Apply)(implicit ctx: Context): Tree = tree.tpe.tryNormalize match { case tp: ConstantType => Literal(tp.value) case _ => tree } - } private def transformTupleConcat(tree: tpd.Apply)(implicit ctx: Context): Tree = { val Apply(TypeApply(_, selfTp :: thatTp :: Nil), self :: that :: Nil) = tree @@ -118,7 +114,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { val m = tpes2.size if (n == 0) that else if (m == 0) self - else if (n + m < 5) { + else if (n + m < 5) // val t = self // val u = that // TupleN+M(t._1,..., t._N, u._1, ..., u._M) @@ -129,7 +125,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { knownTupleFromElements(types, elements) } } - } else { + else { // val it = self.asInstanceOf[Product].productIterator ++ that.asInstanceOf[Product].productIterator // TupleN+M(it.next(), ..., it.next()) val fullIterator = ref(defn.DynamicTuple_concatIterator).appliedToArgs(tree.args) @@ -154,13 +150,13 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { if (n < 0 || n >= size) { ctx.error("index out of bounds: " + n, nTree.underlyingArgument.sourcePos) tree - } else if (size <= MaxTupleArity) { + } + else if (size <= MaxTupleArity) // tup._n Typed(tup, TypeTree(defn.tupleType(tpes))).select(nme.selectorName(n)) - } else { + else // tup.asInstanceOf[TupleXXL].productElement(n) tup.asInstance(defn.TupleXXLClass.typeRef).select(nme.productElement).appliedTo(Literal(nTpe.value)) - } case (None, nTpe: ConstantType) if nTpe.value.intValue < 0 => ctx.error("index out of bounds: " + nTpe.value.intValue, nTree.sourcePos) tree @@ -176,16 +172,15 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { defn.tupleTypes(tup.tpe.widen, MaxTupleArity) match { case Some(tpes) => val size = tpes.size - if (size == 0) { + if (size == 0) // Array.emptyObjectArray ref(defn.ArrayModule).select("emptyObjectArray".toTermName).ensureApplied - } else if (size <= MaxTupleArity) { + else if (size <= MaxTupleArity) // DynamicTuple.productToArray(tup.asInstanceOf[Product]) ref(defn.DynamicTuple_productToArray).appliedTo(tup.asInstance(defn.ProductClass.typeRef)) - } else { + else // tup.asInstanceOf[TupleXXL].elems.clone() tup.asInstance(defn.TupleXXLClass.typeRef).select(nme.toArray) - } case None => // No optimization, keep: // DynamicTuple.dynamicToArray(tup) @@ -201,11 +196,10 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { 
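// Illustrative sketch (not from this patch): the small-arity fast paths that the tuple
// optimizations above (cons, tail, concat, apply) target. Consing onto a tuple of
// statically known small arity can be emitted as a direct TupleN+1 construction instead
// of going through the generic runtime helper.
val pair: (Int, Int) = (2, 3)
val generic: (Int, Int, Int) = 1 *: pair                  // generic cons
val specialized: (Int, Int, Int) = (1, pair._1, pair._2)  // what the optimized path builds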
ref(tupleModule).select(nme.apply).appliedToTypes(tpes).appliedToArgs(elements) } - private def knownTupleFromIterator(size: Int, it: Tree)(implicit ctx: Context): Tree = { - if (size == 0) { + private def knownTupleFromIterator(size: Int, it: Tree)(implicit ctx: Context): Tree = + if (size == 0) // Unit for empty tuple Literal(Constant(())) // TODO should this code be here? Or assert(size > specializedSize) - } else if (size <= MaxTupleArity) { // TupleN(it.next(), ..., it.next()) @@ -215,14 +209,13 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { val tpes = List.fill(size)(defn.AnyType) val elements = (0 until size).map(_ => it.select(nme.next)).toList knownTupleFromElements(tpes, elements) - } else { + } + else // No optimization, keep: // TupleXXL.fromIterator(it) ref(defn.TupleXXL_fromIterator).appliedTo(it) - } - } private def tupleSelectors(tup: Tree, size: Int)(implicit ctx: Context): List[Tree] = (0 until size).map(i => tup.select(nme.selectorName(i))).toList - } + diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index a17208ab4564..4b31f5ae28cf 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -157,7 +157,7 @@ object TypeTestsCasts { def interceptTypeApply(tree: TypeApply)(implicit ctx: Context): Tree = trace(s"transforming ${tree.show}", show = true) { /** Intercept `expr.xyz[XYZ]` */ - def interceptWith(expr: Tree): Tree = { + def interceptWith(expr: Tree): Tree = if (expr.isEmpty) tree else { val sym = tree.symbol @@ -241,19 +241,16 @@ object TypeTestsCasts { def testCls = testType.widen.classSymbol if (expr.tpe <:< testType) Typed(expr, tree.args.head) - else if (testCls eq defn.BoxedUnitClass) { + else if (testCls eq defn.BoxedUnitClass) // as a special case, casting to Unit always successfully returns Unit Block(expr :: Nil, Literal(Constant(()))).withSpan(expr.span) - } - else if (foundCls.isPrimitiveValueClass) { + else if (foundCls.isPrimitiveValueClass) if (testCls.isPrimitiveValueClass) primitiveConversion(expr, testCls) else derivedTree(box(expr), defn.Any_asInstanceOf, testType) - } else if (testCls.isPrimitiveValueClass) unbox(expr.ensureConforms(defn.ObjectType), testType) - else if (isDerivedValueClass(testCls)) { + else if (isDerivedValueClass(testCls)) expr // adaptToType in Erasure will do the necessary type adaptation - } else if (testCls eq defn.NothingClass) { // In the JVM `x.asInstanceOf[Nothing]` would throw a class cast exception except when `x eq null`. 
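// Illustrative sketch (not from this patch): two of the cast special cases handled by
// the asInstanceOf transformation in TypeTestsCasts above, written out by hand.
def castExamples(x: Any, i: Int): (Unit, Long) = {
  val u: Unit = { x; () }   // x.asInstanceOf[Unit]: evaluate the receiver, then yield ()
  val l: Long = i.toLong    // a primitive-to-primitive cast becomes a numeric conversion
  (u, l)
}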
// To avoid this loophole we execute `x` and then regardless of the result throw a `ClassCastException` @@ -309,7 +306,6 @@ object TypeTestsCasts { transformAsInstanceOf(erasure(tree.args.head.tpe)) else tree } - } val expr = tree.fun match { case Select(expr, _) => expr case i: Ident => diff --git a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala index b15acb4864b4..c9dbb2255508 100644 --- a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala @@ -67,7 +67,7 @@ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { val ctParams = origCls.typeParams val extensionMeth = extensionMethod(origMeth) - if (!ctParams.isEmpty) { + if (!ctParams.isEmpty) evalOnce(qual) { ev => val ctArgs = ctParams.map(tparam => TypeTree(tparam.typeRef.asSeenFrom(ev.tpe, origCls))) @@ -76,12 +76,11 @@ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { .appliedTo(ev) .appliedToArgss(mArgss) } - } else { + else ref(extensionMeth) .appliedToTypeTrees(mtArgs) .appliedTo(qual) .appliedToArgss(mArgss) - } } /** If this tree corresponds to a fully-applied value class method call, replace it diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala index 33cdb5f991d3..2a13e72a2fd4 100644 --- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala +++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala @@ -18,7 +18,7 @@ class YCheckPositions extends Phases.Phase { override def run(implicit ctx: Context): Unit = () // YCheck only - override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = { + override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match { case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass => new TreeTraverser { @@ -54,12 +54,10 @@ class YCheckPositions extends Phases.Phase { }.traverse(tree) case _ => } - } - private def isMacro(call: Tree)(implicit ctx: Context) = { + private def isMacro(call: Tree)(implicit ctx: Context) = if (ctx.phase <= ctx.postTyperPhase) call.symbol.is(Macro) else call.isInstanceOf[Select] // The call of a macro after typer is encoded as a Select while other inlines are Ident // TODO remove this distinction once Inline nodes of expanded macros can be trusted (also in Inliner.inlineCallTrace) - } - } + diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index e23d605641a9..1411577a8fac 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -136,14 +136,15 @@ object Applications { val sels = productSelectorTypes(tp, pos) if (sels.length == args.length) sels else tp :: Nil - } else tp :: Nil + } + else tp :: Nil def productSeqSelectors(tp: Type, argsNum: Int, pos: SourcePosition)(implicit ctx: Context): List[Type] = { - val selTps = productSelectorTypes(tp, pos) - val arity = selTps.length - val elemTp = unapplySeqTypeElemTp(selTps.last) - (0 until argsNum).map(i => if (i < arity - 1) selTps(i) else elemTp).toList - } + val selTps = productSelectorTypes(tp, pos) + val arity = selTps.length + val elemTp = unapplySeqTypeElemTp(selTps.last) + (0 until argsNum).map(i => if (i < arity - 1) selTps(i) else elemTp).toList + } def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: 
SourcePosition)(implicit ctx: Context): List[Type] = { @@ -162,12 +163,11 @@ object Applications { else fallback } - if (unapplyName == nme.unapplySeq) { + if (unapplyName == nme.unapplySeq) unapplySeq(unapplyResult) { if (isGetMatch(unapplyResult, pos)) unapplySeq(getTp)(fail) else fail } - } else { assert(unapplyName == nme.unapply) if (isProductMatch(unapplyResult, args.length, pos)) @@ -223,7 +223,8 @@ object Applications { } } -trait Applications extends Compatibility { self: Typer with Dynamic => +trait Applications extends Compatibility { + self: Typer & Dynamic => import Applications._ import tpd.{ cpy => _, _ } @@ -476,7 +477,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic => if (!meth.hasDefaultParams) EmptyTree else if (receiver.isEmpty) { - def findGetter(cx: Context): Tree = { + def findGetter(cx: Context): Tree = if (cx eq NoContext) EmptyTree else if (cx.scope != cx.outer.scope && cx.denotNamed(meth.name).hasAltWith(_.symbol == meth)) { @@ -487,8 +488,8 @@ trait Applications extends Compatibility { self: Typer with Dynamic => s"non-existent getter denotation ($denot) for getter($getterName)") findGetter(cx.outer) } - } else findGetter(cx.outer) - } + } + else findGetter(cx.outer) findGetter(ctx) } else { @@ -558,7 +559,8 @@ trait Applications extends Compatibility { self: Typer with Dynamic => tpd.Underscore(formal) else EmptyTree - } else { + } + else { val getter = if (sym.exists) // `sym` doesn't exist for structural calls findDefaultGetter(n + numArgs(normalizedFun)) @@ -574,7 +576,8 @@ trait Applications extends Compatibility { self: Typer with Dynamic => if (!defaultExpr.isEmpty) { val substParam = addTyped(treeToArg(defaultExpr), formal) matchArgs(args1, formals1.mapconserve(substParam), n + 1) - } else + } + else missingArg(n) } @@ -718,11 +721,12 @@ trait Applications extends Compatibility { self: Typer with Dynamic => private def lifter(implicit ctx: Context) = if (methRef.symbol.hasDefaultParams) LiftComplex else LiftImpure - override def liftFun(): Unit = + override def liftFun(): Unit = { if (liftedDefs == null) { liftedDefs = new mutable.ListBuffer[Tree] myNormalizedFun = lifter.liftApp(liftedDefs, myNormalizedFun) } + } /** The index of the first difference between lists of trees `xs` and `ys` * -1 if there are no differences. @@ -846,9 +850,10 @@ trait Applications extends Compatibility { self: Typer with Dynamic => // expected type through `constrainResult`. This can add more constraints which // help sharpen the inferred parameter types for the argument function literal(s). // This tweak is needed to make i1378 compile. 
- if (tree.args.exists(untpd.isFunctionWithUnknownParamType(_))) + if (tree.args.exists(untpd.isFunctionWithUnknownParamType(_))) { if (!constrainResult(tree.symbol, fun1.tpe.widen, proto.derivedFunProto(resultType = pt))) typr.println(i"result failure for $tree with type ${fun1.tpe.widen}, expected = $pt") + } /** Type application where arguments come from prototype, and no implicits are inserted */ def simpleApply(fun1: Tree, proto: FunProto)(implicit ctx: Context): Tree = @@ -898,7 +903,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic => tryWithImplicitOnQualifier(fun1, originalProto).getOrElse( if (proto eq originalProto) fail else tryWithImplicitOnQualifier(fun1, proto).getOrElse(fail)) - } + } } } @@ -962,9 +967,8 @@ trait Applications extends Compatibility { self: Typer with Dynamic => } def typedTypeApply(tree: untpd.TypeApply, pt: Type)(implicit ctx: Context): Tree = { - if (ctx.mode.is(Mode.Pattern)) { + if (ctx.mode.is(Mode.Pattern)) return errorTree(tree, "invalid pattern") - } val isNamed = hasNamedArg(tree.args) val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) @@ -977,12 +981,13 @@ trait Applications extends Compatibility { self: Typer with Dynamic => case typedFn => typedFn.tpe.widen match { case pt: PolyType => - if (typedArgs.length <= pt.paramInfos.length && !isNamed) + if (typedArgs.length <= pt.paramInfos.length && !isNamed) { if (typedFn.symbol == defn.Predef_classOf && typedArgs.nonEmpty) { val arg = typedArgs.head if (!arg.symbol.is(Module)) // Allow `classOf[Foo.type]` if `Foo` is an object checkClassType(arg.tpe, arg.sourcePos, traitReq = false, stablePrefixReq = false) } + } case _ => } def tryDynamicTypeApply(): Tree = typedFn match { @@ -1078,6 +1083,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic => } } } + // try first for unapply, then for unapplySeq tryWithName(nme.unapply) { sel => tryWithName(nme.unapplySeq)(_ => fallBack(sel)) // for backwards compatibility; will be dropped @@ -1120,7 +1126,8 @@ trait Applications extends Compatibility { self: Typer with Dynamic => unapp.println(i"case 1 $unapplyArgType ${ctx.typerState.constraint}") fullyDefinedType(unapplyArgType, "pattern selector", tree.span) selType.dropAnnot(defn.UncheckedAnnot) // need to drop @unchecked. Just because the selector is @unchecked, the pattern isn't. - } else { + } + else { // We ignore whether constraining the pattern succeeded. // Constraining only fails if the pattern cannot possibly match, // but useless pattern checks detect more such cases, so we simply rely on them instead. @@ -1414,12 +1421,12 @@ trait Applications extends Compatibility { self: Typer with Dynamic => overload.println(i"compare($alt1, $alt2)? 
$tp1 $tp2 $ownerScore $winsType1 $winsType2") if (ownerScore == 1) - if (winsType1 || !winsType2) 1 else 0 - else if (ownerScore == -1) - if (winsType2 || !winsType1) -1 else 0 - else if (winsType1) - if (winsType2) 0 else 1 - else + if (winsType1 || !winsType2) 1 else 0 + else if (ownerScore == -1) + if (winsType2 || !winsType1) -1 else 0 + else if (winsType1) + if (winsType2) 0 else 1 + else if (winsType2) -1 else 0 } @@ -1626,13 +1633,12 @@ trait Applications extends Compatibility { self: Typer with Dynamic => def narrowBySize(alts: List[TermRef]): List[TermRef] = alts.filter(sizeFits(_)) - def narrowByShapes(alts: List[TermRef]): List[TermRef] = { + def narrowByShapes(alts: List[TermRef]): List[TermRef] = if (normArgs exists untpd.isFunctionWithUnknownParamType) if (hasNamedArg(args)) narrowByTrees(alts, args map treeShape, resultType) else narrowByTypes(alts, normArgs map typeShape, resultType) else alts - } def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] = { val alts2 = alts.filter(alt => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 217f76d03bad..53cc76bf853d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -49,10 +49,9 @@ object Checking { */ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type = NoType)(implicit ctx: Context): Unit = { args.lazyZip(boundss).foreach { (arg, bound) => - if (!bound.isLambdaSub && !arg.tpe.hasSimpleKind) { + if (!bound.isLambdaSub && !arg.tpe.hasSimpleKind) // see MissingTypeParameterFor ctx.error(ex"missing type parameter(s) for $arg", arg.sourcePos) - } } for ((arg, which, bound) <- ctx.boundsViolations(args, boundss, instantiate, app)) ctx.error( @@ -263,7 +262,8 @@ object Checking { tp.withPrefix(pre1) } else tp - } catch { + } + catch { case ex: CyclicReference => ctx.debuglog(i"cycle detected for $tp, $nestedCycleOK, $cycleOK") if (cycleOK) LazyRef(_ => tp) @@ -298,9 +298,8 @@ object Checking { try checker.checkInfo(info) catch { case ex: CyclicReference => - if (reportErrors) { + if (reportErrors) errorType(i"illegal cyclic reference: ${checker.where} ${checker.lastChecked} of $sym refers back to the type itself", sym.sourcePos) - } else info } } @@ -321,8 +320,9 @@ object Checking { def forwardRef(tree: Tree) = flag("forward", tree) def selfRef(tree: Tree) = flag("self", tree) val checkTree = new TreeAccumulator[Unit] { - def checkRef(tree: Tree, sym: Symbol) = + def checkRef(tree: Tree, sym: Symbol) = { if (sym.maybeOwner == refineCls && !seen(sym)) forwardRef(tree) + } def apply(x: Unit, tree: Tree)(implicit ctx: Context) = tree match { case tree: MemberDef => foldOver(x, tree) @@ -371,7 +371,7 @@ object Checking { for (parent <- parents; mbr <- parent.abstractTypeMembers if qualifies(mbr.symbol)) yield mbr.name.asTypeName - for (name <- abstractTypeNames) + for (name <- abstractTypeNames) try { val mbr = joint.member(name) mbr.info match { @@ -392,16 +392,19 @@ object Checking { def checkWellFormed(sym: Symbol)(implicit ctx: Context): Unit = { def fail(msg: Message) = ctx.error(msg, sym.sourcePos) - def checkWithDeferred(flag: FlagSet) = + def checkWithDeferred(flag: FlagSet) = { if (sym.isOneOf(flag)) fail(AbstractMemberMayNotHaveModifier(sym, flag)) - def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: => String) = + } + def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: => 
String) = { if (sym.isAllOf(flag1 | flag2)) fail(msg) + } def checkCombination(flag1: FlagSet, flag2: FlagSet) = checkNoConflict(flag1, flag2, i"illegal combination of modifiers: `${flag1.flagsString}` and `${flag2.flagsString}` for: $sym") - def checkApplicable(flag: FlagSet, ok: Boolean) = + def checkApplicable(flag: FlagSet, ok: Boolean) = { if (!ok && !sym.is(Synthetic)) fail(i"modifier `${flag.flagsString}` is not allowed for this definition") + } if (sym.is(Inline) && ( sym.is(ParamAccessor) && sym.owner.isClass @@ -575,10 +578,9 @@ object Checking { ctx.error(ValueClassParameterMayNotBeCallByName(clazz, param), param.sourcePos) if (param.is(Erased)) ctx.error("value class first parameter cannot be `erased`", param.sourcePos) - else { + else for (p <- params if !p.is(Erased)) ctx.error("value class can only have one non `erased` parameter", p.sourcePos) - } case Nil => ctx.error(ValueClassNeedsOneValParam(clazz), clazz.sourcePos) } @@ -599,8 +601,9 @@ trait Checking { Checking.checkNonCyclicInherited(joint, parents, decls, posd) /** Check that type `tp` is stable. */ - def checkStable(tp: Type, pos: SourcePosition)(implicit ctx: Context): Unit = + def checkStable(tp: Type, pos: SourcePosition)(implicit ctx: Context): Unit = { if (!tp.isStable) ctx.error(ex"$tp is not stable", pos) + } /** Check that all type members of `tp` have realizable bounds */ def checkRealizableBounds(cls: Symbol, pos: SourcePosition)(implicit ctx: Context): Unit = { @@ -684,13 +687,12 @@ trait Checking { * @pre sym.is(GivenOrImplicit) */ def checkImplicitConversionDefOK(sym: Symbol)(implicit ctx: Context): Unit = { - def check(): Unit = { + def check(): Unit = checkFeature( nme.implicitConversions, i"Definition of implicit conversion $sym", ctx.owner.topLevelClass, sym.sourcePos) - } sym.info.stripPoly match { case mt @ MethodType(_ :: Nil) @@ -709,7 +711,7 @@ trait Checking { * - it is defined in Predef * - it is the scala.reflect.Selectable.reflectiveSelectable conversion */ - def checkImplicitConversionUseOK(sym: Symbol, posd: Positioned)(implicit ctx: Context): Unit = + def checkImplicitConversionUseOK(sym: Symbol, posd: Positioned)(implicit ctx: Context): Unit = { if (sym.exists) { val conv = if (sym.isOneOf(GivenOrImplicit)) sym @@ -726,6 +728,7 @@ trait Checking { checkFeature(nme.implicitConversions, i"Use of implicit conversion ${conv.showLocated}", NoSymbol, posd.sourcePos) } + } private def infixOKSinceFollowedBy(tree: untpd.Tree): Boolean = tree match { case _: untpd.Block | _: untpd.Match => true @@ -779,9 +782,10 @@ trait Checking { def checkFeature(name: TermName, description: => String, featureUseSite: Symbol, - pos: SourcePosition)(implicit ctx: Context): Unit = + pos: SourcePosition)(implicit ctx: Context): Unit = { if (!ctx.featureEnabled(name)) ctx.featureWarning(name.toString, description, featureUseSite, required = false, pos) + } /** Check that `tp` is a class type and that any top-level type arguments in this type * are feasible, i.e. that their lower bound conforms to their upper bound. 
If a type @@ -829,10 +833,9 @@ trait Checking { sym.name == nme.apply && sym.is(Synthetic) && sym.owner.is(Module) && sym.owner.companionClass.is(Case) def isCaseClassNew(sym: Symbol): Boolean = sym.isPrimaryConstructor && sym.owner.is(Case) && sym.owner.isStatic - def isCaseObject(sym: Symbol): Boolean = { + def isCaseObject(sym: Symbol): Boolean = // TODO add alias to Nil in scala package sym.is(Case) && sym.is(Module) - } val allow = ctx.erasedTypes || ctx.inInlineMethod || @@ -870,9 +873,10 @@ trait Checking { decl.is(JavaDefined) && other.is(JavaDefined) && decl.is(Method) != other.is(Method) if (decl.matches(other) && !javaFieldMethodPair) { - def doubleDefError(decl: Symbol, other: Symbol): Unit = + def doubleDefError(decl: Symbol, other: Symbol): Unit = { if (!decl.info.isErroneous && !other.info.isErroneous) ctx.error(DoubleDefinition(decl, other, cls), decl.sourcePos) + } if (decl is Synthetic) doubleDefError(other, decl) else doubleDefError(decl, other) } @@ -892,7 +896,7 @@ trait Checking { } } - def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context): Unit = + def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context): Unit = { if (!ctx.isAfterTyper) { val called = call.tpe.classSymbol if (caller.is(Trait)) @@ -926,14 +930,14 @@ trait Checking { case _ => } } + } /** Check that `tpt` does not define a higher-kinded type */ def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree = - if (!tpt.tpe.hasSimpleKind && !ctx.compilationUnit.isJava) { + if (!tpt.tpe.hasSimpleKind && !ctx.compilationUnit.isJava) // be more lenient with missing type params in Java, // needed to make pos/java-interop/t1196 work. errorTree(tpt, MissingTypeParameterFor(tpt.tpe)) - } else tpt /** Verify classes extending AnyVal meet the requirements */ @@ -1023,12 +1027,13 @@ trait Checking { def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(implicit ctx: Context): Unit = { val checker = new TreeTraverser { def traverse(t: Tree)(implicit ctx: Context) = { - def check(owner: Symbol, checkedSym: Symbol) = + def check(owner: Symbol, checkedSym: Symbol) = { if (t.span.isSourceDerived && owner == badOwner) t match { case t: RefTree if allowed(t.name, checkedSym) => case _ => ctx.error(i"illegal reference to $checkedSym from $where", t.sourcePos) } + } val sym = t.symbol t match { case Ident(_) | Select(This(_), _) => check(sym.maybeOwner, sym) @@ -1041,7 +1046,7 @@ trait Checking { } /** Check that we are in an inline context (inside an inline method or in inline code) */ - def checkInInlineContext(what: String, posd: Positioned)(implicit ctx: Context): Unit = + def checkInInlineContext(what: String, posd: Positioned)(implicit ctx: Context): Unit = { if (!ctx.inInlineMethod && !ctx.isInlineContext) { val inInlineUnapply = ctx.owner.ownersIterator.exists(owner => owner.name.isUnapplyName && owner.is(Inline) && owner.is(Method)) @@ -1050,6 +1055,7 @@ trait Checking { else "can only be used in an inline method" ctx.error(em"$what $msg", posd.sourcePos) } + } /** 1. Check that all case classes that extend `scala.Enum` are `enum` cases * 2. Check that case class `enum` cases do not extend java.lang.Enum. 
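For orientation, a rough sketch of the value-class shapes that the Checking.scala hunks above accept or reject; the class names are illustrative only, not part of the patch:

  class Meters(val underlying: Double) extends AnyVal    // accepted: exactly one val parameter
  // class Pair(val a: Int, val b: Int) extends AnyVal   // rejected: value class can only have one non `erased` parameter
  // class Box() extends AnyVal                          // rejected: ValueClassNeedsOneValParam
  // class Delayed(x: => Int) extends AnyVal             // rejected: ValueClassParameterMayNotBeCallByName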
diff --git a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala index 0dcba1f6907c..00abab45a3eb 100644 --- a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala +++ b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala @@ -53,7 +53,8 @@ object ConstFold { val x = compX if (x ne null) tree.withType(ConstantType(x)).asInstanceOf[T] else tree - } catch { + } + catch { case _: ArithmeticException => tree // the code will crash at runtime, // but that is better than the // compiler itself crashing diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index 8ef5b3e8a494..034f2570e05c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -19,7 +19,8 @@ import transform.SymUtils._ import ErrorReporting.errorTree /** A typer mixin that implements typeclass derivation functionality */ -trait Deriving { this: Typer => +trait Deriving { + this: Typer => /** A helper class to derive type class instances for one class or object * @param cls The class symbol of the class or object with a `derives` clause @@ -49,14 +50,13 @@ trait Deriving { this: Typer => val instanceName = s"derived$$$clsName".toTermName if (ctx.denotNamed(instanceName).exists) ctx.error(i"duplicate typeclass derivation for $clsName", pos) - else { + else // If we set the Synthetic flag here widenGiven will widen too far and the // derived instance will have too low a priority to be selected over a freshly // derived instance at the summoning site. synthetics += ctx.newSymbol(ctx.owner, instanceName, Given | Method, info, coord = pos.span) .entered - } } /** Check derived type tree `derived` for the following well-formedness conditions: @@ -174,12 +174,14 @@ trait Deriving { this: Typer => } addInstance(derivedParams, Nil, List(instanceType)) - } else if (instanceArity == 0 && !clsParams.exists(_.info.isLambdaSub)) { + } + else if (instanceArity == 0 && !clsParams.exists(_.info.isLambdaSub)) { // case (b) ... 
see description above val instanceType = clsType.appliedTo(clsParams.map(_.typeRef)) val evidenceParamInfos = clsParams.map(param => List(param.typeRef)) addInstance(clsParams, evidenceParamInfos, List(instanceType)) - } else + } + else cannotBeUnified } @@ -266,8 +268,8 @@ trait Deriving { this: Typer => import tpd._ /** The type class instance definition with symbol `sym` */ - def typeclassInstance(sym: Symbol)(implicit ctx: Context): List[Type] => (List[List[tpd.Tree]] => tpd.Tree) = - (tparamRefs: List[Type]) => (paramRefss: List[List[tpd.Tree]]) => { + def typeclassInstance(sym: Symbol)(implicit ctx: Context): List[Type] => (List[List[tpd.Tree]] => tpd.Tree) = { + (tparamRefs: List[Type]) => (paramRefss: List[List[tpd.Tree]]) => val tparams = tparamRefs.map(_.typeSymbol.asType) val params = if (paramRefss.isEmpty) Nil else paramRefss.head.map(_.symbol.asTerm) tparams.foreach(ctx.enter) @@ -287,7 +289,7 @@ trait Deriving { this: Typer => val module = untpd.ref(companionRef(resultType)).withSpan(sym.span) val rhs = untpd.Select(module, nme.derived) typed(rhs, resultType) - } + } def syntheticDef(sym: Symbol): Tree = tpd.polyDefDef(sym.asTerm, typeclassInstance(sym)(ctx.fresh.setOwner(sym).setNewScope)) diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala index ef61df2f8141..a7fe1ac69b9c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala +++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala @@ -20,13 +20,12 @@ object Docstrings { * @param sym The symbol for which the comment is being cooked. * @param owner The class for which comments are being cooked. */ - def cookComment(sym: Symbol, owner: Symbol)(implicit ctx: Context): Option[Comment] = { + def cookComment(sym: Symbol, owner: Symbol)(implicit ctx: Context): Option[Comment] = ctx.docCtx.flatMap { docCtx => expand(sym, owner)(ctx, docCtx) } - } - private def expand(sym: Symbol, owner: Symbol)(implicit ctx: Context, docCtx: ContextDocstrings): Option[Comment] = { + private def expand(sym: Symbol, owner: Symbol)(implicit ctx: Context, docCtx: ContextDocstrings): Option[Comment] = docCtx.docstring(sym).flatMap { case cmt if cmt.isExpanded => Some(cmt) @@ -48,7 +47,6 @@ object Docstrings { commentWithUsecases } } - } private def expandComment(sym: Symbol, owner: Symbol, comment: Comment)(implicit ctx: Context, docCtx: ContextDocstrings): Comment = { val tplExp = docCtx.templateExpander @@ -58,13 +56,12 @@ object Docstrings { newComment } - private def expandComment(sym: Symbol)(implicit ctx: Context, docCtx: ContextDocstrings): Option[Comment] = { + private def expandComment(sym: Symbol)(implicit ctx: Context, docCtx: ContextDocstrings): Option[Comment] = if (sym eq NoSymbol) None - else { + else for { cmt <- docCtx.docstring(sym) if !cmt.isExpanded _ = expandComment(sym.owner) - } yield expandComment(sym, sym.owner, cmt) - } - } + } + yield expandComment(sym, sym.owner, cmt) } diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 700aa032d237..6d5685e41cb7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -38,7 +38,9 @@ object Dynamic { * or `applyDynamic` on a `Selectable` instance. @See handleStructural. 
* */ -trait Dynamic { self: Typer with Applications => +trait Dynamic { + self: Typer & Applications => + import Dynamic._ import tpd._ @@ -66,7 +68,7 @@ trait Dynamic { self: Typer with Applications => } } - tree.fun match { + tree.fun match { case sel @ Select(qual, name) if !isDynamicMethod(name) => typedDynamicApply(qual, name, sel.span, Nil) case TypeApply(sel @ Select(qual, name), targs) if !isDynamicMethod(name) => diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index ce9ba2ab7b60..af30cdfa7883 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -62,11 +62,11 @@ object ErrorReporting { def anonymousTypeMemberStr(tpe: Type): String = { val kind = tpe match { - case _: TypeBounds => "type with bounds" - case _: MethodOrPoly => "method" - case _ => "value of type" - } - em"$kind $tpe" + case _: TypeBounds => "type with bounds" + case _: MethodOrPoly => "method" + case _ => "value of type" + } + em"$kind $tpe" } def overloadedAltsStr(alts: List[SingleDenotation]): String = diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 2e2d3b5b55cc..d14f8ceb1f5c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -93,7 +93,7 @@ object Implicits { protected def filterMatching(pt: Type)(implicit ctx: Context): List[Candidate] = { record("filterMatching") - def candidateKind(ref: TermRef)(implicit ctx: Context): Candidate.Kind = /*trace(i"candidateKind $ref $pt")*/ { + def candidateKind(ref: TermRef)(implicit ctx: Context): Candidate.Kind = { /*trace(i"candidateKind $ref $pt")*/ def viewCandidateKind(tpw: Type, argType: Type, resType: Type): Candidate.Kind = { @@ -280,7 +280,7 @@ object Implicits { } /** The implicit references that are eligible for type `tp`. */ - def eligible(tp: Type): List[Candidate] = { + def eligible(tp: Type): List[Candidate] = if (tp.hash == NotCached) computeEligible(tp) else { val eligibles = eligibleCache.get(tp) @@ -300,7 +300,6 @@ object Implicits { result } } - } private def computeEligible(tp: Type): List[Candidate] = /*>|>*/ trace(i"computeEligible $tp in $refs%, %", implicitsDetailed) /*<|<*/ { if (monitored) record(s"check eligible refs in ctx", refs.length) @@ -462,7 +461,8 @@ object Implicits { import Implicits._ /** Info relating to implicits that is kept for one run */ -trait ImplicitRunInfo { self: Run => +trait ImplicitRunInfo { + self: Run => private val implicitScopeCache = mutable.AnyRefMap[Type, OfTypeImplicits]() @@ -548,7 +548,8 @@ trait ImplicitRunInfo { self: Run => if (seen contains t) { incomplete += tp // all references to rootTo will be accounted for in `seen` so we return `EmptySet`. EmptyTermRefSet // on the other hand, the refs of `tp` are now not accurate, so `tp` is marked incomplete. 
- } else { + } + else { seen += t val is = iscope(t) if (!implicitScopeCache.contains(t)) incomplete += tp @@ -557,8 +558,9 @@ trait ImplicitRunInfo { self: Run => } val comps = new TermRefSet - def addCompanion(pre: Type, companion: Symbol) = + def addCompanion(pre: Type, companion: Symbol) = { if (companion.exists && !companion.isAbsent()) comps += TermRef(pre, companion) + } def addPath(pre: Type): Unit = pre.dealias match { case pre: ThisType if pre.cls.is(Module) && pre.cls.isStaticOwner => @@ -624,9 +626,8 @@ trait ImplicitRunInfo { self: Run => iscope(rootTp) } - protected def reset(): Unit = { + protected def reset(): Unit = implicitScopeCache.clear() - } } /** The implicit resolution part of type checking */ @@ -652,7 +653,7 @@ trait Implicits { self: Typer => */ def inferView(from: Tree, to: Type)(implicit ctx: Context): SearchResult = { record("inferView") - if ( (to isRef defn.AnyClass) + if ((to isRef defn.AnyClass) || (to isRef defn.ObjectClass) || (to isRef defn.UnitClass) || (from.tpe isRef defn.NothingClass) @@ -696,7 +697,7 @@ trait Implicits { self: Typer => classTag.select(sym.name.toTermName) else classTag.select(nme.apply).appliedToType(tp).appliedTo(clsOf(erasure(tp))) - tag.withSpan(span) + tag.withSpan(span) case tp => EmptyTree } @@ -707,7 +708,7 @@ trait Implicits { self: Typer => /** Synthesize the tree for `'[T]` for an implicit `scala.quoted.Type[T]`. * `T` is deeply dealiased to avoid references to local type aliases. */ - lazy val synthesizedTypeTag: SpecialHandler = + lazy val synthesizedTypeTag: SpecialHandler = { (formal, span) => implicit ctx => { def quotedType(t: Type) = { if (StagingContext.level == 0) @@ -724,6 +725,7 @@ trait Implicits { self: Typer => EmptyTree } } + } lazy val synthesizedQuoteContext: SpecialHandler = (formal, span) => implicit ctx => @@ -777,7 +779,7 @@ trait Implicits { self: Typer => /** If `formal` is of the form Eql[T, U], try to synthesize an * `Eql.eqlAny[T, U]` as solution. */ - lazy val synthesizedEq: SpecialHandler = + lazy val synthesizedEq: SpecialHandler = { (formal, span) => implicit ctx => { /** Is there an `Eql[T, T]` instance, assuming -strictEquality? */ @@ -838,11 +840,12 @@ trait Implicits { self: Typer => EmptyTree } } + } /** Creates a tree that will produce a ValueOf instance for the requested type. * An EmptyTree is returned if materialization fails. */ - lazy val synthesizedValueOf: SpecialHandler = + lazy val synthesizedValueOf: SpecialHandler = { (formal, span) => implicit ctx => { def success(t: Tree) = New(defn.ValueOfClass.typeRef.appliedTo(t.tpe), t :: Nil).withSpan(span) @@ -862,6 +865,7 @@ trait Implicits { self: Typer => EmptyTree } } + } /** Create an anonymous class `new Object { type MirroredMonoType = ... }` * and mark it with given attachment so that it is made into a mirror at PostTyper. @@ -922,7 +926,7 @@ trait Implicits { self: Typer => /** An implied instance for a type of the form `Mirror.Product { type MirroredType = T }` * where `T` is a generic product type or a case object or an enum case. */ - lazy val synthesizedProductMirror: SpecialHandler = + lazy val synthesizedProductMirror: SpecialHandler = { (formal, span) => implicit ctx => { def mirrorFor(mirroredType0: Type): Tree = { val mirroredType = mirroredType0.stripTypeVar @@ -978,6 +982,7 @@ trait Implicits { self: Typer => case other => EmptyTree } } + } /** An implied instance for a type of the form `Mirror.Sum { type MirroredType = T }` * where `T` is a generic sum type. 
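For orientation, a rough sketch of what the product and sum mirror handlers above provide to user code, written against the scala.deriving.Mirror API as it later stabilized; the ADT and value names are illustrative only:

  sealed trait Shape
  case class Circle(radius: Double) extends Shape
  case object Dot extends Shape

  // Shape is a generic sum type and Circle a generic product type, so both
  // requests below can be answered by synthesized mirrors.
  val s = implicitly[scala.deriving.Mirror.SumOf[Shape]]
  val p = implicitly[scala.deriving.Mirror.ProductOf[Circle]]
  val c: Circle = p.fromProduct(Tuple1(1.0)) // rebuild a Circle from its element tuple
  val i: Int = s.ordinal(Dot)                // index of Dot among Shape's alternatives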
@@ -1090,7 +1095,7 @@ trait Implicits { self: Typer => /** Find an implicit argument for parameter `formal`. * Return a failure as a SearchFailureType in the type of the returned tree. */ - def inferImplicitArg(formal: Type, span: Span)(implicit ctx: Context): Tree = { + def inferImplicitArg(formal: Type, span: Span)(implicit ctx: Context): Tree = inferImplicit(formal, EmptyTree, span)(ctx) match { case SearchSuccess(arg, _, _) => arg case fail @ SearchFailure(failed) => @@ -1113,10 +1118,9 @@ trait Implicits { self: Typer => case Nil => failed } - if (fail.isAmbiguous) failed - else trySpecialCases(specialHandlers) + if (fail.isAmbiguous) failed + else trySpecialCases(specialHandlers) } - } /** Search an implicit argument and report error if not found */ def implicitArgTree(formal: Type, span: Span)(implicit ctx: Context): Tree = { @@ -1151,7 +1155,8 @@ trait Implicits { self: Typer => def userDefinedMsg(sym: Symbol, cls: Symbol) = for { ann <- sym.getAnnotation(cls) Trees.Literal(Constant(msg: String)) <- ann.argument(0) - } yield msg + } + yield msg arg.tpe match { @@ -1279,11 +1284,12 @@ trait Implicits { self: Typer => } /** Check that equality tests between types `ltp` and `rtp` make sense */ - def checkCanEqual(ltp: Type, rtp: Type, span: Span)(implicit ctx: Context): Unit = + def checkCanEqual(ltp: Type, rtp: Type, span: Span)(implicit ctx: Context): Unit = { if (!ctx.isAfterTyper && !assumedCanEqual(ltp, rtp)) { val res = implicitArgTree(defn.EqlClass.typeRef.appliedTo(ltp, rtp), span) implicits.println(i"Eql witness found for $ltp / $rtp: $res: ${res.tpe}") } + } /** Find an implicit parameter or conversion. * @param pt The expected type of the parameter or conversion. @@ -1400,27 +1406,27 @@ trait Implicits { self: Typer => } else tryConversion } - if (ctx.reporter.hasErrors) { - ctx.reporter.removeBufferedMessages - SearchFailure { - adapted.tpe match { - case _: SearchFailureType => adapted - case _ => adapted.withType(new MismatchedImplicit(ref, pt, argument)) - } + if (ctx.reporter.hasErrors) { + ctx.reporter.removeBufferedMessages + SearchFailure { + adapted.tpe match { + case _: SearchFailureType => adapted + case _ => adapted.withType(new MismatchedImplicit(ref, pt, argument)) } } - else { - val returned = - if (cand.isExtension) Applications.ExtMethodApply(adapted) - else adapted - SearchSuccess(returned, ref, cand.level)(ctx.typerState, ctx.gadt) - } } + else { + val returned = + if (cand.isExtension) Applications.ExtMethodApply(adapted) + else adapted + SearchSuccess(returned, ref, cand.level)(ctx.typerState, ctx.gadt) + } + } /** Try to type-check implicit reference, after checking that this is not * a diverging search */ - def tryImplicit(cand: Candidate, contextual: Boolean): SearchResult = { + def tryImplicit(cand: Candidate, contextual: Boolean): SearchResult = if (ctx.searchHistory.checkDivergence(cand, pt)) SearchFailure(new DivergingImplicit(cand.ref, pt.widenExpr, argument)) else { @@ -1434,7 +1440,6 @@ trait Implicits { self: Typer => result } } - } /** Search a list of eligible implicit references */ def searchImplicits(eligible: List[Candidate], contextual: Boolean): SearchResult = { @@ -1589,10 +1594,11 @@ trait Implicits { self: Typer => } rank(sort(eligible), NoMatchingImplicitsFailure, Nil) - } // end searchImplicits + } + // end searchImplicits /** Find a unique best implicit reference */ - def bestImplicit(contextual: Boolean): SearchResult = { + def bestImplicit(contextual: Boolean): SearchResult = // Before searching for contextual or implicit scope 
candidates we first check if // there is an under construction or already constructed term with which we can tie // the knot. @@ -1630,7 +1636,6 @@ trait Implicits { self: Typer => } } } - } def implicitScope(tp: Type): OfTypeImplicits = ctx.run.implicitScope(tp, ctx) @@ -1670,13 +1675,12 @@ abstract class SearchHistory { outer => * @param pt The target type for the above candidate. * @result The nested history. */ - def nest(cand: Candidate, pt: Type)(implicit ctx: Context): SearchHistory = { + def nest(cand: Candidate, pt: Type)(implicit ctx: Context): SearchHistory = new SearchHistory { val root = outer.root val open = (cand, pt) :: outer.open val byname = outer.byname || isByname(pt) } - } def isByname(tp: Type): Boolean = tp.isInstanceOf[ExprType] @@ -1754,13 +1758,12 @@ abstract class SearchHistory { outer => // argument as we ascend the chain of open implicits to the outermost search // context. @tailrec - def loop(ois: List[(Candidate, Type)], belowByname: Boolean): Type = { + def loop(ois: List[(Candidate, Type)], belowByname: Boolean): Type = ois match { case (hd@(cand, tp)) :: tl if (belowByname || isByname(tp)) && tp.widenExpr <:< widePt => tp case (_, tp) :: tl => loop(tl, belowByname || isByname(tp)) case _ => NoType } - } loop(open, bynamePt) match { case NoType => NoType @@ -1805,7 +1808,7 @@ final class SearchRoot extends SearchHistory { * @param tpe The type to link. * @result The TermRef of the corresponding dictionary entry. */ - override def linkBynameImplicit(tpe: Type)(implicit ctx: Context): TermRef = { + override def linkBynameImplicit(tpe: Type)(implicit ctx: Context): TermRef = implicitDictionary.get(tpe) match { case Some((ref, _)) => ref case None => @@ -1814,7 +1817,6 @@ final class SearchRoot extends SearchHistory { implicitDictionary.put(tpe, (ref, tpd.EmptyTree)) ref } - } /** * Look up an implicit dictionary entry by type. @@ -1825,9 +1827,8 @@ final class SearchRoot extends SearchHistory { * @param tpe The type to look up. * @result The corresponding TermRef, or NoType if none. */ - override def refBynameImplicit(tpe: Type)(implicit ctx: Context): Type = { + override def refBynameImplicit(tpe: Type)(implicit ctx: Context): Type = implicitDictionary.get(tpe).map(_._1).getOrElse(NoType) - } /** * Define a pending dictionary entry if any. @@ -1842,14 +1843,13 @@ final class SearchRoot extends SearchHistory { * @result A SearchResult referring to the newly created dictionary entry if tpe * is an under-construction by name implicit, the provided result otherwise. */ - override def defineBynameImplicit(tpe: Type, result: SearchSuccess)(implicit ctx: Context): SearchResult = { + override def defineBynameImplicit(tpe: Type, result: SearchSuccess)(implicit ctx: Context): SearchResult = implicitDictionary.get(tpe) match { case Some((ref, _)) => implicitDictionary.put(tpe, (ref, result.tree)) SearchSuccess(tpd.ref(ref).withSpan(result.tree.span), result.ref, result.level)(result.tstate, result.gstate) case None => result } - } /** * Emit the implicit dictionary at the completion of an implicit search. @@ -1859,9 +1859,9 @@ final class SearchRoot extends SearchHistory { * @result The elaborated result, comprising the implicit dictionary and a result tree * substituted with references into the dictionary. 
*/ - override def emitDictionary(span: Span, result: SearchResult)(implicit ctx: Context): SearchResult = { + override def emitDictionary(span: Span, result: SearchResult)(implicit ctx: Context): SearchResult = if (implicitDictionary == null || implicitDictionary.isEmpty) result - else { + else result match { case failure: SearchFailure => failure case success @ SearchSuccess(tree, _, _) => @@ -1951,8 +1951,6 @@ final class SearchRoot extends SearchHistory { success.copy(tree = blk)(success.tstate, success.gstate) } } - } - } } /** A set of term references where equality is =:= */ diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index 7b15990a0fdf..7ff4fca3c5d6 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -137,7 +137,8 @@ class ImportInfo(symf: given Context => Symbol, val selectors: List[untpd.Tree], for { renamed <- reverseMapping.keys denot <- pre.member(reverseMapping(renamed)).altsWith(_.isOneOf(implicitFlags)) - } yield { + } + yield { val original = reverseMapping(renamed) val ref = TermRef(pre, original, denot) if (renamed == original) ref diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 45c7c74ae459..8583739ffba8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -45,9 +45,10 @@ object Inferencing { /** Instantiate selected type variables `tvars` in type `tp` */ - def instantiateSelected(tp: Type, tvars: List[Type])(implicit ctx: Context): Unit = + def instantiateSelected(tp: Type, tvars: List[Type])(implicit ctx: Context): Unit = { if (tvars.nonEmpty) new IsFullyDefinedAccumulator(new ForceDegree.Value(tvars.contains, minimizeAll = true)).process(tp) + } /** Instantiate any type variables in `tp` whose bounds contain a reference to * one of the parameters in `tparams` or `vparamss`. @@ -421,7 +422,7 @@ trait Inferencing { this: Typer => // val y: List[List[String]] = List(List(1)) val hasUnreportedErrors = state.reporter.hasUnreportedErrors def constraint = state.constraint - for (tvar <- qualifying) + for (tvar <- qualifying) { if (!tvar.isInstantiated && state.constraint.contains(tvar)) { // Needs to be checked again, since previous interpolations could already have // instantiated `tvar` through unification. @@ -437,6 +438,7 @@ trait Inferencing { this: Typer => } else typr.println(i"no interpolation for nonvariant $tvar in $state") } + } } } tree diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 3cfc7d10f287..8498dc11f8a2 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -69,12 +69,11 @@ object Inliner { * when lifting bindings from the expansion to the outside of the call. 
*/ def liftFromInlined(call: Tree) = new TreeMap { - override def transform(t: Tree)(implicit ctx: Context) = { + override def transform(t: Tree)(implicit ctx: Context) = t match { case Inlined(t, Nil, expr) if t.isEmpty => expr case _ => super.transform(t.withSpan(call.span)) } - } } val bindings = new mutable.ListBuffer[Tree] @@ -396,7 +395,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { /** The Inlined node representing the inlined call */ def inlined(sourcePos: SourcePosition): Tree = { - if (callTypeArgs.length == 1) + if (callTypeArgs.length == 1) { if (inlinedMethod == defn.Compiletime_constValue) { val constVal = tryConstValue if (!constVal.isEmpty) return constVal @@ -409,6 +408,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { else New(defn.SomeClass.typeRef.appliedTo(constVal.tpe), constVal :: Nil) ) } + } // Compute bindings for all parameters, appending them to bindingsBuf computeParamBindings(inlinedMethod.info, callTypeArgs, callValueArgss) @@ -491,9 +491,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { // report bad inputs at the point of call instead of revealing its internals. val callToReport = if (enclosingInlineds.nonEmpty) enclosingInlineds.last else call val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(_).nonEmpty).next - def issueInCtx(implicit ctx: Context) = { + def issueInCtx(implicit ctx: Context) = ctx.error(msg, callToReport.sourcePos) - } issueInCtx(ctxToReport) case _ => } @@ -513,9 +512,9 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } } - def malformedString(): String = { - ctx.error("Malformed part `code` string interpolator", call.sourcePos) - "" + def malformedString(): String = { + ctx.error("Malformed part `code` string interpolator", call.sourcePos) + "" } callValueArgss match { @@ -558,13 +557,12 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { if (inlinedMethod == defn.Compiletime_error) issueError() - if (inlinedMethod == defn.Compiletime_code) { + if (inlinedMethod == defn.Compiletime_code) issueCode()(ctx.fresh.setSetting(ctx.settings.color, "never")) - } else { + else // Take care that only argument bindings go into `bindings`, since positions are // different for bindings from arguments and bindings from body. 
tpd.Inlined(call, finalBindings, finalExpansion) - } } } @@ -860,8 +858,9 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { newTypeBinding(sym, ctx.gadt.approximation(sym, fromBelow = shouldBeMinimized)) } - def registerAsGadtSyms(typeBinds: TypeBindsMap)(implicit ctx: Context): Unit = + def registerAsGadtSyms(typeBinds: TypeBindsMap)(implicit ctx: Context): Unit = { if (typeBinds.size > 0) ctx.gadt.addToConstraint(typeBinds.keys) + } pat match { case Typed(pat1, tpt) => @@ -1034,13 +1033,12 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } } - override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = { + override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = constToLiteral(betaReduce(super.typedApply(tree, pt))) match { case res: Apply if res.symbol == defn.InternalQuoted_exprSplice && level == 0 && call.symbol.is(Macro) => expandMacro(res.args.head, tree.span) case res => res } - } override def typedMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: Type)(implicit ctx: Context) = if (!tree.isInline || ctx.owner.isInlineMethod) // don't reduce match of nested inline method yet @@ -1184,9 +1182,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } val retained = bindings.filterConserve(binding => retain(binding.symbol)) - if (retained `eq` bindings) { + if (retained `eq` bindings) (bindings, tree) - } else { val expanded = inlineBindings.transform(tree) dropUnusedDefs(retained, expanded) @@ -1211,5 +1208,5 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { if (ctx.reporter.hasErrors) EmptyTree else normalizedSplice.withSpan(span) } - } + diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index aca6fced1602..48c78cea86de 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -22,7 +22,9 @@ import transform.TypeUtils._ import transform.SymUtils._ import reporting.diagnostic.messages._ -trait NamerContextOps { this: Context => +trait NamerContextOps { + this: Context => + import NamerContextOps._ def typer: Typer = ctx.typeAssigner match { @@ -69,7 +71,7 @@ trait NamerContextOps { this: Context => /** The symbol (stored in some typer's symTree) of an enclosing context definition */ def symOfContextTree(tree: untpd.Tree): Symbol = { - def go(ctx: Context): Symbol = { + def go(ctx: Context): Symbol = ctx.typeAssigner match { case typer: Typer => tree.getAttachment(typer.SymOfTree) match { @@ -81,7 +83,6 @@ trait NamerContextOps { this: Context => } case _ => NoSymbol } - } go(this) } @@ -139,8 +140,9 @@ trait NamerContextOps { this: Context => else (params.head.is(Given), params.head.is(Implicit), params.head.is(Erased)) val make = MethodType.companion(isJava = isJava, isContextual = isContextual, isImplicit = isImplicit, isErased = isErased) if (isJava) - for (param <- params) + for (param <- params) { if (param.info.isDirectRef(defn.ObjectClass)) param.info = defn.AnyType + } make.fromSymbols(params, resultType) } if (typeParams.nonEmpty) PolyType.fromParams(typeParams.asInstanceOf[List[TypeSymbol]], monotpe) @@ -247,7 +249,7 @@ class Namer { typer: Typer => } /** The enclosing class with given name; error if none exists */ - def enclosingClassNamed(name: TypeName, span: Span)(implicit ctx: Context): Symbol = { + def enclosingClassNamed(name: TypeName, 
span: Span)(implicit ctx: Context): Symbol = if (name.isEmpty) NoSymbol else { val cls = ctx.owner.enclosingClassNamed(name) @@ -255,7 +257,6 @@ class Namer { typer: Typer => ctx.error(s"no enclosing class or object is named $name", ctx.source.atSpan(span)) cls } - } /** Record `sym` as the symbol defined by `tree` */ def recordSym(sym: Symbol, tree: Tree)(implicit ctx: Context): Symbol = { @@ -431,11 +432,12 @@ class Namer { typer: Typer => /** Expand tree and store in `expandedTree` */ def expand(tree: Tree)(implicit ctx: Context): Unit = { - def record(expanded: Tree) = + def record(expanded: Tree) = { if (expanded `ne` tree) { typr.println(i"Expansion: $tree expands to $expanded") tree.pushAttachment(ExpandedTree, expanded) } + } tree match { case tree: DefTree => record(desugar.defTree(tree)) case tree: PackageDef => record(desugar.packageDef(tree)) @@ -455,11 +457,12 @@ class Namer { typer: Typer => */ def invalidateCompanions(pkg: Symbol, xstats: List[untpd.Tree])(implicit ctx: Context): Unit = { val definedNames = xstats collect { case stat: NameTree => stat.name } - def invalidate(name: TypeName) = + def invalidate(name: TypeName) = { if (!(definedNames contains name)) { val member = pkg.info.decl(name).asSymDenotation if (member.isClass && !(member.is(Package))) member.markAbsent() } + } xstats foreach { case stat: TypeDef if stat.isClassDef => invalidate(stat.name.moduleClassName) @@ -598,13 +601,14 @@ class Namer { typer: Typer => * 1. `fromStat` and `toStat` could be the same stat * 2. `fromCls` and `toCls` are necessarily different */ - def mergeIfSynthetic(fromStat: Tree, fromCls: TypeDef, toStat: Tree, toCls: TypeDef): Unit = + def mergeIfSynthetic(fromStat: Tree, fromCls: TypeDef, toStat: Tree, toCls: TypeDef): Unit = { if (fromCls.mods.is(Synthetic) && !toCls.mods.is(Synthetic)) { removeInExpanded(fromStat, fromCls) val mcls = mergeModuleClass(toStat, toCls, fromCls) mcls.setMods(toCls.mods) moduleClsDef(fromCls.name) = (toStat, mcls) } + } /** Merge the definitions of a synthetic companion generated by a case class * and the real companion, if both exist. @@ -662,11 +666,11 @@ class Namer { typer: Typer => val classDef = mutable.Map[TypeName, TypeDef]() val moduleDef = mutable.Map[TypeName, TypeDef]() - def updateCache(cdef: TypeDef): Unit = - if (cdef.isClassDef && !cdef.mods.is(Package)) { + def updateCache(cdef: TypeDef): Unit = { + if (cdef.isClassDef && !cdef.mods.is(Package)) if (cdef.mods.is(ModuleClass)) moduleDef(cdef.name) = cdef else classDef(cdef.name) = cdef - } + } for (stat <- stats) expanded(stat) match { @@ -679,13 +683,12 @@ class Namer { typer: Typer => case _ => } - for (cdef @ TypeDef(name, _) <- classDef.values) { + for (cdef @ TypeDef(name, _) <- classDef.values) moduleDef.getOrElse(name.moduleClassName, EmptyTree) match { case t: TypeDef => createLinks(cdef, t) case EmptyTree => } - } // If a top-level object or class has no companion in the current run, we // enter a dummy companion (`denot.isAbsent` returns true) in scope. 
This @@ -708,11 +711,12 @@ class Namer { typer: Typer => val classSym = ctx.effectiveScope.lookup(className.encode) if (classSym.isDefinedInCurrentRun) { val moduleName = className.toTermName - for (moduleSym <- ctx.effectiveScope.lookupAll(moduleName.encode)) + for (moduleSym <- ctx.effectiveScope.lookupAll(moduleName.encode)) { if (moduleSym.is(Module) && !moduleSym.isDefinedInCurrentRun) { val absentModuleSymbol = ctx.newModuleSymbol(ctx.owner, moduleName, EmptyFlags, EmptyFlags, (_, _) => NoType) enterSymbol(absentModuleSymbol) } + } } } } @@ -845,7 +849,7 @@ class Namer { typer: Typer => def register(child: Symbol, parent: Type) = { val cls = parent.classSymbol - if (cls.is(Sealed)) { + if (cls.is(Sealed)) if ((child.isInaccessibleChildOf(cls) || child.isAnonymousClass) && !sym.hasAnonymousChild) addChild(cls, cls) else if (!cls.is(ChildrenQueried)) @@ -854,7 +858,6 @@ class Namer { typer: Typer => ctx.error(em"""children of $cls were already queried before $sym was discovered. |As a remedy, you could move $sym on the same nesting level as $cls.""", child.sourcePos) - } } if (denot.isClass && !sym.isEnumAnonymClass && !sym.isRefinementClass) @@ -1342,12 +1345,11 @@ class Namer { typer: Typer => // Approximate a type `tp` with a type that does not contain skolem types. val deskolemize = new ApproximatingTypeMap { - def apply(tp: Type) = /*trace(i"deskolemize($tp) at $variance", show = true)*/ { + def apply(tp: Type) = /*trace(i"deskolemize($tp) at $variance", show = true)*/ tp match { case tp: SkolemType => range(defn.NothingType, atVariance(1)(apply(tp.info))) case _ => mapOver(tp) } - } } def cookedRhsType = deskolemize(dealiasIfUnit(widenRhs(rhsType))) @@ -1488,8 +1490,9 @@ class Namer { typer: Typer => // We need to compensate by reloading the denotation of references that might // still contain the TypeBounds.empty. If we do not do this, stdlib factories // fail with a bounds error in PostTyper. - def ensureUpToDate(tref: TypeRef, outdated: Type) = + def ensureUpToDate(tref: TypeRef, outdated: Type) = { if (tref.info == outdated && sym.info != outdated) tref.recomputeDenot() + } ensureUpToDate(sym.typeRef, dummyInfo1) if (dummyInfo2 `ne` dummyInfo1) ensureUpToDate(sym.typeRef, dummyInfo2) diff --git a/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala index a809b781198d..f7a795b1659c 100644 --- a/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala @@ -213,7 +213,7 @@ object PrepareInlineable { * to have the inline method as owner. 
*/ def registerInlineInfo( - inlined: Symbol, treeExpr: Context => Tree)(implicit ctx: Context): Unit = { + inlined: Symbol, treeExpr: Context => Tree)(implicit ctx: Context): Unit = inlined.unforcedAnnotation(defn.BodyAnnot) match { case Some(ann: ConcreteBodyAnnotation) => case Some(ann: LazyBodyAnnotation) if ann.isEvaluated => @@ -233,7 +233,6 @@ object PrepareInlineable { }) } } - } def checkInlineMethod(inlined: Symbol, body: Tree)(implicit ctx: Context): Unit = { if (ctx.outer.inInlineMethod) @@ -275,5 +274,5 @@ object PrepareInlineable { isValidMacro(rhs) } } - } + diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 10974daf0571..f261f4de973c 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -148,7 +148,7 @@ object ProtoTypes { case _ => false } - override def isMatchedBy(tp1: Type, keepConstraint: Boolean)(implicit ctx: Context): Boolean = { + override def isMatchedBy(tp1: Type, keepConstraint: Boolean)(implicit ctx: Context): Boolean = name == nme.WILDCARD || hasUnknownMembers(tp1) || { val mbr = if (privateOK) tp1.member(name) else tp1.nonPrivateMember(name) @@ -162,7 +162,6 @@ object ProtoTypes { case _ => mbr hasAltWith qualifies } } - } def underlying(implicit ctx: Context): Type = WildcardType @@ -275,7 +274,7 @@ object ProtoTypes { private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree, force: Boolean)(implicit ctx: Context): Tree = { var targ = state.typedArg(arg) - if (targ == null) { + if (targ == null) untpd.functionWithUnknownParamType(arg) match { case Some(untpd.Function(args, _)) if !force => // If force = false, assume what we know about the parameter types rather than reporting an error. @@ -292,7 +291,6 @@ object ProtoTypes { if (!ctx.reporter.hasUnreportedErrors) state.typedArg = state.typedArg.updated(arg, targ) } - } targ } @@ -315,9 +313,10 @@ object ProtoTypes { if (!args1.exists(arg => isUndefined(arg.tpe))) state.typedArgs = args1 args1 } - finally + finally { if (this.ctx.typerState.constraint ne prevConstraint) ctx.typerState.mergeConstraintWith(this.ctx.typerState) + } } /** Type single argument and remember the unadapted result in `myTypedArg`. @@ -519,9 +518,9 @@ object ProtoTypes { } /** Create a new TypeVar that represents a dependent method parameter singleton */ - def newDepTypeVar(tp: Type)(implicit ctx: Context): TypeVar = { + def newDepTypeVar(tp: Type)(implicit ctx: Context): TypeVar = newTypeVar(TypeBounds.upper(AndType(tp.widenExpr, defn.SingletonClass.typeRef))) - } + /** The result type of `mt`, where all references to parameters of `mt` are * replaced by either wildcards (if typevarsMissContext) or TypeParamRefs. */ diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index 3906f0efc229..c26727996f9b 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -27,7 +27,8 @@ import scala.annotation.internal.sharable import scala.annotation.threadUnsafe /** Type quotes `'{ ... }` and splices `${ ... }` */ -trait QuotesAndSplices { self: Typer => +trait QuotesAndSplices { + self: Typer => import tpd._ @@ -61,25 +62,26 @@ trait QuotesAndSplices { self: Typer => ctx.warning("Canceled quote directly inside a splice. 
${ '{ XYZ } } is equivalent to XYZ.", tree.sourcePos) case _ => } - if (ctx.mode.is(Mode.QuotedPattern) && level == 1) { + if (ctx.mode.is(Mode.QuotedPattern) && level == 1) if (isFullyDefined(pt, ForceDegree.all)) { def spliceOwner(ctx: Context): Symbol = if (ctx.mode.is(Mode.QuotedPattern)) spliceOwner(ctx.outer) else ctx.owner val pat = typedPattern(tree.expr, defn.QuotedExprClass.typeRef.appliedTo(pt))( spliceContext.retractMode(Mode.QuotedPattern).withOwner(spliceOwner(ctx))) Splice(pat) - } else { + } + else { ctx.error(i"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.expr.sourcePos) tree.withType(UnspecifiedErrorType) } - } else { if (StagingContext.level == 0) { // Mark the first inline method from the context as a macro - def markAsMacro(c: Context): Unit = + def markAsMacro(c: Context): Unit = { if (c.owner eq c.outer.owner) markAsMacro(c.outer) else if (c.owner.isInlineMethod) c.owner.setFlag(Macro) else if (!c.outer.owner.is(Package)) markAsMacro(c.outer) + } markAsMacro(ctx) } typedApply(untpd.Apply(untpd.ref(defn.InternalQuoted_exprSplice.termRef), tree.expr), pt)(spliceContext).withSpan(tree.span) @@ -96,11 +98,12 @@ trait QuotesAndSplices { self: Typer => ctx.warning("Canceled quote directly inside a splice. ${ '[ XYZ ] } is equivalent to XYZ.", tree.sourcePos) case _ => } - if (ctx.mode.is(Mode.QuotedPattern) && level == 1) { + if (ctx.mode.is(Mode.QuotedPattern) && level == 1) if (isFullyDefined(pt, ForceDegree.all)) { ctx.error(i"Spliced type pattern must not be fully defined. Consider using $pt directly", tree.expr.sourcePos) tree.withType(UnspecifiedErrorType) - } else { + } + else { def spliceOwner(ctx: Context): Symbol = if (ctx.mode.is(Mode.QuotedPattern)) spliceOwner(ctx.outer) else ctx.owner val name = tree.expr match { @@ -115,9 +118,8 @@ trait QuotesAndSplices { self: Typer => spliceContext.retractMode(Mode.QuotedPattern).withOwner(spliceOwner(ctx))) pat.select(tpnme.splice) } - } else { + else typedSelect(untpd.Select(tree.expr, tpnme.splice), pt)(spliceContext).withSpan(tree.span) - } } private def checkSpliceOutsideQuote(tree: untpd.Tree)(implicit ctx: Context): Unit = { @@ -228,7 +230,7 @@ trait QuotesAndSplices { self: Typer => freshTypeBindings, shape0 ) - val shape2 = { + val shape2 = if (freshTypeBindings.isEmpty) shape1 else { val isFreshTypeBindings = freshTypeBindings.map(_.symbol).toSet @@ -243,7 +245,6 @@ trait QuotesAndSplices { self: Typer => } new TreeTypeMap(typeMap = typeMap).transform(shape1) } - } (typeBindings.toMap, shape2, patterns) } @@ -324,7 +325,7 @@ trait QuotesAndSplices { self: Typer => val replaceBindingsInTree = new TreeMap { private[this] var bindMap = Map.empty[Symbol, Symbol] - override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = { + override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match { case tree: Bind => val sym = tree.symbol @@ -335,7 +336,6 @@ trait QuotesAndSplices { self: Typer => case _ => super.transform(tree).withType(replaceBindingsInType(tree.tpe)) } - } private[this] val replaceBindingsInType = new ReplaceBindings { override def apply(tp: Type): Type = tp match { case tp: TermRef => bindMap.get(tp.termSymbol).fold[Type](tp)(_.typeRef) @@ -355,5 +355,5 @@ trait QuotesAndSplices { self: Typer => patterns = splicePat :: Nil, proto = defn.QuotedExprClass.typeRef.appliedTo(replaceBindings(quoted1.tpe) & quotedPt)) } - } + diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala 
b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index bce41cdbce92..ff7459e3242f 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -59,7 +59,7 @@ class ReTyper extends Typer with ReChecking { tree.expr.withType(tpt1.tpe) case _ => typed(tree.expr) } - untpd.cpy.Typed(tree)(expr1, tpt1).withType(tree.typeOpt) + untpd.cpy.Typed(tree)(expr1, tpt1).withType(tree.typeOpt) } override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(implicit ctx: Context): TypeTree = @@ -88,9 +88,8 @@ class ReTyper extends Typer with ReChecking { untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.tpe) } - override def typedUnApply(tree: untpd.Apply, selType: Type)(implicit ctx: Context): Tree = { + override def typedUnApply(tree: untpd.Apply, selType: Type)(implicit ctx: Context): Tree = typedApply(tree, selType) - } override def localDummy(cls: ClassSymbol, impl: untpd.Template)(implicit ctx: Context): Symbol = impl.symbol diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 64f8fc44d00e..9561e33db2c5 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -37,10 +37,11 @@ object RefChecks { // directly requires inspecting the parameter list of every one. That modification // shaved 95% off the time spent in this method. - for ( + for { defaultGetterClass <- List(clazz, clazz.companionModule.moduleClass); if defaultGetterClass.isClass - ) { + } + { val defaultGetterNames = defaultGetterClass.asClass.memberNames(defaultMethodFilter) val defaultMethodNames = defaultGetterNames map { _ replace { case DefaultGetterName(methName, _) => methName @@ -65,12 +66,10 @@ object RefChecks { } // Check for doomed attempt to overload applyDynamic - if (clazz derivesFrom defn.DynamicClass) { - for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.symbol.typeParams.length)) { + if (clazz derivesFrom defn.DynamicClass) + for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.symbol.typeParams.length)) ctx.error("implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. 
applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)", m1.symbol.sourcePos) - } - } } /** The this-type of `cls` which should be used when looking at the types of @@ -124,7 +123,7 @@ object RefChecks { /** Check that a class and its companion object to not both define * a class or module with same name */ - private def checkCompanionNameClashes(cls: Symbol)(implicit ctx: Context): Unit = + private def checkCompanionNameClashes(cls: Symbol)(implicit ctx: Context): Unit = { if (!cls.owner.is(ModuleClass)) { def clashes(sym: Symbol) = sym.isClass && @@ -135,6 +134,7 @@ object RefChecks { ctx.error(ClassAndCompanionNameClash(cls, other), cls.sourcePos) } } + } // Override checking ------------------------------------------------------------ @@ -179,7 +179,7 @@ object RefChecks { val mixinOverrideErrors = new mutable.ListBuffer[MixinOverrideError]() - def printMixinOverrideErrors(): Unit = { + def printMixinOverrideErrors(): Unit = mixinOverrideErrors.toList match { case Nil => case List(MixinOverrideError(_, msg)) => @@ -195,7 +195,6 @@ object RefChecks { } ctx.error(msg + othersMsg, clazz.sourcePos) } - } def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz) def infoStringWithLocation(sym: Symbol) = infoString0(sym, true) @@ -241,7 +240,7 @@ object RefChecks { infoStringWithLocation(other), infoString(member), msg, addendum) } - def emitOverrideError(fullmsg: String) = + def emitOverrideError(fullmsg: String) = { if (!(hasErrors && member.is(Synthetic) && member.is(Module))) { // suppress errors relating toi synthetic companion objects if other override // errors (e.g. relating to the companion class) have already been reported. @@ -249,6 +248,7 @@ object RefChecks { else mixinOverrideErrors += new MixinOverrideError(member, fullmsg) hasErrors = true } + } def overrideError(msg: String) = { if (noErrorType) @@ -300,19 +300,16 @@ object RefChecks { def subOther(s: Symbol) = s derivesFrom other.owner def subMember(s: Symbol) = s derivesFrom member.owner - if (subOther(member.owner) && deferredCheck) { + if (subOther(member.owner) && deferredCheck) //Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG return - } val parentSymbols = clazz.info.parents.map(_.typeSymbol) - if (parentSymbols exists (p => subOther(p) && subMember(p) && deferredCheck)) { + if (parentSymbols exists (p => subOther(p) && subMember(p) && deferredCheck)) //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG return - } - if (parentSymbols forall (p => subOther(p) == subMember(p))) { + if (parentSymbols forall (p => subOther(p) == subMember(p))) //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG return - } } /* Is the intersection between given two lists of overridden symbols empty? */ @@ -338,19 +335,19 @@ object RefChecks { (ob.isContainedIn(mb) || other.isAllOf(JavaProtected)) // m relaxes o's access boundary, // or o is Java defined and protected (see #3946) ) - if (!isOverrideAccessOK) { + if (!isOverrideAccessOK) overrideAccessError() - } else if (other.isClass) { + else if (other.isClass) // direct overrides were already checked on completion (see Checking.chckWellFormed) // the test here catches indirect overriddes between two inherited base types. 
overrideError("cannot be used here - class definitions cannot be overridden") - } else if (!other.is(Deferred) && member.isClass) { + else if (!other.is(Deferred) && member.isClass) overrideError("cannot be used here - classes can only override abstract types") - } else if (other.isEffectivelyFinal) { // (1.2) + else if (other.isEffectivelyFinal) // (1.2) overrideError(i"cannot override final member ${other.showLocated}") - } else if (!other.is(Deferred) && + else if (!other.is(Deferred) && !other.name.is(DefaultGetterName) && - !member.isAnyOverride) { + !member.isAnyOverride) // Exclusion for default getters, fixes SI-5178. We cannot assign the Override flag to // the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket. // Also exclusion for implicit shortcut methods @@ -369,53 +366,53 @@ object RefChecks { + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)") else overrideError("needs `override' modifier") - } else if (other.is(AbsOverride) && other.isIncompleteIn(clazz) && !member.is(AbsOverride)) { + else if (other.is(AbsOverride) && other.isIncompleteIn(clazz) && !member.is(AbsOverride)) overrideError("needs `abstract override' modifiers") - } else if (member.is(Override) && other.is(Accessor) && + else if (member.is(Override) && other.is(Accessor) && other.accessedFieldOrGetter.is(Mutable, butNot = Lazy)) { // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here. // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches. if (!ctx.settings.YoverrideVars.value) overrideError("cannot override a mutable variable") - } else if (member.isAnyOverride && + } + else if (member.isAnyOverride && !(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) && !member.is(Deferred) && !other.is(Deferred) && - intersectionIsEmpty(member.extendedOverriddenSymbols, other.extendedOverriddenSymbols)) { + intersectionIsEmpty(member.extendedOverriddenSymbols, other.extendedOverriddenSymbols)) overrideError("cannot override a concrete member without a third member that's overridden by both " + "(this rule is designed to prevent ``accidental overrides'')") - } else if (other.isStableMember && !member.isStableMember) { // (1.4) + else if (other.isStableMember && !member.isStableMember) // (1.4) overrideError("needs to be a stable, immutable value") - } else if (member.is(ModuleVal) && !other.isRealMethod && !other.isOneOf(Deferred | Lazy)) { + else if (member.is(ModuleVal) && !other.isRealMethod && !other.isOneOf(Deferred | Lazy)) overrideError("may not override a concrete non-lazy value") - } else if (member.is(Lazy, butNot = Module) && !other.isRealMethod && !other.is(Lazy) && - !ctx.testScala2Mode(overrideErrorMsg("may not override a non-lazy value"), member.sourcePos)) { + else if (member.is(Lazy, butNot = Module) && !other.isRealMethod && !other.is(Lazy) && + !ctx.testScala2Mode(overrideErrorMsg("may not override a non-lazy value"), member.sourcePos)) overrideError("may not override a non-lazy value") - } else if (other.is(Lazy) && !other.isRealMethod && !member.is(Lazy)) { + else if (other.is(Lazy) && !other.isRealMethod && !member.is(Lazy)) overrideError("must be declared lazy to override a lazy value") - } else if (member.is(Erased) && !other.is(Erased)) { // (1.9.1) + else if (member.is(Erased) && !other.is(Erased)) // (1.9.1) overrideError("is erased, cannot override non-erased member") - } else if (other.is(Erased) 
&& !member.isOneOf(Erased | Inline)) { // (1.9.1) + else if (other.is(Erased) && !member.isOneOf(Erased | Inline)) // (1.9.1) overrideError("is not erased, cannot override erased member") - } else if (member.is(Extension) && !other.is(Extension)) { // (1.9.2) + else if (member.is(Extension) && !other.is(Extension)) // (1.9.2) overrideError("is an extension method, cannot override a normal method") - } else if (other.is(Extension) && !member.is(Extension)) { // (1.9.2) + else if (other.is(Extension) && !member.is(Extension)) // (1.9.2) overrideError("is a normal method, cannot override an extension method") - } else if ((member.isInlineMethod || member.isScala2Macro) && other.is(Deferred) && - member.extendedOverriddenSymbols.forall(_.is(Deferred))) { // (1.10) + else if ((member.isInlineMethod || member.isScala2Macro) && other.is(Deferred) && + member.extendedOverriddenSymbols.forall(_.is(Deferred))) // (1.10) overrideError("is an inline method, must override at least one concrete method") - } else if (other.isScala2Macro && !member.isScala2Macro) { // (1.11) + else if (other.isScala2Macro && !member.isScala2Macro) // (1.11) overrideError("cannot be used here - only Scala-2 macros can override Scala-2 macros") - } else if (!compatibleTypes(memberTp(self), otherTp(self)) && - !compatibleTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) { + else if (!compatibleTypes(memberTp(self), otherTp(self)) && + !compatibleTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) overrideError("has incompatible type" + err.whyNoMatchStr(memberTp(self), otherTp(self))) - } else if (member.erasedName != other.erasedName) { + else if (member.erasedName != other.erasedName) if (other.erasedName != other.name) overrideError(i"needs to be declared with @alpha(${"\""}${other.erasedName}${"\""}) so that external names match") else overrideError("cannot have an @alpha annotation since external names would be different") - } else { + else checkOverrideDeprecated() - } } /* TODO enable; right now the annotation is scala-private, so cannot be seen @@ -435,7 +432,8 @@ object RefChecks { checkOverride(opc.overriding, opc.overridden) opc.next() } - } catch { + } + catch { case ex: MergeError => val addendum = ex.tp1 match { case tp1: ClassInfo => @@ -472,7 +470,7 @@ object RefChecks { !sym.owner.derivesFrom(alt.symbol.owner) && alt.matches(sym) } - } + } def ignoreDeferred(member: SingleDenotation) = member.isType || { @@ -540,7 +538,8 @@ object RefChecks { if (memberSym.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)" else if (memberSym.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)" else err.abstractVarMessage(memberSym)) - } else if (underlying.is(Method)) { + } + else if (underlying.is(Method)) { // If there is a concrete method whose name matches the unimplemented // abstract method, and a cursory examination of the difference reveals // something obvious to us, let's make it more obvious to them. 
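// Editorial aside, not part of the patch: the hunks in this diff are largely a
// syntactic restyling -- redundant braces around single-expression branches are
// dropped, and `else` / `catch` / `finally` move onto their own line. A minimal
// sketch of the target style (hypothetical example, not code from the compiler):
object BraceStyleSketch {
  def describe(x: Int): String =
    if (x > 0)
      "positive"
    else if (x < 0)
      "negative"
    else
      "zero"
}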
@@ -588,7 +587,8 @@ object RefChecks { case concretes => undefined(s"\n(The class implements members with different types: ${concretes.map(_.showDcl)}%\n %)") } - } else undefined("") + } + else undefined("") } } @@ -634,23 +634,25 @@ object RefChecks { // (this is done to avoid false positives since Scala2's rules for checking are different) val membersToCheck = new util.HashSet[Name](4096) val seenClasses = new util.HashSet[Symbol](256) - def addDecls(cls: Symbol): Unit = + def addDecls(cls: Symbol): Unit = { if (!seenClasses.contains(cls)) { seenClasses.addEntry(cls) - for (mbr <- cls.info.decls) + for (mbr <- cls.info.decls) { if (mbr.isTerm && !mbr.isOneOf(Synthetic | Bridge) && mbr.memberCanMatchInheritedSymbols && !membersToCheck.contains(mbr.name)) membersToCheck.addEntry(mbr.name) + } cls.info.parents.map(_.classSymbol) .filter(_.isOneOf(AbstractOrTrait)) .dropWhile(_.isOneOf(JavaDefined | Scala2x)) .foreach(addDecls) } + } addDecls(clazz) // For each member, check that the type of its symbol, as seen from `self` // can override the info of this member - for (name <- membersToCheck) { + for (name <- membersToCheck) for (mbrd <- self.member(name).alternatives) { val mbr = mbrd.symbol val mbrType = mbr.info.asSeenFrom(self, mbr.owner) @@ -661,20 +663,19 @@ object RefChecks { | does not conform to ${mbrd.info}""", (if (mbr.owner == clazz) mbr else clazz).sourcePos) } - } } /** Check that inheriting a case class does not constitute a variant refinement * of a base type of the case class. It is because of this restriction that we * can assume invariant refinement for case classes in `constrainPatternType`. */ - def checkCaseClassInheritanceInvariant() = { + def checkCaseClassInheritanceInvariant() = for (caseCls <- clazz.info.baseClasses.tail.find(_.is(Case))) - for (baseCls <- caseCls.info.baseClasses.tail) + for (baseCls <- caseCls.info.baseClasses.tail) { if (baseCls.typeParams.exists(_.paramVariance != 0)) for (problem <- variantInheritanceProblems(baseCls, caseCls, "non-variant", "case ")) ctx.errorOrMigrationWarning(problem(), clazz.sourcePos) - } + } checkNoAbstractMembers() if (abstractErrors.isEmpty) checkNoAbstractDecls(clazz) @@ -684,7 +685,8 @@ object RefChecks { checkMemberTypesOK() checkCaseClassInheritanceInvariant() - } else if (clazz.is(Trait) && !(clazz derivesFrom defn.AnyValClass)) { + } + else if (clazz.is(Trait) && !(clazz derivesFrom defn.AnyValClass)) // For non-AnyVal classes, prevent abstract methods in interfaces that override // final members in Object; see #4431 for (decl <- clazz.info.decls) { @@ -695,7 +697,6 @@ object RefChecks { if (overridden.is(Final)) ctx.error(TraitRedefinedFinalMethodFromAnyRef(overridden), decl.sourcePos) } - } if (!clazz.is(Trait)) { // check that parameterized base classes and traits are typed in the same way as from the superclass @@ -715,7 +716,8 @@ object RefChecks { cls <- clazz.info.baseClasses.tail if cls.paramAccessors.nonEmpty && !mixins.contains(cls) problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, "parameterized", "super") - } ctx.error(problem(), clazz.sourcePos) + } + ctx.error(problem(), clazz.sourcePos) } checkParameterizedTraitsOK() @@ -781,15 +783,14 @@ object RefChecks { } // 4. Check that every defined member with an `override` modifier overrides some other member. 
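// Editorial aside, not part of the patch: a minimal sketch of what check 4
// below rejects -- an `override` modifier on a member that overrides nothing
// (all names here are hypothetical):
class Base
class Child extends Base {
  override def size: Int = 0 // error: method size overrides nothing
}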
- for (member <- clazz.info.decls) + for (member <- clazz.info.decls) { if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) { - if (checks != noPrinter) { + if (checks != noPrinter) for (bc <- clazz.info.baseClasses.tail) { val sym = bc.info.decl(member.name).symbol if (sym.exists) checks.println(i"$bc has $sym: ${clazz.thisType.memberInfo(sym)}") } - } val nonMatching = clazz.info.member(member.name).altsWith(alt => alt.owner != clazz) nonMatching match { @@ -801,6 +802,7 @@ object RefChecks { member.resetFlag(Override) member.resetFlag(AbsOverride) } + } } // Note: if a symbol has both @deprecated and @migration annotations and both @@ -811,14 +813,13 @@ object RefChecks { private def checkUndesiredProperties(sym: Symbol, pos: SourcePosition)(implicit ctx: Context): Unit = { // If symbol is deprecated, and the point of reference is not enclosed // in either a deprecated member or a scala bridge method, issue a warning. - if (sym.isDeprecated && !ctx.owner.ownersIterator.exists(_.isDeprecated)) { + if (sym.isDeprecated && !ctx.owner.ownersIterator.exists(_.isDeprecated)) ctx.deprecationWarning("%s is deprecated%s".format( sym.showLocated, sym.deprecationMessage map (": " + _) getOrElse ""), pos) - } // Similar to deprecation: check if the symbol is marked with @migration // indicating it has changed semantics between versions. val xMigrationValue = ctx.settings.Xmigration.value - if (sym.hasAnnotation(defn.MigrationAnnot) && xMigrationValue != NoScalaVersion) { + if (sym.hasAnnotation(defn.MigrationAnnot) && xMigrationValue != NoScalaVersion) sym.migrationVersion.get match { case scala.util.Success(symVersion) if xMigrationValue < symVersion=> ctx.warning(SymbolChangedSemanticsInVersion(sym, symVersion), pos) @@ -826,7 +827,6 @@ object RefChecks { ctx.warning(SymbolHasUnparsableVersionNumber(sym, ex.getMessage()), pos) case _ => } - } } /** Check that a deprecated val or def does not override a @@ -869,7 +869,7 @@ object RefChecks { var refSpan: Span = _ var refSym: Symbol = _ - override def enterReference(sym: Symbol, span: Span): Unit = + override def enterReference(sym: Symbol, span: Span): Unit = { if (sym.exists && sym.owner.isTerm) levelAndIndex.get(sym) match { case Some((level, idx)) if (level.maxIndex < idx) => @@ -878,6 +878,7 @@ object RefChecks { level.refSym = sym case _ => } + } } val NoLevelInfo: RefChecks.OptLevelInfo = new OptLevelInfo() @@ -944,14 +945,13 @@ class RefChecks extends MiniPhase { thisPhase => case Ident(nme.WILDCARD) => ctx.error(UnboundPlaceholderParameter(), sym.sourcePos) case _ => } - if (!sym.is(Lazy)) { + if (!sym.is(Lazy)) currentLevel.levelAndIndex.get(sym) match { case Some((level, symIdx)) if symIdx <= level.maxIndex => ctx.error(ForwardReferenceExtendsOverDefinition(sym, level.refSym), ctx.source.atSpan(level.refSpan)) case _ => } - } } tree } @@ -969,7 +969,8 @@ class RefChecks extends MiniPhase { thisPhase => checkCompanionNameClashes(cls) checkAllOverrides(cls) tree - } catch { + } + catch { case ex: TypeError => ctx.error(ex, tree.sourcePos) tree diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 616a753d54b2..f69334742a9b 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -202,9 +202,9 @@ trait TypeAssigner { val d2 = pre.nonPrivateMember(name) if (reallyExists(d2) && firstTry) test(NamedType(pre, name, d2), false) - else if 
(pre.derivesFrom(defn.DynamicClass) && name.isTermName) { + else if (pre.derivesFrom(defn.DynamicClass) && name.isTermName) TryDynamicCallType - } else { + else { val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists) var packageAccess = false val what = alts match { @@ -396,7 +396,7 @@ trait TypeAssigner { safeSubstParams(tp1, params.tail, argTypes1) case Nil => tp - } + } def assignType(tree: untpd.Apply, fn: Tree, args: List[Tree])(implicit ctx: Context): Apply = { val ownType = fn.tpe.widen match { @@ -569,11 +569,10 @@ trait TypeAssigner { assert(!hasNamedArg(args)) val tparams = tycon.tpe.typeParams val ownType = - if (sameLength(tparams, args)) { + if (sameLength(tparams, args)) if (tycon.symbol == defn.andType) AndType(args(0).tpe, args(1).tpe) else if (tycon.symbol == defn.orType) OrType(args(0).tpe, args(1).tpe) else tycon.tpe.appliedTo(args.tpes) - } else wrongNumberOfTypeArgs(tycon.tpe, tparams, args, tree.sourcePos) tree.withType(ownType) } @@ -622,8 +621,8 @@ trait TypeAssigner { def assignType(tree: untpd.PackageDef, pid: Tree)(implicit ctx: Context): PackageDef = tree.withType(pid.symbol.termRef) - } + object TypeAssigner extends TypeAssigner diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index e0bd25468647..753bf5e6fb12 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -53,9 +53,10 @@ object Typer { } /** Assert tree has a position, unless it is empty or a typed splice */ - def assertPositioned(tree: untpd.Tree)(implicit ctx: Context): Unit = + def assertPositioned(tree: untpd.Tree)(implicit ctx: Context): Unit = { if (!tree.isEmpty && !tree.isInstanceOf[untpd.TypedSplice] && ctx.typerState.isGlobalCommittable) assert(tree.span.exists, i"position not set for $tree # ${tree.uniqueId} of ${tree.getClass} in ${tree.source}") + } /** A context property that indicates the owner of any expressions to be typed in the context * if that owner is different from the context's owner. Typically, a context with a class @@ -77,9 +78,9 @@ object Typer { * search was tried on a tree. 
This will in some cases be reported in error messages */ private[typer] val HiddenSearchFailure = new Property.Key[SearchFailure] - } + class Typer extends Namer with TypeAssigner with Applications @@ -162,15 +163,13 @@ class Typer extends Namer if (!previous.exists || ctx.typeComparer.isSameRef(previous, found)) found else if ((prevCtx.scope eq ctx.scope) && (newPrec == Definition || - newPrec == NamedImport && prevPrec == WildImport)) { + newPrec == NamedImport && prevPrec == WildImport)) // special cases: definitions beat imports, and named imports beat // wildcard imports, provided both are in contexts with same scope found - } else { - if (!scala2pkg && !previous.isError && !found.isError) { + if (!scala2pkg && !previous.isError && !found.isError) refctx.error(AmbiguousImport(name, newPrec, prevPrec, prevCtx), posd.sourcePos) - } previous } @@ -178,7 +177,8 @@ class Typer extends Namer if (imp.sym.isCompleting) { ctx.warning(i"cyclic ${imp.sym}, ignored", posd.sourcePos) NoType - } else if (unimported.nonEmpty && unimported.contains(imp.site.termSymbol)) + } + else if (unimported.nonEmpty && unimported.contains(imp.site.termSymbol)) NoType else { val pre = imp.site @@ -253,7 +253,7 @@ class Typer extends Namer !noImports && (prevPrec.ordinal < prec.ordinal || prevPrec == prec && (prevCtx.scope eq ctx.scope)) - @tailrec def loop(lastCtx: Context)(implicit ctx: Context): Type = { + @tailrec def loop(lastCtx: Context)(implicit ctx: Context): Type = if (ctx.scope == null) previous else { var result: Type = NoType @@ -322,8 +322,9 @@ class Typer extends Namer else { // find import val outer = ctx.outer val curImport = ctx.importInfo - def updateUnimported() = + def updateUnimported() = { if (curImport.unimported.exists) unimported += curImport.unimported + } if (ctx.owner.is(Package) && curImport != null && curImport.isRootImport && previous.exists) previous // no more conflicts possible in this case else if (isPossibleImport(NamedImport) && (curImport ne outer.importInfo)) { @@ -347,7 +348,6 @@ class Typer extends Namer else loop(ctx)(outer) } } - } // begin findRefRecur loop(NoContext)(ctx) @@ -405,19 +405,18 @@ class Typer extends Namer val ownType = if (rawType.exists) ensureAccessible(rawType, superAccess = false, tree.sourcePos) - else if (name == nme._scope) { + else if (name == nme._scope) // gross hack to support current xml literals. // awaiting a better implicits based solution for library-supported xml return ref(defn.XMLTopScopeModule.termRef) - } else if (name.toTermName == nme.ERROR) UnspecifiedErrorType else if (ctx.owner.isConstructor && ctx.mode.is(Mode.InSuperCall) && - ctx.owner.owner.unforcedDecls.lookup(tree.name).exists) { + ctx.owner.owner.unforcedDecls.lookup(tree.name).exists) // When InSuperCall mode and in a constructor we are in the arguments // of a this(...) 
constructor call errorType(ex"$tree is not accessible from constructor arguments", tree.sourcePos) - } else + else errorType(new MissingIdent(tree, kind, name.show), tree.sourcePos) val tree1 = ownType match { @@ -515,7 +514,7 @@ class Typer extends Namer else tree1 } - def typedNew(tree: untpd.New, pt: Type)(implicit ctx: Context): Tree = { + def typedNew(tree: untpd.New, pt: Type)(implicit ctx: Context): Tree = tree.tpt match { case templ: untpd.Template => import untpd._ @@ -550,7 +549,6 @@ class Typer extends Namer assignType(cpy.New(tree)(tpt1), tpt1) } - } def typedTyped(tree: untpd.Typed, pt: Type)(implicit ctx: Context): Tree = { @@ -642,7 +640,7 @@ class Typer extends Namer assignType(cpy.NamedArg(tree)(tree.name, arg1), arg1) } - def typedAssign(tree: untpd.Assign, pt: Type)(implicit ctx: Context): Tree = { + def typedAssign(tree: untpd.Assign, pt: Type)(implicit ctx: Context): Tree = tree.lhs match { case lhs @ Apply(fn, args) => typed(untpd.Apply(untpd.Select(fn, nme.update), args :+ tree.rhs), pt) @@ -699,7 +697,6 @@ class Typer extends Namer reassignmentToVal } } - } def typedBlockStats(stats: List[untpd.Tree])(implicit ctx: Context): (Context, List[tpd.Tree]) = (index(stats), typedStats(stats, ctx.owner)) @@ -798,10 +795,9 @@ class Typer extends Namer } } - def typedFunction(tree: untpd.Function, pt: Type)(implicit ctx: Context): Tree = { + def typedFunction(tree: untpd.Function, pt: Type)(implicit ctx: Context): Tree = if (ctx.mode is Mode.Type) typedFunctionType(tree, pt) else typedFunctionValue(tree, pt) - } def typedFunctionType(tree: untpd.Function, pt: Type)(implicit ctx: Context): Tree = { val untpd.Function(args, body) = tree @@ -955,14 +951,13 @@ class Typer extends Namer else errorType(WrongNumberOfParameters(protoFormals.length), tree.sourcePos) /** Is `formal` a product type which is elementwise compatible with `params`? */ - def ptIsCorrectProduct(formal: Type) = { + def ptIsCorrectProduct(formal: Type) = isFullyDefined(formal, ForceDegree.noBottom) && (defn.isProductSubType(formal) || formal.derivesFrom(defn.PairClass)) && productSelectorTypes(formal, tree.sourcePos).corresponds(params) { (argType, param) => param.tpt.isEmpty || argType.widenExpr <:< typedAheadType(param.tpt).tpe } - } val desugared = if (protoFormals.length == 1 && params.length != 1 && ptIsCorrectProduct(protoFormals.head)) { @@ -992,7 +987,7 @@ class Typer extends Namer case SAMType(sam) if !defn.isFunctionType(pt) && mt <:< sam => val targetTpe = - if (!isFullyDefined(pt, ForceDegree.all)) { + if (!isFullyDefined(pt, ForceDegree.all)) if (pt.isRef(defn.PartialFunctionClass)) // Replace the underspecified expected type by one based on the closure method type defn.PartialFunctionOf(mt.firstParamTypes.head, mt.resultType) @@ -1000,7 +995,6 @@ class Typer extends Namer ctx.error(ex"result type of lambda is an underspecified SAM type $pt", tree.sourcePos) pt } - } else pt if (pt.classSymbol.isOneOf(FinalOrSealed)) { val offendingFlag = pt.classSymbol.flags & FinalOrSealed @@ -1008,12 +1002,11 @@ class Typer extends Namer } TypeTree(targetTpe) case _ => - if (mt.isParamDependent) { + if (mt.isParamDependent) throw new java.lang.Error( i"""internal error: cannot turn method type $mt into closure |because it has internal parameter dependencies, |position = ${tree.span}, raw type = ${mt.toString}""") // !!! DEBUG. Eventually, convert to an error? 
- } else if ((tree.tpt `eq` untpd.ContextualEmptyTree) && mt.paramNames.isEmpty) // Note implicitness of function in target type since there are no method parameters that indicate it. TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true, isErased = false)) @@ -1028,7 +1021,7 @@ class Typer extends Namer assignType(cpy.Closure(tree)(env1, meth1, target), meth1, target) } - def typedMatch(tree: untpd.Match, pt: Type)(implicit ctx: Context): Tree = { + def typedMatch(tree: untpd.Match, pt: Type)(implicit ctx: Context): Tree = tree.selector match { case EmptyTree => if (tree.isInline) { @@ -1067,7 +1060,6 @@ class Typer extends Namer } result } - } // Overridden in InlineTyper for inline matches def typedMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: Type)(implicit ctx: Context): Tree = { @@ -1076,9 +1068,8 @@ class Typer extends Namer assignType(cpy.Match(tree)(sel, cases1), sel, cases1) } - def typedCases(cases: List[untpd.CaseDef], selType: Type, pt: Type)(implicit ctx: Context): List[CaseDef] = { + def typedCases(cases: List[untpd.CaseDef], selType: Type, pt: Type)(implicit ctx: Context): List[CaseDef] = cases.mapconserve(typedCase(_, selType, pt)) - } /** - strip all instantiated TypeVars from pattern types. * run/reducable.scala is a test case that shows stripping typevars is necessary. @@ -1102,7 +1093,7 @@ class Typer extends Namer b case t => t } - } + } /** Type a case. */ def typedCase(tree: untpd.CaseDef, selType: Type, pt: Type)(implicit ctx: Context): CaseDef = { @@ -1158,7 +1149,7 @@ class Typer extends Namer ctx.error(ReturnOutsideMethodDefinition(owner), tree.sourcePos) (EmptyTree, WildcardType) } - else if (owner != cx.outer.owner && owner.isRealMethod) { + else if (owner != cx.outer.owner && owner.isRealMethod) if (owner.isInlineMethod) (EmptyTree, errorType(NoReturnFromInlineable(owner), tree.sourcePos)) else if (!owner.isCompleted) @@ -1168,7 +1159,6 @@ class Typer extends Namer val proto = returnProto(owner, cx.scope) (from, proto) } - } else enclMethInfo(cx.outer) } val (from, proto) = @@ -1224,7 +1214,8 @@ class Typer extends Namer val elemtpt1 = typed(tree.elemtpt, elemProto) val elems1 = tree.elems.mapconserve(typed(_, elemtpt1.tpe)) assign(elems1, elemtpt1) - } else { + } + else { val elems1 = tree.elems.mapconserve(typed(_, elemProto)) val elemtptType = if (isFullyDefined(elemProto, ForceDegree.none)) @@ -1245,7 +1236,7 @@ class Typer extends Namer bindings1, expansion1) } - def typedTypeTree(tree: untpd.TypeTree, pt: Type)(implicit ctx: Context): Tree = { + def typedTypeTree(tree: untpd.TypeTree, pt: Type)(implicit ctx: Context): Tree = tree match { case tree: untpd.DerivedTypeTree => tree.ensureCompletions @@ -1265,7 +1256,6 @@ class Typer extends Namer else if (ctx.reporter.errorsReported) UnspecifiedErrorType else errorType(i"cannot infer type; expected type $pt is not fully defined", tree.sourcePos)) } - } def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(implicit ctx: Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref) @@ -1291,9 +1281,8 @@ class Typer extends Namer ctx.error(PolymorphicMethodMissingTypeInParent(rsym, tpt1.symbol), refinement.sourcePos) val member = refineCls.info.member(rsym.name) - if (member.isOverloaded) { + if (member.isOverloaded) ctx.error(OverloadInRefinement(rsym), refinement.sourcePos) - } } assignType(cpy.RefinedTypeTree(tree)(tpt1, refinements1), tpt1, refinements1, refineCls) } @@ -1325,7 +1314,7 @@ class Typer extends Namer tparam.ensureCompleted() // This 
is needed to get the test `compileParSetSubset` to work case _ => } - if (desugaredArg.isType) { + if (desugaredArg.isType) arg match { case TypeBoundsTree(EmptyTree, EmptyTree) if tparam.paramInfo.isLambdaSub && @@ -1341,7 +1330,6 @@ class Typer extends Namer case _ => typed(desugaredArg, argPt) } - } else desugaredArg.withType(UnspecifiedErrorType) } args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] @@ -1398,7 +1386,7 @@ class Typer extends Namer val hi2 = if (hi1.isEmpty) typed(untpd.TypeTree(defn.AnyType)) else hi1 val tree1 = assignType(cpy.TypeBoundsTree(tree)(lo2, hi2), lo2, hi2) - if (ctx.mode.is(Mode.Pattern)) { + if (ctx.mode.is(Mode.Pattern)) // Associate a pattern-bound type symbol with the wildcard. // The bounds of the type symbol can be constrained when comparing a pattern type // with an expected type in typedTyped. The type symbol and the defining Bind node @@ -1410,7 +1398,6 @@ class Typer extends Namer val wildcardSym = ctx.newPatternBoundSymbol(tpnme.WILDCARD, tree1.tpe & pt, tree.span) untpd.Bind(tpnme.WILDCARD, tree1).withType(wildcardSym.typeRef) } - } else tree1 } @@ -1469,13 +1456,12 @@ class Typer extends Namer sym.annotations.foreach(_.ensureCompleted) lazy val annotCtx = annotContext(mdef, sym) // necessary in order to mark the typed ahead annotations as definitely typed: - for annot <- untpd.modsDeco(mdef).mods.annotations do + for (annot <- untpd.modsDeco(mdef).mods.annotations) checkAnnotApplicable(typedAnnotation(annot)(annotCtx), sym) } - def typedAnnotation(annot: untpd.Tree)(implicit ctx: Context): Tree = { + def typedAnnotation(annot: untpd.Tree)(implicit ctx: Context): Tree = typed(annot, defn.AnnotationClass.typeRef) - } def typedValDef(vdef: untpd.ValDef, sym: Symbol)(implicit ctx: Context): Tree = { val ValDef(name, tpt, _) = vdef @@ -1502,11 +1488,10 @@ class Typer extends Namer * see remark about idempotency in TreeInfo#constToLiteral */ private def patchFinalVals(vdef: ValDef)(implicit ctx: Context): Unit = { - def isFinalInlinableVal(sym: Symbol): Boolean = { + def isFinalInlinableVal(sym: Symbol): Boolean = sym.is(Final, butNot = Mutable) && isIdempotentExpr(vdef.rhs) /* && ctx.scala2Mode (stay compatible with Scala2 for now) */ - } val sym = vdef.symbol sym.info match { case info: ConstantType if isFinalInlinableVal(sym) && !ctx.settings.YnoInline.value => sym.setFlag(Inline) @@ -1531,12 +1516,12 @@ class Typer extends Namer val rhsCtx = ctx.fresh if (tparams1.nonEmpty) { rhsCtx.setFreshGADTBounds - if (!sym.isConstructor) { + if (!sym.isConstructor) // we're typing a polymorphic definition's body, // so we allow constraining all of its type parameters // constructors are an exception as we don't allow constraining type params of classes rhsCtx.gadt.addToConstraint(tparams1.map(_.symbol)) - } else if (!sym.isPrimaryConstructor) { + else if (!sym.isPrimaryConstructor) { // otherwise, for secondary constructors we need a context that "knows" // that their type parameters are aliases of the class type parameters. 
// See pos/i941.scala @@ -1673,10 +1658,10 @@ class Typer extends Namer val parents1 = ensureConstrCall(cls, parentsWithClass)(superCtx) val self1 = typed(self)(ctx.outer).asInstanceOf[ValDef] // outer context where class members are not visible - if (self1.tpt.tpe.isError || classExistsOnSelf(cls.unforcedDecls, self1)) { + if (self1.tpt.tpe.isError || classExistsOnSelf(cls.unforcedDecls, self1)) // fail fast to avoid typing the body with an error type cdef.withType(UnspecifiedErrorType) - } else { + else { val dummy = localDummy(cls, impl) val body1 = addAccessorDefs(cls, typedStats(impl.body, dummy)(ctx.inClassContext(self1.symbol))) @@ -1718,6 +1703,8 @@ class Typer extends Namer cdef1.putAttachment(Deriver, deriver) cdef1 + } + } // todo later: check that // 1. If class is non-abstract, it is instantiatable: @@ -1726,8 +1713,6 @@ class Typer extends Namer // 2. all private type members have consistent bounds // 3. Types do not override classes. // 4. Polymorphic type defs override nothing. - } - } protected def addAccessorDefs(cls: Symbol, body: List[Tree])(implicit ctx: Context): List[Tree] = ctx.compilationUnit.inlineAccessors.addAccessorDefs(cls, body) @@ -2077,15 +2062,14 @@ class Typer extends Namer protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(implicit ctx: Context): tree.type = { if (!tree.denot.isOverloaded && // for overloaded trees: resolve overloading before simplifying - !tree.isInstanceOf[Applications.IntegratedTypeArgs] + !tree.isInstanceOf[Applications.IntegratedTypeArgs]) { // don't interpolate in the middle of an extension method application - ) if (!tree.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied - || tree.isDef) // ... unless tree is a definition - { + || tree.isDef) { // ... unless tree is a definition interpolateTypeVars(tree, pt, locked) tree.overwriteType(tree.tpe.simplified) } + } tree } @@ -2346,7 +2330,7 @@ class Typer extends Namer else fallBack } } - } + } } /** If this tree is a select node `qual.name`, try to insert an implicit conversion @@ -2407,9 +2391,8 @@ class Typer extends Namer adapt1(tree, pt, locked) } - final def adapt(tree: Tree, pt: Type)(implicit ctx: Context): Tree = { + final def adapt(tree: Tree, pt: Type)(implicit ctx: Context): Tree = adapt(tree, pt, ctx.typerState.ownedVars) - } private def adapt1(tree: Tree, pt: Type, locked: TypeVars)(implicit ctx: Context): Tree = { assert(pt.exists && !pt.isInstanceOf[ExprType]) @@ -2584,7 +2567,8 @@ class Typer extends Namer } { (_, _) => issueErrors() } - } else issueErrors() + } + else issueErrors() } else tree match { case tree: Block => @@ -2626,12 +2610,11 @@ class Typer extends Namer defn.isImplicitFunctionClass(tree.symbol.maybeOwner) && functionExpected /** Is reference to this symbol `f` automatically expanded to `f()`? 
*/ - def isAutoApplied(sym: Symbol): Boolean = { + def isAutoApplied(sym: Symbol): Boolean = sym.isConstructor || sym.matchNullaryLoosely || ctx.testScala2Mode(MissingEmptyArgumentList(sym), tree.sourcePos, patch(tree.span.endPos, "()")) - } // Reasons NOT to eta expand: // - we reference a constructor @@ -2693,7 +2676,7 @@ class Typer extends Namer else if (tree.symbol.isScala2Macro && // raw and s are eliminated by the StringInterpolatorOpt phase tree.symbol != defn.StringContext_raw && - tree.symbol != defn.StringContext_s) { + tree.symbol != defn.StringContext_s) if (tree.symbol eq defn.StringContext_f) { // As scala.StringContext.f is defined in the standard library which // we currently do not bootstrap we cannot implement the macro in the library. @@ -2703,18 +2686,19 @@ class Typer extends Namer val Apply(TypeApply(Select(sc, _), _), args) = tree val newCall = ref(defn.InternalStringContextMacroModule_f).appliedTo(sc).appliedToArgs(args) readaptSimplified(Inliner.inlineCall(newCall)) - } else if (ctx.settings.XignoreScala2Macros.value) { + } + else if (ctx.settings.XignoreScala2Macros.value) { ctx.warning("Scala 2 macro cannot be used in Dotty, this call will crash at runtime. See http://dotty.epfl.ch/docs/reference/dropped-features/macros.html", tree.sourcePos.startPos) Throw(New(defn.MatchErrorClass.typeRef, Literal(Constant(s"Reached unexpanded Scala 2 macro call to ${tree.symbol.showFullName} compiled with -Xignore-scala2-macros.")) :: Nil)) .withType(tree.tpe) .withSpan(tree.span) - } else { + } + else { ctx.error( """Scala 2 macro cannot be used in Dotty. See http://dotty.epfl.ch/docs/reference/dropped-features/macros.html\n" |To turn this error into a warning, pass -Xignore-scala2-macros to the compiler""".stripMargin, tree.sourcePos.startPos) tree } - } else if (tree.tpe <:< pt) { if (pt.hasAnnotation(defn.InlineParamAnnot)) checkInlineConformant(tree, isFinal = false, "argument to inline parameter") @@ -2763,12 +2747,11 @@ class Typer extends Namer ({ resMatch = constrainResult(tree.symbol, wtp, pt); resMatch } || !functionExpected) => if (resMatch || ctx.mode.is(Mode.ImplicitsEnabled)) adaptNoArgsImplicitMethod(wtp) - else { + else // Don't proceed with implicit search if result type cannot match - the search // will likely be under-constrained, which means that an unbounded number of alternatives // is tried. See strawman-contrib MapDecoratorTest.scala for an example where this happens. err.typeMismatch(tree, pt) - } case wtp: MethodType if needsEta => val funExpected = functionExpected val arity = @@ -2783,7 +2766,7 @@ class Typer extends Namer if (nparams > 0 || pt.eq(AnyFunctionProto)) nparams else -1 // no eta expansion in this case } - adaptNoArgsUnappliedMethod(wtp, funExpected, arity) + adaptNoArgsUnappliedMethod(wtp, funExpected, arity) case _ => adaptNoArgsOther(wtp) } @@ -2877,13 +2860,12 @@ class Typer extends Namer pt match { case SelectionProto(name, mbrType, _, _) => def tryExtension(implicit ctx: Context): Tree = - try { + try findRef(name, WildcardType, ExtensionMethod, tree.posd) match { case ref: TermRef => extMethodApply(untpd.ref(ref).withSpan(tree.span), tree, mbrType) case _ => EmptyTree } - } catch { case ex: TypeError => errorTree(tree, ex, tree.sourcePos) } @@ -3017,7 +2999,7 @@ class Typer extends Namer * * Overwritten to no-op in ReTyper. 
*/ - protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(implicit ctx: Context) : Unit = { + protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(implicit ctx: Context) : Unit = tree match { case _: RefTree | _: Literal if !isVarPattern(tree) && @@ -3030,7 +3012,6 @@ class Typer extends Namer typedExpr(cmp, defn.BooleanType) case _ => } - } private def checkStatementPurity(tree: tpd.Tree)(original: untpd.Tree, exprOwner: Symbol)(implicit ctx: Context): Unit = { if (!tree.tpe.isErroneous && !ctx.isAfterTyper && isPureExpr(tree) && diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index 5987419e7b0a..70e17d9dc99a 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -94,14 +94,13 @@ class VarianceChecker()(implicit ctx: Context) { * The search proceeds from `base` to the owner of `tvar`. * Initially the state is covariant, but it might change along the search. */ - def relativeVariance(tvar: Symbol, base: Symbol, v: Variance = Covariant): Variance = /*trace(i"relative variance of $tvar wrt $base, so far: $v")*/ { + def relativeVariance(tvar: Symbol, base: Symbol, v: Variance = Covariant): Variance = /*trace(i"relative variance of $tvar wrt $base, so far: $v")*/ if (base == tvar.owner) v else if (base.is(Param) && base.owner.isTerm) relativeVariance(tvar, paramOuter(base.owner), flip(v)) else if (ignoreVarianceIn(base.owner)) Bivariant else if (base.isAliasType) relativeVariance(tvar, base.owner, Invariant) else relativeVariance(tvar, base.owner, v) - } /** The next level to take into account when determining the * relative variance with a method parameter as base. The method @@ -175,7 +174,7 @@ class VarianceChecker()(implicit ctx: Context) { case Some(VarianceError(tvar, required)) => def msg = i"${varianceString(tvar.flags)} $tvar occurs in ${varianceString(required)} position in type ${sym.info} of $sym" if (ctx.scala2Mode && - (sym.owner.isConstructor || sym.ownersIterator.exists(_.isAllOf(ProtectedLocal)))) { + (sym.owner.isConstructor || sym.ownersIterator.exists(_.isAllOf(ProtectedLocal)))) ctx.migrationWarning( s"According to new variance rules, this is no longer accepted; need to annotate with @uncheckedVariance:\n$msg", pos) @@ -183,7 +182,6 @@ class VarianceChecker()(implicit ctx: Context) { // Patch is disabled until two TODOs are solved: // TODO use an import or shorten if possible // TODO need to use a `:' if annotation is on term - } else ctx.error(msg, pos) case None => } diff --git a/compiler/src/dotty/tools/dotc/typer/Variances.scala b/compiler/src/dotty/tools/dotc/typer/Variances.scala index 4a42ba32a1d0..c593daa121c3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Variances.scala +++ b/compiler/src/dotty/tools/dotc/typer/Variances.scala @@ -11,11 +11,10 @@ object Variances { val Invariant: Variance = EmptyFlags /** Flip between covariant and contravariant */ - def flip(v: Variance): Variance = { + def flip(v: Variance): Variance = if (v == Covariant) Contravariant else if (v == Contravariant) Covariant else v - } /** Map everything below Bivariant to Invariant */ def cut(v: Variance): Variance = @@ -54,14 +53,12 @@ object Variances { } /** Compute variance of type parameter `tparam' in all type annotations `annots'. 
*/ - def varianceInAnnots(annots: List[Annotation])(tparam: Symbol)(implicit ctx: Context): Variance = { + def varianceInAnnots(annots: List[Annotation])(tparam: Symbol)(implicit ctx: Context): Variance = annots.foldLeft(Bivariant) ((v, annot) => v & varianceInAnnot(annot)(tparam)) - } /** Compute variance of type parameter `tparam' in type annotation `annot'. */ - def varianceInAnnot(annot: Annotation)(tparam: Symbol)(implicit ctx: Context): Variance = { + def varianceInAnnot(annot: Annotation)(tparam: Symbol)(implicit ctx: Context): Variance = varianceInType(annot.tree.tpe)(tparam) - } /** Compute variance of type parameter tparam in type tp. */ def varianceInType(tp: Type)(tparam: Symbol)(implicit ctx: Context): Variance = tp match { diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala index 4c5036121fa8..ee72127811d8 100644 --- a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala +++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala @@ -104,7 +104,7 @@ object CommentParsing { * Merge sections following an usecase into the usecase comment, so they * can override the parent symbol's sections */ - def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = { + def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = idxs.indexWhere(str.startsWith("@usecase", _)) match { case firstUCIndex if firstUCIndex != -1 => val commentSections = idxs.take(firstUCIndex) @@ -113,7 +113,6 @@ object CommentParsing { case _ => idxs } - } /** * Merge the inheritdoc sections, as they never make sense on their own @@ -172,12 +171,14 @@ object CommentParsing { def skipVariable(str: String, start: Int): Int = { var idx = start if (idx < str.length && (str charAt idx) == '{') { - while { + while ({ idx += 1 idx < str.length && (str charAt idx) != '}' - } do () + }) + () if (idx < str.length) idx + 1 else start - } else { + } + else { while (idx < str.length && isVarPart(str charAt idx)) idx += 1 idx @@ -243,7 +244,8 @@ object CommentParsing { val toBeRemoved = for { section <- xs lines = sections filter { startsWithTag(raw, _, section) } - } yield lines + } + yield lines val end = startTag(raw, toBeRemoved.flatten.sortBy(_._1).toList) diff --git a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala index 38b283c1a9eb..8be244fa0136 100644 --- a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala +++ b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala @@ -9,28 +9,27 @@ object DiffUtil { val ansiColorToken: Char = '\u001b' - @tailrec private def splitTokens(str: String, acc: List[String] = Nil): List[String] = { - if (str == "") { + @tailrec private def splitTokens(str: String, acc: List[String] = Nil): List[String] = + if (str == "") acc.reverse - } else { + else { val head = str.charAt(0) val (token, rest) = if (head == ansiColorToken) { // ansi color token val splitIndex = str.indexOf('m') + 1 (str.substring(0, splitIndex), str.substring(splitIndex)) - } else if (Character.isAlphabetic(head) || Character.isDigit(head)) { + } + else if (Character.isAlphabetic(head) || Character.isDigit(head)) str.span(c => Character.isAlphabetic(c) || Character.isDigit(c) && c != ansiColorToken) - } else if (Character.isMirrored(head) || Character.isWhitespace(head)) { + else if (Character.isMirrored(head) || Character.isWhitespace(head)) str.splitAt(1) - } else { + else str.span { c => !Character.isAlphabetic(c) && !Character.isDigit(c) && !Character.isMirrored(c) && 
!Character.isWhitespace(c) && c != ansiColorToken } - } splitTokens(rest, token :: acc) } - } /** @return a tuple of the (found, expected, changedPercentage) diffs as strings */ @@ -122,16 +121,16 @@ object DiffUtil { private def added(str: String): String = bgColored(str, Console.GREEN) private def deleted(str: String) = bgColored(str, Console.RED) - private def bgColored(str: String, color: String): String = { + private def bgColored(str: String, color: String): String = if (str.isEmpty) "" else { val (spaces, rest) = str.span(_ == '\n') if (spaces.isEmpty) { val (text, rest2) = str.span(_ != '\n') Console.BOLD + color + text + Console.RESET + bgColored(rest2, color) - } else spaces + bgColored(rest, color) + } + else spaces + bgColored(rest, color) } - } private def eof() = "\u001B[51m" + "EOF" + Console.RESET private sealed trait Patch @@ -141,14 +140,14 @@ object DiffUtil { private final case class Inserted(str: String) extends Patch private def hirschberg(a: Array[String], b: Array[String]): Array[Patch] = { - def build(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = { - if (x.isEmpty) { + def build(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = + if (x.isEmpty) builder += Inserted(y.mkString) - } else if (y.isEmpty) { + else if (y.isEmpty) builder += Deleted(x.mkString) - } else if (x.length == 1 || y.length == 1) { + else if (x.length == 1 || y.length == 1) needlemanWunsch(x, y, builder) - } else { + else { val xlen = x.length val xmid = xlen / 2 val ylen = y.length @@ -166,7 +165,6 @@ object DiffUtil { build(x1, y1, builder) build(x2, y2, builder) } - } val builder = Array.newBuilder[Patch] build(a, b, builder) builder.result() @@ -200,19 +198,18 @@ object DiffUtil { else if (j == 0) d * i else 0 } - for (i <- 1 to x.length) { + for (i <- 1 to x.length) for (j <- 1 to y.length) { val mtch = score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1)) val delete = score(i - 1)(j) + d val insert = score(i)(j - 1) + d score(i)(j) = mtch max insert max delete } - } var alignment = List.empty[Patch] var i = x.length var j = y.length - while (i > 0 || j > 0) { + while (i > 0 || j > 0) if (i > 0 && j > 0 && score(i)(j) == score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))) { val newHead = if (x(i - 1) == y(j - 1)) Unmodified(x(i - 1)) @@ -220,15 +217,16 @@ object DiffUtil { alignment = newHead :: alignment i = i - 1 j = j - 1 - } else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) { + } + else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) { alignment = Deleted(x(i - 1)) :: alignment i = i - 1 - } else { + } + else { alignment = Inserted(y(j - 1)) :: alignment j = j - 1 } - } builder ++= alignment } - } + diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index 972ca57c5c25..8eaaec342450 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -94,9 +94,8 @@ class HashSet[T >: Null <: AnyRef](powerOfTwoInitialCapacity: Int, loadFactor: F } /** Add all entries in `xs` to set */ - def addEntries(xs: TraversableOnce[T]): Unit = { + def addEntries(xs: TraversableOnce[T]): Unit = xs.iterator foreach addEntry - } /** The iterator of all elements in the set */ def iterator: Iterator[T] = new Iterator[T] { diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala index 61f7457a6435..a3e4d63f604a 100644 --- 
a/compiler/src/dotty/tools/dotc/util/LRUCache.scala +++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala @@ -42,10 +42,12 @@ class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] { next = next.updated(last, current) } values(current) - } else if (current == last) { + } + else if (current == last) { lastButOne = prev null - } else + } + else lookupNext(current, follow, nx) } lookupNext(last, first, next) @@ -67,11 +69,12 @@ class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] { /** Invalidate key. The invalidated element becomes * the last in the queue. */ - def invalidate(key: Key): Unit = + def invalidate(key: Key): Unit = { if (lookup(key) != null) { keys(first) = null last = first } + } def indices: Iterator[Int] = Iterator.iterate(first)(next.apply) diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala index f66a8675d261..b9d5481ce5eb 100644 --- a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala +++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala @@ -56,11 +56,11 @@ object NameTransformer { } /** Decode expanded characters starting with `$u`, followed by the character's unicode expansion. */ - def decodeIllegalChars(name: String): String = { + def decodeIllegalChars(name: String): String = if (name.contains("$u")) { val sb = new mutable.StringBuilder() var i = 0 - while (i < name.length) { + while (i < name.length) if (i < name.length - 5 && name(i) == '$' && name(i + 1) == 'u') { val numbers = name.substring(i + 2, i + 6) try sb.append(Integer.valueOf(name.substring(i + 2, i + 6), 16).toChar) @@ -69,15 +69,14 @@ object NameTransformer { sb.append("$u").append(numbers) } i += 6 - } else { + } + else { sb.append(name(i)) i += 1 } - } sb.result() } else name - } /** Replace operator symbols by corresponding expansion strings. * diff --git a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala index 266a3145e0bc..a56881631586 100644 --- a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala +++ b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala @@ -62,7 +62,8 @@ class ParsedComment(val comment: Comment) { boundss <- groupedSections.get(tag) texts = boundss.map { case (start, end) => clean(content.slice(start, end)) } formatted <- formatter(texts) - } { + } + { buf.append(formatted) buf.append(System.lineSeparator) } @@ -101,9 +102,11 @@ object ParsedComment { */ def docOf(symbol: Symbol)(implicit ctx: Context): Option[ParsedComment] = { val documentedSymbol = if (symbol.isPrimaryConstructor) symbol.owner else symbol - for { docCtx <- ctx.docCtx - comment <- docCtx.docstring(documentedSymbol) - } yield new ParsedComment(comment) + for { + docCtx <- ctx.docCtx + comment <- docCtx.docstring(documentedSymbol) + } + yield new ParsedComment(comment) } @scala.annotation.internal.sharable @@ -172,15 +175,13 @@ object ParsedComment { * @param snippet The code snippet * @return `snippet`, wrapped in a code fence. 
*/ - private def toCodeFence(language: String)(ctx: Context, snippet: String): String = { - if (colorEnabled(ctx)) { + private def toCodeFence(language: String)(ctx: Context, snippet: String): String = + if (colorEnabled(ctx)) SyntaxHighlighting.highlight(snippet)(ctx) - } else { + else s"""```$language |$snippet |```""".stripMargin - } - } /** * Format the elements of documentation associated with a given tag using `fn`, and starts the @@ -212,9 +213,8 @@ object ParsedComment { ctx.settings.color.value != "never" /** Show `str` in bold */ - private def bold(str: String)(implicit ctx: Context): String = { + private def bold(str: String)(implicit ctx: Context): String = if (colorEnabled) s"$BOLD$str$RESET" else s"**$str**" - } - } + diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala index c4e9ad0f9e62..f69b993ade4d 100644 --- a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala +++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala @@ -106,7 +106,8 @@ object ShowPickled { idx += 1 result = (result << 7) + (b & 0x7f) (b & 0x80) != 0L - }) () + }) + () result.toInt } @@ -248,15 +249,15 @@ object ShowPickled { case _ => } out.println() - if (buf.readIndex != end) { + if (buf.readIndex != end) out.println("BAD ENTRY END: computed = %d, actual = %d, bytes = %s".format( end, buf.readIndex, buf.bytes.slice(index(i), (end max buf.readIndex)).mkString(", ") )) - } } for (i <- 0 until index.length) printEntry(i) } +} /* * @@ -282,4 +283,3 @@ object ShowPickled { } } }*/ -} diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 9b4d7934dcc0..d59080437627 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -48,7 +48,7 @@ object Signatures { * @return A triple containing the index of the parameter being edited, the index of the function * being called, the list of overloads of this function). */ - def callInfo(path: List[tpd.Tree], span: Span)(implicit ctx: Context): (Int, Int, List[SingleDenotation]) = { + def callInfo(path: List[tpd.Tree], span: Span)(implicit ctx: Context): (Int, Int, List[SingleDenotation]) = path match { case Apply(fun, params) :: _ => val alreadyAppliedCount = Signatures.countParams(fun) @@ -74,7 +74,6 @@ object Signatures { case _ => (0, 0, Nil) } - } def toSignature(denot: SingleDenotation)(implicit ctx: Context): Option[Signature] = { val symbol = denot.symbol @@ -142,12 +141,11 @@ object Signatures { * @param tree The tree to inspect. * @return The number of parameters that are passed. */ - private def countParams(tree: tpd.Tree): Int = { + private def countParams(tree: tpd.Tree): Int = tree match { case Apply(fun, params) => countParams(fun) + params.length case _ => 0 } - } /** * Inspect `err` to determine, if it is an error related to application of an overloaded @@ -173,31 +171,31 @@ object Signatures { Nil } - // If the user writes `foo(bar, )`, the typer will insert a synthetic - // `null` parameter: `foo(bar, null)`. This may influence what's the "best" - // alternative, so we discard it. - val userParams = params match { - case xs :+ (nul @ Literal(Constant(null))) if nul.span.isZeroExtent => xs - case _ => params - } - val userParamsTypes = userParams.map(_.tpe) - - // Assign a score to each alternative (how many parameters are correct so far), and - // use that to determine what is the current active signature. 
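// Editorial aside, not part of the patch: a simplified, standalone sketch of the
// scoring idea described in the comment above -- each overload alternative is
// scored by the length of the prefix of already-typed arguments that conform to
// its parameter types, and the highest-scoring alternative is picked. Types are
// modelled as plain strings here; the real code compares `Type`s with `<:<`.
object OverloadScoreSketch {
  def score(argTypes: List[String], paramTypes: List[String]): Int =
    argTypes.zip(paramTypes).takeWhile { case (a, p) => a == p }.size

  def bestAlternative(argTypes: List[String], alternatives: List[List[String]]): Int =
    if (alternatives.isEmpty) 0
    else alternatives.map(score(argTypes, _)).zipWithIndex.maxBy(_._1)._2
}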
- val alternativesScores = alternatives.map { alt => - alt.info.stripPoly match { - case tpe: MethodType => - userParamsTypes.zip(tpe.paramInfos).takeWhile{ case (t0, t1) => t0 <:< t1 }.size - case _ => - 0 - } + // If the user writes `foo(bar, )`, the typer will insert a synthetic + // `null` parameter: `foo(bar, null)`. This may influence what's the "best" + // alternative, so we discard it. + val userParams = params match { + case xs :+ (nul @ Literal(Constant(null))) if nul.span.isZeroExtent => xs + case _ => params + } + val userParamsTypes = userParams.map(_.tpe) + + // Assign a score to each alternative (how many parameters are correct so far), and + // use that to determine what is the current active signature. + val alternativesScores = alternatives.map { alt => + alt.info.stripPoly match { + case tpe: MethodType => + userParamsTypes.zip(tpe.paramInfos).takeWhile{ case (t0, t1) => t0 <:< t1 }.size + case _ => + 0 } - val bestAlternative = - if (alternativesScores.isEmpty) 0 - else alternativesScores.zipWithIndex.maxBy(_._1)._2 + } + val bestAlternative = + if (alternativesScores.isEmpty) 0 + else alternativesScores.zipWithIndex.maxBy(_._1)._2 - (bestAlternative, alternatives) + (bestAlternative, alternatives) } - } + diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala index 6d34760efd4f..a456633f1bc5 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala @@ -159,19 +159,22 @@ object SimpleIdentityMap { def remove(k: K): SimpleIdentityMap[K, V] = { var i = 0 while (i < bindings.length) { - if (bindings(i) eq k) return { - if (size == CompactifyThreshold) { - var m: SimpleIdentityMap[K, V] = Empty[K] - for (j <- 0 until bindings.length by 2) - if (j != i) m = m.updated(key(j), value(j)) - m - } else { - val bindings1 = new Array[AnyRef](bindings.length - 2) - System.arraycopy(bindings, 0, bindings1, 0, i) - System.arraycopy(bindings, i + 2, bindings1, i, bindings1.length - i) - new MapMore(bindings1) + if (bindings(i) eq k) + return { + if (size == CompactifyThreshold) { + var m: SimpleIdentityMap[K, V] = Empty[K] + for (j <- 0 until bindings.length by 2) { + if (j != i) m = m.updated(key(j), value(j)) + } + m + } + else { + val bindings1 = new Array[AnyRef](bindings.length - 2) + System.arraycopy(bindings, 0, bindings1, 0, i) + System.arraycopy(bindings, i + 2, bindings1, i, bindings1.length - i) + new MapMore(bindings1) + } } - } i += 2 } this diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index fe87ca9b9043..f0daa8b75375 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -32,7 +32,8 @@ object ScriptSourceFile { val matcher = headerPattern matcher content.mkString if (matcher.find) matcher.end else throw new IOException("script file does not close its header with !# or ::!#") - } else 0 + } + else 0 new SourceFile(file, content drop headerLength) { override val underlying = new SourceFile(file, content) } diff --git a/compiler/src/dotty/tools/dotc/util/Spans.scala b/compiler/src/dotty/tools/dotc/util/Spans.scala index c3b1e371e7a2..b2a4c7c0514f 100644 --- a/compiler/src/dotty/tools/dotc/util/Spans.scala +++ b/compiler/src/dotty/tools/dotc/util/Spans.scala @@ -77,6 +77,17 @@ object Spans { def contains(that: Span): Boolean = !that.exists || exists && (start <= that.start && end 
>= that.end) + /** Does the range of this span overlap with the range of that span at more than a single point? */ + def overlaps(that: Span): Boolean = { + def containsInner(span: Span, offset: Int) = span.start < offset && offset < span.end + exists && that.exists && ( + containsInner(this, that.start) + || containsInner(this, that.end) + || containsInner(that, this.start) + || containsInner(that, this.end) + ) + } + /** Is this span synthetic? */ def isSynthetic: Boolean = pointDelta == SyntheticPointDelta @@ -132,13 +143,12 @@ object Spans { def !=(that: Span): Boolean = this.coords != that.coords } - private def fromOffsets(start: Int, end: Int, pointDelta: Int) = { + private def fromOffsets(start: Int, end: Int, pointDelta: Int) = //assert(start <= end || start == 1 && end == 0, s"$start..$end") new Span( (start & StartEndMask).toLong | ((end & StartEndMask).toLong << StartEndBits) | (pointDelta.toLong << (StartEndBits * 2))) - } /** A synthetic span with given start and end */ def Span(start: Int, end: Int): Span = diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index 0f2bf8eeaf21..87d13e0dde1c 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -20,14 +20,16 @@ import collection.mutable } @forceInline - def record(fn: => String, n: => Int = 1): Unit = + def record(fn: => String, n: => Int = 1): Unit = { if (enabled) doRecord(fn, n) + } - def doRecord(fn: String, n: Int) = + def doRecord(fn: String, n: Int) = { if (monitored) { val name = if (fn.startsWith("member-")) "member" else fn hits(name) += n } + } @forceInline def trackTime[T](fn: String)(op: => T): T = @@ -38,7 +40,8 @@ import collection.mutable if (monitored) { val start = System.nanoTime try op1 finally record(fn, ((System.nanoTime - start) / 1000).toInt) - } else op1 + } + else op1 } final val GroupChar = '/' @@ -52,7 +55,7 @@ import collection.mutable hits(s"Total $prefix") += hits(name) } - def maybeMonitored[T](op: => T)(implicit ctx: Context): T = { + def maybeMonitored[T](op: => T)(implicit ctx: Context): T = if (ctx.settings.YdetailedStats.value) { monitored = true try op @@ -62,6 +65,6 @@ import collection.mutable println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n") println(s"uniqueInfo (size, accesses, collisions): ${ctx.base.uniquesSizes}") } - } else op - } + } + else op } diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index 22c62798c278..a5d40c99421d 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -44,9 +44,8 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e private def computeCapacity = { if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0") var candidate = 1 - while (candidate < initialCapacity) { + while (candidate < initialCapacity) candidate *= 2 - } candidate } @@ -132,13 +131,12 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e @tailrec def linkedListLoop(entry: Entry[A]): Unit = entry match { case null => () - case _ => { + case _ => val bucket = bucketFor(entry.hash) val oldNext = entry.tail entry.tail = table(bucket) table(bucket) = entry linkedListLoop(oldNext) - } } linkedListLoop(oldTable(oldBucket)) @@ -152,7 +150,7 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, 
loadFactor: Double) e // from scala.reflect.internal.Set, find an element or null if it isn't contained def findEntry(elem: A): A = elem match { case null => throw new NullPointerException("WeakHashSet cannot hold nulls") - case _ => { + case _ => removeStaleEntries() val hash = elem.hashCode val bucket = bucketFor(hash) @@ -160,20 +158,18 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e @tailrec def linkedListLoop(entry: Entry[A]): A = entry match { case null => null.asInstanceOf[A] - case _ => { + case _ => val entryElem = entry.get if (elem.equals(entryElem)) entryElem else linkedListLoop(entry.tail) - } } linkedListLoop(table(bucket)) - } } // add an element to this set unless it's already in there and return the element def findEntryOrUpdate(elem: A): A = elem match { case null => throw new NullPointerException("WeakHashSet cannot hold nulls") - case _ => { + case _ => removeStaleEntries() val hash = elem.hashCode val bucket = bucketFor(hash) @@ -189,21 +185,19 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e @tailrec def linkedListLoop(entry: Entry[A]): A = entry match { case null => add() - case _ => { + case _ => val entryElem = entry.get if (elem.equals(entryElem)) entryElem else linkedListLoop(entry.tail) - } } linkedListLoop(oldHead) - } } // add an element to this set unless it's already in there and return this set override def addOne(elem: A): this.type = elem match { case null => throw new NullPointerException("WeakHashSet cannot hold nulls") - case _ => { + case _ => removeStaleEntries() val hash = elem.hashCode val bucket = bucketFor(hash) @@ -224,13 +218,12 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e linkedListLoop(oldHead) this - } } // remove an element from this set and return this set override def subtractOne(elem: A): this.type = elem match { case null => this - case _ => { + case _ => removeStaleEntries() val bucket = bucketFor(elem.hashCode) @@ -245,7 +238,6 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e linkedListLoop(null, table(bucket)) this - } } // empty this set @@ -309,9 +301,9 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e // element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry entry = entry.tail hasNext - } else { - true } + else + true } } diff --git a/docs/docs/internals/syntax.md b/docs/docs/internals/syntax.md index f9827b255162..fbd6479b3f1c 100644 --- a/docs/docs/internals/syntax.md +++ b/docs/docs/internals/syntax.md @@ -84,6 +84,7 @@ comment ::= ‘/*’ “any sequence of characters; nested comments ar | ‘//’ “any sequence of characters up to end of line” nl ::= “new line character” +cnl ::= nl | "colon at eol" semi ::= ‘;’ | nl {nl} ``` @@ -151,8 +152,8 @@ FunArgTypes ::= InfixType | ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ TypedFunParam ::= id ‘:’ Type MatchType ::= InfixType `match` TypeCaseClauses -InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) -RefinedType ::= WithType {[nl] Refinement} RefinedTypeTree(t, ds) +InfixType ::= RefinedType {id [cnl] RefinedType} InfixOp(t1, op, t2) +RefinedType ::= WithType {[cnl] Refinement} RefinedTypeTree(t, ds) WithType ::= AnnotType {‘with’ AnnotType} (deprecated) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) SimpleType ::= SimpleType TypeArgs AppliedTypeTree(t, args) @@ -189,7 +190,7 @@ FunParams ::= Bindings | ‘_’ Expr1 ::= ‘if’ ‘(’ Expr 
‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) - | ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) + | ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) | ‘while’ ‘(’ Expr ‘)’ {nl} Expr WhileDo(Parens(cond), body) | ‘while’ Expr ‘do’ Expr WhileDo(cond, body) | ‘try’ Expr Catches [‘finally’ Expr] Try(expr, catches, expr?) @@ -209,7 +210,7 @@ Ascription ::= ‘:’ InfixType Catches ::= ‘catch’ Expr PostfixExpr ::= InfixExpr [id] PostfixOp(expr, op) InfixExpr ::= PrefixExpr - | InfixExpr id [nl] InfixExpr InfixOp(expr, op, expr) + | InfixExpr id [cnl] InfixExpr InfixOp(expr, op, expr) | InfixExpr ‘given’ (InfixExpr | ParArgumentExprs) PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr PrefixOp(expr, op) SimpleExpr ::= ‘new’ (ConstrApp [TemplateBody] | TemplateBody) New(constr | templ) @@ -234,7 +235,7 @@ ExprInParens ::= PostfixExpr ‘:’ Type ParArgumentExprs ::= ‘(’ ExprsInParens ‘)’ exprs | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’ exprs :+ Typed(expr, Ident(wildcardStar)) ArgumentExprs ::= ParArgumentExprs - | [nl] BlockExpr + | [cnl] BlockExpr BlockExpr ::= ‘{’ CaseClauses | Block ‘}’ Block ::= {BlockStat semi} [BlockResult] Block(stats, expr?) BlockStat ::= Import @@ -263,7 +264,7 @@ Pattern ::= Pattern1 { ‘|’ Pattern1 } Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) | Pattern2 Pattern2 ::= [id ‘@’] InfixPattern Bind(name, pat) -InfixPattern ::= SimplePattern { id [nl] SimplePattern } InfixOp(pat, op, pat) +InfixPattern ::= SimplePattern { id [cnl] SimplePattern } InfixOp(pat, op, pat) SimplePattern ::= PatVar Ident(wildcard) | Literal Bind(name, Ident(wildcard)) | ‘(’ [Patterns] ‘)’ Parens(pats) Tuple(pats) @@ -282,7 +283,7 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ### Type and Value Parameters ```ebnf ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ -ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ @@ -370,9 +371,7 @@ PatDef ::= ids [‘:’ Type] ‘=’ Expr VarDef ::= PatDef | ids ‘:’ Type ‘=’ ‘_’ DefDef ::= DefSig [(‘:’ | ‘<:’) Type] ‘=’ Expr DefDef(_, name, tparams, vparamss, tpe, expr) - | DefSig [nl] ‘{’ Block ‘}’ DefDef(_, name, tparams, vparamss, tpe, Block) - | ‘this’ DefParamClause DefParamClauses DefDef(_, , Nil, vparamss, EmptyTree, expr | Block) - (‘=’ ConstrExpr | [nl] ConstrBlock) + | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr DefDef(_, , Nil, vparamss, EmptyTree, expr | Block) TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef @@ -396,11 +395,10 @@ ConstrApp ::= SimpleConstrApp | ‘(’ SimpleConstrApp {‘given’ (PrefixExpr | ParArgumentExprs)} ‘)’ SimpleConstrApp ::= AnnotType {ArgumentExprs} Apply(tp, args) ConstrExpr ::= SelfInvocation - | ConstrBlock + | ‘{’ SelfInvocation {semi BlockStat} ‘}’ SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} -ConstrBlock ::= ‘{’ SelfInvocation {semi BlockStat} ‘}’ -TemplateBody ::= [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’ (self, stats) +TemplateBody ::= [cnl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’ (self, stats) TemplateStat ::= Import | Export | {Annotation [nl]} {Modifier} Def @@ -410,7 +408,7 @@ TemplateStat ::= Import SelfType ::= id [‘:’ InfixType] ‘=>’ ValDef(_, name, tpt, _) | ‘this’ ‘:’ InfixType ‘=>’ -EnumBody ::= 
[nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ +EnumBody ::= [cnl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ EnumStat ::= TemplateStat | {Annotation [nl]} {Modifier} EnumCase EnumCase ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps]] | ids) @@ -422,8 +420,8 @@ TopStat ::= Import | Packaging | PackageObject | -Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’ Package(qid, stats) +Packaging ::= ‘package’ QualId [cnl] ‘{’ TopStatSeq ‘}’ Package(qid, stats) PackageObject ::= ‘package’ ‘object’ ObjectDef object with package in mods. -CompilationUnit ::= {‘package’ QualId semi} TopStatSeq Package(qid, stats) +CompilationUnit ::= {‘package’ QualId (semi | cnl)} TopStatSeq Package(qid, stats) ``` diff --git a/docs/docs/reference/other-new-features/indentation.md b/docs/docs/reference/other-new-features/indentation.md new file mode 100644 index 000000000000..24ea40dc8631 --- /dev/null +++ b/docs/docs/reference/other-new-features/indentation.md @@ -0,0 +1,188 @@ +--- +layout: doc-page +title: Significant Indentation +--- + +As an experimental feature, Scala 3 treats indentation as significant. + +Indentation is significant everywhere except inside regions delineated by braces `{...}`, brackets `[...]`, or parentheses `(...)`, or within string or character literals. + +Where indentation is significant, the compiler will insert `<indent>` or `<outdent>` +tokens at certain line breaks. Grammatically, pairs of `<indent>` and `<outdent>` tokens have the same effect as pairs of braces `{` and `}`. + +The algorithm makes use of a stack `IW` of previously encountered indentation widths. The stack initially holds a single element with a zero indentation width. The _current indentation width_ is the indentation width of the top of the stack. + +There are two rules: + + 1. An `<indent>` is inserted at a line break, if + + - the first token on the next line has an indentation width strictly greater + than the current indentation width, and + - the last token on the previous line can start an indentation region. + + The following tokens can start an indentation region: + ``` + : = => <- if then else while do try catch finally for yield match + ``` + + If an `<indent>` is inserted, the indentation width of the token on the next line + is pushed onto `IW`, which makes it the new current indentation width. + + 2. An `<outdent>` is inserted at a line break, if + + - the first token on the next line has an indentation width strictly less + than the current indentation width, and + - the first token on the next line is not a + [leading infix operator](../changed-features/operators.html). + + If an `<outdent>` is inserted, the top element is popped from `IW`. + If the indentation width of the token on the next line is still less than the new current indentation width, step (2) repeats. Therefore, several `<outdent>` tokens + may be inserted in a row. + +It is an error if the indentation width of the token following an `<outdent>` does not +match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected. +```scala +if x < 0 then + -x + else // error: `else` does not align correctly + x +``` + +Indentation prefixes can consist of spaces and tabs. Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs".
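To make rules (1) and (2) above a bit more concrete, here is a minimal, illustrative sketch of how a scanner could use the stack `IW` at a line break. It is not the compiler's actual implementation: the name `tokensAtLineBreak` is made up, indentation widths are simplified to plain space counts (so they are always comparable), and the leading-infix-operator exception of rule (2) is omitted.

```scala
// Simplified model of the two insertion rules. An indentation width is just a
// number of spaces, so the prefix ordering degenerates to integer comparison.
object IndentSketch:

  /** Tokens to insert at a line break, together with the updated stack `iw`.
   *  The head of `iw` is the current indentation width; the bottom element is 0.
   */
  def tokensAtLineBreak(nextIndent: Int, lastTokenOpensRegion: Boolean,
                        iw: List[Int]): (List[String], List[Int]) =
    if nextIndent > iw.head && lastTokenOpensRegion then
      (List("<indent>"), nextIndent :: iw)         // rule (1): push and insert <indent>
    else if nextIndent < iw.head then
      val (popped, rest) = iw.span(_ > nextIndent)  // rule (2): pop while still wider,
      (popped.map(_ => "<outdent>"), rest)          // inserting one <outdent> per pop
    else
      (Nil, iw)

@main def indentSketchDemo(): Unit =
  val iw0 = List(0)
  val (t1, iw1) = IndentSketch.tokensAtLineBreak(2, lastTokenOpensRegion = true, iw0)
  println(s"$t1 $iw1")   // List(<indent>) List(2, 0)
  val (t2, iw2) = IndentSketch.tokensAtLineBreak(0, lastTokenOpensRegion = false, iw1)
  println(s"$t2 $iw2")   // List(<outdent>) List(0)
```

Under this simplification, dedenting past several stacked widths yields one `<outdent>` per popped width, which corresponds to the repeated application of step (2) described above.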
It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file. + +### Indentation Marker `:` + +A colon `:` at the end of a line is one of the possible tokens that opens an indentation region. Examples: + +```scala + times(10): + println("ah") + println("ha") +``` +or +```scala + xs.map: + x => + val y = x - 1 + y * y +``` +Colons at the end of lines are their own token, distinct from normal `:`. +The Scala grammar is changed so that colons at end of lines are accepted at all points +where an opening brace is legal, except if the previous token can already start an +indentation region. Special provisions are taken so that method result types can still use a colon at +the end of a line, followed by the actual type on the next. + +### Special Treatment of Case Clauses + +The indentation rules for `match` expressions and `catch` clauses are refined as follows: + + - An indentation region is opened after a `match` or `catch` also if the following `case` + appears at the indentation width that's current for the `match` itself. + - In that case, the indentation region closes at the first token at that + same indentation width that is not a `case`, or at any token with a smaller + indentation width, whichever comes first. + +These rules make it possible to write `match` expressions where the cases are not indented themselves, as in the example below: +```scala +x match +case 1 => print("I") +case 2 => print("II") +case 3 => print("III") +case 4 => print("IV") +case 5 => print("V") + +println(".") +``` + +### The End Marker + +Indentation-based syntax has many advantages over other conventions. But one possible problem is that it makes it hard to discern when a large indentation region ends, since there is no specific token that delineates the end. Braces are not much better since a brace by itself also contains no information about what region is closed. + +To solve this problem, Scala 3 offers an optional `end` marker. Example: +```scala +def largeMethod(...) = + ... + if ... then ... + else + ... // a large block + end if + ... // more code +end largeMethod +``` +An `end` marker consists of the identifier `end` which follows an `<outdent>` token, and is in turn followed on the same line by exactly one other token, which is either an identifier or one of the reserved words +```scala + if while for match try new +``` +If `end` is followed by a reserved word, the compiler checks that the marker closes an indentation region belonging to a construct that starts with the reserved word. If it is followed by an identifier _id_, the compiler checks that the marker closes an indentation region containing the right-hand side of a `val`, `var`, or `def`, or +the body of a class, trait, object, enum, given instance, or package clause that defines _id_. + +`end` itself is a soft keyword. It is only treated as an `end` marker if it +occurs at the start of a line and is followed by an identifier or one of the reserved words above. + +It is recommended that `end` markers are used for code where the extent of an indentation region is not immediately apparent "at a glance". Typically this is the case if an indentation region spans 20 lines or more. + +### Example + +Here is a (somewhat meta-circular) example of code using indentation.
It provides a concrete representation of indentation widths as defined above together with efficient operations for constructing and comparing indentation widths. + +```scala +enum IndentWidth: + + /** A run of `n` characters `ch` */ + case Run(ch: Char, n: Int) + + /** `l` followed by `r` */ + case Conc(l: IndentWidth, r: Run) + + def <= (that: IndentWidth): Boolean = + this match + case Run(ch1, n1) => + that match + case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) + case Conc(l, r) => this <= l + case Conc(l1, r1) => + that match + case Conc(l2, r2) => l1 == l2 && r1 <= r2 + case _ => false + + def < (that: IndentWidth): Boolean = this <= that && !(that <= this) + + override def toString: String = + this match + case Run(ch, n) => + val kind = ch match + case ' ' => "space" + case '\t' => "tab" + case _ => s"'$ch'-character" + val suffix = if n == 1 then "" else "s" + s"$n $kind$suffix" + case Conc(l, r) => + s"$l, $r" + +object IndentWidth: + private inline val MaxCached = 40 + + private val spaces = IArray.tabulate(MaxCached + 1): + new Run(' ', _) + private val tabs = IArray.tabulate(MaxCached + 1): + new Run('\t', _) + + def Run(ch: Char, n: Int): Run = + if n <= MaxCached && ch == ' ' then + spaces(n) + else if n <= MaxCached && ch == '\t' then + tabs(n) + else + new Run(ch, n) + + val Zero = Run(' ', 0) +end IndentWidth +``` + +### Rewrites + +The Dotty compiler can rewrite source code to indented code and back. +When invoked with options `-rewrite -indent`, it will rewrite braces to +indented regions where possible. When invoked with options `-rewrite -noindent`, it will rewrite in the reverse direction, inserting braces for indentation regions. +The `-indent` option only works on [new-style syntax](./control-syntax.html). So to go from old-style syntax to new-style indented code, one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options +`-rewrite -indent`. To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -noindent`, followed by `-rewrite -old-syntax`. diff --git a/docs/sidebar.yml b/docs/sidebar.yml index bc007227b073..62c1f3d1825c 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -103,6 +103,8 @@ sidebar: url: docs/reference/other-new-features/threadUnsafe-annotation.html - title: New Control Syntax url: docs/reference/other-new-features/control-syntax.html + - title: Significant Indentation + url: docs/reference/other-new-features/indentation.html - title: Other Changed Features subsection: - title: Structural Types diff --git a/tests/neg/endmarkers.scala b/tests/neg/endmarkers.scala new file mode 100644 index 000000000000..f371c05bb6cd --- /dev/null +++ b/tests/neg/endmarkers.scala @@ -0,0 +1,110 @@ +object Test: + + locally: + var x = 0 + while x < 10 do x += 1 + end while // error: end of statement expected but while found // error: not found: end + val f = 10 // error: ';' expected, but 'val' found + while + x += 1 + x < 10 + do () + end while // error: misaligned end marker + + def f(x: Int): Int = + val y = + if x > 0 then + println("hello") + 22 + else + println("world") + 33 + end f // error: misaligned end marker + + val z = 22 + x + y + z + end f // error: misaligned end marker + + def g = "!"
+ + val xs = List(1, 2, 3) + + xs.map: + x => + val y = x * x + y * y + + xs.map: + x => + val y = x * x + y + y + + println(f(2) + g) + + (new Test2).foo + (new Test3).foo + + var x = 1 + while + x += 1 + val y = x + println(y) + x < 10 + do () + +class Test2: + self => + def foo = 1 + + object x: + new Test2: + override def foo = 2 + end new // error: end of statement expected but new found // error: not found: end + def bar = 2 // error: ';' expected, but unindent found + end Test2 // error: misaligned end marker +end Test2 + +class Test3: + self => + def foo = 1 + end Test3 // error: not found: end + +import collection.mutable.HashMap + +class Coder(words: List[String]): + + class Foo: + println() + end Foo // error: not found: end + + (2 -> "ABC", new ArrowAssoc('3') -> "DEF") + + private val mnemonics = Map( + '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL", + '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ") + + ('1', "1") match + case (digit, str) => true + case _ => false + + ('1', "1") match + case (digit, str) => true + case _ => false + + try List(1, 2, 3) match + case x :: xs => println(x) + case Nil => println("Nil") + catch + case ex: java.io.IOException => println(ex) + case ex: Throwable => throw ex + end try + + /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */ + private val charCode0: Map[Char, Char] = + mnemonics + .withFilter: + case (digit, str) => true + case _ => false + .flatMap: + case (digit, str) => str map (ltr => ltr -> digit) + end Coder // error: The start of this line does not match any of the previous indentation widths. \ No newline at end of file diff --git a/tests/neg/i4373b.scala b/tests/neg/i4373b.scala index 50c8ef0813e7..a3d8f3850eff 100644 --- a/tests/neg/i4373b.scala +++ b/tests/neg/i4373b.scala @@ -1,5 +1,5 @@ // ==> 05bef7805687ba94da37177f7568e3ba7da1f91c.scala <== class x0 { x1: // error - x0 | _ // error + x0 | _ // error \ No newline at end of file diff --git a/tests/pos/indent.scala b/tests/pos/indent.scala new file mode 100644 index 000000000000..0c3ceca52b5d --- /dev/null +++ b/tests/pos/indent.scala @@ -0,0 +1,107 @@ +object Test: + + locally: + var x = 0 + while x < 10 do x += 1 + val f = 10 + while + x += 1 + x < 10 + do () + + def f(x: Int): Int = + val y = + if x > 0 then + println("hello") + 22 + else + println("world") + 33 + val z = 22 + x + y + z + end f + + def g = "!" + + val xs = List(1, 2, 3) + + xs.map: + x => + val y = x * x + y * y + + xs.map: + x => + val y = x * x + y + y + + println(f(2) + g) + + (new Test2).foo + (new Test3).foo + + var x = 1 + while + x += 1 + val y = x + println(y) + x < 10 + do () + +class Test2: + self => + def foo = 1 + + val x = + new Test2: + override def foo = 2 + end new + end x +end Test2 + +class Test3: + self => + def foo = 1 + +import collection.mutable.HashMap + +class Coder(words: List[String]): + + class Foo: + println() + end Foo + + class Bar + + (2 -> "ABC", new ArrowAssoc('3') -> "DEF") + + private val mnemonics = Map( + '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL", + '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ") + + ('1', "1") match + case (digit, str) => true + case _ => false + + ('1', "1") match + case (digit, str) => true + case _ => false + + try List(1, 2, 3) match + case x :: xs => println(x) + case Nil => println("Nil") + catch + case ex: java.io.IOException => println(ex) + case ex: Throwable => + throw ex + end try + + /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... 
'9' */ + private val charCode0: Map[Char, Char] = + mnemonics + .withFilter: + case (digit, str) => true + case _ => false + .flatMap: + case (digit, str) => str map (ltr => ltr -> digit) +end Coder \ No newline at end of file diff --git a/tests/pos/syntax-rewrite.scala b/tests/pos/syntax-rewrite.scala new file mode 100644 index 000000000000..f94e2a8e99d5 --- /dev/null +++ b/tests/pos/syntax-rewrite.scala @@ -0,0 +1,40 @@ +// This test source should be invariant under the following 4 compilation steps with options +// -rewrite -new-syntax +// -rewrite -indent +// -rewrite -noindent +// -rewrite -old-syntax +object test { + + for { + x <- List(1, 2, 3) + } + println(x) + + for (x <- List(1, 2, 3)) yield x + + for { + x <- List(1, 2, 3) + if x == 0 + } + println(x) + + def foo = { + println("hi") + println("ho") + // this comment goes inside braces + } + // this comment follows the brace + // this comment as well + object o { + } + + def loop[T]()(x: T): T = x + + def g() = /*>*/ loop() /*<*/ { + println() + 1 + } + + def bar() = { /* */ + } +}