From 5b7913c989caf32488719131360c4d89b9388894 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Fri, 24 Feb 2023 16:18:17 +0100 Subject: [PATCH 1/3] ExtractDependencies: Split dependency recording in separate class This will be used later in this PR to perform dependency recording during Typer. The new API is slightly higher level: - Dependencies are recorded using `addUsedName`, `addUsedRawName` and `addClassDependency`. - `addUsedName` takes a Symbol instead of a Name and takes care of name mangling, `addUsedRawName` is still available when there is no symbol (currently this only happens when recording a dependency on a renamed import). - These methods do not take a `from` argument, instead they take care of calling `resolveDependencySource` internally. - Recorded dependencies are sent to Zinc using `sendToZinc`. --- .../tools/dotc/sbt/ExtractDependencies.scala | 361 ++++++++++-------- 1 file changed, 193 insertions(+), 168 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 32ff7aab0206..175ac21d54bf 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -69,12 +69,13 @@ class ExtractDependencies extends Phase { override def run(using Context): Unit = { val unit = ctx.compilationUnit - val collector = new ExtractDependenciesCollector + val rec = DependencyRecorder() + val collector = ExtractDependenciesCollector(rec) collector.traverse(unit.tpdTree) if (ctx.settings.YdumpSbtInc.value) { - val deps = collector.dependencies.map(_.toString).toArray[Object] - val names = collector.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object] + val deps = rec.classDependencies.map(_.toString).toArray[Object] + val names = rec.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object] Arrays.sort(deps) Arrays.sort(names) @@ -91,61 +92,7 @@ class ExtractDependencies extends Phase { } finally pw.close() } - ctx.withIncCallback: cb => - collector.usedNames.foreach { - case (clazz, usedNames) => - val className = classNameAsString(clazz) - usedNames.names.foreach { - case (usedName, scopes) => - cb.usedName(className, usedName.toString, scopes) - } - } - collector.dependencies.foreach(recordDependency) - } - - /* - * Handles dependency on given symbol by trying to figure out if represents a term - * that is coming from either source code (not necessarily compiled in this compilation - * run) or from class file and calls respective callback method. 
- */ - def recordDependency(dep: ClassDependency)(using Context): Unit = { - val fromClassName = classNameAsString(dep.from) - val sourceFile = ctx.compilationUnit.source - - def binaryDependency(file: Path, binaryClassName: String) = - ctx.withIncCallback(_.binaryDependency(file, binaryClassName, fromClassName, sourceFile, dep.context)) - - def processExternalDependency(depFile: AbstractFile, binaryClassName: String) = { - depFile match { - case ze: ZipArchive#Entry => // The dependency comes from a JAR - ze.underlyingSource match - case Some(zip) if zip.jpath != null => - binaryDependency(zip.jpath, binaryClassName) - case _ => - case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem - binaryDependency(pf.jpath, binaryClassName) - case _ => - internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.from.srcPos) - } - } - - val depFile = dep.to.associatedFile - if (depFile != null) { - // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) - def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance - val depClassFile = - if depFile.isClass then depFile - else depFile.resolveSibling(dep.to.binaryClassName + ".class") - if (depClassFile != null) { - // Dependency is external -- source is undefined - processExternalDependency(depClassFile, dep.to.binaryClassName) - } else if (allowLocal || depFile != sourceFile.file) { - // We cannot ignore dependencies coming from the same source file because - // the dependency info needs to propagate. See source-dependencies/trait-trait-211. - val toClassName = classNameAsString(dep.to) - ctx.withIncCallback(_.classDependency(toClassName, fromClassName, dep.context)) - } - } + rec.sendToZinc() } } @@ -161,8 +108,6 @@ object ExtractDependencies { report.error(em"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) } -private case class ClassDependency(from: Symbol, to: Symbol, context: DependencyContext) - /** An object that maintain the set of used names from within a class */ private final class UsedNamesInClass { private val _names = new mutable.HashMap[Name, EnumSet[UseScope]] @@ -194,110 +139,18 @@ private final class UsedNamesInClass { * specially, see the subsection "Dependencies introduced by member reference and * inheritance" in the "Name hashing algorithm" section. */ -private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeTraverser => +private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd.TreeTraverser { thisTreeTraverser => import tpd._ - private val _usedNames = new mutable.HashMap[Symbol, UsedNamesInClass] - private val _dependencies = new mutable.HashSet[ClassDependency] - - /** The names used in this class, this does not include names which are only - * defined and not referenced. - */ - def usedNames: collection.Map[Symbol, UsedNamesInClass] = _usedNames - - /** The set of class dependencies from this compilation unit. - */ - def dependencies: Set[ClassDependency] = _dependencies - - /** Top level import dependencies are registered as coming from a first top level - * class/trait/object declared in the compilation unit. If none exists, issue warning. 
- */ - private var _responsibleForImports: Symbol = _ - private def responsibleForImports(using Context) = { - def firstClassOrModule(tree: Tree) = { - val acc = new TreeAccumulator[Symbol] { - def apply(x: Symbol, t: Tree)(using Context) = - t match { - case typeDef: TypeDef => - typeDef.symbol - case other => - foldOver(x, other) - } - } - acc(NoSymbol, tree) - } - - if (_responsibleForImports == null) { - val tree = ctx.compilationUnit.tpdTree - _responsibleForImports = firstClassOrModule(tree) - if (!_responsibleForImports.exists) - report.warning("""|No class, trait or object is defined in the compilation unit. - |The incremental compiler cannot record the dependency information in such case. - |Some errors like unused import referring to a non-existent class might not be reported. - |""".stripMargin, tree.sourcePos) - } - _responsibleForImports - } - - private var lastOwner: Symbol = _ - private var lastDepSource: Symbol = _ - - /** - * Resolves dependency source (that is, the closest non-local enclosing - * class from a given `ctx.owner` - */ - private def resolveDependencySource(using Context): Symbol = { - def nonLocalEnclosingClass = { - var clazz = ctx.owner.enclosingClass - var owner = clazz - - while (!owner.is(PackageClass)) { - if (owner.isTerm) { - clazz = owner.enclosingClass - owner = clazz - } else { - owner = owner.owner - } - } - clazz - } - - if (lastOwner != ctx.owner) { - lastOwner = ctx.owner - val source = nonLocalEnclosingClass - lastDepSource = if (source.is(PackageClass)) responsibleForImports else source - } - - lastDepSource - } - - private def addUsedName(fromClass: Symbol, name: Name, scope: UseScope): Unit = { - val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) - usedName.update(name, scope) - } - - private def addUsedName(name: Name, scope: UseScope)(using Context): Unit = { - val fromClass = resolveDependencySource - if (fromClass.exists) { // can happen when visiting imports - assert(fromClass.isClass) - addUsedName(fromClass, name, scope) - } - } - private def addMemberRefDependency(sym: Symbol)(using Context): Unit = if (!ignoreDependency(sym)) { - val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass - assert(enclOrModuleClass.isClass, s"$enclOrModuleClass, $sym") + rec.addUsedName(sym, UseScope.Default) + // packages have class symbol. Only record them as used names but not dependency + if (!sym.is(Package)) { + val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass + assert(enclOrModuleClass.isClass, s"$enclOrModuleClass, $sym") - val fromClass = resolveDependencySource - if (fromClass.exists) { // can happen when visiting imports - assert(fromClass.isClass) - - addUsedName(fromClass, sym.zincMangledName, UseScope.Default) - // packages have class symbol. Only record them as used names but not dependency - if (!sym.is(Package)) { - _dependencies += ClassDependency(fromClass, enclOrModuleClass, DependencyByMemberRef) - } + rec.addClassDependency(enclOrModuleClass, DependencyByMemberRef) } } @@ -305,15 +158,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT // If the tpt is empty, this is a non-SAM lambda, so no need to register // an inheritance relationship. 
if !tree.tpt.isEmpty then - val from = resolveDependencySource - _dependencies += ClassDependency(from, tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) + rec.addClassDependency(tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) private def addInheritanceDependencies(tree: Template)(using Context): Unit = if (tree.parents.nonEmpty) { val depContext = depContextOf(tree.symbol.owner) - val from = resolveDependencySource for parent <- tree.parents do - _dependencies += ClassDependency(from, parent.tpe.classSymbol, depContext) + rec.addClassDependency(parent.tpe.classSymbol, depContext) } private def depContextOf(cls: Symbol)(using Context): DependencyContext = @@ -351,7 +202,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT for sel <- selectors if !sel.isWildcard do addImported(sel.name) if sel.rename != sel.name then - addUsedName(sel.rename, UseScope.Default) + rec.addUsedRawName(sel.rename, UseScope.Default) case exp @ Export(expr, selectors) => val dep = expr.tpe.classSymbol if dep.exists && selectors.exists(_.isWildcard) then @@ -364,8 +215,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT // inheritance dependency in the presence of wildcard exports // to ensure all new members of `dep` are forwarded to. val depContext = depContextOf(ctx.owner.lexicallyEnclosingClass) - val from = resolveDependencySource - _dependencies += ClassDependency(from, dep, depContext) + rec.addClassDependency(dep, depContext) case t: TypeTree => addTypeDependency(t.tpe) case ref: RefTree => @@ -472,10 +322,185 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT val traverser = new TypeDependencyTraverser { def addDependency(symbol: Symbol) = if (!ignoreDependency(symbol) && symbol.is(Sealed)) { - val usedName = symbol.zincMangledName - addUsedName(usedName, UseScope.PatMatTarget) + rec.addUsedName(symbol, UseScope.PatMatTarget) } } traverser.traverse(tpe) } } + +case class ClassDependency(fromClass: Symbol, toClass: Symbol, context: DependencyContext) + +/** Record dependencies using `addUsedName`/`addClassDependency` and inform Zinc using `sendToZinc()`. + * + * Note: As an alternative design choice, we could directly call the appropriate + * callback as we record each dependency, this way we wouldn't need to record + * them locally and we could get rid of `sendToZinc()`, but this may be less + * efficient since it would mean calling `classNameAsString` on each call + * to `addUsedName` rather than once per class. + */ +class DependencyRecorder { + import ExtractDependencies.* + + /** A map from a non-local class to the names it uses, this does not include + * names which are only defined and not referenced. + */ + def usedNames: collection.Map[Symbol, UsedNamesInClass] = _usedNames + + /** Record a reference to the name of `sym` used according to `scope` + * from the current non-local enclosing class. + */ + def addUsedName(sym: Symbol, scope: UseScope)(using Context): Unit = + addUsedRawName(sym.zincMangledName, scope) + + /** Record a reference to `name` used according to `scope` + * from the current non-local enclosing class. + * + * Most of the time, prefer to sue `addUsedName` which takes + * care of name mangling. 
+ */ + def addUsedRawName(name: Name, scope: UseScope)(using Context): Unit = { + val fromClass = resolveDependencySource + if (fromClass.exists) { + val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) + usedName.update(name, scope) + } + } + + private val _classDependencies = new mutable.HashSet[ClassDependency] + + def classDependencies: Set[ClassDependency] = _classDependencies + + /** Record a dependency to the class `to` in a given `context` + * from the current non-local enclosing class. + */ + def addClassDependency(toClass: Symbol, context: DependencyContext)(using Context): Unit = + val fromClass = resolveDependencySource + if (fromClass.exists) + _classDependencies += ClassDependency(fromClass, toClass, context) + + private val _usedNames = new mutable.HashMap[Symbol, UsedNamesInClass] + + /** Send the collected dependency information to Zinc and clear the local caches. */ + def sendToZinc()(using Context): Unit = + ctx.withIncCallback: cb => + usedNames.foreach: + case (clazz, usedNames) => + val className = classNameAsString(clazz) + usedNames.names.foreach: + case (usedName, scopes) => + cb.usedName(className, usedName.toString, scopes) + classDependencies.foreach(recordClassDependency(cb, _)) + _usedNames.clear() + _classDependencies.clear() + + /** Handles dependency on given symbol by trying to figure out if represents a term + * that is coming from either source code (not necessarily compiled in this compilation + * run) or from class file and calls respective callback method. + */ + private def recordClassDependency(cb: interfaces.IncrementalCallback, dep: ClassDependency)(using Context): Unit = { + val fromClassName = classNameAsString(dep.fromClass) + val sourceFile = ctx.compilationUnit.source + + def binaryDependency(file: Path, binaryClassName: String) = + cb.binaryDependency(file, binaryClassName, fromClassName, sourceFile, dep.context) + + def processExternalDependency(depFile: AbstractFile, binaryClassName: String) = { + depFile match { + case ze: ZipArchive#Entry => // The dependency comes from a JAR + ze.underlyingSource match + case Some(zip) if zip.jpath != null => + binaryDependency(zip.jpath, binaryClassName) + case _ => + case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem + binaryDependency(pf.jpath, binaryClassName) + case _ => + internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.fromClass.srcPos) + } + } + + val depFile = dep.toClass.associatedFile + if (depFile != null) { + // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) + def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance + val depClassFile = + if depFile.isClass then depFile + else depFile.resolveSibling(dep.toClass.binaryClassName + ".class") + if (depClassFile != null) { + // Dependency is external -- source is undefined + processExternalDependency(depClassFile, dep.toClass.binaryClassName) + } else if (allowLocal || depFile != sourceFile.file) { + // We cannot ignore dependencies coming from the same source file because + // the dependency info needs to propagate. See source-dependencies/trait-trait-211. 
+ val toClassName = classNameAsString(dep.toClass) + cb.classDependency(toClassName, fromClassName, dep.context) + } + } + } + + private var lastOwner: Symbol = _ + private var lastDepSource: Symbol = _ + + /** The source of the dependency according to `nonLocalEnclosingClass` + * if it exists, otherwise fall back to `responsibleForImports`. + * + * This is backed by a cache which is invalidated when `ctx.owner` changes. + */ + private def resolveDependencySource(using Context): Symbol = { + if (lastOwner != ctx.owner) { + lastOwner = ctx.owner + val source = nonLocalEnclosingClass + lastDepSource = if (source.is(PackageClass)) responsibleForImports else source + } + + lastDepSource + } + + /** The closest non-local enclosing class from `ctx.owner`. */ + private def nonLocalEnclosingClass(using Context): Symbol = { + var clazz = ctx.owner.enclosingClass + var owner = clazz + + while (!owner.is(PackageClass)) { + if (owner.isTerm) { + clazz = owner.enclosingClass + owner = clazz + } else { + owner = owner.owner + } + } + clazz + } + + private var _responsibleForImports: Symbol = _ + + /** Top level import dependencies are registered as coming from a first top level + * class/trait/object declared in the compilation unit. If none exists, issue a warning and return NoSymbol. + */ + private def responsibleForImports(using Context) = { + import tpd.* + def firstClassOrModule(tree: Tree) = { + val acc = new TreeAccumulator[Symbol] { + def apply(x: Symbol, t: Tree)(using Context) = + t match { + case typeDef: TypeDef => + typeDef.symbol + case other => + foldOver(x, other) + } + } + acc(NoSymbol, tree) + } + + if (_responsibleForImports == null) { + val tree = ctx.compilationUnit.tpdTree + _responsibleForImports = firstClassOrModule(tree) + if (!_responsibleForImports.exists) + report.warning("""|No class, trait or object is defined in the compilation unit. + |The incremental compiler cannot record the dependency information in such case. + |Some errors like unused import referring to a non-existent class might not be reported. + |""".stripMargin, tree.sourcePos) + } + _responsibleForImports + } +} From 54e2f59f885fe25b5ef7dca2593a5f391749a72f Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sat, 29 Jul 2023 00:23:29 +0200 Subject: [PATCH 2/3] Properly support `xsbti.compile.IncOptions#useOptimizedSealed` This option is meant to avoid overcompilation involving sealed classes. By default, Zinc invalidates all usages of a sealed class when a child is added or removed, even though `val x: Sealed = new Child1` will work the same if `Sealed` gets an extra child. When this option is on, Zinc only takes children into account if the name usage is reported with `UseScope.PatMatTarget`. Note that currently this is only set when traversing the scrutinee of a pattern match (just like in the Scala 2 implementation), but to safely turn this option on by default we would need to kept track of all call to `children` and `sealedDescendants` in the compiler. There were two issues related to this option before this commit: - ExtractAPI was unnecessarily extracting the `@Child` annotation, so the API hash of a sealed class changed even with `useOptimizedSealed` enabled. - When registering a used name with `UseScope.PatMatTarget` we should also register it with `UseScope.Default` because (part of) the scrutinee type might not be a sealed class, in which case it won't have a name hash for PatMatTarget but should still lead to invalidations. 
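For illustration only (hypothetical user code, not part of this patch), with `useOptimizedSealed` enabled the two scopes behave as follows, assuming `Sealed.scala` and `Client.scala` are separate files:

    // Sealed.scala
    sealed trait Sealed
    class Child1 extends Sealed

    // Client.scala
    class Decl:
      // `Sealed` is only reported with UseScope.Default here, so adding
      // a Child2 to Sealed does not invalidate Decl.
      val x: Sealed = new Child1

    class Matcher:
      // The scrutinee type is sealed, so `Sealed` is reported with both
      // UseScope.Default and UseScope.PatMatTarget; adding a Child2 to
      // Sealed invalidates Matcher.
      def show(s: Sealed): String = s match
        case _: Child1 => "child1"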
Since we only ever use two values for the `UseScope` parameter of the `usedName` callback, we create two vals `DefaultScopes` and `PatMatScopes` that we reuse for performance. --- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 11 +- .../tools/dotc/sbt/ExtractDependencies.scala | 101 ++++++++++++------ .../useOptimizedSealed/Sealed.scala | 2 + .../useOptimizedSealed/Test.scala | 3 + .../useOptimizedSealed/build.sbt | 29 +++++ .../useOptimizedSealed/changes/Sealed1.scala | 3 + .../useOptimizedSealed/changes/Sealed2.scala | 4 + .../useOptimizedSealed/changes/Test1.scala | 7 ++ .../project/CompileState.scala | 4 + .../useOptimizedSealed/test | 24 +++++ 10 files changed, 150 insertions(+), 38 deletions(-) create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/Test.scala create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/build.sbt create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala create mode 100644 sbt-test/source-dependencies/useOptimizedSealed/test diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 5ecf17be32a9..6398fe613b12 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -676,11 +676,16 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { // In the Scala2 ExtractAPI phase we only extract annotations that extend // StaticAnnotation, but in Dotty we currently pickle all annotations so we - // extract everything (except annotations missing from the classpath which - // we simply skip over, and inline body annotations which are handled above). + // extract everything, except: + // - annotations missing from the classpath which we simply skip over + // - inline body annotations which are handled above + // - the Child annotation since we already extract children via + // `api.ClassLike#childrenOfSealedClass` and adding this annotation would + // lead to overcompilation when using zinc's + // `IncOptions#useOptimizedSealed`. 
s.annotations.foreach { annot => val sym = annot.symbol - if sym.exists && sym != defn.BodyAnnot then + if sym.exists && sym != defn.BodyAnnot && sym != defn.ChildAnnot then annots += apiAnnotation(annot) } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 175ac21d54bf..bf5fccb1c5e2 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -108,29 +108,6 @@ object ExtractDependencies { report.error(em"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) } -/** An object that maintain the set of used names from within a class */ -private final class UsedNamesInClass { - private val _names = new mutable.HashMap[Name, EnumSet[UseScope]] - def names: collection.Map[Name, EnumSet[UseScope]] = _names - - def update(name: Name, scope: UseScope): Unit = { - val scopes = _names.getOrElseUpdate(name, EnumSet.noneOf(classOf[UseScope])) - scopes.add(scope) - } - - override def toString(): String = { - val builder = new StringBuilder - names.foreach { case (name, scopes) => - builder.append(name.mangledString) - builder.append(" in [") - scopes.forEach(scope => builder.append(scope.toString)) - builder.append("]") - builder.append(", ") - } - builder.toString() - } -} - /** Extract the dependency information of a compilation unit. * * To understand why we track the used names see the section "Name hashing @@ -144,7 +121,7 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. private def addMemberRefDependency(sym: Symbol)(using Context): Unit = if (!ignoreDependency(sym)) { - rec.addUsedName(sym, UseScope.Default) + rec.addUsedName(sym) // packages have class symbol. Only record them as used names but not dependency if (!sym.is(Package)) { val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass @@ -202,7 +179,7 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. for sel <- selectors if !sel.isWildcard do addImported(sel.name) if sel.rename != sel.name then - rec.addUsedRawName(sel.rename, UseScope.Default) + rec.addUsedRawName(sel.rename) case exp @ Export(expr, selectors) => val dep = expr.tpe.classSymbol if dep.exists && selectors.exists(_.isWildcard) then @@ -322,7 +299,7 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. val traverser = new TypeDependencyTraverser { def addDependency(symbol: Symbol) = if (!ignoreDependency(symbol) && symbol.is(Sealed)) { - rec.addUsedName(symbol, UseScope.PatMatTarget) + rec.addUsedName(symbol, includeSealedChildren = true) } } traverser.traverse(tpe) @@ -347,26 +324,80 @@ class DependencyRecorder { */ def usedNames: collection.Map[Symbol, UsedNamesInClass] = _usedNames - /** Record a reference to the name of `sym` used according to `scope` - * from the current non-local enclosing class. + /** Record a reference to the name of `sym` from the current non-local + * enclosing class. + * + * @param includeSealedChildren See documentation of `addUsedRawName`. 
*/ - def addUsedName(sym: Symbol, scope: UseScope)(using Context): Unit = - addUsedRawName(sym.zincMangledName, scope) + def addUsedName(sym: Symbol, includeSealedChildren: Boolean = false)(using Context): Unit = + addUsedRawName(sym.zincMangledName, includeSealedChildren) - /** Record a reference to `name` used according to `scope` - * from the current non-local enclosing class. + /** Record a reference to `name` from the current non-local enclosing class (aka, "from class"). * - * Most of the time, prefer to sue `addUsedName` which takes + * Most of the time, prefer to use `addUsedName` which takes * care of name mangling. + * + * Zinc will use this information to invalidate the current non-local + * enclosing class if something changes in the set of definitions named + * `name` among the possible dependencies of the from class. + * + * @param includeSealedChildren If true, the addition or removal of children + * to a sealed class called `name` will also + * invalidate the from class. + * Note that this only has an effect if zinc's + * `IncOptions.useOptimizedSealed` is enabled, + * otherwise the addition or removal of children + * always lead to invalidation. + * + * TODO: If the compiler reported to zinc all usages of + * `SymDenotation#{children,sealedDescendants}` (including from macro code), + * we should be able to turn `IncOptions.useOptimizedSealed` on by default + * safely. */ - def addUsedRawName(name: Name, scope: UseScope)(using Context): Unit = { + def addUsedRawName(name: Name, includeSealedChildren: Boolean = false)(using Context): Unit = { val fromClass = resolveDependencySource if (fromClass.exists) { val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) - usedName.update(name, scope) + usedName.update(name, includeSealedChildren) } } + // The two possible value of `UseScope`. To avoid unnecessary allocations, + // we use vals here, but that means we must be careful to never mutate these sets. + private val DefaultScopes = EnumSet.of(UseScope.Default) + private val PatMatScopes = EnumSet.of(UseScope.Default, UseScope.PatMatTarget) + + /** An object that maintain the set of used names from within a class */ + final class UsedNamesInClass { + /** Each key corresponds to a name used in the class. To understand the meaning + * of the associated value, see the documentation of parameter `includeSealedChildren` + * of `addUsedRawName`. 
+ */ + private val _names = new mutable.HashMap[Name, DefaultScopes.type | PatMatScopes.type] + + def names: collection.Map[Name, EnumSet[UseScope]] = _names + + private[DependencyRecorder] def update(name: Name, includeSealedChildren: Boolean): Unit = { + if (includeSealedChildren) + _names(name) = PatMatScopes + else + _names.getOrElseUpdate(name, DefaultScopes) + } + + override def toString(): String = { + val builder = new StringBuilder + names.foreach { case (name, scopes) => + builder.append(name.mangledString) + builder.append(" in [") + scopes.forEach(scope => builder.append(scope.toString)) + builder.append("]") + builder.append(", ") + } + builder.toString() + } + } + + private val _classDependencies = new mutable.HashSet[ClassDependency] def classDependencies: Set[ClassDependency] = _classDependencies diff --git a/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala b/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala new file mode 100644 index 000000000000..06c191cba9ed --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala @@ -0,0 +1,2 @@ +sealed trait Sealed +class Child1 extends Sealed diff --git a/sbt-test/source-dependencies/useOptimizedSealed/Test.scala b/sbt-test/source-dependencies/useOptimizedSealed/Test.scala new file mode 100644 index 000000000000..086e359babc4 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/Test.scala @@ -0,0 +1,3 @@ +class Test { + val s: Sealed = new Child1 +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/build.sbt b/sbt-test/source-dependencies/useOptimizedSealed/build.sbt new file mode 100644 index 000000000000..1c4c78828a55 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/build.sbt @@ -0,0 +1,29 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +ThisBuild / incOptions ~= { _.withUseOptimizedSealed(true) } + +import sbt.internal.inc.Analysis +import complete.DefaultParsers._ + +// Reset compiler iterations, necessary because tests run in batch mode +val recordPreviousIterations = taskKey[Unit]("Record previous iterations.") +recordPreviousIterations := { + val log = streams.value.log + CompileState.previousIterations = { + val previousAnalysis = (previousCompile in Compile).value.analysis.asScala + previousAnalysis match { + case None => + log.info("No previous analysis detected") + 0 + case Some(a: Analysis) => a.compilations.allCompilations.size + } + } +} + +val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.") + +checkIterations := { + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - CompileState.previousIterations + assert(expected == actual, s"Expected $expected compilations, got $actual") +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala new file mode 100644 index 000000000000..7ce9b1119871 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala @@ -0,0 +1,3 @@ +sealed trait Sealed +class Child1 extends Sealed +class Child2 extends Sealed diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala new file mode 100644 index 000000000000..ccf27d31d219 --- /dev/null +++ 
b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala @@ -0,0 +1,4 @@ +sealed trait Sealed +class Child1 extends Sealed +class Child2 extends Sealed +class Child3 extends Sealed diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala new file mode 100644 index 000000000000..559fef4d481a --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala @@ -0,0 +1,7 @@ +class Test { + def foo(x: Sealed): Int = x match + case _: Child1 => 1 + case _: Child2 => 1 + + val s: Sealed = new Child1 +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala b/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala new file mode 100644 index 000000000000..078db9c7bf56 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala @@ -0,0 +1,4 @@ +// This is necessary because tests are run in batch mode +object CompileState { + @volatile var previousIterations: Int = -1 +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/test b/sbt-test/source-dependencies/useOptimizedSealed/test new file mode 100644 index 000000000000..6680e9aab923 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/test @@ -0,0 +1,24 @@ +# Compile Sealed.scala and Test.scala +> compile +> recordPreviousIterations + +# Add an extra children to Sealed +$ copy-file changes/Sealed1.scala Sealed.scala + +# Only Sealed.scala needs to be recompiled because Test.scala does not +# match on a value of type `Sealed`. +> compile +> checkIterations 1 + +> clean +$ copy-file changes/Test1.scala Test.scala +> compile +> recordPreviousIterations + +# Add an extra children to Sealed again +$ copy-file changes/Sealed2.scala Sealed.scala + +# Test.scala will be recompiled in a second round because it matches +# on a value of type `Sealed`. +> compile +> checkIterations 2 From ec59c319e88998509e8c70f87671a805f0a9f078 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Fri, 24 Feb 2023 16:18:17 +0100 Subject: [PATCH 3/3] Make incremental compilation aware of synthesized mirrors A product mirror needs to be resynthesized if any class parameter changes, and a sum mirror needs to be resynthesized if any child of the sealed type changes, but previously this did not reliably work because the dependency recording in ExtractDependencies was unaware of mirrors. Instead of making ExtractDependencies aware of mirrors, we solve this by directly recording the dependencies when the mirror is synthesized, this way we can be sure to always correctly invalidate users of mirrors, even if the synthesized mirror type is not present in the AST at phase ExtractDependencies. This is the first time that we record dependencies outside of the ExtractDependencies phase, in the future we should see if we can extend this mechanism to record more dependencies during typechecking to make incremental compilation more robust (e.g. by keeping track of symbols looked up by macros). Eventually, we might even want to completely get rid of the ExtractDependencies phase and record all dependencies on the fly if it turns out to be faster. 
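To make the problem concrete, consider a hypothetical client (not part of this patch; the real scenario is covered by `sbt-test/source-dependencies/mirror-product`). `Client` refers to `MyProduct` only as a type and nothing in its trees uses the constructor, so without recording the dependency at synthesis time a change to the case class parameters would not invalidate `Client`:

    // MyProduct.scala
    case class MyProduct(x: Int)

    // Client.scala
    import scala.deriving.Mirror

    class Client:
      // The mirror is synthesized during Typer; adding a field to
      // MyProduct must trigger recompilation of Client even though no
      // tree here refers to MyProduct's constructor.
      val m = summon[Mirror.ProductOf[MyProduct]]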
--- .../dotty/tools/dotc/CompilationUnit.scala | 9 +++++++++ .../tools/dotc/sbt/ExtractDependencies.scala | 13 ++++++++++--- .../dotty/tools/dotc/typer/Synthesizer.scala | 19 ++++++++++++++++++- .../mirror-product/MyProduct.scala | 1 + .../mirror-product/Test.scala | 10 ++++++++++ .../mirror-product/build.sbt | 1 + .../mirror-product/changes/MyProduct.scala | 1 + .../source-dependencies/mirror-product/test | 7 +++++++ .../source-dependencies/mirror-sum/Sum.scala | 2 ++ .../source-dependencies/mirror-sum/Test.scala | 12 ++++++++++++ .../source-dependencies/mirror-sum/build.sbt | 4 ++++ .../mirror-sum/changes/Sum.scala | 3 +++ sbt-test/source-dependencies/mirror-sum/test | 7 +++++++ 13 files changed, 85 insertions(+), 4 deletions(-) create mode 100644 sbt-test/source-dependencies/mirror-product/MyProduct.scala create mode 100644 sbt-test/source-dependencies/mirror-product/Test.scala create mode 100644 sbt-test/source-dependencies/mirror-product/build.sbt create mode 100644 sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala create mode 100644 sbt-test/source-dependencies/mirror-product/test create mode 100644 sbt-test/source-dependencies/mirror-sum/Sum.scala create mode 100644 sbt-test/source-dependencies/mirror-sum/Test.scala create mode 100644 sbt-test/source-dependencies/mirror-sum/build.sbt create mode 100644 sbt-test/source-dependencies/mirror-sum/changes/Sum.scala create mode 100644 sbt-test/source-dependencies/mirror-sum/test diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index c121fbaf7c00..a906d52ccd4e 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -73,11 +73,20 @@ class CompilationUnit protected (val source: SourceFile) { /** List of all comments present in this compilation unit */ var comments: List[Comment] = Nil + /** This is used to record dependencies to invalidate during incremental + * compilation, but only if `ctx.runZincPhases` is true. + */ + val depRecorder: sbt.DependencyRecorder = sbt.DependencyRecorder() + /** Suspends the compilation unit by thowing a SuspendException * and recording the suspended compilation unit */ def suspend()(using Context): Nothing = assert(isSuspendable) + // Clear references to symbols that may become stale. No need to call + // `depRecorder.sendToZinc()` since all compilation phases will be rerun + // when this unit is unsuspended. + depRecorder.clear() if !suspended then if (ctx.settings.XprintSuspension.value) report.echo(i"suspended: $this") diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index bf5fccb1c5e2..ef9c939c2ea7 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -69,7 +69,7 @@ class ExtractDependencies extends Phase { override def run(using Context): Unit = { val unit = ctx.compilationUnit - val rec = DependencyRecorder() + val rec = unit.depRecorder val collector = ExtractDependenciesCollector(rec) collector.traverse(unit.tpdTree) @@ -422,8 +422,15 @@ class DependencyRecorder { case (usedName, scopes) => cb.usedName(className, usedName.toString, scopes) classDependencies.foreach(recordClassDependency(cb, _)) - _usedNames.clear() - _classDependencies.clear() + clear() + + /** Clear all state. 
*/ + def clear(): Unit = + _usedNames.clear() + _classDependencies.clear() + lastOwner = NoSymbol + lastDepSource = NoSymbol + _responsibleForImports = NoSymbol /** Handles dependency on given symbol by trying to figure out if represents a term * that is coming from either source code (not necessarily compiled in this compilation diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 0a87a95120ae..92e73700af65 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -19,6 +19,9 @@ import ast.Trees.genericEmptyTree import annotation.{tailrec, constructorOnly} import ast.tpd._ import Synthesizer._ +import sbt.ExtractDependencies.* +import sbt.ClassDependency +import xsbti.api.DependencyContext._ /** Synthesize terms for special classes */ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): @@ -458,7 +461,14 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val reason = s"it reduces to a tuple with arity $arity, expected arity <= $maxArity" withErrors(i"${defn.PairClass} is not a generic product because $reason") case MirrorSource.ClassSymbol(pre, cls) => - if cls.isGenericProduct then makeProductMirror(pre, cls, None) + if cls.isGenericProduct then + if ctx.runZincPhases then + // The mirror should be resynthesized if the constructor of the + // case class `cls` changes. See `sbt-test/source-dependencies/mirror-product`. + val rec = ctx.compilationUnit.depRecorder + rec.addClassDependency(cls, DependencyByMemberRef) + rec.addUsedName(cls.primaryConstructor) + makeProductMirror(pre, cls, None) else withErrors(i"$cls is not a generic product because ${cls.whyNotGenericProduct}") case Left(msg) => withErrors(i"type `$mirroredType` is not a generic product because $msg") @@ -478,6 +488,13 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val clsIsGenericSum = cls.isGenericSum(pre) if acceptableMsg.isEmpty && clsIsGenericSum then + if ctx.runZincPhases then + // The mirror should be resynthesized if any child of the sealed class + // `cls` changes. See `sbt-test/source-dependencies/mirror-sum`. 
+ val rec = ctx.compilationUnit.depRecorder + rec.addClassDependency(cls, DependencyByMemberRef) + rec.addUsedName(cls, includeSealedChildren = true) + val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString))) def internalError(msg: => String)(using Context): Unit = diff --git a/sbt-test/source-dependencies/mirror-product/MyProduct.scala b/sbt-test/source-dependencies/mirror-product/MyProduct.scala new file mode 100644 index 000000000000..acad1358f62b --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/MyProduct.scala @@ -0,0 +1 @@ +case class MyProduct(x: Int) diff --git a/sbt-test/source-dependencies/mirror-product/Test.scala b/sbt-test/source-dependencies/mirror-product/Test.scala new file mode 100644 index 000000000000..e53d7b999517 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/Test.scala @@ -0,0 +1,10 @@ +import scala.deriving.Mirror +import scala.compiletime.erasedValue + +transparent inline def foo[T](using m: Mirror.Of[T]): Int = + inline erasedValue[m.MirroredElemTypes] match + case _: (Int *: EmptyTuple) => 1 + case _: (Int *: String *: EmptyTuple) => 2 + +@main def Test = + assert(foo[MyProduct] == 2) diff --git a/sbt-test/source-dependencies/mirror-product/build.sbt b/sbt-test/source-dependencies/mirror-product/build.sbt new file mode 100644 index 000000000000..63e314982c41 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/build.sbt @@ -0,0 +1 @@ +scalaVersion := sys.props("plugin.scalaVersion") diff --git a/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala b/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala new file mode 100644 index 000000000000..87e5af62bd7e --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala @@ -0,0 +1 @@ +case class MyProduct(x: Int, y: String) diff --git a/sbt-test/source-dependencies/mirror-product/test b/sbt-test/source-dependencies/mirror-product/test new file mode 100644 index 000000000000..fbcd15fa4153 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/test @@ -0,0 +1,7 @@ +> compile + +# change the case class constructor +$ copy-file changes/MyProduct.scala MyProduct.scala + +# Both MyProduct.scala and Test.scala should be recompiled, otherwise the assertion will fail +> run diff --git a/sbt-test/source-dependencies/mirror-sum/Sum.scala b/sbt-test/source-dependencies/mirror-sum/Sum.scala new file mode 100644 index 000000000000..782f5a89d913 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/Sum.scala @@ -0,0 +1,2 @@ +sealed trait Sum +case class Child1() extends Sum diff --git a/sbt-test/source-dependencies/mirror-sum/Test.scala b/sbt-test/source-dependencies/mirror-sum/Test.scala new file mode 100644 index 000000000000..9cb6e2c78d64 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/Test.scala @@ -0,0 +1,12 @@ +import scala.deriving.Mirror +import scala.compiletime.erasedValue + +object Test: + transparent inline def foo[T](using m: Mirror.Of[T]): Int = + inline erasedValue[m.MirroredElemLabels] match + case _: ("Child1" *: EmptyTuple) => 1 + case _: ("Child1" *: "Child2" *: EmptyTuple) => 2 + + def main(args: Array[String]): Unit = + assert(foo[Sum] == 2) + diff --git a/sbt-test/source-dependencies/mirror-sum/build.sbt b/sbt-test/source-dependencies/mirror-sum/build.sbt new file mode 100644 index 000000000000..de89f34feb3b --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/build.sbt @@ -0,0 +1,4 @@ +scalaVersion := sys.props("plugin.scalaVersion") +// Use more precise 
invalidation, otherwise the reference to `Sum` in +// Test.scala is enough to invalidate it when a child is added. +ThisBuild / incOptions ~= { _.withUseOptimizedSealed(true) } diff --git a/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala b/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala new file mode 100644 index 000000000000..13ec68223ed4 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala @@ -0,0 +1,3 @@ +sealed trait Sum +case class Child1() extends Sum +case class Child2() extends Sum diff --git a/sbt-test/source-dependencies/mirror-sum/test b/sbt-test/source-dependencies/mirror-sum/test new file mode 100644 index 000000000000..baf5f17f9905 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/test @@ -0,0 +1,7 @@ +> compile + +# Add a child +$ copy-file changes/Sum.scala Sum.scala + +# Both Sum.scala and Test.scala should be recompiled, otherwise the assertion will fail +> run