-
Notifications
You must be signed in to change notification settings - Fork 1.1k
Export diagnostics (including unused warnings) to SemanticDB #17835
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 7 commits
75e6bb6
c5a80c4
e1d0ec9
f17fb5f
d2b54c2
56c5909
50e0fd2
2babbc4
5186b62
66a5306
a414fae
d5065ec
d7258b4
4b5d3e7
3008ed8
c9de8e2
40a2a00
7b29f4c
4f6a092
a6dfec2
3daeaa6
499c347
71a5cd0
b28b425
d54d8a2
b8565a0
2857632
053e644
567d486
275e6fa
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
package dotty.tools.dotc.semanticdb | ||
|
||
import dotty.tools.dotc.reporting.Diagnostic | ||
import dotty.tools.dotc.{semanticdb => s} | ||
import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING} | ||
import dotty.tools.dotc.core.Contexts.Context | ||
|
||
object DiagnosticOps: | ||
extension (d: Diagnostic) | ||
def toSemanticDiagnostic(using Context): s.Diagnostic = | ||
val severity = d.level match | ||
case ERROR => s.Diagnostic.Severity.ERROR | ||
case WARNING => s.Diagnostic.Severity.WARNING | ||
case INFO => s.Diagnostic.Severity.INFORMATION | ||
case _ => s.Diagnostic.Severity.INFORMATION | ||
s.Diagnostic( | ||
range = Scala3.range(d.pos.span, d.pos.source), | ||
severity = severity, | ||
message = d.msg.message | ||
) |
Original file line number | Diff line number | Diff line change | ||||
---|---|---|---|---|---|---|
|
@@ -21,20 +21,40 @@ import transform.SymUtils._ | |||||
|
||||||
import scala.collection.mutable | ||||||
import scala.annotation.{ threadUnsafe => tu, tailrec } | ||||||
import scala.jdk.CollectionConverters._ | ||||||
import scala.PartialFunction.condOpt | ||||||
import typer.ImportInfo.withRootImports | ||||||
|
||||||
import dotty.tools.dotc.{semanticdb => s} | ||||||
import dotty.tools.io.{AbstractFile, JarArchive} | ||||||
import dotty.tools.dotc.util.Property | ||||||
tanishiking marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
import dotty.tools.dotc.semanticdb.DiagnosticOps.* | ||||||
import scala.util.{Using, Failure, Success} | ||||||
import com.google.protobuf.Empty | ||||||
import com.google.protobuf.UnknownFieldSet | ||||||
import com.google.protobuf.UnknownFieldSet.Field | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. these are java protobuf classes - you should be able to get a slightly better syntax (but no significant difference in performance) with classes in the There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Unfortunately, we cannot utilize the There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. there used to be a dependency on protobuf due to Zinc 1.3, but we merged yesterday depending exclusively on Zinc 1.9 which no longer has protobuf There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Ah, ok. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I'm not sure whether we should do without it. I'm going to look into whether there is an API that reads protobuf as an Empty in There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Could we merge the current state or one with the fixes? This would unblock scalafix, so it would be pretty great to do. We could do optimization improvements later? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I'll move back to "parsing TextDocuments and updating it" anyway :) -> done d5065ec |
||||||
import java.io.ByteArrayOutputStream | ||||||
import java.io.BufferedOutputStream | ||||||
tanishiking marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
|
||||||
|
||||||
/** Extract symbol references and uses to semanticdb files. | ||||||
* See https://scalameta.org/docs/semanticdb/specification.html#symbol-1 | ||||||
* for a description of the format. | ||||||
* TODO: Also extract type information | ||||||
* | ||||||
* Here, we define two phases for "ExtractSemanticDB", "PostTyper" and "PostInlining". | ||||||
* | ||||||
* The "PostTyper" phase extracts SemanticDB information such as symbol | ||||||
* definitions, symbol occurrences, type information, and synthetics | ||||||
* and write .semanticdb file. | ||||||
* | ||||||
* The "PostInlining" phase extracts diagnostics from "ctx.reporter" and | ||||||
* attaches them to the SemanticDB information extracted in the "PostTyper" phase. | ||||||
* We need to run this phase after the "CheckUnused.PostInlining" phase | ||||||
* so that we can extract the warnings generated by "-Wunused". | ||||||
*/ | ||||||
class ExtractSemanticDB extends Phase: | ||||||
import Scala3.{_, given} | ||||||
class ExtractSemanticDB private (phaseMode: ExtractSemanticDB.PhaseMode, suffix: String) extends Phase: | ||||||
tanishiking marked this conversation as resolved.
Show resolved
Hide resolved
tanishiking marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
|
||||||
override val phaseName: String = ExtractSemanticDB.name | ||||||
override val phaseName: String = ExtractSemanticDB.phaseNamePrefix + suffix | ||||||
|
||||||
override val description: String = ExtractSemanticDB.description | ||||||
|
||||||
|
@@ -46,14 +66,141 @@ class ExtractSemanticDB extends Phase: | |||||
// Check not needed since it does not transform trees | ||||||
override def isCheckable: Boolean = false | ||||||
|
||||||
override def run(using Context): Unit = | ||||||
val unit = ctx.compilationUnit | ||||||
val extractor = Extractor() | ||||||
extractor.extract(unit.tpdTree) | ||||||
ExtractSemanticDB.write(unit.source, extractor.occurrences.toList, extractor.symbolInfos.toList, extractor.synthetics.toList) | ||||||
override def runOn(units: List[CompilationUnit])(using ctx: Context): List[CompilationUnit] = { | ||||||
val appendDiagnostics = phaseMode == ExtractSemanticDB.PhaseMode.AppendDiagnostics | ||||||
if (appendDiagnostics) | ||||||
val warnings = ctx.reporter.allWarnings.groupBy(w => w.pos.source) | ||||||
units.asJava.parallelStream().forEach { unit => | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. also I think here we should extract the unit source, and do the conversion to SemanticDiagnostic before we start parallelising |
||||||
val unitCtx = ctx.fresh.setCompilationUnit(unit).withRootImports | ||||||
warnings.get(unit.source).foreach { ws => | ||||||
ExtractSemanticDB.appendDiagnostics(unit.source, ws.map(_.toSemanticDiagnostic)) | ||||||
} | ||||||
} | ||||||
else | ||||||
units.foreach { unit => | ||||||
val extractor = ExtractSemanticDB.Extractor() | ||||||
extractor.extract(unit.tpdTree) | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. not quite sure but there could be a slight difference here given that the extractor isn't running with |
||||||
ExtractSemanticDB.write(unit.source, extractor.occurrences.toList, extractor.symbolInfos.toList, extractor.synthetics.toList) | ||||||
} | ||||||
units | ||||||
} | ||||||
|
||||||
def run(using Context): Unit = unsupported("run") | ||||||
end ExtractSemanticDB | ||||||
|
||||||
object ExtractSemanticDB: | ||||||
import java.nio.file.Path | ||||||
import java.nio.file.Files | ||||||
import java.nio.file.Paths | ||||||
|
||||||
val phaseNamePrefix: String = "extractSemanticDB" | ||||||
val description: String = "extract info into .semanticdb files" | ||||||
|
||||||
enum PhaseMode: | ||||||
case ExtractSemanticInfo | ||||||
case AppendDiagnostics | ||||||
|
||||||
class ExtractSemanticInfo extends ExtractSemanticDB(PhaseMode.ExtractSemanticInfo, "ExtractSemanticInfo") | ||||||
|
||||||
class AppendDiagnostics extends ExtractSemanticDB(PhaseMode.AppendDiagnostics, "AppendDiagnostics") | ||||||
|
||||||
private def semanticdbTarget(using Context): Option[Path] = | ||||||
Option(ctx.settings.semanticdbTarget.value) | ||||||
.filterNot(_.isEmpty) | ||||||
.map(Paths.get(_)) | ||||||
|
||||||
private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value | ||||||
|
||||||
private def absolutePath(path: Path): Path = path.toAbsolutePath.normalize | ||||||
|
||||||
private def write( | ||||||
source: SourceFile, | ||||||
occurrences: List[SymbolOccurrence], | ||||||
symbolInfos: List[SymbolInformation], | ||||||
synthetics: List[Synthetic], | ||||||
)(using Context): Unit = | ||||||
val outpath = semanticdbPath(source) | ||||||
Files.createDirectories(outpath.getParent()) | ||||||
val doc: TextDocument = TextDocument( | ||||||
schema = Schema.SEMANTICDB4, | ||||||
language = Language.SCALA, | ||||||
uri = Tools.mkURIstring(Paths.get(relPath(source))), | ||||||
text = "", | ||||||
md5 = internal.MD5.compute(String(source.content)), | ||||||
symbols = symbolInfos, | ||||||
occurrences = occurrences, | ||||||
synthetics = synthetics, | ||||||
) | ||||||
val docs = TextDocuments(List(doc)) | ||||||
val out = Files.newOutputStream(outpath) | ||||||
try | ||||||
val stream = internal.SemanticdbOutputStream.newInstance(out) | ||||||
docs.writeTo(stream) | ||||||
stream.flush() | ||||||
finally | ||||||
out.close() | ||||||
end write | ||||||
|
||||||
private def appendDiagnostics( | ||||||
source: SourceFile, | ||||||
tanishiking marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
diagnostics: Seq[Diagnostic] | ||||||
)(using Context): Unit = | ||||||
tanishiking marked this conversation as resolved.
Show resolved
Hide resolved
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. so to make this safe to call in parallel we really need to get rid of that There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. done: d7258b4 |
||||||
val path = semanticdbPath(source) | ||||||
Using.Manager { use => | ||||||
val in = use(Files.newInputStream(path)) | ||||||
// val sin = internal.SemanticdbInputStream.newInstance(in) | ||||||
val textDocuments = Empty.parseFrom(in) | ||||||
val docsBytes = textDocuments.getUnknownFields().getField(TextDocuments.DOCUMENTS_FIELD_NUMBER).getLengthDelimitedList() | ||||||
val docFields = Empty.parseFrom(docsBytes.get(0)).getUnknownFields() | ||||||
if (source.file.name == "ValPattern.scala") | ||||||
println(docFields) | ||||||
//docMap.put(7, ) | ||||||
|
||||||
// val docs = TextDocuments.parseFrom(sin) | ||||||
|
||||||
val bos = use(new ByteArrayOutputStream()) | ||||||
val sbos = internal.SemanticdbOutputStream.newInstance(bos) | ||||||
val doc = TextDocument(diagnostics = diagnostics) | ||||||
doc.writeTo(sbos) | ||||||
sbos.flush() | ||||||
val diagnosticsOnly = Empty.parseFrom(bos.toByteArray()).getUnknownFields() | ||||||
|
||||||
val merged = docFields.toBuilder().mergeFrom(diagnosticsOnly).build() | ||||||
// println(merged) | ||||||
val field = Field.newBuilder().addLengthDelimited(merged.toByteString()).build() | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I am AFK so I can't confirm, but I believe you should pass to |
||||||
|
||||||
val fields = textDocuments.getUnknownFields().toBuilder().mergeField(TextDocuments.DOCUMENTS_FIELD_NUMBER, field).build() | ||||||
// println(fields) | ||||||
val updated = textDocuments.toBuilder().setUnknownFields(fields).build() | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. you should start from a fresh |
||||||
if (source.file.name == "ValPattern.scala") | ||||||
println(updated) | ||||||
|
||||||
val out = use(Files.newOutputStream(path)) | ||||||
val bout = new BufferedOutputStream(out) | ||||||
updated.writeTo(bout) | ||||||
bout.flush() | ||||||
// val sout = internal.SemanticdbOutputStream.newInstance(out) | ||||||
// TextDocuments(docs.documents.map(_.withDiagnostics(diagnostics))).writeTo(sout) | ||||||
} match | ||||||
case Failure(ex) => | ||||||
println(ex.getMessage()) | ||||||
// failed somehow, should we say something? | ||||||
case Success(_) => // success to update semanticdb, say nothing | ||||||
end appendDiagnostics | ||||||
|
||||||
private def relPath(source: SourceFile)(using ctx: Context) = | ||||||
SourceFile.relativePath(source, ctx.settings.sourceroot.value) | ||||||
|
||||||
private def semanticdbPath(source: SourceFile)(using ctx: Context) = | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I believe this does not need context
Suggested change
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This is needed for private def relPath(source: SourceFile)(using ctx: Context) =
SourceFile.relativePath(source, ctx.settings.sourceroot.value) and
private def semanticdbTarget(using Context): Option[Path] =
Option(ctx.settings.semanticdbTarget.value)
.filterNot(_.isEmpty)
.map(Paths.get(_)) Should we pass around |
||||||
absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath)) | ||||||
.resolve("META-INF") | ||||||
.resolve("semanticdb") | ||||||
.resolve(relPath(source)) | ||||||
.resolveSibling(source.name + ".semanticdb") | ||||||
|
||||||
/** Extractor of symbol occurrences from trees */ | ||||||
class Extractor extends TreeTraverser: | ||||||
private class Extractor extends TreeTraverser: | ||||||
import Scala3.{_, given} | ||||||
given s.SemanticSymbolBuilder = s.SemanticSymbolBuilder() | ||||||
val synth = SyntheticsExtractor() | ||||||
given converter: s.TypeOps = s.TypeOps() | ||||||
|
@@ -468,52 +615,5 @@ class ExtractSemanticDB extends Phase: | |||||
registerSymbol(vparam.symbol, symkinds) | ||||||
traverse(vparam.tpt) | ||||||
tparams.foreach(tp => traverse(tp.rhs)) | ||||||
|
||||||
|
||||||
object ExtractSemanticDB: | ||||||
import java.nio.file.Path | ||||||
import java.nio.file.Files | ||||||
import java.nio.file.Paths | ||||||
|
||||||
val name: String = "extractSemanticDB" | ||||||
val description: String = "extract info into .semanticdb files" | ||||||
|
||||||
private def semanticdbTarget(using Context): Option[Path] = | ||||||
Option(ctx.settings.semanticdbTarget.value) | ||||||
.filterNot(_.isEmpty) | ||||||
.map(Paths.get(_)) | ||||||
|
||||||
private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value | ||||||
|
||||||
def write( | ||||||
source: SourceFile, | ||||||
occurrences: List[SymbolOccurrence], | ||||||
symbolInfos: List[SymbolInformation], | ||||||
synthetics: List[Synthetic], | ||||||
)(using Context): Unit = | ||||||
def absolutePath(path: Path): Path = path.toAbsolutePath.normalize | ||||||
val relPath = SourceFile.relativePath(source, ctx.settings.sourceroot.value) | ||||||
val outpath = absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath)) | ||||||
.resolve("META-INF") | ||||||
.resolve("semanticdb") | ||||||
.resolve(relPath) | ||||||
.resolveSibling(source.name + ".semanticdb") | ||||||
Files.createDirectories(outpath.getParent()) | ||||||
val doc: TextDocument = TextDocument( | ||||||
schema = Schema.SEMANTICDB4, | ||||||
language = Language.SCALA, | ||||||
uri = Tools.mkURIstring(Paths.get(relPath)), | ||||||
text = "", | ||||||
md5 = internal.MD5.compute(String(source.content)), | ||||||
symbols = symbolInfos, | ||||||
occurrences = occurrences, | ||||||
synthetics = synthetics, | ||||||
) | ||||||
val docs = TextDocuments(List(doc)) | ||||||
val out = Files.newOutputStream(outpath) | ||||||
try | ||||||
val stream = internal.SemanticdbOutputStream.newInstance(out) | ||||||
docs.writeTo(stream) | ||||||
stream.flush() | ||||||
finally | ||||||
out.close() | ||||||
end Extractor | ||||||
end ExtractSemanticDB |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
object Deprecated/*<-_empty_::Deprecated.*/ { | ||
@deprecated/*->scala::deprecated#*/ def deprecatedMethod/*<-_empty_::Deprecated.deprecatedMethod().*/ = ???/*->scala::Predef.`???`().*/ | ||
def main/*<-_empty_::Deprecated.main().*/ = deprecatedMethod/*->_empty_::Deprecated.deprecatedMethod().*/ | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
object Deprecated { | ||
@deprecated def deprecatedMethod = ??? | ||
def main = deprecatedMethod | ||
} |
Uh oh!
There was an error while loading. Please reload this page.