
Commit a79145d

Merge pull request #9730 from dotty-staging/tree-hashing
2 parents: c2abb49 + 8f50644

File tree: 11 files changed, +152 −19 lines


compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala

Lines changed: 94 additions & 18 deletions
@@ -1,7 +1,8 @@
 package dotty.tools.dotc
 package sbt

-import ast.{Trees, tpd}
+import ExtractDependencies.internalError
+import ast.{Positioned, Trees, tpd, untpd}
 import core._
 import core.Decorators._
 import Annotations._
@@ -11,6 +12,7 @@ import Phases._
 import Trees._
 import Types._
 import Symbols._
+import Names._
 import NameOps._
 import NameKinds.DefaultGetterName
 import typer.Inliner
@@ -29,7 +31,7 @@ import scala.collection.mutable
  *
  * See the documentation of `ExtractAPICollector`, `ExtractDependencies`,
  * `ExtractDependenciesCollector` and
- * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html for more
+ * http://www.scala-sbt.org/1.x/docs/Understanding-Recompilation.html for more
  * information on incremental recompilation.
  *
  * The following flags affect this phase:
@@ -171,6 +173,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
   private val orMarker = marker("Or")
   private val byNameMarker = marker("ByName")
   private val matchMarker = marker("Match")
+  private val superMarker = marker("Super")

   /** Extract the API representation of a source file */
   def apiSource(tree: Tree): Seq[api.ClassLike] = {
@@ -513,8 +516,11 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
       apiType(tp.ref)
     case tp: TypeVar =>
       apiType(tp.underlying)
+    case SuperType(thistpe, supertpe) =>
+      val s = combineApiTypes(apiType(thistpe), apiType(supertpe))
+      withMarker(s, superMarker)
     case _ => {
-      report.warning(i"sbt-api: Unhandled type ${tp.getClass} : $tp")
+      internalError(i"Unhandled type $tp of class ${tp.getClass}")
       Constants.emptyType
     }
   }
@@ -582,13 +588,18 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
     val annots = new mutable.ListBuffer[api.Annotation]
     val inlineBody = Inliner.bodyToInline(s)
     if (!inlineBody.isEmpty) {
-      // FIXME: If the body of an inlineable method changes, all the reverse
-      // dependencies of this method need to be recompiled. sbt has no way
-      // of tracking method bodies, so as a hack we include the printed
-      // tree of the method as part of the signature we send to sbt.
-      // To do this properly we would need a way to hash trees and types in
-      // dotty itself.
-      annots += marker(inlineBody.toString)
+      // If the body of an inline def changes, all the reverse dependencies of
+      // this method need to be recompiled. sbt has no way of tracking method
+      // bodies, so we include the hash of the body of the method as part of the
+      // signature we send to sbt.
+      //
+      // FIXME: The API of a class we send to Zinc includes the signatures of
+      // inherited methods, which means that we repeatedly compute the hash of
+      // an inline def in every class that extends its owner. To avoid this we
+      // could store the hash as an annotation when pickling an inline def
+      // and retrieve it here instead of computing it on the fly.
+      val inlineBodyHash = treeHash(inlineBody)
+      annots += marker(inlineBodyHash.toString)
     }

     // In the Scala2 ExtractAPI phase we only extract annotations that extend
@@ -602,16 +613,81 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
     annots.toList
   }

+  /** Produce a hash for a tree that is as stable as possible:
+   *  it should stay the same across compiler runs, compiler instances,
+   *  JVMs, etc.
+   */
+  def treeHash(tree: Tree): Int =
+    import scala.util.hashing.MurmurHash3
+
+    def positionedHash(p: ast.Positioned, initHash: Int): Int =
+      p match
+        case p: WithLazyField[?] =>
+          p.forceIfLazy
+        case _ =>
+      // FIXME: If `p` is a tree we should probably take its type into account
+      // when hashing it, but producing a stable hash for a type is not trivial
+      // since the same type might have multiple representations, for method
+      // signatures this is already handled by `computeType` and the machinery
+      // in Zinc that generates hashes from that, if we can reliably produce
+      // stable hashes for types ourselves then we could bypass all that and
+      // send Zinc hashes directly.
+      val h = MurmurHash3.mix(initHash, p.productPrefix.hashCode)
+      iteratorHash(p.productIterator, h)
+    end positionedHash
+
+    def iteratorHash(it: Iterator[Any], initHash: Int): Int =
+      import core.Constants._
+      var h = initHash
+      while it.hasNext do
+        it.next() match
+          case p: Positioned =>
+            h = positionedHash(p, h)
+          case xs: List[?] =>
+            h = iteratorHash(xs.iterator, h)
+          case c: Constant =>
+            h = MurmurHash3.mix(h, c.tag)
+            c.tag match
+              case NullTag =>
+                // No value to hash, the tag is enough.
+              case ClazzTag =>
+                // Go through `apiType` to get a value with a stable hash, it'd
+                // be better to use Murmur here too instead of relying on
+                // `hashCode`, but that would essentially mean duplicating
+                // https://github.com/sbt/zinc/blob/develop/internal/zinc-apiinfo/src/main/scala/xsbt/api/HashAPI.scala
+                // and at that point we might as well do type hashing on our own
+                // representation.
+                val apiValue = apiType(c.typeValue)
+                h = MurmurHash3.mix(h, apiValue.hashCode)
+              case _ =>
+                h = MurmurHash3.mix(h, c.value.hashCode)
+          case n: Name =>
+            // The hashCode of the name itself is not stable across compiler instances
+            h = MurmurHash3.mix(h, n.toString.hashCode)
+          case elem =>
+            internalError(
+              i"Don't know how to produce a stable hash for `$elem` of unknown class ${elem.getClass}",
+              tree.sourcePos)
+
+            h = MurmurHash3.mix(h, elem.toString.hashCode)
+      h
+    end iteratorHash
+
+    val seed = 4 // https://xkcd.com/221
+    val h = positionedHash(tree, seed)
+    MurmurHash3.finalizeHash(h, 0)
+  end treeHash
+
   def apiAnnotation(annot: Annotation): api.Annotation = {
-    // FIXME: To faithfully extract an API we should extract the annotation tree,
-    // sbt instead wants us to extract the annotation type and its arguments,
-    // to do this properly we would need a way to hash trees and types in dotty itself,
-    // instead we use the raw string representation of the annotation tree.
-    // However, we still need to extract the annotation type in the way sbt expect
-    // because sbt uses this information to find tests to run (for example
-    // junit tests are annotated @org.junit.Test).
+    // Like with inline defs, the whole body of the annotation and not just its
+    // type is part of its API so we need to store its hash, but Zinc wants us
+    // to extract the annotation type and its arguments, so we use a dummy
+    // annotation argument to store the hash of the tree. We still need to
+    // extract the annotation type in the way Zinc expects because sbt uses this
+    // information to find tests to run (for example junit tests are
+    // annotated @org.junit.Test).
     api.Annotation.of(
       apiType(annot.tree.tpe), // Used by sbt to find tests to run
-      Array(api.AnnotationArgument.of("FULLTREE", annot.tree.toString)))
+      Array(api.AnnotationArgument.of("TREE_HASH", treeHash(annot.tree).toString)))
   }
 }
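The key idea in the change above is to hash the structure of a tree (node kinds, constants, names) rather than rely on JVM-instance-dependent `hashCode`s, so that the same inline body always produces the same value across compiler runs. The following is a minimal, self-contained sketch of that technique, not part of this commit: `Node`, `Leaf`, `Branch`, and `stableHash` are illustrative names, and the real implementation additionally special-cases `Constant`s, `Name`s, and lazy tree fields as shown in the diff.

import scala.util.hashing.MurmurHash3

sealed trait Node extends Product
case class Leaf(name: String) extends Node
case class Branch(left: Node, right: Node) extends Node

def stableHash(root: Node, seed: Int = 4): Int =
  def loop(x: Any, h: Int): Int = x match
    case p: Node =>
      // Mix in the node kind first, then fold over its fields,
      // mirroring positionedHash/iteratorHash above.
      p.productIterator.foldLeft(MurmurHash3.mix(h, p.productPrefix.hashCode)) {
        (acc, field) => loop(field, acc)
      }
    case s: String =>
      // String.hashCode is fully specified, so it is stable across JVM instances,
      // unlike the identity-based hashes of interned compiler Names.
      MurmurHash3.mix(h, s.hashCode)
    case other =>
      MurmurHash3.mix(h, other.toString.hashCode)
  MurmurHash3.finalizeHash(loop(root, seed), 0)

@main def stableHashDemo(): Unit =
  val t1 = Branch(Leaf("x"), Branch(Leaf("y"), Leaf("z")))
  val t2 = Branch(Leaf("x"), Branch(Leaf("y"), Leaf("z")))
  assert(stableHash(t1) == stableHash(t2)) // same structure => same hash, in any run
  println(stableHash(t1))

Because only structure is mixed in, two separately constructed but identical trees hash to the same value, which is exactly what lets Zinc decide that an unchanged inline body does not invalidate its callers.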

compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala

Lines changed: 6 additions & 1 deletion
@@ -16,6 +16,7 @@ import dotty.tools.dotc.core.StdNames._
 import dotty.tools.dotc.core.Symbols._
 import dotty.tools.dotc.core.Types._
 import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.util.{SrcPos, NoSourcePosition}
 import dotty.tools.io
 import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive}
 import xsbti.UseScope
@@ -129,7 +130,7 @@ class ExtractDependencies extends Phase {
           if pf.file != null then
             binaryDependency(pf.file, binaryClassName)
         case _ =>
-          report.warning(s"sbt-deps: Ignoring dependency $depFile of class ${depFile.getClass}}")
+          internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.from.srcPos)
     }
   }

@@ -171,6 +172,10 @@ class ExtractDependencies extends Phase {
 object ExtractDependencies {
   def classNameAsString(sym: Symbol)(using Context): String =
     sym.fullName.stripModuleClassSuffix.toString
+
+  /** Report an internal error in incremental compilation. */
+  def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit =
+    report.error(s"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos)
 }

 private case class ClassDependency(from: Symbol, to: Symbol, context: DependencyContext)
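For context, a hedged sketch of how the new helper is meant to be used; the real call sites are the ones in the diffs above, and the caller below is purely hypothetical:

import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.sbt.ExtractDependencies.internalError

// Hypothetical caller (illustration only): flag a bug in incremental-compilation
// bookkeeping. The message is by-name, and the position defaults to NoSourcePosition.
def reportUnhandled(value: Any)(using Context): Unit =
  internalError(s"Unhandled value $value of unknown class ${value.getClass}")

Turning these former warnings into errors makes silent drops of dependency information visible as compiler bugs instead of quietly degrading incremental compilation.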
Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+class A {
+  inline def getInline: String = {
+    class Local {
+      private val y: Int = 1
+    }
+    val a = scala.reflect.classTag[Integer]
+    println(new Local)
+    val x = 1
+    x.toString
+  }
+}
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+class B extends A
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+class C {
+  val b = new B
+  b.getInline
+}
Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+import complete.DefaultParsers._
+
+val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.")
+
+checkIterations := {
+  val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis]
+
+  val expected: Int = (Space ~> NatBasic).parsed
+  val actual: Int = analysis.compilations.allCompilations.size
+  assert(expected == actual, s"Expected $expected compilations, got $actual")
+}
+
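This input key is what the scripted test at the end of this change drives with `> checkIterations 2`: two `compile` invocations should yield exactly two compilation iterations, whereas a changed hash for `B.getInline` would force an extra invalidation round to recompile `C` and make the assertion fail.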
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+class B extends A {
+  val unrelatedChange: Int = 1
+}
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+logLevel := Level.Debug
+incOptions in ThisBuild ~= { _.withApiDebug(true) }
+incOptions in ThisBuild ~= { _.withRelationsDebug(true) }
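Assuming these are the standard Zinc incremental-compiler options, `withApiDebug(true)` makes Zinc log the API differences it detects between runs and `withRelationsDebug(true)` logs the source and binary dependency relations, which helps diagnose why a file was, or was not, invalidated when a test like this fails.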
Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+import sbt._
+import Keys._
+
+object DottyInjectedPlugin extends AutoPlugin {
+  override def requires = plugins.JvmPlugin
+  override def trigger = allRequirements
+
+  override val projectSettings = Seq(
+    scalaVersion := sys.props("plugin.scalaVersion")
+  )
+}
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % sys.props("plugin.version"))
Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+> compile
+# Force recompilation of B, B.getInline hasn't changed so C shouldn't be recompiled.
+$ copy-file changes/B1.scala B.scala
+> compile
+# One iteration for each call to `compile`
+> checkIterations 2
