Use Zinc's new ConsistentAnalysisStore
This is a new analysis store format, added to Zinc by Databricks, that is
deterministic: given two identical Zinc states (after applying the
read/write mappers), the outputs should be identical.

As an added bonus, it is faster and smaller than the previous format. See
this PR for more info: sbt/zinc#1326

This means we can stop most of the work we're doing to make the Zinc
analysis output more deterministic and just rely on this new analysis
format.
James Judd committed Jul 16, 2024
1 parent 2ad06e5 commit 1af8fe7
Showing 17 changed files with 301 additions and 962 deletions.
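
To illustrate the determinism claim in the commit message, here is a minimal sketch — not part of this commit, and the DeterminismCheck object is hypothetical — using the ConsistentFileAnalysisStore API referenced in sbt/zinc#1326. Writing the same analysis contents twice should produce byte-identical files:

import java.io.File
import java.nio.file.Files
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
import xsbti.compile.AnalysisContents
import xsbti.compile.analysis.ReadWriteMappers

object DeterminismCheck {
  // Write the same analysis contents to two stores and compare raw bytes.
  // `contents` is assumed to come from a previous Zinc compile.
  def check(contents: AnalysisContents): Unit = {
    val fileA = File.createTempFile("analysis_a", ".gz")
    val fileB = File.createTempFile("analysis_b", ".gz")
    val mappers = ReadWriteMappers.getEmptyMappers()
    ConsistentFileAnalysisStore.binary(fileA, mappers, sort = true).set(contents)
    ConsistentFileAnalysisStore.binary(fileB, mappers, sort = true).set(contents)
    assert(Files.readAllBytes(fileA.toPath).sameElements(Files.readAllBytes(fileB.toPath)))
  }
}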
4 changes: 2 additions & 2 deletions rules/private/phases/phase_test_launcher.bzl
@@ -16,7 +16,7 @@ load(
 #
 
 def phase_test_launcher(ctx, g):
-    files = ctx.attr._target_jdk[java_common.JavaRuntimeInfo].files.to_list() + [g.compile.zinc_info.apis]
+    files = ctx.attr._target_jdk[java_common.JavaRuntimeInfo].files.to_list() + [g.compile.zinc_info.analysis_store]
 
     coverage_replacements = {}
     coverage_runner_jars = depset(direct = [])
@@ -35,7 +35,7 @@ def phase_test_launcher(ctx, g):
     all_jars = [test_jars, runner_jars]
 
     args = ctx.actions.args()
-    args.add("--apis", g.compile.zinc_info.apis.short_path)
+    args.add("--analysis_store", g.compile.zinc_info.analysis_store.short_path)
     args.add_all("--frameworks", ctx.attr.frameworks)
     if ctx.attr.isolation == "classloader":
         shared_deps = java_common.merge(_collect(JavaInfo, ctx.attr.shared_deps))
42 changes: 22 additions & 20 deletions rules/private/phases/phase_zinc_compile.bzl
@@ -23,14 +23,9 @@ def phase_zinc_compile(ctx, g):
     scala_configuration = ctx.attr.scala[_ScalaConfiguration]
     zinc_configuration = ctx.attr.scala[_ZincConfiguration]
 
-    apis = ctx.actions.declare_file("{}/apis.gz".format(ctx.label.name))
-    infos = ctx.actions.declare_file("{}/infos.gz".format(ctx.label.name))
+    analysis_store = ctx.actions.declare_file("{}/analysis_store.gz".format(ctx.label.name))
     mains_file = ctx.actions.declare_file("{}.jar.mains.txt".format(ctx.label.name))
-    relations = ctx.actions.declare_file("{}/relations.gz".format(ctx.label.name))
-    setup = ctx.actions.declare_file("{}/setup.gz".format(ctx.label.name))
-    stamps = ctx.actions.declare_file("{}/stamps.gz".format(ctx.label.name))
     used = ctx.actions.declare_file("{}/deps_used.txt".format(ctx.label.name))
 
     tmp = ctx.actions.declare_directory("{}/tmp".format(ctx.label.name))
 
     javacopts = [
@@ -53,12 +48,8 @@
     args.add_all(javacopts, format_each = "--java_compiler_option=%s")
     args.add(ctx.label, format = "--label=%s")
     args.add("--main_manifest", mains_file)
-    args.add("--output_apis", apis)
-    args.add("--output_infos", infos)
+    args.add("--output_analysis_store", analysis_store)
     args.add("--output_jar", g.classpaths.jar)
-    args.add("--output_relations", relations)
-    args.add("--output_setup", setup)
-    args.add("--output_stamps", stamps)
     args.add("--output_used", used)
     args.add_all("--plugins", g.classpaths.plugin)
     args.add_all("--source_jars", g.classpaths.src_jars)
@@ -81,7 +72,18 @@
         ] + [zinc.deps_files for zinc in zincs],
     )
 
-    outputs = [g.classpaths.jar, mains_file, apis, infos, relations, setup, stamps, used, tmp]
+    outputs = [g.classpaths.jar, mains_file, analysis_store, used, tmp]
 
+    execution_requirements_tags = {
+        "supports-multiplex-workers": "1",
+        "supports-workers": "1",
+    }
+
+    # Disable sandboxing if incremental compilation features are going to be used
+    # because they require stashing files outside the sandbox that Bazel isn't
+    # aware of.
+    if zinc_configuration.incremental:
+        execution_requirements_tags["no-sandbox"] = "1"
+
     # todo: different execution path for nosrc jar?
     ctx.actions.run(
@@ -90,7 +92,10 @@
         outputs = outputs,
         executable = worker.files_to_run.executable,
         input_manifests = input_manifests,
-        execution_requirements = _resolve_execution_reqs(ctx, {"no-sandbox": "1", "supports-multiplex-workers": "1", "supports-workers": "1"}),
+        execution_requirements = _resolve_execution_reqs(
+            ctx,
+            execution_requirements_tags,
+        ),
         arguments = [args],
     )
 
@@ -99,16 +104,14 @@
         jars.append(jar.class_jar)
         jars.append(jar.ijar)
     zinc_info = _ZincInfo(
-        apis = apis,
-        deps_files = depset([apis, relations], transitive = [zinc.deps_files for zinc in zincs]),
+        analysis_store = analysis_store,
+        deps_files = depset([analysis_store], transitive = [zinc.deps_files for zinc in zincs]),
         label = ctx.label,
-        relations = relations,
         deps = depset(
             [struct(
-                apis = apis,
+                analysis_store = analysis_store,
                 jars = tuple(jars),
                 label = ctx.label,
-                relations = relations,
             )],
             transitive = [zinc.deps for zinc in zincs],
         ),
@@ -126,6 +129,5 @@ def _compile_analysis(analysis):
     return [
         "--analysis",
         "_{}".format(analysis.label),
-        analysis.apis.path,
-        analysis.relations.path,
+        analysis.analysis_store.path,
     ] + [jar.path for jar in analysis.jars]
4 changes: 2 additions & 2 deletions rules/providers.bzl
@@ -66,6 +66,7 @@ ZincConfiguration = provider(
         "compiler_bridge": "compiled Zinc compiler bridge",
         "compile_worker": "the worker label for compilation with Zinc",
         "log_level": "log level for the Zinc compiler",
+        "incremental": "whether incremental compilation will be available for this Zinc compiler",
     },
 )
 
@@ -156,11 +157,10 @@
 ZincInfo = provider(
     doc = "Zinc-specific outputs.",
     fields = {
-        "apis": "The API file.",
+        "analysis_store": "The analysis store file.",
         "deps": "The depset of library dependency outputs.",
         "deps_files": "The depset of all Zinc files.",
         "label": "The label for this output.",
-        "relations": "The relations file.",
     },
 )
 
4 changes: 4 additions & 0 deletions rules/scala.bzl
@@ -536,6 +536,10 @@ _configure_zinc_scala = rule(
         ),
         "deps_direct": attr.string(default = "error"),
         "deps_used": attr.string(default = "error"),
+        "incremental": attr.bool(
+            doc = "Whether Zinc's incremental compilation will be available for this Zinc compiler. If True, this requires additional configuration to use incremental compilation.",
+            default = False,
+        ),
         "_compile_worker": attr.label(
             default = "@rules_scala_annex//src/main/scala/higherkindness/rules_scala/workers/zinc/compile",
             allow_files = True,
1 change: 1 addition & 0 deletions rules/scala/private/provider.bzl
@@ -47,6 +47,7 @@ def configure_zinc_scala_implementation(ctx):
             compile_worker = ctx.attr._compile_worker,
             compiler_bridge = ctx.file.compiler_bridge,
             log_level = ctx.attr.log_level,
+            incremental = ctx.attr.incremental,
         ),
         _DepsConfiguration(
             direct = ctx.attr.deps_direct,
@@ -25,6 +25,9 @@ scala_library(
     srcs = _common_srcs,
     scala = "//src/main/scala:zinc",
     visibility = ["//visibility:public"],
+    deps_used_whitelist = [
+        "@annex//:org_scala_lang_scala_library",
+    ],
     deps = [
         "@annex//:org_scala_lang_modules_scala_xml_2_13",
         "@annex//:org_scala_sbt_test_interface",
@@ -0,0 +1,33 @@
package higherkindness.rules_scala
package workers.common

import java.io.File
import java.nio.file.Paths
import sbt.internal.inc.Analysis
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
import xsbti.compile.AnalysisStore
import xsbti.compile.analysis.ReadWriteMappers

object AnalysisUtil {
def getAnalysisStore(analysisStoreFile: File, debug: Boolean, isIncremental: Boolean): AnalysisStore = {
val readWriteMappers = AnnexMapper.mappers(Paths.get(""), isIncremental)

if (debug) {
ConsistentFileAnalysisStore.text(
analysisStoreFile,
readWriteMappers,
sort = true,
)
} else {
ConsistentFileAnalysisStore.binary(
analysisStoreFile,
readWriteMappers,
sort = true,
)
}
}

def getAnalysis(analysisStore: AnalysisStore): Analysis = {
analysisStore.get().get().getAnalysis.asInstanceOf[Analysis]
}
}
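
For context on how this helper is meant to be called, a hypothetical usage sketch (the ReadAnalysisExample object and the path are made up; analysis_store.gz matches the output name declared in phase_zinc_compile.bzl):

import java.io.File
import higherkindness.rules_scala.workers.common.AnalysisUtil

object ReadAnalysisExample {
  def main(args: Array[String]): Unit = {
    // Load a previously written analysis store and inspect its relations.
    val store = AnalysisUtil.getAnalysisStore(
      new File("bazel-bin/example/analysis_store.gz"), // hypothetical path
      debug = false,
      isIncremental = false,
    )
    val analysis = AnalysisUtil.getAnalysis(store)
    println(analysis.relations)
  }
}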
@@ -0,0 +1,150 @@
package higherkindness.rules_scala
package workers.common

import com.google.devtools.build.buildjar.jarhelper.JarHelper
import java.io.{File, InputStream, OutputStream, OutputStreamWriter}
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, NoSuchFileException, Path, Paths}
import java.nio.file.attribute.FileTime
import java.util
import java.util.concurrent.ConcurrentHashMap
import java.util.LinkedHashMap
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
import java.util.Optional
import sbt.internal.inc.binary.converters.{ProtobufReaders, ProtobufWriters}
import sbt.internal.inc.Schema.Type.{Projection, Structure}
import sbt.internal.inc.{APIs, Analysis, FarmHash, Hash, LastModified, PlainVirtualFile, PlainVirtualFileConverter, Relations, Schema, SourceInfos, Stamp => StampImpl, Stamper, Stamps}
import sbt.internal.inc.Schema.{Access, AnalyzedClass, Annotation, AnnotationArgument, ClassDefinition, ClassDependencies, ClassLike, Companions, MethodParameter, NameHash, ParameterList, Path => SchemaPath, Qualifier, Type, TypeParameter, UsedName, UsedNames, Values}
import sbt.internal.shaded.com.google.protobuf.GeneratedMessageV3
import sbt.io.IO
import scala.collection.immutable.TreeMap
import xsbti.compile.analysis.{GenericMapper, ReadMapper, ReadWriteMappers, Stamp, WriteMapper}
import xsbti.compile.{AnalysisContents, AnalysisStore, MiniSetup}
import scala.jdk.CollectionConverters._
import xsbti.VirtualFileRef
import java.util.Objects

object AnnexMapper {
val rootPlaceholder = Paths.get("_ROOT_")
def mappers(root: Path, isIncremental: Boolean) = {
new ReadWriteMappers(new AnxReadMapper(root, isIncremental), new AnxWriteMapper(root))
}

/**
* Gets a reproducible/consistent stamp that we can write to the analysis file and end up with reproducible output
* across machines, jvms, builds, etc.
*
* Practically speaking, all we're doing is setting the timestamp in LastModified stamps to a constant value.
*/
final def getConsistentWriteStamp(stamp: Stamp): Stamp = {
stamp match {
case farmHash: FarmHash => farmHash
case hash: Hash => hash
case lastModified: LastModified => new LastModified(JarHelper.DEFAULT_TIMESTAMP)
case _ => throw new Exception("Unexpected Stamp type encountered when writing.")
}
}

final def getReadStamp(file: VirtualFileRef, stamp: Stamp, isIncremental: Boolean): Stamp = {
if (isIncremental) {
getIncrementalModeReadStamp(file, stamp)
} else {
stamp
}
}

/**
* When in incremental mode we do not want to rely on the timestamp from the AnalysisStore because we're assuming it
* was set to a constant value when written to the AnalysisStore.
*
* Instead, for any LastModified stamps, we read the file's time stamp from disk.
*/
final def getIncrementalModeReadStamp(file: VirtualFileRef, stamp: Stamp): Stamp = {
stamp match {
case farmHash: FarmHash => farmHash
case hash: Hash => hash
case lastModified: LastModified => {
Stamper.forLastModifiedP(PlainVirtualFileConverter.converter.toPath(file))
}
case _ => throw new Exception("Unexpected Stamp type encountered when reading")
}
}
}

final class AnxWriteMapper(root: Path) extends WriteMapper {
private[this] val rootAbs = root.toAbsolutePath

private[this] def mapFile(path: Path): Path = {
if (path.startsWith(rootAbs)) {
AnnexMapper.rootPlaceholder.resolve(rootAbs.relativize(path))
} else {
path
}
}

private[this] def mapFile(virtualFileRef: VirtualFileRef): Path = {
mapFile(PlainVirtualFileConverter.converter.toPath(virtualFileRef))
}

override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(sourceFile))
override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(binaryFile))
override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(productFile))

override def mapClasspathEntry(classpathEntry: Path): Path = mapFile(classpathEntry)
override def mapJavacOption(javacOption: String): String = javacOption
override def mapScalacOption(scalacOption: String): String = scalacOption

override def mapOutputDir(outputDir: Path): Path = mapFile(outputDir)
override def mapSourceDir(sourceDir: Path): Path = mapFile(sourceDir)

override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = {
AnnexMapper.getConsistentWriteStamp(sourceStamp)
}
override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = {
AnnexMapper.getConsistentWriteStamp(binaryStamp)
}
override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = {
AnnexMapper.getConsistentWriteStamp(productStamp)
}

override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}

final class AnxReadMapper(root: Path, isIncremental: Boolean) extends ReadMapper {
private[this] val rootAbs = root.toAbsolutePath

private[this] def mapFile(virtualFileRef: VirtualFileRef): Path = {
mapFile(PlainVirtualFileConverter.converter.toPath(virtualFileRef))
}

private[this] def mapFile(path: Path): Path = {
if (path.startsWith(AnnexMapper.rootPlaceholder)) {
rootAbs.resolve(AnnexMapper.rootPlaceholder.relativize(path))
} else {
path
}
}

override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(sourceFile))
override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(binaryFile))
override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef = PlainVirtualFile(mapFile(productFile))

override def mapClasspathEntry(classpathEntry: Path): Path = mapFile(classpathEntry)
override def mapJavacOption(javacOption: String): String = javacOption
override def mapScalacOption(scalacOption: String): String = scalacOption

override def mapOutputDir(outputDir: Path): Path = mapFile(outputDir)
override def mapSourceDir(sourceDir: Path): Path = mapFile(sourceDir)

override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = {
AnnexMapper.getReadStamp(file, sourceStamp, isIncremental)
}
override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = {
AnnexMapper.getReadStamp(file, binaryStamp, isIncremental)
}
override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = {
AnnexMapper.getReadStamp(file, productStamp, isIncremental)
}

override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}
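
A round-trip sketch of what these mappers buy (the MapperRoundTrip object and root path are hypothetical): on write, absolute paths under the work root are rewritten to the _ROOT_ placeholder so identical builds serialize identically across machines; on read, the placeholder is resolved back against the local root. LastModified stamps get analogous treatment: pinned to a constant timestamp on write and, in incremental mode, re-read from disk.

import java.nio.file.Paths
import higherkindness.rules_scala.workers.common.{AnxReadMapper, AnxWriteMapper}

object MapperRoundTrip {
  def main(args: Array[String]): Unit = {
    val root = Paths.get("/tmp/worker-root") // hypothetical work root
    val writeMapper = new AnxWriteMapper(root)
    val readMapper = new AnxReadMapper(root, false) // isIncremental = false

    val original = root.resolve("bazel-out/lib.jar")
    // Written form is machine-independent: _ROOT_/bazel-out/lib.jar
    val written = writeMapper.mapClasspathEntry(original)
    assert(written == Paths.get("_ROOT_").resolve("bazel-out/lib.jar"))

    // Reading resolves the placeholder back to an absolute local path.
    val restored = readMapper.mapClasspathEntry(written)
    assert(restored == original)
  }
}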
@@ -8,6 +8,7 @@ scala_library(
     visibility = ["//visibility:public"],
     deps = [
         "//src/main/scala/higherkindness/rules_scala/common/args",
+        "//third_party/bazel/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/jarhelper",
         "@annex//:net_sourceforge_argparse4j_argparse4j",
         "@annex//:org_scala_sbt_zinc_2_13",
     ],