forked from higherkindness/rules_scala
-
Notifications
You must be signed in to change notification settings - Fork 5
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Use Zinc's new ConsistentAnalysisStore
This is a new analysis store format added to Zinc by Databricks that is deterministic. Given two identical Zinc states (after applying the read/write mappers) the outputs should be identical. As an added bonus it is faster and smaller than the previous format. See this PR for more info: sbt/zinc#1326 This means we can stop most of the work we're doing to make the Zinc analysis output more deterministic and just rely on this new analysis format.
- Loading branch information
James Judd
committed
Jul 16, 2024
1 parent
2ad06e5
commit 1af8fe7
Showing
17 changed files
with
301 additions
and
962 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
33 changes: 33 additions & 0 deletions
33
src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,33 @@ | ||
package higherkindness.rules_scala | ||
package workers.common | ||
|
||
import java.io.File | ||
import java.nio.file.Paths | ||
import sbt.internal.inc.Analysis | ||
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore | ||
import xsbti.compile.AnalysisStore | ||
import xsbti.compile.analysis.ReadWriteMappers | ||
|
||
object AnalysisUtil {

  /**
   * Opens an [[AnalysisStore]] for the given file using Zinc's deterministic
   * ConsistentFileAnalysisStore.
   *
   * @param analysisStoreFile file backing the analysis store
   * @param debug when true, use the human-readable text format; otherwise the
   *        smaller/faster binary format
   * @param isIncremental forwarded to the read mappers so timestamps are
   *        re-read from disk in incremental mode
   */
  def getAnalysisStore(analysisStoreFile: File, debug: Boolean, isIncremental: Boolean): AnalysisStore = {
    // Root is the empty (current) path; the mappers relativize everything under it.
    val readWriteMappers = AnnexMapper.mappers(Paths.get(""), isIncremental)

    // sort = true keeps the serialized output deterministic in both formats.
    if (debug) {
      ConsistentFileAnalysisStore.text(analysisStoreFile, readWriteMappers, sort = true)
    } else {
      ConsistentFileAnalysisStore.binary(analysisStoreFile, readWriteMappers, sort = true)
    }
  }

  /**
   * Reads the analysis out of a store.
   *
   * NOTE(review): calls `Optional.get` on the store contents, so this throws if
   * the store is empty — presumably callers only invoke it on populated stores.
   */
  def getAnalysis(analysisStore: AnalysisStore): Analysis = {
    val contents = analysisStore.get().get()
    contents.getAnalysis.asInstanceOf[Analysis]
  }
}
150 changes: 150 additions & 0 deletions
150
src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,150 @@ | ||
package higherkindness.rules_scala | ||
package workers.common | ||
|
||
import com.google.devtools.build.buildjar.jarhelper.JarHelper | ||
import java.io.{File, InputStream, OutputStream, OutputStreamWriter} | ||
import java.nio.charset.StandardCharsets | ||
import java.nio.file.{Files, NoSuchFileException, Path, Paths} | ||
import java.nio.file.attribute.FileTime | ||
import java.util | ||
import java.util.concurrent.ConcurrentHashMap | ||
import java.util.LinkedHashMap | ||
import java.util.zip.{GZIPInputStream, GZIPOutputStream} | ||
import java.util.Optional | ||
import sbt.internal.inc.binary.converters.{ProtobufReaders, ProtobufWriters} | ||
import sbt.internal.inc.Schema.Type.{Projection, Structure} | ||
import sbt.internal.inc.{APIs, Analysis, FarmHash, Hash, LastModified, PlainVirtualFile, PlainVirtualFileConverter, Relations, Schema, SourceInfos, Stamp => StampImpl, Stamper, Stamps} | ||
import sbt.internal.inc.Schema.{Access, AnalyzedClass, Annotation, AnnotationArgument, ClassDefinition, ClassDependencies, ClassLike, Companions, MethodParameter, NameHash, ParameterList, Path => SchemaPath, Qualifier, Type, TypeParameter, UsedName, UsedNames, Values} | ||
import sbt.internal.shaded.com.google.protobuf.GeneratedMessageV3 | ||
import sbt.io.IO | ||
import scala.collection.immutable.TreeMap | ||
import xsbti.compile.analysis.{GenericMapper, ReadMapper, ReadWriteMappers, Stamp, WriteMapper} | ||
import xsbti.compile.{AnalysisContents, AnalysisStore, MiniSetup} | ||
import scala.jdk.CollectionConverters._ | ||
import xsbti.VirtualFileRef | ||
import java.util.Objects | ||
|
||
object AnnexMapper {
  // Placeholder prefix written in place of the absolute workspace root so that
  // serialized analysis files do not contain machine-specific paths.
  val rootPlaceholder = Paths.get("_ROOT_")

  /**
   * Builds the read/write mapper pair used when (de)serializing analysis stores.
   *
   * @param root workspace root; paths under it are swapped with [[rootPlaceholder]]
   * @param isIncremental when true, LastModified stamps are re-read from disk on read
   */
  def mappers(root: Path, isIncremental: Boolean): ReadWriteMappers = {
    new ReadWriteMappers(new AnxReadMapper(root, isIncremental), new AnxWriteMapper(root))
  }

  /**
   * Gets a reproducible/consistent stamp that we can write to the analysis file and end up with reproducible output
   * across machines, jvms, builds, etc.
   *
   * Practically speaking, all we're doing is setting the timestamp in LastModified stamps to a constant value.
   */
  final def getConsistentWriteStamp(stamp: Stamp): Stamp = {
    stamp match {
      case farmHash: FarmHash => farmHash
      case hash: Hash => hash
      // Binding is unused: replace the real mtime with a constant for determinism.
      case _: LastModified => new LastModified(JarHelper.DEFAULT_TIMESTAMP)
      case _ => throw new Exception("Unexpected Stamp type encountered when writing.")
    }
  }

  /**
   * Maps a stamp read from the analysis store, honoring incremental mode.
   *
   * @return the stamp unchanged in non-incremental mode; otherwise the
   *         incremental-mode stamp (see [[getIncrementalModeReadStamp]])
   */
  final def getReadStamp(file: VirtualFileRef, stamp: Stamp, isIncremental: Boolean): Stamp = {
    if (isIncremental) {
      getIncrementalModeReadStamp(file, stamp)
    } else {
      stamp
    }
  }

  /**
   * When in incremental mode we do not want to rely on the timestamp from the AnalysisStore because we're assuming it
   * was set to a constant value when written to the AnalysisStore.
   *
   * Instead, for any LastModified stamps, we read the file's time stamp from disk.
   */
  final def getIncrementalModeReadStamp(file: VirtualFileRef, stamp: Stamp): Stamp = {
    stamp match {
      case farmHash: FarmHash => farmHash
      case hash: Hash => hash
      case _: LastModified =>
        // Content-hash stamps pass through; only timestamps are refreshed from disk.
        Stamper.forLastModifiedP(PlainVirtualFileConverter.converter.toPath(file))
      case _ => throw new Exception("Unexpected Stamp type encountered when reading")
    }
  }
}
|
||
/**
 * Write-side mapper: rewrites any path under the workspace root to start with
 * the `_ROOT_` placeholder, and makes LastModified stamps constant, so the
 * serialized analysis is machine-independent.
 */
final class AnxWriteMapper(root: Path) extends WriteMapper {
  // Absolute form of the workspace root used for the startsWith/relativize check.
  private[this] val absoluteRoot = root.toAbsolutePath

  // Paths outside the root pass through untouched.
  private[this] def toPlaceholderPath(path: Path): Path =
    if (path.startsWith(absoluteRoot))
      AnnexMapper.rootPlaceholder.resolve(absoluteRoot.relativize(path))
    else
      path

  private[this] def toPlaceholderPath(ref: VirtualFileRef): Path =
    toPlaceholderPath(PlainVirtualFileConverter.converter.toPath(ref))

  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(toPlaceholderPath(sourceFile))
  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(toPlaceholderPath(binaryFile))
  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(toPlaceholderPath(productFile))

  override def mapClasspathEntry(classpathEntry: Path): Path = toPlaceholderPath(classpathEntry)

  // Compiler options are written verbatim.
  override def mapJavacOption(javacOption: String): String = javacOption
  override def mapScalacOption(scalacOption: String): String = scalacOption

  override def mapOutputDir(outputDir: Path): Path = toPlaceholderPath(outputDir)
  override def mapSourceDir(sourceDir: Path): Path = toPlaceholderPath(sourceDir)

  // All stamp kinds are normalized the same way on write: timestamps become a
  // constant so the output is reproducible.
  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp =
    AnnexMapper.getConsistentWriteStamp(sourceStamp)
  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp =
    AnnexMapper.getConsistentWriteStamp(binaryStamp)
  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp =
    AnnexMapper.getConsistentWriteStamp(productStamp)

  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}
|
||
/**
 * Read-side mapper: the inverse of [[AnxWriteMapper]]. Paths beginning with the
 * `_ROOT_` placeholder are resolved back under the actual workspace root; in
 * incremental mode, LastModified stamps are refreshed from the filesystem.
 */
final class AnxReadMapper(root: Path, isIncremental: Boolean) extends ReadMapper {
  // Absolute workspace root that placeholder-relative paths are resolved against.
  private[this] val absoluteRoot = root.toAbsolutePath

  private[this] def fromPlaceholderPath(ref: VirtualFileRef): Path =
    fromPlaceholderPath(PlainVirtualFileConverter.converter.toPath(ref))

  // Paths not starting with the placeholder pass through untouched.
  private[this] def fromPlaceholderPath(path: Path): Path =
    if (path.startsWith(AnnexMapper.rootPlaceholder))
      absoluteRoot.resolve(AnnexMapper.rootPlaceholder.relativize(path))
    else
      path

  override def mapSourceFile(sourceFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(fromPlaceholderPath(sourceFile))
  override def mapBinaryFile(binaryFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(fromPlaceholderPath(binaryFile))
  override def mapProductFile(productFile: VirtualFileRef): VirtualFileRef =
    PlainVirtualFile(fromPlaceholderPath(productFile))

  override def mapClasspathEntry(classpathEntry: Path): Path = fromPlaceholderPath(classpathEntry)

  // Compiler options are read verbatim.
  override def mapJavacOption(javacOption: String): String = javacOption
  override def mapScalacOption(scalacOption: String): String = scalacOption

  override def mapOutputDir(outputDir: Path): Path = fromPlaceholderPath(outputDir)
  override def mapSourceDir(sourceDir: Path): Path = fromPlaceholderPath(sourceDir)

  // Stamps are adjusted per incremental mode: timestamps come from disk when
  // incremental, otherwise the stored stamp is trusted as-is.
  override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp =
    AnnexMapper.getReadStamp(file, sourceStamp, isIncremental)
  override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp =
    AnnexMapper.getReadStamp(file, binaryStamp, isIncremental)
  override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp =
    AnnexMapper.getReadStamp(file, productStamp, isIncremental)

  override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.