Ensure namedSetOfFiles URIs specify blob type correctly
I noticed when testing the BLAKE3 digest function that uploaded files were being referenced incorrectly in the BES because they were missing the digest type. This CL fixes that.

Before: https://github.com/bazelbuild/bazel/assets/141737/52781f1b-b897-48f0-8956-f63c57b59436

After: https://github.com/bazelbuild/bazel/assets/141737/01ebc61b-3512-4ca5-8e2d-f47ad5f086b7

Closes #19044.

PiperOrigin-RevId: 553405394
Change-Id: Ic976e5a58f80ab8b5270b4aedc6204c22562533a
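In effect, the change adds the digest function name to the bytestream URIs that the BES emits for files hashed with a newer digest function. A sketch of the two resource-name shapes produced after this change, using a hypothetical server instance name, hash, and size:

bytestream://remote.example.com/my-instance/blobs/<hash>/<size>            (legacy functions such as SHA-256)
bytestream://remote.example.com/my-instance/blobs/blake3/<hash>/<size>     (newer functions such as BLAKE3)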
tylerwilliams authored and copybara-github committed Aug 3, 2023
1 parent 7ed631e commit 1929367
Showing 3 changed files with 112 additions and 31 deletions.
src/main/java/com/google/devtools/build/lib/remote/ByteStreamBuildEventArtifactUploader.java
@@ -19,7 +19,9 @@
import static com.google.devtools.build.lib.remote.util.Utils.grpcAwareErrorMessage;

import build.bazel.remote.execution.v2.Digest;
import build.bazel.remote.execution.v2.DigestFunction;
import build.bazel.remote.execution.v2.RequestMetadata;
import com.google.common.base.Ascii;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
@@ -135,20 +137,23 @@ private static final class PathMetadata {
private final boolean symlink;
private final boolean remote;
private final boolean omitted;
private final DigestFunction.Value digestFunction;

PathMetadata(
Path path,
Digest digest,
boolean directory,
boolean symlink,
boolean remote,
boolean omitted) {
boolean omitted,
DigestFunction.Value digestFunction) {
this.path = path;
this.digest = digest;
this.directory = directory;
this.symlink = symlink;
this.remote = remote;
this.omitted = omitted;
this.digestFunction = digestFunction;
}

public Path getPath() {
@@ -174,13 +179,19 @@ public boolean isRemote() {
public boolean isOmitted() {
return omitted;
}

public DigestFunction.Value getDigestFunction() {
return digestFunction;
}
}

/**
* Collects metadata for {@code file}. Depending on the underlying filesystem used this method
* might do I/O.
*/
private PathMetadata readPathMetadata(Path path, LocalFile file) throws IOException {
DigestUtil digestUtil = new DigestUtil(xattrProvider, path.getFileSystem().getDigestFunction());

if (file.type == LocalFileType.OUTPUT_DIRECTORY
|| (file.type == LocalFileType.OUTPUT && path.isDirectory())) {
return new PathMetadata(
@@ -189,7 +200,8 @@ private PathMetadata readPathMetadata(Path path, LocalFile file) throws IOExcept
/* directory= */ true,
/* symlink= */ false,
/* remote= */ false,
/* omitted= */ false);
/* omitted= */ false,
/* digestFunction= */ digestUtil.getDigestFunction());
}
if (file.type == LocalFileType.OUTPUT_SYMLINK) {
return new PathMetadata(
@@ -198,7 +210,8 @@ private PathMetadata readPathMetadata(Path path, LocalFile file) throws IOExcept
/* directory= */ false,
/* symlink= */ true,
/* remote= */ false,
/* omitted= */ false);
/* omitted= */ false,
/* digestFunction= */ digestUtil.getDigestFunction());
}

PathFragment filePathFragment = path.asFragment();
@@ -214,10 +227,15 @@ private PathMetadata readPathMetadata(Path path, LocalFile file) throws IOExcept
}
}

DigestUtil digestUtil = new DigestUtil(xattrProvider, path.getFileSystem().getDigestFunction());
Digest digest = digestUtil.compute(path);
return new PathMetadata(
path, digest, /* directory= */ false, /* symlink= */ false, isRemoteFile(path), omitted);
path,
digest,
/* directory= */ false,
/* symlink= */ false,
isRemoteFile(path),
omitted,
digestUtil.getDigestFunction());
}

private static void processQueryResult(
@@ -235,7 +253,8 @@ private static void processQueryResult(
file.isDirectory(),
file.isSymlink(),
/* remote= */ true,
file.isOmitted());
file.isOmitted(),
file.getDigestFunction());
knownRemotePaths.add(remotePathMetadata);
}
}
@@ -336,7 +355,8 @@ private Single<List<PathMetadata>> uploadLocalFiles(
// set remote to true so the PathConverter will use bytestream://
// scheme to convert the URI for this file
/* remote= */ true,
path.isOmitted()))
path.isOmitted(),
path.getDigestFunction()))
.onErrorResumeNext(
error -> {
reportUploadError(error, path.getPath(), path.getDigest());
@@ -375,7 +395,8 @@ private Single<PathConverter> doUpload(Map<Path, LocalFile> files) {
/* directory= */ false,
/* symlink= */ false,
/* remote= */ false,
/* omitted= */ false);
/* omitted= */ false,
DigestFunction.Value.SHA256);
}
})
.collect(Collectors.toList())
@@ -419,7 +440,7 @@ public ReferenceCounted touch(Object o) {
private static class PathConverterImpl implements PathConverter {

private final String remoteServerInstanceName;
private final Map<Path, Digest> pathToDigest;
private final Map<Path, PathMetadata> pathToMetadata;
private final Set<Path> skippedPaths;
private final Set<Path> localPaths;

@@ -429,18 +450,18 @@ private static class PathConverterImpl implements PathConverter {
RemoteBuildEventUploadMode remoteBuildEventUploadMode) {
Preconditions.checkNotNull(uploads);
this.remoteServerInstanceName = remoteServerInstanceName;
pathToDigest = Maps.newHashMapWithExpectedSize(uploads.size());
pathToMetadata = Maps.newHashMapWithExpectedSize(uploads.size());
ImmutableSet.Builder<Path> skippedPaths = ImmutableSet.builder();
ImmutableSet.Builder<Path> localPaths = ImmutableSet.builder();
for (PathMetadata pair : uploads) {
Path path = pair.getPath();
Digest digest = pair.getDigest();
for (PathMetadata metadata : uploads) {
Path path = metadata.getPath();
Digest digest = metadata.getDigest();
if (digest != null) {
// Always use bytestream:// in MINIMAL mode
if (remoteBuildEventUploadMode == RemoteBuildEventUploadMode.MINIMAL) {
pathToDigest.put(path, digest);
} else if (pair.isRemote()) {
pathToDigest.put(path, digest);
pathToMetadata.put(path, metadata);
} else if (metadata.isRemote()) {
pathToMetadata.put(path, metadata);
} else {
localPaths.add(path);
}
@@ -452,6 +473,14 @@ private static class PathConverterImpl implements PathConverter {
this.localPaths = localPaths.build();
}

private static boolean isOldStyleDigestFunction(DigestFunction.Value digestFunction) {
// Old-style digest functions (SHA256, etc) are distinguishable by the length
// of their hash alone and do not require extra specification, but newer
// digest functions (which may have the same length hashes as the older
// functions!) must be explicitly specified in the upload resource name.
return digestFunction.getNumber() <= 7;
}

@Override
@Nullable
public String apply(Path path) {
@@ -461,18 +490,34 @@ public String apply(Path path) {
return String.format("file://%s", path.getPathString());
}

Digest digest = pathToDigest.get(path);
if (digest == null) {
PathMetadata metadata = pathToMetadata.get(path);
if (metadata == null) {
if (skippedPaths.contains(path)) {
return null;
}
// It's a programming error to reference a file that has not been uploaded.
throw new IllegalStateException(
String.format("Illegal file reference: '%s'", path.getPathString()));
}
return String.format(
"bytestream://%s/blobs/%s/%d",
remoteServerInstanceName, digest.getHash(), digest.getSizeBytes());

Digest digest = metadata.getDigest();
DigestFunction.Value digestFunction = metadata.getDigestFunction();
String out;
if (isOldStyleDigestFunction(digestFunction)) {
out =
String.format(
"bytestream://%s/blobs/%s/%d",
remoteServerInstanceName, digest.getHash(), digest.getSizeBytes());
} else {
out =
String.format(
"bytestream://%s/blobs/%s/%s/%d",
remoteServerInstanceName,
Ascii.toLowerCase(digestFunction.getValueDescriptor().getName()),
digest.getHash(),
digest.getSizeBytes());
}
return out;
}
}
}
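The isOldStyleDigestFunction helper above keys off the enum numbers in the remote-execution API's DigestFunction proto. As a minimal sketch (not part of this commit, and assuming the generated build.bazel.remote.execution.v2 classes are on the classpath), the distinction it draws can be illustrated like this:

import build.bazel.remote.execution.v2.DigestFunction;

public class DigestFunctionStyles {
  public static void main(String[] args) {
    for (DigestFunction.Value fn : DigestFunction.Value.values()) {
      // UNRECOGNIZED is the proto3 sentinel for unknown wire values; it has no number.
      if (fn == DigestFunction.Value.UNRECOGNIZED) {
        continue;
      }
      // Legacy functions (enum number <= 7) are identified by hash length alone, so the
      // resource name omits the function; newer ones must be named explicitly.
      boolean oldStyle = fn.getNumber() <= 7;
      String shape =
          oldStyle
              ? "blobs/{hash}/{size}"
              : "blobs/" + fn.name().toLowerCase(java.util.Locale.ROOT) + "/{hash}/{size}";
      System.out.printf("%s -> %s%n", fn, shape);
    }
  }
}

Under that cutoff, BLAKE3 (as exercised by the new test below) falls into the named group.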
1 change: 1 addition & 0 deletions src/test/java/com/google/devtools/build/lib/remote/BUILD
@@ -94,6 +94,7 @@ java_test(
"//src/main/java/com/google/devtools/build/lib/util/io",
"//src/main/java/com/google/devtools/build/lib/vfs",
"//src/main/java/com/google/devtools/build/lib/vfs:pathfragment",
"//src/main/java/com/google/devtools/build/lib/vfs/bazel",
"//src/main/java/com/google/devtools/build/lib/vfs/inmemoryfs",
"//src/main/java/com/google/devtools/common/options",
"//src/main/protobuf:failure_details_java_proto",
src/test/java/com/google/devtools/build/lib/remote/ByteStreamBuildEventArtifactUploaderTest.java
@@ -14,6 +14,7 @@
package com.google.devtools.build.lib.remote;

import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
@@ -66,6 +67,7 @@
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.SyscallCache;
import com.google.devtools.build.lib.vfs.bazel.BazelHashFunctions;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import com.google.devtools.common.options.Options;
import io.grpc.Server;
@@ -177,7 +179,7 @@ public void uploadsShouldWork() throws Exception {
filesToUpload.put(
file,
new LocalFile(
file, LocalFileType.OUTPUT, /*artifact=*/ null, /*artifactMetadata=*/ null));
file, LocalFileType.OUTPUT, /* artifact= */ null, /* artifactMetadata= */ null));
}
serviceRegistry.addService(new MaybeFailOnceUploadService(blobsByHash));

@@ -220,7 +222,7 @@ public void uploadsShouldWork_fewerPermitsThanUploads() throws Exception {
filesToUpload.put(
file,
new LocalFile(
file, LocalFileType.OUTPUT, /*artifact=*/ null, /*artifactMetadata=*/ null));
file, LocalFileType.OUTPUT, /* artifact= */ null, /* artifactMetadata= */ null));
}
serviceRegistry.addService(new MaybeFailOnceUploadService(blobsByHash));

@@ -310,6 +312,33 @@ public void testUnknown_uploadedIfFile() throws Exception {
artifactUploader.release();
}

@Test
public void testUnknown_uploadedIfFileBlake3() throws Exception {
assumeNotNull(BazelHashFunctions.BLAKE3);

FileSystem fs = new InMemoryFileSystem(new JavaClock(), BazelHashFunctions.BLAKE3);
Path file = fs.getPath("/file");
file.getOutputStream().close();
Map<Path, LocalFile> filesToUpload = new HashMap<>();
filesToUpload.put(
file,
new LocalFile(
file, LocalFileType.OUTPUT, /* artifact= */ null, /* artifactMetadata= */ null));
RemoteRetrier retrier =
TestUtils.newRemoteRetrier(() -> new FixedBackoff(1, 0), (e) -> true, retryService);
ReferenceCountedChannel refCntChannel = new ReferenceCountedChannel(channelConnectionFactory);
RemoteCache remoteCache = newRemoteCache(refCntChannel, retrier);
ByteStreamBuildEventArtifactUploader artifactUploader = newArtifactUploader(remoteCache);

PathConverter pathConverter = artifactUploader.upload(filesToUpload).get();
String hash = BaseEncoding.base16().lowerCase().encode(file.getDigest());
long size = file.getFileSize();
String conversion = pathConverter.apply(file);
assertThat(conversion)
.isEqualTo("bytestream://localhost/instance/blobs/blake3/" + hash + "/" + size);
artifactUploader.release();
}

@Test
public void testUnknown_notUploadedIfDirectory() throws Exception {
Path dir = fs.getPath("/dir");
@@ -352,7 +381,7 @@ public void someUploadsFail_succeedsWithWarningMessages() throws Exception {
filesToUpload.put(
file,
new LocalFile(
file, LocalFileType.OUTPUT, /*artifact=*/ null, /*artifactMetadata=*/ null));
file, LocalFileType.OUTPUT, /* artifact= */ null, /* artifactMetadata= */ null));
}
String hashOfBlobThatShouldFail = blobsByHash.keySet().iterator().next().toString();
serviceRegistry.addService(
@@ -437,7 +466,7 @@ public void remoteFileShouldNotBeUploaded_actionFs() throws Exception {
assertThat(remotePath.getFileSystem()).isEqualTo(remoteFs);
LocalFile file =
new LocalFile(
remotePath, LocalFileType.OUTPUT, /*artifact=*/ null, /*artifactMetadata=*/ null);
remotePath, LocalFileType.OUTPUT, /* artifact= */ null, /* artifactMetadata= */ null);

// act

@@ -488,10 +517,16 @@ public void remoteFileShouldNotBeUploaded_findMissingDigests() throws Exception
ImmutableMap.of(
remoteFile,
new LocalFile(
remoteFile, LocalFileType.OUTPUT, /*artifact=*/ null, /*artifactMetadata=*/ null),
remoteFile,
LocalFileType.OUTPUT,
/* artifact= */ null,
/* artifactMetadata= */ null),
localFile,
new LocalFile(
localFile, LocalFileType.OUTPUT, /*artifact=*/ null, /*artifactMetadata=*/ null));
localFile,
LocalFileType.OUTPUT,
/* artifact= */ null,
/* artifactMetadata= */ null));
PathConverter pathConverter = artifactUploader.upload(files).get();

// assert
@@ -549,11 +584,11 @@ private ByteStreamBuildEventArtifactUploader newArtifactUploader(RemoteCache rem
return new ByteStreamBuildEventArtifactUploader(
MoreExecutors.directExecutor(),
reporter,
/*verboseFailures=*/ true,
/* verboseFailures= */ true,
remoteCache,
/*remoteServerInstanceName=*/ "localhost/instance",
/*buildRequestId=*/ "none",
/*commandId=*/ "none",
/* remoteServerInstanceName= */ "localhost/instance",
/* buildRequestId= */ "none",
/* commandId= */ "none",
SyscallCache.NO_CACHE,
RemoteBuildEventUploadMode.ALL);
}