Stop needlessly parsing WORKSPACE files from external repositories.
    Fixes: bazelbuild/bazel#11936

    I have an additional test, in https://github.com/aiuto/repro_11936/tree/more_tests, that proves the spurious error message is gone and that I can still refer to a workspace by name.
    I have not added it yet because it is not really clear what the correct behavior should be for external repos: should the WORKSPACE file be valid and used, or ignored? This change brings the code in line with the documentation (ignored), but the better behavior might be different.
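
    As a rough illustration only (the repository names and paths below are made up, not taken from the bug report): the warning removed by this change fired when a repository rule's name differed from the workspace(name = ...) declared inside the external repository's own WORKSPACE file, roughly like this:

        # Main repository's WORKSPACE (hypothetical):
        local_repository(
            name = "dep",            # the name the consuming build uses
            path = "/path/to/dep",
        )

        # /path/to/dep/WORKSPACE (hypothetical):
        workspace(name = "something_else")  # previously parsed and warned about; now ignored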

    RELNOTES: Stop needlessly parsing WORKSPACE files from external repositories.
    PiperOrigin-RevId: 327157715
Luca Di Grazia committed Sep 4, 2022
1 parent 514d9f4 commit d78f850
Showing 2 changed files with 19 additions and 169 deletions.
@@ -14,27 +14,19 @@

package com.google.devtools.build.lib.rules.repository;

import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.skyframe.ExternalPackageFunction;
import com.google.devtools.build.lib.skyframe.PackageValue;
import com.google.devtools.build.lib.skyframe.RepositoryValue;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;

import java.io.IOException;
import javax.annotation.Nullable;

/**
* Creates a local or remote repository and checks its WORKSPACE file.
*/
/** Creates a local or remote repository. */
public class RepositoryLoaderFunction implements SkyFunction {

@Nullable
@Override
public SkyValue compute(SkyKey skyKey, Environment env)
@@ -48,37 +40,24 @@ public SkyValue compute(SkyKey skyKey, Environment env)
if (repository == null) {
return null;
}

SkyKey workspaceKey = ExternalPackageFunction.key(
RootedPath.toRootedPath(repository.getPath(), new PathFragment("WORKSPACE")));
PackageValue workspacePackage = (PackageValue) env.getValue(workspaceKey);
if (workspacePackage == null) {
return null;
if (!repository.repositoryExists()) {
return RepositoryValue.notFound(nameFromRule);
}

RepositoryName workspaceName;
try {
String workspaceNameStr = workspacePackage.getPackage().getWorkspaceName();
workspaceName = workspaceNameStr.isEmpty()
? RepositoryName.create("") : RepositoryName.create("@" + workspaceNameStr);
} catch (LabelSyntaxException e) {
throw new IllegalStateException(e);
}

if (!workspaceName.isDefault() && !nameFromRule.equals(workspaceName)) {
Path workspacePath = repository.getPath().getRelative("WORKSPACE");
env.getListener().handle(Event.warn(Location.fromFile(workspacePath),
"Workspace name in " + workspacePath + " (" + workspaceName + ") does not match name "
+ " given in the repository's definition (" + nameFromRule + "), this will cause "
+ " a build error in future versions."));
}

return new RepositoryValue(nameFromRule, repository);
return RepositoryValue.success(nameFromRule, repository);
}

@Nullable
@Override
public String extractTag(SkyKey skyKey) {
return null;
}

/** An exception thrown by RepositoryLoaderFunction */
public static class RepositoryLoaderFunctionException extends SkyFunctionException {

/** Error reading or writing to the filesystem. */
public RepositoryLoaderFunctionException(IOException cause, Transience transience) {
super(cause, transience);
}
}
}
@@ -16,7 +16,6 @@

import static com.google.common.truth.Truth.assertThat;

import com.google.common.base.Ascii;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
@@ -32,10 +31,10 @@
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.WorkspaceFileValue;
import com.google.devtools.build.lib.packages.WorkspaceFileValue.WorkspaceFileKey;
import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions;
import com.google.devtools.build.lib.rules.repository.ManagedDirectoriesKnowledgeImpl;
import com.google.devtools.build.lib.rules.repository.ManagedDirectoriesKnowledgeImpl.ManagedDirectoriesListener;
import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils;
import com.google.devtools.build.lib.syntax.StarlarkSemantics;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
@@ -47,15 +46,9 @@
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import javax.annotation.Nullable;
import net.starlark.java.eval.StarlarkSemantics;
import net.starlark.java.syntax.ParserInput;
import net.starlark.java.syntax.StarlarkFile;
import net.starlark.java.syntax.Statement;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.junit.Test;
@@ -279,7 +272,7 @@ public void testManagedDirectories() throws Exception {
"managed_directories attribute should not contain multiple (or duplicate)"
+ " repository mappings for the same directory ('a/b').");
assertManagedDirectoriesParsingError(
"{'@repo1': [], '@repo1': [] }", "dictionary expression has duplicate key: \"@repo1\"");
"{'@repo1': [], '@repo1': [] }", "Duplicated key \"@repo1\" when creating dictionary");
assertManagedDirectoriesParsingError(
"{'@repo1': ['a/b'], '@repo2': ['a/b/c/..'] }",
"managed_directories attribute should not contain multiple (or duplicate)"
@@ -481,9 +474,7 @@ public void testDoNotSymlinkInExecroot() throws Exception {

try {
StarlarkSemantics semanticsWithNinjaActions =
StarlarkSemantics.builder()
.setBool(BuildLanguageOptions.EXPERIMENTAL_NINJA_ACTIONS, true)
.build();
StarlarkSemantics.builderWithDefaults().experimentalNinjaActions(true).build();
PrecomputedValue.STARLARK_SEMANTICS.set(injectable, semanticsWithNinjaActions);

assertThat(
@@ -597,124 +588,4 @@ public void reset() {
repositoryNames = null;
}
}

// tests of splitChunks, an internal helper function

@Test
public void testChunksNoLoad() {
assertThat(split(parse("foo_bar = 1"))).isEqualTo("[(assignment)]");
}

@Test
public void testChunksOneLoadAtTop() {
assertThat(
split(
parse(
"load('//:foo.bzl', 'bar')", //
"foo_bar = 1")))
.isEqualTo("[(load assignment)]");
}

@Test
public void testChunksOneLoad() {
assertThat(
split(
parse(
"foo_bar = 1",
//
"load('//:foo.bzl', 'bar')")))
.isEqualTo("[(assignment)][(load)]");
}

@Test
public void testChunksTwoSuccessiveLoads() {
assertThat(
split(
parse(
"foo_bar = 1",
//
"load('//:foo.bzl', 'bar')",
"load('//:bar.bzl', 'foo')")))
.isEqualTo("[(assignment)][(load load)]");
}

@Test
public void testChunksTwoSucessiveLoadsWithNonLoadStatement() {
assertThat(
split(
parse(
"foo_bar = 1",
//
"load('//:foo.bzl', 'bar')",
"load('//:bar.bzl', 'foo')",
"local_repository(name = 'foobar', path = '/bar/foo')")))
.isEqualTo("[(assignment)][(load load expression)]");
}

@Test
public void testChunksThreeLoadsThreeSegments() {
assertThat(
split(
parse(
"foo_bar = 1",
//
"load('//:foo.bzl', 'bar')",
"load('//:bar.bzl', 'foo')",
"local_repository(name = 'foobar', path = '/bar/foo')",
//
"load('@foobar//:baz.bzl', 'bleh')")))
.isEqualTo("[(assignment)][(load load expression)][(load)]");
}

@Test
public void testChunksThreeLoadsThreeSegmentsWithContent() {
assertThat(
split(
parse(
"foo_bar = 1",
//
"load('//:foo.bzl', 'bar')",
"load('//:bar.bzl', 'foo')",
"local_repository(name = 'foobar', path = '/bar/foo')",
//
"load('@foobar//:baz.bzl', 'bleh')",
"bleh()")))
.isEqualTo("[(assignment)][(load load expression)][(load expression)]");
}

@Test
public void testChunksMaySpanFiles() {
assertThat(
split(
parse(
"x = 1", //
"load('m', 'y')"),
parse(
"z = 1", //
"load('m', 'y2')")))
.isEqualTo("[(assignment)][(load)(assignment)][(load)]");
}

// Returns a string that indicates the breakdown of statements into chunks.
private static String split(StarlarkFile... files) {
StringBuilder buf = new StringBuilder();
for (List<StarlarkFile> chunk : WorkspaceFileFunction.splitChunks(Arrays.asList(files))) {
buf.append('[');
for (StarlarkFile partialFile : chunk) {
buf.append('(');
String sep = "";
for (Statement stmt : partialFile.getStatements()) {
buf.append(sep).append(Ascii.toLowerCase(stmt.kind().toString()));
sep = " ";
}
buf.append(')');
}
buf.append(']');
}
return buf.toString();
}

private static StarlarkFile parse(String... lines) {
return StarlarkFile.parse(ParserInput.fromLines(lines));
}
}
