From 26b5cbe709bffc35bdcb5bd396b817f2d630c65f Mon Sep 17 00:00:00 2001 From: Benjy Weinberger Date: Mon, 22 May 2023 06:53:03 -0700 Subject: [PATCH 1/5] Reestablish remote caching in this repo. --- .github/workflows/test.yaml | 221 ++++++++++++++++++ .../bin/generate_github_workflows.py | 34 +++ 2 files changed, 255 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index f56ad46e4f9..2954c38ab88 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -547,6 +547,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Set up Python 3.9 uses: actions/setup-python@v4 with: @@ -632,6 +649,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo 
"PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -670,6 +704,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -729,6 +780,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -788,6 +856,23 
@@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -847,6 +932,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -906,6 +1008,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 
'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -965,6 +1084,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1024,6 +1160,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" 
--s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1083,6 +1236,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1142,6 +1312,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo 
"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1201,6 +1388,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1261,6 +1465,23 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 + - env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + if: github.repository_owner == 'pantsbuild' + name: Launch bazel-remote + run: 'mkdir -p ~/bazel-remote + + docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 & + + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + + ' - name: Install AdoptJDK uses: actions/setup-java@v3 with: diff --git a/build-support/bin/generate_github_workflows.py b/build-support/bin/generate_github_workflows.py index 
8dc953d6685..f3503f076c8 100644 --- a/build-support/bin/generate_github_workflows.py +++ b/build-support/bin/generate_github_workflows.py @@ -198,6 +198,38 @@ def checkout( return steps +def launch_bazel_remote() -> Sequence[Step]: + return [ + { + "name": "Launch bazel-remote", + "if": "github.repository_owner == 'pantsbuild'", + "run": dedent( + """\ + mkdir -p ~/bazel-remote + docker run -u 1001:1000 \ + -v ~/bazel-remote:/data \ + -p 9092:9092 \ + buchgr/bazel-remote-cache \ + --s3.auth_method=access_key \ + --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" \ + --s3.access_key_id="${AWS_ACCESS_KEY_ID}" \ + --s3.bucket=cache.pantsbuild.org \ + --s3.endpoint=s3.us-east-1.amazonaws.com \ + --max_size 30 \ + & + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" + echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + """ + ), + "env": { + "AWS_SECRET_ACCESS_KEY": f"{gha_expr('secrets.AWS_SECRET_ACCESS_KEY')}", + "AWS_ACCESS_KEY_ID": f"{gha_expr('secrets.AWS_ACCESS_KEY_ID')}", + }, + } + ] + + def global_env() -> Env: return { "PANTS_CONFIG_FILES": "+['pants.ci.toml']", @@ -615,6 +647,7 @@ def test_jobs(helper: Helper, shard: str | None, platform_specific: bool) -> Job "if": IS_PANTS_OWNER, "steps": [ *checkout(), + *launch_bazel_remote(), install_jdk(), *( [install_go(), download_apache_thrift()] @@ -798,6 +831,7 @@ def test_workflow_jobs() -> Jobs: "if": IS_PANTS_OWNER, "steps": [ *checkout(), + *launch_bazel_remote(), *linux_x86_64_helper.setup_primary_python(), *linux_x86_64_helper.native_binaries_download(), { From 102bb3a7775bb57285f5e013bb67407bdc99f8f9 Mon Sep 17 00:00:00 2001 From: Benjy Weinberger Date: Thu, 25 May 2023 07:04:25 -0700 Subject: [PATCH 2/5] Use detach flag --- .github/workflows/test.yaml | 52 +++++++++---------- .../bin/generate_github_workflows.py | 5 +- 2 files changed, 28 insertions(+), 29 deletions(-) diff --git a/.github/workflows/test.yaml
b/.github/workflows/test.yaml index 2954c38ab88..85b357b64de 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -554,8 +554,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -656,8 +656,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -711,8 +711,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size 
- 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -787,8 +787,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -863,8 +863,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -939,8 +939,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 
9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -1015,8 +1015,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -1091,8 +1091,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org 
--s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -1167,8 +1167,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -1243,8 +1243,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -1319,8 +1319,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run 
--detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -1395,8 +1395,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" @@ -1472,8 +1472,8 @@ jobs: name: Launch bazel-remote run: 'mkdir -p ~/bazel-remote - docker run -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 & + docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size + 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" diff --git a/build-support/bin/generate_github_workflows.py b/build-support/bin/generate_github_workflows.py index f3503f076c8..48960c3c506 100644 --- 
a/build-support/bin/generate_github_workflows.py +++ b/build-support/bin/generate_github_workflows.py @@ -206,7 +206,7 @@ def launch_bazel_remote() -> Sequence[Step]: "run": dedent( """\ mkdir -p ~/bazel-remote - docker run -u 1001:1000 \ + docker run --detach -u 1001:1000 \ -v ~/bazel-remote:/data \ -p 9092:9092 \ buchgr/bazel-remote-cache \ @@ -215,8 +215,7 @@ def launch_bazel_remote() -> Sequence[Step]: --s3.access_key_id="${AWS_ACCESS_KEY_ID}" \ --s3.bucket=cache.pantsbuild.org \ --s3.endpoint=s3.us-east-1.amazonaws.com \ - --max_size 30 \ - & + --max_size 30 echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" From b87dd278cd193e5f8cc7248dcfc3afb39dfcec29 Mon Sep 17 00:00:00 2001 From: Benjy Weinberger Date: Thu, 25 May 2023 08:52:40 -0700 Subject: [PATCH 3/5] Support read-only across forks --- .github/workflows/test.yaml | 351 +++++++++--------- .../bin/generate_github_workflows.py | 24 +- 2 files changed, 201 insertions(+), 174 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 85b357b64de..afc30cc0dc8 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -550,20 +550,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo 
"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Set up Python 3.9 uses: actions/setup-python@v4 with: @@ -652,20 +653,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" 
--s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -707,20 +709,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 
'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -783,20 +786,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork 
PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -859,20 +863,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo 
"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -935,20 +940,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" 
--s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1011,20 +1017,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner 
== 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1087,20 +1094,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # 
cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1163,20 +1171,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo 
"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1239,20 +1248,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" 
--s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1315,20 +1325,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner 
== 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1391,20 +1402,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # 
cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -1468,20 +1480,21 @@ jobs: - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - if: github.repository_owner == 'pantsbuild' name: Launch bazel-remote - run: 'mkdir -p ~/bazel-remote - - docker run --detach -u 1001:1000 -v ~/bazel-remote:/data -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" --s3.access_key_id="${AWS_ACCESS_KEY_ID}" --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com --max_size - 30 - - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" - - echo 
"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" - - ' + run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ + \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ + \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ + \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ + \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ + \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ + \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ + \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ + \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ + \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ + \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: diff --git a/build-support/bin/generate_github_workflows.py b/build-support/bin/generate_github_workflows.py index 48960c3c506..59c56ca2d8e 100644 --- a/build-support/bin/generate_github_workflows.py +++ b/build-support/bin/generate_github_workflows.py @@ -202,28 +202,42 @@ def launch_bazel_remote() -> Sequence[Step]: return [ { "name": "Launch bazel-remote", - "if": "github.repository_owner == 'pantsbuild'", "run": dedent( """\ 
mkdir -p ~/bazel-remote + if [[ -z "${AWS_ACCESS_KEY_ID}" ]]; then + CACHE_WRITE=false + # If no secret read/write creds, use hard-coded read-only creds, so that + # cross-fork PRs can at least read from the cache. + # These creds are hard-coded here in this public repo, which makes the bucket + # world-readable. But since putting raw AWS tokens in a public repo, even + # deliberately, is icky, we base64-them. This will at least help hide from + # automated scanners that look for checked in AWS keys. + # Not that it would be terrible if we were scanned, since this is public + # on purpose, but it's best not to draw attention. + AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK' | base64 -d) + AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64 -d) + else + CACHE_WRITE=true + fi docker run --detach -u 1001:1000 \ -v ~/bazel-remote:/data \ -p 9092:9092 \ buchgr/bazel-remote-cache \ --s3.auth_method=access_key \ - --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" \ --s3.access_key_id="${AWS_ACCESS_KEY_ID}" \ + --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" \ --s3.bucket=cache.pantsbuild.org \ --s3.endpoint=s3.us-east-1.amazonaws.com \ --max_size 30 - echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" - echo "PANTS_REMOTE_CACHE_WRITE=true" >> "$GITHUB_ENV" echo "PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092" >> "$GITHUB_ENV" + echo "PANTS_REMOTE_CACHE_READ=true" >> "$GITHUB_ENV" + echo "PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}" >> "$GITHUB_ENV" """ ), "env": { - "AWS_SECRET_ACCESS_KEY": f"{gha_expr('secrets.AWS_SECRET_ACCESS_KEY')}", "AWS_ACCESS_KEY_ID": f"{gha_expr('secrets.AWS_ACCESS_KEY_ID')}", + "AWS_SECRET_ACCESS_KEY": f"{gha_expr('secrets.AWS_SECRET_ACCESS_KEY')}", }, } ] From dc19f66424b5c772029524e7eb4200401e903231 Mon Sep 17 00:00:00 2001 From: Benjy Weinberger Date: Thu, 25 May 2023 09:09:13 -0700 Subject: [PATCH 4/5] docstring --- build-support/bin/generate_github_workflows.py | 7 +++++++ 1 file 
changed, 7 insertions(+) diff --git a/build-support/bin/generate_github_workflows.py b/build-support/bin/generate_github_workflows.py index 59c56ca2d8e..6b3e6a77269 100644 --- a/build-support/bin/generate_github_workflows.py +++ b/build-support/bin/generate_github_workflows.py @@ -199,6 +199,13 @@ def checkout( def launch_bazel_remote() -> Sequence[Step]: + """Run a sidecar bazel-remote instance. + + This process proxies to a public-read/private-write S3 bucket (cache.pantsbuild.org). + PRs within pantsbuild/pants will have AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY + secrets set and so will be able to read and write the cache. PRs across forks + will not, so they use hard-coded read only creds so they can at least read from the cache. + """ return [ { "name": "Launch bazel-remote", From 6acb794d70ba913b9fc9aba6a5bcd6ab2872946c Mon Sep 17 00:00:00 2001 From: Benjy Weinberger Date: Thu, 25 May 2023 09:24:31 -0700 Subject: [PATCH 5/5] Don't run the sidecar on macos or ARM builds --- .github/workflows/test.yaml | 102 ++++++------------ .../bin/generate_github_workflows.py | 34 +++--- 2 files changed, 55 insertions(+), 81 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index afc30cc0dc8..ca614cf21c9 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -553,13 +553,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. 
But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -650,24 +650,6 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 - - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - name: Launch bazel-remote - run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ - \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ - \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ - \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ - \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ - \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ - \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ - \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ - \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ - \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ - \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: @@ -712,13 +694,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -789,13 +771,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -866,13 +848,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -943,13 +925,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -1020,13 +1002,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -1097,13 +1079,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -1174,13 +1156,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -1251,13 +1233,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -1328,13 +1310,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -1405,13 +1387,13 @@ jobs: name: Launch bazel-remote run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. This will at least help hide from\n\ + \ creds are hard-coded here in this public repo, which makes the bucket\n # world-readable. But since putting raw\ + \ AWS tokens in a public repo, even\n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ + \ -p 9092:9092 buchgr/bazel-remote-cache:v2.4.1 --s3.auth_method=access_key\ \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ @@ -1477,24 +1459,6 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 10 - - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - name: Launch bazel-remote - run: "mkdir -p ~/bazel-remote\nif [[ -z \"${AWS_ACCESS_KEY_ID}\" ]]; then\n CACHE_WRITE=false\n # If no secret read/write\ - \ creds, use hard-coded read-only creds, so that\n # cross-fork PRs can at least read from the cache.\n # These\ - \ creds are hard-coded here in this public repo, which makes the bucket \n # world-readable. But since putting raw\ - \ AWS tokens in a public repo, even \n # deliberately, is icky, we base64-them. 
This will at least help hide from\n\ - \ # automated scanners that look for checked in AWS keys.\n # Not that it would be terrible if we were scanned,\ - \ since this is public\n # on purpose, but it's best not to draw attention.\n AWS_ACCESS_KEY_ID=$(echo 'QUtJQVY2QTZHN1JRVkJJUVM1RUEK'\ - \ | base64 -d)\n AWS_SECRET_ACCESS_KEY=$(echo 'd3dOQ1k1eHJJWVVtejZBblV6M0l1endXV0loQWZWcW9GZlVjMDlKRwo=' | base64\ - \ -d)\nelse\n CACHE_WRITE=true\nfi\ndocker run --detach -u 1001:1000 -v ~/bazel-remote:/data \ - \ -p 9092:9092 buchgr/bazel-remote-cache --s3.auth_method=access_key\ - \ --s3.access_key_id=\"${AWS_ACCESS_KEY_ID}\" --s3.secret_access_key=\"${AWS_SECRET_ACCESS_KEY}\"\ - \ --s3.bucket=cache.pantsbuild.org --s3.endpoint=s3.us-east-1.amazonaws.com \ - \ --max_size 30\necho \"PANTS_REMOTE_STORE_ADDRESS=grpc://localhost:9092\" >> \"$GITHUB_ENV\"\necho\ - \ \"PANTS_REMOTE_CACHE_READ=true\" >> \"$GITHUB_ENV\"\necho \"PANTS_REMOTE_CACHE_WRITE=${CACHE_WRITE}\" >> \"$GITHUB_ENV\"\ - \n" - name: Install AdoptJDK uses: actions/setup-java@v3 with: diff --git a/build-support/bin/generate_github_workflows.py b/build-support/bin/generate_github_workflows.py index 6b3e6a77269..4c2948d76e2 100644 --- a/build-support/bin/generate_github_workflows.py +++ b/build-support/bin/generate_github_workflows.py @@ -201,10 +201,10 @@ def checkout( def launch_bazel_remote() -> Sequence[Step]: """Run a sidecar bazel-remote instance. - This process proxies to a public-read/private-write S3 bucket (cache.pantsbuild.org). - PRs within pantsbuild/pants will have AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY - secrets set and so will be able to read and write the cache. PRs across forks - will not, so they use hard-coded read only creds so they can at least read from the cache. + This process proxies to a public-read/private-write S3 bucket (cache.pantsbuild.org). PRs within + pantsbuild/pants will have AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY secrets set and so will be + able to read and write the cache. 
PRs across forks will not, so they use hard-coded read only + creds so they can at least read from the cache. """ return [ { @@ -216,8 +216,8 @@ def launch_bazel_remote() -> Sequence[Step]: CACHE_WRITE=false # If no secret read/write creds, use hard-coded read-only creds, so that # cross-fork PRs can at least read from the cache. - # These creds are hard-coded here in this public repo, which makes the bucket - # world-readable. But since putting raw AWS tokens in a public repo, even + # These creds are hard-coded here in this public repo, which makes the bucket + # world-readable. But since putting raw AWS tokens in a public repo, even # deliberately, is icky, we base64-them. This will at least help hide from # automated scanners that look for checked in AWS keys. # Not that it would be terrible if we were scanned, since this is public @@ -230,7 +230,7 @@ def launch_bazel_remote() -> Sequence[Step]: docker run --detach -u 1001:1000 \ -v ~/bazel-remote:/data \ -p 9092:9092 \ - buchgr/bazel-remote-cache \ + buchgr/bazel-remote-cache:v2.4.1 \ --s3.auth_method=access_key \ --s3.access_key_id="${AWS_ACCESS_KEY_ID}" \ --s3.secret_access_key="${AWS_SECRET_ACCESS_KEY}" \ @@ -638,7 +638,9 @@ def bootstrap_jobs( } -def test_jobs(helper: Helper, shard: str | None, platform_specific: bool) -> Jobs: +def test_jobs( + helper: Helper, shard: str | None, platform_specific: bool, with_remote_caching: bool +) -> Jobs: human_readable_job_name = f"Test Python ({helper.platform_name()})" human_readable_step_name = "Run Python tests" log_name = "python-test" @@ -667,7 +669,7 @@ def test_jobs(helper: Helper, shard: str | None, platform_specific: bool) -> Job "if": IS_PANTS_OWNER, "steps": [ *checkout(), - *launch_bazel_remote(), + *(launch_bazel_remote() if with_remote_caching else []), install_jdk(), *( [install_go(), download_apache_thrift()] @@ -692,7 +694,7 @@ def linux_x86_64_test_jobs() -> Jobs: helper = Helper(Platform.LINUX_X86_64) def test_python_linux(shard: str) -> dict[str, 
Any]: - return test_jobs(helper, shard, platform_specific=False) + return test_jobs(helper, shard, platform_specific=False, with_remote_caching=True) shard_name_prefix = helper.job_name("test_python") jobs = { @@ -721,7 +723,11 @@ def linux_arm64_test_jobs() -> Jobs: validate_ci_config=False, rust_testing=RustTesting.SOME, ), - helper.job_name("test_python"): test_jobs(helper, shard=None, platform_specific=True), + # We run these on a dedicated host with ample local cache, so remote caching + # just adds cost but little value. + helper.job_name("test_python"): test_jobs( + helper, shard=None, platform_specific=True, with_remote_caching=False + ), } return jobs @@ -734,7 +740,11 @@ def macos11_x86_64_test_jobs() -> Jobs: validate_ci_config=False, rust_testing=RustTesting.SOME, ), - helper.job_name("test_python"): test_jobs(helper, shard=None, platform_specific=True), + # We run these on a dedicated host with ample local cache, so remote caching + # just adds cost but little value. + helper.job_name("test_python"): test_jobs( + helper, shard=None, platform_specific=True, with_remote_caching=False + ), } return jobs