diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 46e914f5a52..6fead02fb6f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,7 +81,7 @@ jobs: run: # Flags: # - e2e_build=true to get images from a local build, not a dockerhub registry strategy - earthly-cloud build arm --no-output +${{ matrix.test }} --e2e_build=true + earthly-cloud build arm --no-output +${{ matrix.test }} --e2e_mode=build # all the end-to-end integration tests for aztec e2e-x86: @@ -123,7 +123,7 @@ jobs: - name: Test working-directory: ./yarn-project/end-to-end/ run: | - earthly-cloud build x86 --no-output +${{ matrix.test }} + earthly-cloud build x86 --no-output +${{ matrix.test }} --e2e_mode=cache # barretenberg (prover) native tests bb-native-tests: @@ -158,7 +158,7 @@ jobs: - name: Test working-directory: ./barretenberg/cpp/ run: | - earthly-cloud build ${{ matrix.environment }} --no-output +test + earthly-cloud test ${{ matrix.environment }} --no-output +test # All benchmarks, purposefully ran sequential on a machine # they should use parallelism within the benchmark, but only one thing should run at a time @@ -170,9 +170,9 @@ jobs: EARTHLY_TOKEN: ${{ secrets.EARTHLY_TOKEN }} # cancel if reran on same PR if exists, otherwise if on same commit concurrency: - # TODO no longer exclusive.. revisit this - group: ${{ github.actor }}-x86 - cancel-in-progress: ${{ github.ref_name != 'master' }} + # TODO this allows NO concurrency of bench jobs + # TODO if this takes too long, we need to divide up into different bench machines for each pipeline step + group: bench-x86 steps: - uses: earthly/actions-setup@v1 with: @@ -188,10 +188,15 @@ jobs: working-directory: ./scripts run: ./setup_env.sh ${{ secrets.DOCKERHUB_PASSWORD }} ${{ github.actor }} + - name: Build and Push Binaries + working-directory: ./barretenberg/cpp/ + run: earthly-cloud build x86 --push +bench-base + + # Use bench_mode=cache to read the pushed build above - name: Client IVC Bench working-directory: ./barretenberg/cpp/ - run: earthly-cloud bench x86 --no-output +bench-client-ivc + run: earthly-cloud bench x86 --no-output +bench-client-ivc --bench_mode=cache - name: Ultrahonk Bench working-directory: ./barretenberg/cpp/ - run: earthly-cloud bench x86 --no-output +bench-ultra-honk + run: earthly-cloud bench x86 --no-output +bench-ultra-honk --bench_mode=cache diff --git a/.github/workflows/publish_base_images.yml b/.github/workflows/publish_base_images.yml new file mode 100644 index 00000000000..c2f52ab39b5 --- /dev/null +++ b/.github/workflows/publish_base_images.yml @@ -0,0 +1,39 @@ +# Publishes our base images with custom installs or builds etc +# These publish a multi-arch image by first publishing with x86, and then with arm +# This is a bit of a hack, but earthly needs to see both image types to make a multiplatform image +# and its easiest for arm to just pull the x86 image after. 
+name: Publish Base Images +on: + workflow_dispatch: {} + +jobs: + publish: + runs-on: ubuntu-latest + env: + EARTHLY_TOKEN: ${{ secrets.EARTHLY_TOKEN }} + # cancel if reran on same PR if exists, otherwise if on same commit + concurrency: + group: publish-base-images-${{ github.event.pull_request.number || github.ref_name }} + cancel-in-progress: ${{ github.ref_name != 'master' }} + steps: + - uses: earthly/actions-setup@v1 + with: + version: v0.8.5 + + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + submodules: recursive + + - name: Setup + working-directory: ./scripts + run: ./setup_env.sh ${{ secrets.DOCKERHUB_PASSWORD }} ${{ github.actor }} + + - name: Publish Barretenberg Base Images + working-directory: ./barretenberg/cpp + run: | + # see note above about how this creates a multiplatform image + earthly-cloud build x86 --push +build-base + earthly-cloud build arm --push +build-base + diff --git a/avm-transpiler/Earthfile b/avm-transpiler/Earthfile index d00a7bf9823..b2c77f9b3f9 100644 --- a/avm-transpiler/Earthfile +++ b/avm-transpiler/Earthfile @@ -3,10 +3,10 @@ IMPORT ../noir AS noir # we rely on noir source, which this image has FROM noir+nargo -# move noir contents to /build/noir -RUN mv /build /noir && mkdir /build && mv /noir /build +# move noir contents to /usr/src/noir +RUN mv /usr/src /noir && mkdir /usr/src && mv /noir /usr/src # work in avm-transpiler -WORKDIR /build/avm-transpiler +WORKDIR /usr/src/avm-transpiler RUN apt-get update && apt-get install -y git @@ -24,5 +24,5 @@ build: run: #TODO needed? FROM ubuntu:focal - COPY +build/avm-transpiler /build/avm-transpiler + COPY +build/avm-transpiler /usr/src/avm-transpiler ENTRYPOINT ["sh", "-c"] diff --git a/barretenberg/cpp/Earthfile b/barretenberg/cpp/Earthfile index 6b2cba59e9e..de96f18f227 100644 --- a/barretenberg/cpp/Earthfile +++ b/barretenberg/cpp/Earthfile @@ -1,28 +1,34 @@ VERSION 0.8 -FROM ubuntu:lunar -RUN apt-get update && apt-get install -y \ - build-essential \ - curl \ - git \ - cmake \ - lsb-release \ - wget \ - software-properties-common \ - gnupg \ - ninja-build \ - npm \ - libssl-dev \ - jq \ - bash \ - libstdc++6 \ - clang-format \ - clang-16 - -WORKDIR /build - -SAVE IMAGE --push aztecprotocol/cache:bb-ubuntu-lunar -SAVE IMAGE --cache-hint +build-base: + ARG TARGETARCH + FROM --platform=linux/$TARGETARCH ubuntu:lunar + RUN apt-get update && apt-get install -y \ + build-essential \ + curl \ + git \ + cmake \ + lsb-release \ + wget \ + software-properties-common \ + gnupg \ + ninja-build \ + npm \ + libssl-dev \ + jq \ + bash \ + libstdc++6 \ + clang-format \ + clang-16 + + IF [ $TARGETARCH = arm64 ] + # publish arm after, assumes x86 already exists, becomes multiplatform image + SAVE IMAGE --push aztecprotocol/bb-ubuntu-lunar + FROM --platform=linux/amd64 aztecprotocol/bb-ubuntu-lunar:x86-latest + SAVE IMAGE --push aztecprotocol/bb-ubuntu-lunar + ELSE + SAVE IMAGE --push aztecprotocol/bb-ubuntu-lunar:x86-latest + END build-wasi-sdk-image: WORKDIR / @@ -42,7 +48,7 @@ build-wasi-sdk: ARG TARGETARCH # Wrapper just exists share files. 
FROM scratch - WORKDIR /build + WORKDIR /usr/src COPY +get-wasi-sdk-image/wasi-sdk wasi-sdk SAVE ARTIFACT wasi-sdk SAVE IMAGE --push aztecprotocol/cache:wasi-sdk-threads-v21.0-$TARGETARCH @@ -63,31 +69,34 @@ get-wasi-sdk: SAVE ARTIFACT src/wasi-sdk-20.0 wasi-sdk wasmtime: + FROM aztecprotocol/bb-ubuntu-lunar RUN curl https://wasmtime.dev/install.sh -sSf | bash SAVE ARTIFACT /root/.wasmtime/bin/wasmtime source: + FROM aztecprotocol/bb-ubuntu-lunar + WORKDIR /usr/src/barretenberg # cpp source - COPY --keep-ts --dir src/barretenberg src/CMakeLists.txt src + COPY --dir src/barretenberg src/CMakeLists.txt src # cmake source - COPY --keep-ts --dir cmake CMakeLists.txt CMakePresets.json . + COPY --dir cmake CMakeLists.txt CMakePresets.json . # for debugging rebuilds RUN echo CONTENT HASH $(find . -type f -exec sha256sum {} ';' | sort | sha256sum | awk '{print $1}') | tee .content-hash -preset-release-assert-all: +preset-release: FROM +source - DO +RUN_CMAKE --configure="--preset clang16-assert" --build="" - SAVE ARTIFACT bin + RUN cmake --preset clang16 -Bbuild && cmake --build build --target bb + SAVE ARTIFACT build/bin -preset-release: +preset-release-assert: FROM +source - DO +RUN_CMAKE --configure="--preset clang16" --build="--target bb" - SAVE ARTIFACT bin + RUN cmake --preset clang16 -Bbuild && cmake --build build --target bb + SAVE ARTIFACT build/bin preset-debug: FROM +source - DO +RUN_CMAKE --configure="--preset clang16-dbg" --build="--target bb" - SAVE ARTIFACT bin + RUN cmake --preset clang16-dbg -Bbuild && cmake --build build --target bb + SAVE ARTIFACT build/bin preset-wasm: ARG TARGETARCH @@ -95,126 +104,114 @@ preset-wasm: IF [ $TARGETARCH = arm64 ] # Just use threads for now FROM +preset-wasm-threads - SAVE ARTIFACT bin + SAVE ARTIFACT build/bin ELSE COPY +get-wasi-sdk/wasi-sdk src/wasi-sdk - DO +RUN_CMAKE --configure="--preset wasm" --build="--target barretenberg.wasm" - RUN ../src/wasi-sdk/bin/llvm-strip ./bin/barretenberg.wasm - SAVE ARTIFACT bin + RUN cmake --preset wasm -Bbuild && cmake --build build --target barretenberg.wasm + RUN src/wasi-sdk/bin/llvm-strip ./build/bin/barretenberg.wasm + SAVE ARTIFACT build/bin SAVE IMAGE --cache-hint END preset-wasm-threads: FROM +source COPY +get-wasi-sdk-threads/wasi-sdk src/wasi-sdk - DO +RUN_CMAKE --configure="--preset wasm-threads" --build="--target barretenberg.wasm" - RUN ../src/wasi-sdk/bin/llvm-strip ./bin/barretenberg.wasm - SAVE ARTIFACT bin - SAVE IMAGE --cache-hint + RUN cmake --preset wasm-threads -Bbuild && cmake --build build --target barretenberg.wasm + RUN src/wasi-sdk/bin/llvm-strip ./build/bin/barretenberg.wasm + SAVE ARTIFACT build/bin preset-gcc: FROM +source - DO +RUN_CMAKE --configure="--preset gcc" --build="" - SAVE ARTIFACT bin + RUN cmake --preset gcc -Bbuild && cmake --build build + SAVE ARTIFACT build/bin preset-fuzzing: FROM +source - DO +RUN_CMAKE --configure="--preset fuzzing" --build="" - SAVE ARTIFACT bin + RUN cmake --preset fuzzing -Bbuild && cmake --build build + SAVE ARTIFACT build/bin preset-clang-assert: FROM +source - DO +RUN_CMAKE --configure="--preset clang16 -DCMAKE_BUILD_TYPE=RelWithAssert" --build="--target bb" - SAVE ARTIFACT bin + RUN cmake --preset clang16-assert -Bbuild && cmake --build build --target bb + SAVE ARTIFACT build/bin -preset-op-count: +# benchmarking images +# these are either fresh builds just for benching (op-count and op-count-time) +# or build the binaries we need for benchmarking +preset-op-count-bench: FROM +source - DO +RUN_CMAKE --configure="--preset op-count 
-DCMAKE_BUILD_TYPE=RelWithAssert" --build="--target bb" - SAVE ARTIFACT bin + RUN cmake --preset op-count -DCMAKE_BUILD_TYPE=RelWithAssert -Bbuild && cmake --build build --target ultra_honk_bench --target client_ivc_bench + SAVE ARTIFACT build/bin -preset-op-count-time: +preset-op-count-time-bench: FROM +source - DO +RUN_CMAKE --configure="--preset op-count-time -DCMAKE_BUILD_TYPE=RelWithAssert" --build="--target bb" - SAVE ARTIFACT bin + RUN cmake --preset op-count-time -Bbuild && cmake --build build --target ultra_honk_bench --target client_ivc_bench + SAVE ARTIFACT build/bin -test-clang-format: - FROM +source - COPY .clang-format . - COPY format.sh . - RUN ./format.sh check +preset-release-bench: + FROM +preset-release + RUN cmake --build build --target ultra_honk_bench --target client_ivc_bench + SAVE ARTIFACT build/bin + +preset-wasm-bench: + FROM +preset-wasm-threads + RUN cmake --build build --target ultra_honk_bench --target client_ivc_bench + SAVE ARTIFACT build/bin + +# test images +preset-release-assert-test: + FROM +preset-release-assert + # build all targets for tests + RUN cmake --build build + SAVE ARTIFACT build/bin + +# Sent to the bench runner using a earthly-cloud build x86 --push +bench-base --bench_mode=true +# then we can run earthly-cloud bench x86 +bench-ultra-honk etc +bench-base: + ARG EARTHLY_GIT_HASH + ARG TARGETARCH + ARG bench_mode=build + LOCALLY + IF [ $bench_mode = cache ] + FROM aztecprotocol/bb-bench-base:$TARGETARCH-$EARTHLY_GIT_HASH + ELSE + FROM +source + COPY +preset-op-count-time-bench/bin/*_bench op-count-time/bin/ + COPY +preset-op-count-bench/bin/*_bench op-count/bin/ + COPY +preset-release-bench/bin/*_bench release/bin/ + COPY +preset-wasm-bench/bin/*_bench wasm/bin/ + SAVE IMAGE --push aztecprotocol/bb-bench-base:$TARGETARCH-$EARTHLY_GIT_HASH + END +# Runs on the bench image, sent from the builder runner bench-ultra-honk: - DO +BENCH_RELEASE --target=ultra_honk_bench --args="--benchmark_filter=construct_proof_ultrahonk_power_of_2/20$" - DO +BENCH_WASM --target=ultra_honk_bench --args="--benchmark_filter=construct_proof_ultrahonk_power_of_2/20$" + FROM +bench-base + # install SRS needed for proving + COPY --dir ./srs_db/+build/. srs_db + RUN cd release && ./bin/ultra_honk_bench --benchmark_filter="construct_proof_ultrahonk_power_of_2/20$" + RUN cd op-count && ./bin/ultra_honk_bench --benchmark_filter="construct_proof_ultrahonk_power_of_2/20$" + RUN cd op-count-time && ./bin/ultra_honk_bench --benchmark_filter="construct_proof_ultrahonk_power_of_2/20$" + COPY +wasmtime/wasmtime /usr/bin/wasmtime + RUN cd wasm && wasmtime run --env HARDWARE_CONCURRENCY=16 -Wthreads=y -Sthreads=y --dir=".." ./bin/ultra_honk_bench --benchmark_filter="construct_proof_ultrahonk_power_of_2/20$" bench-client-ivc: - DO +BENCH_RELEASE --target=client_ivc_bench --args="--benchmark_filter=ClientIVCBench/Full/6$" - DO +BENCH_RELEASE --target=client_ivc_bench --args="--benchmark_filter=ClientIVCBench/Full/6$" - DO +BENCH_WASM --target=client_ivc_bench --args="--benchmark_filter=ClientIVCBench/Full/6$" + FROM +bench-base + # install SRS needed for proving + COPY --dir ./srs_db/+build/. 
srs_db + RUN cd release && ./bin/client_ivc_bench --benchmark_filter="ClientIVCBench/Full/6$" + RUN cd op-count && ./bin/client_ivc_bench --benchmark_filter="ClientIVCBench/Full/6$" + RUN cd op-count-time && ./bin/client_ivc_bench --benchmark_filter="ClientIVCBench/Full/6$" + COPY +wasmtime/wasmtime /usr/bin/wasmtime + RUN cd wasm && wasmtime run --env HARDWARE_CONCURRENCY=16 -Wthreads=y -Sthreads=y --dir=".." ./bin/client_ivc_bench --benchmark_filter="ClientIVCBench/Full/6$" -build: # default target - BUILD +preset-release - BUILD +preset-wasm-threads +test-clang-format: + FROM +source + COPY .clang-format . + COPY format.sh . + RUN ./format.sh check test: BUILD +test-clang-format - FROM +preset-release-assert-all - COPY --dir ./srs_db/+build/. ../srs_db - RUN GTEST_COLOR=1 ctest -j$(nproc) --output-on-failure - -# Functions -RUN_CMAKE: - FUNCTION - # Runs cmake build and leaves binary artifacts at 'bin'. Uses a mounted cache - # for incremental rebuilds. - ARG configure # cmake configure flags - ARG build # cmake build flags - WORKDIR /build - # Use a mount for incremental builds locally. - # TODO(AD): To be determined: does this get us in trouble in CI? - RUN --mount type=cache,id="$configure-build",target=/build/build \ - (cmake $configure -Bbuild || (rm -f build/CMakeCache.txt && cmake $configure -Bbuild)) && \ - cmake --build build $build && \ - cp -r build build-tmp - # move the temporary build folder back - # this is because the cached build goes away - RUN rm -rf build && mv build-tmp build - WORKDIR /build/build - -BENCH_RELEASE: - FUNCTION - ARG target - ARG args - FROM +preset-release - DO +RUN_CMAKE --configure="--preset clang16" --build="--target $target" - COPY --dir ./srs_db/+build/. ../srs_db - RUN ./bin/$target $args - -BENCH_OP_COUNT_TIME: - FUNCTION - ARG target - ARG args - FROM +preset-op-count-time - DO +RUN_CMAKE --configure="--preset op-count-time" --build="--target $target" - COPY --dir ./srs_db:+build/. ../srs_db - RUN ./bin/$target $args - -BENCH_OP_COUNT: - FUNCTION - ARG target - ARG args - FROM +preset-op-count - DO +RUN_CMAKE --configure="--preset op-count" --build="--target $target" - COPY --dir ./srs_db/+build/. ../srs_db - RUN ./bin/$target $args - -BENCH_WASM: - FUNCTION - ARG target - ARG args - FROM +preset-wasm-threads - DO +RUN_CMAKE --configure="--preset wasm-threads" --build="--target $target" - COPY --dir ./srs_db/+build/. ../srs_db - # install - COPY +wasmtime/wasmtime /usr/bin/wasmtime - RUN wasmtime run --env HARDWARE_CONCURRENCY=8 -Wthreads=y -Sthreads=y --dir=.. ./bin/$target $args + FROM +preset-release-assert-test + COPY --dir ./srs_db/+build/. srs_db + RUN cd build && GTEST_COLOR=1 ctest -j$(nproc) --output-on-failure diff --git a/barretenberg/cpp/srs_db/Earthfile b/barretenberg/cpp/srs_db/Earthfile index 4b4a503b6b7..9bb57f1d5c6 100644 --- a/barretenberg/cpp/srs_db/Earthfile +++ b/barretenberg/cpp/srs_db/Earthfile @@ -4,7 +4,7 @@ FROM ubuntu:lunar RUN apt-get update && apt-get install -y curl build: - WORKDIR /build + WORKDIR /usr/src COPY ./*.sh . 
RUN ./download_ignition.sh 3 RUN ./download_grumpkin.sh diff --git a/barretenberg/ts/Earthfile b/barretenberg/ts/Earthfile index a0dcd619e32..a11afe022ee 100644 --- a/barretenberg/ts/Earthfile +++ b/barretenberg/ts/Earthfile @@ -1,7 +1,7 @@ VERSION 0.8 FROM node:18.19.0 -WORKDIR /build +WORKDIR /usr/src/barretenberg/ts-build # minimum files to download yarn packages # keep timestamps for incremental builds @@ -20,28 +20,28 @@ COPY ../cpp/+preset-wasm/bin/barretenberg.wasm dest/node-cjs/barretenberg_wasm/b esm: RUN yarn build:esm - SAVE ARTIFACT /build + SAVE ARTIFACT /usr/src/barretenberg/ts-build build cjs: COPY scripts/cjs_postprocess.sh scripts/ RUN yarn build:cjs - SAVE ARTIFACT /build + SAVE ARTIFACT /usr/src/barretenberg/ts-build build browser: RUN yarn build:browser - SAVE ARTIFACT /build + SAVE ARTIFACT /usr/src/barretenberg/ts-build build test-prettier-format: RUN yarn formatting build: # collect all our build types - COPY +esm/build /build - COPY +cjs/build /build - COPY +browser/build /build + COPY +esm/build /usr/src/barretenberg/ts-build + COPY +cjs/build /usr/src/barretenberg/ts-build + COPY +browser/build /usr/src/barretenberg/ts-build # We want to create a pure package, as would be published to npm, for consuming projects. RUN yarn pack && tar zxf package.tgz && rm package.tgz && mv package ../ts - SAVE ARTIFACT /build + SAVE ARTIFACT /usr/src/barretenberg/ts build test: BUILD +test-prettier-format diff --git a/boxes/Earthfile b/boxes/Earthfile index c8a92975bb7..e96e5e681c8 100644 --- a/boxes/Earthfile +++ b/boxes/Earthfile @@ -5,14 +5,14 @@ build: # We need yarn. Start fresh container. FROM node:18.19.0 RUN apt update && apt install netcat-openbsd - COPY ../yarn-project+build/build /build - COPY ../noir/+nargo/nargo /build/noir/noir-repo/target/release/nargo - COPY ../noir-projects/+build/aztec-nr /build/noir-projects/aztec-nr - COPY ../noir-projects/+build/noir-protocol-circuits/crates/types /build/noir-projects/noir-protocol-circuits/crates/types - WORKDIR /build/boxes + COPY ../yarn-project+build/build /usr/src + COPY ../noir/+nargo/nargo /usr/src/noir/noir-repo/target/release/nargo + COPY ../noir-projects/+build/aztec-nr /usr/src/noir-projects/aztec-nr + COPY ../noir-projects/+build/noir-protocol-circuits/crates/types /usr/src/noir-projects/noir-protocol-circuits/crates/types + WORKDIR /usr/src/boxes COPY . . - ENV AZTEC_NARGO=/build/noir/noir-repo/target/release/nargo - ENV AZTEC_CLI=/build/yarn-project/cli/aztec-cli-dest + ENV AZTEC_NARGO=/usr/src/noir/noir-repo/target/release/nargo + ENV AZTEC_CLI=/usr/src/yarn-project/cli/aztec-cli-dest RUN yarn && yarn build RUN npx -y playwright@1.42 install --with-deps ENTRYPOINT ["/bin/sh", "-c"] \ No newline at end of file diff --git a/l1-contracts/Earthfile b/l1-contracts/Earthfile index 6f6f42a85d2..c93c31bf1b3 100644 --- a/l1-contracts/Earthfile +++ b/l1-contracts/Earthfile @@ -14,7 +14,7 @@ RUN foundryup # Install yarn and solhint. RUN npm install --global yarn solhint -WORKDIR /build +WORKDIR /usr/src COPY --dir lib scripts src terraform test *.json *.toml *.sh . build: diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 51d82e444c9..c5decbbbeab 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -6,7 +6,7 @@ COPY ../noir/+nargo/nargo /usr/bin/nargo # Install transpiler COPY ../avm-transpiler/+build/avm-transpiler /usr/bin/avm-transpiler -WORKDIR /build +WORKDIR /usr/src/noir-projects # Copy source. COPY --dir aztec-nr noir-contracts noir-protocol-circuits . 
diff --git a/noir/Earthfile b/noir/Earthfile index a6496804ecc..62345056e0e 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -3,7 +3,7 @@ VERSION 0.8 nargo: FROM rust:bullseye RUN apt update && apt install -y libc++1 - WORKDIR /build + WORKDIR /usr/src # Relevant source (TODO finer-grained 'tooling') COPY --dir \ noir-repo/acvm-repo \ @@ -21,8 +21,8 @@ nargo: COPY ./scripts/bootstrap_native.sh ./scripts/bootstrap_native.sh RUN ./scripts/bootstrap_native.sh RUN echo CONTENT HASH $COMMIT_HASH | tee .content-hash - SAVE ARTIFACT /build/noir-repo/target/release/nargo nargo - SAVE ARTIFACT /build/noir-repo/target/release/acvm acvm + SAVE ARTIFACT /usr/src/noir-repo/target/release/nargo nargo + SAVE ARTIFACT /usr/src/noir-repo/target/release/acvm acvm SAVE IMAGE aztecprotocol/nargo packages: @@ -34,9 +34,9 @@ packages: # `noir-repo` is nested inside of `noir` so we copy `bb.js` as such to account # for the extra nested folder specified in portalled package paths - COPY ../barretenberg/ts/+build/build /build/../barretenberg/ts + COPY ../barretenberg/ts/+build/build /usr/src/../barretenberg/ts - WORKDIR /build + WORKDIR /usr/src # Relevant source (TODO finer-grained) COPY --dir \ @@ -77,8 +77,8 @@ run: # Install Tini as nargo doesn't handle signals properly. # Install git as nargo needs it to clone. RUN apt-get update && apt-get install -y git tini && rm -rf /var/lib/apt/lists/* && apt-get clean - COPY +build/. /build - ENTRYPOINT ["/usr/bin/tini", "--", "/build/nargo"] + COPY +build/. /usr/src + ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/nargo"] build: BUILD +nargo diff --git a/scripts/earthly-cloud b/scripts/earthly-cloud index 320d9e49b4f..d2a0e39bc2c 100755 --- a/scripts/earthly-cloud +++ b/scripts/earthly-cloud @@ -38,6 +38,9 @@ elif [ "$RUNNER_TYPE" == "bench" ] ; then SIZE=2xlarge NUMBER_OF_RUNNERS=1 # MAX_PARALLELISM=1 +elif [ "$RUNNER_TYPE" == "test" ] ; then + SIZE=4xlarge + NUMBER_OF_RUNNERS=1 fi # Flag to determine if -i is present diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 2e110b4a209..b62d02875d1 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -3,14 +3,14 @@ FROM node:18.19.0 RUN apt update && apt install -y jq curl perl && rm -rf /var/lib/apt/lists/* && apt-get clean # copy bb-js and noir-packages -COPY ../barretenberg/ts/+build/build /build/barretenberg/ts -COPY ../noir/+packages/packages /build/noir/packages +COPY ../barretenberg/ts/+build/build /usr/src/barretenberg/ts +COPY ../noir/+packages/packages /usr/src/noir/packages # install acvm binary COPY ../noir/+nargo/acvm /usr/bin/acvm -COPY --dir ../noir-projects/+build/. /build/noir-projects -COPY ../l1-contracts/+build/out /build/l1-contracts/out +COPY --dir ../noir-projects/+build/. /usr/src/noir-projects +COPY ../l1-contracts/+build/out /usr/src/l1-contracts/out -WORKDIR /build/yarn-project +WORKDIR /usr/src/yarn-project # copy source COPY --dir * *.json .yarn .yarnrc.yml . @@ -20,24 +20,16 @@ COPY --dir * *.json .yarn .yarnrc.yml . # This does kind of work, but jest doesn't honor it correctly, so this seems like a neat workaround. # Also, --preserve-symlinks causes duplication of portalled instances such as bb.js, and breaks the singleton logic # by initialising the module more than once. So at present I don't see a viable alternative. 
-RUN ln -s /build/yarn-project/node_modules /build/node_modules +RUN ln -s /usr/src/yarn-project/node_modules /usr/src/node_modules # Target for main build process build: FROM +source ARG EARTHLY_CI # TODO: Replace puppeteer with puppeteer-core to avoid this. - # TODO encapsulate in bash script for cleanliness ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true - ENV YARN_CACHE_FOLDER /build/yarn-cache - # Use a mount for incremental builds locally. - RUN --mount type=cache,target=/build/node_modules_cache --mount type=cache,target=/build/yarn-cache \ - mkdir -p node_modules_cache && \ - mv node_modules_cache node_modules && \ - ./bootstrap.sh && \ - yarn workspaces focus @aztec/cli @aztec/aztec --production && \ - cp -r node_modules node_modules_cache - SAVE ARTIFACT /build + RUN ./bootstrap.sh && yarn workspaces focus @aztec/cli @aztec/aztec --production + SAVE ARTIFACT /usr/src # TODO versioning flow at end before publish? # ENV COMMIT_TAG=$EARTHLY_BUILD_SHA @@ -47,15 +39,15 @@ build: # FROM node:18.19.1-slim # ARG COMMIT_TAG="" # ENV COMMIT_TAG=$COMMIT_TAG -# COPY --from=builder /build /build -# WORKDIR /build/yarn-project +# COPY --from=builder /usr/src /usr/src +# WORKDIR /usr/src/yarn-project # ENTRYPOINT ["yarn"] aztec: FROM +build # ENV vars for using native ACVM simulation ENV ACVM_BINARY_PATH="/usr/bin/acvm" ACVM_WORKING_DIRECTORY="/tmp/acvm" - ENTRYPOINT ["node", "--no-warnings", "/build/yarn-project/aztec/dest/bin/index.js"] + ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec/dest/bin/index.js"] EXPOSE 8080 # TODO(AD) the following are the biggest node modules bundled, should they be deleted as they are build tools? # 25840 @jest @@ -77,7 +69,7 @@ end-to-end: # Build web bundle for browser tests RUN yarn workspace @aztec/end-to-end run build:web RUN yarn workspaces focus @aztec/end-to-end --production && yarn cache clean - SAVE ARTIFACT /build + SAVE ARTIFACT /usr/src/* end-to-end-minimal: # end to end test runner @@ -85,8 +77,8 @@ end-to-end-minimal: FROM node:18.19.1-slim RUN apt-get update && apt-get install jq chromium -y ENV CHROME_BIN="/usr/bin/chromium" - COPY +end-to-end/build /build - WORKDIR /build/yarn-project/end-to-end + COPY +end-to-end/. 
/usr/src + WORKDIR /usr/src/yarn-project/end-to-end ENTRYPOINT ["yarn", "test"] build-end-to-end: diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 8a4e05523d2..f88d7ca9b44 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -2,7 +2,26 @@ VERSION 0.8 # requires first saving the images locally with ../+build-end-to-end -# run locally, used for our mainly x86 jobs +# run locally and build +E2E_TEST_LOCAL: + FUNCTION + ARG test + ARG compose_file=./scripts/docker-compose.yml + ARG enable_gas="" + ARG debug="aztec:*" + LOCALLY + ENV ENABLE_GAS=$enable_gas + ENV TEST=$test + ENV DEBUG="$debug" + WITH DOCKER \ + --load aztecprotocol/aztec:latest=../+aztec \ + --load aztecprotocol/end-to-end:latest=../+end-to-end-minimal \ + --load ghcr.io/foundry-rs/foundry:nightly-de33b6af53005037b463318d2628b5cfcaf39916=../../foundry/+get + # Run our docker compose, ending whenever sandbox ends, filtering out noisy eth_getLogs + RUN docker compose -f $compose_file up --exit-code-from=sandbox --force-recreate + END + +# run locally and take from cache, used for our mainly x86 jobs E2E_TEST_FROM_DOCKERHUB: FUNCTION ARG test @@ -13,10 +32,10 @@ E2E_TEST_FROM_DOCKERHUB: ENV ENABLE_GAS=$enable_gas ENV TEST=$test ENV DEBUG="$debug" - # Locally, we do not use WITH DOCKER as we have had issues with earthly copying big images + # In CI we do not use WITH DOCKER as we have had issues with earthly copying big images RUN docker compose -f $compose_file up --exit-code-from=end-to-end --force-recreate -# run on satellite, used for our few ARM jobs (means github runner doesn't need to be ARM) +# run on satellite and build, used for our few ARM jobs (means github runner doesn't need to be ARM) E2E_TEST_FROM_BUILD: FUNCTION ARG test @@ -42,207 +61,209 @@ E2E_TEST: ARG test ARG compose_file=./scripts/docker-compose.yml ARG enable_gas="" - ARG e2e_build=false + ARG e2e_mode=local ARG debug="aztec:*" LOCALLY - IF [ $e2e_build = true ] - DO +E2E_TEST_FROM_BUILD --test=$test --compose_file=$compose_file --enable_gas=$enable_gas --debug=$debug - ELSE + IF [ $e2e_mode = local ] + DO +E2E_TEST_LOCAL --test=$test --compose_file=$compose_file --enable_gas=$enable_gas --debug=$debug + ELSE IF [ $e2e_mode = cache ] DO +E2E_TEST_FROM_DOCKERHUB --test=$test --compose_file=$compose_file --enable_gas=$enable_gas --debug=$debug + ELSE + DO +E2E_TEST_FROM_BUILD --test=$test --compose_file=$compose_file --enable_gas=$enable_gas --debug=$debug END # Define e2e tests e2e-2-pxes: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_2_pxes.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_2_pxes.test.ts --e2e_mode=$e2e_mode e2e-note-getter: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_note_getter.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_note_getter.test.ts --e2e_mode=$e2e_mode e2e-counter: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_counter_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_counter_contract.test.ts --e2e_mode=$e2e_mode e2e-private-voting: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_private_voting_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_private_voting_contract.test.ts --e2e_mode=$e2e_mode e2e-max-block-number: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_max_block_number.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_max_block_number.test.ts --e2e_mode=$e2e_mode 
e2e-multiple-accounts-1-enc-key: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_multiple_accounts_1_enc_key.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_multiple_accounts_1_enc_key.test.ts --e2e_mode=$e2e_mode e2e-deploy-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_deploy_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_deploy_contract.test.ts --e2e_mode=$e2e_mode e2e-lending-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_lending_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_lending_contract.test.ts --e2e_mode=$e2e_mode e2e-token-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_token_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_token_contract.test.ts --e2e_mode=$e2e_mode e2e-authwit-test: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_authwit.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_authwit.test.ts --e2e_mode=$e2e_mode e2e-blacklist-token-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_blacklist_token_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_blacklist_token_contract.test.ts --e2e_mode=$e2e_mode # TODO(3458): Investigate intermittent failure # e2e-slow-tree: # DO +E2E_TEST --test=e2e_slow_tree e2e-sandbox-example: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_sandbox_example.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_sandbox_example.test.ts --e2e_mode=$e2e_mode e2e-state-vars: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_state_vars.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_state_vars.test.ts --e2e_mode=$e2e_mode e2e-block-building: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_block_building.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_block_building.test.ts --e2e_mode=$e2e_mode e2e-nested-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_nested_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_nested_contract.test.ts --e2e_mode=$e2e_mode e2e-static-calls: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_static_calls.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_static_calls.test.ts --e2e_mode=$e2e_mode e2e-delegate-calls: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_delegate_calls.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_delegate_calls.test.ts --e2e_mode=$e2e_mode e2e-non-contract-account: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_non_contract_account.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_non_contract_account.test.ts --e2e_mode=$e2e_mode e2e-cross-chain-messaging: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_cross_chain_messaging.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_cross_chain_messaging.test.ts --e2e_mode=$e2e_mode e2e-crowdfunding-and-claim: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_crowdfunding_and_claim.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_crowdfunding_and_claim.test.ts --e2e_mode=$e2e_mode e2e-public-cross-chain-messaging: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_public_cross_chain_messaging.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_public_cross_chain_messaging.test.ts --e2e_mode=$e2e_mode 
e2e-public-to-private-messaging: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_public_to_private_messaging.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_public_to_private_messaging.test.ts --e2e_mode=$e2e_mode e2e-account-contracts: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_account_contracts.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_account_contracts.test.ts --e2e_mode=$e2e_mode e2e-escrow-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_escrow_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_escrow_contract.test.ts --e2e_mode=$e2e_mode e2e-inclusion-proofs-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_inclusion_proofs_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_inclusion_proofs_contract.test.ts --e2e_mode=$e2e_mode e2e-pending-note-hashes-contract: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_pending_note_hashes_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_pending_note_hashes_contract.test.ts --e2e_mode=$e2e_mode e2e-ordering: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_ordering.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_ordering.test.ts --e2e_mode=$e2e_mode e2e-outbox: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_outbox.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_outbox.test.ts --e2e_mode=$e2e_mode uniswap-trade-on-l1-from-l2: - ARG e2e_build=false - DO +E2E_TEST --test=uniswap_trade_on_l1_from_l2.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=uniswap_trade_on_l1_from_l2.test.ts --e2e_mode=$e2e_mode integration-l1-publisher: - ARG e2e_build=false - DO +E2E_TEST --test=integration_l1_publisher.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=integration_l1_publisher.test.ts --e2e_mode=$e2e_mode e2e-cli: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_cli.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_cli.test.ts --e2e_mode=$e2e_mode e2e-persistence: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_persistence.test.ts --compose_file=./scripts/docker-compose-no-sandbox.yml --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_persistence.test.ts --compose_file=./scripts/docker-compose-no-sandbox.yml --e2e_mode=$e2e_mode e2e-browser: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_aztec_js_browser.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_aztec_js_browser.test.ts --e2e_mode=$e2e_mode e2e-card-game: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_card_game.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_card_game.test.ts --e2e_mode=$e2e_mode e2e-avm-simulator: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_avm_simulator.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_avm_simulator.test.ts --e2e_mode=$e2e_mode e2e-fees: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_fees.test.ts --enable_gas=1 --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_fees.test.ts --enable_gas=1 --e2e_mode=$e2e_mode e2e-dapp-subscription: - ARG e2e_build=false - DO +E2E_TEST --test=e2e_dapp_subscription.test.ts --enable_gas=1 --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=e2e_dapp_subscription.test.ts --enable_gas=1 --e2e_mode=$e2e_mode pxe: - ARG e2e_build=false - DO +E2E_TEST 
--test=pxe.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=pxe.test.ts --e2e_mode=$e2e_mode cli-docs-sandbox: - ARG e2e_build=false - DO +E2E_TEST --test=cli_docs_sandbox.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=cli_docs_sandbox.test.ts --e2e_mode=$e2e_mode e2e-docs-examples: - ARG e2e_build=false - DO +E2E_TEST --test=docs_examples.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=docs_examples.test.ts --e2e_mode=$e2e_mode guides-writing-an-account-contract: - ARG e2e_build=false - DO +E2E_TEST --test=guides/writing_an_account_contract.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=guides/writing_an_account_contract.test.ts --e2e_mode=$e2e_mode guides-dapp-testing: - ARG e2e_build=false - DO +E2E_TEST --test=guides/dapp_testing.test.ts --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=guides/dapp_testing.test.ts --e2e_mode=$e2e_mode guides-sample-dapp: - ARG e2e_build=false - DO +E2E_TEST --test=sample-dapp --e2e_build=$e2e_build + ARG e2e_mode=local + DO +E2E_TEST --test=sample-dapp --e2e_mode=$e2e_mode # TODO currently hangs for hour+ # guides-up-quick-start: -# ARG e2e_build=false -# DO +E2E_TEST --test=guides/up_quick_start.test.ts --e2e_build=$e2e_build +# ARG e2e_mode=local +# DO +E2E_TEST --test=guides/up_quick_start.test.ts --e2e_mode=$e2e_mode bench-publish-rollup: - ARG e2e_build=false - DO +E2E_TEST --test=benchmarks/bench_publish_rollup.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" --e2e_build=$e2e_build --compose_file=./scripts/docker-compose-no-sandbox.yml + ARG e2e_mode=local + DO +E2E_TEST --test=benchmarks/bench_publish_rollup.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" --e2e_mode=$e2e_mode --compose_file=./scripts/docker-compose-no-sandbox.yml # TODO need to investigate why this isn't working # bench-process-history: -# ARG e2e_build=false -# DO +E2E_TEST --test=benchmarks/bench_process_history.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" --e2e_build=$e2e_build --compose_file=./scripts/docker-compose-no-sandbox.yml +# ARG e2e_mode=local +# DO +E2E_TEST --test=benchmarks/bench_process_history.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" --e2e_mode=$e2e_mode --compose_file=./scripts/docker-compose-no-sandbox.yml # TODO need to investigate why this isn't working # bench-tx-size: -# ARG e2e_build=false -# DO +E2E_TEST --test=benchmarks/bench_tx_size_fees.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" --e2e_build=$e2e_build --compose_file=./scripts/docker-compose-no-sandbox.yml +# ARG e2e_mode=local +# DO +E2E_TEST --test=benchmarks/bench_tx_size_fees.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" --e2e_mode=$e2e_mode --compose_file=./scripts/docker-compose-no-sandbox.yml
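
A small usage sketch of the two-phase benchmark flow introduced above. It only restates invocations that appear in this diff (the +bench-base target, the aztecprotocol/bb-bench-base image, and the --bench_mode=cache argument); treat it as illustrative rather than authoritative, since runner sizing and extra flags are controlled by scripts/earthly-cloud.

#!/usr/bin/env bash
# Sketch: build the bench binaries once, then run each bench suite from the cached image.
set -euo pipefail
cd barretenberg/cpp

# Phase 1 (build runner): compile the *_bench binaries and push them as
# aztecprotocol/bb-bench-base:<arch>-<git hash> via the +bench-base target.
earthly-cloud build x86 --push +bench-base

# Phase 2 (bench runner): --bench_mode=cache makes +bench-base start FROM the
# pushed image instead of rebuilding, so only the benchmarks themselves run here.
earthly-cloud bench x86 --no-output +bench-client-ivc --bench_mode=cache
earthly-cloud bench x86 --no-output +bench-ultra-honk --bench_mode=cache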
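
Similarly, a hedged sketch of how the new three-way e2e_mode argument replaces the old e2e_build boolean in yarn-project/end-to-end/Earthfile. The mode names and the earthly-cloud invocations are taken from this diff; the plain `earthly` developer invocation is an assumption about local usage, not something this change mandates.

#!/usr/bin/env bash
# e2e_mode selects how the E2E_TEST function obtains its docker images:
#   local — default; build images locally and load them WITH DOCKER (E2E_TEST_LOCAL)
#   cache — x86 CI; docker compose reuses images already pushed to a registry (E2E_TEST_FROM_DOCKERHUB)
#   build — arm CI; build on an Earthly satellite so the GitHub runner need not be ARM (E2E_TEST_FROM_BUILD)
set -euo pipefail
cd yarn-project/end-to-end

# developer machine (assumed local invocation; e2e_mode defaults to local)
earthly +e2e-token-contract

# x86 CI job
earthly-cloud build x86 --no-output +e2e-token-contract --e2e_mode=cache

# arm CI job
earthly-cloud build arm --no-output +e2e-token-contract --e2e_mode=build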