diff --git a/.circleci/config.yml b/.circleci/config.yml
index a4dab5f9f5d..374aff1dd79 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -193,18 +193,6 @@ jobs:
           command: build bb.js
           aztec_manifest_key: bb.js
 
-  bb-js-tests:
-    docker:
-      - image: aztecprotocol/alpine-build-image
-    resource_class: small
-    steps:
-      - *checkout
-      - *setup_env
-      - run:
-          name: "Build and test"
-          command: cond_spot_run_test bb.js 32 ./scripts/run_tests
-          aztec_manifest_key: bb.js
-
   # Noir
   noir-x86_64:
     docker:
@@ -278,18 +266,6 @@ jobs:
           command: cond_spot_run_build noir-packages-tests 32
           aztec_manifest_key: noir-packages-tests
 
-  noir-compile-acir-tests:
-    docker:
-      - image: aztecprotocol/alpine-build-image
-    resource_class: small
-    steps:
-      - *checkout
-      - *setup_env
-      - run:
-          name: "Build"
-          command: cond_spot_run_build noir-compile-acir-tests 32
-          aztec_manifest_key: noir-compile-acir-tests
-
   avm-transpiler:
     docker:
       - image: aztecprotocol/alpine-build-image
@@ -302,42 +278,6 @@ jobs:
           command: cond_spot_run_build avm-transpiler 32
           aztec_manifest_key: avm-transpiler
 
-  barretenberg-acir-tests-bb:
-    docker:
-      - image: aztecprotocol/alpine-build-image
-    resource_class: small
-    steps:
-      - *checkout
-      - *setup_env
-      - run:
-          name: "Build and test"
-          command: cond_spot_run_build barretenberg-acir-tests-bb 32
-          aztec_manifest_key: barretenberg-acir-tests-bb
-
-  barretenberg-acir-tests-bb-sol:
-    docker:
-      - image: aztecprotocol/alpine-build-image
-    resource_class: small
-    steps:
-      - *checkout
-      - *setup_env
-      - run:
-          name: "Build and test"
-          command: cond_spot_run_build barretenberg-acir-tests-bb-sol 32
-          aztec_manifest_key: barretenberg-acir-tests-bb-sol
-
-  bb-js-acir-tests:
-    docker:
-      - image: aztecprotocol/alpine-build-image
-    resource_class: small
-    steps:
-      - *checkout
-      - *setup_env
-      - run:
-          name: "Build and test"
-          command: cond_spot_run_build barretenberg-acir-tests-bb.js 32
-          aztec_manifest_key: barretenberg-acir-tests-bb.js
-
   l1-contracts:
     machine:
       image: default
@@ -532,12 +472,6 @@ defaults: &defaults
       event: fail
       branch_pattern: "master"
 
-bb_acir_tests: &bb_acir_tests
-  requires:
-    - barretenberg-x86_64-linux-clang-assert
-    - noir-compile-acir-tests
-  <<: *defaults
-
 defaults_yarn_project_pre_join: &defaults_yarn_project_pre_join
   requires:
     - yarn-project-pre-join
@@ -593,10 +527,6 @@ workflows:
             - noir-ecr-manifest
             - noir-packages
           <<: *defaults
-      - noir-compile-acir-tests:
-          requires:
-            - noir-ecr-manifest
-          <<: *defaults
 
       # Transpiler
       - avm-transpiler: *defaults
@@ -607,28 +537,11 @@
       - barretenberg-x86_64-linux-clang-assert: *defaults
       - barretenberg-x86_64-linux-clang-fuzzing: *defaults
       - barretenberg-wasm-linux-clang: *defaults
-      - barretenberg-x86_64-linux-clang-sol: *defaults
-      - barretenberg-acir-tests-bb: *bb_acir_tests
-      - barretenberg-acir-tests-bb-sol:
-          requires:
-            - barretenberg-x86_64-linux-clang-assert
-            - barretenberg-x86_64-linux-clang-sol
-            - noir-compile-acir-tests
-          <<: *bb_acir_tests
       - barretenberg-docs: *defaults
       - bb-js:
           requires:
            - barretenberg-wasm-linux-clang
          <<: *defaults
-      - bb-js-tests:
-          requires:
-            - bb-js
-          <<: *defaults
-      - bb-js-acir-tests:
-          requires:
-            - bb-js
-            - noir-compile-acir-tests
-          <<: *defaults
       - l1-contracts: *defaults
@@ -674,9 +587,6 @@
             - barretenberg-x86_64-linux-clang-assert
             - barretenberg-x86_64-linux-clang-fuzzing
             - barretenberg-wasm-linux-clang
-            - barretenberg-x86_64-linux-clang-sol
-            - barretenberg-acir-tests-bb
-            - barretenberg-acir-tests-bb-sol
             - barretenberg-docs
             - noir-packages-tests
             - e2e-join
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b49dc2e7410..369074f09b9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -128,6 +128,23 @@ jobs:
         # limit our parallelism to half our cores
         run: earthly-ci --no-output +test --hardware_concurrency=64
 
+  bb-js-test:
+    needs: setup
+    runs-on: ${{ github.actor }}-x86
+    steps:
+      - {
+          uses: actions/checkout@v4,
+          with: { ref: "${{ github.event.pull_request.head.sha }}" },
+        }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          concurrency_key: bb-js-test-${{ github.actor }}-x86
+      - name: "bb.js Tests"
+        working-directory: ./barretenberg/ts/
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./+test
+
   noir-projects:
     needs: setup
     runs-on: ${{ inputs.username || github.actor }}-x86
@@ -194,6 +211,73 @@ jobs:
         timeout-minutes: 25
         run: earthly-ci --no-output ./yarn-project/+prover-client-test
 
+  build-acir-tests:
+    needs: build
+    runs-on: ${{ github.actor }}-x86
+    steps:
+      - {
+          uses: actions/checkout@v4,
+          with: { ref: "${{ github.event.pull_request.head.sha }}" },
+        }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          concurrency_key: build-acir-tests-${{ github.actor }}-x86
+      - name: "Build Acir Tests"
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./noir/+build-acir-tests
+
+  barretenberg-acir-tests-bb:
+    needs: build-acir-tests
+    runs-on: ${{ github.actor }}-x86
+    steps:
+      - {
+          uses: actions/checkout@v4,
+          with: { ref: "${{ github.event.pull_request.head.sha }}" },
+        }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          concurrency_key: barretenberg-acir-tests-bb-${{ github.actor }}-x86
+      - name: "BB Native Acir Tests"
+        working-directory: ./noir/
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./+barretenberg-acir-tests-bb
+
+  barretenberg-acir-tests-sol:
+    needs: build-acir-tests
+    runs-on: ${{ github.actor }}-x86
+    steps:
+      - {
+          uses: actions/checkout@v4,
+          with: { ref: "${{ github.event.pull_request.head.sha }}" },
+        }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          concurrency_key: barretenberg-acir-tests-sol-${{ github.actor }}-x86
+      - name: "BB Solidity Acir Tests"
+        working-directory: ./noir/
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./+barretenberg-acir-tests-sol
+
+  barretenberg-acir-tests-bb-js:
+    needs: build-acir-tests
+    runs-on: ${{ github.actor }}-x86
+    steps:
+      - {
+          uses: actions/checkout@v4,
+          with: { ref: "${{ github.event.pull_request.head.sha }}" },
+        }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          concurrency_key: barretenberg-acir-tests-bb-js-${{ github.actor }}-x86
+      - name: "BB JS Acir Tests"
+        working-directory: ./noir/
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./+barretenberg-acir-tests-bb.js
+
   docs-preview:
     needs: setup
     runs-on: ${{ inputs.username || github.actor }}-x86
diff --git a/barretenberg/Earthfile b/barretenberg/Earthfile
new file mode 100644
index 00000000000..4c203496ec0
--- /dev/null
+++ b/barretenberg/Earthfile
@@ -0,0 +1,14 @@
+VERSION 0.8
+
+acir-tests:
+    FROM ../build-images+build
+    WORKDIR /usr/src/barretenberg
+    COPY ./acir_tests .
+    SAVE ARTIFACT ./*
+
+sol:
+    FROM ../build-images+build
+    WORKDIR /usr/src/barretenberg
+    COPY ./sol .
+    SAVE ARTIFACT ./*
+
diff --git a/barretenberg/acir_tests/sol-test/src/index.js b/barretenberg/acir_tests/sol-test/src/index.js
index 59d43bea54f..029074c41e1 100644
--- a/barretenberg/acir_tests/sol-test/src/index.js
+++ b/barretenberg/acir_tests/sol-test/src/index.js
@@ -1,7 +1,7 @@
 import fs from "fs";
-const {readFileSync, promises: fsPromises} = fs;
-import {spawn} from "child_process";
-import {ethers} from "ethers";
+const { readFileSync, promises: fsPromises } = fs;
+import { spawn } from "child_process";
+import { ethers } from "ethers";
 import solc from "solc";
 
 const NUMBER_OF_FIELDS_IN_PROOF = 93;
@@ -24,7 +24,7 @@ const getEnvVar = (envvar) => {
     throw new Error(`Missing environment variable ${envvar}`);
   }
   return varVal;
-}
+};
 
 // Test name is passed into environment from `flows/sol.sh`
 const testName = getEnvVar("TEST_NAME");
@@ -34,46 +34,46 @@ const keyPath = getEnvVar("KEY_PATH");
 const verifierPath = getEnvVar("VERIFIER_PATH");
 const testPath = getEnvVar("TEST_PATH");
 const basePath = getEnvVar("BASE_PATH");
-const encoding = {encoding: "utf8"};
-const [key, test, verifier, base] = await Promise.all(
-  [
-    fsPromises.readFile(keyPath, encoding),
-    fsPromises.readFile(testPath, encoding),
-    fsPromises.readFile(verifierPath, encoding),
-    fsPromises.readFile(basePath, encoding)
-  ]);
+const encoding = { encoding: "utf8" };
+const [key, test, verifier, base] = await Promise.all([
+  fsPromises.readFile(keyPath, encoding),
+  fsPromises.readFile(testPath, encoding),
+  fsPromises.readFile(verifierPath, encoding),
+  fsPromises.readFile(basePath, encoding),
+]);
 
 var input = {
-  language: 'Solidity',
+  language: "Solidity",
   sources: {
-    'Key.sol': {
-      content: key
+    "Key.sol": {
+      content: key,
     },
-    'Test.sol': {
-      content: test
+    "Test.sol": {
+      content: test,
     },
-    'Verifier.sol': {
-      content: verifier
+    "Verifier.sol": {
+      content: verifier,
+    },
+    "BaseUltraVerifier.sol": {
+      content: base,
     },
-    'BaseUltraVerifier.sol': {
-      content: base
-    }
   },
-  settings: { // we require the optimizer
+  settings: {
+    // we require the optimizer
     optimizer: {
       enabled: true,
-      runs: 200
+      runs: 200,
     },
     outputSelection: {
-      '*': {
-        '*': ['evm.bytecode.object', 'abi']
-      }
-    }
-  }
+      "*": {
+        "*": ["evm.bytecode.object", "abi"],
+      },
+    },
+  },
 };
 
 var output = JSON.parse(solc.compile(JSON.stringify(input)));
-const contract = output.contracts['Test.sol']['Test'];
+const contract = output.contracts["Test.sol"]["Test"];
 
 const bytecode = contract.evm.bytecode.object;
 const abi = contract.abi;
@@ -91,7 +91,7 @@ const launchAnvil = async (port) => {
   handle.stderr.on("data", (data) => {
     const str = data.toString();
     if (str.includes("error binding")) {
-      reject("we go again baby")
+      reject("we go again baby");
     }
   });
 
@@ -105,23 +105,23 @@
   });
 
   return handle;
-}
+};
 
 /**
  * Deploys the contract
  * @param {ethers.Signer} signer
 */
 const deploy = async (signer) => {
-    const factory = new ethers.ContractFactory(abi, bytecode, signer);
-    const deployment = await factory.deploy();
-    const deployed = await deployment.waitForDeployment();
-    return await deployed.getAddress();
-}
+  const factory = new ethers.ContractFactory(abi, bytecode, signer);
+  const deployment = await factory.deploy();
+  const deployed = await deployment.waitForDeployment();
+  return await deployed.getAddress();
+};
 
 /**
  * Takes in a proof as fields, and returns the public inputs, as well as the number of public inputs
- * @param {Array} proofAsFields
- * @return {Array} [number, Array]
+ * @param {Array} proofAsFields
+ * @return {Array} [number, Array]
 */
 const readPublicInputs = (proofAsFields) => {
   const publicInputs = [];
@@ -131,11 +131,11 @@ const readPublicInputs = (proofAsFields) => {
     publicInputs.push(proofAsFields[i]);
   }
   return [numPublicInputs, publicInputs];
-}
+};
 
 /**
  * Get Anvil
- *
+ *
 * Creates an anvil instance on a random port, and returns the instance and the port
 * If the port is already allocated, it will try again
 * @returns {[ChildProcess, Number]} [anvil, port]
@@ -149,43 +149,55 @@ const getAnvil = async () => {
     // yes this looks dangerous, but it relies on 0-10000 being hard to collide on
     return getAnvil();
   }
-}
+};
+
+const getProvider = async (port) => {
+  while (true) {
+    try {
+      const url = `http://127.0.0.1:${port}`;
+      return new ethers.JsonRpcProvider(url);
+    } catch (e) {
+      console.log(e);
+      await new Promise((resolve) => setTimeout(resolve, 5000));
+    }
+  }
+};
 
 const [anvil, randomPort] = await getAnvil();
 
 const killAnvil = () => {
   anvil.kill();
-  console.log(testName, " complete")
-}
+  console.log(testName, " complete");
+};
 
 try {
   const proofAsFieldsPath = getEnvVar("PROOF_AS_FIELDS");
   const proofAsFields = readFileSync(proofAsFieldsPath);
-  const [numPublicInputs, publicInputs] = readPublicInputs(JSON.parse(proofAsFields.toString()));
+  const [numPublicInputs, publicInputs] = readPublicInputs(
+    JSON.parse(proofAsFields.toString())
+  );
   const proofPath = getEnvVar("PROOF");
   const proof = readFileSync(proofPath);
-
+
   // Cut the number of public inputs off of the proof string
-  const proofStr = `0x${proof.toString("hex").substring(64*numPublicInputs)}`;
+  const proofStr = `0x${proof.toString("hex").substring(64 * numPublicInputs)}`;
 
-  const key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
-  const provider = new ethers.JsonRpcProvider(`http://localhost:${randomPort}`);
+  const key =
+    "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
+  const provider = await getProvider(randomPort);
   const signer = new ethers.Wallet(key, provider);
 
-  // deploy
+  // deploy
   const address = await deploy(signer);
   const contract = new ethers.Contract(address, abi, signer);
 
   const result = await contract.test(proofStr, publicInputs);
   if (!result) throw new Error("Test failed");
-}
-catch (e) {
-  console.error(testName, " failed")
-  console.log(e)
+} catch (e) {
+  console.error(testName, " failed");
+  console.log(e);
   throw e;
-}
-finally {
+} finally {
   // Kill anvil at the end of running
   killAnvil();
 }
-
diff --git a/barretenberg/cpp/Earthfile b/barretenberg/cpp/Earthfile
index 31502decf08..7d3c42a6e64 100644
--- a/barretenberg/cpp/Earthfile
+++ b/barretenberg/cpp/Earthfile
@@ -53,6 +53,11 @@ preset-wasm:
         SAVE IMAGE --cache-hint
     END
 
+preset-sol:
+    FROM +source
+    RUN cmake --preset clang16 && cmake --build --preset clang16 --target solidity_key_gen solidity_proof_gen
+    SAVE ARTIFACT build/bin
+
 preset-wasm-threads:
     FROM +source
     RUN cmake --preset wasm-threads -Bbuild && cmake --build build --target barretenberg.wasm
diff --git a/barretenberg/ts/Earthfile b/barretenberg/ts/Earthfile
index fce0ab36807..981dda905d7 100644
--- a/barretenberg/ts/Earthfile
+++ b/barretenberg/ts/Earthfile
@@ -45,4 +45,5 @@ build:
 
 test:
     BUILD +test-prettier-format
-    RUN yarn test
+    # We frequently see tests time out in GitHub Actions, so increase the timeout here
+    RUN yarn test --testTimeout=300000
diff --git a/noir/Earthfile b/noir/Earthfile
index 18faae05e2e..8a4db6aee43 100644
--- a/noir/Earthfile
+++ b/noir/Earthfile
@@ -81,6 +81,104 @@ build:
     BUILD +nargo
     BUILD +packages
 
+build-acir-tests:
+    FROM ../build-images/+build
+    COPY +nargo/ /usr/src/noir-repo/target/release
+    ENV PATH="/usr/src/noir-repo/target/release:${PATH}"
+    WORKDIR /usr/src/noir-repo/test_programs
+    COPY ./noir-repo/test_programs/ /usr/src/noir-repo/test_programs/
+    RUN /usr/src/noir-repo/target/release/nargo --version
+    # We run this with parallel compilation switched off, which isn't ideal.
+    # There seem to be problems with this when running under Earthly; see the bottom of this file*
+    RUN ./rebuild.sh true
+    SAVE ARTIFACT /usr/src/noir-repo/test_programs/acir_artifacts/*
+
+barretenberg-acir-tests-bb:
+    FROM ../build-images/+build
+
+    COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb
+    COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests
+    COPY +build-acir-tests/ /usr/src/acir_artifacts
+
+    WORKDIR /usr/src/barretenberg/acir_tests
+    RUN rm -rf ./acir_tests
+
+    ENV TEST_SRC /usr/src/acir_artifacts
+    ENV VERBOSE=1
+    # Run every acir test through the native bb build's prove_then_verify flow for UltraPlonk.
+    # This ensures we test independent pk construction through real/garbage witness data paths.
+    RUN FLOW=prove_then_verify ./run_acir_tests.sh
+    # Construct and separately verify an UltraHonk proof for a single program
+    RUN FLOW=prove_then_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof
+    # Construct and separately verify a GoblinUltraHonk proof for all ACIR programs
+    RUN FLOW=prove_then_verify_goblin_ultra_honk ./run_acir_tests.sh
+    # Construct and verify an UltraHonk proof for a single program
+    RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof
+    # Construct and verify a Goblin UltraHonk (GUH) proof for a single arbitrary program
+    RUN FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array
+    # Construct and verify an UltraHonk proof for all ACIR programs using the new witness stack workflow
+    RUN FLOW=prove_and_verify_ultra_honk_program ./run_acir_tests.sh
+    # This is a "full" Goblin flow. It constructs and verifies four proofs: GoblinUltraHonk, ECCVM, Translator, and merge
+    RUN FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array
+    # Run 1_mul through the native bb build, all_cmds flow, to test all CLI args.
+    RUN FLOW=all_cmds ./run_acir_tests.sh 1_mul
+
+
+barretenberg-acir-tests-sol:
+    FROM ../build-images/+build
+
+    COPY ../barretenberg/cpp/+preset-sol/ /usr/src/barretenberg/cpp/build
+    COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb
+    COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests
+    COPY ../barretenberg/+sol/ /usr/src/barretenberg/sol
+    COPY +build-acir-tests/ /usr/src/acir_artifacts
+
+    WORKDIR /usr/src/barretenberg/acir_tests
+
+    ENV TEST_SRC /usr/src/acir_artifacts
+    ENV VERBOSE=1
+
+    RUN (cd sol-test && yarn)
+    RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof
+
+barretenberg-acir-tests-bb.js:
+    # Playwright is not supported on the base image ubuntu:noble; it results in unmet dependencies
+    FROM node:18.19.0
+    RUN apt update && apt install -y curl jq lsof
+
+    COPY ../barretenberg/ts/+build/build/ /usr/src/barretenberg/ts
+    COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests
+    COPY +build-acir-tests/ /usr/src/acir_artifacts
+
+    WORKDIR /usr/src/barretenberg/acir_tests
+
+    # Build/install ts apps.
+    RUN cd browser-test-app && yarn && yarn build
+    RUN cd headless-test && yarn && npx playwright install && npx playwright install-deps
+    RUN cd ../ts && yarn
+    ENV VERBOSE=1
+    ENV TEST_SRC /usr/src/acir_artifacts
+
+    # Run double_verify_proof through bb.js on node to check 512k support.
+    RUN BIN=../ts/dest/node/main.js FLOW=prove_then_verify ./run_acir_tests.sh double_verify_proof
+    # Run a single arbitrary test not involving recursion through bb.js for UltraHonk
+    RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh 6_array
+    # Run a single arbitrary test not involving recursion through bb.js for GoblinUltraHonk
+    RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array
+    # Run a single arbitrary test not involving recursion through bb.js for full Goblin
+    RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array
+    # Run 1_mul through the bb.js build, all_cmds flow, to test all CLI args.
+    RUN BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_acir_tests.sh 1_mul
+    # Run double_verify_proof through bb.js on chrome, testing multi-threaded browser support.
+    # TODO: Currently headless webkit doesn't seem to have shared memory, so skipping the multi-threaded test.
+    RUN BROWSER=chrome THREAD_MODEL=mt ./run_acir_tests_browser.sh double_verify_proof
+    # Run 1_mul through bb.js on chrome/webkit, testing single-threaded browser support.
+    RUN BROWSER=chrome THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul
+    # Commented out for now as it fails intermittently. Unreproducible on mainframe.
+    # See https://github.com/AztecProtocol/aztec-packages/issues/2104
+    #RUN BROWSER=webkit THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul
+
+
 # TOOD
 # test-packages
 # FROM aztecprotocol/noir AS noir
@@ -116,3 +214,43 @@ build:
 # # Don't waste time pushing a huge container back to ECR as nothing needs the output.
 # FROM scratch
 # COPY --from=0 /usr/src/noir/README.md /usr/src/noir/README.md
+
+
+
+#* Analysis of compiling Acir tests inside/outside Earthly
+# Each test run compiles the full suite, either in series or in parallel, either inside or outside Earthly.
+# Each test prints the contents of the target directory of the eddsa circuit after compilation
+# You can see that the 'Inside Earthly Parallel' run has an acir.gz file of a different size
+# This results in a proof that fails verification
+#
+# Outside Earthly Parallel
+
+# [eddsa] Circuit witness successfully solved
+# [eddsa] Witness saved to /mnt/user-data/phil/aztec3-packages/noir/noir-repo/test_programs/execution_success/eddsa/target/witness.gz
+# total 2544
+# -rw-rw-r-- 1 phil phil 904034 May 3 10:40 acir.gz
+# -rw-rw-r-- 1 phil phil 1696442 May 3 10:40 witness.gz
+
+# Outside Earthly Series
+
+# [eddsa] Circuit witness successfully solved
+# [eddsa] Witness saved to /mnt/user-data/phil/aztec3-packages/noir/noir-repo/test_programs/execution_success/eddsa/target/witness.gz
+# total 2544
+# -rw-rw-r-- 1 phil phil 904034 May 3 10:43 acir.gz
+# -rw-rw-r-- 1 phil phil 1696442 May 3 10:43 witness.gz
+
+# Inside Earthly Parallel
+
+# +build-acir-tests | [eddsa] Circuit witness successfully solved
+# +build-acir-tests | [eddsa] Witness saved to /usr/src/noir-repo/test_programs/execution_success/eddsa/target/witness.gz
+# +build-acir-tests | total 2472
+# +build-acir-tests | -rw-r--r-- 1 root root 830340 May 3 10:47 acir.gz
+# +build-acir-tests | -rw-r--r-- 1 root root 1696442 May 3 10:47 witness.gz
+
+# Inside Earthly Series
+
+# +build-acir-tests | [eddsa] Circuit witness successfully solved
+# +build-acir-tests | [eddsa] Witness saved to /usr/src/noir-repo/test_programs/execution_success/eddsa/target/witness.gz
+# +build-acir-tests | total 2544
+# +build-acir-tests | -rw-r--r-- 1 root root 904034 May 3 10:50 acir.gz
+# +build-acir-tests | -rw-r--r-- 1 root root 1696442 May 3 10:51 witness.gz
diff --git a/noir/noir-repo/test_programs/rebuild.sh b/noir/noir-repo/test_programs/rebuild.sh
index a3137920fd5..51e97278281 100755
--- a/noir/noir-repo/test_programs/rebuild.sh
+++ b/noir/noir-repo/test_programs/rebuild.sh
@@ -1,6 +1,8 @@
 #!/usr/bin/env bash
 set -e
 
+NO_PARALLEL=${1:-}
+
 process_dir() {
   local dir=$1
   local current_dir=$2
@@ -46,10 +48,17 @@ done
 
 # Process each directory in parallel
 pids=()
+if [ -z $NO_PARALLEL ]; then
 for dir in "${dirs_to_process[@]}"; do
   process_dir "$dir" "$current_dir" &
   pids+=($!)
 done
+else
+for dir in "${dirs_to_process[@]}"; do
+  process_dir "$dir" "$current_dir"
+  pids+=($!)
+done
+fi
 
 # Check the exit status of each background job.
 for pid in "${pids[@]}"; do
@@ -58,5 +67,7 @@ done
 
 # Exit with a failure status if any job failed.
 if [ ! -z "$exit_status" ]; then
+  echo "Rebuild failed!"
   exit $exit_status
-fi
\ No newline at end of file
+fi
+echo "Rebuild Succeeded!"
\ No newline at end of file
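
A note on reproducing these stages locally: the GitHub Actions jobs above invoke the Earthly targets through the repository's earthly-ci wrapper. Below is a minimal sketch of the equivalent local invocations from the repository root, assuming a stock `earthly` installation is available (the wrapper and its runner-specific flags are an assumption left out here):

    # Compile the ACIR test artifacts once; Earthly rebuilds/caches them for the consumer targets below.
    earthly --no-output ./noir/+build-acir-tests
    # Native bb, Solidity verifier, and bb.js consumers of those artifacts.
    earthly --no-output ./noir/+barretenberg-acir-tests-bb
    earthly --no-output ./noir/+barretenberg-acir-tests-sol
    earthly --no-output ./noir/+barretenberg-acir-tests-bb.js
    # bb.js unit tests, mirroring the bb-js-test job.
    earthly --no-output ./barretenberg/ts/+test

Because each acir-test target declares its inputs with COPY +build-acir-tests/ (and pulls bb and the bb.js build from the barretenberg targets the same way), Earthly resolves and caches those dependencies itself; the only ordering expressed elsewhere is the needs: edges in ci.yml.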