Skip to content

Commit

Permalink
feat: Sync from aztec-packages (#6345)
Browse files Browse the repository at this point in the history
Automated pull of Noir development from
[aztec-packages](https://github.com/AztecProtocol/aztec-packages).
BEGIN_COMMIT_OVERRIDE
chore!: replace usage of vector in keccakf1600 input with array
(AztecProtocol/aztec-packages#9350)
chore!: remove noir_js_backend_barretenberg
(AztecProtocol/aztec-packages#9338)
END_COMMIT_OVERRIDE

---------

Co-authored-by: Tom French <tom@tomfren.ch>
  • Loading branch information
AztecBot and TomAFrench authored Oct 24, 2024
1 parent 4d87c9a commit 3925228
Show file tree
Hide file tree
Showing 38 changed files with 114 additions and 608 deletions.
2 changes: 1 addition & 1 deletion .aztec-sync-commit
Original file line number Diff line number Diff line change
@@ -1 +1 @@
ab0c80d7493e6bdbc58dcd517b248de6ddd6fd67
07d6dc29db2eb04154b8f0c66bd1efa74c0e8b9d
4 changes: 0 additions & 4 deletions .github/scripts/backend-barretenberg-build.sh

This file was deleted.

4 changes: 0 additions & 4 deletions .github/scripts/backend-barretenberg-test.sh

This file was deleted.

11 changes: 4 additions & 7 deletions .github/workflows/test-js-packages.yml
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: noirc_abi_wasm
path: |
path: |
./tooling/noirc_abi_wasm/nodejs
./tooling/noirc_abi_wasm/web
retention-days: 10
Expand Down Expand Up @@ -263,9 +263,6 @@ jobs:
- name: Build noir_js_types
run: yarn workspace @noir-lang/types build

- name: Build barretenberg wrapper
run: yarn workspace @noir-lang/backend_barretenberg build

- name: Run noir_js tests
run: |
yarn workspace @noir-lang/noir_js build
Expand Down Expand Up @@ -416,7 +413,7 @@ jobs:
- name: Setup `integration-tests`
run: |
# Note the lack of spaces between package names.
PACKAGES_TO_BUILD="@noir-lang/types,@noir-lang/backend_barretenberg,@noir-lang/noir_js"
PACKAGES_TO_BUILD="@noir-lang/types,@noir-lang/noir_js"
yarn workspaces foreach -vtp --from "{$PACKAGES_TO_BUILD}" run build
- name: Run `integration-tests`
Expand Down Expand Up @@ -461,7 +458,7 @@ jobs:
- name: Setup `integration-tests`
run: |
# Note the lack of spaces between package names.
PACKAGES_TO_BUILD="@noir-lang/types,@noir-lang/backend_barretenberg,@noir-lang/noir_js"
PACKAGES_TO_BUILD="@noir-lang/types,@noir-lang/noir_js"
yarn workspaces foreach -vtp --from "{$PACKAGES_TO_BUILD}" run build
- name: Run `integration-tests`
Expand Down Expand Up @@ -565,7 +562,7 @@ jobs:
runs-on: ubuntu-latest
# We want this job to always run (even if the dependent jobs fail) as we want this job to fail rather than skipping.
if: ${{ always() }}
needs:
needs:
- test-acvm_js-node
- test-acvm_js-browser
- test-noirc-abi
Expand Down
12 changes: 6 additions & 6 deletions acvm-repo/acir/codegen/acir.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -286,7 +286,7 @@ namespace Program {
};

struct Keccakf1600 {
Program::HeapVector message;
Program::HeapArray input;
Program::HeapArray output;

friend bool operator==(const Keccakf1600&, const Keccakf1600&);
Expand Down Expand Up @@ -424,8 +424,8 @@ namespace Program {
};

struct Sha256Compression {
Program::HeapVector input;
Program::HeapVector hash_values;
Program::HeapArray input;
Program::HeapArray hash_values;
Program::HeapArray output;

friend bool operator==(const Sha256Compression&, const Sha256Compression&);
Expand Down Expand Up @@ -3498,7 +3498,7 @@ Program::BlackBoxOp::Blake3 serde::Deserializable<Program::BlackBoxOp::Blake3>::
namespace Program {

inline bool operator==(const BlackBoxOp::Keccakf1600 &lhs, const BlackBoxOp::Keccakf1600 &rhs) {
if (!(lhs.message == rhs.message)) { return false; }
if (!(lhs.input == rhs.input)) { return false; }
if (!(lhs.output == rhs.output)) { return false; }
return true;
}
Expand All @@ -3523,15 +3523,15 @@ namespace Program {
template <>
template <typename Serializer>
void serde::Serializable<Program::BlackBoxOp::Keccakf1600>::serialize(const Program::BlackBoxOp::Keccakf1600 &obj, Serializer &serializer) {
serde::Serializable<decltype(obj.message)>::serialize(obj.message, serializer);
serde::Serializable<decltype(obj.input)>::serialize(obj.input, serializer);
serde::Serializable<decltype(obj.output)>::serialize(obj.output, serializer);
}

template <>
template <typename Deserializer>
Program::BlackBoxOp::Keccakf1600 serde::Deserializable<Program::BlackBoxOp::Keccakf1600>::deserialize(Deserializer &deserializer) {
Program::BlackBoxOp::Keccakf1600 obj;
obj.message = serde::Deserializable<decltype(obj.message)>::deserialize(deserializer);
obj.input = serde::Deserializable<decltype(obj.input)>::deserialize(deserializer);
obj.output = serde::Deserializable<decltype(obj.output)>::deserialize(deserializer);
return obj;
}
Expand Down
6 changes: 3 additions & 3 deletions acvm-repo/brillig/src/black_box.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ pub enum BlackBoxOp {
},
/// Keccak Permutation function of 1600 width
Keccakf1600 {
message: HeapVector,
input: HeapArray,
output: HeapArray,
},
/// Verifies a ECDSA signature over the secp256k1 curve.
Expand Down Expand Up @@ -102,8 +102,8 @@ pub enum BlackBoxOp {
len: MemoryAddress,
},
Sha256Compression {
input: HeapVector,
hash_values: HeapVector,
input: HeapArray,
hash_values: HeapArray,
output: HeapArray,
},
ToRadix {
Expand Down
8 changes: 4 additions & 4 deletions acvm-repo/brillig_vm/src/black_box.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,8 @@ pub(crate) fn evaluate_black_box<F: AcirField, Solver: BlackBoxFunctionSolver<F>
memory.write_slice(memory.read_ref(output.pointer), &to_value_vec(&bytes));
Ok(())
}
BlackBoxOp::Keccakf1600 { message, output } => {
let state_vec: Vec<u64> = read_heap_vector(memory, message)
BlackBoxOp::Keccakf1600 { input, output } => {
let state_vec: Vec<u64> = read_heap_array(memory, input)
.iter()
.map(|&memory_value| memory_value.try_into().unwrap())
.collect();
Expand Down Expand Up @@ -292,7 +292,7 @@ pub(crate) fn evaluate_black_box<F: AcirField, Solver: BlackBoxFunctionSolver<F>
}
BlackBoxOp::Sha256Compression { input, hash_values, output } => {
let mut message = [0; 16];
let inputs = read_heap_vector(memory, input);
let inputs = read_heap_array(memory, input);
if inputs.len() != 16 {
return Err(BlackBoxResolutionError::Failed(
BlackBoxFunc::Sha256Compression,
Expand All @@ -303,7 +303,7 @@ pub(crate) fn evaluate_black_box<F: AcirField, Solver: BlackBoxFunctionSolver<F>
message[i] = input.try_into().unwrap();
}
let mut state = [0; 8];
let values = read_heap_vector(memory, hash_values);
let values = read_heap_array(memory, hash_values);
if values.len() != 8 {
return Err(BlackBoxResolutionError::Failed(
BlackBoxFunc::Sha256Compression,
Expand Down
64 changes: 32 additions & 32 deletions compiler/integration-tests/package.json
Original file line number Diff line number Diff line change
@@ -1,34 +1,34 @@
{
"name": "integration-tests",
"license": "(MIT OR Apache-2.0)",
"main": "index.js",
"private": true,
"scripts": {
"build": "echo Integration Test build step",
"test": "yarn test:browser && yarn test:node",
"test:node": "bash ./scripts/setup.sh && hardhat test test/node/prove_and_verify.test.ts && hardhat test test/node/smart_contract_verifier.test.ts && hardhat test test/node/onchain_recursive_verification.test.ts",
"test:browser": "web-test-runner",
"test:integration:browser": "web-test-runner test/browser/**/*.test.ts",
"test:integration:browser:watch": "web-test-runner test/browser/**/*.test.ts --watch",
"lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0"
},
"dependencies": {
"@noir-lang/backend_barretenberg": "workspace:*",
"@noir-lang/noir_js": "workspace:*",
"@noir-lang/noir_wasm": "workspace:*",
"@nomicfoundation/hardhat-chai-matchers": "^2.0.0",
"@nomicfoundation/hardhat-ethers": "^3.0.0",
"@web/dev-server-esbuild": "^0.3.6",
"@web/dev-server-import-maps": "^0.2.0",
"@web/test-runner": "^0.18.1",
"@web/test-runner-playwright": "^0.11.0",
"eslint": "^8.57.0",
"eslint-plugin-prettier": "^5.1.3",
"ethers": "^6.7.1",
"hardhat": "^2.22.6",
"prettier": "3.2.5",
"smol-toml": "^1.1.2",
"toml": "^3.0.0",
"tslog": "^4.9.2"
}
"name": "integration-tests",
"license": "(MIT OR Apache-2.0)",
"main": "index.js",
"private": true,
"scripts": {
"build": "echo Integration Test build step",
"test": "yarn test:browser && yarn test:node",
"test:node": "bash ./scripts/setup.sh && hardhat test test/node/prove_and_verify.test.ts && hardhat test test/node/smart_contract_verifier.test.ts && hardhat test test/node/onchain_recursive_verification.test.ts",
"test:browser": "web-test-runner",
"test:integration:browser": "web-test-runner test/browser/**/*.test.ts",
"test:integration:browser:watch": "web-test-runner test/browser/**/*.test.ts --watch",
"lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0"
},
"dependencies": {
"@aztec/bb.js": "0.60.0",
"@noir-lang/noir_js": "workspace:*",
"@noir-lang/noir_wasm": "workspace:*",
"@nomicfoundation/hardhat-chai-matchers": "^2.0.0",
"@nomicfoundation/hardhat-ethers": "^3.0.0",
"@web/dev-server-esbuild": "^0.3.6",
"@web/dev-server-import-maps": "^0.2.0",
"@web/test-runner": "^0.18.1",
"@web/test-runner-playwright": "^0.11.0",
"eslint": "^8.57.0",
"eslint-plugin-prettier": "^5.1.3",
"ethers": "^6.7.1",
"hardhat": "^2.22.6",
"prettier": "3.2.5",
"smol-toml": "^1.1.2",
"toml": "^3.0.0",
"tslog": "^4.9.2"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import * as TOML from 'smol-toml';
import { compile, createFileManager } from '@noir-lang/noir_wasm';
import { Noir } from '@noir-lang/noir_js';
import { InputMap } from '@noir-lang/noirc_abi';
import { BarretenbergBackend } from '@noir-lang/backend_barretenberg';
import { UltraPlonkBackend } from '@aztec/bb.js';

import { getFile } from './utils.js';

Expand Down Expand Up @@ -59,7 +59,7 @@ test_cases.forEach((testInfo) => {
const program = new Noir(noir_program);
const { witness } = await program.execute(inputs);

const backend = new BarretenbergBackend(noir_program);
const backend = new UltraPlonkBackend(noir_program.bytecode);
const proof = await backend.generateProof(witness);

// JS verification
Expand Down
6 changes: 3 additions & 3 deletions compiler/integration-tests/test/browser/recursion.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import { Logger } from 'tslog';
import { acvm, abi, Noir } from '@noir-lang/noir_js';

import * as TOML from 'smol-toml';
import { BarretenbergBackend } from '@noir-lang/backend_barretenberg';
import { UltraPlonkBackend } from '@aztec/bb.js';
import { getFile } from './utils.js';
import { Field, InputMap } from '@noir-lang/noirc_abi';
import { createFileManager, compile } from '@noir-lang/noir_wasm';
Expand Down Expand Up @@ -45,7 +45,7 @@ describe('It compiles noir program code, receiving circuit bytes and abi object.
const main_program = await getCircuit(`${base_relative_path}/${circuit_main}`);
const main_inputs: InputMap = TOML.parse(circuit_main_toml) as InputMap;

const main_backend = new BarretenbergBackend(main_program);
const main_backend = new UltraPlonkBackend(main_program.bytecode);

const { witness: main_witnessUint8Array } = await new Noir(main_program).execute(main_inputs);

Expand Down Expand Up @@ -73,7 +73,7 @@ describe('It compiles noir program code, receiving circuit bytes and abi object.

const recursion_program = await getCircuit(`${base_relative_path}/${circuit_recursion}`);

const recursion_backend = new BarretenbergBackend(recursion_program);
const recursion_backend = new UltraPlonkBackend(recursion_program.bytecode);

const { witness: recursion_witnessUint8Array } = await new Noir(recursion_program).execute(recursion_inputs);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { resolve, join } from 'path';
import toml from 'toml';

import { Noir } from '@noir-lang/noir_js';
import { BarretenbergBackend } from '@noir-lang/backend_barretenberg';
import { UltraPlonkBackend } from '@aztec/bb.js';
import { Field, InputMap } from '@noir-lang/noirc_abi';

import { compile, createFileManager } from '@noir-lang/noir_wasm';
Expand Down Expand Up @@ -35,7 +35,7 @@ it.skip(`smart contract can verify a recursive proof`, async () => {

// Intermediate proof

const inner_backend = new BarretenbergBackend(innerProgram);
const inner_backend = new UltraPlonkBackend(innerProgram.bytecode);
const inner = new Noir(innerProgram);

const inner_prover_toml = readFileSync(
Expand Down Expand Up @@ -67,7 +67,7 @@ it.skip(`smart contract can verify a recursive proof`, async () => {

const { witness: recursionWitness } = await recursion.execute(recursion_inputs);

const recursion_backend = new BarretenbergBackend(recursionProgram);
const recursion_backend = new UltraPlonkBackend(recursionProgram.bytecode);
const recursion_proof = await recursion_backend.generateProof(recursionWitness);
expect(await recursion_backend.verifyProof(recursion_proof)).to.be.true;

Expand Down
27 changes: 11 additions & 16 deletions compiler/integration-tests/test/node/prove_and_verify.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,13 @@ import { expect } from 'chai';
import assert_lt_json from '../../circuits/assert_lt/target/assert_lt.json' assert { type: 'json' };
import fold_fibonacci_json from '../../circuits/fold_fibonacci/target/fold_fibonacci.json' assert { type: 'json' };
import { Noir } from '@noir-lang/noir_js';
import {
BarretenbergBackend as Backend,
BarretenbergVerifier as Verifier,
UltraHonkBackend,
UltraHonkVerifier,
} from '@noir-lang/backend_barretenberg';
import { BarretenbergVerifier, UltraPlonkBackend, UltraHonkBackend } from '@aztec/bb.js';
import { CompiledCircuit } from '@noir-lang/types';

const assert_lt_program = assert_lt_json as CompiledCircuit;
const fold_fibonacci_program = fold_fibonacci_json as CompiledCircuit;

const backend = new Backend(assert_lt_program);
const backend = new UltraPlonkBackend(assert_lt_program.bytecode);

it('end-to-end proof creation and verification (outer)', async () => {
// Noir.Js part
Expand Down Expand Up @@ -53,8 +48,8 @@ it('end-to-end proof creation and verification (outer) -- Verifier API', async (
const verificationKey = await backend.getVerificationKey();

// Proof verification
const verifier = new Verifier();
const isValid = await verifier.verifyProof(proof, verificationKey);
const verifier = new BarretenbergVerifier();
const isValid = await verifier.verifyUltraPlonkProof(proof, verificationKey);
expect(isValid).to.be.true;
});

Expand Down Expand Up @@ -94,7 +89,7 @@ it('end-to-end proving and verification with different instances', async () => {
// bb.js part
const proof = await backend.generateProof(witness);

const verifier = new Backend(assert_lt_program);
const verifier = new UltraPlonkBackend(assert_lt_program.bytecode);
const proof_is_valid = await verifier.verifyProof(proof);
expect(proof_is_valid).to.be.true;
});
Expand Down Expand Up @@ -148,15 +143,15 @@ it('end-to-end proof creation and verification for multiple ACIR circuits (inner
// bb.js part
//
// Proof creation
const backend = new Backend(fold_fibonacci_program);
const backend = new UltraPlonkBackend(fold_fibonacci_program.bytecode);
const proof = await backend.generateProof(witness);

// Proof verification
const isValid = await backend.verifyProof(proof);
expect(isValid).to.be.true;
});

const honkBackend = new UltraHonkBackend(assert_lt_program);
const honkBackend = new UltraHonkBackend(assert_lt_program.bytecode);

it('UltraHonk end-to-end proof creation and verification (outer)', async () => {
// Noir.Js part
Expand Down Expand Up @@ -196,8 +191,8 @@ it('UltraHonk end-to-end proof creation and verification (outer) -- Verifier API
const verificationKey = await honkBackend.getVerificationKey();

// Proof verification
const verifier = new UltraHonkVerifier();
const isValid = await verifier.verifyProof(proof, verificationKey);
const verifier = new BarretenbergVerifier();
const isValid = await verifier.verifyUltraHonkProof(proof, verificationKey);
expect(isValid).to.be.true;
});

Expand Down Expand Up @@ -236,7 +231,7 @@ it('UltraHonk end-to-end proving and verification with different instances', asy
// bb.js part
const proof = await honkBackend.generateProof(witness);

const verifier = new UltraHonkBackend(assert_lt_program);
const verifier = new UltraHonkBackend(assert_lt_program.bytecode);
const proof_is_valid = await verifier.verifyProof(proof);
expect(proof_is_valid).to.be.true;
});
Expand Down Expand Up @@ -283,7 +278,7 @@ it('UltraHonk end-to-end proof creation and verification for multiple ACIR circu
// bb.js part
//
// Proof creation
const honkBackend = new UltraHonkBackend(fold_fibonacci_program);
const honkBackend = new UltraHonkBackend(fold_fibonacci_program.bytecode);
const proof = await honkBackend.generateProof(witness);

// Proof verification
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { resolve } from 'path';
import toml from 'toml';

import { Noir } from '@noir-lang/noir_js';
import { BarretenbergBackend } from '@noir-lang/backend_barretenberg';
import { UltraPlonkBackend } from '@aztec/bb.js';

import { compile, createFileManager } from '@noir-lang/noir_wasm';

Expand Down Expand Up @@ -46,7 +46,7 @@ test_cases.forEach((testInfo) => {
const inputs = toml.parse(prover_toml);
const { witness } = await program.execute(inputs);

const backend = new BarretenbergBackend(noir_program);
const backend = new UltraPlonkBackend(noir_program.bytecode);
const proofData = await backend.generateProof(witness);

// JS verification
Expand Down
Loading

0 comments on commit 3925228

Please sign in to comment.