From e8bbce71fde3fc7af410c30920c2a547389d8248 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Wed, 25 Sep 2024 13:40:53 -0400 Subject: [PATCH] feat: Sync from aztec-packages (#6151) Automated pull of Noir development from [aztec-packages](https://github.com/AztecProtocol/aztec-packages). BEGIN_COMMIT_OVERRIDE chore: migrate higher-level APIs for barretenberg to bb.js (https://github.com/AztecProtocol/aztec-packages/pull/8677) chore(ci): bump noir compile tests (https://github.com/AztecProtocol/aztec-packages/pull/8705) feat!: remove sha256 opcode (https://github.com/AztecProtocol/aztec-packages/pull/4571) feat!: add support for u1 in the avm, ToRadix's radix arg is a memory addr (https://github.com/AztecProtocol/aztec-packages/pull/8570) feat: Optimize allocating immediate amounts of memory (https://github.com/AztecProtocol/aztec-packages/pull/8579) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: TomAFrench --- .aztec-sync-commit | 2 +- .github/scripts/playwright-install.sh | 2 +- acvm-repo/acir/codegen/acir.cpp | 106 +--------- .../acir/src/circuit/black_box_functions.rs | 8 - .../opcodes/black_box_function_call.rs | 9 +- .../acir/tests/test_program_serialization.rs | 44 ++--- acvm-repo/acvm/src/pwg/blackbox/hash.rs | 4 +- acvm-repo/acvm/src/pwg/blackbox/mod.rs | 5 +- acvm-repo/acvm/tests/solver.rs | 31 --- acvm-repo/acvm_js/package.json | 2 +- acvm-repo/acvm_js/src/black_box_solvers.rs | 9 +- acvm-repo/acvm_js/src/lib.rs | 3 +- .../test/browser/black_box_solvers.test.ts | 10 +- .../test/node/black_box_solvers.test.ts | 10 +- .../acvm_js/test/shared/black_box_solvers.ts | 12 +- .../acvm_js/test/shared/multi_scalar_mul.ts | 6 +- .../acvm_js/test/shared/schnorr_verify.ts | 28 +-- acvm-repo/blackbox_solver/src/hash.rs | 8 +- acvm-repo/blackbox_solver/src/lib.rs | 2 +- acvm-repo/bn254_blackbox_solver/src/lib.rs | 5 +- .../bn254_blackbox_solver/src/poseidon2.rs | 84 ++++++++ acvm-repo/brillig/src/black_box.rs | 7 +- acvm-repo/brillig_vm/src/black_box.rs | 68 +------ compiler/integration-tests/package.json | 2 +- .../brillig/brillig_gen/brillig_black_box.rs | 19 -- .../brillig/brillig_ir/codegen_intrinsic.rs | 5 +- .../src/brillig/brillig_ir/codegen_memory.rs | 11 +- .../src/brillig/brillig_ir/debug_show.rs | 3 - .../ssa/acir_gen/acir_ir/generated_acir.rs | 10 +- .../src/ssa/ir/instruction/call.rs | 1 - .../src/ssa/opt/flatten_cfg.rs | 12 +- .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 6 +- .../src/hir/comptime/interpreter/builtin.rs | 4 +- compiler/noirc_frontend/src/tests.rs | 150 +++++++------- .../wasm/test/compiler/node/compile.test.ts | 64 +++--- noir_stdlib/src/hash/keccak.nr | 4 +- noir_stdlib/src/hash/mod.nr | 8 +- noir_stdlib/src/hash/sha256.nr | 13 +- scripts/install_bb.sh | 2 +- .../regression_4436/src/main.nr | 4 +- .../regression_6077/Prover.toml | 1 + .../execution_success/fold_basic/src/main.nr | 2 + tooling/noir_js/src/index.ts | 10 +- .../noir_js_backend_barretenberg/package.json | 2 +- .../src/backend.ts | 187 +++--------------- .../noir_js_backend_barretenberg/src/index.ts | 2 +- .../noir_js_backend_barretenberg/src/types.ts | 9 - .../src/verifier.ts | 113 ++--------- tooling/noirc_abi_wasm/package.json | 2 +- tooling/profiler/src/opcode_formatter.rs | 2 - yarn.lock | 127 ++---------- 51 files changed, 381 insertions(+), 859 deletions(-) create mode 100644 test_programs/compile_success_empty/regression_6077/Prover.toml delete mode 100644 
tooling/noir_js_backend_barretenberg/src/types.ts diff --git a/.aztec-sync-commit b/.aztec-sync-commit index f60456adac1..42ef1037b17 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -bcec12dbf79b658406dc21083f8fdeef8962085e +b0d1bab1f02819e7efbe0db73c3c805b5927b66a diff --git a/.github/scripts/playwright-install.sh b/.github/scripts/playwright-install.sh index 4072e996264..7e65021166c 100755 --- a/.github/scripts/playwright-install.sh +++ b/.github/scripts/playwright-install.sh @@ -1,4 +1,4 @@ #!/bin/bash set -eu -npx playwright install && npx playwright install-deps +npx -y playwright@1.42 install --with-deps diff --git a/acvm-repo/acir/codegen/acir.cpp b/acvm-repo/acir/codegen/acir.cpp index a789a0f0c47..70ad596a93a 100644 --- a/acvm-repo/acir/codegen/acir.cpp +++ b/acvm-repo/acir/codegen/acir.cpp @@ -256,15 +256,6 @@ namespace Program { static AES128Encrypt bincodeDeserialize(std::vector); }; - struct Sha256 { - Program::HeapVector message; - Program::HeapArray output; - - friend bool operator==(const Sha256&, const Sha256&); - std::vector bincodeSerialize() const; - static Sha256 bincodeDeserialize(std::vector); - }; - struct Blake2s { Program::HeapVector message; Program::HeapArray output; @@ -462,7 +453,7 @@ namespace Program { struct ToRadix { Program::MemoryAddress input; - uint32_t radix; + Program::MemoryAddress radix; Program::HeapArray output; bool output_bits; @@ -471,7 +462,7 @@ namespace Program { static ToRadix bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); std::vector bincodeSerialize() const; @@ -827,15 +818,6 @@ namespace Program { static RANGE bincodeDeserialize(std::vector); }; - struct SHA256 { - std::vector inputs; - std::array outputs; - - friend bool operator==(const SHA256&, const SHA256&); - std::vector bincodeSerialize() const; - static SHA256 bincodeDeserialize(std::vector); - }; - struct Blake2s { std::vector inputs; std::array outputs; @@ -1040,7 +1022,7 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; @@ -2630,47 +2612,6 @@ Program::BlackBoxFuncCall::RANGE serde::Deserializable BlackBoxFuncCall::SHA256::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxFuncCall::SHA256 BlackBoxFuncCall::SHA256::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxFuncCall::SHA256 &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.inputs, serializer); - serde::Serializable::serialize(obj.outputs, serializer); -} - -template <> -template -Program::BlackBoxFuncCall::SHA256 serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxFuncCall::SHA256 obj; - obj.inputs = serde::Deserializable::deserialize(deserializer); - obj.outputs = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Program { 
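The `ToRadix` struct above now carries its radix as a `Program::MemoryAddress` rather than an immediate `uint32_t`, matching the Rust-side `BlackBoxOp::ToRadix` change further down in this patch (the Brillig codegen now materializes the radix as a constant and passes its address). A minimal sketch of the new shape, assuming the `acir::brillig` types used elsewhere in this diff (`BlackBoxOp`, `MemoryAddress`, `HeapArray`) and a purely hypothetical memory layout:

use acir::brillig::{BlackBoxOp, HeapArray, MemoryAddress};

fn main() {
    // Hypothetical layout: the field to decompose sits at address 0, the radix
    // (e.g. 16, written earlier by a Const opcode) at address 1, and address 2
    // holds the pointer to the output limb array.
    let _op = BlackBoxOp::ToRadix {
        input: MemoryAddress(0),
        radix: MemoryAddress(1), // previously an immediate `radix: u32`
        output: HeapArray { pointer: MemoryAddress(2), size: 8 },
        output_bits: false,
    };
}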
inline bool operator==(const BlackBoxFuncCall::Blake2s &lhs, const BlackBoxFuncCall::Blake2s &rhs) { @@ -3652,47 +3593,6 @@ Program::BlackBoxOp::AES128Encrypt serde::Deserializable BlackBoxOp::Sha256::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxOp::Sha256 BlackBoxOp::Sha256::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxOp::Sha256 &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.message, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Program::BlackBoxOp::Sha256 serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxOp::Sha256 obj; - obj.message = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Program { inline bool operator==(const BlackBoxOp::Blake2s &lhs, const BlackBoxOp::Blake2s &rhs) { diff --git a/acvm-repo/acir/src/circuit/black_box_functions.rs b/acvm-repo/acir/src/circuit/black_box_functions.rs index 6639f1fbe71..5c07a61af7e 100644 --- a/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -40,12 +40,6 @@ pub enum BlackBoxFunc { /// - input: (witness, bit_size) RANGE, - /// Computes SHA256 of the inputs - /// - inputs are a byte array, i.e a vector of (witness, 8) - /// - output is a byte array of len 32, i.e an array of 32 (witness, 8), - /// constrained to be the sha256 of the inputs. - SHA256, - /// Computes the Blake2s hash of the inputs, as specified in /// https://tools.ietf.org/html/rfc7693 /// - inputs are a byte array, i.e a vector of (witness, 8) @@ -213,7 +207,6 @@ impl BlackBoxFunc { pub fn name(&self) -> &'static str { match self { BlackBoxFunc::AES128Encrypt => "aes128_encrypt", - BlackBoxFunc::SHA256 => "sha256", BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", BlackBoxFunc::Blake3 => "blake3", @@ -243,7 +236,6 @@ impl BlackBoxFunc { pub fn lookup(op_name: &str) -> Option { match op_name { "aes128_encrypt" => Some(BlackBoxFunc::AES128Encrypt), - "sha256" => Some(BlackBoxFunc::SHA256), "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), "blake3" => Some(BlackBoxFunc::Blake3), diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index f527522cceb..fbe179d7c04 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -94,10 +94,6 @@ pub enum BlackBoxFuncCall { RANGE { input: FunctionInput, }, - SHA256 { - inputs: Vec>, - outputs: Box<[Witness; 32]>, - }, Blake2s { inputs: Vec>, outputs: Box<[Witness; 32]>, @@ -251,7 +247,6 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::AND { .. } => BlackBoxFunc::AND, BlackBoxFuncCall::XOR { .. } => BlackBoxFunc::XOR, BlackBoxFuncCall::RANGE { .. } => BlackBoxFunc::RANGE, - BlackBoxFuncCall::SHA256 { .. 
} => BlackBoxFunc::SHA256, BlackBoxFuncCall::Blake2s { .. } => BlackBoxFunc::Blake2s, BlackBoxFuncCall::Blake3 { .. } => BlackBoxFunc::Blake3, BlackBoxFuncCall::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, @@ -282,7 +277,6 @@ impl BlackBoxFuncCall { pub fn get_inputs_vec(&self) -> Vec> { match self { BlackBoxFuncCall::AES128Encrypt { inputs, .. } - | BlackBoxFuncCall::SHA256 { inputs, .. } | BlackBoxFuncCall::Blake2s { inputs, .. } | BlackBoxFuncCall::Blake3 { inputs, .. } | BlackBoxFuncCall::BigIntFromLeBytes { inputs, .. } @@ -391,8 +385,7 @@ impl BlackBoxFuncCall { pub fn get_outputs_vec(&self) -> Vec { match self { - BlackBoxFuncCall::SHA256 { outputs, .. } - | BlackBoxFuncCall::Blake2s { outputs, .. } + BlackBoxFuncCall::Blake2s { outputs, .. } | BlackBoxFuncCall::Blake3 { outputs, .. } | BlackBoxFuncCall::Keccak256 { outputs, .. } => outputs.to_vec(), diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index 2b6ea83fafa..6bf5afe52d9 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -91,10 +91,10 @@ fn multi_scalar_mul_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 11, 10, 0, 32, 8, 67, 43, 181, 15, 116, 255, - 227, 70, 74, 11, 86, 194, 195, 169, 83, 115, 58, 49, 156, 12, 29, 121, 58, 66, 117, 176, - 144, 11, 105, 161, 222, 245, 42, 205, 13, 186, 58, 205, 233, 240, 25, 249, 11, 238, 40, - 245, 19, 253, 255, 119, 159, 216, 103, 157, 249, 169, 193, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 11, 10, 0, 32, 8, 67, 43, 181, 15, 221, 255, + 186, 145, 210, 130, 149, 240, 112, 234, 212, 156, 78, 12, 39, 67, 71, 158, 142, 80, 29, 44, + 228, 66, 90, 168, 119, 189, 74, 115, 131, 174, 78, 115, 58, 124, 70, 254, 130, 59, 74, 253, + 68, 255, 255, 221, 39, 54, 29, 134, 27, 102, 193, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -134,24 +134,24 @@ fn schnorr_verify_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 211, 103, 78, 2, 81, 24, 70, 225, 193, 6, 216, 123, - 47, 216, 123, 239, 136, 136, 136, 136, 136, 187, 96, 255, 75, 32, 112, 194, 55, 201, 129, - 100, 50, 79, 244, 7, 228, 222, 243, 102, 146, 254, 167, 221, 123, 50, 97, 222, 217, 120, - 243, 116, 226, 61, 36, 15, 247, 158, 92, 120, 68, 30, 149, 199, 228, 172, 156, 147, 243, - 242, 184, 60, 33, 79, 202, 83, 242, 180, 60, 35, 207, 202, 115, 242, 188, 188, 32, 47, 202, - 75, 242, 178, 188, 34, 175, 202, 107, 242, 186, 188, 33, 111, 202, 91, 242, 182, 188, 35, - 23, 228, 93, 121, 79, 222, 151, 15, 228, 67, 249, 72, 62, 150, 79, 228, 83, 249, 76, 62, - 151, 47, 228, 75, 249, 74, 190, 150, 111, 228, 91, 249, 78, 190, 151, 31, 228, 71, 249, 73, - 126, 150, 95, 228, 87, 185, 40, 191, 201, 37, 249, 93, 46, 203, 31, 114, 69, 254, 148, 171, - 97, 58, 77, 226, 111, 95, 250, 127, 77, 254, 150, 235, 242, 143, 220, 144, 127, 229, 166, - 252, 39, 183, 194, 255, 241, 253, 45, 253, 14, 182, 201, 38, 217, 34, 27, 100, 123, 233, - 230, 242, 241, 155, 217, 20, 91, 98, 67, 108, 135, 205, 176, 21, 54, 194, 54, 216, 4, 91, - 96, 3, 180, 79, 243, 180, 78, 227, 180, 77, 211, 180, 76, 195, 180, 75, 179, 133, 164, 223, - 40, 109, 210, 36, 45, 210, 32, 237, 209, 28, 173, 209, 24, 109, 209, 20, 45, 209, 16, 237, - 208, 12, 173, 208, 8, 109, 208, 4, 45, 208, 0, 119, 207, 157, 115, 215, 220, 113, 49, 238, - 180, 
20, 119, 88, 142, 59, 171, 196, 29, 85, 227, 46, 106, 113, 246, 245, 56, 235, 70, 156, - 109, 51, 206, 50, 61, 179, 244, 220, 18, 157, 231, 192, 167, 11, 75, 28, 99, 152, 25, 5, 0, - 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, + 189, 96, 239, 189, 35, 34, 34, 34, 34, 238, 130, 253, 47, 129, 192, 9, 223, 36, 7, 146, + 201, 60, 209, 31, 144, 123, 207, 155, 73, 250, 159, 118, 239, 201, 132, 121, 103, 227, 205, + 211, 137, 247, 144, 60, 220, 123, 114, 225, 17, 121, 84, 206, 202, 99, 114, 78, 206, 203, + 227, 242, 132, 60, 41, 79, 201, 211, 242, 140, 60, 43, 207, 201, 243, 242, 130, 188, 40, + 47, 201, 203, 242, 138, 188, 42, 175, 201, 235, 242, 134, 188, 41, 111, 201, 219, 242, 142, + 92, 144, 119, 229, 61, 121, 95, 62, 144, 15, 229, 35, 249, 88, 62, 145, 79, 229, 51, 249, + 92, 190, 144, 47, 229, 43, 249, 90, 190, 145, 111, 229, 59, 249, 94, 126, 144, 31, 229, 39, + 249, 89, 126, 145, 95, 229, 162, 252, 38, 151, 228, 119, 185, 44, 127, 200, 21, 249, 83, + 174, 134, 233, 52, 137, 191, 125, 233, 255, 53, 249, 91, 174, 203, 63, 114, 67, 254, 149, + 155, 242, 159, 220, 10, 255, 199, 247, 183, 244, 59, 216, 38, 155, 100, 139, 108, 144, 237, + 165, 155, 203, 199, 111, 102, 83, 108, 137, 13, 177, 29, 54, 195, 86, 216, 8, 219, 96, 19, + 108, 129, 13, 208, 62, 205, 211, 58, 141, 211, 54, 77, 211, 50, 13, 211, 46, 205, 22, 146, + 126, 163, 180, 73, 147, 180, 72, 131, 180, 71, 115, 180, 70, 99, 180, 69, 83, 180, 68, 67, + 180, 67, 51, 180, 66, 35, 180, 65, 19, 180, 64, 3, 220, 61, 119, 206, 93, 115, 199, 197, + 184, 211, 82, 220, 97, 57, 238, 172, 18, 119, 84, 141, 187, 168, 197, 217, 215, 227, 172, + 27, 113, 182, 205, 56, 203, 244, 204, 210, 115, 75, 116, 158, 3, 159, 46, 43, 32, 188, 53, + 25, 5, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 234ab6162ca..f177cd071d0 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -3,7 +3,7 @@ use acir::{ native_types::{Witness, WitnessMap}, AcirField, }; -use acvm_blackbox_solver::{sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError}; +use acvm_blackbox_solver::{sha256_compression, BlackBoxFunctionSolver, BlackBoxResolutionError}; use crate::pwg::{input_to_value, insert_value}; use crate::OpcodeResolutionError; @@ -94,7 +94,7 @@ pub(crate) fn solve_sha_256_permutation_opcode( let message = to_u32_array(initial_witness, inputs)?; let mut state = to_u32_array(initial_witness, hash_values)?; - sha256compression(&mut state, &message); + sha256_compression(&mut state, &message); for (output_witness, value) in outputs.iter().zip(state.into_iter()) { insert_value(output_witness, F::from(value as u128), initial_witness)?; diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 8b8bfc5cfc5..1cca14cc680 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -3,7 +3,7 @@ use acir::{ native_types::{Witness, WitnessMap}, AcirField, }; -use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; +use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600}; use self::{ aes128::solve_aes128_encryption_opcode, bigint::AcvmBigIntSolver, @@ -84,9 +84,6 @@ pub(crate) fn solve( BlackBoxFuncCall::AND { lhs, rhs, output } => and(initial_witness, lhs, rhs, output), BlackBoxFuncCall::XOR { lhs, rhs, output } => xor(initial_witness, lhs, rhs, output), 
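With the full-message `sha256` black box removed, the remaining hashing primitive here is the compression function. A small usage sketch of `acvm_blackbox_solver::sha256_compression` (crate and signature as imported in this diff), reusing the message/state/output vector from this patch's `sha256_compression_test_cases`:

use acvm_blackbox_solver::sha256_compression;

fn main() {
    // One 512-bit message block as sixteen big-endian u32 words, plus the
    // eight-word running state; both taken from the test vector in
    // acvm_js/test/shared/black_box_solvers.ts.
    let message: [u32; 16] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
    let mut state: [u32; 8] = [1, 2, 3, 4, 5, 6, 7, 8];

    // The state is updated in place; message padding and length encoding are
    // now handled by the caller (in Noir, in noir_stdlib/src/hash/sha256.nr).
    sha256_compression(&mut state, &message);

    assert_eq!(
        state,
        [1862536192, 526086805, 2067405084, 593147560, 726610467, 813867028, 4091010797, 3974542186]
    );
}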
BlackBoxFuncCall::RANGE { input } => solve_range_opcode(initial_witness, input), - BlackBoxFuncCall::SHA256 { inputs, outputs } => { - solve_generic_256_hash_opcode(initial_witness, inputs, None, outputs, sha256) - } BlackBoxFuncCall::Blake2s { inputs, outputs } => { solve_generic_256_hash_opcode(initial_witness, inputs, None, outputs, blake2s) } diff --git a/acvm-repo/acvm/tests/solver.rs b/acvm-repo/acvm/tests/solver.rs index 766d374c43c..6ad52999820 100644 --- a/acvm-repo/acvm/tests/solver.rs +++ b/acvm-repo/acvm/tests/solver.rs @@ -1075,18 +1075,6 @@ fn solve_blackbox_func_call( Ok(witness_map[&Witness(3)]) } -// N inputs -// 32 outputs -fn sha256_op( - function_inputs_and_outputs: (Vec>, Vec), -) -> Result, OpcodeResolutionError> { - let (function_inputs, outputs) = function_inputs_and_outputs; - Ok(BlackBoxFuncCall::SHA256 { - inputs: function_inputs, - outputs: outputs.try_into().expect("SHA256 returns 32 outputs"), - }) -} - // N inputs // 32 outputs fn blake2s_op( @@ -1457,19 +1445,6 @@ fn poseidon2_permutation_zeroes() { assert_eq!(results, expected_results); } -#[test] -fn sha256_zeros() { - let results = solve_array_input_blackbox_call(vec![], 32, None, sha256_op); - let expected_results: Vec<_> = vec![ - 227, 176, 196, 66, 152, 252, 28, 20, 154, 251, 244, 200, 153, 111, 185, 36, 39, 174, 65, - 228, 100, 155, 147, 76, 164, 149, 153, 27, 120, 82, 184, 85, - ] - .into_iter() - .map(|x: u128| FieldElement::from(x)) - .collect(); - assert_eq!(results, Ok(expected_results)); -} - #[test] fn sha256_compression_zeros() { let results = solve_array_input_blackbox_call( @@ -1643,12 +1618,6 @@ proptest! { prop_assert_eq!(result, expected_result) } - #[test] - fn sha256_injective(inputs_distinct_inputs in any_distinct_inputs(None, 0, 32)) { - let (inputs, distinct_inputs) = inputs_distinct_inputs; - let (result, message) = prop_assert_injective(inputs, distinct_inputs, 32, None, sha256_op); - prop_assert!(result, "{}", message); - } #[test] fn sha256_compression_injective(inputs_distinct_inputs in any_distinct_inputs(None, 24, 24)) { diff --git a/acvm-repo/acvm_js/package.json b/acvm-repo/acvm_js/package.json index 54261a78dbc..95b8a46456f 100644 --- a/acvm-repo/acvm_js/package.json +++ b/acvm-repo/acvm_js/package.json @@ -40,7 +40,7 @@ "@esm-bundle/chai": "^4.3.4-fix.0", "@web/dev-server-esbuild": "^0.3.6", "@web/test-runner": "^0.18.1", - "@web/test-runner-playwright": "^0.10.0", + "@web/test-runner-playwright": "^0.11.0", "chai": "^4.4.1", "eslint": "^8.57.0", "eslint-plugin-prettier": "^5.1.3", diff --git a/acvm-repo/acvm_js/src/black_box_solvers.rs b/acvm-repo/acvm_js/src/black_box_solvers.rs index 4cd85a5ed87..6046d52943c 100644 --- a/acvm-repo/acvm_js/src/black_box_solvers.rs +++ b/acvm-repo/acvm_js/src/black_box_solvers.rs @@ -22,10 +22,13 @@ pub fn xor(lhs: JsString, rhs: JsString) -> JsString { field_element_to_js_string(&result) } -/// Calculates the SHA256 hash of the input bytes +/// Sha256 compression function #[wasm_bindgen] -pub fn sha256(inputs: &[u8]) -> Vec { - acvm::blackbox_solver::sha256(inputs).unwrap().into() +pub fn sha256_compression(inputs: &[u32], state: &[u32]) -> Vec { + let mut state: [u32; 8] = state.try_into().unwrap(); + let inputs: [u32; 16] = inputs.try_into().unwrap(); + acvm::blackbox_solver::sha256_compression(&mut state, &inputs); + state.to_vec() } /// Calculates the Blake2s256 hash of the input bytes diff --git a/acvm-repo/acvm_js/src/lib.rs b/acvm-repo/acvm_js/src/lib.rs index fdb8d5ffe08..8fe64afbba9 100644 --- a/acvm-repo/acvm_js/src/lib.rs +++ 
b/acvm-repo/acvm_js/src/lib.rs @@ -17,7 +17,8 @@ mod logging; mod public_witness; pub use black_box_solvers::{ - and, blake2s256, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, sha256, xor, + and, blake2s256, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, sha256_compression, + xor, }; pub use build_info::build_info; pub use compression::{ diff --git a/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts b/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts index 695f6b89afc..9dc5be2c682 100644 --- a/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts +++ b/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts @@ -5,7 +5,7 @@ import initACVM, { ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, - sha256, + sha256_compression, xor, } from '@noir-lang/acvm_js'; @@ -32,11 +32,11 @@ it('successfully calculates the bitwise XOR of two fields', async () => { }); it('successfully calculates the sha256 hash', async () => { - const { sha256_test_cases } = await import('../shared/black_box_solvers'); + const { sha256_compression_test_cases } = await import('../shared/black_box_solvers'); - for (const testCase of sha256_test_cases) { - const [preimage, expectedResult] = testCase; - const hash = sha256(preimage); + for (const testCase of sha256_compression_test_cases) { + const [message, state, expectedResult] = testCase; + const hash = sha256_compression(message, state); hash.forEach((value, index) => expect(value).to.be.eq(expectedResult.at(index))); } }); diff --git a/acvm-repo/acvm_js/test/node/black_box_solvers.test.ts b/acvm-repo/acvm_js/test/node/black_box_solvers.test.ts index cedadba2c1a..fc998ced5a5 100644 --- a/acvm-repo/acvm_js/test/node/black_box_solvers.test.ts +++ b/acvm-repo/acvm_js/test/node/black_box_solvers.test.ts @@ -5,7 +5,7 @@ import { ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, - sha256, + sha256_compression, xor, } from '@noir-lang/acvm_js'; @@ -28,11 +28,11 @@ it('successfully calculates the bitwise XOR of two fields', async () => { }); it('successfully calculates the sha256 hash', async () => { - const { sha256_test_cases } = await import('../shared/black_box_solvers'); + const { sha256_compression_test_cases } = await import('../shared/black_box_solvers'); - for (const testCase of sha256_test_cases) { - const [preimage, expectedResult] = testCase; - const hash = sha256(preimage); + for (const testCase of sha256_compression_test_cases) { + const [message, state, expectedResult] = testCase; + const hash = sha256_compression(message, state); hash.forEach((value, index) => expect(value).to.be.eq(expectedResult.at(index))); } }); diff --git a/acvm-repo/acvm_js/test/shared/black_box_solvers.ts b/acvm-repo/acvm_js/test/shared/black_box_solvers.ts index 0ab3fc12b72..22783a028ea 100644 --- a/acvm-repo/acvm_js/test/shared/black_box_solvers.ts +++ b/acvm-repo/acvm_js/test/shared/black_box_solvers.ts @@ -32,15 +32,11 @@ export const xor_test_cases: [[string, string], string][] = [ ], ]; -// https://www.di-mgt.com.au/sha_testvectors.html -export const sha256_test_cases: [Uint8Array, Uint8Array][] = [ +export const sha256_compression_test_cases: [Uint32Array, Uint32Array, Uint32Array][] = [ [ - // "abc" - Uint8Array.from([0x61, 0x62, 0x63]), - Uint8Array.from([ - 0xba, 0x78, 0x16, 0xbf, 0x8f, 0x01, 0xcf, 0xea, 0x41, 0x41, 0x40, 0xde, 0x5d, 0xae, 0x22, 0x23, 0xb0, 0x03, 0x61, - 0xa3, 0x96, 0x17, 0x7a, 0x9c, 0xb4, 0x10, 0xff, 0x61, 0xf2, 0x00, 0x15, 0xad, - ]), + Uint32Array.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 
15, 16]), + Uint32Array.from([1, 2, 3, 4, 5, 6, 7, 8]), + Uint32Array.from([1862536192, 526086805, 2067405084, 593147560, 726610467, 813867028, 4091010797, 3974542186]), ], ]; diff --git a/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts b/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts index ffb9952b136..f23847a75fc 100644 --- a/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts +++ b/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts @@ -1,8 +1,8 @@ // See `multi_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 11, 10, 0, 32, 8, 67, 43, 181, 15, 116, 255, 227, 70, 74, 11, 86, 194, - 195, 169, 83, 115, 58, 49, 156, 12, 29, 121, 58, 66, 117, 176, 144, 11, 105, 161, 222, 245, 42, 205, 13, 186, 58, 205, - 233, 240, 25, 249, 11, 238, 40, 245, 19, 253, 255, 119, 159, 216, 103, 157, 249, 169, 193, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 11, 10, 0, 32, 8, 67, 43, 181, 15, 221, 255, 186, 145, 210, 130, 149, 240, + 112, 234, 212, 156, 78, 12, 39, 67, 71, 158, 142, 80, 29, 44, 228, 66, 90, 168, 119, 189, 74, 115, 131, 174, 78, 115, + 58, 124, 70, 254, 130, 59, 74, 253, 68, 255, 255, 221, 39, 54, 29, 134, 27, 102, 193, 0, 0, 0, ]); export const initialWitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/acvm-repo/acvm_js/test/shared/schnorr_verify.ts b/acvm-repo/acvm_js/test/shared/schnorr_verify.ts index c071c86f61f..830ca1026d6 100644 --- a/acvm-repo/acvm_js/test/shared/schnorr_verify.ts +++ b/acvm-repo/acvm_js/test/shared/schnorr_verify.ts @@ -1,19 +1,19 @@ // See `schnorr_verify_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 211, 103, 78, 2, 81, 24, 70, 225, 193, 6, 216, 123, 47, 216, 123, 239, 136, - 136, 136, 136, 136, 187, 96, 255, 75, 32, 112, 194, 55, 201, 129, 100, 50, 79, 244, 7, 228, 222, 243, 102, 146, 254, - 167, 221, 123, 50, 97, 222, 217, 120, 243, 116, 226, 61, 36, 15, 247, 158, 92, 120, 68, 30, 149, 199, 228, 172, 156, - 147, 243, 242, 184, 60, 33, 79, 202, 83, 242, 180, 60, 35, 207, 202, 115, 242, 188, 188, 32, 47, 202, 75, 242, 178, - 188, 34, 175, 202, 107, 242, 186, 188, 33, 111, 202, 91, 242, 182, 188, 35, 23, 228, 93, 121, 79, 222, 151, 15, 228, - 67, 249, 72, 62, 150, 79, 228, 83, 249, 76, 62, 151, 47, 228, 75, 249, 74, 190, 150, 111, 228, 91, 249, 78, 190, 151, - 31, 228, 71, 249, 73, 126, 150, 95, 228, 87, 185, 40, 191, 201, 37, 249, 93, 46, 203, 31, 114, 69, 254, 148, 171, 97, - 58, 77, 226, 111, 95, 250, 127, 77, 254, 150, 235, 242, 143, 220, 144, 127, 229, 166, 252, 39, 183, 194, 255, 241, - 253, 45, 253, 14, 182, 201, 38, 217, 34, 27, 100, 123, 233, 230, 242, 241, 155, 217, 20, 91, 98, 67, 108, 135, 205, - 176, 21, 54, 194, 54, 216, 4, 91, 96, 3, 180, 79, 243, 180, 78, 227, 180, 77, 211, 180, 76, 195, 180, 75, 179, 133, - 164, 223, 40, 109, 210, 36, 45, 210, 32, 237, 209, 28, 173, 209, 24, 109, 209, 20, 45, 209, 16, 237, 208, 12, 173, - 208, 8, 109, 208, 4, 45, 208, 0, 119, 207, 157, 115, 215, 220, 113, 49, 238, 180, 20, 119, 88, 142, 59, 171, 196, 29, - 85, 227, 46, 106, 113, 246, 245, 56, 235, 70, 156, 109, 51, 206, 50, 61, 179, 244, 220, 18, 157, 231, 192, 167, 11, - 75, 28, 99, 152, 25, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, 189, 96, 239, 189, 35, 34, + 34, 34, 34, 238, 130, 253, 47, 129, 192, 9, 223, 
36, 7, 146, 201, 60, 209, 31, 144, 123, 207, 155, 73, 250, 159, 118, + 239, 201, 132, 121, 103, 227, 205, 211, 137, 247, 144, 60, 220, 123, 114, 225, 17, 121, 84, 206, 202, 99, 114, 78, + 206, 203, 227, 242, 132, 60, 41, 79, 201, 211, 242, 140, 60, 43, 207, 201, 243, 242, 130, 188, 40, 47, 201, 203, 242, + 138, 188, 42, 175, 201, 235, 242, 134, 188, 41, 111, 201, 219, 242, 142, 92, 144, 119, 229, 61, 121, 95, 62, 144, 15, + 229, 35, 249, 88, 62, 145, 79, 229, 51, 249, 92, 190, 144, 47, 229, 43, 249, 90, 190, 145, 111, 229, 59, 249, 94, 126, + 144, 31, 229, 39, 249, 89, 126, 145, 95, 229, 162, 252, 38, 151, 228, 119, 185, 44, 127, 200, 21, 249, 83, 174, 134, + 233, 52, 137, 191, 125, 233, 255, 53, 249, 91, 174, 203, 63, 114, 67, 254, 149, 155, 242, 159, 220, 10, 255, 199, 247, + 183, 244, 59, 216, 38, 155, 100, 139, 108, 144, 237, 165, 155, 203, 199, 111, 102, 83, 108, 137, 13, 177, 29, 54, 195, + 86, 216, 8, 219, 96, 19, 108, 129, 13, 208, 62, 205, 211, 58, 141, 211, 54, 77, 211, 50, 13, 211, 46, 205, 22, 146, + 126, 163, 180, 73, 147, 180, 72, 131, 180, 71, 115, 180, 70, 99, 180, 69, 83, 180, 68, 67, 180, 67, 51, 180, 66, 35, + 180, 65, 19, 180, 64, 3, 220, 61, 119, 206, 93, 115, 199, 197, 184, 211, 82, 220, 97, 57, 238, 172, 18, 119, 84, 141, + 187, 168, 197, 217, 215, 227, 172, 27, 113, 182, 205, 56, 203, 244, 204, 210, 115, 75, 116, 158, 3, 159, 46, 43, 32, + 188, 53, 25, 5, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/acvm-repo/blackbox_solver/src/hash.rs b/acvm-repo/blackbox_solver/src/hash.rs index ac56029b436..af503117466 100644 --- a/acvm-repo/blackbox_solver/src/hash.rs +++ b/acvm-repo/blackbox_solver/src/hash.rs @@ -1,7 +1,6 @@ use acir::BlackBoxFunc; use blake2::digest::generic_array::GenericArray; use blake2::{Blake2s256, Digest}; -use sha2::Sha256; use sha3::Keccak256; use crate::BlackBoxResolutionError; @@ -14,11 +13,6 @@ fn generic_hash_256(message: &[u8]) -> Result<[u8; 32], String> { Ok(output_bytes) } -pub fn sha256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { - generic_hash_256::(inputs) - .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::SHA256, err)) -} - pub fn blake2s(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { generic_hash_256::(inputs) .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Blake2s, err)) @@ -33,7 +27,7 @@ pub fn keccak256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Keccak256, err)) } -pub fn sha256compression(state: &mut [u32; 8], msg_blocks: &[u32; 16]) { +pub fn sha256_compression(state: &mut [u32; 8], msg_blocks: &[u32; 16]) { let mut blocks = [0_u8; 64]; for (i, block) in msg_blocks.iter().enumerate() { let bytes = block.to_be_bytes(); diff --git a/acvm-repo/blackbox_solver/src/lib.rs b/acvm-repo/blackbox_solver/src/lib.rs index c39deb64138..87ca539f435 100644 --- a/acvm-repo/blackbox_solver/src/lib.rs +++ b/acvm-repo/blackbox_solver/src/lib.rs @@ -21,7 +21,7 @@ pub use aes128::aes128_encrypt; pub use bigint::BigIntSolver; pub use curve_specific_solver::{BlackBoxFunctionSolver, StubbedBlackBoxSolver}; pub use ecdsa::{ecdsa_secp256k1_verify, ecdsa_secp256r1_verify}; -pub use hash::{blake2s, blake3, keccak256, keccakf1600, sha256, sha256compression}; +pub use hash::{blake2s, blake3, keccak256, keccakf1600, sha256_compression}; pub use logic::{bit_and, bit_xor}; #[derive(Clone, PartialEq, Eq, Debug, Error)] diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs 
b/acvm-repo/bn254_blackbox_solver/src/lib.rs index 43ee6a9ddd2..952c4498d84 100644 --- a/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -13,7 +13,10 @@ mod schnorr; use ark_ec::AffineRepr; pub use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; pub use generator::generators::derive_generators; -pub use poseidon2::{field_from_hex, poseidon2_permutation, Poseidon2Config, POSEIDON2_CONFIG}; +pub use poseidon2::{ + field_from_hex, poseidon2_permutation, poseidon_hash, Poseidon2Config, Poseidon2Sponge, + POSEIDON2_CONFIG, +}; // Temporary hack, this ensure that we always use a bn254 field here // without polluting the feature flags of the `acir_field` crate. diff --git a/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs index dd3e8b725c2..64823e37029 100644 --- a/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs +++ b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs @@ -543,6 +543,75 @@ impl<'a> Poseidon2<'a> { } } +/// Performs a poseidon hash with a sponge construction equivalent to the one in poseidon2.nr +pub fn poseidon_hash(inputs: &[FieldElement]) -> Result { + let two_pow_64 = 18446744073709551616_u128.into(); + let iv = FieldElement::from(inputs.len()) * two_pow_64; + let mut sponge = Poseidon2Sponge::new(iv, 3); + for input in inputs.iter() { + sponge.absorb(*input)?; + } + sponge.squeeze() +} + +pub struct Poseidon2Sponge<'a> { + rate: usize, + poseidon: Poseidon2<'a>, + squeezed: bool, + cache: Vec, + state: Vec, +} + +impl<'a> Poseidon2Sponge<'a> { + pub fn new(iv: FieldElement, rate: usize) -> Poseidon2Sponge<'a> { + let mut result = Poseidon2Sponge { + cache: Vec::with_capacity(rate), + state: vec![FieldElement::zero(); rate + 1], + squeezed: false, + rate, + poseidon: Poseidon2::new(), + }; + result.state[rate] = iv; + result + } + + fn perform_duplex(&mut self) -> Result<(), BlackBoxResolutionError> { + // zero-pad the cache + for _ in self.cache.len()..self.rate { + self.cache.push(FieldElement::zero()); + } + // add the cache into sponge state + for i in 0..self.rate { + self.state[i] += self.cache[i]; + } + self.state = self.poseidon.permutation(&self.state, 4)?; + Ok(()) + } + + pub fn absorb(&mut self, input: FieldElement) -> Result<(), BlackBoxResolutionError> { + assert!(!self.squeezed); + if self.cache.len() == self.rate { + // If we're absorbing, and the cache is full, apply the sponge permutation to compress the cache + self.perform_duplex()?; + self.cache = vec![input]; + } else { + // If we're absorbing, and the cache is not full, add the input into the cache + self.cache.push(input); + } + Ok(()) + } + + pub fn squeeze(&mut self) -> Result { + assert!(!self.squeezed); + // If we're in absorb mode, apply sponge permutation to compress the cache. + self.perform_duplex()?; + self.squeezed = true; + + // Pop one item off the top of the permutation and return it. 
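+        // Usage sketch (commented, not part of squeeze itself): hashing the
+        // fields [1, 2, 3, 4] with the sponge-based `poseidon_hash` above
+        // reproduces the vector checked by `hash_smoke_test` later in this
+        // file, assuming `FieldElement` and `field_from_hex` as used in this
+        // module:
+        //
+        //     let fields = [
+        //         FieldElement::from(1u128),
+        //         FieldElement::from(2u128),
+        //         FieldElement::from(3u128),
+        //         FieldElement::from(4u128),
+        //     ];
+        //     let hash = poseidon_hash(&fields).expect("should hash successfully");
+        //     assert_eq!(
+        //         hash,
+        //         field_from_hex("130bf204a32cac1f0ace56c78b731aa3809f06df2731ebcf6b3464a15788b1b9"),
+        //     );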
+ Ok(self.state[0]) + } +} + #[cfg(test)] mod test { use acir::AcirField; @@ -562,4 +631,19 @@ mod test { ]; assert_eq!(result, expected_result); } + + #[test] + fn hash_smoke_test() { + let fields = [ + FieldElement::from(1u128), + FieldElement::from(2u128), + FieldElement::from(3u128), + FieldElement::from(4u128), + ]; + let result = super::poseidon_hash(&fields).expect("should hash successfully"); + assert_eq!( + result, + field_from_hex("130bf204a32cac1f0ace56c78b731aa3809f06df2731ebcf6b3464a15788b1b9"), + ); + } } diff --git a/acvm-repo/brillig/src/black_box.rs b/acvm-repo/brillig/src/black_box.rs index c3240c6ff1e..534ef7d318e 100644 --- a/acvm-repo/brillig/src/black_box.rs +++ b/acvm-repo/brillig/src/black_box.rs @@ -12,11 +12,6 @@ pub enum BlackBoxOp { key: HeapArray, outputs: HeapVector, }, - /// Calculates the SHA256 hash of the inputs. - Sha256 { - message: HeapVector, - output: HeapArray, - }, /// Calculates the Blake2s hash of the inputs. Blake2s { message: HeapVector, @@ -130,7 +125,7 @@ pub enum BlackBoxOp { }, ToRadix { input: MemoryAddress, - radix: u32, + radix: MemoryAddress, output: HeapArray, output_bits: bool, }, diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 3f1a44b921b..56f715c13a9 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -3,7 +3,7 @@ use acir::{AcirField, BlackBoxFunc}; use acvm_blackbox_solver::BigIntSolver; use acvm_blackbox_solver::{ aes128_encrypt, blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, - keccakf1600, sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, + keccakf1600, sha256_compression, BlackBoxFunctionSolver, BlackBoxResolutionError, }; use num_bigint::BigUint; use num_traits::Zero; @@ -65,12 +65,6 @@ pub(crate) fn evaluate_black_box Ok(()) } - BlackBoxOp::Sha256 { message, output } => { - let message = to_u8_vec(read_heap_vector(memory, message)); - let bytes = sha256(message.as_slice())?; - memory.write_slice(memory.read_ref(output.pointer), &to_value_vec(&bytes)); - Ok(()) - } BlackBoxOp::Blake2s { message, output } => { let message = to_u8_vec(read_heap_vector(memory, message)); let bytes = blake2s(message.as_slice())?; @@ -361,7 +355,7 @@ pub(crate) fn evaluate_black_box state[i] = value.try_into().unwrap(); } - sha256compression(&mut state, &message); + sha256_compression(&mut state, &message); let state = state.map(|x| x.into()); memory.write_slice(memory.read_ref(output.pointer), &state); @@ -369,9 +363,13 @@ pub(crate) fn evaluate_black_box } BlackBoxOp::ToRadix { input, radix, output, output_bits } => { let input: F = *memory.read(*input).extract_field().expect("ToRadix input not a field"); + let radix = memory + .read(*radix) + .expect_integer_with_bit_size(IntegerBitSize::U32) + .expect("ToRadix opcode's radix bit size does not match expected bit size 32"); let mut input = BigUint::from_bytes_be(&input.to_be_bytes()); - let radix = BigUint::from(*radix); + let radix = BigUint::from_bytes_be(&radix.to_be_bytes()); let mut limbs: Vec> = Vec::with_capacity(output.size); @@ -447,7 +445,6 @@ impl BrilligBigintSolver { fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { match op { BlackBoxOp::AES128Encrypt { .. } => BlackBoxFunc::AES128Encrypt, - BlackBoxOp::Sha256 { .. } => BlackBoxFunc::SHA256, BlackBoxOp::Blake2s { .. } => BlackBoxFunc::Blake2s, BlackBoxOp::Blake3 { .. } => BlackBoxFunc::Blake3, BlackBoxOp::Keccak256 { .. 
} => BlackBoxFunc::Keccak256, @@ -470,54 +467,3 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::PedersenHash { .. } => BlackBoxFunc::PedersenHash, } } - -#[cfg(test)] -mod test { - use acir::{ - brillig::{BlackBoxOp, MemoryAddress}, - FieldElement, - }; - use acvm_blackbox_solver::StubbedBlackBoxSolver; - - use crate::{ - black_box::{evaluate_black_box, to_u8_vec, to_value_vec, BrilligBigintSolver}, - HeapArray, HeapVector, Memory, - }; - - #[test] - fn sha256() { - let message: Vec = b"hello world".to_vec(); - let message_length = message.len(); - - let mut memory: Memory = Memory::default(); - let message_pointer = 3; - let result_pointer = message_pointer + message_length; - memory.write(MemoryAddress(0), message_pointer.into()); - memory.write(MemoryAddress(1), message_length.into()); - memory.write(MemoryAddress(2), result_pointer.into()); - memory.write_slice(MemoryAddress(message_pointer), to_value_vec(&message).as_slice()); - - let op = BlackBoxOp::Sha256 { - message: HeapVector { pointer: 0.into(), size: 1.into() }, - output: HeapArray { pointer: 2.into(), size: 32 }, - }; - - evaluate_black_box( - &op, - &StubbedBlackBoxSolver, - &mut memory, - &mut BrilligBigintSolver::default(), - ) - .unwrap(); - - let result = memory.read_slice(MemoryAddress(result_pointer), 32); - - assert_eq!( - to_u8_vec(result), - vec![ - 185, 77, 39, 185, 147, 77, 62, 8, 165, 46, 82, 215, 218, 125, 171, 250, 196, 132, - 239, 227, 122, 83, 128, 238, 144, 136, 247, 172, 226, 239, 205, 233 - ] - ); - } -} diff --git a/compiler/integration-tests/package.json b/compiler/integration-tests/package.json index a88e55b2321..64a638539d5 100644 --- a/compiler/integration-tests/package.json +++ b/compiler/integration-tests/package.json @@ -21,7 +21,7 @@ "@web/dev-server-esbuild": "^0.3.6", "@web/dev-server-import-maps": "^0.2.0", "@web/test-runner": "^0.18.1", - "@web/test-runner-playwright": "^0.10.0", + "@web/test-runner-playwright": "^0.11.0", "eslint": "^8.57.0", "eslint-plugin-prettier": "^5.1.3", "ethers": "^6.7.1", diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 2dde5d2ca49..889af07fbef 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -23,25 +23,6 @@ pub(crate) fn convert_black_box_call { - if let ([message], [BrilligVariable::BrilligArray(result_array)]) = - (function_arguments, function_results) - { - let message_vector = convert_array_or_vector(brillig_context, *message, bb_func); - let output_heap_array = - brillig_context.codegen_brillig_array_to_heap_array(*result_array); - - brillig_context.black_box_op_instruction(BlackBoxOp::Sha256 { - message: message_vector, - output: output_heap_array, - }); - - brillig_context.deallocate_heap_vector(message_vector); - brillig_context.deallocate_heap_array(output_heap_array); - } else { - unreachable!("ICE: SHA256 expects one array argument and one array result") - } - } BlackBoxFunc::Blake2s => { if let ([message], [BrilligVariable::BrilligArray(result_array)]) = (function_arguments, function_results) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index d92412677ca..c9c31267d7b 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ 
b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -78,9 +78,11 @@ impl BrilligContext< let heap_array = self.codegen_brillig_array_to_heap_array(target_array); + let radix_var = self.make_constant_instruction(F::from(radix as u128), 32); + self.black_box_op_instruction(BlackBoxOp::ToRadix { input: source_field.address, - radix, + radix: radix_var.address, output: heap_array, output_bits, }); @@ -91,5 +93,6 @@ impl BrilligContext< self.deallocate_single_addr(items_len); } self.deallocate_register(heap_array.pointer); + self.deallocate_register(radix_var.address); } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs index ea8969eddf3..0199d9537a6 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs @@ -9,7 +9,7 @@ use super::{ brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, debug_show::DebugToString, registers::RegisterAllocator, - BrilligContext, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + BrilligContext, ReservedRegisters, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, }; impl BrilligContext { @@ -20,9 +20,12 @@ impl BrilligContext< pointer_register: MemoryAddress, size: usize, ) { - let size_register = self.make_usize_constant_instruction(size.into()); - self.codegen_allocate_mem(pointer_register, size_register.address); - self.deallocate_single_addr(size_register); + self.load_free_memory_pointer_instruction(pointer_register); + self.codegen_usize_op_in_place( + ReservedRegisters::free_memory_pointer(), + BrilligBinaryOp::Add, + size, + ); } /// Allocates an array of size contained in size_register and stores the diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 7c2fb541006..08e6c18182b 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -285,9 +285,6 @@ impl DebugShow { outputs ); } - BlackBoxOp::Sha256 { message, output } => { - debug_println!(self.enable_debug_trace, " SHA256 {} -> {}", message, output); - } BlackBoxOp::Keccak256 { message, output } => { debug_println!(self.enable_debug_trace, " KECCAK256 {} -> {}", message, output); } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 0cad7b9c978..21d4dfb60b8 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -204,10 +204,6 @@ impl GeneratedAcir { BlackBoxFuncCall::XOR { lhs: inputs[0][0], rhs: inputs[1][0], output: outputs[0] } } BlackBoxFunc::RANGE => BlackBoxFuncCall::RANGE { input: inputs[0][0] }, - BlackBoxFunc::SHA256 => BlackBoxFuncCall::SHA256 { - inputs: inputs[0].clone(), - outputs: outputs.try_into().expect("Compiler should generate correct size outputs"), - }, BlackBoxFunc::Blake2s => BlackBoxFuncCall::Blake2s { inputs: inputs[0].clone(), outputs: outputs.try_into().expect("Compiler should generate correct size outputs"), @@ -649,7 +645,6 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { // variable number of inputs. 
BlackBoxFunc::AES128Encrypt | BlackBoxFunc::Keccak256 - | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s | BlackBoxFunc::Blake3 | BlackBoxFunc::PedersenCommitment @@ -701,10 +696,7 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::AND | BlackBoxFunc::XOR => Some(1), // 32 byte hash algorithms - BlackBoxFunc::Keccak256 - | BlackBoxFunc::SHA256 - | BlackBoxFunc::Blake2s - | BlackBoxFunc::Blake3 => Some(32), + BlackBoxFunc::Keccak256 | BlackBoxFunc::Blake2s | BlackBoxFunc::Blake3 => Some(32), BlackBoxFunc::Keccakf1600 => Some(25), // The permutation returns a fixed number of outputs, equals to the inputs length which depends on the proving system implementation. diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index d3e5acb467b..3068f2b5c37 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -488,7 +488,6 @@ fn simplify_black_box_func( } }; match bb_func { - BlackBoxFunc::SHA256 => simplify_hash(dfg, arguments, acvm::blackbox_solver::sha256), BlackBoxFunc::Blake2s => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake2s), BlackBoxFunc::Blake3 => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake3), BlackBoxFunc::Keccakf1600 => { diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 467514114e4..cb455507985 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -925,7 +925,7 @@ mod test { ir::{ dfg::DataFlowGraph, function::Function, - instruction::{BinaryOp, Instruction, Intrinsic, TerminatorInstruction}, + instruction::{BinaryOp, Instruction, TerminatorInstruction}, map::Id, types::Type, value::{Value, ValueId}, @@ -1487,8 +1487,7 @@ mod test { // Tests that it does not simplify a true constraint an always-false constraint // acir(inline) fn main f1 { // b0(v0: [u8; 2]): - // v4 = call sha256(v0, u8 2) - // v5 = array_get v4, index u8 0 + // v5 = array_get v0, index u8 0 // v6 = cast v5 as u32 // v8 = truncate v6 to 1 bits, max_bit_size: 32 // v9 = cast v8 as u1 @@ -1520,13 +1519,8 @@ mod test { let array = builder.add_parameter(array_type); let zero = builder.numeric_constant(0_u128, Type::unsigned(8)); - let two = builder.numeric_constant(2_u128, Type::unsigned(8)); - let keccak = - builder.import_intrinsic_id(Intrinsic::BlackBox(acvm::acir::BlackBoxFunc::SHA256)); - let v4 = - builder.insert_call(keccak, vec![array, two], vec![Type::Array(element_type, 32)])[0]; - let v5 = builder.insert_array_get(v4, zero, Type::unsigned(8)); + let v5 = builder.insert_array_get(array, zero, Type::unsigned(8)); let v6 = builder.insert_cast(v5, Type::unsigned(32)); let i_two = builder.numeric_constant(2_u128, Type::unsigned(32)); let v8 = builder.insert_binary(v6, BinaryOp::Mod, i_two); diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index d14f50891ea..68c04e3b4b4 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -615,7 +615,8 @@ impl<'f> PerFunctionContext<'f> { fn reduce_load_result_count(&mut self, value: ValueId) { if let Some(context) = self.load_results.get_mut(&value) { - context.uses = context.uses.saturating_sub(1); + // TODO this was saturating https://github.com/noir-lang/noir/issues/6124 + context.uses = 
context.uses.wrapping_sub(1); } } @@ -743,7 +744,8 @@ impl<'f> PerFunctionContext<'f> { if all_loads_removed && !store_alias_used { self.instructions_to_remove.insert(*store_instruction); if let Some((_, counter)) = remaining_last_stores.get_mut(store_address) { - *counter = counter.saturating_sub(1); + // TODO this was saturating https://github.com/noir-lang/noir/issues/6124 + *counter = counter.wrapping_sub(1); } } else if let Some((_, counter)) = remaining_last_stores.get_mut(store_address) { *counter += 1; diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 2e118eb4f0e..c21ca353e07 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -794,7 +794,7 @@ fn to_le_radix( }; // Decompose the integer into its radix digits in little endian form. - let decomposed_integer = compute_to_radix(value, radix); + let decomposed_integer = compute_to_radix_le(value, radix); let decomposed_integer = vecmap(0..limb_count as usize, |i| match decomposed_integer.get(i) { Some(digit) => Value::U8(*digit), None => Value::U8(0), @@ -805,7 +805,7 @@ fn to_le_radix( )) } -fn compute_to_radix(field: FieldElement, radix: u32) -> Vec { +fn compute_to_radix_le(field: FieldElement, radix: u32) -> Vec { let bit_size = u32::BITS - (radix - 1).leading_zeros(); let radix_big = BigUint::from(radix); assert_eq!(BigUint::from(2u128).pow(bit_size), radix_big, "ICE: Radix must be a power of 2"); diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 91e4115ff69..cb291902ae2 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -132,41 +132,41 @@ fn check_trait_implemented_for_all_t() { trait Default { fn default() -> Self; } - + trait Eq { fn eq(self, other: Self) -> bool; } - + trait IsDefault { fn is_default(self) -> bool; } - + impl IsDefault for T where T: Default + Eq { fn is_default(self) -> bool { self.eq(T::default()) } } - + struct Foo { a: u64, } - + impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + fn eq(self, other: Foo) -> bool { self.a == other.a } } - + impl Default for u64 { fn default() -> Self { 0 } } - + impl Default for Foo { fn default() -> Self { Foo { a: Default::default() } } } - + fn main(a: Foo) -> pub bool { a.is_default() }"; @@ -179,12 +179,12 @@ fn check_trait_implementation_duplicate_method() { trait Default { fn default(x: Field, y: Field) -> Field; } - + struct Foo { bar: Field, array: [Field; 2], } - + impl Default for Foo { // Duplicate trait methods should not compile fn default(x: Field, y: Field) -> Field { @@ -195,7 +195,7 @@ fn check_trait_implementation_duplicate_method() { x + 2 * y } } - + fn main() {}"; let errors = get_program_errors(src); @@ -226,16 +226,16 @@ fn check_trait_wrong_method_return_type() { trait Default { fn default() -> Self; } - + struct Foo { } - + impl Default for Foo { fn default() -> Field { 0 } } - + fn main() { } "; @@ -266,18 +266,18 @@ fn check_trait_wrong_method_return_type2() { trait Default { fn default(x: Field, y: Field) -> Self; } - + struct Foo { bar: Field, array: [Field; 2], } - + impl Default for Foo { fn default(x: Field, _y: Field) -> Field { x } } - + fn main() { }"; let errors = get_program_errors(src); @@ -306,22 +306,22 @@ fn check_trait_missing_implementation() { let src = " trait Default { fn default(x: Field, y: Field) -> Self; - + fn 
method2(x: Field) -> Field; - + } - + struct Foo { bar: Field, array: [Field; 2], } - + impl Default for Foo { fn default(x: Field, y: Field) -> Self { Self { bar: x, array: [x,y] } } } - + fn main() { } "; @@ -353,17 +353,17 @@ fn check_trait_not_in_scope() { bar: Field, array: [Field; 2], } - + // Default trait does not exist impl Default for Foo { fn default(x: Field, y: Field) -> Self { Self { bar: x, array: [x,y] } } } - + fn main() { } - + "; let errors = get_program_errors(src); assert!(!has_parser_error(&errors)); @@ -387,19 +387,19 @@ fn check_trait_wrong_method_name() { let src = " trait Default { } - + struct Foo { bar: Field, array: [Field; 2], } - + // wrong trait name method should not compile impl Default for Foo { fn does_not_exist(x: Field, y: Field) -> Self { Self { bar: x, array: [x,y] } } } - + fn main() { }"; let compilation_errors = get_program_errors(src); @@ -432,17 +432,17 @@ fn check_trait_wrong_parameter() { trait Default { fn default(x: Field) -> Self; } - + struct Foo { bar: u32, } - + impl Default for Foo { fn default(x: u32) -> Self { Foo {bar: x} } } - + fn main() { } "; @@ -475,18 +475,18 @@ fn check_trait_wrong_parameter2() { trait Default { fn default(x: Field, y: Field) -> Self; } - + struct Foo { bar: Field, array: [Field; 2], } - + impl Default for Foo { fn default(x: Field, y: Foo) -> Self { Self { bar: x, array: [x, y.bar] } } } - + fn main() { }"; @@ -519,7 +519,7 @@ fn check_trait_wrong_parameter_type() { pub trait Default { fn default(x: Field, y: NotAType) -> Field; } - + fn main(x: Field, y: Field) { assert(y == x); }"; @@ -550,18 +550,18 @@ fn check_trait_wrong_parameters_count() { trait Default { fn default(x: Field, y: Field) -> Self; } - + struct Foo { bar: Field, array: [Field; 2], } - + impl Default for Foo { fn default(x: Field) -> Self { Self { bar: x, array: [x, x] } } } - + fn main() { } "; @@ -630,16 +630,16 @@ fn check_impl_struct_not_trait() { struct Default { x: Field, - z: Field, + z: Field, } - + // Default is a struct not a trait impl Default for Foo { fn default(x: Field, y: Field) -> Self { Self { bar: x, array: [x,y] } } } - + fn main() {} "; let errors = get_program_errors(src); @@ -665,23 +665,23 @@ fn check_trait_duplicate_declaration() { trait Default { fn default(x: Field, y: Field) -> Self; } - + struct Foo { bar: Field, array: [Field; 2], } - + impl Default for Foo { fn default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } } - - + + trait Default { fn default(x: Field) -> Self; } - + fn main() { }"; let errors = get_program_errors(src); @@ -713,7 +713,7 @@ fn check_trait_duplicate_implementation() { struct Foo { bar: Field, } - + impl Default for Foo { } impl Default for Foo { @@ -744,18 +744,18 @@ fn check_trait_duplicate_implementation_with_alias() { let src = " trait Default { } - + struct MyStruct { } - + type MyType = MyStruct; - + impl Default for MyStruct { } - + impl Default for MyType { } - + fn main() { } "; @@ -808,7 +808,7 @@ fn check_trait_as_type_as_fn_parameter() { } impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + fn eq(self, other: Foo) -> bool { self.a == other.a } } fn test_eq(x: impl Eq) -> bool { @@ -837,11 +837,11 @@ fn check_trait_as_type_as_two_fn_parameters() { } impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + fn eq(self, other: Foo) -> bool { self.a == other.a } } impl Test for u64 { - fn test(self) -> bool { self == self } + fn test(self) -> bool { self == self } } fn test_eq(x: impl Eq, y: impl Test) -> bool { @@ -1426,7 
+1426,7 @@ fn specify_function_types_with_turbofish() { fn generic_func() -> (T, U) where T: Default, U: Default { (T::default(), U::default()) } - + fn main() { let _ = generic_func::(); } @@ -1453,13 +1453,13 @@ fn specify_method_types_with_turbofish() { struct Foo { inner: T } - + impl Foo { fn generic_method(_self: Self) -> U where U: Default { U::default() } } - + fn main() { let foo: Foo = Foo { inner: 1 }; let _ = foo.generic_method::(); @@ -1522,13 +1522,13 @@ fn incorrect_turbofish_count_method_call() { struct Foo { inner: T } - + impl Foo { fn generic_method(_self: Self) -> U where U: Default { U::default() } } - + fn main() { let foo: Foo = Foo { inner: 1 }; let _ = foo.generic_method::(); @@ -1604,7 +1604,7 @@ fn numeric_generic_binary_operation_type_mismatch() { let mut check: bool = true; check = N; check - } + } "#; let errors = get_program_errors(src); assert_eq!(errors.len(), 1); @@ -1766,7 +1766,7 @@ fn numeric_generic_used_in_nested_type_pass() { } pub struct InnerNumeric { inner: [u64; N], - } + } "#; assert_no_errors(src); } @@ -1783,13 +1783,13 @@ fn numeric_generic_used_in_trait() { c: Field, d: T, } - + impl Deserialize for MyType { fn deserialize(fields: [Field; N], other: T) -> Self { MyType { a: fields[0], b: fields[1], c: fields[2], d: other } } } - + trait Deserialize { fn deserialize(fields: [Field; N], other: T) -> Self; } @@ -1810,17 +1810,17 @@ fn numeric_generic_in_trait_impl_with_extra_impl_generics() { c: Field, d: T, } - + // Make sure that `T` is placed before `N` as we want to test that the order of the generics is correctly maintained. // `N` is used first in the trait impl generics (`Deserialize for MyType`). // We want to make sure that the compiler correctly accounts for that `N` has a numeric kind - // while `T` has a normal kind. + // while `T` has a normal kind. impl Deserialize for MyType where T: Default { fn deserialize(fields: [Field; N]) -> Self { MyType { a: fields[0], b: fields[1], c: fields[2], d: T::default() } } } - + trait Deserialize { fn deserialize(fields: [Field; N]) -> Self; } @@ -2027,7 +2027,7 @@ fn impl_stricter_than_trait_no_trait_method_constraints() { // is a `DefCollectorErrorKind::ImplIsStricterThanTrait` error. let src = r#" trait Serialize { - // We want to make sure we trigger the error when override a trait method + // We want to make sure we trigger the error when override a trait method // which itself has no trait constraints. 
fn serialize(self) -> [Field; N]; } @@ -2132,14 +2132,14 @@ fn impl_stricter_than_trait_different_object_generics() { } impl Bar for () { - fn bar_good() - where - OtherOption>: OtherTrait, + fn bar_good() + where + OtherOption>: OtherTrait, Option: MyTrait { } - fn bar_bad() - where - OtherOption>: OtherTrait, + fn bar_bad() + where + OtherOption>: OtherTrait, Option: MyTrait { } fn array_good() where [A; 8]: MyTrait { } diff --git a/compiler/wasm/test/compiler/node/compile.test.ts b/compiler/wasm/test/compiler/node/compile.test.ts index 811dc95ce16..4e40df920e0 100644 --- a/compiler/wasm/test/compiler/node/compile.test.ts +++ b/compiler/wasm/test/compiler/node/compile.test.ts @@ -10,30 +10,42 @@ import { shouldCompileContractIdentically, shouldCompileProgramIdentically } fro const basePath = resolve(join(__dirname, '../../')); describe('noir-compiler/node', () => { - shouldCompileProgramIdentically(async () => { - const { simpleScriptProjectPath, simpleScriptExpectedArtifact } = getPaths(basePath); - - const fm = createFileManager(simpleScriptProjectPath); - const nargoArtifact = JSON.parse((await readFile(simpleScriptExpectedArtifact)).toString()) as ProgramArtifact; - const noirWasmArtifact = await compile_program(fm); - return { nargoArtifact, noirWasmArtifact }; - }, expect); - - shouldCompileProgramIdentically(async () => { - const { depsScriptProjectPath, depsScriptExpectedArtifact } = getPaths(basePath); - - const fm = createFileManager(depsScriptProjectPath); - const nargoArtifact = JSON.parse((await readFile(depsScriptExpectedArtifact)).toString()) as ProgramArtifact; - const noirWasmArtifact = await compile_program(fm); - return { nargoArtifact, noirWasmArtifact }; - }, expect); - - shouldCompileContractIdentically(async () => { - const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); - - const fm = createFileManager(contractProjectPath); - const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; - const noirWasmArtifact = await compile_contract(fm); - return { nargoArtifact, noirWasmArtifact }; - }, expect); + shouldCompileProgramIdentically( + async () => { + const { simpleScriptProjectPath, simpleScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(simpleScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(simpleScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile_program(fm); + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + /*30 second timeout*/ 30000, + ); + + shouldCompileProgramIdentically( + async () => { + const { depsScriptProjectPath, depsScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(depsScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(depsScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile_program(fm); + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + /*30 second timeout*/ 30000, + ); + + shouldCompileContractIdentically( + async () => { + const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(contractProjectPath); + const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; + const noirWasmArtifact = await compile_contract(fm); + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + /*30 second timeout*/ 30000, + ); }); diff --git a/noir_stdlib/src/hash/keccak.nr 
b/noir_stdlib/src/hash/keccak.nr index bb55ce62210..37eb4dfe8a6 100644 --- a/noir_stdlib/src/hash/keccak.nr +++ b/noir_stdlib/src/hash/keccak.nr @@ -69,9 +69,9 @@ pub(crate) fn keccak256(input: [u8; N], message_size: u32) -> [u8; 3 state = keccakf1600(state); } } else { - // `real_max_blocks` is guaranteed to at least be `1` + // `real_max_blocks` is guaranteed to at least be `1` // We peel out the first block as to avoid a conditional inside of the loop. - // Otherwise, a dynamic predicate can cause a blowup in a constrained runtime. + // Otherwise, a dynamic predicate can cause a blowup in a constrained runtime. for j in 0..LIMBS_PER_BLOCK { state[j] = sliced_buffer.get(j); } diff --git a/noir_stdlib/src/hash/mod.nr b/noir_stdlib/src/hash/mod.nr index 9aa7d220593..93bce3c20e1 100644 --- a/noir_stdlib/src/hash/mod.nr +++ b/noir_stdlib/src/hash/mod.nr @@ -135,7 +135,7 @@ pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] #[foreign(poseidon2_permutation)] pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} -// Generic hashing support. +// Generic hashing support. // Partially ported and impacted by rust. // Hash trait shall be implemented per type. @@ -157,7 +157,7 @@ comptime fn derive_hash(s: StructDefinition) -> Quoted { // TODO: consider making the types generic here ([u8], [Field], etc.) pub trait Hasher { fn finish(self) -> Field; - + fn write(&mut self, input: Field); } @@ -169,7 +169,7 @@ pub trait BuildHasher where H: Hasher { pub struct BuildHasherDefault; impl BuildHasher for BuildHasherDefault -where +where H: Hasher + Default { fn build_hasher(_self: Self) -> H { H::default() @@ -177,7 +177,7 @@ where } impl Default for BuildHasherDefault -where +where H: Hasher + Default { fn default() -> Self { BuildHasherDefault {} diff --git a/noir_stdlib/src/hash/sha256.nr b/noir_stdlib/src/hash/sha256.nr index 6d169d027e0..e712019b5cc 100644 --- a/noir_stdlib/src/hash/sha256.nr +++ b/noir_stdlib/src/hash/sha256.nr @@ -4,11 +4,12 @@ use crate::runtime::is_unconstrained; // 32 bytes. // Deprecated in favour of `sha256_var` -#[foreign(sha256)] // docs:start:sha256 pub fn sha256(input: [u8; N]) -> [u8; 32] // docs:end:sha256 -{} +{ + crate::sha256::digest(input) +} #[foreign(sha256_compression)] pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {} @@ -107,7 +108,7 @@ pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { msg_byte_ptr = new_msg_byte_ptr; } - // If the block is filled, compress it. + // If the block is filled, compress it. // An un-filled block is handled after this loop. if msg_byte_ptr == BLOCK_SIZE { h = sha256_compression(msg_u8_to_u32(msg_block), h); @@ -116,8 +117,8 @@ pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { let modulo = N % BLOCK_SIZE; // Handle setup of the final msg block. - // This case is only hit if the msg is less than the block size, - // or our message cannot be evenly split into blocks. + // This case is only hit if the msg is less than the block size, + // or our message cannot be evenly split into blocks. if modulo != 0 { let msg_start = BLOCK_SIZE * num_blocks; let (new_msg_block, new_msg_byte_ptr) = unsafe { @@ -148,7 +149,7 @@ pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { let zero = msg_block[0] - msg_block[0]; // Pad the rest such that we have a [u32; 2] block at the end representing the length - // of the message, and a block of 1 0 ... 0 following the message (i.e. [1 << 7, 0, ..., 0]). + // of the message, and a block of 1 0 ... 
0 following the message (i.e. [1 << 7, 0, ..., 0]). msg_block[msg_byte_ptr] = 1 << 7; let last_block = msg_block; msg_byte_ptr = msg_byte_ptr + 1; diff --git a/scripts/install_bb.sh b/scripts/install_bb.sh index d60c73c0976..c94a1b7dff0 100755 --- a/scripts/install_bb.sh +++ b/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.55.0" +VERSION="0.56.0" BBUP_PATH=~/.bb/bbup diff --git a/test_programs/compile_success_empty/regression_4436/src/main.nr b/test_programs/compile_success_empty/regression_4436/src/main.nr index 834ea3250cc..336d0f1f4ed 100644 --- a/test_programs/compile_success_empty/regression_4436/src/main.nr +++ b/test_programs/compile_success_empty/regression_4436/src/main.nr @@ -3,8 +3,8 @@ trait LibTrait { fn get_constant() -> Field; } -global STRUCT_A_LEN: Field = 3; -global STRUCT_B_LEN: Field = 5; +global STRUCT_A_LEN: u32 = 3; +global STRUCT_B_LEN: u32 = 5; struct StructA; struct StructB; diff --git a/test_programs/compile_success_empty/regression_6077/Prover.toml b/test_programs/compile_success_empty/regression_6077/Prover.toml new file mode 100644 index 00000000000..ba3522cab01 --- /dev/null +++ b/test_programs/compile_success_empty/regression_6077/Prover.toml @@ -0,0 +1 @@ +a = 0 diff --git a/test_programs/execution_success/fold_basic/src/main.nr b/test_programs/execution_success/fold_basic/src/main.nr index 6c17120660b..440779d2460 100644 --- a/test_programs/execution_success/fold_basic/src/main.nr +++ b/test_programs/execution_success/fold_basic/src/main.nr @@ -1,7 +1,9 @@ fn main(x: Field, y: pub Field) { let z = foo(x, y); let z2 = foo(x, y); + let z3 = foo(x, y); assert(z == z2); + assert(z2 == z3); } #[fold] diff --git a/tooling/noir_js/src/index.ts b/tooling/noir_js/src/index.ts index 1feca8fa275..f3016efd032 100644 --- a/tooling/noir_js/src/index.ts +++ b/tooling/noir_js/src/index.ts @@ -2,15 +2,7 @@ import * as acvm from '@noir-lang/acvm_js'; import * as abi from '@noir-lang/noirc_abi'; import { CompiledCircuit } from '@noir-lang/types'; -export { - ecdsa_secp256r1_verify, - ecdsa_secp256k1_verify, - keccak256, - blake2s256, - sha256, - xor, - and, -} from '@noir-lang/acvm_js'; +export { ecdsa_secp256r1_verify, ecdsa_secp256k1_verify, keccak256, blake2s256, xor, and } from '@noir-lang/acvm_js'; export { InputMap } from '@noir-lang/noirc_abi'; export { WitnessMap, ForeignCallHandler, ForeignCallInput, ForeignCallOutput } from '@noir-lang/acvm_js'; diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 14fb5afe5f0..fd6d21132c9 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -41,7 +41,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.55.0", + "@aztec/bb.js": "0.56.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/tooling/noir_js_backend_barretenberg/src/backend.ts b/tooling/noir_js_backend_barretenberg/src/backend.ts index 4fd256a7a81..2569c7d868d 100644 --- a/tooling/noir_js_backend_barretenberg/src/backend.ts +++ b/tooling/noir_js_backend_barretenberg/src/backend.ts @@ -1,74 +1,26 @@ -import { decompressSync as gunzip } from 'fflate'; import { acirToUint8Array } from './serialize.js'; import { Backend, CompiledCircuit, ProofData, VerifierBackend } from '@noir-lang/types'; -import { BackendOptions } from './types.js'; import { deflattenFields } from './public_inputs.js'; import { reconstructProofWithPublicInputs, reconstructProofWithPublicInputsHonk } from './verifier.js'; -import { type Barretenberg } from '@aztec/bb.js'; +import { BackendOptions, UltraPlonkBackend, UltraHonkBackend as UltraHonkBackendInternal } from '@aztec/bb.js'; +import { decompressSync as gunzip } from 'fflate'; // This is the number of bytes in a UltraPlonk proof // minus the public inputs. const numBytesInProofWithoutPublicInputs: number = 2144; export class BarretenbergBackend implements Backend, VerifierBackend { - // These type assertions are used so that we don't - // have to initialize `api` and `acirComposer` in the constructor. - // These are initialized asynchronously in the `init` function, - // constructors cannot be asynchronous which is why we do this. - - protected api!: Barretenberg; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - protected acirComposer: any; - protected acirUncompressedBytecode: Uint8Array; + protected backend!: UltraPlonkBackend; - constructor( - acirCircuit: CompiledCircuit, - protected options: BackendOptions = { threads: 1 }, - ) { + constructor(acirCircuit: CompiledCircuit, options: BackendOptions = { threads: 1 }) { const acirBytecodeBase64 = acirCircuit.bytecode; - this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); - } - - /** @ignore */ - async instantiate(): Promise { - if (!this.api) { - if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { - this.options.threads = navigator.hardwareConcurrency; - } else { - try { - const os = await import('os'); - this.options.threads = os.cpus().length; - } catch (e) { - console.log('Could not detect environment. 
Falling back to one thread.', e); - } - } - const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - const api = await Barretenberg.new(this.options); - - const honkRecursion = false; - const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes( - this.acirUncompressedBytecode, - honkRecursion, - ); - - const crs = await Crs.new(subgroupSize + 1); - await api.commonInitSlabAllocator(subgroupSize); - await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - - this.acirComposer = await api.acirNewAcirComposer(subgroupSize); - await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); - this.api = api; - } + const acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); + this.backend = new UltraPlonkBackend(acirUncompressedBytecode, options); } /** @description Generates a proof */ async generateProof(compressedWitness: Uint8Array): Promise { - await this.instantiate(); - const proofWithPublicInputs = await this.api.acirCreateProof( - this.acirComposer, - this.acirUncompressedBytecode, - gunzip(compressedWitness), - ); + const proofWithPublicInputs = await this.backend.generateProof(gunzip(compressedWitness)); const splitIndex = proofWithPublicInputs.length - numBytesInProofWithoutPublicInputs; @@ -103,45 +55,22 @@ export class BarretenbergBackend implements Backend, VerifierBackend { vkAsFields: string[]; vkHash: string; }> { - await this.instantiate(); const proof = reconstructProofWithPublicInputs(proofData); - const proofAsFields = ( - await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs) - ).slice(numOfPublicInputs); - - // TODO: perhaps we should put this in the init function. Need to benchmark - // TODO how long it takes. - await this.api.acirInitVerificationKey(this.acirComposer); - - // Note: If you don't init verification key, `acirSerializeVerificationKeyIntoFields`` will just hang on serialization - const vk = await this.api.acirSerializeVerificationKeyIntoFields(this.acirComposer); - - return { - proofAsFields: proofAsFields.map((p) => p.toString()), - vkAsFields: vk[0].map((vk) => vk.toString()), - vkHash: vk[1].toString(), - }; + return this.backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); } /** @description Verifies a proof */ async verifyProof(proofData: ProofData): Promise { const proof = reconstructProofWithPublicInputs(proofData); - await this.instantiate(); - await this.api.acirInitVerificationKey(this.acirComposer); - return await this.api.acirVerifyProof(this.acirComposer, proof); + return this.backend.verifyProof(proof); } async getVerificationKey(): Promise { - await this.instantiate(); - await this.api.acirInitVerificationKey(this.acirComposer); - return await this.api.acirGetVerificationKey(this.acirComposer); + return this.backend.getVerificationKey(); } async destroy(): Promise { - if (!this.api) { - return; - } - await this.api.destroy(); + await this.backend.destroy(); } } @@ -157,54 +86,16 @@ export class UltraHonkBackend implements Backend, VerifierBackend { // These are initialized asynchronously in the `init` function, // constructors cannot be asynchronous which is why we do this. 
- protected api!: Barretenberg; - protected acirUncompressedBytecode: Uint8Array; + protected backend!: UltraHonkBackendInternal; - constructor( - acirCircuit: CompiledCircuit, - protected options: BackendOptions = { threads: 1 }, - ) { + constructor(acirCircuit: CompiledCircuit, options: BackendOptions = { threads: 1 }) { const acirBytecodeBase64 = acirCircuit.bytecode; - this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); + const acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); + this.backend = new UltraHonkBackendInternal(acirUncompressedBytecode, options); } - /** @ignore */ - async instantiate(): Promise { - if (!this.api) { - if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { - this.options.threads = navigator.hardwareConcurrency; - } else { - try { - const os = await import('os'); - this.options.threads = os.cpus().length; - } catch (e) { - console.log('Could not detect environment. Falling back to one thread.', e); - } - } - const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - const api = await Barretenberg.new(this.options); - - const honkRecursion = true; - const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes( - this.acirUncompressedBytecode, - honkRecursion, - ); - const crs = await Crs.new(subgroupSize + 1); - await api.commonInitSlabAllocator(subgroupSize); - await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - - // We don't init a proving key here in the Honk API - // await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); - this.api = api; - } - } - - async generateProof(decompressedWitness: Uint8Array): Promise { - await this.instantiate(); - const proofWithPublicInputs = await this.api.acirProveUltraHonk( - this.acirUncompressedBytecode, - gunzip(decompressedWitness), - ); + async generateProof(compressedWitness: Uint8Array): Promise { + const proofWithPublicInputs = await this.backend.generateProof(gunzip(compressedWitness)); const proofAsStrings = deflattenFields(proofWithPublicInputs.slice(4)); const numPublicInputs = Number(proofAsStrings[1]); @@ -229,55 +120,25 @@ export class UltraHonkBackend implements Backend, VerifierBackend { } async verifyProof(proofData: ProofData): Promise { - const { RawBuffer } = await import('@aztec/bb.js'); - const proof = reconstructProofWithPublicInputsHonk(proofData); - await this.instantiate(); - const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); - - return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(vkBuf)); + return this.backend.verifyProof(proof); } async getVerificationKey(): Promise { - await this.instantiate(); - return await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); + return this.backend.getVerificationKey(); } // TODO(https://github.com/noir-lang/noir/issues/5661): Update this to handle Honk recursive aggregation in the browser once it is ready in the backend itself async generateRecursiveProofArtifacts( - _proofData: ProofData, - _numOfPublicInputs: number, + proofData: ProofData, + numOfPublicInputs: number, ): Promise<{ proofAsFields: string[]; vkAsFields: string[]; vkHash: string }> { - await this.instantiate(); - // TODO(https://github.com/noir-lang/noir/issues/5661): This needs to be updated to handle recursive aggregation. - // There is still a proofAsFields method but we could consider getting rid of it as the proof itself - // is a list of field elements. 
- // UltraHonk also does not have public inputs directly prepended to the proof and they are still instead - // inserted at an offset. - // const proof = reconstructProofWithPublicInputs(proofData); - // const proofAsFields = (await this.api.acirProofAsFieldsUltraHonk(proof)).slice(numOfPublicInputs); - - // TODO: perhaps we should put this in the init function. Need to benchmark - // TODO how long it takes. - const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); - const vk = await this.api.acirVkAsFieldsUltraHonk(vkBuf); - - return { - // TODO(https://github.com/noir-lang/noir/issues/5661) - proofAsFields: [], - vkAsFields: vk.map((vk) => vk.toString()), - // We use an empty string for the vk hash here as it is unneeded as part of the recursive artifacts - // The user can be expected to hash the vk inside their circuit to check whether the vk is the circuit - // they expect - vkHash: '', - }; + const proof = reconstructProofWithPublicInputsHonk(proofData); + return this.backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); } async destroy(): Promise { - if (!this.api) { - return; - } - await this.api.destroy(); + await this.backend.destroy(); } } diff --git a/tooling/noir_js_backend_barretenberg/src/index.ts b/tooling/noir_js_backend_barretenberg/src/index.ts index 6786c1eec48..f1786396a2a 100644 --- a/tooling/noir_js_backend_barretenberg/src/index.ts +++ b/tooling/noir_js_backend_barretenberg/src/index.ts @@ -3,4 +3,4 @@ export { BarretenbergVerifier, UltraHonkVerifier } from './verifier.js'; // typedoc exports export { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; -export { BackendOptions } from './types.js'; +export { BackendOptions } from '@aztec/bb.js'; diff --git a/tooling/noir_js_backend_barretenberg/src/types.ts b/tooling/noir_js_backend_barretenberg/src/types.ts deleted file mode 100644 index fac23030aad..00000000000 --- a/tooling/noir_js_backend_barretenberg/src/types.ts +++ /dev/null @@ -1,9 +0,0 @@ -/** - * @description - * An options object, currently only used to specify the number of threads to use. - */ -export type BackendOptions = { - /** @description Number of threads */ - threads: number; - memory?: { maximum: number }; -}; diff --git a/tooling/noir_js_backend_barretenberg/src/verifier.ts b/tooling/noir_js_backend_barretenberg/src/verifier.ts index 58612672b35..885ec80caa8 100644 --- a/tooling/noir_js_backend_barretenberg/src/verifier.ts +++ b/tooling/noir_js_backend_barretenberg/src/verifier.ts @@ -1,69 +1,22 @@ import { ProofData } from '@noir-lang/types'; -import { BackendOptions } from './types.js'; import { flattenFieldsAsArray } from './public_inputs.js'; -import { type Barretenberg } from '@aztec/bb.js'; +import { BackendOptions, BarretenbergVerifier as BarretenbergVerifierInternal } from '@aztec/bb.js'; export class BarretenbergVerifier { - // These type assertions are used so that we don't - // have to initialize `api` and `acirComposer` in the constructor. - // These are initialized asynchronously in the `init` function, - // constructors cannot be asynchronous which is why we do this. 
- - private api!: Barretenberg; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - private acirComposer: any; - - constructor(private options: BackendOptions = { threads: 1 }) {} - - /** @ignore */ - async instantiate(): Promise { - if (!this.api) { - if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { - this.options.threads = navigator.hardwareConcurrency; - } else { - try { - const os = await import('os'); - this.options.threads = os.cpus().length; - } catch (e) { - console.log('Could not detect environment. Falling back to one thread.', e); - } - } - const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - - // This is the number of CRS points necessary to verify a Barretenberg proof. - const NUM_CRS_POINTS_FOR_VERIFICATION: number = 0; - const [api, crs] = await Promise.all([Barretenberg.new(this.options), Crs.new(NUM_CRS_POINTS_FOR_VERIFICATION)]); - - await api.commonInitSlabAllocator(NUM_CRS_POINTS_FOR_VERIFICATION); - await api.srsInitSrs( - new RawBuffer([] /* crs.getG1Data() */), - NUM_CRS_POINTS_FOR_VERIFICATION, - new RawBuffer(crs.getG2Data()), - ); - - this.acirComposer = await api.acirNewAcirComposer(NUM_CRS_POINTS_FOR_VERIFICATION); - this.api = api; - } + private verifier!: BarretenbergVerifierInternal; + + constructor(options: BackendOptions = { threads: 1 }) { + this.verifier = new BarretenbergVerifierInternal(options); } /** @description Verifies a proof */ async verifyProof(proofData: ProofData, verificationKey: Uint8Array): Promise { - const { RawBuffer } = await import('@aztec/bb.js'); - - await this.instantiate(); - // The verifier can be used for a variety of ACIR programs so we should not assume that it - // is preloaded with the correct verification key. - await this.api.acirLoadVerificationKey(this.acirComposer, new RawBuffer(verificationKey)); - const proof = reconstructProofWithPublicInputs(proofData); - return await this.api.acirVerifyProof(this.acirComposer, proof); + return this.verifier.verifyUltraplonkProof(proof, verificationKey); } async destroy(): Promise { - if (!this.api) { - return; - } - await this.api.destroy(); + await this.verifier.destroy(); } } @@ -78,60 +31,20 @@ export function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Arr } export class UltraHonkVerifier { - // These type assertions are used so that we don't - // have to initialize `api` in the constructor. - // These are initialized asynchronously in the `init` function, - // constructors cannot be asynchronous which is why we do this. - - private api!: Barretenberg; - - constructor(private options: BackendOptions = { threads: 1 }) {} - - /** @ignore */ - async instantiate(): Promise { - if (!this.api) { - if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { - this.options.threads = navigator.hardwareConcurrency; - } else { - try { - const os = await import('os'); - this.options.threads = os.cpus().length; - } catch (e) { - console.log('Could not detect environment. Falling back to one thread.', e); - } - } - const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - - // This is the number of CRS points necessary to verify a Barretenberg proof. 
- const NUM_CRS_POINTS_FOR_VERIFICATION: number = 0; - const [api, crs] = await Promise.all([Barretenberg.new(this.options), Crs.new(NUM_CRS_POINTS_FOR_VERIFICATION)]); - - await api.commonInitSlabAllocator(NUM_CRS_POINTS_FOR_VERIFICATION); - await api.srsInitSrs( - new RawBuffer([] /* crs.getG1Data() */), - NUM_CRS_POINTS_FOR_VERIFICATION, - new RawBuffer(crs.getG2Data()), - ); - - this.api = api; - } + private verifier!: BarretenbergVerifierInternal; + + constructor(options: BackendOptions = { threads: 1 }) { + this.verifier = new BarretenbergVerifierInternal(options); } /** @description Verifies a proof */ async verifyProof(proofData: ProofData, verificationKey: Uint8Array): Promise { - const { RawBuffer } = await import('@aztec/bb.js'); - - await this.instantiate(); - const proof = reconstructProofWithPublicInputsHonk(proofData); - return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(verificationKey)); + return this.verifier.verifyUltrahonkProof(proof, verificationKey); } async destroy(): Promise { - if (!this.api) { - return; - } - await this.api.destroy(); + await this.verifier.destroy(); } } diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json index b8d0e74617d..021e80017aa 100644 --- a/tooling/noirc_abi_wasm/package.json +++ b/tooling/noirc_abi_wasm/package.json @@ -42,7 +42,7 @@ "@esm-bundle/chai": "^4.3.4-fix.0", "@web/dev-server-esbuild": "^0.3.6", "@web/test-runner": "^0.18.1", - "@web/test-runner-playwright": "^0.10.0", + "@web/test-runner-playwright": "^0.11.0", "eslint": "^8.57.0", "mocha": "^10.2.0" } diff --git a/tooling/profiler/src/opcode_formatter.rs b/tooling/profiler/src/opcode_formatter.rs index fa72793e406..f367360b189 100644 --- a/tooling/profiler/src/opcode_formatter.rs +++ b/tooling/profiler/src/opcode_formatter.rs @@ -14,7 +14,6 @@ fn format_blackbox_function(call: &BlackBoxFuncCall) -> String { BlackBoxFuncCall::AND { .. } => "and".to_string(), BlackBoxFuncCall::XOR { .. } => "xor".to_string(), BlackBoxFuncCall::RANGE { .. } => "range".to_string(), - BlackBoxFuncCall::SHA256 { .. } => "sha256".to_string(), BlackBoxFuncCall::Blake2s { .. } => "blake2s".to_string(), BlackBoxFuncCall::Blake3 { .. } => "blake3".to_string(), BlackBoxFuncCall::SchnorrVerify { .. } => "schnorr_verify".to_string(), @@ -41,7 +40,6 @@ fn format_blackbox_function(call: &BlackBoxFuncCall) -> String { fn format_blackbox_op(call: &BlackBoxOp) -> String { match call { BlackBoxOp::AES128Encrypt { .. } => "aes128_encrypt".to_string(), - BlackBoxOp::Sha256 { .. } => "sha256".to_string(), BlackBoxOp::Blake2s { .. } => "blake2s".to_string(), BlackBoxOp::Blake3 { .. } => "blake3".to_string(), BlackBoxOp::SchnorrVerify { .. 
} => "schnorr_verify".to_string(), diff --git a/yarn.lock b/yarn.lock index 5d713532c67..ae9251ac205 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.55.0": - version: 0.55.0 - resolution: "@aztec/bb.js@npm:0.55.0" +"@aztec/bb.js@npm:0.56.0": + version: 0.56.0 + resolution: "@aztec/bb.js@npm:0.56.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -231,7 +231,7 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: 18ae18a962e05fd28e15e17796ec08e889ea331ec1790f8c77aa1d9472b4942c8f03c2cb8d8366a2e7a6896ffc135cdfc5b3bb6c9743bdf85e183f771c4f7b88 + checksum: 199a1e6c408e4c1399b69169e1a0a48bac92688299312a7dd6eca242e4970808bc370808d2fe4194f17e0d1fe7f5d09676709a05e3ad6ed569ac5553134be34a languageName: node linkType: hard @@ -4146,7 +4146,7 @@ __metadata: "@esm-bundle/chai": ^4.3.4-fix.0 "@web/dev-server-esbuild": ^0.3.6 "@web/test-runner": ^0.18.1 - "@web/test-runner-playwright": ^0.10.0 + "@web/test-runner-playwright": ^0.11.0 chai: ^4.4.1 eslint: ^8.57.0 eslint-plugin-prettier: ^5.1.3 @@ -4161,7 +4161,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.55.0 + "@aztec/bb.js": 0.56.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 @@ -4278,7 +4278,7 @@ __metadata: "@noir-lang/types": "workspace:*" "@web/dev-server-esbuild": ^0.3.6 "@web/test-runner": ^0.18.1 - "@web/test-runner-playwright": ^0.10.0 + "@web/test-runner-playwright": ^0.11.0 eslint: ^8.57.0 mocha: ^10.2.0 languageName: unknown @@ -6173,15 +6173,6 @@ __metadata: languageName: node linkType: hard -"@web/browser-logs@npm:^0.3.4": - version: 0.3.4 - resolution: "@web/browser-logs@npm:0.3.4" - dependencies: - errorstacks: ^2.2.0 - checksum: fe212c91c26deada3458b6562a8d7d2ae98b7b51c7099e1cdb972e9f799c63f6cd170776b2eadbe43c47531cb6d9b06f48282113a5944f4394270a0076f8565e - languageName: node - linkType: hard - "@web/browser-logs@npm:^0.4.0": version: 0.4.0 resolution: "@web/browser-logs@npm:0.4.0" @@ -6224,32 +6215,6 @@ __metadata: languageName: node linkType: hard -"@web/dev-server-core@npm:^0.6.2": - version: 0.6.3 - resolution: "@web/dev-server-core@npm:0.6.3" - dependencies: - "@types/koa": ^2.11.6 - "@types/ws": ^7.4.0 - "@web/parse5-utils": ^2.0.2 - chokidar: ^3.4.3 - clone: ^2.1.2 - es-module-lexer: ^1.0.0 - get-stream: ^6.0.0 - is-stream: ^2.0.0 - isbinaryfile: ^5.0.0 - koa: ^2.13.0 - koa-etag: ^4.0.0 - koa-send: ^5.0.1 - koa-static: ^5.0.0 - lru-cache: ^8.0.4 - mime-types: ^2.1.27 - parse5: ^6.0.1 - picomatch: ^2.2.2 - ws: ^7.4.2 - checksum: 98ba42df5eb865828c223bd1de098d013efd8e89983efff28e26ecd9d08c8b35fd29b4c1256ed08b05ecb365abe1aa80d2854e1953bdebbbe230a7e2a597dd8f - languageName: node - linkType: hard - "@web/dev-server-core@npm:^0.7.0": version: 0.7.0 resolution: "@web/dev-server-core@npm:0.7.0" @@ -6352,7 +6317,7 @@ __metadata: languageName: node linkType: hard -"@web/parse5-utils@npm:^2.0.2, @web/parse5-utils@npm:^2.1.0": +"@web/parse5-utils@npm:^2.1.0": version: 2.1.0 resolution: "@web/parse5-utils@npm:2.1.0" dependencies: @@ -6385,40 +6350,6 @@ __metadata: languageName: node linkType: hard -"@web/test-runner-core@npm:^0.12.0": - version: 0.12.0 - resolution: "@web/test-runner-core@npm:0.12.0" - dependencies: - "@babel/code-frame": ^7.12.11 - "@types/babel__code-frame": ^7.0.2 - "@types/co-body": ^6.1.0 - "@types/convert-source-map": ^2.0.0 - "@types/debounce": ^1.2.0 - 
"@types/istanbul-lib-coverage": ^2.0.3 - "@types/istanbul-reports": ^3.0.0 - "@web/browser-logs": ^0.3.4 - "@web/dev-server-core": ^0.6.2 - chokidar: ^3.4.3 - cli-cursor: ^3.1.0 - co-body: ^6.1.0 - convert-source-map: ^2.0.0 - debounce: ^1.2.0 - dependency-graph: ^0.11.0 - globby: ^11.0.1 - ip: ^1.1.5 - istanbul-lib-coverage: ^3.0.0 - istanbul-lib-report: ^3.0.1 - istanbul-reports: ^3.0.2 - log-update: ^4.0.0 - nanocolors: ^0.2.1 - nanoid: ^3.1.25 - open: ^8.0.2 - picomatch: ^2.2.2 - source-map: ^0.7.3 - checksum: e71afa227f9dc2ea4ec67838b1bc4c8af2c61d3e6002b78e37724e3dc09be466e7f7aa5e6795d5431dca1a0b13b94765a880103f98c5497c97943c2f708327eb - languageName: node - linkType: hard - "@web/test-runner-core@npm:^0.13.0": version: 0.13.0 resolution: "@web/test-runner-core@npm:0.13.0" @@ -6453,19 +6384,6 @@ __metadata: languageName: node linkType: hard -"@web/test-runner-coverage-v8@npm:^0.7.3": - version: 0.7.3 - resolution: "@web/test-runner-coverage-v8@npm:0.7.3" - dependencies: - "@web/test-runner-core": ^0.12.0 - istanbul-lib-coverage: ^3.0.0 - lru-cache: ^8.0.4 - picomatch: ^2.2.2 - v8-to-istanbul: ^9.0.1 - checksum: 05d7a9a4df8ca30991307a8d69ac9388a6572a9c6585887a925e7bdb158a0430f213c81cb356b8dcb7bf9cd3423d0071030b481c29358562bd344da8ea814daa - languageName: node - linkType: hard - "@web/test-runner-coverage-v8@npm:^0.8.0": version: 0.8.0 resolution: "@web/test-runner-coverage-v8@npm:0.8.0" @@ -6488,17 +6406,6 @@ __metadata: languageName: node linkType: hard -"@web/test-runner-playwright@npm:^0.10.0": - version: 0.10.3 - resolution: "@web/test-runner-playwright@npm:0.10.3" - dependencies: - "@web/test-runner-core": ^0.12.0 - "@web/test-runner-coverage-v8": ^0.7.3 - playwright: ^1.22.2 - checksum: 7c765d34482f2e299742c3ffe80790229d0825569016ccfccbb1a0c915f89551a3cc14a1454ed7c6895aaa03605ea444f7c1846eeab82bf02702e87a60628b3c - languageName: node - linkType: hard - "@web/test-runner-playwright@npm:^0.11.0": version: 0.11.0 resolution: "@web/test-runner-playwright@npm:0.11.0" @@ -12672,7 +12579,7 @@ __metadata: "@web/dev-server-esbuild": ^0.3.6 "@web/dev-server-import-maps": ^0.2.0 "@web/test-runner": ^0.18.1 - "@web/test-runner-playwright": ^0.10.0 + "@web/test-runner-playwright": ^0.11.0 eslint: ^8.57.0 eslint-plugin-prettier: ^5.1.3 ethers: ^6.7.1 @@ -16392,27 +16299,27 @@ __metadata: languageName: node linkType: hard -"playwright-core@npm:1.40.1": - version: 1.40.1 - resolution: "playwright-core@npm:1.40.1" +"playwright-core@npm:1.42.1": + version: 1.42.1 + resolution: "playwright-core@npm:1.42.1" bin: playwright-core: cli.js - checksum: 84d92fb9b86e3c225b16b6886bf858eb5059b4e60fa1205ff23336e56a06dcb2eac62650992dede72f406c8e70a7b6a5303e511f9b4bc0b85022ede356a01ee0 + checksum: e7081ff0f43b4b9053255109eb1d82164b7c6b55c7d022e25fca935d0f4fc547cb2e02a7b64f0c2a9462729be7bb45edb082f8b038306415944f1061d00d9c90 languageName: node linkType: hard "playwright@npm:^1.22.2": - version: 1.40.1 - resolution: "playwright@npm:1.40.1" + version: 1.42.1 + resolution: "playwright@npm:1.42.1" dependencies: fsevents: 2.3.2 - playwright-core: 1.40.1 + playwright-core: 1.42.1 dependenciesMeta: fsevents: optional: true bin: playwright: cli.js - checksum: 9e36791c1b4a649c104aa365fdd9d049924eeb518c5967c0e921aa38b9b00994aa6ee54784d6c2af194b3b494b6f69772673081ef53c6c4a4b2065af9955c4ba + checksum: 06c16bcd07d03993126ee6c168bde28c59d3cab7f7d4721eaf57bd5c51e9c929e10a286758de062b5fc02874413ceae2684d14cbb7865c0a51fc8df6d9001ad1 languageName: node linkType: hard