From 4d8b51caf477ff83390ec6b40f11b0768e57903f Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Fri, 3 May 2024 15:35:09 -0600 Subject: [PATCH 1/9] feat: expose set_public_teardown_function in private context (#6199) See [spec](https://docs.aztec.network/protocol-specs/gas-and-fees/tx-setup-and-teardown) No additional business logic here, just exposing the planned interface. --- .github/workflows/ci.yml | 4 +- .../src/core/libraries/ConstantsGen.sol | 2 +- .../aztec/src/context/private_context.nr | 146 ++++++++++-------- .../oracle/enqueue_public_function_call.nr | 85 +++++++++- .../types/src/abis/private_call_stack_item.nr | 2 +- .../src/abis/private_circuit_public_inputs.nr | 6 +- .../crates/types/src/constants.nr | 2 +- .../private_circuit_public_inputs_builder.nr | 4 + yarn-project/circuits.js/src/constants.gen.ts | 1 + .../private_call_stack_item.test.ts.snap | 4 +- ...private_circuit_public_inputs.test.ts.snap | 4 +- .../structs/private_circuit_public_inputs.ts | 9 ++ .../circuits.js/src/tests/factories.ts | 1 + .../src/type_conversion.ts | 1 + 14 files changed, 196 insertions(+), 75 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad0e789d6ca9..cc7fa5753ce0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -192,7 +192,7 @@ jobs: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" concurrency_key: yarn-project-test-${{ github.actor }}-x86 - name: "Yarn Project Tests" - timeout-minutes: 25 + timeout-minutes: 30 run: earthly-ci --no-output ./yarn-project/+test prover-client-test: @@ -292,7 +292,7 @@ jobs: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" concurrency_key: docs-preview-${{ inputs.username || github.actor }}-x86 - name: "Docs Preview" - timeout-minutes: 25 + timeout-minutes: 30 run: earthly --no-output ./docs/+deploy-preview --PR=${{ github.event.number }} --AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} 
--NETLIFY_AUTH_TOKEN=${{ secrets.NETLIFY_AUTH_TOKEN }} --NETLIFY_SITE_ID=${{ secrets.NETLIFY_SITE_ID }} # push benchmarking binaries to dockerhub registry diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 3e5ec74866df..8a9200aaee69 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -137,7 +137,7 @@ library Constants { + (NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL - + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + 1 + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 2 + HEADER_LENGTH + TX_CONTEXT_LENGTH; diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index b5d5621bd10f..0411ba566a01 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -4,34 +4,32 @@ use crate::{ hash::{hash_args_array, ArgsHasher, compute_encrypted_log_hash, compute_unencrypted_log_hash}, oracle::{ arguments, returns, call_private_function::call_private_function_internal, - enqueue_public_function_call::enqueue_public_function_call_internal, header::get_header_at, - logs::emit_encrypted_log, logs_traits::{LensForEncryptedLog, ToBytesForUnencryptedLog}, + enqueue_public_function_call::{ + enqueue_public_function_call_internal, set_public_teardown_function_call_internal, + parse_public_call_stack_item_from_oracle +}, + header::get_header_at, logs::emit_encrypted_log, + logs_traits::{LensForEncryptedLog, ToBytesForUnencryptedLog}, 
nullifier_key::{get_nullifier_keys, NullifierKeys} } }; use dep::protocol_types::{ abis::{ - global_variables::GlobalVariables, gas::Gas, call_context::CallContext, function_data::FunctionData, function_selector::FunctionSelector, max_block_number::MaxBlockNumber, nullifier_key_validation_request::NullifierKeyValidationRequest, - private_call_stack_item::PrivateCallStackItem, private_circuit_public_inputs::PrivateCircuitPublicInputs, - public_call_stack_item::PublicCallStackItem, - public_circuit_public_inputs::PublicCircuitPublicInputs, read_request::ReadRequest, - note_hash::NoteHash, nullifier::Nullifier, side_effect::SideEffect + public_call_stack_item::PublicCallStackItem, read_request::ReadRequest, note_hash::NoteHash, + nullifier::Nullifier, side_effect::SideEffect }, address::{AztecAddress, EthAddress}, constants::{ MAX_NEW_NOTE_HASHES_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, - MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, MAX_NULLIFIER_READ_REQUESTS_PER_CALL, - MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL, - MAX_ENCRYPTED_LOGS_PER_CALL, MAX_UNENCRYPTED_LOGS_PER_CALL + MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL, MAX_ENCRYPTED_LOGS_PER_CALL, + MAX_UNENCRYPTED_LOGS_PER_CALL }, - contrakt::{storage_read::StorageRead, storage_update_request::StorageUpdateRequest}, - grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint, header::Header, - messaging::l2_to_l1_message::L2ToL1Message, utils::reader::Reader, + grumpkin_point::GrumpkinPoint, header::Header, messaging::l2_to_l1_message::L2ToL1Message, traits::{is_empty, Deserialize, Empty} }; @@ -57,6 +55,7 @@ struct PrivateContext { private_call_stack_hashes : BoundedVec, public_call_stack_hashes : BoundedVec, + public_teardown_function_hash: Field, new_l2_to_l1_msgs : 
BoundedVec, // docs:end:private-context @@ -129,6 +128,7 @@ impl PrivateContext { historical_header: inputs.historical_header, private_call_stack_hashes: BoundedVec::new(), public_call_stack_hashes: BoundedVec::new(), + public_teardown_function_hash: 0, new_l2_to_l1_msgs: BoundedVec::new(), encrypted_logs_hashes: BoundedVec::new(), unencrypted_logs_hashes: BoundedVec::new(), @@ -169,6 +169,7 @@ impl PrivateContext { new_nullifiers: self.new_nullifiers.storage, private_call_stack_hashes: self.private_call_stack_hashes.storage, public_call_stack_hashes: self.public_call_stack_hashes.storage, + public_teardown_function_hash: self.public_teardown_function_hash, new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, start_side_effect_counter: self.inputs.start_side_effect_counter, end_side_effect_counter: self.side_effect_counter, @@ -257,11 +258,7 @@ impl PrivateContext { let event_selector = 5; // TODO: compute actual event selector. let contract_address = self.this_address(); let log_slice = log.to_be_bytes_arr(); - let log_hash = compute_unencrypted_log_hash( - contract_address, - event_selector, - log, - ); + let log_hash = compute_unencrypted_log_hash(contract_address, event_selector, log); let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; self.unencrypted_logs_hashes.push(side_effect); self.side_effect_counter = self.side_effect_counter + 1; @@ -281,12 +278,7 @@ impl PrivateContext { pub fn emit_contract_class_unencrypted_log(&mut self, log: [Field; N]) { let event_selector = 5; // TODO: compute actual event selector. 
let contract_address = self.this_address(); - let log_hash = emit_contract_class_unencrypted_log_private_internal( - contract_address, - event_selector, - log, - self.side_effect_counter - ); + let log_hash = emit_contract_class_unencrypted_log_private_internal(contract_address, event_selector, log, self.side_effect_counter); let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; self.unencrypted_logs_hashes.push(side_effect); self.side_effect_counter = self.side_effect_counter + 1; @@ -316,7 +308,7 @@ impl PrivateContext { let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; self.encrypted_logs_hashes.push(side_effect); self.side_effect_counter = self.side_effect_counter + 1; - let encrypted_log_byte_len = 112 + 32*(N + 3); + let encrypted_log_byte_len = 112 + 32 * (N + 3); // + processed log len (4) self.encrypted_log_preimages_length += encrypted_log_byte_len + 4; } @@ -516,47 +508,77 @@ impl PrivateContext { is_delegate_call ); - let mut reader = Reader::new(fields); - - // Note: Not using PublicCirclePublicInputs::deserialize here, because everything below args_hash is 0 and - // there is no more data in fields because there is only ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_SIZE fields! 
- let item = PublicCallStackItem { - contract_address: AztecAddress::from_field(reader.read()), - function_data: reader.read_struct(FunctionData::deserialize), - public_inputs: PublicCircuitPublicInputs { - call_context: reader.read_struct(CallContext::deserialize), - args_hash: reader.read(), - returns_hash: 0, - nullifier_read_requests: [ReadRequest::empty(); MAX_NULLIFIER_READ_REQUESTS_PER_CALL], - nullifier_non_existent_read_requests: [ReadRequest::empty(); MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL], - contract_storage_update_requests: [StorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL], - contract_storage_reads: [StorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL], - public_call_stack_hashes: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], - new_note_hashes: [NoteHash::empty(); MAX_NEW_NOTE_HASHES_PER_CALL], - new_nullifiers: [Nullifier::empty(); MAX_NEW_NULLIFIERS_PER_CALL], - new_l2_to_l1_msgs: [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL], - start_side_effect_counter: 0, - end_side_effect_counter: 0, - unencrypted_logs_hashes: [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL], - unencrypted_log_preimages_length: 0, - historical_header: Header::empty(), - global_variables: GlobalVariables::empty(), - prover_address: AztecAddress::zero(), - revert_code: 0, - start_gas_left: Gas::empty(), - end_gas_left: Gas::empty(), - transaction_fee: 0 - }, - is_execution_request: true - }; - reader.finish(); + let item = parse_public_call_stack_item_from_oracle(fields); + self.validate_call_stack_item_from_oracle( + item, + contract_address, + function_selector, + args_hash, + is_static_call, + is_delegate_call + ); + + self.side_effect_counter = self.side_effect_counter + 1; + self.public_call_stack_hashes.push(item.hash()); + } + + pub fn set_public_teardown_function( + &mut self, + contract_address: AztecAddress, + function_selector: FunctionSelector, + args: [Field; ARGS_COUNT] + ) { + let args_hash = hash_args_array(args); + 
assert(args_hash == arguments::pack_arguments_array(args)); + self.set_public_teardown_function_with_packed_args(contract_address, function_selector, args_hash, false, false) + } + + pub fn set_public_teardown_function_with_packed_args( + &mut self, + contract_address: AztecAddress, + function_selector: FunctionSelector, + args_hash: Field, + is_static_call: bool, + is_delegate_call: bool + ) { + let mut is_static_call = is_static_call | self.inputs.call_context.is_static_call; + let fields = set_public_teardown_function_call_internal( + contract_address, + function_selector, + args_hash, + self.side_effect_counter, + is_static_call, + is_delegate_call + ); + let item = parse_public_call_stack_item_from_oracle(fields); + self.validate_call_stack_item_from_oracle( + item, + contract_address, + function_selector, + args_hash, + is_static_call, + is_delegate_call + ); + + self.side_effect_counter = self.side_effect_counter + 1; + self.public_teardown_function_hash = item.hash(); + } + + fn validate_call_stack_item_from_oracle( + self, + item: PublicCallStackItem, + contract_address: AztecAddress, + function_selector: FunctionSelector, + args_hash: Field, + is_static_call: bool, + is_delegate_call: bool + ) { assert(contract_address.eq(item.contract_address)); assert(function_selector.eq(item.function_data.selector)); assert_eq(item.public_inputs.call_context.side_effect_counter, self.side_effect_counter); // We increment the sideffect counter by one, to account for the call itself being a side effect. 
- self.side_effect_counter = self.side_effect_counter + 1; assert(args_hash == item.public_inputs.args_hash); @@ -577,8 +599,6 @@ impl PrivateContext { item.public_inputs.call_context.msg_sender.eq(self.inputs.call_context.storage_contract_address) ); } - - self.public_call_stack_hashes.push(item.hash()); } } @@ -598,6 +618,7 @@ impl Empty for PrivateContext { new_nullifiers: BoundedVec::new(), private_call_stack_hashes : BoundedVec::new(), public_call_stack_hashes : BoundedVec::new(), + public_teardown_function_hash: 0, new_l2_to_l1_msgs : BoundedVec::new(), historical_header: Header::empty(), encrypted_logs_hashes: BoundedVec::new(), @@ -663,7 +684,6 @@ fn emit_contract_class_unencrypted_log_private( counter: u32 ) -> Field {} - unconstrained pub fn emit_contract_class_unencrypted_log_private_internal( contract_address: AztecAddress, event_selector: Field, diff --git a/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr b/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr index 28bcb3312401..65341ba396a1 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr @@ -1,6 +1,20 @@ use dep::protocol_types::{ - abis::function_selector::FunctionSelector, address::AztecAddress, - constants::ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH + abis::{ + function_selector::FunctionSelector, public_call_stack_item::PublicCallStackItem, + function_data::FunctionData, public_circuit_public_inputs::PublicCircuitPublicInputs, + call_context::CallContext, read_request::ReadRequest, note_hash::NoteHash, nullifier::Nullifier, + side_effect::SideEffect, global_variables::GlobalVariables, gas::Gas +}, + contrakt::{storage_read::StorageRead, storage_update_request::StorageUpdateRequest}, + messaging::l2_to_l1_message::L2ToL1Message, header::Header, address::AztecAddress, + utils::reader::Reader, + constants::{ + MAX_NEW_NOTE_HASHES_PER_CALL, 
MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_DATA_READS_PER_CALL, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, MAX_NULLIFIER_READ_REQUESTS_PER_CALL, + MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, MAX_UNENCRYPTED_LOGS_PER_CALL, + ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH +} }; #[oracle(enqueuePublicFunctionCall)] @@ -30,3 +44,70 @@ unconstrained pub fn enqueue_public_function_call_internal( is_delegate_call ) } + +#[oracle(setPublicTeardownFunctionCall)] +fn set_public_teardown_function_call_oracle( + _contract_address: AztecAddress, + _function_selector: FunctionSelector, + _args_hash: Field, + _side_effect_counter: u32, + _is_static_call: bool, + _is_delegate_call: bool +) -> [Field; ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH] {} + +unconstrained pub fn set_public_teardown_function_call_internal( + contract_address: AztecAddress, + function_selector: FunctionSelector, + args_hash: Field, + side_effect_counter: u32, + is_static_call: bool, + is_delegate_call: bool +) -> [Field; ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH] { + set_public_teardown_function_call_oracle( + contract_address, + function_selector, + args_hash, + side_effect_counter, + is_static_call, + is_delegate_call + ) +} + +pub fn parse_public_call_stack_item_from_oracle(fields: [Field; ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH]) -> PublicCallStackItem { + let mut reader = Reader::new(fields); + + // Note: Not using PublicCirclePublicInputs::deserialize here, because everything below args_hash is 0 and + // there is no more data in fields because there is only ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_SIZE fields! 
+ let item = PublicCallStackItem { + contract_address: AztecAddress::from_field(reader.read()), + function_data: reader.read_struct(FunctionData::deserialize), + public_inputs: PublicCircuitPublicInputs { + call_context: reader.read_struct(CallContext::deserialize), + args_hash: reader.read(), + returns_hash: 0, + nullifier_read_requests: [ReadRequest::empty(); MAX_NULLIFIER_READ_REQUESTS_PER_CALL], + nullifier_non_existent_read_requests: [ReadRequest::empty(); MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL], + contract_storage_update_requests: [StorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL], + contract_storage_reads: [StorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL], + public_call_stack_hashes: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], + new_note_hashes: [NoteHash::empty(); MAX_NEW_NOTE_HASHES_PER_CALL], + new_nullifiers: [Nullifier::empty(); MAX_NEW_NULLIFIERS_PER_CALL], + new_l2_to_l1_msgs: [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL], + start_side_effect_counter: 0, + end_side_effect_counter: 0, + unencrypted_logs_hashes: [SideEffect::empty(); MAX_UNENCRYPTED_LOGS_PER_CALL], + unencrypted_log_preimages_length: 0, + historical_header: Header::empty(), + global_variables: GlobalVariables::empty(), + prover_address: AztecAddress::zero(), + revert_code: 0, + start_gas_left: Gas::empty(), + end_gas_left: Gas::empty(), + transaction_fee: 0 + }, + is_execution_request: true + }; + reader.finish(); + + item +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr index 7fc8c7a86c0f..652bceb0fe19 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr @@ -85,6 +85,6 @@ fn empty_hash() { let hash = item.hash(); // Value from private_call_stack_item.test.ts 
"computes empty item hash" test - let test_data_empty_hash = 0x17fd6ffcb3394b845069dc87e055c37ac50599f274130fac69c6fe919bfe382e; + let test_data_empty_hash = 0x2485b8cfe671417410382ba6dfc803de70d9d45008a1b30c31b34d7c4de92106; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr index e4dc6851f99a..e48226fa1f44 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr @@ -35,6 +35,7 @@ struct PrivateCircuitPublicInputs { new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_CALL], private_call_stack_hashes: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], public_call_stack_hashes: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], + public_teardown_function_hash: Field, new_l2_to_l1_msgs: [L2ToL1Message; MAX_NEW_L2_TO_L1_MSGS_PER_CALL], start_side_effect_counter : u32, @@ -109,6 +110,7 @@ impl Serialize for PrivateCircuitPublicInp } fields.extend_from_array(self.private_call_stack_hashes); fields.extend_from_array(self.public_call_stack_hashes); + fields.push(self.public_teardown_function_hash); for i in 0..self.new_l2_to_l1_msgs.len() { fields.extend_from_array(self.new_l2_to_l1_msgs[i].serialize()); } @@ -148,6 +150,7 @@ impl Deserialize for PrivateCircuitPublicI new_nullifiers: reader.read_struct_array(Nullifier::deserialize, [Nullifier::empty(); MAX_NEW_NULLIFIERS_PER_CALL]), private_call_stack_hashes: reader.read_array([0; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL]), public_call_stack_hashes: reader.read_array([0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL]), + public_teardown_function_hash: reader.read(), new_l2_to_l1_msgs: reader.read_struct_array(L2ToL1Message::deserialize, [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL]), 
start_side_effect_counter: reader.read() as u32, end_side_effect_counter: reader.read() as u32, @@ -185,6 +188,7 @@ impl Empty for PrivateCircuitPublicInputs { new_nullifiers: [Nullifier::empty(); MAX_NEW_NULLIFIERS_PER_CALL], private_call_stack_hashes: [0; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], public_call_stack_hashes: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], + public_teardown_function_hash: 0, new_l2_to_l1_msgs: [L2ToL1Message::empty(); MAX_NEW_L2_TO_L1_MSGS_PER_CALL], start_side_effect_counter : 0 as u32, end_side_effect_counter : 0 as u32, @@ -211,6 +215,6 @@ fn empty_hash() { let inputs = PrivateCircuitPublicInputs::empty(); let hash = inputs.hash(); // Value from private_circuit_public_inputs.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x29129c06414f4ac73bf889692c7011f91727d4cdbfe4fe143e6adee69b565cc8; + let test_data_empty_hash = 0x249d46b5a3e35f6489e793cd604e375634d4bfdac762ec06b5f8f03016bb4257; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index ba2fc9ab0f0e..ad82df9c822f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -169,7 +169,7 @@ global STATE_REFERENCE_LENGTH: u64 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_ global TX_CONTEXT_LENGTH: u64 = 2 + GAS_SETTINGS_LENGTH; global TX_REQUEST_LENGTH: u64 = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; global HEADER_LENGTH: u64 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + CONTENT_COMMITMENT_LENGTH + STATE_REFERENCE_LENGTH + GLOBAL_VARIABLES_LENGTH; -global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 3 + MAX_BLOCK_NUMBER_LENGTH + (READ_REQUEST_LENGTH * MAX_NOTE_HASH_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * 
MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 2 + HEADER_LENGTH + TX_CONTEXT_LENGTH; +global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 3 + MAX_BLOCK_NUMBER_LENGTH + (READ_REQUEST_LENGTH * MAX_NOTE_HASH_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + 1 + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 2 + HEADER_LENGTH + TX_CONTEXT_LENGTH; global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 2 + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; global PRIVATE_CALL_STACK_ITEM_LENGTH: u64 = AZTEC_ADDRESS_LENGTH + 
FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr index 2f4ed8d08e73..a8cc18978b80 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr @@ -39,6 +39,7 @@ struct PrivateCircuitPublicInputsBuilder { private_call_stack_hashes: BoundedVec, public_call_stack_hashes: BoundedVec, + public_teardown_function_hash: Field, new_l2_to_l1_msgs: BoundedVec, encrypted_logs_hashes: BoundedVec, @@ -81,6 +82,7 @@ impl PrivateCircuitPublicInputsBuilder { public_inputs.chain_id = 0; public_inputs.version = 1; public_inputs.gas_settings = GasSettings::default(); + public_inputs.public_teardown_function_hash = 0; public_inputs } @@ -103,6 +105,7 @@ impl PrivateCircuitPublicInputsBuilder { new_nullifiers: self.new_nullifiers.storage, private_call_stack_hashes: self.private_call_stack_hashes.storage, public_call_stack_hashes: self.public_call_stack_hashes.storage, + public_teardown_function_hash: self.public_teardown_function_hash, new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, start_side_effect_counter: self.call_context.side_effect_counter, end_side_effect_counter: 10, @@ -131,6 +134,7 @@ impl Empty for PrivateCircuitPublicInputsBuilder { new_nullifiers: BoundedVec::new(), private_call_stack_hashes: BoundedVec::new(), public_call_stack_hashes: BoundedVec::new(), + public_teardown_function_hash: 0, new_l2_to_l1_msgs: BoundedVec::new(), encrypted_logs_hashes: BoundedVec::new(), unencrypted_logs_hashes: BoundedVec::new(), diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 094991c36b08..70d3975bfea9 100644 --- 
a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -124,6 +124,7 @@ export const PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + + 1 + L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL + 2 + SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL + diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap index 0a6b285dc0c9..36a5ad79d941 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/private_call_stack_item.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PrivateCallStackItem computes empty item hash 1`] = `Fr<0x17fd6ffcb3394b845069dc87e055c37ac50599f274130fac69c6fe919bfe382e>`; +exports[`PrivateCallStackItem computes empty item hash 1`] = `Fr<0x2485b8cfe671417410382ba6dfc803de70d9d45008a1b30c31b34d7c4de92106>`; -exports[`PrivateCallStackItem computes hash 1`] = `Fr<0x07e0e054d39be2aab72d74cc72ef8adc61016744fd985c6736e14e710d14c875>`; +exports[`PrivateCallStackItem computes hash 1`] = `Fr<0x0efad8edafef07ee5165f01a51dec26edc7fd28f55eff90478d86f8a95a5352b>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap index 72ffed28cf34..a01d735ecb78 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = 
`Fr<0x29129c06414f4ac73bf889692c7011f91727d4cdbfe4fe143e6adee69b565cc8>`; +exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x249d46b5a3e35f6489e793cd604e375634d4bfdac762ec06b5f8f03016bb4257>`; -exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x0ab17c0893be4023ff61f8c2df5a0106c1709f7b10c2fadd53581da7a7d799d6>`; +exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x2f33953d4e47a0ebbe6ae3f4785ada5d107383e82038e7caf27cc37fdb69a088>`; diff --git a/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts index ef3b61190edf..8697c01cbdda 100644 --- a/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts @@ -94,6 +94,10 @@ export class PrivateCircuitPublicInputs { * Public call stack at the current kernel iteration. */ public publicCallStackHashes: Tuple, + /** + * Hash of the public teardown function. + */ + public publicTeardownFunctionHash: Fr, /** * New L2 to L1 messages created by the corresponding function call. 
*/ @@ -169,6 +173,7 @@ export class PrivateCircuitPublicInputs { reader.readArray(MAX_NEW_NULLIFIERS_PER_CALL, Nullifier), reader.readArray(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, Fr), reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, Fr), + reader.readObject(Fr), reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message), reader.readObject(Fr), reader.readObject(Fr), @@ -196,6 +201,7 @@ export class PrivateCircuitPublicInputs { reader.readArray(MAX_NEW_NULLIFIERS_PER_CALL, Nullifier), reader.readFieldArray(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL), reader.readFieldArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL), + reader.readField(), reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message), reader.readField(), reader.readField(), @@ -226,6 +232,7 @@ export class PrivateCircuitPublicInputs { makeTuple(MAX_NEW_NULLIFIERS_PER_CALL, Nullifier.empty), makeTuple(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, Fr.zero), makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, Fr.zero), + Fr.ZERO, makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, L2ToL1Message.empty), Fr.ZERO, Fr.ZERO, @@ -253,6 +260,7 @@ export class PrivateCircuitPublicInputs { isEmptyArray(this.newNullifiers) && isZeroArray(this.privateCallStackHashes) && isZeroArray(this.publicCallStackHashes) && + this.publicTeardownFunctionHash.isZero() && isEmptyArray(this.newL2ToL1Msgs) && isEmptyArray(this.encryptedLogsHashes) && isEmptyArray(this.unencryptedLogsHashes) && @@ -282,6 +290,7 @@ export class PrivateCircuitPublicInputs { fields.newNullifiers, fields.privateCallStackHashes, fields.publicCallStackHashes, + fields.publicTeardownFunctionHash, fields.newL2ToL1Msgs, fields.startSideEffectCounter, fields.endSideEffectCounter, diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 1e43ed44ebcd..2f48780c24fb 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -843,6 +843,7 @@ export function 
makePrivateCircuitPublicInputs(seed = 0): PrivateCircuitPublicIn newNullifiers: makeTuple(MAX_NEW_NULLIFIERS_PER_CALL, makeNullifier, seed + 0x500), privateCallStackHashes: makeTuple(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, fr, seed + 0x600), publicCallStackHashes: makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, fr, seed + 0x700), + publicTeardownFunctionHash: fr(seed + 0x800), newL2ToL1Msgs: makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_CALL, makeL2ToL1Message, seed + 0x800), startSideEffectCounter: fr(seed + 0x849), endSideEffectCounter: fr(seed + 0x850), diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index e641b21559a0..a891b6c775ce 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -748,6 +748,7 @@ export function mapPrivateCircuitPublicInputsToNoir( new_nullifiers: mapTuple(privateCircuitPublicInputs.newNullifiers, mapNullifierToNoir), private_call_stack_hashes: mapTuple(privateCircuitPublicInputs.privateCallStackHashes, mapFieldToNoir), public_call_stack_hashes: mapTuple(privateCircuitPublicInputs.publicCallStackHashes, mapFieldToNoir), + public_teardown_function_hash: mapFieldToNoir(privateCircuitPublicInputs.publicTeardownFunctionHash), new_l2_to_l1_msgs: mapTuple(privateCircuitPublicInputs.newL2ToL1Msgs, mapL2ToL1MessageToNoir), start_side_effect_counter: mapFieldToNoir(privateCircuitPublicInputs.startSideEffectCounter), end_side_effect_counter: mapFieldToNoir(privateCircuitPublicInputs.endSideEffectCounter), From 3ccc6acae834f9add0548c0ca044e65a2e13b08b Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 3 May 2024 17:50:32 -0400 Subject: [PATCH 2/9] chore(ci): rollback earthly prune (#6208) We're getting hit by an unfortunately earthly bug, but the workaround here took down other jobs --- scripts/earthly-ci | 12 +++++------- 1 file changed, 5 insertions(+), 7 
deletions(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index c383bcae4c25..84ffc925c7bf 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -20,16 +20,14 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do echo "Attempt #$ATTEMPT_COUNT failed." # Check the output for specific errors - if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null; then + if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null || grep 'failed to get state for index' $OUTPUT_FILE >/dev/null ; then INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) - echo "Got 'inconsistent graph state'." if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -eq 2 ]; then - echo "Performing 'earthly prune' due to repeated 'inconsistent graph state' errors." - earthly prune - if earthly $@ 2>&1 | tee $OUTPUT_FILE >&2 ; then - exit 0 # Post-prune success - fi + echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Connect to spot runner and run 'earthly prune'." + exit 1 fi + echo "Got 'inconsistent graph state' or 'failed to get state for index'. Sleeping for 20 seconds and retrying." + sleep 20 elif grep 'Error: pull ping error: pull ping response' $OUTPUT_FILE >/dev/null; then echo "Got 'Error: pull ping error: pull ping response', intermittent failure when writing out images to docker" elif grep '================================= System Info ==================================' $OUTPUT_FILE >/dev/null; then From 8418eac301fc9761cc29efd901ca5f719c3dfa09 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 3 May 2024 18:57:49 -0300 Subject: [PATCH 3/9] feat: Use actual tx fee in gas token when charging fee (#6166) Uses the actual `transaction_fee` injected during teardown for `pay_fee` in the `GasToken`. This PR also refactors the e2e_fees and e2e_dapp_subscription tests to use snapshots, and changes dapp_subscription so each test can be run independently. 
It also modifies the `AppSubscription` contract so the max fee it covers is configurable during initialization, instead of hardcoded to 42. It also lowers the severity of some random logs to make e2e output more readable (mostly around prover). Last, it includes bugfix for the snapshot manager when it runs in "disabled mode", where it was recreating the context multiple times, and refactors it by splitting it into different classes depending on the mode it runs. --- .../aztec-nr/aztec/src/context/avm_context.nr | 5 + .../aztec-nr/aztec/src/context/interface.nr | 1 + .../aztec/src/context/public_context.nr | 4 + .../app_subscription_contract/src/main.nr | 9 +- .../contracts/gas_token_contract/src/lib.nr | 4 +- .../contracts/gas_token_contract/src/main.nr | 6 +- .../aztec-node/src/aztec-node/server.ts | 2 +- .../circuit-types/src/interfaces/configs.ts | 2 - .../client_prover_test.ts | 7 +- .../blacklist_token_contract_test.ts | 7 +- .../minting.test.ts | 2 +- .../src/e2e_dapp_subscription.test.ts | 262 ------ .../e2e_delegate_calls/delegate_calls_test.ts | 13 +- .../src/e2e_deploy_contract/deploy_test.ts | 6 +- .../private_initialization.test.ts | 2 - yarn-project/end-to-end/src/e2e_fees.test.ts | 776 ------------------ .../src/e2e_fees/dapp_subscription.test.ts | 235 ++++++ .../end-to-end/src/e2e_fees/failures.test.ts | 298 +++++++ .../end-to-end/src/e2e_fees/fees_test.ts | 269 ++++++ .../src/e2e_fees/private_payments.test.ts | 388 +++++++++ .../src/e2e_lending_contract.test.ts | 2 +- .../nested_contract_test.ts | 7 +- .../end-to-end/src/e2e_ordering.test.ts | 8 +- .../deposits.test.ts | 2 +- .../failure_cases.test.ts | 2 +- .../l1_to_l2.test.ts | 2 +- .../l2_to_l1.test.ts | 2 +- ...lic_cross_chain_messaging_contract_test.ts | 40 +- .../e2e_token_contract/token_contract_test.ts | 7 +- .../src/fixtures/snapshot_manager.ts | 322 +++++--- yarn-project/end-to-end/src/fixtures/utils.ts | 23 +- .../src/flakey_e2e_account_init_fees.test.ts | 2 +- 
.../ethereum/src/deploy_l1_contracts.ts | 31 +- .../foundation/src/abi/function_selector.ts | 11 + .../src/orchestrator/orchestrator.ts | 2 +- .../src/prover-pool/memory-proving-queue.ts | 2 +- .../src/prover-pool/prover-agent.ts | 2 +- .../src/sequencer/sequencer.ts | 2 +- 38 files changed, 1514 insertions(+), 1253 deletions(-) delete mode 100644 yarn-project/end-to-end/src/e2e_dapp_subscription.test.ts delete mode 100644 yarn-project/end-to-end/src/e2e_fees.test.ts create mode 100644 yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts create mode 100644 yarn-project/end-to-end/src/e2e_fees/failures.test.ts create mode 100644 yarn-project/end-to-end/src/e2e_fees/fees_test.ts create mode 100644 yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts diff --git a/noir-projects/aztec-nr/aztec/src/context/avm_context.nr b/noir-projects/aztec-nr/aztec/src/context/avm_context.nr index cb2c3d26e89c..016e4860c77c 100644 --- a/noir-projects/aztec-nr/aztec/src/context/avm_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/avm_context.nr @@ -78,6 +78,11 @@ impl PublicContextInterface for AvmContext { 0 } + fn transaction_fee(self) -> Field { + assert(false, "'transaction_fee' not implemented!"); + 0 + } + fn nullifier_exists(self, unsiloed_nullifier: Field, address: AztecAddress) -> bool { nullifier_exists(unsiloed_nullifier, address.to_field()) == 1 } diff --git a/noir-projects/aztec-nr/aztec/src/context/interface.nr b/noir-projects/aztec-nr/aztec/src/context/interface.nr index 175d93cc2c44..5051d98511b7 100644 --- a/noir-projects/aztec-nr/aztec/src/context/interface.nr +++ b/noir-projects/aztec-nr/aztec/src/context/interface.nr @@ -54,6 +54,7 @@ trait PublicContextInterface { args: [Field] ) -> FunctionReturns; fn nullifier_exists(self, unsiloed_nullifier: Field, address: AztecAddress) -> bool; + fn transaction_fee(self) -> Field; } struct PrivateCallInterface { diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr 
b/noir-projects/aztec-nr/aztec/src/context/public_context.nr index 33765f8f3ec5..0e7e9435105d 100644 --- a/noir-projects/aztec-nr/aztec/src/context/public_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr @@ -239,6 +239,10 @@ impl PublicContextInterface for PublicContext { self.inputs.public_global_variables.gas_fees.fee_per_l2_gas } + fn transaction_fee(self) -> Field { + self.inputs.transaction_fee + } + fn nullifier_exists(self, unsiloed_nullifier: Field, address: AztecAddress) -> bool { // Current public can only check for settled nullifiers, so we always silo. let siloed_nullifier = silo_nullifier(address, unsiloed_nullifier); diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index c3233b2d55de..f049473ea572 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -31,6 +31,7 @@ contract AppSubscription { subscription_price: SharedImmutable, subscriptions: Map>, gas_token_address: SharedImmutable, + gas_token_limit_per_tx: SharedImmutable, } global SUBSCRIPTION_DURATION_IN_BLOCKS = 5; @@ -47,7 +48,8 @@ contract AppSubscription { note.remaining_txs -= 1; storage.subscriptions.at(user_address).replace(&mut note, true); - GasToken::at(storage.gas_token_address.read_private()).pay_fee(42).enqueue(&mut context); + let gas_limit = storage.gas_token_limit_per_tx.read_private(); + GasToken::at(storage.gas_token_address.read_private()).pay_fee(gas_limit).enqueue(&mut context); context.end_setup(); @@ -63,14 +65,15 @@ contract AppSubscription { subscription_recipient_address: AztecAddress, subscription_token_address: AztecAddress, subscription_price: Field, - gas_token_address: AztecAddress + gas_token_address: AztecAddress, + gas_token_limit_per_tx: Field ) { storage.target_address.initialize(target_address); 
storage.subscription_token_address.initialize(subscription_token_address); storage.subscription_recipient_address.initialize(subscription_recipient_address); storage.subscription_price.initialize(subscription_price); - storage.gas_token_address.initialize(gas_token_address); + storage.gas_token_limit_per_tx.initialize(gas_token_limit_per_tx); } #[aztec(public)] diff --git a/noir-projects/noir-contracts/contracts/gas_token_contract/src/lib.nr b/noir-projects/noir-contracts/contracts/gas_token_contract/src/lib.nr index 9e1afebac298..e3a5fc12684f 100644 --- a/noir-projects/noir-contracts/contracts/gas_token_contract/src/lib.nr +++ b/noir-projects/noir-contracts/contracts/gas_token_contract/src/lib.nr @@ -2,8 +2,8 @@ use dep::aztec::prelude::{AztecAddress, EthAddress}; use dep::aztec::context::interface::PublicContextInterface; use dep::aztec::protocol_types::hash::sha256_to_field; -pub fn calculate_fee(_context: TPublicContext) -> U128 where TPublicContext: PublicContextInterface { - U128::from_integer(1) +pub fn calculate_fee(context: TPublicContext) -> Field where TPublicContext: PublicContextInterface { + context.transaction_fee() } pub fn get_bridge_gas_msg_hash(owner: AztecAddress, amount: Field) -> Field { diff --git a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr index fd2bb0356eca..3b46f9b53fad 100644 --- a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr @@ -54,7 +54,11 @@ contract GasToken { #[aztec(public)] fn pay_fee(fee_limit: Field) -> Field { let fee_limit_u128 = U128::from_integer(fee_limit); - let fee = calculate_fee(context); + let fee = U128::from_integer(calculate_fee(context)); + dep::aztec::oracle::debug_log::debug_log_format( + "Gas token: paying fee {0} (limit {1})", + [fee.to_field(), fee_limit] + ); assert(fee <= fee_limit_u128, "Fee too high"); let 
sender_new_balance = storage.balances.at(context.msg_sender()).read() - fee; diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 3bf5b97ab46f..343130a1b38c 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -676,7 +676,7 @@ export class AztecNodeService implements AztecNode { this.log.warn(`Simulated tx ${tx.getTxHash()} reverts: ${reverted[0].revertReason}`); throw reverted[0].revertReason; } - this.log.info(`Simulated tx ${tx.getTxHash()} succeeds`); + this.log.debug(`Simulated tx ${tx.getTxHash()} succeeds`); const [processedTx] = processedTxs; return { constants: processedTx.data.constants, diff --git a/yarn-project/circuit-types/src/interfaces/configs.ts b/yarn-project/circuit-types/src/interfaces/configs.ts index 2698d7aeb78f..fe44947209bd 100644 --- a/yarn-project/circuit-types/src/interfaces/configs.ts +++ b/yarn-project/circuit-types/src/interfaces/configs.ts @@ -33,10 +33,8 @@ export interface SequencerConfig { acvmWorkingDirectory?: string; /** The path to the ACVM binary */ acvmBinaryPath?: string; - /** The list of functions calls allowed to run in setup */ allowedFunctionsInSetup?: AllowedFunction[]; - /** The list of functions calls allowed to run teardown */ allowedFunctionsInTeardown?: AllowedFunction[]; } diff --git a/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts b/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts index d511f5ce7164..346147d92d0e 100644 --- a/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts +++ b/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts @@ -19,9 +19,10 @@ import * as fs from 'fs/promises'; import { waitRegisteredAccountSynced } from '../benchmarks/utils.js'; import { - SnapshotManager, + type ISnapshotManager, type SubsystemsContext, addAccounts, + createSnapshotManager, 
publicDeployAccounts, } from '../fixtures/snapshot_manager.js'; import { getBBConfig, setupPXEService } from '../fixtures/utils.js'; @@ -42,7 +43,7 @@ export class ClientProverTest { static TOKEN_NAME = 'Aztec Token'; static TOKEN_SYMBOL = 'AZT'; static TOKEN_DECIMALS = 18n; - private snapshotManager: SnapshotManager; + private snapshotManager: ISnapshotManager; logger: DebugLogger; keys: Array<[Fr, Fq]> = []; wallets: AccountWalletWithSecretKey[] = []; @@ -59,7 +60,7 @@ export class ClientProverTest { constructor(testName: string) { this.logger = createDebugLogger(`aztec:client_prover_test:${testName}`); - this.snapshotManager = new SnapshotManager(`client_prover_integration/${testName}`, dataPath); + this.snapshotManager = createSnapshotManager(`client_prover_integration/${testName}`, dataPath); } /** diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts index afbb66309f82..708f1cbfb3dd 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts @@ -13,9 +13,10 @@ import { import { DocsExampleContract, TokenBlacklistContract, type TokenContract } from '@aztec/noir-contracts.js'; import { - SnapshotManager, + type ISnapshotManager, type SubsystemsContext, addAccounts, + createSnapshotManager, publicDeployAccounts, } from '../fixtures/snapshot_manager.js'; import { TokenSimulator } from '../simulators/token_simulator.js'; @@ -54,7 +55,7 @@ export class BlacklistTokenContractTest { // This value MUST match the same value that we have in the contract static DELAY = 2; - private snapshotManager: SnapshotManager; + private snapshotManager: ISnapshotManager; logger: DebugLogger; wallets: AccountWallet[] = []; accounts: CompleteAddress[] = []; @@ -68,7 +69,7 @@ export class 
BlacklistTokenContractTest { constructor(testName: string) { this.logger = createDebugLogger(`aztec:e2e_blacklist_token_contract:${testName}`); - this.snapshotManager = new SnapshotManager(`e2e_blacklist_token_contract/${testName}`, dataPath); + this.snapshotManager = createSnapshotManager(`e2e_blacklist_token_contract/${testName}`, dataPath); } async mineBlocks(amount: number = BlacklistTokenContractTest.DELAY) { diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts index b003195c3933..ab535e3e61ab 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts @@ -14,7 +14,7 @@ describe('e2e_blacklist_token_contract mint', () => { await t.setup(); // Have to destructure again to ensure we have latest refs. ({ asset, tokenSim, wallets, blacklisted } = t); - }, 200_000); + }, 300_000); afterAll(async () => { await t.teardown(); diff --git a/yarn-project/end-to-end/src/e2e_dapp_subscription.test.ts b/yarn-project/end-to-end/src/e2e_dapp_subscription.test.ts deleted file mode 100644 index 109c89c181d9..000000000000 --- a/yarn-project/end-to-end/src/e2e_dapp_subscription.test.ts +++ /dev/null @@ -1,262 +0,0 @@ -import { - type AccountWalletWithSecretKey, - type AztecAddress, - type AztecNode, - type DebugLogger, - type DeployL1Contracts, - type FeePaymentMethod, - Fr, - type PXE, - PrivateFeePaymentMethod, - PublicFeePaymentMethod, - SentTx, -} from '@aztec/aztec.js'; -import { GasSettings } from '@aztec/circuits.js'; -import { DefaultDappEntrypoint } from '@aztec/entrypoints/dapp'; -import { - AppSubscriptionContract, - TokenContract as BananaCoin, - CounterContract, - FPCContract, - GasTokenContract, -} from '@aztec/noir-contracts.js'; -import { getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token'; - -import { type BalancesFn, expectMapping, 
getBalancesFn, publicDeployAccounts, setup } from './fixtures/utils.js'; - -const TOKEN_NAME = 'BananaCoin'; -const TOKEN_SYMBOL = 'BC'; -const TOKEN_DECIMALS = 18n; - -describe('e2e_dapp_subscription', () => { - let pxe: PXE; - let logger: DebugLogger; - - let aliceWallet: AccountWalletWithSecretKey; - let bobWallet: AccountWalletWithSecretKey; - let aliceAddress: AztecAddress; // Dapp subscriber. - let bobAddress: AztecAddress; // Dapp owner. - let sequencerAddress: AztecAddress; - // let gasTokenContract: GasTokenContract; - let bananaCoin: BananaCoin; - let counterContract: CounterContract; - let subscriptionContract: AppSubscriptionContract; - let gasTokenContract: GasTokenContract; - let bananaFPC: FPCContract; - let gasBalances: BalancesFn; - let bananasPublicBalances: BalancesFn; - let bananasPrivateBalances: BalancesFn; - - const SUBSCRIPTION_AMOUNT = BigInt(100e9); - const INITIAL_GAS_BALANCE = BigInt(1000e9); - const PUBLICLY_MINTED_BANANAS = BigInt(500e9); - const PRIVATELY_MINTED_BANANAS = BigInt(600e9); - - const FEE_AMOUNT = 1n; - const MAX_FEE = BigInt(20e9); - - const GAS_SETTINGS = GasSettings.default(); - - beforeAll(async () => { - process.env.PXE_URL = ''; - process.env.ENABLE_GAS ??= '1'; - - expect(GAS_SETTINGS.getFeeLimit().toBigInt()).toEqual(MAX_FEE); - - let wallets: AccountWalletWithSecretKey[]; - let aztecNode: AztecNode; - let deployL1ContractsValues: DeployL1Contracts; - ({ wallets, aztecNode, deployL1ContractsValues, logger, pxe } = await setup(3, {}, {}, true)); - - await publicDeployAccounts(wallets[0], wallets); - - // this should be a SignerlessWallet but that can't call public functions directly - gasTokenContract = await GasTokenContract.at( - getCanonicalGasTokenAddress(deployL1ContractsValues.l1ContractAddresses.gasPortalAddress), - wallets[0], - ); - - aliceAddress = wallets[0].getAddress(); - bobAddress = wallets[1].getAddress(); - sequencerAddress = wallets[2].getAddress(); - - await aztecNode.setConfig({ - feeRecipient: 
sequencerAddress, - }); - - [aliceWallet, bobWallet] = wallets; - - bananaCoin = await BananaCoin.deploy(aliceWallet, aliceAddress, TOKEN_NAME, TOKEN_SYMBOL, TOKEN_DECIMALS) - .send() - .deployed(); - bananaFPC = await FPCContract.deploy(aliceWallet, bananaCoin.address, gasTokenContract.address).send().deployed(); - - counterContract = await CounterContract.deploy(bobWallet, 0, bobAddress).send().deployed(); - - subscriptionContract = await AppSubscriptionContract.deploy( - bobWallet, - counterContract.address, - bobAddress, - // anyone can purchase a subscription for 100 test tokens - bananaCoin.address, - SUBSCRIPTION_AMOUNT, - gasTokenContract.address, - ) - .send() - .deployed(); - - // mint some test tokens for Alice - // she'll pay for the subscription with these - await bananaCoin.methods.privately_mint_private_note(PRIVATELY_MINTED_BANANAS).send().wait(); - await bananaCoin.methods.mint_public(aliceAddress, PUBLICLY_MINTED_BANANAS).send().wait(); - await gasTokenContract.methods.mint_public(subscriptionContract.address, INITIAL_GAS_BALANCE).send().wait(); - await gasTokenContract.methods.mint_public(bananaFPC.address, INITIAL_GAS_BALANCE).send().wait(); - - gasBalances = getBalancesFn('⛽', gasTokenContract.methods.balance_of_public, logger); - bananasPublicBalances = getBalancesFn('Public 🍌', bananaCoin.methods.balance_of_public, logger); - bananasPrivateBalances = getBalancesFn('Private 🍌', bananaCoin.methods.balance_of_private, logger); - - await expectMapping( - gasBalances, - [aliceAddress, sequencerAddress, subscriptionContract.address, bananaFPC.address], - [0n, 0n, INITIAL_GAS_BALANCE, INITIAL_GAS_BALANCE], - ); - }); - - it('should allow Alice to subscribe by paying privately with bananas', async () => { - /** - PRIVATE SETUP - we first unshield `MAX_FEE` BC from alice's private balance to the FPC's public balance - - PUBLIC APP LOGIC - we then privately transfer `SUBSCRIPTION_AMOUNT` BC from alice to bob's subscription contract - - PUBLIC TEARDOWN 
- then the FPC calls `pay_fee`, reducing its gas balance by `FEE_AMOUNT`, and increasing the sequencer's gas balance by `FEE_AMOUNT` - the FPC also publicly sends `REFUND` BC to alice - */ - - await subscribe(new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), MAX_FEE); - - await expectMapping( - bananasPrivateBalances, - [aliceAddress, bobAddress, bananaFPC.address], - [PRIVATELY_MINTED_BANANAS - SUBSCRIPTION_AMOUNT - MAX_FEE, SUBSCRIPTION_AMOUNT, 0n], - ); - - await expectMapping( - bananasPublicBalances, - [aliceAddress, bobAddress, bananaFPC.address], - // refund is done via a transparent note for now - [PUBLICLY_MINTED_BANANAS, 0n, FEE_AMOUNT], - ); - - await expectMapping( - gasBalances, - // note the subscription contract hasn't paid any fees yet - [bananaFPC.address, subscriptionContract.address, sequencerAddress], - [INITIAL_GAS_BALANCE - FEE_AMOUNT, INITIAL_GAS_BALANCE, FEE_AMOUNT], - ); - - // REFUND_AMOUNT is a transparent note note - }); - - it('should allow Alice to subscribe by paying with bananas in public', async () => { - /** - PRIVATE SETUP - we publicly transfer `MAX_FEE` BC from alice's public balance to the FPC's public balance - - PUBLIC APP LOGIC - we then privately transfer `SUBSCRIPTION_AMOUNT` BC from alice to bob's subscription contract - - PUBLIC TEARDOWN - then the FPC calls `pay_fee`, reducing its gas balance by `FEE_AMOUNT`, and increasing the sequencer's gas balance by `FEE_AMOUNT` - the FPC also publicly sends `REFUND` BC to alice - */ - await subscribe(new PublicFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), MAX_FEE); - - await expectMapping( - bananasPrivateBalances, - [aliceAddress, bobAddress, bananaFPC.address], - // we pay the fee publicly, but the subscription payment is still private. 
- // Also, minus 1 x MAX_FEE as leftover from the previous test, since we paid publicly this time - [PRIVATELY_MINTED_BANANAS - 2n * SUBSCRIPTION_AMOUNT - MAX_FEE, 2n * SUBSCRIPTION_AMOUNT, 0n], - ); - - await expectMapping( - bananasPublicBalances, - [aliceAddress, bobAddress, bananaFPC.address], - [ - // we have the refund from the previous test, - // but since we paid publicly this time, the refund should have been "squashed" - PUBLICLY_MINTED_BANANAS - FEE_AMOUNT, - 0n, // Bob still has no public bananas - 2n * FEE_AMOUNT, // because this is the second time we've used the FPC - ], - ); - - await expectMapping( - gasBalances, - [subscriptionContract.address, bananaFPC.address, sequencerAddress], - [INITIAL_GAS_BALANCE, INITIAL_GAS_BALANCE - 2n * FEE_AMOUNT, 2n * FEE_AMOUNT], - ); - }); - - it('should call dapp subscription entrypoint', async () => { - const dappPayload = new DefaultDappEntrypoint(aliceAddress, aliceWallet, subscriptionContract.address); - const action = counterContract.methods.increment(bobAddress).request(); - const txExReq = await dappPayload.createTxExecutionRequest({ calls: [action] }); - const tx = await pxe.proveTx(txExReq, true); - const sentTx = new SentTx(pxe, pxe.sendTx(tx)); - await sentTx.wait(); - - expect(await counterContract.methods.get_counter(bobAddress).simulate()).toBe(1n); - - await expectMapping( - gasBalances, - [subscriptionContract.address, bananaFPC.address, sequencerAddress], - [INITIAL_GAS_BALANCE - FEE_AMOUNT, INITIAL_GAS_BALANCE - 2n * FEE_AMOUNT, FEE_AMOUNT * 3n], - ); - }); - - it('should reject after the sub runs out', async () => { - // subscribe again. 
This will overwrite the subscription - await subscribe(new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), MAX_FEE, 0); - await expect(dappIncrement()).rejects.toThrow( - "Failed to solve brillig function '(context.block_number()) as u64 < expiry_block_number as u64'", - ); - }); - - it('should reject after the txs run out', async () => { - // subscribe again. This will overwrite the subscription - await subscribe(new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), FEE_AMOUNT, 5, 1); - await expect(dappIncrement()).resolves.toBeDefined(); - await expect(dappIncrement()).rejects.toThrow(/note.remaining_txs as u64 > 0/); - }); - - async function subscribe( - paymentMethod: FeePaymentMethod, - maxFee: bigint, - blockDelta: number = 5, - txCount: number = 4, - ) { - const nonce = Fr.random(); - const action = bananaCoin.methods.transfer(aliceAddress, bobAddress, SUBSCRIPTION_AMOUNT, nonce); - await aliceWallet.createAuthWit({ caller: subscriptionContract.address, action }); - - return subscriptionContract - .withWallet(aliceWallet) - .methods.subscribe(aliceAddress, nonce, (await pxe.getBlockNumber()) + blockDelta, txCount) - .send({ fee: { gasSettings: GAS_SETTINGS, paymentMethod } }) - .wait(); - } - - async function dappIncrement() { - const dappEntrypoint = new DefaultDappEntrypoint(aliceAddress, aliceWallet, subscriptionContract.address); - const action = counterContract.methods.increment(bobAddress).request(); - const txExReq = await dappEntrypoint.createTxExecutionRequest({ calls: [action] }); - const tx = await pxe.proveTx(txExReq, true); - const sentTx = new SentTx(pxe, pxe.sendTx(tx)); - return sentTx.wait(); - } -}); diff --git a/yarn-project/end-to-end/src/e2e_delegate_calls/delegate_calls_test.ts b/yarn-project/end-to-end/src/e2e_delegate_calls/delegate_calls_test.ts index 457408a05f7a..0911bd0bc80a 100644 --- a/yarn-project/end-to-end/src/e2e_delegate_calls/delegate_calls_test.ts +++ 
b/yarn-project/end-to-end/src/e2e_delegate_calls/delegate_calls_test.ts @@ -2,12 +2,17 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AccountWallet, type DebugLogger, createDebugLogger } from '@aztec/aztec.js'; import { DelegatedOnContract, DelegatorContract } from '@aztec/noir-contracts.js'; -import { SnapshotManager, type SubsystemsContext, addAccounts } from '../fixtures/snapshot_manager.js'; +import { + type ISnapshotManager, + type SubsystemsContext, + addAccounts, + createSnapshotManager, +} from '../fixtures/snapshot_manager.js'; const { E2E_DATA_PATH: dataPath } = process.env; export class DelegateCallsTest { - private snapshotManager: SnapshotManager; + private snapshotManager: ISnapshotManager; logger: DebugLogger; wallet!: AccountWallet; delegatorContract!: DelegatorContract; @@ -15,7 +20,7 @@ export class DelegateCallsTest { constructor(testName: string) { this.logger = createDebugLogger(`aztec:e2e_delegate_calls:${testName}`); - this.snapshotManager = new SnapshotManager(`e2e_delegate_calls/${testName}`, dataPath); + this.snapshotManager = createSnapshotManager(`e2e_delegate_calls/${testName}`, dataPath); } /** @@ -27,7 +32,7 @@ export class DelegateCallsTest { await this.snapshotManager.snapshot('accounts', addAccounts(1, this.logger), async ({ accountKeys }, { pxe }) => { const accountManager = getSchnorrAccount(pxe, accountKeys[0][0], accountKeys[0][1], 1); this.wallet = await accountManager.getWallet(); - this.logger.verbose(`Wallet address: ${this.wallet.getAddress()}`); + this.logger.verbose(`Wallet address: ${this.wallet.getAddress()}`); }); await this.snapshotManager.snapshot( diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts index cc44a09b51f4..05b314228286 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts @@ -15,12 +15,12 @@ 
import { } from '@aztec/aztec.js'; import { type StatefulTestContract } from '@aztec/noir-contracts.js'; -import { SnapshotManager, addAccounts } from '../fixtures/snapshot_manager.js'; +import { type ISnapshotManager, addAccounts, createSnapshotManager } from '../fixtures/snapshot_manager.js'; const { E2E_DATA_PATH: dataPath } = process.env; export class DeployTest { - private snapshotManager: SnapshotManager; + private snapshotManager: ISnapshotManager; private wallets: AccountWallet[] = []; public logger: DebugLogger; @@ -30,7 +30,7 @@ export class DeployTest { constructor(testName: string) { this.logger = createDebugLogger(`aztec:e2e_deploy_contract:${testName}`); - this.snapshotManager = new SnapshotManager(`e2e_deploy_contract/${testName}`, dataPath); + this.snapshotManager = createSnapshotManager(`e2e_deploy_contract/${testName}`, dataPath); } async setup() { diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts index 6567c8e5cc06..84dab9e5065e 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts @@ -29,7 +29,6 @@ describe('e2e_deploy_contract private initialization', () => { const expected = siloNullifier(contract.address, new Fr(10)); expect(receipt.debugInfo?.nullifiers[1]).toEqual(expected); }, - 30_000, ); // Tests privately initializing an undeployed contract. Also requires pxe registration in advance. @@ -51,7 +50,6 @@ describe('e2e_deploy_contract private initialization', () => { await contract.methods.create_note(owner, 10).send().wait(); expect(await contract.methods.summed_values(owner).simulate()).toEqual(52n); }, - 30_000, ); // Tests privately initializing multiple undeployed contracts on the same tx through an account contract. 
diff --git a/yarn-project/end-to-end/src/e2e_fees.test.ts b/yarn-project/end-to-end/src/e2e_fees.test.ts deleted file mode 100644 index b521e7c8ede5..000000000000 --- a/yarn-project/end-to-end/src/e2e_fees.test.ts +++ /dev/null @@ -1,776 +0,0 @@ -import { - type AccountWallet, - type AztecAddress, - BatchCall, - type DebugLogger, - ExtendedNote, - Fr, - type FunctionCall, - FunctionSelector, - Note, - PrivateFeePaymentMethod, - PublicFeePaymentMethod, - type TxHash, - TxStatus, - type Wallet, - computeAuthWitMessageHash, - computeSecretHash, -} from '@aztec/aztec.js'; -import { FunctionData, GasSettings } from '@aztec/circuits.js'; -import { type ContractArtifact, decodeFunctionSignature } from '@aztec/foundation/abi'; -import { - TokenContract as BananaCoin, - FPCContract, - GasTokenContract, - SchnorrAccountContract, -} from '@aztec/noir-contracts.js'; - -import { jest } from '@jest/globals'; - -import { type BalancesFn, expectMapping, getBalancesFn, publicDeployAccounts, setup } from './fixtures/utils.js'; -import { GasPortalTestingHarnessFactory, type IGasBridgingTestHarness } from './shared/gas_portal_test_harness.js'; - -const TOKEN_NAME = 'BananaCoin'; -const TOKEN_SYMBOL = 'BC'; -const TOKEN_DECIMALS = 18n; -const BRIDGED_FPC_GAS = 500n; - -jest.setTimeout(1_000_000_000); - -describe('e2e_fees', () => { - let wallets: AccountWallet[]; - let aliceWallet: Wallet; - let aliceAddress: AztecAddress; - let bobAddress: AztecAddress; - let sequencerAddress: AztecAddress; - let gasTokenContract: GasTokenContract; - let bananaCoin: BananaCoin; - let bananaFPC: FPCContract; - let logger: DebugLogger; - - let gasBridgeTestHarness: IGasBridgingTestHarness; - - let gasBalances: BalancesFn; - let bananaPublicBalances: BalancesFn; - let bananaPrivateBalances: BalancesFn; - - const gasSettings = GasSettings.default(); - - beforeAll(async () => { - const ctx = await setup(3, {}, {}, true); - const { aztecNode, deployL1ContractsValues, pxe } = ctx; - ({ wallets, logger } = 
ctx); - - logFunctionSignatures(BananaCoin.artifact, logger); - logFunctionSignatures(FPCContract.artifact, logger); - logFunctionSignatures(GasTokenContract.artifact, logger); - logFunctionSignatures(SchnorrAccountContract.artifact, logger); - - await aztecNode.setConfig({ - feeRecipient: wallets.at(-1)!.getAddress(), - }); - - aliceWallet = wallets[0]; - aliceAddress = wallets[0].getAddress(); - bobAddress = wallets[1].getAddress(); - sequencerAddress = wallets[2].getAddress(); - - gasBridgeTestHarness = await GasPortalTestingHarnessFactory.create({ - aztecNode: aztecNode, - pxeService: pxe, - publicClient: deployL1ContractsValues.publicClient, - walletClient: deployL1ContractsValues.walletClient, - wallet: wallets[0], - logger, - mockL1: false, - }); - - gasTokenContract = gasBridgeTestHarness.l2Token; - - bananaCoin = await BananaCoin.deploy(wallets[0], wallets[0].getAddress(), TOKEN_NAME, TOKEN_SYMBOL, TOKEN_DECIMALS) - .send() - .deployed(); - - logger.info(`BananaCoin deployed at ${bananaCoin.address}`); - - bananaFPC = await FPCContract.deploy(wallets[0], bananaCoin.address, gasTokenContract.address).send().deployed(); - logger.info(`BananaPay deployed at ${bananaFPC.address}`); - await publicDeployAccounts(wallets[0], wallets); - - await gasBridgeTestHarness.bridgeFromL1ToL2(BRIDGED_FPC_GAS, BRIDGED_FPC_GAS, bananaFPC.address); - - bananaPublicBalances = getBalancesFn('🍌.public', bananaCoin.methods.balance_of_public, logger); - bananaPrivateBalances = getBalancesFn('🍌.private', bananaCoin.methods.balance_of_private, logger); - gasBalances = getBalancesFn('⛽', gasTokenContract.methods.balance_of_public, logger); - await expectMapping(bananaPrivateBalances, [aliceAddress, bananaFPC.address, sequencerAddress], [0n, 0n, 0n]); - await expectMapping(bananaPublicBalances, [aliceAddress, bananaFPC.address, sequencerAddress], [0n, 0n, 0n]); - await expectMapping(gasBalances, [aliceAddress, bananaFPC.address, sequencerAddress], [0n, BRIDGED_FPC_GAS, 0n]); - }); - - 
it('reverts transactions but still pays fees using PublicFeePaymentMethod', async () => { - const OutrageousPublicAmountAliceDoesNotHave = BigInt(1e15); - const PublicMintedAlicePublicBananas = BigInt(1e12); - const FeeAmount = 1n; - - const [initialAlicePrivateBananas, initialFPCPrivateBananas] = await bananaPrivateBalances( - aliceAddress, - bananaFPC.address, - ); - const [initialAlicePublicBananas, initialFPCPublicBananas] = await bananaPublicBalances( - aliceAddress, - bananaFPC.address, - ); - const [initialAliceGas, initialFPCGas, initialSequencerGas] = await gasBalances( - aliceAddress, - bananaFPC.address, - sequencerAddress, - ); - - await bananaCoin.methods.mint_public(aliceAddress, PublicMintedAlicePublicBananas).send().wait(); - // if we simulate locally, it throws an error - await expect( - bananaCoin.methods - .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) - .send({ - fee: { - gasSettings, - paymentMethod: new PublicFeePaymentMethod(bananaCoin.address, bananaFPC.address, wallets[0]), - }, - }) - .wait(), - ).rejects.toThrow(/attempt to subtract with underflow 'hi == high'/); - - // we did not pay the fee, because we did not submit the TX - await expectMapping( - bananaPrivateBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAlicePrivateBananas, initialFPCPrivateBananas, 0n], - ); - await expectMapping( - bananaPublicBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAlicePublicBananas + PublicMintedAlicePublicBananas, initialFPCPublicBananas, 0n], - ); - await expectMapping( - gasBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAliceGas, initialFPCGas, initialSequencerGas], - ); - - // if we skip simulation, it includes the failed TX - const txReceipt = await bananaCoin.methods - .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) - .send({ - skipPublicSimulation: true, - fee: { - gasSettings, 
- paymentMethod: new PublicFeePaymentMethod(bananaCoin.address, bananaFPC.address, wallets[0]), - }, - }) - .wait({ dontThrowOnRevert: true }); - expect(txReceipt.status).toBe(TxStatus.REVERTED); - - // and thus we paid the fee - await expectMapping( - bananaPrivateBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAlicePrivateBananas, initialFPCPrivateBananas, 0n], - ); - await expectMapping( - bananaPublicBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAlicePublicBananas + PublicMintedAlicePublicBananas - FeeAmount, initialFPCPublicBananas + FeeAmount, 0n], - ); - await expectMapping( - gasBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAliceGas, initialFPCGas - FeeAmount, initialSequencerGas + FeeAmount], - ); - - // TODO(#4712) - demonstrate reverts with the PrivateFeePaymentMethod. - // Can't do presently because all logs are "revertible" so we lose notes that get broadcasted during unshielding. - }); - - describe('private fees payments', () => { - let InitialAlicePrivateBananas: bigint; - let InitialAlicePublicBananas: bigint; - let InitialAliceGas: bigint; - - let InitialBobPrivateBananas: bigint; - // eslint-disable-next-line @typescript-eslint/no-unused-vars - let InitialBobPublicBananas: bigint; - - let InitialFPCPrivateBananas: bigint; - let InitialFPCPublicBananas: bigint; - let InitialFPCGas: bigint; - - let InitialSequencerGas: bigint; - - let MaxFee: bigint; - let FeeAmount: bigint; - let RefundAmount: bigint; - let RefundSecret: Fr; - - beforeAll(async () => { - // Fund Alice private and publicly - await mintPrivate(BigInt(1e12), aliceAddress); - await bananaCoin.methods.mint_public(aliceAddress, 1e12).send().wait(); - }); - - beforeEach(async () => { - FeeAmount = 1n; - MaxFee = BigInt(20e9); - RefundAmount = MaxFee - FeeAmount; - RefundSecret = Fr.random(); - - expect(gasSettings.getFeeLimit().toBigInt()).toEqual(MaxFee); - - [ - [InitialAlicePrivateBananas, 
InitialBobPrivateBananas, InitialFPCPrivateBananas], - [InitialAlicePublicBananas, InitialBobPublicBananas, InitialFPCPublicBananas], - [InitialAliceGas, InitialFPCGas, InitialSequencerGas], - ] = await Promise.all([ - bananaPrivateBalances(aliceAddress, bobAddress, bananaFPC.address), - bananaPublicBalances(aliceAddress, bobAddress, bananaFPC.address), - gasBalances(aliceAddress, bananaFPC.address, sequencerAddress), - ]); - }); - - it('pays fees for tx that dont run public app logic', async () => { - /** - * PRIVATE SETUP (1 nullifier for tx) - * check authwit (1 nullifier) - * reduce alice BC.private by MaxFee (1 nullifier) - * enqueue public call to increase FPC BC.public by MaxFee - * enqueue public call for fpc.pay_fee_with_shielded_rebate - * - * PRIVATE APP LOGIC - * reduce Alice's BC.private by transferAmount (1 note) - * create note for Bob of transferAmount (1 note) - * encrypted logs of 944 bytes - * unencrypted logs of 20 bytes - * - * PUBLIC SETUP - * increase FPC BC.public by MaxFee - * - * PUBLIC APP LOGIC - * N/A - * - * PUBLIC TEARDOWN - * call gas.pay_fee - * decrease FPC AZT by FeeAmount - * increase sequencer AZT by FeeAmount - * call banana.shield - * decrease FPC BC.public by RefundAmount - * create transparent note with RefundAmount - * - * this is expected to squash notes and nullifiers - */ - const transferAmount = 5n; - const tx = await bananaCoin.methods - .transfer(aliceAddress, bobAddress, transferAmount, 0n) - .send({ - fee: { - gasSettings, - paymentMethod: new PrivateFeePaymentMethod( - bananaCoin.address, - bananaFPC.address, - aliceWallet, - RefundSecret, - ), - }, - }) - .wait(); - - /** - * at present the user is paying DA gas for: - * 3 nullifiers = 3 * DA_BYTES_PER_FIELD * DA_GAS_PER_BYTE = 3 * 32 * 16 = 1536 DA gas - * 2 note hashes = 2 * DA_BYTES_PER_FIELD * DA_GAS_PER_BYTE = 2 * 32 * 16 = 1024 DA gas - * 964 bytes of logs = 964 * DA_GAS_PER_BYTE = 964 * 16 = 15424 DA gas - * tx overhead of 512 DA gas - * for a total of 
18496 DA gas. - * - * The default teardown gas allocation at present is - * 100_000_000 for both DA and L2 gas. - * - * That produces a grand total of 200018496n. - * - * This will change because: - * 1. Gas use during public execution is not currently incorporated - * 2. We are presently squashing notes/nullifiers across non/revertible during private exeuction, - * but we shouldn't. - */ - - expect(tx.transactionFee).toEqual(200018496n); - - await expectMapping( - bananaPrivateBalances, - [aliceAddress, bobAddress, bananaFPC.address, sequencerAddress], - [InitialAlicePrivateBananas - MaxFee - transferAmount, transferAmount, InitialFPCPrivateBananas, 0n], - ); - await expectMapping( - bananaPublicBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAlicePublicBananas, InitialFPCPublicBananas + MaxFee - RefundAmount, 0n], - ); - await expectMapping( - gasBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAliceGas, InitialFPCGas - FeeAmount, InitialSequencerGas + FeeAmount], - ); - - await expect( - // this rejects if note can't be added - addPendingShieldNoteToPXE(0, RefundAmount, computeSecretHash(RefundSecret), tx.txHash), - ).resolves.toBeUndefined(); - }); - - it('pays fees for tx that creates notes in private', async () => { - /** - * PRIVATE SETUP - * check authwit - * reduce alice BC.private by MaxFee - * enqueue public call to increase FPC BC.public by MaxFee - * enqueue public call for fpc.pay_fee_with_shielded_rebate - * - * PRIVATE APP LOGIC - * increase alice BC.private by newlyMintedBananas - * - * PUBLIC SETUP - * increase FPC BC.public by MaxFee - * - * PUBLIC APP LOGIC - * BC increase total supply - * - * PUBLIC TEARDOWN - * call gas.pay_fee - * decrease FPC AZT by FeeAmount - * increase sequencer AZT by FeeAmount - * call banana.shield - * decrease FPC BC.public by RefundAmount - * create transparent note with RefundAmount - */ - const newlyMintedBananas = 10n; - const tx = await bananaCoin.methods - 
.privately_mint_private_note(newlyMintedBananas) - .send({ - fee: { - gasSettings, - paymentMethod: new PrivateFeePaymentMethod( - bananaCoin.address, - bananaFPC.address, - aliceWallet, - RefundSecret, - ), - }, - }) - .wait(); - - await expectMapping( - bananaPrivateBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAlicePrivateBananas - MaxFee + newlyMintedBananas, InitialFPCPrivateBananas, 0n], - ); - await expectMapping( - bananaPublicBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAlicePublicBananas, InitialFPCPublicBananas + MaxFee - RefundAmount, 0n], - ); - await expectMapping( - gasBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAliceGas, InitialFPCGas - FeeAmount, InitialSequencerGas + FeeAmount], - ); - - await expect( - // this rejects if note can't be added - addPendingShieldNoteToPXE(0, RefundAmount, computeSecretHash(RefundSecret), tx.txHash), - ).resolves.toBeUndefined(); - }); - - it('pays fees for tx that creates notes in public', async () => { - /** - * PRIVATE SETUP - * check authwit - * reduce alice BC.private by MaxFee - * enqueue public call to increase FPC BC.public by MaxFee - * enqueue public call for fpc.pay_fee_with_shielded_rebate - * - * PRIVATE APP LOGIC - * N/A - * - * PUBLIC SETUP - * increase FPC BC.public by MaxFee - * - * PUBLIC APP LOGIC - * BC decrease Alice public balance by shieldedBananas - * BC create transparent note of shieldedBananas - * - * PUBLIC TEARDOWN - * call gas.pay_fee - * decrease FPC AZT by FeeAmount - * increase sequencer AZT by FeeAmount - * call banana.shield - * decrease FPC BC.public by RefundAmount - * create transparent note with RefundAmount - */ - const shieldedBananas = 1n; - const shieldSecret = Fr.random(); - const shieldSecretHash = computeSecretHash(shieldSecret); - const tx = await bananaCoin.methods - .shield(aliceAddress, shieldedBananas, shieldSecretHash, 0n) - .send({ - fee: { - gasSettings, - paymentMethod: new 
PrivateFeePaymentMethod( - bananaCoin.address, - bananaFPC.address, - aliceWallet, - RefundSecret, - ), - }, - }) - .wait(); - - await expectMapping( - bananaPrivateBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAlicePrivateBananas - MaxFee, InitialFPCPrivateBananas, 0n], - ); - await expectMapping( - bananaPublicBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAlicePublicBananas - shieldedBananas, InitialFPCPublicBananas + MaxFee - RefundAmount, 0n], - ); - await expectMapping( - gasBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAliceGas, InitialFPCGas - FeeAmount, InitialSequencerGas + FeeAmount], - ); - - await expect(addPendingShieldNoteToPXE(0, shieldedBananas, shieldSecretHash, tx.txHash)).resolves.toBeUndefined(); - - await expect( - addPendingShieldNoteToPXE(0, RefundAmount, computeSecretHash(RefundSecret), tx.txHash), - ).resolves.toBeUndefined(); - }); - - it('pays fees for tx that creates notes in both private and public', async () => { - const privateTransfer = 1n; - const shieldedBananas = 1n; - const shieldSecret = Fr.random(); - const shieldSecretHash = computeSecretHash(shieldSecret); - - /** - * PRIVATE SETUP - * check authwit - * reduce alice BC.private by MaxFee - * enqueue public call to increase FPC BC.public by MaxFee - * enqueue public call for fpc.pay_fee_with_shielded_rebate - * - * PRIVATE APP LOGIC - * reduce Alice's private balance by privateTransfer - * create note for Bob with privateTransfer amount of private BC - * - * PUBLIC SETUP - * increase FPC BC.public by MaxFee - * - * PUBLIC APP LOGIC - * BC decrease Alice public balance by shieldedBananas - * BC create transparent note of shieldedBananas - * - * PUBLIC TEARDOWN - * call gas.pay_fee - * decrease FPC AZT by FeeAmount - * increase sequencer AZT by FeeAmount - * call banana.shield - * decrease FPC BC.public by RefundAmount - * create transparent note with RefundAmount - */ - const tx = await new 
BatchCall(aliceWallet, [ - bananaCoin.methods.transfer(aliceAddress, bobAddress, privateTransfer, 0n).request(), - bananaCoin.methods.shield(aliceAddress, shieldedBananas, shieldSecretHash, 0n).request(), - ]) - .send({ - fee: { - gasSettings, - paymentMethod: new PrivateFeePaymentMethod( - bananaCoin.address, - bananaFPC.address, - aliceWallet, - RefundSecret, - ), - }, - }) - .wait(); - - await expectMapping( - bananaPrivateBalances, - [aliceAddress, bobAddress, bananaFPC.address, sequencerAddress], - [ - InitialAlicePrivateBananas - MaxFee - privateTransfer, - InitialBobPrivateBananas + privateTransfer, - InitialFPCPrivateBananas, - 0n, - ], - ); - await expectMapping( - bananaPublicBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAlicePublicBananas - shieldedBananas, InitialFPCPublicBananas + MaxFee - RefundAmount, 0n], - ); - await expectMapping( - gasBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [InitialAliceGas, InitialFPCGas - FeeAmount, InitialSequencerGas + FeeAmount], - ); - - await expect(addPendingShieldNoteToPXE(0, shieldedBananas, shieldSecretHash, tx.txHash)).resolves.toBeUndefined(); - - await expect( - addPendingShieldNoteToPXE(0, RefundAmount, computeSecretHash(RefundSecret), tx.txHash), - ).resolves.toBeUndefined(); - }); - - it('rejects txs that dont have enough balance to cover gas costs', async () => { - // deploy a copy of bananaFPC but don't fund it! 
- const bankruptFPC = await FPCContract.deploy(aliceWallet, bananaCoin.address, gasTokenContract.address) - .send() - .deployed(); - - await expectMapping(gasBalances, [bankruptFPC.address], [0n]); - - await expect( - bananaCoin.methods - .privately_mint_private_note(10) - .send({ - // we need to skip public simulation otherwise the PXE refuses to accept the TX - skipPublicSimulation: true, - fee: { - gasSettings, - paymentMethod: new PrivateFeePaymentMethod( - bananaCoin.address, - bankruptFPC.address, - aliceWallet, - RefundSecret, - ), - }, - }) - .wait(), - ).rejects.toThrow('Tx dropped by P2P node.'); - }); - }); - - it('fails transaction that error in setup', async () => { - const OutrageousPublicAmountAliceDoesNotHave = BigInt(100e12); - - // simulation throws an error when setup fails - await expect( - bananaCoin.methods - .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) - .send({ - fee: { - gasSettings, - paymentMethod: new BuggedSetupFeePaymentMethod(bananaCoin.address, bananaFPC.address, wallets[0]), - }, - }) - .wait(), - ).rejects.toThrow(/Message not authorized by account 'is_valid == true'/); - - // so does the sequencer - await expect( - bananaCoin.methods - .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) - .send({ - skipPublicSimulation: true, - fee: { - gasSettings, - paymentMethod: new BuggedSetupFeePaymentMethod(bananaCoin.address, bananaFPC.address, wallets[0]), - }, - }) - .wait(), - ).rejects.toThrow(/Transaction [0-9a-f]{64} was dropped\. Reason: Tx dropped by P2P node\./); - }); - - it('fails transaction that error in teardown', async () => { - /** - * We trigger an error in teardown by having the FPC authorize a transfer of its entire balance to Alice - * as part of app logic. This will cause the FPC to not have enough funds to pay the refund back to Alice. 
- */ - const PublicMintedAlicePublicBananas = 100_000_000_000n; - - const [initialAlicePrivateBananas, initialFPCPrivateBananas] = await bananaPrivateBalances( - aliceAddress, - bananaFPC.address, - ); - const [initialAlicePublicBananas, initialFPCPublicBananas] = await bananaPublicBalances( - aliceAddress, - bananaFPC.address, - ); - const [initialAliceGas, initialFPCGas, initialSequencerGas] = await gasBalances( - aliceAddress, - bananaFPC.address, - sequencerAddress, - ); - - await bananaCoin.methods.mint_public(aliceAddress, PublicMintedAlicePublicBananas).send().wait(); - - await expect( - bananaCoin.methods - .mint_public(aliceAddress, 1n) // random operation - .send({ - fee: { - gasSettings, - paymentMethod: new BuggedTeardownFeePaymentMethod(bananaCoin.address, bananaFPC.address, wallets[0]), - }, - }) - .wait(), - ).rejects.toThrow(/invalid nonce/); - - // node also drops - await expect( - bananaCoin.methods - .mint_public(aliceAddress, 1n) // random operation - .send({ - skipPublicSimulation: true, - fee: { - gasSettings, - paymentMethod: new BuggedTeardownFeePaymentMethod(bananaCoin.address, bananaFPC.address, wallets[0]), - }, - }) - .wait(), - ).rejects.toThrow(/Transaction [0-9a-f]{64} was dropped\. 
Reason: Tx dropped by P2P node\./); - - // nothing happened - await expectMapping( - bananaPrivateBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAlicePrivateBananas, initialFPCPrivateBananas, 0n], - ); - await expectMapping( - bananaPublicBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAlicePublicBananas + PublicMintedAlicePublicBananas, initialFPCPublicBananas, 0n], - ); - await expectMapping( - gasBalances, - [aliceAddress, bananaFPC.address, sequencerAddress], - [initialAliceGas, initialFPCGas, initialSequencerGas], - ); - }); - - function logFunctionSignatures(artifact: ContractArtifact, logger: DebugLogger) { - artifact.functions.forEach(fn => { - const sig = decodeFunctionSignature(fn.name, fn.parameters); - logger.verbose(`${FunctionSelector.fromNameAndParameters(fn.name, fn.parameters)} => ${artifact.name}.${sig} `); - }); - } - - const mintPrivate = async (amount: bigint, address: AztecAddress) => { - // Mint bananas privately - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - logger.debug(`Minting ${amount} bananas privately for ${address} with secret ${secretHash.toString()}`); - const receipt = await bananaCoin.methods.mint_private(amount, secretHash).send().wait(); - - // Setup auth wit - await addPendingShieldNoteToPXE(0, amount, secretHash, receipt.txHash); - const txClaim = bananaCoin.methods.redeem_shield(address, amount, secret).send(); - const receiptClaim = await txClaim.wait({ debug: true }); - const { visibleNotes } = receiptClaim.debugInfo!; - expect(visibleNotes[0].note.items[0].toBigInt()).toBe(amount); - }; - - const addPendingShieldNoteToPXE = async (accountIndex: number, amount: bigint, secretHash: Fr, txHash: TxHash) => { - const note = new Note([new Fr(amount), secretHash]); - const extendedNote = new ExtendedNote( - note, - wallets[accountIndex].getAddress(), - bananaCoin.address, - BananaCoin.storage.pending_shields.slot, - 
BananaCoin.notes.TransparentNote.id, - txHash, - ); - await wallets[accountIndex].addNote(extendedNote); - }; -}); - -class BuggedSetupFeePaymentMethod extends PublicFeePaymentMethod { - override getFunctionCalls(gasSettings: GasSettings): Promise { - const maxFee = gasSettings.getFeeLimit(); - const nonce = Fr.random(); - const messageHash = computeAuthWitMessageHash( - this.paymentContract, - this.wallet.getChainId(), - this.wallet.getVersion(), - { - args: [this.wallet.getAddress(), this.paymentContract, maxFee, nonce], - functionData: new FunctionData( - FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), - false, - ), - to: this.asset, - }, - ); - - const tooMuchFee = new Fr(maxFee.toBigInt() * 2n); - - return Promise.resolve([ - this.wallet.setPublicAuthWit(messageHash, true).request(), - { - to: this.getPaymentContract(), - functionData: new FunctionData( - FunctionSelector.fromSignature('fee_entrypoint_public(Field,(Field),Field)'), - true, - ), - args: [tooMuchFee, this.asset, nonce], - }, - ]); - } -} - -class BuggedTeardownFeePaymentMethod extends PublicFeePaymentMethod { - override async getFunctionCalls(gasSettings: GasSettings): Promise { - // authorize the FPC to take the max fee from Alice - const nonce = Fr.random(); - const maxFee = gasSettings.getFeeLimit(); - const messageHash1 = computeAuthWitMessageHash( - this.paymentContract, - this.wallet.getChainId(), - this.wallet.getVersion(), - { - args: [this.wallet.getAddress(), this.paymentContract, maxFee, nonce], - functionData: new FunctionData( - FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), - false, - ), - to: this.asset, - }, - ); - - // authorize the FPC to take the maxFee - // do this first because we only get 2 feepayload calls - await this.wallet.setPublicAuthWit(messageHash1, true).send().wait(); - - return Promise.resolve([ - // in this, we're actually paying the fee in setup - { - to: this.getPaymentContract(), - functionData: 
new FunctionData( - FunctionSelector.fromSignature('fee_entrypoint_public(Field,(Field),Field)'), - true, - ), - args: [maxFee, this.asset, nonce], - }, - // and trying to take a little extra in teardown, but specify a bad nonce - { - to: this.asset, - functionData: new FunctionData( - FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), - false, - ), - args: [this.wallet.getAddress(), this.paymentContract, new Fr(1), Fr.random()], - }, - ]); - } -} diff --git a/yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts b/yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts new file mode 100644 index 000000000000..516ac6d02b6b --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts @@ -0,0 +1,235 @@ +import { + type AccountWallet, + type AztecAddress, + type FeePaymentMethod, + Fr, + type PXE, + PrivateFeePaymentMethod, + PublicFeePaymentMethod, + SentTx, +} from '@aztec/aztec.js'; +import { DefaultDappEntrypoint } from '@aztec/entrypoints/dapp'; +import { + type AppSubscriptionContract, + type TokenContract as BananaCoin, + type CounterContract, + type FPCContract, +} from '@aztec/noir-contracts.js'; + +import { expectMapping, expectMappingDelta } from '../fixtures/utils.js'; +import { FeesTest } from './fees_test.js'; + +type Balances = [bigint, bigint, bigint]; + +describe('e2e_fees dapp_subscription', () => { + let pxe: PXE; + + let aliceWallet: AccountWallet; + let aliceAddress: AztecAddress; // Dapp subscriber. + let bobAddress: AztecAddress; // Dapp owner. 
+ let sequencerAddress: AztecAddress; + + let bananaCoin: BananaCoin; + let counterContract: CounterContract; + let subscriptionContract: AppSubscriptionContract; + let bananaFPC: FPCContract; + + let initialSubscriptionContractGasBalance: bigint; + let initialSequencerGasBalance: bigint; + let initialFPCGasBalance: bigint; + let initialBananasPublicBalances: Balances; // alice, bob, fpc + let initialBananasPrivateBalances: Balances; // alice, bob, fpc + + const t = new FeesTest('dapp_subscription'); + + beforeAll(async () => { + await t.applyBaseSnapshots(); + await t.applyFundAlice(); + await t.applySetupSubscription(); + + ({ + aliceWallet, + aliceAddress, + bobAddress, + sequencerAddress, + bananaCoin, + bananaFPC, + subscriptionContract, + counterContract, + pxe, + } = await t.setup()); + }); + + afterAll(async () => { + await t.teardown(); + }); + + beforeAll(async () => { + await expectMapping( + t.gasBalances, + [aliceAddress, sequencerAddress, subscriptionContract.address, bananaFPC.address], + [0n, 0n, t.INITIAL_GAS_BALANCE, t.INITIAL_GAS_BALANCE], + ); + + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bobAddress, bananaFPC.address], + [t.ALICE_INITIAL_BANANAS, 0n, 0n], + ); + + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bobAddress, bananaFPC.address], + [t.ALICE_INITIAL_BANANAS, 0n, 0n], + ); + }); + + beforeEach(async () => { + [initialSubscriptionContractGasBalance, initialSequencerGasBalance, initialFPCGasBalance] = (await t.gasBalances( + subscriptionContract, + sequencerAddress, + bananaFPC, + )) as Balances; + initialBananasPublicBalances = (await t.bananaPublicBalances(aliceAddress, bobAddress, bananaFPC)) as Balances; + initialBananasPrivateBalances = (await t.bananaPrivateBalances(aliceAddress, bobAddress, bananaFPC)) as Balances; + }); + + it('should allow Alice to subscribe by paying privately with bananas', async () => { + /** + PRIVATE SETUP + we first unshield `MAX_FEE` BC from alice's private balance 
to the FPC's public balance + + PUBLIC APP LOGIC + we then privately transfer `SUBSCRIPTION_AMOUNT` BC from alice to bob's subscription contract + + PUBLIC TEARDOWN + then the FPC calls `pay_fee`, reducing its gas balance by `FEE_AMOUNT`, and increasing the sequencer's gas balance by `FEE_AMOUNT` + the FPC also publicly sends `REFUND` BC to alice + */ + + const { transactionFee } = await subscribe( + new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + ); + + await expectMapping( + t.gasBalances, + [sequencerAddress, bananaFPC.address], + [initialSequencerGasBalance + transactionFee!, initialFPCGasBalance - transactionFee!], + ); + + // alice, bob, fpc + await expectBananasPrivateDelta(-t.SUBSCRIPTION_AMOUNT - t.maxFee, t.SUBSCRIPTION_AMOUNT, 0n); + await expectBananasPublicDelta(0n, 0n, transactionFee!); + + // REFUND_AMOUNT is a transparent note note + }); + + it('should allow Alice to subscribe by paying with bananas in public', async () => { + /** + PRIVATE SETUP + we publicly transfer `MAX_FEE` BC from alice's public balance to the FPC's public balance + + PUBLIC APP LOGIC + we then privately transfer `SUBSCRIPTION_AMOUNT` BC from alice to bob's subscription contract + + PUBLIC TEARDOWN + then the FPC calls `pay_fee`, reducing its gas balance by `FEE_AMOUNT`, and increasing the sequencer's gas balance by `FEE_AMOUNT` + the FPC also publicly sends `REFUND` BC to alice + */ + const { transactionFee } = await subscribe( + new PublicFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + ); + + await expectMapping( + t.gasBalances, + [sequencerAddress, bananaFPC.address], + [initialSequencerGasBalance + transactionFee!, initialFPCGasBalance - transactionFee!], + ); + + // alice, bob, fpc + // we pay the fee publicly, but the subscription payment is still private. 
+ await expectBananasPrivateDelta(-t.SUBSCRIPTION_AMOUNT, t.SUBSCRIPTION_AMOUNT, 0n); + // we have the refund from the previous test, + // but since we paid publicly this time, the refund should have been "squashed" + await expectBananasPublicDelta(-transactionFee!, 0n, transactionFee!); + }); + + it('should call dapp subscription entrypoint', async () => { + // Subscribe again, so this test does not depend on the previous ones being run. + const { transactionFee: subscriptionTxFee } = await subscribe( + new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + ); + + expect(await subscriptionContract.methods.is_initialized(aliceAddress).simulate()).toBe(true); + + const dappPayload = new DefaultDappEntrypoint(aliceAddress, aliceWallet, subscriptionContract.address); + const action = counterContract.methods.increment(bobAddress).request(); + const txExReq = await dappPayload.createTxExecutionRequest({ calls: [action] }); + const tx = await pxe.proveTx(txExReq, true); + const sentTx = new SentTx(pxe, pxe.sendTx(tx)); + const { transactionFee } = await sentTx.wait(); + + expect(await counterContract.methods.get_counter(bobAddress).simulate()).toBe(1n); + + await expectMapping( + t.gasBalances, + [sequencerAddress, subscriptionContract.address], + [ + initialSequencerGasBalance + transactionFee! + subscriptionTxFee!, + initialSubscriptionContractGasBalance - transactionFee!, + ], + ); + }); + + it('should reject after the sub runs out', async () => { + // Subscribe again. This will overwrite the previous subscription. + await subscribe(new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), 0); + await expect(dappIncrement()).rejects.toThrow( + "Failed to solve brillig function '(context.block_number()) as u64 < expiry_block_number as u64'", + ); + }); + + it('should reject after the txs run out', async () => { + // Subscribe again. This will overwrite the previous subscription. 
+ await subscribe(new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), 5, 1); + await expect(dappIncrement()).resolves.toBeDefined(); + await expect(dappIncrement()).rejects.toThrow(/note.remaining_txs as u64 > 0/); + }); + + async function subscribe(paymentMethod: FeePaymentMethod, blockDelta: number = 5, txCount: number = 4) { + const nonce = Fr.random(); + const action = bananaCoin.methods.transfer(aliceAddress, bobAddress, t.SUBSCRIPTION_AMOUNT, nonce); + await aliceWallet.createAuthWit({ caller: subscriptionContract.address, action }); + + return subscriptionContract + .withWallet(aliceWallet) + .methods.subscribe(aliceAddress, nonce, (await pxe.getBlockNumber()) + blockDelta, txCount) + .send({ fee: { gasSettings: t.gasSettings, paymentMethod } }) + .wait(); + } + + async function dappIncrement() { + const dappEntrypoint = new DefaultDappEntrypoint(aliceAddress, aliceWallet, subscriptionContract.address); + const action = counterContract.methods.increment(bobAddress).request(); + const txExReq = await dappEntrypoint.createTxExecutionRequest({ calls: [action] }); + const tx = await pxe.proveTx(txExReq, true); + const sentTx = new SentTx(pxe, pxe.sendTx(tx)); + return sentTx.wait(); + } + + const expectBananasPrivateDelta = (aliceAmount: bigint, bobAmount: bigint, fpcAmount: bigint) => + expectMappingDelta( + initialBananasPrivateBalances, + t.bananaPrivateBalances, + [aliceAddress, bobAddress, bananaFPC.address], + [aliceAmount, bobAmount, fpcAmount], + ); + + const expectBananasPublicDelta = (aliceAmount: bigint, bobAmount: bigint, fpcAmount: bigint) => + expectMappingDelta( + initialBananasPublicBalances, + t.bananaPublicBalances, + [aliceAddress, bobAddress, bananaFPC.address], + [aliceAmount, bobAmount, fpcAmount], + ); +}); diff --git a/yarn-project/end-to-end/src/e2e_fees/failures.test.ts b/yarn-project/end-to-end/src/e2e_fees/failures.test.ts new file mode 100644 index 000000000000..dfde4f8662bf --- /dev/null +++ 
b/yarn-project/end-to-end/src/e2e_fees/failures.test.ts @@ -0,0 +1,298 @@ +import { + type AccountWallet, + type AztecAddress, + Fr, + type FunctionCall, + FunctionSelector, + PublicFeePaymentMethod, + TxStatus, + computeAuthWitMessageHash, +} from '@aztec/aztec.js'; +import { FunctionData, type GasSettings } from '@aztec/circuits.js'; +import { type TokenContract as BananaCoin, type FPCContract } from '@aztec/noir-contracts.js'; + +import { expectMapping } from '../fixtures/utils.js'; +import { FeesTest } from './fees_test.js'; + +describe('e2e_fees failures', () => { + let aliceWallet: AccountWallet; + let aliceAddress: AztecAddress; + let sequencerAddress: AztecAddress; + let bananaCoin: BananaCoin; + let bananaFPC: FPCContract; + let gasSettings: GasSettings; + + const t = new FeesTest('failures'); + + beforeAll(async () => { + await t.applyBaseSnapshots(); + ({ aliceWallet, aliceAddress, sequencerAddress, bananaCoin, bananaFPC, gasSettings } = await t.setup()); + }); + + afterAll(async () => { + await t.teardown(); + }); + + it('reverts transactions but still pays fees using PublicFeePaymentMethod', async () => { + const OutrageousPublicAmountAliceDoesNotHave = BigInt(1e15); + const PublicMintedAlicePublicBananas = BigInt(1e12); + + const [initialAlicePrivateBananas, initialFPCPrivateBananas] = await t.bananaPrivateBalances( + aliceAddress, + bananaFPC.address, + ); + const [initialAlicePublicBananas, initialFPCPublicBananas] = await t.bananaPublicBalances( + aliceAddress, + bananaFPC.address, + ); + const [initialAliceGas, initialFPCGas, initialSequencerGas] = await t.gasBalances( + aliceAddress, + bananaFPC.address, + sequencerAddress, + ); + + await bananaCoin.methods.mint_public(aliceAddress, PublicMintedAlicePublicBananas).send().wait(); + // if we simulate locally, it throws an error + await expect( + bananaCoin.methods + .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) + .send({ + fee: { + gasSettings, + 
paymentMethod: new PublicFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + }, + }) + .wait(), + ).rejects.toThrow(/attempt to subtract with underflow 'hi == high'/); + + // we did not pay the fee, because we did not submit the TX + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAlicePrivateBananas, initialFPCPrivateBananas, 0n], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAlicePublicBananas + PublicMintedAlicePublicBananas, initialFPCPublicBananas, 0n], + ); + await expectMapping( + t.gasBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAliceGas, initialFPCGas, initialSequencerGas], + ); + + // if we skip simulation, it includes the failed TX + const txReceipt = await bananaCoin.methods + .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) + .send({ + skipPublicSimulation: true, + fee: { + gasSettings, + paymentMethod: new PublicFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + }, + }) + .wait({ dontThrowOnRevert: true }); + + expect(txReceipt.status).toBe(TxStatus.REVERTED); + const feeAmount = txReceipt.transactionFee!; + + // and thus we paid the fee + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAlicePrivateBananas, initialFPCPrivateBananas, 0n], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAlicePublicBananas + PublicMintedAlicePublicBananas - feeAmount, initialFPCPublicBananas + feeAmount, 0n], + ); + await expectMapping( + t.gasBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAliceGas, initialFPCGas - feeAmount, initialSequencerGas + feeAmount], + ); + + // TODO(#4712) - demonstrate reverts with the PrivateFeePaymentMethod. 
+ // Can't do presently because all logs are "revertible" so we lose notes that get broadcasted during unshielding. + }); + + it('fails transaction that error in setup', async () => { + const OutrageousPublicAmountAliceDoesNotHave = BigInt(100e12); + + // simulation throws an error when setup fails + await expect( + bananaCoin.methods + .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) + .send({ + fee: { + gasSettings, + paymentMethod: new BuggedSetupFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + }, + }) + .wait(), + ).rejects.toThrow(/Message not authorized by account 'is_valid == true'/); + + // so does the sequencer + await expect( + bananaCoin.methods + .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) + .send({ + skipPublicSimulation: true, + fee: { + gasSettings, + paymentMethod: new BuggedSetupFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + }, + }) + .wait(), + ).rejects.toThrow(/Transaction [0-9a-f]{64} was dropped\. Reason: Tx dropped by P2P node\./); + }); + + it('fails transaction that error in teardown', async () => { + /** + * We trigger an error in teardown by having the FPC authorize a transfer of its entire balance to Alice + * as part of app logic. This will cause the FPC to not have enough funds to pay the refund back to Alice. 
+ */ + const PublicMintedAlicePublicBananas = 100_000_000_000n; + + const [initialAlicePrivateBananas, initialFPCPrivateBananas] = await t.bananaPrivateBalances( + aliceAddress, + bananaFPC.address, + ); + const [initialAlicePublicBananas, initialFPCPublicBananas] = await t.bananaPublicBalances( + aliceAddress, + bananaFPC.address, + ); + const [initialAliceGas, initialFPCGas, initialSequencerGas] = await t.gasBalances( + aliceAddress, + bananaFPC.address, + sequencerAddress, + ); + + await bananaCoin.methods.mint_public(aliceAddress, PublicMintedAlicePublicBananas).send().wait(); + + await expect( + bananaCoin.methods + .mint_public(aliceAddress, 1n) // random operation + .send({ + fee: { + gasSettings, + paymentMethod: new BuggedTeardownFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + }, + }) + .wait(), + ).rejects.toThrow(/invalid nonce/); + + // node also drops + await expect( + bananaCoin.methods + .mint_public(aliceAddress, 1n) // random operation + .send({ + skipPublicSimulation: true, + fee: { + gasSettings, + paymentMethod: new BuggedTeardownFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + }, + }) + .wait(), + ).rejects.toThrow(/Transaction [0-9a-f]{64} was dropped\. 
Reason: Tx dropped by P2P node\./); + + // nothing happened + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAlicePrivateBananas, initialFPCPrivateBananas, 0n], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAlicePublicBananas + PublicMintedAlicePublicBananas, initialFPCPublicBananas, 0n], + ); + await expectMapping( + t.gasBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [initialAliceGas, initialFPCGas, initialSequencerGas], + ); + }); +}); + +class BuggedSetupFeePaymentMethod extends PublicFeePaymentMethod { + override getFunctionCalls(gasSettings: GasSettings): Promise { + const maxFee = gasSettings.getFeeLimit(); + const nonce = Fr.random(); + const messageHash = computeAuthWitMessageHash( + this.paymentContract, + this.wallet.getChainId(), + this.wallet.getVersion(), + { + args: [this.wallet.getAddress(), this.paymentContract, maxFee, nonce], + functionData: new FunctionData( + FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), + false, + ), + to: this.asset, + }, + ); + + const tooMuchFee = new Fr(maxFee.toBigInt() * 2n); + + return Promise.resolve([ + this.wallet.setPublicAuthWit(messageHash, true).request(), + { + to: this.getPaymentContract(), + functionData: new FunctionData( + FunctionSelector.fromSignature('fee_entrypoint_public(Field,(Field),Field)'), + true, + ), + args: [tooMuchFee, this.asset, nonce], + }, + ]); + } +} + +class BuggedTeardownFeePaymentMethod extends PublicFeePaymentMethod { + override async getFunctionCalls(gasSettings: GasSettings): Promise { + // authorize the FPC to take the max fee from Alice + const nonce = Fr.random(); + const maxFee = gasSettings.getFeeLimit(); + const messageHash1 = computeAuthWitMessageHash( + this.paymentContract, + this.wallet.getChainId(), + this.wallet.getVersion(), + { + args: [this.wallet.getAddress(), 
this.paymentContract, maxFee, nonce], + functionData: new FunctionData( + FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), + false, + ), + to: this.asset, + }, + ); + + // authorize the FPC to take the maxFee + // do this first because we only get 2 feepayload calls + await this.wallet.setPublicAuthWit(messageHash1, true).send().wait(); + + return Promise.resolve([ + // in this, we're actually paying the fee in setup + { + to: this.getPaymentContract(), + functionData: new FunctionData( + FunctionSelector.fromSignature('fee_entrypoint_public(Field,(Field),Field)'), + true, + ), + args: [maxFee, this.asset, nonce], + }, + // and trying to take a little extra in teardown, but specify a bad nonce + { + to: this.asset, + functionData: new FunctionData( + FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), + false, + ), + args: [this.wallet.getAddress(), this.paymentContract, new Fr(1), Fr.random()], + }, + ]); + } +} diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts new file mode 100644 index 000000000000..39cd2308db8f --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -0,0 +1,269 @@ +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { + type AccountWallet, + type AztecAddress, + type AztecNode, + type DebugLogger, + ExtendedNote, + Fr, + Note, + type PXE, + SignerlessWallet, + type TxHash, + computeSecretHash, + createDebugLogger, +} from '@aztec/aztec.js'; +import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint'; +import { GasSettings } from '@aztec/circuits.js'; +import { createL1Clients } from '@aztec/ethereum'; +import { + AppSubscriptionContract, + TokenContract as BananaCoin, + CounterContract, + FPCContract, + GasTokenContract, +} from '@aztec/noir-contracts.js'; + +import { MNEMONIC } from '../fixtures/fixtures.js'; +import { + type ISnapshotManager, + type SubsystemsContext, + 
addAccounts, + createSnapshotManager, +} from '../fixtures/snapshot_manager.js'; +import { type BalancesFn, deployCanonicalGasToken, getBalancesFn, publicDeployAccounts } from '../fixtures/utils.js'; +import { GasPortalTestingHarnessFactory } from '../shared/gas_portal_test_harness.js'; + +const { E2E_DATA_PATH: dataPath } = process.env; + +/** + * Test fixture for testing fees. Provides the following snapshots: + * InitialAccounts: Initializes 3 Schnorr account contracts. + * PublicDeployAccounts: Deploys the accounts publicly. + * DeployGasToken: Deploys the gas token contract. + * FPCSetup: Deploys BananaCoin and FPC contracts, and bridges gas from L1. + * FundAlice: Mints private and public bananas to Alice. + * SetupSubscription: Deploys a counter contract and a subscription contract, and mints gas token to the subscription contract. + */ +export class FeesTest { + private snapshotManager: ISnapshotManager; + private wallets: AccountWallet[] = []; + + public logger: DebugLogger; + public pxe!: PXE; + public aztecNode!: AztecNode; + + public aliceWallet!: AccountWallet; + public aliceAddress!: AztecAddress; + public bobWallet!: AccountWallet; + public bobAddress!: AztecAddress; + public sequencerAddress!: AztecAddress; + + public gasSettings = GasSettings.default(); + public maxFee = this.gasSettings.getFeeLimit().toBigInt(); + + public gasTokenContract!: GasTokenContract; + public bananaCoin!: BananaCoin; + public bananaFPC!: FPCContract; + public counterContract!: CounterContract; + public subscriptionContract!: AppSubscriptionContract; + + public gasBalances!: BalancesFn; + public bananaPublicBalances!: BalancesFn; + public bananaPrivateBalances!: BalancesFn; + + public readonly INITIAL_GAS_BALANCE = BigInt(1e15); + public readonly ALICE_INITIAL_BANANAS = BigInt(1e12); + public readonly SUBSCRIPTION_AMOUNT = 10_000n; + public readonly APP_SPONSORED_TX_GAS_LIMIT = BigInt(10e9); + + constructor(testName: string) { + this.logger = 
createDebugLogger(`aztec:e2e_fees:${testName}`); + this.snapshotManager = createSnapshotManager(`e2e_fees/${testName}`, dataPath); + } + + async setup() { + const context = await this.snapshotManager.setup(); + await context.aztecNode.setConfig({ feeRecipient: this.sequencerAddress }); + ({ pxe: this.pxe, aztecNode: this.aztecNode } = context); + return this; + } + + async teardown() { + await this.snapshotManager.teardown(); + } + + /** Alice mints bananaCoin tokens privately to the target address. */ + async mintPrivate(amount: bigint, address: AztecAddress) { + const secret = Fr.random(); + const secretHash = computeSecretHash(secret); + const balanceBefore = await this.bananaCoin.methods.balance_of_private(this.aliceAddress).simulate(); + this.logger.debug(`Minting ${amount} bananas privately for ${address} with secret ${secretHash.toString()}`); + const receipt = await this.bananaCoin.methods.mint_private(amount, secretHash).send().wait(); + + await this.addPendingShieldNoteToPXE(this.aliceWallet, amount, secretHash, receipt.txHash); + await this.bananaCoin.methods.redeem_shield(address, amount, secret).send().wait(); + const balanceAfter = await this.bananaCoin.methods.balance_of_private(this.aliceAddress).simulate(); + expect(balanceAfter).toEqual(balanceBefore + amount); + } + + async addPendingShieldNoteToPXE(wallet: AccountWallet, amount: bigint, secretHash: Fr, txHash: TxHash) { + const note = new Note([new Fr(amount), secretHash]); + const extendedNote = new ExtendedNote( + note, + wallet.getAddress(), + this.bananaCoin.address, + BananaCoin.storage.pending_shields.slot, + BananaCoin.notes.TransparentNote.id, + txHash, + ); + await wallet.addNote(extendedNote); + } + + public async applyBaseSnapshots() { + await this.applyInitialAccountsSnapshot(); + await this.applyPublicDeployAccountsSnapshot(); + await this.applyDeployGasTokenSnapshot(); + await this.applyFPCSetupSnapshot(); + } + + private async applyInitialAccountsSnapshot() { + await 
this.snapshotManager.snapshot( + 'initial_accounts', + addAccounts(3, this.logger), + async ({ accountKeys }, { pxe }) => { + const accountManagers = accountKeys.map(ak => getSchnorrAccount(pxe, ak[0], ak[1], 1)); + await Promise.all(accountManagers.map(a => a.register())); + this.wallets = await Promise.all(accountManagers.map(a => a.getWallet())); + this.wallets.forEach((w, i) => this.logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); + [this.aliceWallet, this.bobWallet] = this.wallets.slice(0, 2); + [this.aliceAddress, this.bobAddress, this.sequencerAddress] = this.wallets.map(w => w.getAddress()); + }, + ); + } + + private async applyPublicDeployAccountsSnapshot() { + await this.snapshotManager.snapshot('public_deploy_accounts', () => + publicDeployAccounts(this.aliceWallet, this.wallets), + ); + } + + private async applyDeployGasTokenSnapshot() { + await this.snapshotManager.snapshot('deploy_gas_token', async context => { + await deployCanonicalGasToken( + new SignerlessWallet( + context.pxe, + new DefaultMultiCallEntrypoint(context.aztecNodeConfig.chainId, context.aztecNodeConfig.version), + ), + ); + }); + } + + private async applyFPCSetupSnapshot() { + await this.snapshotManager.snapshot( + 'fpc_setup', + async context => { + const harness = await this.createGasBridgeTestHarness(context); + const gasTokenContract = harness.l2Token; + expect(await context.pxe.isContractPubliclyDeployed(gasTokenContract.address)).toBe(true); + + const bananaCoin = await BananaCoin.deploy(this.aliceWallet, this.aliceAddress, 'BC', 'BC', 18n) + .send() + .deployed(); + + this.logger.info(`BananaCoin deployed at ${bananaCoin.address}`); + + const bananaFPC = await FPCContract.deploy(this.aliceWallet, bananaCoin.address, gasTokenContract.address) + .send() + .deployed(); + + this.logger.info(`BananaPay deployed at ${bananaFPC.address}`); + + await harness.bridgeFromL1ToL2(this.INITIAL_GAS_BALANCE, this.INITIAL_GAS_BALANCE, bananaFPC.address); + + return { + 
bananaCoinAddress: bananaCoin.address, + bananaFPCAddress: bananaFPC.address, + gasTokenAddress: gasTokenContract.address, + }; + }, + async data => { + const bananaFPC = await FPCContract.at(data.bananaFPCAddress, this.aliceWallet); + const bananaCoin = await BananaCoin.at(data.bananaCoinAddress, this.aliceWallet); + const gasTokenContract = await GasTokenContract.at(data.gasTokenAddress, this.aliceWallet); + + this.bananaCoin = bananaCoin; + this.bananaFPC = bananaFPC; + this.gasTokenContract = gasTokenContract; + + this.bananaPublicBalances = getBalancesFn('🍌.public', bananaCoin.methods.balance_of_public, this.logger); + this.bananaPrivateBalances = getBalancesFn('🍌.private', bananaCoin.methods.balance_of_private, this.logger); + this.gasBalances = getBalancesFn('⛽', gasTokenContract.methods.balance_of_public, this.logger); + }, + ); + } + + public async applyFundAlice() { + await this.snapshotManager.snapshot( + 'fund_alice', + async () => { + await this.mintPrivate(BigInt(this.ALICE_INITIAL_BANANAS), this.aliceAddress); + await this.bananaCoin.methods.mint_public(this.aliceAddress, this.ALICE_INITIAL_BANANAS).send().wait(); + }, + () => Promise.resolve(), + ); + } + + public async applySetupSubscription() { + await this.snapshotManager.snapshot( + 'setup_subscription', + async () => { + // Deploy counter contract for testing with Bob as owner + const counterContract = await CounterContract.deploy(this.bobWallet, 0, this.bobAddress).send().deployed(); + + // Deploy subscription contract, that allows subscriptions for SUBSCRIPTION_AMOUNT of bananas + const subscriptionContract = await AppSubscriptionContract.deploy( + this.bobWallet, + counterContract.address, + this.bobAddress, + this.bananaCoin.address, + this.SUBSCRIPTION_AMOUNT, + this.gasTokenContract.address, + this.APP_SPONSORED_TX_GAS_LIMIT, + ) + .send() + .deployed(); + + // Mint some gas tokens to the subscription contract + // Could also use bridgeFromL1ToL2 from the harness, but this is more direct 
+ await this.gasTokenContract.methods + .mint_public(subscriptionContract.address, this.INITIAL_GAS_BALANCE) + .send() + .wait(); + + return { + counterContractAddress: counterContract.address, + subscriptionContractAddress: subscriptionContract.address, + }; + }, + async ({ counterContractAddress, subscriptionContractAddress }) => { + this.counterContract = await CounterContract.at(counterContractAddress, this.bobWallet); + this.subscriptionContract = await AppSubscriptionContract.at(subscriptionContractAddress, this.bobWallet); + }, + ); + } + + private createGasBridgeTestHarness(context: SubsystemsContext) { + const { publicClient, walletClient } = createL1Clients(context.aztecNodeConfig.rpcUrl, MNEMONIC); + + return GasPortalTestingHarnessFactory.create({ + aztecNode: context.aztecNode, + pxeService: context.pxe, + publicClient: publicClient, + walletClient: walletClient, + wallet: this.aliceWallet, + logger: this.logger, + mockL1: false, + }); + } +} diff --git a/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts new file mode 100644 index 000000000000..76a0a712d18b --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts @@ -0,0 +1,388 @@ +import { + type AztecAddress, + BatchCall, + Fr, + PrivateFeePaymentMethod, + type TxReceipt, + type Wallet, + computeSecretHash, +} from '@aztec/aztec.js'; +import { type GasSettings } from '@aztec/circuits.js'; +import { type TokenContract as BananaCoin, FPCContract, type GasTokenContract } from '@aztec/noir-contracts.js'; + +import { expectMapping } from '../fixtures/utils.js'; +import { FeesTest } from './fees_test.js'; + +describe('e2e_fees private_payment', () => { + let aliceWallet: Wallet; + let aliceAddress: AztecAddress; + let bobAddress: AztecAddress; + let sequencerAddress: AztecAddress; + let gasTokenContract: GasTokenContract; + let bananaCoin: BananaCoin; + let bananaFPC: FPCContract; + let gasSettings: 
GasSettings; + + const t = new FeesTest('private_payment'); + + beforeAll(async () => { + await t.applyBaseSnapshots(); + await t.applyFundAlice(); + ({ aliceWallet, aliceAddress, bobAddress, sequencerAddress, gasTokenContract, bananaCoin, bananaFPC, gasSettings } = + await t.setup()); + }); + + afterAll(async () => { + await t.teardown(); + }); + + let InitialAlicePublicBananas: bigint; + let InitialAlicePrivateBananas: bigint; + let InitialAliceGas: bigint; + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + let InitialBobPublicBananas: bigint; + let InitialBobPrivateBananas: bigint; + + let InitialFPCPublicBananas: bigint; + let InitialFPCPrivateBananas: bigint; + let InitialFPCGas: bigint; + + let InitialSequencerGas: bigint; + + let maxFee: bigint; + let refundSecret: Fr; + + beforeEach(async () => { + maxFee = BigInt(20e9); + refundSecret = Fr.random(); + + expect(gasSettings.getFeeLimit().toBigInt()).toEqual(maxFee); + + [ + [InitialAlicePrivateBananas, InitialBobPrivateBananas, InitialFPCPrivateBananas], + [InitialAlicePublicBananas, InitialBobPublicBananas, InitialFPCPublicBananas], + [InitialAliceGas, InitialFPCGas, InitialSequencerGas], + ] = await Promise.all([ + t.bananaPrivateBalances(aliceAddress, bobAddress, bananaFPC.address), + t.bananaPublicBalances(aliceAddress, bobAddress, bananaFPC.address), + t.gasBalances(aliceAddress, bananaFPC.address, sequencerAddress), + ]); + }); + + const getFeeAndRefund = (tx: Pick) => [tx.transactionFee!, maxFee - tx.transactionFee!]; + + it('pays fees for tx that dont run public app logic', async () => { + /** + * PRIVATE SETUP (1 nullifier for tx) + * check authwit (1 nullifier) + * reduce alice BC.private by MaxFee (1 nullifier) + * enqueue public call to increase FPC BC.public by MaxFee + * enqueue public call for fpc.pay_fee_with_shielded_rebate + * + * PRIVATE APP LOGIC + * reduce Alice's BC.private by transferAmount (1 note) + * create note for Bob of transferAmount (1 note) + * encrypted logs 
of 944 bytes + * unencrypted logs of 20 bytes + * + * PUBLIC SETUP + * increase FPC BC.public by MaxFee + * + * PUBLIC APP LOGIC + * N/A + * + * PUBLIC TEARDOWN + * call gas.pay_fee + * decrease FPC AZT by FeeAmount + * increase sequencer AZT by FeeAmount + * call banana.shield + * decrease FPC BC.public by RefundAmount + * create transparent note with RefundAmount + * + * this is expected to squash notes and nullifiers + */ + const transferAmount = 5n; + const tx = await bananaCoin.methods + .transfer(aliceAddress, bobAddress, transferAmount, 0n) + .send({ + fee: { + gasSettings, + paymentMethod: new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet, refundSecret), + }, + }) + .wait(); + + /** + * at present the user is paying DA gas for: + * 3 nullifiers = 3 * DA_BYTES_PER_FIELD * DA_GAS_PER_BYTE = 3 * 32 * 16 = 1536 DA gas + * 2 note hashes = 2 * DA_BYTES_PER_FIELD * DA_GAS_PER_BYTE = 2 * 32 * 16 = 1024 DA gas + * 964 bytes of logs = 964 * DA_GAS_PER_BYTE = 964 * 16 = 15424 DA gas + * tx overhead of 512 DA gas + * for a total of 18496 DA gas. + * + * The default teardown gas allocation at present is + * 100_000_000 for both DA and L2 gas. + * + * That produces a grand total of 200018496n. + * + * This will change because: + * 1. Gas use during public execution is not currently incorporated + * 2. We are presently squashing notes/nullifiers across non/revertible during private execution, + * but we shouldn't. 
+ */ + expect(tx.transactionFee).toEqual(200018496n); + const [feeAmount, refundAmount] = getFeeAndRefund(tx); + + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bobAddress, bananaFPC.address, sequencerAddress], + [InitialAlicePrivateBananas - maxFee - transferAmount, transferAmount, InitialFPCPrivateBananas, 0n], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAlicePublicBananas, InitialFPCPublicBananas + maxFee - refundAmount, 0n], + ); + await expectMapping( + t.gasBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAliceGas, InitialFPCGas - feeAmount, InitialSequencerGas + feeAmount], + ); + + await expect( + // this rejects if note can't be added + t.addPendingShieldNoteToPXE(t.aliceWallet, refundAmount, computeSecretHash(refundSecret), tx.txHash), + ).resolves.toBeUndefined(); + }); + + it('pays fees for tx that creates notes in private', async () => { + /** + * PRIVATE SETUP + * check authwit + * reduce alice BC.private by MaxFee + * enqueue public call to increase FPC BC.public by MaxFee + * enqueue public call for fpc.pay_fee_with_shielded_rebate + * + * PRIVATE APP LOGIC + * increase alice BC.private by newlyMintedBananas + * + * PUBLIC SETUP + * increase FPC BC.public by MaxFee + * + * PUBLIC APP LOGIC + * BC increase total supply + * + * PUBLIC TEARDOWN + * call gas.pay_fee + * decrease FPC AZT by FeeAmount + * increase sequencer AZT by FeeAmount + * call banana.shield + * decrease FPC BC.public by RefundAmount + * create transparent note with RefundAmount + */ + const newlyMintedBananas = 10n; + const tx = await bananaCoin.methods + .privately_mint_private_note(newlyMintedBananas) + .send({ + fee: { + gasSettings, + paymentMethod: new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet, refundSecret), + }, + }) + .wait(); + + const [feeAmount, refundAmount] = getFeeAndRefund(tx); + + await expectMapping( + 
t.bananaPrivateBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAlicePrivateBananas - maxFee + newlyMintedBananas, InitialFPCPrivateBananas, 0n], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAlicePublicBananas, InitialFPCPublicBananas + maxFee - refundAmount, 0n], + ); + await expectMapping( + t.gasBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAliceGas, InitialFPCGas - feeAmount, InitialSequencerGas + feeAmount], + ); + + await expect( + // this rejects if note can't be added + t.addPendingShieldNoteToPXE(t.aliceWallet, refundAmount, computeSecretHash(refundSecret), tx.txHash), + ).resolves.toBeUndefined(); + }); + + it('pays fees for tx that creates notes in public', async () => { + /** + * PRIVATE SETUP + * check authwit + * reduce alice BC.private by MaxFee + * enqueue public call to increase FPC BC.public by MaxFee + * enqueue public call for fpc.pay_fee_with_shielded_rebate + * + * PRIVATE APP LOGIC + * N/A + * + * PUBLIC SETUP + * increase FPC BC.public by MaxFee + * + * PUBLIC APP LOGIC + * BC decrease Alice public balance by shieldedBananas + * BC create transparent note of shieldedBananas + * + * PUBLIC TEARDOWN + * call gas.pay_fee + * decrease FPC AZT by FeeAmount + * increase sequencer AZT by FeeAmount + * call banana.shield + * decrease FPC BC.public by RefundAmount + * create transparent note with RefundAmount + */ + const shieldedBananas = 1n; + const shieldSecret = Fr.random(); + const shieldSecretHash = computeSecretHash(shieldSecret); + const tx = await bananaCoin.methods + .shield(aliceAddress, shieldedBananas, shieldSecretHash, 0n) + .send({ + fee: { + gasSettings, + paymentMethod: new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet, refundSecret), + }, + }) + .wait(); + + const [feeAmount, refundAmount] = getFeeAndRefund(tx); + + await expectMapping( + t.bananaPrivateBalances, + 
[aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAlicePrivateBananas - maxFee, InitialFPCPrivateBananas, 0n], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAlicePublicBananas - shieldedBananas, InitialFPCPublicBananas + maxFee - refundAmount, 0n], + ); + await expectMapping( + t.gasBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAliceGas, InitialFPCGas - feeAmount, InitialSequencerGas + feeAmount], + ); + + await expect( + t.addPendingShieldNoteToPXE(t.aliceWallet, shieldedBananas, shieldSecretHash, tx.txHash), + ).resolves.toBeUndefined(); + + await expect( + t.addPendingShieldNoteToPXE(t.aliceWallet, refundAmount, computeSecretHash(refundSecret), tx.txHash), + ).resolves.toBeUndefined(); + }); + + it('pays fees for tx that creates notes in both private and public', async () => { + const privateTransfer = 1n; + const shieldedBananas = 1n; + const shieldSecret = Fr.random(); + const shieldSecretHash = computeSecretHash(shieldSecret); + + /** + * PRIVATE SETUP + * check authwit + * reduce alice BC.private by MaxFee + * enqueue public call to increase FPC BC.public by MaxFee + * enqueue public call for fpc.pay_fee_with_shielded_rebate + * + * PRIVATE APP LOGIC + * reduce Alice's private balance by privateTransfer + * create note for Bob with privateTransfer amount of private BC + * + * PUBLIC SETUP + * increase FPC BC.public by MaxFee + * + * PUBLIC APP LOGIC + * BC decrease Alice public balance by shieldedBananas + * BC create transparent note of shieldedBananas + * + * PUBLIC TEARDOWN + * call gas.pay_fee + * decrease FPC AZT by FeeAmount + * increase sequencer AZT by FeeAmount + * call banana.shield + * decrease FPC BC.public by RefundAmount + * create transparent note with RefundAmount + */ + const tx = await new BatchCall(aliceWallet, [ + bananaCoin.methods.transfer(aliceAddress, bobAddress, privateTransfer, 0n).request(), + 
bananaCoin.methods.shield(aliceAddress, shieldedBananas, shieldSecretHash, 0n).request(), + ]) + .send({ + fee: { + gasSettings, + paymentMethod: new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet, refundSecret), + }, + }) + .wait(); + + const [feeAmount, refundAmount] = getFeeAndRefund(tx); + + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bobAddress, bananaFPC.address, sequencerAddress], + [ + InitialAlicePrivateBananas - maxFee - privateTransfer, + InitialBobPrivateBananas + privateTransfer, + InitialFPCPrivateBananas, + 0n, + ], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAlicePublicBananas - shieldedBananas, InitialFPCPublicBananas + maxFee - refundAmount, 0n], + ); + await expectMapping( + t.gasBalances, + [aliceAddress, bananaFPC.address, sequencerAddress], + [InitialAliceGas, InitialFPCGas - feeAmount, InitialSequencerGas + feeAmount], + ); + + await expect( + t.addPendingShieldNoteToPXE(t.aliceWallet, shieldedBananas, shieldSecretHash, tx.txHash), + ).resolves.toBeUndefined(); + + await expect( + t.addPendingShieldNoteToPXE(t.aliceWallet, refundAmount, computeSecretHash(refundSecret), tx.txHash), + ).resolves.toBeUndefined(); + }); + + it('rejects txs that dont have enough balance to cover gas costs', async () => { + // deploy a copy of bananaFPC but don't fund it! 
+ const bankruptFPC = await FPCContract.deploy(aliceWallet, bananaCoin.address, gasTokenContract.address) + .send() + .deployed(); + + await expectMapping(t.gasBalances, [bankruptFPC.address], [0n]); + + await expect( + bananaCoin.methods + .privately_mint_private_note(10) + .send({ + // we need to skip public simulation otherwise the PXE refuses to accept the TX + skipPublicSimulation: true, + fee: { + gasSettings, + paymentMethod: new PrivateFeePaymentMethod( + bananaCoin.address, + bankruptFPC.address, + aliceWallet, + refundSecret, + ), + }, + }) + .wait(), + ).rejects.toThrow('Tx dropped by P2P node.'); + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts index 2dd4614f80ed..8ffb4dac4344 100644 --- a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts @@ -77,7 +77,7 @@ describe('e2e_lending_contract', () => { new TokenSimulator(collateralAsset, logger, [lendingContract.address, wallet.getAddress()]), new TokenSimulator(stableCoin, logger, [lendingContract.address, wallet.getAddress()]), ); - }, 200_000); + }, 300_000); afterAll(() => teardown()); diff --git a/yarn-project/end-to-end/src/e2e_nested_contract/nested_contract_test.ts b/yarn-project/end-to-end/src/e2e_nested_contract/nested_contract_test.ts index 512320b7fbdd..561b79c7fd3b 100644 --- a/yarn-project/end-to-end/src/e2e_nested_contract/nested_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_nested_contract/nested_contract_test.ts @@ -9,16 +9,17 @@ import { import { ChildContract, ParentContract } from '@aztec/noir-contracts.js'; import { - SnapshotManager, + type ISnapshotManager, type SubsystemsContext, addAccounts, + createSnapshotManager, publicDeployAccounts, } from '../fixtures/snapshot_manager.js'; const { E2E_DATA_PATH: dataPath } = process.env; export class NestedContractTest { - private snapshotManager: SnapshotManager; + private 
snapshotManager: ISnapshotManager; logger: DebugLogger; wallets: AccountWallet[] = []; accounts: CompleteAddress[] = []; @@ -29,7 +30,7 @@ export class NestedContractTest { constructor(testName: string) { this.logger = createDebugLogger(`aztec:e2e_nested_contract:${testName}`); - this.snapshotManager = new SnapshotManager(`e2e_nested_contract/${testName}`, dataPath); + this.snapshotManager = createSnapshotManager(`e2e_nested_contract/${testName}`, dataPath); } /** diff --git a/yarn-project/end-to-end/src/e2e_ordering.test.ts b/yarn-project/end-to-end/src/e2e_ordering.test.ts index e5ad26a38684..99aaa3d190b2 100644 --- a/yarn-project/end-to-end/src/e2e_ordering.test.ts +++ b/yarn-project/end-to-end/src/e2e_ordering.test.ts @@ -7,10 +7,12 @@ import { jest } from '@jest/globals'; import { setup } from './fixtures/utils.js'; -jest.setTimeout(30_000); +const TIMEOUT = 300_000; // See https://github.com/AztecProtocol/aztec-packages/issues/1601 describe('e2e_ordering', () => { + jest.setTimeout(TIMEOUT); + let pxe: PXE; let wallet: Wallet; let teardown: () => Promise; @@ -29,7 +31,7 @@ describe('e2e_ordering', () => { beforeEach(async () => { ({ teardown, pxe, wallet } = await setup()); - }, 200_000); + }, TIMEOUT); afterEach(() => teardown()); @@ -42,7 +44,7 @@ describe('e2e_ordering', () => { parent = await ParentContract.deploy(wallet).send().deployed(); child = await ChildContract.deploy(wallet).send().deployed(); pubSetValueSelector = child.methods.pub_set_value.selector; - }); + }, TIMEOUT); describe('enqueued public calls ordering', () => { const nestedValue = 10n; diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts index 566118a99cc1..72c5c4c0aad0 100644 --- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts +++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts @@ -33,7 +33,7 @@ 
describe('e2e_public_cross_chain_messaging deposits', () => { ownerAddress = crossChainTestHarness.ownerAddress; l2Bridge = crossChainTestHarness.l2Bridge; l2Token = crossChainTestHarness.l2Token; - }, 200_000); + }, 300_000); afterEach(async () => { await t.teardown(); diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/failure_cases.test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/failure_cases.test.ts index 8e8bb4f1bb98..a6b1b6e6ca51 100644 --- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/failure_cases.test.ts +++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/failure_cases.test.ts @@ -17,7 +17,7 @@ describe('e2e_public_cross_chain_messaging failures', () => { ({ crossChainTestHarness, user1Wallet, user2Wallet } = t); ethAccount = crossChainTestHarness.ethAccount; l2Bridge = crossChainTestHarness.l2Bridge; - }, 200_000); + }, 300_000); afterAll(async () => { await t.teardown(); diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l1_to_l2.test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l1_to_l2.test.ts index 9285a56a36d6..bc31966bfe68 100644 --- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l1_to_l2.test.ts +++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l1_to_l2.test.ts @@ -27,7 +27,7 @@ describe('e2e_public_cross_chain_messaging l1_to_l2', () => { aztecNode = crossChainTestHarness.aztecNode; inbox = crossChainTestHarness.inbox; - }, 200_000); + }, 300_000); afterAll(async () => { await t.teardown(); diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l2_to_l1.test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l2_to_l1.test.ts index b43333c5edf7..0be2acfddbff 100644 --- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l2_to_l1.test.ts +++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/l2_to_l1.test.ts @@ -21,7 +21,7 @@ 
describe('e2e_public_cross_chain_messaging l2_to_l1', () => { aztecNode = crossChainTestHarness.aztecNode; outbox = crossChainTestHarness.outbox; - }, 200_000); + }, 300_000); afterAll(async () => { await t.teardown(); diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/public_cross_chain_messaging_contract_test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/public_cross_chain_messaging_contract_test.ts index 38add3ebc8a4..b747d543e91f 100644 --- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/public_cross_chain_messaging_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/public_cross_chain_messaging_contract_test.ts @@ -10,26 +10,18 @@ import { type PXE, createDebugLogger, } from '@aztec/aztec.js'; +import { createL1Clients } from '@aztec/ethereum'; import { InboxAbi, OutboxAbi, PortalERC20Abi, TokenPortalAbi } from '@aztec/l1-artifacts'; import { TokenBridgeContract, TokenContract } from '@aztec/noir-contracts.js'; -import { - type Chain, - type HttpTransport, - type PublicClient, - createPublicClient, - createWalletClient, - getContract, - http, -} from 'viem'; -import { mnemonicToAccount } from 'viem/accounts'; -import { foundry } from 'viem/chains'; +import { type Chain, type HttpTransport, type PublicClient, getContract } from 'viem'; import { MNEMONIC } from '../fixtures/fixtures.js'; import { - SnapshotManager, + type ISnapshotManager, type SubsystemsContext, addAccounts, + createSnapshotManager, publicDeployAccounts, } from '../fixtures/snapshot_manager.js'; import { CrossChainTestHarness } from '../shared/cross_chain_test_harness.js'; @@ -37,7 +29,7 @@ import { CrossChainTestHarness } from '../shared/cross_chain_test_harness.js'; const { E2E_DATA_PATH: dataPath } = process.env; export class PublicCrossChainMessagingContractTest { - private snapshotManager: SnapshotManager; + private snapshotManager: ISnapshotManager; logger: DebugLogger; wallets: AccountWallet[] = []; 
accounts: CompleteAddress[] = []; @@ -60,7 +52,7 @@ export class PublicCrossChainMessagingContractTest { constructor(testName: string) { this.logger = createDebugLogger(`aztec:e2e_public_cross_chain_messaging:${testName}`); - this.snapshotManager = new SnapshotManager(`e2e_public_cross_chain_messaging/${testName}`, dataPath); + this.snapshotManager = createSnapshotManager(`e2e_public_cross_chain_messaging/${testName}`, dataPath); } async setup() { @@ -80,22 +72,6 @@ export class PublicCrossChainMessagingContractTest { await this.snapshotManager.teardown(); } - viemStuff(rpcUrl: string) { - const hdAccount = mnemonicToAccount(MNEMONIC); - - const walletClient = createWalletClient({ - account: hdAccount, - chain: foundry, - transport: http(rpcUrl), - }); - const publicClient = createPublicClient({ - chain: foundry, - transport: http(rpcUrl), - }); - - return { walletClient, publicClient }; - } - async applyBaseSnapshots() { // Note that we are using the same `pxe`, `aztecNodeConfig` and `aztecNode` across all snapshots. // This is to not have issues with different networks. 
@@ -126,7 +102,7 @@ export class PublicCrossChainMessagingContractTest { this.logger.verbose(`Public deploy accounts...`); await publicDeployAccounts(this.wallets[0], this.accounts.slice(0, 3)); - const { publicClient, walletClient } = this.viemStuff(this.aztecNodeConfig.rpcUrl); + const { publicClient, walletClient } = createL1Clients(this.aztecNodeConfig.rpcUrl, MNEMONIC); this.logger.verbose(`Setting up cross chain harness...`); this.crossChainTestHarness = await CrossChainTestHarness.new( @@ -151,7 +127,7 @@ export class PublicCrossChainMessagingContractTest { this.ownerAddress = AztecAddress.fromString(crossChainContext.ownerAddress.toString()); const tokenPortalAddress = EthAddress.fromString(crossChainContext.tokenPortal.toString()); - const { publicClient, walletClient } = this.viemStuff(this.aztecNodeConfig.rpcUrl); + const { publicClient, walletClient } = createL1Clients(this.aztecNodeConfig.rpcUrl, MNEMONIC); const inbox = getContract({ address: this.aztecNodeConfig.l1Contracts.inboxAddress.toString(), diff --git a/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts b/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts index 6325ead8df74..5e7c977c8458 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts @@ -13,9 +13,10 @@ import { import { DocsExampleContract, TokenContract } from '@aztec/noir-contracts.js'; import { - SnapshotManager, + type ISnapshotManager, type SubsystemsContext, addAccounts, + createSnapshotManager, publicDeployAccounts, } from '../fixtures/snapshot_manager.js'; import { TokenSimulator } from '../simulators/token_simulator.js'; @@ -26,7 +27,7 @@ export class TokenContractTest { static TOKEN_NAME = 'Aztec Token'; static TOKEN_SYMBOL = 'AZT'; static TOKEN_DECIMALS = 18n; - private snapshotManager: SnapshotManager; + private snapshotManager: ISnapshotManager; logger: DebugLogger; wallets: 
AccountWallet[] = []; accounts: CompleteAddress[] = []; @@ -36,7 +37,7 @@ export class TokenContractTest { constructor(testName: string) { this.logger = createDebugLogger(`aztec:e2e_token_contract:${testName}`); - this.snapshotManager = new SnapshotManager(`e2e_token_contract/${testName}`, dataPath); + this.snapshotManager = createSnapshotManager(`e2e_token_contract/${testName}`, dataPath); } /** diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 9fe93aafd776..2f1e67405272 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -12,7 +12,7 @@ import { } from '@aztec/aztec.js'; import { deployInstance, registerContractClass } from '@aztec/aztec.js/deployment'; import { asyncMap } from '@aztec/foundation/async-map'; -import { createDebugLogger } from '@aztec/foundation/log'; +import { type Logger, createDebugLogger } from '@aztec/foundation/log'; import { makeBackoff, retry } from '@aztec/foundation/retry'; import { resolver, reviver } from '@aztec/foundation/serialize'; import { type PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; @@ -43,15 +43,30 @@ type SnapshotEntry = { snapshotPath: string; }; -export class SnapshotManager { - private snapshotStack: SnapshotEntry[] = []; +export function createSnapshotManager(testName: string, dataPath?: string) { + return dataPath ? new SnapshotManager(testName, dataPath) : new MockSnapshotManager(testName); +} + +export interface ISnapshotManager { + snapshot( + name: string, + apply: (context: SubsystemsContext) => Promise, + restore?: (snapshotData: T, context: SubsystemsContext) => Promise, + ): Promise; + + setup(): Promise; + + teardown(): Promise; +} + +/** Snapshot manager that does not perform snapshotting, it just applies transition and restoration functions as it receives them. 
*/ +class MockSnapshotManager implements ISnapshotManager { private context?: SubsystemsContext; - private livePath: string; private logger: DebugLogger; - constructor(testName: string, private dataPath?: string) { - this.livePath = this.dataPath ? join(this.dataPath, 'live', testName) : ''; + constructor(testName: string) { this.logger = createDebugLogger(`aztec:snapshot_manager:${testName}`); + this.logger.warn(`No data path given, will not persist any snapshots.`); } public async snapshot( @@ -59,18 +74,49 @@ export class SnapshotManager { apply: (context: SubsystemsContext) => Promise, restore: (snapshotData: T, context: SubsystemsContext) => Promise = () => Promise.resolve(), ) { - if (!this.dataPath) { - // We are running in disabled mode. Just apply the state. - this.logger.verbose(`No data path given, will not persist any snapshots.`); - this.context = await this.setupFromFresh(); - this.logger.verbose(`Applying state transition for ${name}...`); - const snapshotData = await apply(this.context); - this.logger.verbose(`State transition for ${name} complete.`); - // Execute the restoration function. - await restore(snapshotData, this.context); - return; + // We are running in disabled mode. Just apply the state. + const context = await this.setup(); + this.logger.verbose(`Applying state transition for ${name}...`); + const snapshotData = await apply(context); + this.logger.verbose(`State transition for ${name} complete.`); + // Execute the restoration function. + await restore(snapshotData, context); + return; + } + + public async setup() { + if (!this.context) { + this.context = await setupFromFresh(undefined, this.logger); } + return this.context; + } + + public async teardown() { + await teardown(this.context); + this.context = undefined; + } +} + +/** + * Snapshot engine for local e2e tests. 
Read more: + * https://github.com/AztecProtocol/aztec-packages/pull/5526 + */ +class SnapshotManager implements ISnapshotManager { + private snapshotStack: SnapshotEntry[] = []; + private context?: SubsystemsContext; + private livePath: string; + private logger: DebugLogger; + + constructor(testName: string, private dataPath: string) { + this.livePath = join(this.dataPath, 'live', testName); + this.logger = createDebugLogger(`aztec:snapshot_manager:${testName}`); + } + public async snapshot( + name: string, + apply: (context: SubsystemsContext) => Promise, + restore: (snapshotData: T, context: SubsystemsContext) => Promise = () => Promise.resolve(), + ) { const snapshotPath = join(this.dataPath, 'snapshots', ...this.snapshotStack.map(e => e.name), name, 'snapshot'); if (existsSync(snapshotPath)) { @@ -82,24 +128,21 @@ export class SnapshotManager { } // Snapshot didn't exist at snapshotPath, and by definition none of the child snapshots can exist. - - if (!this.context) { - // We have no subsystem context yet, create it from the top of the snapshot stack (if it exists). - this.context = await this.setup(); - } + // If we have no subsystem context yet, create it from the top of the snapshot stack (if it exists). + const context = await this.setup(); this.snapshotStack.push({ name, apply, restore, snapshotPath }); // Apply current state transition. this.logger.verbose(`Applying state transition for ${name}...`); - const snapshotData = await apply(this.context); + const snapshotData = await apply(context); this.logger.verbose(`State transition for ${name} complete.`); // Execute the restoration function. - await restore(snapshotData, this.context); + await restore(snapshotData, context); // Save the snapshot data. 
- const ethCheatCodes = new EthCheatCodes(this.context.aztecNodeConfig.rpcUrl); + const ethCheatCodes = new EthCheatCodes(context.aztecNodeConfig.rpcUrl); const anvilStateFile = `${this.livePath}/anvil.dat`; await ethCheatCodes.dumpChainState(anvilStateFile); writeFileSync(`${this.livePath}/${name}.json`, JSON.stringify(snapshotData || {}, resolver)); @@ -132,7 +175,7 @@ export class SnapshotManager { if (previousSnapshotPath) { this.logger.verbose(`Copying snapshot from ${previousSnapshotPath} to ${this.livePath}...`); copySync(previousSnapshotPath, this.livePath); - this.context = await this.setupFromState(this.livePath); + this.context = await setupFromState(this.livePath, this.logger); // Execute each of the previous snapshots restoration functions in turn. await asyncMap(this.snapshotStack, async e => { const snapshotData = JSON.parse(readFileSync(`${e.snapshotPath}/${e.name}.json`, 'utf-8'), reviver); @@ -141,7 +184,7 @@ export class SnapshotManager { this.logger.verbose(`Restoration of ${e.name} complete.`); }); } else { - this.context = await this.setupFromFresh(this.livePath); + this.context = await setupFromFresh(this.livePath, this.logger); } } return this.context; @@ -151,128 +194,135 @@ export class SnapshotManager { * Destroys the current subsystem context. */ public async teardown() { - if (!this.context) { - return; - } - await this.context.aztecNode.stop(); - await this.context.pxe.stop(); - await this.context.acvmConfig?.cleanup(); - await this.context.anvil.stop(); + await teardown(this.context); this.context = undefined; removeSync(this.livePath); } +} - /** - * Initializes a fresh set of subsystems. - * If given a statePath, the state will be written to the path. - * If there is no statePath, in-memory and temporary state locations will be used. - */ - private async setupFromFresh(statePath?: string): Promise { - this.logger.verbose(`Initializing state...`); - - // Fetch the AztecNode config. 
- // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. - const aztecNodeConfig: AztecNodeConfig = getConfigEnvVars(); - aztecNodeConfig.dataDirectory = statePath; - - // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. - this.logger.verbose('Starting anvil...'); - const anvil = await retry( - async () => { - const ethereumHostPort = await getPort(); - aztecNodeConfig.rpcUrl = `http://127.0.0.1:${ethereumHostPort}`; - const anvil = createAnvil({ anvilBinary: './scripts/anvil_kill_wrapper.sh', port: ethereumHostPort }); - await anvil.start(); - return anvil; - }, - 'Start anvil', - makeBackoff([5, 5, 5]), - ); - - // Deploy our L1 contracts. - this.logger.verbose('Deploying L1 contracts...'); - const hdAccount = mnemonicToAccount(MNEMONIC); - const privKeyRaw = hdAccount.getHdKey().privateKey; - const publisherPrivKey = privKeyRaw === null ? null : Buffer.from(privKeyRaw); - const deployL1ContractsValues = await setupL1Contracts(aztecNodeConfig.rpcUrl, hdAccount, this.logger); - aztecNodeConfig.publisherPrivateKey = `0x${publisherPrivKey!.toString('hex')}`; - aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; - aztecNodeConfig.l1BlockPublishRetryIntervalMS = 100; - - const acvmConfig = await getACVMConfig(this.logger); - if (acvmConfig) { - aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; - aztecNodeConfig.acvmBinaryPath = acvmConfig.expectedAcvmPath; - } +/** + * Destroys the current subsystem context. + */ +async function teardown(context: SubsystemsContext | undefined) { + if (!context) { + return; + } + await context.aztecNode.stop(); + await context.pxe.stop(); + await context.acvmConfig?.cleanup(); + await context.anvil.stop(); +} - this.logger.verbose('Creating and synching an aztec node...'); - const aztecNode = await AztecNodeService.createAndSync(aztecNodeConfig); +/** + * Initializes a fresh set of subsystems. 
+ * If given a statePath, the state will be written to the path. + * If there is no statePath, in-memory and temporary state locations will be used. + */ +async function setupFromFresh(statePath: string | undefined, logger: Logger): Promise { + logger.verbose(`Initializing state...`); + + // Fetch the AztecNode config. + // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. + const aztecNodeConfig: AztecNodeConfig = getConfigEnvVars(); + aztecNodeConfig.dataDirectory = statePath; + + // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. + logger.verbose('Starting anvil...'); + const anvil = await retry( + async () => { + const ethereumHostPort = await getPort(); + aztecNodeConfig.rpcUrl = `http://127.0.0.1:${ethereumHostPort}`; + const anvil = createAnvil({ anvilBinary: './scripts/anvil_kill_wrapper.sh', port: ethereumHostPort }); + await anvil.start(); + return anvil; + }, + 'Start anvil', + makeBackoff([5, 5, 5]), + ); + + // Deploy our L1 contracts. + logger.verbose('Deploying L1 contracts...'); + const hdAccount = mnemonicToAccount(MNEMONIC); + const privKeyRaw = hdAccount.getHdKey().privateKey; + const publisherPrivKey = privKeyRaw === null ? 
null : Buffer.from(privKeyRaw); + const deployL1ContractsValues = await setupL1Contracts(aztecNodeConfig.rpcUrl, hdAccount, logger); + aztecNodeConfig.publisherPrivateKey = `0x${publisherPrivKey!.toString('hex')}`; + aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; + aztecNodeConfig.l1BlockPublishRetryIntervalMS = 100; + + const acvmConfig = await getACVMConfig(logger); + if (acvmConfig) { + aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; + aztecNodeConfig.acvmBinaryPath = acvmConfig.expectedAcvmPath; + } - this.logger.verbose('Creating pxe...'); - const pxeConfig = getPXEServiceConfig(); - pxeConfig.dataDirectory = statePath; - const pxe = await createPXEService(aztecNode, pxeConfig); + logger.verbose('Creating and synching an aztec node...'); + const aztecNode = await AztecNodeService.createAndSync(aztecNodeConfig); - if (statePath) { - writeFileSync(`${statePath}/aztec_node_config.json`, JSON.stringify(aztecNodeConfig)); - } + logger.verbose('Creating pxe...'); + const pxeConfig = getPXEServiceConfig(); + pxeConfig.dataDirectory = statePath; + const pxe = await createPXEService(aztecNode, pxeConfig); - return { - aztecNodeConfig, - anvil, - aztecNode, - pxe, - acvmConfig, - }; + if (statePath) { + writeFileSync(`${statePath}/aztec_node_config.json`, JSON.stringify(aztecNodeConfig)); } - /** - * Given a statePath, setup the system starting from that state. - */ - private async setupFromState(statePath: string): Promise { - this.logger.verbose(`Initializing with saved state at ${statePath}...`); - - // Load config. - // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. - const aztecNodeConfig: AztecNodeConfig = JSON.parse( - readFileSync(`${statePath}/aztec_node_config.json`, 'utf-8'), - reviver, - ); - aztecNodeConfig.dataDirectory = statePath; - - // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. 
- const ethereumHostPort = await getPort(); - aztecNodeConfig.rpcUrl = `http://localhost:${ethereumHostPort}`; - const anvil = createAnvil({ anvilBinary: './scripts/anvil_kill_wrapper.sh', port: ethereumHostPort }); - await anvil.start(); - // Load anvil state. - const anvilStateFile = `${statePath}/anvil.dat`; - const ethCheatCodes = new EthCheatCodes(aztecNodeConfig.rpcUrl); - await ethCheatCodes.loadChainState(anvilStateFile); - - // TODO: Encapsulate this in a NativeAcvm impl. - const acvmConfig = await getACVMConfig(this.logger); - if (acvmConfig) { - aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; - aztecNodeConfig.acvmBinaryPath = acvmConfig.expectedAcvmPath; - } + return { + aztecNodeConfig, + anvil, + aztecNode, + pxe, + acvmConfig, + }; +} - this.logger.verbose('Creating aztec node...'); - const aztecNode = await AztecNodeService.createAndSync(aztecNodeConfig); - - this.logger.verbose('Creating pxe...'); - const pxeConfig = getPXEServiceConfig(); - pxeConfig.dataDirectory = statePath; - const pxe = await createPXEService(aztecNode, pxeConfig); - - return { - aztecNodeConfig, - anvil, - aztecNode, - pxe, - acvmConfig, - }; +/** + * Given a statePath, setup the system starting from that state. + */ +async function setupFromState(statePath: string, logger: Logger): Promise { + logger.verbose(`Initializing with saved state at ${statePath}...`); + + // Load config. + // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. + const aztecNodeConfig: AztecNodeConfig = JSON.parse( + readFileSync(`${statePath}/aztec_node_config.json`, 'utf-8'), + reviver, + ); + aztecNodeConfig.dataDirectory = statePath; + + // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. 
+ const ethereumHostPort = await getPort(); + aztecNodeConfig.rpcUrl = `http://localhost:${ethereumHostPort}`; + const anvil = createAnvil({ anvilBinary: './scripts/anvil_kill_wrapper.sh', port: ethereumHostPort }); + await anvil.start(); + // Load anvil state. + const anvilStateFile = `${statePath}/anvil.dat`; + const ethCheatCodes = new EthCheatCodes(aztecNodeConfig.rpcUrl); + await ethCheatCodes.loadChainState(anvilStateFile); + + // TODO: Encapsulate this in a NativeAcvm impl. + const acvmConfig = await getACVMConfig(logger); + if (acvmConfig) { + aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; + aztecNodeConfig.acvmBinaryPath = acvmConfig.expectedAcvmPath; } + + logger.verbose('Creating aztec node...'); + const aztecNode = await AztecNodeService.createAndSync(aztecNodeConfig); + + logger.verbose('Creating pxe...'); + const pxeConfig = getPXEServiceConfig(); + pxeConfig.dataDirectory = statePath; + const pxe = await createPXEService(aztecNode, pxeConfig); + + return { + aztecNodeConfig, + anvil, + aztecNode, + pxe, + acvmConfig, + }; } /** diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 9dca4d82eb1f..e1d257d8204b 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -601,8 +601,9 @@ export function getBalancesFn( symbol: string, method: ContractMethod, logger: any, -): (...addresses: AztecAddress[]) => Promise { - const balances = async (...addresses: AztecAddress[]) => { +): (...addresses: (AztecAddress | { address: AztecAddress })[]) => Promise { + const balances = async (...addressLikes: (AztecAddress | { address: AztecAddress })[]) => { + const addresses = addressLikes.map(addressLike => ('address' in addressLike ? 
addressLike.address : addressLike)); const b = await Promise.all(addresses.map(address => method(address).simulate())); const debugString = `${symbol} balances: ${addresses.map((address, i) => `${address}: ${b[i]}`).join(', ')}`; logger.verbose(debugString); @@ -624,6 +625,20 @@ export async function expectMapping( expect(outputs).toEqual(expectedOutputs); } +export async function expectMappingDelta( + initialValues: V[], + fn: (...k: K[]) => Promise, + inputs: K[], + expectedDiffs: V[], +): Promise { + expect(inputs.length).toBe(expectedDiffs.length); + + const outputs = await fn(...inputs); + const diffs = outputs.map((output, i) => output - initialValues[i]); + + expect(diffs).toEqual(expectedDiffs); +} + /** * Deploy the protocol contracts to a running instance. */ @@ -633,6 +648,8 @@ export async function deployCanonicalGasToken(deployer: Wallet) { const canonicalGasToken = getCanonicalGasToken(gasPortalAddress); if (await deployer.isContractClassPubliclyRegistered(canonicalGasToken.contractClass.id)) { + getLogger().debug('Gas token already deployed'); + await expect(deployer.isContractPubliclyDeployed(canonicalGasToken.address)).resolves.toBe(true); return; } @@ -640,6 +657,8 @@ export async function deployCanonicalGasToken(deployer: Wallet) { .send({ contractAddressSalt: canonicalGasToken.instance.salt, universalDeploy: true }) .deployed(); + getLogger().info(`Gas token publicly deployed at ${gasToken.address}`); + await expect(deployer.isContractClassPubliclyRegistered(gasToken.instance.contractClassId)).resolves.toBe(true); await expect(deployer.getContractInstance(gasToken.address)).resolves.toBeDefined(); await expect(deployer.isContractPubliclyDeployed(gasToken.address)).resolves.toBe(true); diff --git a/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts b/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts index 2f488cca319d..d17f93b4aea3 100644 --- a/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts +++ 
b/yarn-project/end-to-end/src/flakey_e2e_account_init_fees.test.ts @@ -40,7 +40,7 @@ const TOKEN_SYMBOL = 'BC'; const TOKEN_DECIMALS = 18n; const BRIDGED_FPC_GAS = BigInt(10e12); -jest.setTimeout(1000_000); +jest.setTimeout(1_000_000); describe('e2e_fees_account_init', () => { let ctx: EndToEndContext; diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 8856aa0fc94b..6cec140a9783 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -15,7 +15,8 @@ import { getContract, http, } from 'viem'; -import { type HDAccount, type PrivateKeyAccount } from 'viem/accounts'; +import { type HDAccount, type PrivateKeyAccount, mnemonicToAccount } from 'viem/accounts'; +import { foundry } from 'viem/chains'; import { type L1ContractAddresses } from './l1_contract_addresses.js'; @@ -85,6 +86,34 @@ export interface L1ContractArtifactsForDeployment { gasPortal: ContractArtifacts; } +/** + * Creates a wallet and a public viem client for interacting with L1. + * @param rpcUrl - RPC URL to connect to L1. + * @param mnemonicOrHdAccount - Mnemonic or account for the wallet client. + * @param chain - Optional chain spec (defaults to local foundry). + * @returns - A wallet and a public client. + */ +export function createL1Clients( + rpcUrl: string, + mnemonicOrHdAccount: string | HDAccount, + chain: Chain = foundry, +): { publicClient: PublicClient; walletClient: WalletClient } { + const hdAccount = + typeof mnemonicOrHdAccount === 'string' ? mnemonicToAccount(mnemonicOrHdAccount) : mnemonicOrHdAccount; + + const walletClient = createWalletClient({ + account: hdAccount, + chain, + transport: http(rpcUrl), + }); + const publicClient = createPublicClient({ + chain, + transport: http(rpcUrl), + }); + + return { walletClient, publicClient }; +} + /** * Deploys the aztec L1 contracts; Rollup, Contract Deployment Emitter & (optionally) Decoder Helper. 
* @param rpcUrl - URL of the ETH RPC to use for deployment. diff --git a/yarn-project/foundation/src/abi/function_selector.ts b/yarn-project/foundation/src/abi/function_selector.ts index d09669d7ce30..c37df24d8395 100644 --- a/yarn-project/foundation/src/abi/function_selector.ts +++ b/yarn-project/foundation/src/abi/function_selector.ts @@ -3,6 +3,7 @@ import { keccak256, randomBytes } from '../crypto/index.js'; import { type Fr } from '../fields/fields.js'; import { BufferReader } from '../serialize/buffer_reader.js'; import { FieldReader } from '../serialize/field_reader.js'; +import { TypeRegistry } from '../serialize/type_registry.js'; import { type ABIParameter } from './abi.js'; import { decodeFunctionSignature } from './decoder.js'; import { Selector } from './selector.js'; @@ -126,4 +127,14 @@ export class FunctionSelector extends Selector { static random() { return FunctionSelector.fromBuffer(randomBytes(Selector.SIZE)); } + + toJSON() { + return { + type: 'FunctionSelector', + value: this.toString(), + }; + } } + +// For deserializing JSON. 
+TypeRegistry.register('FunctionSelector', FunctionSelector); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index bcbcf85fe862..9e03c88b08e9 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -197,7 +197,7 @@ export class ProvingOrchestrator { } // we need to pad the rollup with empty transactions - logger.info( + logger.debug( `Padding rollup with ${ this.provingState.totalNumTxs - this.provingState.transactionsReceived } empty transactions`, diff --git a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts index c3e5194f0276..1f93a17f3e83 100644 --- a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts @@ -132,7 +132,7 @@ export class MemoryProvingQueue implements CircuitProver, ProvingJobSource { signal.addEventListener('abort', () => reject(new AbortedError('Operation has been aborted'))); } - this.log.info( + this.log.debug( `Adding id=${item.id} type=${ProvingRequestType[request.type]} proving job to queue depth=${this.queue.length()}`, ); // TODO (alexg) remove the `any` diff --git a/yarn-project/prover-client/src/prover-pool/prover-agent.ts b/yarn-project/prover-client/src/prover-pool/prover-agent.ts index e5ae9f156f95..401795969375 100644 --- a/yarn-project/prover-client/src/prover-pool/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-pool/prover-agent.ts @@ -40,7 +40,7 @@ export class ProverAgent { try { const [time, result] = await elapsed(() => this.work(job.request)); await queue.resolveProvingJob(job.id, result); - this.log.info( + this.log.debug( `Processed proving job id=${job.id} type=${ProvingRequestType[job.request.type]} duration=${time}ms`, ); } catch (err) { diff --git 
a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index d365f94bfbe4..c6e64085fcb8 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -160,7 +160,7 @@ export class Sequencer { if (pendingTxs.length < this.minTxsPerBLock) { return; } - this.log.info(`Retrieved ${pendingTxs.length} txs from P2P pool`); + this.log.debug(`Retrieved ${pendingTxs.length} txs from P2P pool`); const historicalHeader = (await this.l2BlockSource.getBlock(-1))?.header; const newBlockNumber = From 4b432951a9fe46ca1b0e0d38ebafe523bebf04eb Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 3 May 2024 23:53:49 +0100 Subject: [PATCH 4/9] chore(ci): migrate `protocol-circuits-gate-diff` to earthly (#6204) This workflow has quickly become our slowest workflow due to the need to build `bb` and `nargo` inside github actions. This PR migrates the `protocol-circuits-gate-diff` to earthly so we can reuse the speedy AWS build of bb/nargo we're doing anyway. 
--- .github/workflows/ci.yml | 35 ++++++ .../workflows/protocol-circuits-gate-diff.yml | 104 ------------------ noir-projects/Earthfile | 10 ++ 3 files changed, 45 insertions(+), 104 deletions(-) delete mode 100644 .github/workflows/protocol-circuits-gate-diff.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cc7fa5753ce0..1f6189ce0b1b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -348,6 +348,41 @@ jobs: working-directory: ./barretenberg/cpp/ timeout-minutes: 15 run: earthly-ci --no-output +bench-ultra-honk --bench_mode=cache + + protocol-circuits-gates-report: + needs: setup + runs-on: ${{ inputs.username || github.actor }}-x86 + steps: + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } + # Only allow one memory-hunger prover test to use this runner + - uses: ./.github/ci-setup-action + with: + dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" + concurrency_key: protocol-circuits-gates-report-${{ inputs.username || github.actor }}-x86 + - name: "Noir Protocol Circuits Report" + working-directory: ./noir-projects/ + timeout-minutes: 25 + run: | + earthly-ci --artifact +gates-report/gates_report.json + mv gates_report.json ../protocol_circuits_report.json + + - name: Compare gates reports + id: gates_diff + uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 + with: + report: protocol_circuits_report.json + summaryQuantile: 0 # Display any diff in gate count + + - name: Add gates diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + # delete the comment in case changes no longer impact circuit sizes + delete: ${{ !steps.gates_diff.outputs.markdown }} + message: ${{ steps.gates_diff.outputs.markdown }} merge-check: runs-on: ubuntu-latest diff --git a/.github/workflows/protocol-circuits-gate-diff.yml 
b/.github/workflows/protocol-circuits-gate-diff.yml deleted file mode 100644 index b31e371d4716..000000000000 --- a/.github/workflows/protocol-circuits-gate-diff.yml +++ /dev/null @@ -1,104 +0,0 @@ -name: Report gates diff - -on: - push: - branches: - - master - pull_request: - -jobs: - compare_protocol_circuits_gates: - concurrency: - group: compare_protocol_circuits_gates-${{ github.ref_name == 'master' && github.run_id || github.ref_name }} - cancel-in-progress: true - if: "!startsWith(github.head_ref, 'release-please--')" - runs-on: ubuntu-20.04 - steps: - - name: Checkout Code - uses: actions/checkout@v3 - - - name: Install bleeding edge cmake - run: | - sudo apt -y remove --purge cmake - sudo snap install cmake --classic - - - name: Create Build Environment - run: | - sudo apt-get update - sudo apt-get -y install ninja-build - - - name: Install Clang16 - run: | - wget https://github.com/llvm/llvm-project/releases/download/llvmorg-16.0.0/clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04.tar.xz - tar -xvf clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04.tar.xz - sudo cp clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04/bin/* /usr/local/bin/ - sudo cp -r clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04/include/* /usr/local/include/ - sudo cp -r clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04/lib/* /usr/local/lib/ - sudo cp -r clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04/share/* /usr/local/share/ - rm -rf clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04.tar.xz clang+llvm-16.0.0-x86_64-linux-gnu-ubuntu-18.04 - - - uses: actions/cache@v4 - with: - path: | - barretenberg/cpp/build - barretenberg/cpp/build-wasm - barretenberg/cpp/build-threads - key: ${{ runner.os }}-bb-build - - - name: Compile Barretenberg - run: | - cd barretenberg/cpp - - cmake --preset default -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=RelWithAssert -DTARGET_ARCH=westmere - cmake --build --preset default --target bb - - - name: Install noirup - run: | - curl -L $INSTALL_URL | bash - echo 
"${HOME}/.nargo/bin" >> $GITHUB_PATH - env: - INSTALL_URL: https://raw.githubusercontent.com/noir-lang/noirup/main/install - NOIRUP_BIN_URL: https://raw.githubusercontent.com/noir-lang/noirup/main/noirup - - - uses: actions/cache@v4 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - key: ${{ runner.os }}-cargo - - - name: Install Barretenberg dependencies - run: sudo apt update && sudo apt install clang lld cmake libomp-dev - - - name: Install nargo from source with noirup - run: noirup $toolchain - env: - toolchain: --path ./noir/noir-repo - - - name: Check nargo installation - run: nargo --version - - - name: Generate gates report - working-directory: ./noir-projects/noir-protocol-circuits - run: | - nargo info --json --silence-warnings > protocol_circuits_report.json - mv protocol_circuits_report.json ../../protocol_circuits_report.json - env: - NARGO_BACKEND_PATH: ../../barretenberg/cpp/build/bin/bb - - - name: Compare gates reports - id: gates_diff - uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 - with: - report: protocol_circuits_report.json - summaryQuantile: 0 # Display any diff in gate count - - - name: Add gates diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - # delete the comment in case changes no longer impact circuit sizes - delete: ${{ !steps.gates_diff.outputs.markdown }} - message: ${{ steps.gates_diff.outputs.markdown }} diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 85fe5130a92a..645eddc4973f 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -29,3 +29,13 @@ test: RUN cd noir-protocol-circuits && nargo test --silence-warnings RUN cd aztec-nr && nargo test --silence-warnings RUN cd noir-contracts && nargo test --silence-warnings + +gates-report: + FROM +build + WORKDIR 
/usr/src/noir-projects/noir-protocol-circuits + + COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb + + RUN NARGO_BACKEND_PATH=/usr/src/barretenberg/cpp/build/bin/bb nargo info --json > gates_report.json + + SAVE ARTIFACT gates_report.json gates_report.json \ No newline at end of file From 3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 3 May 2024 20:24:05 -0400 Subject: [PATCH 5/9] feat: Sync from noir (#6203) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. BEGIN_COMMIT_OVERRIDE feat: Optimize array sets in if conditions (alternate version) (https://github.com/noir-lang/noir/pull/4716) chore: rename instruction checks for side effects (https://github.com/noir-lang/noir/pull/4945) chore: Switch Noir JS to use execute program instead of circuit (https://github.com/noir-lang/noir/pull/4965) fix: Use annotated type when checking declaration (https://github.com/noir-lang/noir/pull/4966) feat: handle empty response foreign calls without an external resolver (https://github.com/noir-lang/noir/pull/4959) feat: Complex outputs from acir call (https://github.com/noir-lang/noir/pull/4952) END_COMMIT_OVERRIDE --------- Co-authored-by: TomAFrench Co-authored-by: vezenovm Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .noir-sync-commit | 2 +- .../src/brillig/brillig_gen/brillig_block.rs | 5 +- .../compiler/noirc_evaluator/src/ssa.rs | 1 + .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 14 +- .../noirc_evaluator/src/ssa/ir/dfg.rs | 23 +- .../noirc_evaluator/src/ssa/ir/instruction.rs | 144 +++++++++-- .../src/ssa/ir/instruction/call.rs | 4 +- .../noirc_evaluator/src/ssa/ir/printer.rs | 12 +- .../src/ssa/opt/constant_folding.rs | 11 +- .../noirc_evaluator/src/ssa/opt/die.rs | 8 +- .../src/ssa/opt/flatten_cfg.rs | 95 ++----- 
.../ssa/opt/flatten_cfg/capacity_tracker.rs | 4 +- .../src/ssa/opt/flatten_cfg/value_merger.rs | 244 +++++++++++++++--- .../noirc_evaluator/src/ssa/opt/mod.rs | 1 + .../src/ssa/opt/remove_enable_side_effects.rs | 1 + .../src/ssa/opt/remove_if_else.rs | 236 +++++++++++++++++ .../noirc_frontend/src/hir/type_check/stmt.rs | 4 +- .../fold_complex_outputs/Nargo.toml | 7 + .../fold_complex_outputs/Prover.toml | 2 + .../fold_complex_outputs/src/main.nr | 72 ++++++ .../nested_array_dynamic_simple/Nargo.toml | 7 + .../nested_array_dynamic_simple/Prover.toml | 1 + .../nested_array_dynamic_simple/src/main.nr | 9 + .../slice_init_with_complex_type/Nargo.toml | 7 + .../slice_init_with_complex_type}/Prover.toml | 0 .../slice_init_with_complex_type/src/main.nr | 17 ++ .../brillig_overflow_checks/Prover.toml | 0 .../field_comparisons/Prover.toml | 0 .../ignored_oracle/Nargo.toml | 7 + .../ignored_oracle/src/main.nr | 23 ++ .../noir_test_success/mock_oracle/Prover.toml | 0 .../out_of_bounds_alignment/Prover.toml | 0 .../should_fail_with_matches/Prover.toml | 0 .../tooling/debugger/ignored-tests.txt | 2 + .../tooling/nargo/src/ops/foreign_calls.rs | 68 ++--- .../noir_js/scripts/compile_test_programs.sh | 1 + noir/noir-repo/tooling/noir_js/src/program.ts | 9 +- .../tooling/noir_js/src/witness_generation.ts | 8 +- .../tooling/noir_js/test/node/e2e.test.ts | 23 ++ .../tooling/noir_js/test/node/execute.test.ts | 44 ++++ .../fold_fibonacci/Nargo.toml | 7 + .../fold_fibonacci/src/main.nr | 12 + 42 files changed, 944 insertions(+), 191 deletions(-) create mode 100644 noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs create mode 100644 noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Prover.toml create mode 100644 noir/noir-repo/test_programs/execution_success/fold_complex_outputs/src/main.nr create mode 100644 
noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Prover.toml create mode 100644 noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/src/main.nr create mode 100644 noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/Nargo.toml rename noir/noir-repo/test_programs/{noir_test_success/bounded_vec => execution_success/slice_init_with_complex_type}/Prover.toml (100%) create mode 100644 noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/src/main.nr delete mode 100644 noir/noir-repo/test_programs/noir_test_success/brillig_overflow_checks/Prover.toml delete mode 100644 noir/noir-repo/test_programs/noir_test_success/field_comparisons/Prover.toml create mode 100644 noir/noir-repo/test_programs/noir_test_success/ignored_oracle/Nargo.toml create mode 100644 noir/noir-repo/test_programs/noir_test_success/ignored_oracle/src/main.nr delete mode 100644 noir/noir-repo/test_programs/noir_test_success/mock_oracle/Prover.toml delete mode 100644 noir/noir-repo/test_programs/noir_test_success/out_of_bounds_alignment/Prover.toml delete mode 100644 noir/noir-repo/test_programs/noir_test_success/should_fail_with_matches/Prover.toml create mode 100644 noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/Nargo.toml create mode 100644 noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/src/main.nr diff --git a/.noir-sync-commit b/.noir-sync-commit index c7d7dc99a114..caa81d0f7be8 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -d4c68066ab35ce1c52510cf0c038fb627a0677c3 +a87c655c6c8c077c71e3372cc9181b7870348a3d diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 2c9d43dc9196..873ebe51e6f0 100644 --- 
a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -619,7 +619,7 @@ impl<'block> BrilligBlock<'block> { destination_variable, ); } - Instruction::ArraySet { array, index, value, .. } => { + Instruction::ArraySet { array, index, value, mutable: _ } => { let source_variable = self.convert_ssa_value(*array, dfg); let index_register = self.convert_ssa_single_addr_value(*index, dfg); let value_variable = self.convert_ssa_value(*value, dfg); @@ -700,6 +700,9 @@ impl<'block> BrilligBlock<'block> { Instruction::EnableSideEffects { .. } => { todo!("enable_side_effects not supported by brillig") } + Instruction::IfElse { .. } => { + unreachable!("IfElse instructions should not be possible in brillig") + } }; let dead_variables = self diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 7d571a2c3bcf..e844bc30354f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -69,6 +69,7 @@ pub(crate) fn optimize_into_acir( // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. 
.run_pass(Ssa::inline_functions_with_no_predicates, "After Inlining:") + .run_pass(Ssa::remove_if_else, "After Remove IfElse:") .run_pass(Ssa::fold_constants, "After Constant Folding:") .run_pass(Ssa::remove_enable_side_effects, "After EnableSideEffects removal:") .run_pass(Ssa::fold_constants_using_constraints, "After Constraint Folding:") diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 9ea5c5e4a963..9a4d4be1145d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -705,6 +705,9 @@ impl<'a> Context<'a> { assert_message.clone(), )?; } + Instruction::IfElse { .. } => { + unreachable!("IfElse instruction remaining in acir-gen") + } } self.acir_context.set_call_stack(CallStack::new()); @@ -732,11 +735,10 @@ impl<'a> Context<'a> { assert!(!matches!(inline_type, InlineType::Inline), "ICE: Got an ACIR function named {} that should have already been inlined", func.name()); let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); - // TODO(https://github.com/noir-lang/noir/issues/4608): handle complex return types from ACIR functions - let output_count = - result_ids.iter().fold(0usize, |sum, result_id| { - sum + dfg.try_get_array_length(*result_id).unwrap_or(1) - }); + let output_count = result_ids + .iter() + .map(|result_id| dfg.type_of_value(*result_id).flattened_size()) + .sum(); let acir_function_id = ssa .entry_point_to_generated_index @@ -748,6 +750,7 @@ impl<'a> Context<'a> { output_count, self.current_side_effects_enabled_var, )?; + let output_values = self.convert_vars_to_values(output_vars, dfg, result_ids); @@ -1028,6 +1031,7 @@ impl<'a> Context<'a> { }); } }; + if self.acir_context.is_constant_one(&self.current_side_effects_enabled_var) { // Report the error if side effects are enabled. 
if index >= array_size { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 6b950c327cf2..85630b756149 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -554,7 +554,10 @@ impl<'dfg> InsertInstructionResult<'dfg> { match self { InsertInstructionResult::SimplifiedTo(value) => *value, InsertInstructionResult::SimplifiedToMultiple(values) => values[0], - InsertInstructionResult::Results(_, results) => results[0], + InsertInstructionResult::Results(_, results) => { + assert_eq!(results.len(), 1); + results[0] + } InsertInstructionResult::InstructionRemoved => { panic!("Instruction was removed, no results") } @@ -583,6 +586,24 @@ impl<'dfg> InsertInstructionResult<'dfg> { } } +impl<'dfg> std::ops::Index for InsertInstructionResult<'dfg> { + type Output = ValueId; + + fn index(&self, index: usize) -> &Self::Output { + match self { + InsertInstructionResult::Results(_, results) => &results[index], + InsertInstructionResult::SimplifiedTo(result) => { + assert_eq!(index, 0); + result + } + InsertInstructionResult::SimplifiedToMultiple(results) => &results[index], + InsertInstructionResult::InstructionRemoved => { + panic!("Cannot index into InsertInstructionResult::InstructionRemoved") + } + } + } +} + #[cfg(test)] mod tests { use super::DataFlowGraph; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 3084899f4552..7cc19e9f2b8d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -11,6 +11,8 @@ use fxhash::FxHasher; use iter_extended::vecmap; use noirc_frontend::hir_def::types::Type as HirType; +use crate::ssa::opt::flatten_cfg::value_merger::ValueMerger; + use super::{ basic_block::BasicBlockId, 
dfg::{CallStack, DataFlowGraph}, @@ -216,6 +218,14 @@ pub(crate) enum Instruction { /// implemented via reference counting. In ACIR code this is done with im::Vector and these /// DecrementRc instructions are ignored. DecrementRc { value: ValueId }, + + /// Merge two values returned from opposite branches of a conditional into one. + IfElse { + then_condition: ValueId, + then_value: ValueId, + else_condition: ValueId, + else_value: ValueId, + }, } impl Instruction { @@ -229,10 +239,12 @@ impl Instruction { match self { Instruction::Binary(binary) => binary.result_type(), Instruction::Cast(_, typ) => InstructionResultType::Known(typ.clone()), - Instruction::Not(value) | Instruction::Truncate { value, .. } => { + Instruction::Not(value) + | Instruction::Truncate { value, .. } + | Instruction::ArraySet { array: value, .. } + | Instruction::IfElse { then_value: value, .. } => { InstructionResultType::Operand(*value) } - Instruction::ArraySet { array, .. } => InstructionResultType::Operand(*array), Instruction::Constrain(..) | Instruction::Store { .. } | Instruction::IncrementRc { .. } @@ -252,20 +264,11 @@ impl Instruction { matches!(self.result_type(), InstructionResultType::Unknown) } - /// Pure `Instructions` are instructions which have no side-effects and results are a function of the inputs only, - /// i.e. there are no interactions with memory. - /// - /// Pure instructions can be replaced with the results of another pure instruction with the same inputs. - pub(crate) fn is_pure(&self, dfg: &DataFlowGraph) -> bool { + /// Indicates if the instruction can be safely replaced with the results of another instruction with the same inputs. + pub(crate) fn can_be_deduplicated(&self, dfg: &DataFlowGraph) -> bool { use Instruction::*; match self { - Binary(bin) => { - // In ACIR, a division with a false predicate outputs (0,0), so it cannot replace another instruction unless they have the same predicate - bin.operator != BinaryOp::Div - } - Cast(_, _) | Truncate { .. 
} | Not(_) => true, - // These either have side-effects or interact with memory Constrain(..) | EnableSideEffects { .. } @@ -276,31 +279,37 @@ impl Instruction { | DecrementRc { .. } | RangeCheck { .. } => false, - // These can have different behavior depending on the EnableSideEffectsIf context. - // Enabling constant folding for these potentially enables replacing an enabled - // array get with one that was disabled. See - // https://github.com/noir-lang/noir/pull/4716#issuecomment-2047846328. - ArrayGet { .. } | ArraySet { .. } => false, - Call { func, .. } => match dfg[*func] { Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), _ => false, }, + + // These can have different behavior depending on the EnableSideEffectsIf context. + // Replacing them with a similar instruction potentially enables replacing an instruction + // with one that was disabled. See + // https://github.com/noir-lang/noir/pull/4716#issuecomment-2047846328. + Binary(_) + | Cast(_, _) + | Not(_) + | Truncate { .. } + | IfElse { .. } + | ArrayGet { .. } + | ArraySet { .. } => !self.requires_acir_gen_predicate(dfg), } } - pub(crate) fn has_side_effects(&self, dfg: &DataFlowGraph) -> bool { + pub(crate) fn can_eliminate_if_unused(&self, dfg: &DataFlowGraph) -> bool { use Instruction::*; match self { Binary(binary) => { if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) { if let Some(rhs) = dfg.get_numeric_constant(binary.rhs) { - rhs == FieldElement::zero() + rhs != FieldElement::zero() } else { - true + false } } else { - false + true } } Cast(_, _) @@ -309,32 +318,67 @@ impl Instruction { | Allocate | Load { .. } | ArrayGet { .. } - | ArraySet { .. } => false, + | IfElse { .. } + | ArraySet { .. } => true, Constrain(..) | Store { .. } | EnableSideEffects { .. } | IncrementRc { .. } | DecrementRc { .. } - | RangeCheck { .. } => true, + | RangeCheck { .. } => false, // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. Call { func, .. 
} => match dfg[*func] { - Value::Intrinsic(intrinsic) => intrinsic.has_side_effects(), + Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), // All foreign functions are treated as having side effects. // This is because they can be used to pass information // from the ACVM to the external world during execution. - Value::ForeignFunction(_) => true, + Value::ForeignFunction(_) => false, // We must assume that functions contain a side effect as we cannot inspect more deeply. - Value::Function(_) => true, + Value::Function(_) => false, _ => false, }, } } + /// If true the instruction will depends on enable_side_effects context during acir-gen + fn requires_acir_gen_predicate(&self, dfg: &DataFlowGraph) -> bool { + match self { + Instruction::Binary(binary) + if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) => + { + true + } + Instruction::EnableSideEffects { .. } + | Instruction::ArrayGet { .. } + | Instruction::ArraySet { .. } => true, + + Instruction::Call { func, .. } => match dfg[*func] { + Value::Function(_) => true, + Value::Intrinsic(intrinsic) => { + matches!(intrinsic, Intrinsic::SliceInsert | Intrinsic::SliceRemove) + } + _ => false, + }, + Instruction::Cast(_, _) + | Instruction::Binary(_) + | Instruction::Not(_) + | Instruction::Truncate { .. } + | Instruction::Constrain(_, _, _) + | Instruction::RangeCheck { .. } + | Instruction::Allocate + | Instruction::Load { .. } + | Instruction::Store { .. } + | Instruction::IfElse { .. } + | Instruction::IncrementRc { .. } + | Instruction::DecrementRc { .. } => false, + } + } + /// Maps each ValueId inside this instruction to a new ValueId, returning the new instruction. /// Note that the returned instruction is fresh and will not have an assigned InstructionId /// until it is manually inserted in a DataFlowGraph later. 
@@ -397,6 +441,14 @@ impl Instruction { assert_message: assert_message.clone(), } } + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + Instruction::IfElse { + then_condition: f(*then_condition), + then_value: f(*then_value), + else_condition: f(*else_condition), + else_value: f(*else_value), + } + } } } @@ -451,6 +503,12 @@ impl Instruction { | Instruction::RangeCheck { value, .. } => { f(*value); } + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + f(*then_condition); + f(*then_value); + f(*else_condition); + f(*else_value); + } } } @@ -602,6 +660,36 @@ impl Instruction { None } } + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + let typ = dfg.type_of_value(*then_value); + + if let Some(constant) = dfg.get_numeric_constant(*then_condition) { + if constant.is_one() { + return SimplifiedTo(*then_value); + } else if constant.is_zero() { + return SimplifiedTo(*else_value); + } + } + + if matches!(&typ, Type::Numeric(_)) { + let then_condition = *then_condition; + let then_value = *then_value; + let else_condition = *else_condition; + let else_value = *else_value; + + let result = ValueMerger::merge_numeric_values( + dfg, + block, + then_condition, + else_condition, + then_value, + else_value, + ); + SimplifiedTo(result) + } else { + None + } + } } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index a8365ffef39b..3d7cb478f64c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -354,7 +354,9 @@ fn simplify_slice_push_back( slice_sizes.insert(set_last_slice_value, slice_size / element_size); slice_sizes.insert(new_slice, slice_size / element_size); - let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes); + let unknown = 
&mut HashMap::default(); + let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes, unknown, None); + let new_slice = value_merger.merge_values( len_not_equals_capacity, len_equals_capacity, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 3e924985185b..58c593b0ad63 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -181,7 +181,7 @@ fn display_instruction_inner( let index = show(*index); let value = show(*value); let mutable = if *mutable { " mut" } else { "" }; - writeln!(f, "array_set{mutable} {array}, index {index}, value {value}",) + writeln!(f, "array_set{mutable} {array}, index {index}, value {value}") } Instruction::IncrementRc { value } => { writeln!(f, "inc_rc {}", show(*value)) @@ -192,6 +192,16 @@ fn display_instruction_inner( Instruction::RangeCheck { value, max_bit_size, .. } => { writeln!(f, "range_check {} to {} bits", show(*value), *max_bit_size,) } + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + let then_condition = show(*then_condition); + let then_value = show(*then_value); + let else_condition = show(*else_condition); + let else_value = show(*else_value); + writeln!( + f, + "if {then_condition} then {then_value} else if {else_condition} then {else_value}" + ) + } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 5a7134f34864..ac2f64243328 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -6,9 +6,8 @@ //! by the [`DataFlowGraph`] automatically as new instructions are pushed. //! - Check whether any input values have been constrained to be equal to a value of a simpler form //! 
by a [constrain instruction][Instruction::Constrain]. If so, replace the input value with the simpler form. -//! - Check whether the instruction is [pure][Instruction::is_pure()] -//! and there exists a duplicate instruction earlier in the same block. -//! If so, the instruction can be replaced with the results of this previous instruction. +//! - Check whether the instruction [can_be_replaced][Instruction::can_be_replaced()] +//! by duplicate instruction earlier in the same block. //! //! These operations are done in parallel so that they can each benefit from each other //! without the need for multiple passes. @@ -257,9 +256,9 @@ impl Context { } } - // If the instruction doesn't have side-effects, cache the results so we can reuse them if - // the same instruction appears again later in the block. - if instruction.is_pure(dfg) { + // If the instruction doesn't have side-effects and if it won't interact with enable_side_effects during acir_gen, + // we cache the results so we can reuse them if the same instruction appears again later in the block. + if instruction.can_be_deduplicated(dfg) { instruction_result_cache.insert(instruction, instruction_results); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs index d1b3e1e83f50..d045762f9e9f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -108,12 +108,12 @@ impl Context { fn is_unused(&self, instruction_id: InstructionId, function: &Function) -> bool { let instruction = &function.dfg[instruction_id]; - if instruction.has_side_effects(&function.dfg) { - // If the instruction has side effects we should never remove it. 
- false - } else { + if instruction.can_eliminate_if_unused(&function.dfg) { let results = function.dfg.instruction_results(instruction_id); results.iter().all(|result| !self.used_values.contains(result)) + } else { + // If the instruction has side effects we should never remove it. + false } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 07771397ce8c..0f8b49b40ec9 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -155,9 +155,6 @@ mod branch_analysis; mod capacity_tracker; pub(crate) mod value_merger; -use capacity_tracker::SliceCapacityTracker; -use value_merger::ValueMerger; - impl Ssa { /// Flattens the control flow graph of main such that the function is left with a /// single block containing all instructions and no more control-flow. @@ -311,18 +308,6 @@ impl<'f> Context<'f> { if self.inserter.function.entry_block() == block { // we do not inline the entry block into itself // for the outer block before we start inlining - let outer_block_instructions = self.inserter.function.dfg[block].instructions(); - let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - for instruction in outer_block_instructions { - let results = self.inserter.function.dfg.instruction_results(*instruction); - let instruction = &self.inserter.function.dfg[*instruction]; - capacity_tracker.collect_slice_information( - instruction, - &mut self.slice_sizes, - results.to_vec(), - ); - } - return; } @@ -333,14 +318,7 @@ impl<'f> Context<'f> { // unnecessary, when removing it actually causes an aliasing/mutability error. 
let instructions = self.inserter.function.dfg[block].instructions().to_vec(); for instruction in instructions.iter() { - let results = self.push_instruction(*instruction); - let (instruction, _) = self.inserter.map_instruction(*instruction); - let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - capacity_tracker.collect_slice_information( - &instruction, - &mut self.slice_sizes, - results, - ); + self.push_instruction(*instruction); } } @@ -543,24 +521,19 @@ impl<'f> Context<'f> { let block = self.inserter.function.entry_block(); - // Make sure we have tracked the slice capacities of any block arguments - let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - for (then_arg, else_arg) in args.iter() { - capacity_tracker.compute_slice_capacity(*then_arg, &mut self.slice_sizes); - capacity_tracker.compute_slice_capacity(*else_arg, &mut self.slice_sizes); - } - - let mut value_merger = - ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes); - // Cannot include this in the previous vecmap since it requires exclusive access to self let args = vecmap(args, |(then_arg, else_arg)| { - value_merger.merge_values( - cond_context.then_branch.condition, - cond_context.else_branch.clone().unwrap().condition, - then_arg, - else_arg, - ) + let instruction = Instruction::IfElse { + then_condition: cond_context.then_branch.condition, + then_value: then_arg, + else_condition: cond_context.else_branch.as_ref().unwrap().condition, + else_value: else_arg, + }; + self.inserter + .function + .dfg + .insert_instruction_and_results(instruction, block, None, CallStack::new()) + .first() }); self.merge_stores(cond_context.then_branch, cond_context.else_branch); @@ -643,15 +616,6 @@ impl<'f> Context<'f> { } } - // Most slice information is collected when instructions are inlined. - // We need to collect information on slice values here as we may possibly merge stores - // before any inlining occurs. 
- let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - for (then_case, else_case, _) in new_map.values() { - capacity_tracker.compute_slice_capacity(*then_case, &mut self.slice_sizes); - capacity_tracker.compute_slice_capacity(*else_case, &mut self.slice_sizes); - } - let then_condition = then_branch.condition; let else_condition = if let Some(branch) = else_branch { branch.condition @@ -660,13 +624,22 @@ impl<'f> Context<'f> { }; let block = self.inserter.function.entry_block(); - let mut value_merger = - ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes); // Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does let mut new_values = HashMap::default(); for (address, (then_case, else_case, _)) in &new_map { - let value = - value_merger.merge_values(then_condition, else_condition, *then_case, *else_case); + let instruction = Instruction::IfElse { + then_condition, + then_value: *then_case, + else_condition, + else_value: *else_case, + }; + let value = self + .inserter + .function + .dfg + .insert_instruction_and_results(instruction, block, None, CallStack::new()) + .first(); + new_values.insert(address, value); } @@ -683,16 +656,6 @@ impl<'f> Context<'f> { .insert(address, Store { old_value: *old_value, new_value: value }); } } - - // Collect any potential slice information on the stores we are merging - for (address, (_, _, _)) in &new_map { - let value = new_values[address]; - let address = *address; - let instruction = Instruction::Store { address, value }; - - let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - capacity_tracker.collect_slice_information(&instruction, &mut self.slice_sizes, vec![]); - } } fn remember_store(&mut self, address: ValueId, new_value: ValueId) { @@ -706,14 +669,6 @@ impl<'f> Context<'f> { let old_value = self.insert_instruction_with_typevars(load.clone(), load_type).first(); - // Need this or else we 
will be missing a the previous value of a slice that we wish to merge - let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - capacity_tracker.collect_slice_information( - &load, - &mut self.slice_sizes, - vec![old_value], - ); - self.store_values.insert(address, Store { old_value, new_value }); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index 93e525422780..4fc19acd2ac8 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -19,10 +19,10 @@ impl<'a> SliceCapacityTracker<'a> { /// Determine how the slice sizes map needs to be updated according to the provided instruction. pub(crate) fn collect_slice_information( - &mut self, + &self, instruction: &Instruction, slice_sizes: &mut HashMap, - results: Vec, + results: &[ValueId], ) { match instruction { Instruction::ArrayGet { array, .. 
} => { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 0a351148fa38..c47d594545c8 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -1,20 +1,25 @@ use acvm::FieldElement; -use fxhash::FxHashMap as HashMap; +use fxhash::{FxHashMap as HashMap, FxHashSet}; use crate::ssa::ir::{ basic_block::BasicBlockId, - dfg::{CallStack, DataFlowGraph}, + dfg::{CallStack, DataFlowGraph, InsertInstructionResult}, instruction::{BinaryOp, Instruction}, types::Type, - value::ValueId, + value::{Value, ValueId}, }; pub(crate) struct ValueMerger<'a> { dfg: &'a mut DataFlowGraph, block: BasicBlockId, + + current_condition: Option, + // Maps SSA array values with a slice type to their size. // This must be computed before merging values. slice_sizes: &'a mut HashMap, + + array_set_conditionals: &'a mut HashMap, } impl<'a> ValueMerger<'a> { @@ -22,8 +27,10 @@ impl<'a> ValueMerger<'a> { dfg: &'a mut DataFlowGraph, block: BasicBlockId, slice_sizes: &'a mut HashMap, + array_set_conditionals: &'a mut HashMap, + current_condition: Option, ) -> Self { - ValueMerger { dfg, block, slice_sizes } + ValueMerger { dfg, block, slice_sizes, array_set_conditionals, current_condition } } /// Merge two values a and b from separate basic blocks to a single value. 
@@ -42,9 +49,14 @@ impl<'a> ValueMerger<'a> { else_value: ValueId, ) -> ValueId { match self.dfg.type_of_value(then_value) { - Type::Numeric(_) => { - self.merge_numeric_values(then_condition, else_condition, then_value, else_value) - } + Type::Numeric(_) => Self::merge_numeric_values( + self.dfg, + self.block, + then_condition, + else_condition, + then_value, + else_value, + ), typ @ Type::Array(_, _) => { self.merge_array_values(typ, then_condition, else_condition, then_value, else_value) } @@ -59,58 +71,57 @@ impl<'a> ValueMerger<'a> { /// Merge two numeric values a and b from separate basic blocks to a single value. This /// function would return the result of `if c { a } else { b }` as `c*a + (!c)*b`. pub(crate) fn merge_numeric_values( - &mut self, + dfg: &mut DataFlowGraph, + block: BasicBlockId, then_condition: ValueId, else_condition: ValueId, then_value: ValueId, else_value: ValueId, ) -> ValueId { - let then_type = self.dfg.type_of_value(then_value); - let else_type = self.dfg.type_of_value(else_value); + let then_type = dfg.type_of_value(then_value); + let else_type = dfg.type_of_value(else_value); assert_eq!( then_type, else_type, "Expected values merged to be of the same type but found {then_type} and {else_type}" ); - let then_call_stack = self.dfg.get_value_call_stack(then_value); - let else_call_stack = self.dfg.get_value_call_stack(else_value); + if then_value == else_value { + return then_value; + } + + let then_call_stack = dfg.get_value_call_stack(then_value); + let else_call_stack = dfg.get_value_call_stack(else_value); let call_stack = if then_call_stack.is_empty() { else_call_stack } else { then_call_stack }; // We must cast the bool conditions to the actual numeric type used by each value. 
- let then_condition = self - .dfg + let then_condition = dfg .insert_instruction_and_results( Instruction::Cast(then_condition, then_type), - self.block, + block, None, call_stack.clone(), ) .first(); - let else_condition = self - .dfg + let else_condition = dfg .insert_instruction_and_results( Instruction::Cast(else_condition, else_type), - self.block, + block, None, call_stack.clone(), ) .first(); let mul = Instruction::binary(BinaryOp::Mul, then_condition, then_value); - let then_value = self - .dfg - .insert_instruction_and_results(mul, self.block, None, call_stack.clone()) - .first(); + let then_value = + dfg.insert_instruction_and_results(mul, block, None, call_stack.clone()).first(); let mul = Instruction::binary(BinaryOp::Mul, else_condition, else_value); - let else_value = self - .dfg - .insert_instruction_and_results(mul, self.block, None, call_stack.clone()) - .first(); + let else_value = + dfg.insert_instruction_and_results(mul, block, None, call_stack.clone()).first(); let add = Instruction::binary(BinaryOp::Add, then_value, else_value); - self.dfg.insert_instruction_and_results(add, self.block, None, call_stack).first() + dfg.insert_instruction_and_results(add, block, None, call_stack).first() } /// Given an if expression that returns an array: `if c { array1 } else { array2 }`, @@ -131,6 +142,18 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected array type"), }; + let actual_length = len * element_types.len(); + + if let Some(result) = self.try_merge_only_changed_indices( + then_condition, + else_condition, + then_value, + else_value, + actual_length, + ) { + return result; + } + for i in 0..len { for (element_index, element_type) in element_types.iter().enumerate() { let index = ((i * element_types.len() + element_index) as u128).into(); @@ -175,12 +198,18 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected slice type"), }; - let then_len = *self.slice_sizes.get(&then_value_id).unwrap_or_else(|| { - panic!("ICE: Merging values during flattening 
encountered slice {then_value_id} without a preset size"); + let then_len = self.slice_sizes.get(&then_value_id).copied().unwrap_or_else(|| { + let (slice, typ) = self.dfg.get_array_constant(then_value_id).unwrap_or_else(|| { + panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size"); + }); + slice.len() / typ.element_types().len() }); - let else_len = *self.slice_sizes.get(&else_value_id).unwrap_or_else(|| { - panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); + let else_len = self.slice_sizes.get(&else_value_id).copied().unwrap_or_else(|| { + let (slice, typ) = self.dfg.get_array_constant(else_value_id).unwrap_or_else(|| { + panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); + }); + slice.len() / typ.element_types().len() }); let len = then_len.max(else_len); @@ -260,4 +289,157 @@ impl<'a> ValueMerger<'a> { } } } + + fn try_merge_only_changed_indices( + &mut self, + then_condition: ValueId, + else_condition: ValueId, + then_value: ValueId, + else_value: ValueId, + array_length: usize, + ) -> Option { + let mut found = false; + let current_condition = self.current_condition?; + + let mut current_then = then_value; + let mut current_else = else_value; + + // Arbitrarily limit this to looking at at most 10 past ArraySet operations. + // If there are more than that, we assume 2 completely separate arrays are being merged. + let max_iters = 1; + let mut seen_then = Vec::with_capacity(max_iters); + let mut seen_else = Vec::with_capacity(max_iters); + + // We essentially have a tree of ArraySets and want to find a common + // ancestor if it exists, alone with the path to it from each starting node. + // This path will be the indices that were changed to create each result array. 
+ for _ in 0..max_iters { + if current_then == else_value { + seen_else.clear(); + found = true; + break; + } + + if current_else == then_value { + seen_then.clear(); + found = true; + break; + } + + if let Some(index) = seen_then.iter().position(|(elem, _, _, _)| *elem == current_else) + { + seen_else.truncate(index); + found = true; + break; + } + + if let Some(index) = seen_else.iter().position(|(elem, _, _, _)| *elem == current_then) + { + seen_then.truncate(index); + found = true; + break; + } + + current_then = self.find_previous_array_set(current_then, &mut seen_then); + current_else = self.find_previous_array_set(current_else, &mut seen_else); + } + + let changed_indices: FxHashSet<_> = seen_then + .into_iter() + .map(|(_, index, typ, condition)| (index, typ, condition)) + .chain(seen_else.into_iter().map(|(_, index, typ, condition)| (index, typ, condition))) + .collect(); + + if !found || changed_indices.len() >= array_length { + return None; + } + + let mut array = then_value; + + for (index, element_type, condition) in changed_indices { + let typevars = Some(vec![element_type.clone()]); + + let instruction = Instruction::EnableSideEffects { condition }; + self.insert_instruction(instruction); + + let mut get_element = |array, typevars| { + let get = Instruction::ArrayGet { array, index }; + self.dfg + .insert_instruction_and_results(get, self.block, typevars, CallStack::new()) + .first() + }; + + let then_element = get_element(then_value, typevars.clone()); + let else_element = get_element(else_value, typevars); + + let value = + self.merge_values(then_condition, else_condition, then_element, else_element); + + array = self.insert_array_set(array, index, value, Some(condition)).first(); + } + + let instruction = Instruction::EnableSideEffects { condition: current_condition }; + self.insert_instruction(instruction); + Some(array) + } + + fn insert_instruction(&mut self, instruction: Instruction) -> InsertInstructionResult { + 
self.dfg.insert_instruction_and_results(instruction, self.block, None, CallStack::new()) + } + + fn insert_array_set( + &mut self, + array: ValueId, + index: ValueId, + value: ValueId, + condition: Option, + ) -> InsertInstructionResult { + let instruction = Instruction::ArraySet { array, index, value, mutable: false }; + let result = self.dfg.insert_instruction_and_results( + instruction, + self.block, + None, + CallStack::new(), + ); + + if let Some(condition) = condition { + let result_index = if result.len() == 1 { + 0 + } else { + // Slices return (length, slice) + assert_eq!(result.len(), 2); + 1 + }; + + let result_value = result[result_index]; + self.array_set_conditionals.insert(result_value, condition); + } + + result + } + + fn find_previous_array_set( + &self, + result: ValueId, + changed_indices: &mut Vec<(ValueId, ValueId, Type, ValueId)>, + ) -> ValueId { + match &self.dfg[result] { + Value::Instruction { instruction, .. } => match &self.dfg[*instruction] { + Instruction::ArraySet { array, index, value, .. 
} => { + let condition = + *self.array_set_conditionals.get(&result).unwrap_or_else(|| { + panic!( + "Expected to have conditional for array set {result}\n{:?}", + self.array_set_conditionals + ) + }); + let element_type = self.dfg.type_of_value(*value); + changed_indices.push((result, *index, element_type, condition)); + *array + } + _ => result, + }, + _ => result, + } + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 4452840a28cd..27536d59ea59 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -16,5 +16,6 @@ mod mem2reg; mod rc; mod remove_bit_shifts; mod remove_enable_side_effects; +mod remove_if_else; mod simplify_cfg; mod unrolling; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 8535dc2661fd..02b9202b209f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -125,6 +125,7 @@ impl Context { | Truncate { .. } | Constrain(..) | RangeCheck { .. } + | IfElse { .. } | IncrementRc { .. } | DecrementRc { .. 
} => false, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs new file mode 100644 index 000000000000..fc915756110d --- /dev/null +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -0,0 +1,236 @@ +use std::collections::hash_map::Entry; + +use acvm::FieldElement; +use fxhash::FxHashMap as HashMap; + +use crate::ssa::ir::value::ValueId; +use crate::ssa::{ + ir::{ + dfg::DataFlowGraph, + function::Function, + instruction::{Instruction, Intrinsic}, + types::Type, + value::Value, + }, + opt::flatten_cfg::value_merger::ValueMerger, + Ssa, +}; + +impl Ssa { + /// This pass removes `inc_rc` and `dec_rc` instructions + /// as long as there are no `array_set` instructions to an array + /// of the same type in between. + /// + /// Note that this pass is very conservative since the array_set + /// instruction does not need to be to the same array. This is because + /// the given array may alias another array (e.g. function parameters or + /// a `load`ed array from a reference). + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn remove_if_else(mut self) -> Ssa { + for function in self.functions.values_mut() { + // This should match the check in flatten_cfg + if let crate::ssa::ir::function::RuntimeType::Brillig = function.runtime() { + continue; + } + + Context::default().remove_if_else(function); + } + self + } +} + +#[derive(Default)] +struct Context { + slice_sizes: HashMap, + + // Maps array_set result -> element that was overwritten by that instruction. + // Used to undo array_sets while merging values + prev_array_set_elem_values: HashMap, + + // Maps array_set result -> enable_side_effects_if value which was active during it. 
+ array_set_conditionals: HashMap, +} + +impl Context { + fn remove_if_else(&mut self, function: &mut Function) { + let block = function.entry_block(); + let instructions = function.dfg[block].take_instructions(); + let mut current_conditional = function.dfg.make_constant(FieldElement::one(), Type::bool()); + + for instruction in instructions { + match &function.dfg[instruction] { + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + let then_condition = *then_condition; + let then_value = *then_value; + let else_condition = *else_condition; + let else_value = *else_value; + + let typ = function.dfg.type_of_value(then_value); + assert!(!matches!(typ, Type::Numeric(_))); + + let mut value_merger = ValueMerger::new( + &mut function.dfg, + block, + &mut self.slice_sizes, + &mut self.array_set_conditionals, + Some(current_conditional), + ); + + let value = value_merger.merge_values( + then_condition, + else_condition, + then_value, + else_value, + ); + + let _typ = function.dfg.type_of_value(value); + let results = function.dfg.instruction_results(instruction); + let result = results[0]; + // let result = match typ { + // Type::Array(..) => results[0], + // Type::Slice(..) => results[1], + // other => unreachable!("IfElse instructions should only have arrays or slices at this point. 
Found {other:?}"), + // }; + + function.dfg.set_value_from_id(result, value); + self.array_set_conditionals.insert(result, current_conditional); + } + Instruction::Call { func, arguments } => { + if let Value::Intrinsic(intrinsic) = function.dfg[*func] { + let results = function.dfg.instruction_results(instruction); + + match slice_capacity_change(&function.dfg, intrinsic, arguments, results) { + SizeChange::None => (), + SizeChange::SetTo(value, new_capacity) => { + self.slice_sizes.insert(value, new_capacity); + } + SizeChange::Inc { old, new } => { + let old_capacity = self.get_or_find_capacity(&function.dfg, old); + self.slice_sizes.insert(new, old_capacity + 1); + } + SizeChange::Dec { old, new } => { + let old_capacity = self.get_or_find_capacity(&function.dfg, old); + self.slice_sizes.insert(new, old_capacity - 1); + } + } + } + function.dfg[block].instructions_mut().push(instruction); + } + Instruction::ArraySet { array, .. } => { + let results = function.dfg.instruction_results(instruction); + let result = if results.len() == 2 { results[1] } else { results[0] }; + + self.array_set_conditionals.insert(result, current_conditional); + + let old_capacity = self.get_or_find_capacity(&function.dfg, *array); + self.slice_sizes.insert(result, old_capacity); + function.dfg[block].instructions_mut().push(instruction); + } + Instruction::EnableSideEffects { condition } => { + current_conditional = *condition; + function.dfg[block].instructions_mut().push(instruction); + } + _ => { + function.dfg[block].instructions_mut().push(instruction); + } + } + } + } + + fn get_or_find_capacity(&mut self, dfg: &DataFlowGraph, value: ValueId) -> usize { + match self.slice_sizes.entry(value) { + Entry::Occupied(entry) => return *entry.get(), + Entry::Vacant(entry) => { + if let Some((array, typ)) = dfg.get_array_constant(value) { + let length = array.len() / typ.element_types().len(); + return *entry.insert(length); + } + + if let Type::Array(_, length) = dfg.type_of_value(value) 
{ + return *entry.insert(length); + } + } + } + + let dbg_value = &dfg[value]; + unreachable!("No size for slice {value} = {dbg_value:?}") + } +} + +enum SizeChange { + None, + SetTo(ValueId, usize), + + // These two variants store the old and new slice ids + // not their lengths which should be old_len = new_len +/- 1 + Inc { old: ValueId, new: ValueId }, + Dec { old: ValueId, new: ValueId }, +} + +/// Find the change to a slice's capacity an instruction would have +fn slice_capacity_change( + dfg: &DataFlowGraph, + intrinsic: Intrinsic, + arguments: &[ValueId], + results: &[ValueId], +) -> SizeChange { + match intrinsic { + Intrinsic::SlicePushBack | Intrinsic::SlicePushFront | Intrinsic::SliceInsert => { + // Expecting: len, slice = ... + assert_eq!(results.len(), 2); + let old = arguments[1]; + let new = results[1]; + assert!(matches!(dfg.type_of_value(old), Type::Slice(_))); + assert!(matches!(dfg.type_of_value(new), Type::Slice(_))); + SizeChange::Inc { old, new } + } + + Intrinsic::SlicePopBack | Intrinsic::SliceRemove => { + let old = arguments[1]; + let new = results[1]; + assert!(matches!(dfg.type_of_value(old), Type::Slice(_))); + assert!(matches!(dfg.type_of_value(new), Type::Slice(_))); + SizeChange::Dec { old, new } + } + + Intrinsic::SlicePopFront => { + let old = arguments[1]; + let new = results[results.len() - 1]; + assert!(matches!(dfg.type_of_value(old), Type::Slice(_))); + assert!(matches!(dfg.type_of_value(new), Type::Slice(_))); + SizeChange::Dec { old, new } + } + + Intrinsic::ToBits(_) => { + assert_eq!(results.len(), 2); + // Some tests fail this check, returning an array instead somehow: + // assert!(matches!(dfg.type_of_value(results[1]), Type::Slice(_))); + SizeChange::SetTo(results[1], FieldElement::max_num_bits() as usize) + } + // ToRadix seems to assume it is to bytes + Intrinsic::ToRadix(_) => { + assert_eq!(results.len(), 2); + assert!(matches!(dfg.type_of_value(results[1]), Type::Slice(_))); + SizeChange::SetTo(results[1], 
FieldElement::max_num_bytes() as usize) + } + Intrinsic::AsSlice => { + assert_eq!(arguments.len(), 1); + assert_eq!(results.len(), 2); + let length = match dfg.type_of_value(arguments[0]) { + Type::Array(_, length) => length, + other => unreachable!("slice_capacity_change expected array, found {other:?}"), + }; + assert!(matches!(dfg.type_of_value(results[1]), Type::Slice(_))); + SizeChange::SetTo(results[1], length) + } + + // These cases don't affect slice capacities + Intrinsic::AssertConstant + | Intrinsic::ApplyRangeConstraint + | Intrinsic::ArrayLen + | Intrinsic::StrAsBytes + | Intrinsic::BlackBox(_) + | Intrinsic::FromField + | Intrinsic::AsField => SizeChange::None, + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/stmt.rs index f5f6e1e8180e..0760749c9e0e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -348,8 +348,10 @@ impl<'interner> TypeChecker<'interner> { if annotated_type.is_unsigned() { self.lint_overflowing_uint(&rhs_expr, &annotated_type); } + annotated_type + } else { + expr_type } - expr_type } /// Check if an assignment is overflowing with respect to `annotated_type` diff --git a/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Nargo.toml b/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Nargo.toml new file mode 100644 index 000000000000..f00c6520b4ab --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "fold_complex_outputs" +type = "bin" +authors = [""] +compiler_version = ">=0.28.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Prover.toml b/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Prover.toml new file mode 100644 
index 000000000000..a26b97d6471c --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/Prover.toml @@ -0,0 +1,2 @@ +x = "5" +y = "3" diff --git a/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/src/main.nr b/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/src/main.nr new file mode 100644 index 000000000000..309d9747598c --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/fold_complex_outputs/src/main.nr @@ -0,0 +1,72 @@ +struct MyStruct { + x: u32, + y: u32, + z: u32, + nested_struct: InnerStruct +} + +struct InnerStruct { + small_array: [u32; 2], + big_array: [u32; 5], +} + +struct ParentStruct { + basic_array: [Field; 3], + id: u32, + my_structs: [MyStruct; 2], +} + +fn main(x: u32, y: pub u32) { + let nested_struct = InnerStruct { small_array: [1 as u32; 2], big_array: [0 as u32; 5] }; + let s = MyStruct { x, y, z: x + y, nested_struct }; + let parent = ParentStruct { basic_array: [1; 3], id: 100, my_structs: [s, s] }; + let new_parent = map_fields(parent); + + // Now check that the outputs are as we expect them to be + assert(new_parent.basic_array[0] == 1); + assert(new_parent.basic_array[1] == 18); + assert(new_parent.basic_array[2] == 1); + + let struct_0 = new_parent.my_structs[0]; + assert(struct_0.x == 5); + assert(struct_0.y == 3); + assert(struct_0.z == 8); + assert(struct_0.nested_struct.small_array == nested_struct.small_array); + assert(struct_0.nested_struct.big_array == nested_struct.big_array); + + let struct_1 = new_parent.my_structs[1]; + assert(struct_1.x == 50); + assert(struct_1.y == 30); + assert(struct_1.z == 80); + assert(struct_1.nested_struct.small_array == [5, 10]); + assert(struct_1.nested_struct.big_array == [15, 20, 25, 30, 35]); +} + +// Meaningless mapping to test whether the values returned are what we expect +#[fold] +fn map_fields(mut input: ParentStruct) -> ParentStruct { + let current_struct = input.my_structs[0]; + let mut sum = 0; + for 
value in current_struct.nested_struct.small_array { + sum += value; + } + for value in current_struct.nested_struct.big_array { + sum += value; + } + sum += (current_struct.x + current_struct.y + current_struct.z); + + input.basic_array[1] = sum as Field; + + input.my_structs[1].nested_struct.small_array = [5, 10]; + input.my_structs[1].nested_struct.big_array = [15, 20, 25, 30, 35]; + + // LHS input.my_structs[1].x == 50 + input.my_structs[1].x = input.my_structs[1].x * 10; + // LHS input.my_structs[1].y == 30 + input.my_structs[1].y = input.my_structs[1].y * 10; + // LHS input.my_structs[1].x == 80 + input.my_structs[1].z = input.my_structs[1].x + input.my_structs[1].y; + + input +} + diff --git a/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Nargo.toml b/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Nargo.toml new file mode 100644 index 000000000000..50ba1d194a62 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "nested_array_dynamic_simple" +type = "bin" +authors = [""] +compiler_version = ">=0.28.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Prover.toml b/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Prover.toml new file mode 100644 index 000000000000..07890234a19b --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/Prover.toml @@ -0,0 +1 @@ +x = "3" diff --git a/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/src/main.nr b/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/src/main.nr new file mode 100644 index 000000000000..3b1908a463be --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/nested_array_dynamic_simple/src/main.nr @@ -0,0 +1,9 @@ +fn main(x: Field) { + // x = 3 + let array: 
[[(Field, [Field; 1], [Field; 1]); 1]; 1] = [[(1, [2], [3])]]; + + let fetched_value = array[x - 3]; + assert(fetched_value[0].0 == 1); + assert(fetched_value[0].1[0] == 2); + assert(fetched_value[0].2[0] == 3); +} diff --git a/noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/Nargo.toml b/noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/Nargo.toml new file mode 100644 index 000000000000..a372caf92e96 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "slice_init_with_complex_type" +type = "bin" +authors = [""] +compiler_version = ">=0.28.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/noir_test_success/bounded_vec/Prover.toml b/noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/Prover.toml similarity index 100% rename from noir/noir-repo/test_programs/noir_test_success/bounded_vec/Prover.toml rename to noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/Prover.toml diff --git a/noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/src/main.nr b/noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/src/main.nr new file mode 100644 index 000000000000..01ccf2fdeff2 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/slice_init_with_complex_type/src/main.nr @@ -0,0 +1,17 @@ +struct strct1 { + elem1: Field, +} + +fn main() { + let var1: [[i32; 1]] = [[0]]; + let var2: [[i32; 1]] = var1; + + let var1: [(i32, u8)] = [(1, 2)]; + let var2: [(i32, u8)] = var1; + + let var3: [strct1] = [strct1 { elem1: 1321351 }]; + let var4: [strct1] = var3; + + let var1: [i32; 1] = [0]; + let var2: [[i32; 1]] = [var1]; +} diff --git a/noir/noir-repo/test_programs/noir_test_success/brillig_overflow_checks/Prover.toml b/noir/noir-repo/test_programs/noir_test_success/brillig_overflow_checks/Prover.toml 
deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/noir/noir-repo/test_programs/noir_test_success/field_comparisons/Prover.toml b/noir/noir-repo/test_programs/noir_test_success/field_comparisons/Prover.toml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/noir/noir-repo/test_programs/noir_test_success/ignored_oracle/Nargo.toml b/noir/noir-repo/test_programs/noir_test_success/ignored_oracle/Nargo.toml new file mode 100644 index 000000000000..0d9b77c01d77 --- /dev/null +++ b/noir/noir-repo/test_programs/noir_test_success/ignored_oracle/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "ignored_oracle" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/noir_test_success/ignored_oracle/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/ignored_oracle/src/main.nr new file mode 100644 index 000000000000..9e0bc1899397 --- /dev/null +++ b/noir/noir-repo/test_programs/noir_test_success/ignored_oracle/src/main.nr @@ -0,0 +1,23 @@ +// In `nargo test` we want to avoid the need for an external oracle resolver service to be required in the situation +// where its existence doesn't affect whether the tests will pass or fail. We then want to be able to handle any +// oracles which return zero field elements. + +// Note that this custom oracle doesn't return any new values into the program. +// We can then safely continue execution even in the case where there is no oracle resolver to handle it. +#[oracle(custom_debug)] +unconstrained fn custom_debug() {} + +// However this oracle call should return a field element. We expect the ACVM to raise an error when it +// doesn't receive this value. 
+#[oracle(custom_getter)] +unconstrained fn custom_getter() -> Field {} + +#[test] +unconstrained fn unit_return_oracle_ignored() { + custom_debug(); +} + +#[test(should_fail_with = "0 output values were provided as a foreign call result for 1 destination slots")] +unconstrained fn field_return_oracle_fails() { + let _ = custom_getter(); +} diff --git a/noir/noir-repo/test_programs/noir_test_success/mock_oracle/Prover.toml b/noir/noir-repo/test_programs/noir_test_success/mock_oracle/Prover.toml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/noir/noir-repo/test_programs/noir_test_success/out_of_bounds_alignment/Prover.toml b/noir/noir-repo/test_programs/noir_test_success/out_of_bounds_alignment/Prover.toml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/noir/noir-repo/test_programs/noir_test_success/should_fail_with_matches/Prover.toml b/noir/noir-repo/test_programs/noir_test_success/should_fail_with_matches/Prover.toml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/noir/noir-repo/tooling/debugger/ignored-tests.txt b/noir/noir-repo/tooling/debugger/ignored-tests.txt index cbef395e65c4..d08f5609645e 100644 --- a/noir/noir-repo/tooling/debugger/ignored-tests.txt +++ b/noir/noir-repo/tooling/debugger/ignored-tests.txt @@ -22,3 +22,5 @@ no_predicates_numeric_generic_poseidon regression_4709 fold_distinct_return fold_fibonacci +fold_complex_outputs +slice_init_with_complex_type diff --git a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs b/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs index 90f6659ad284..c314a230cef2 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs @@ -249,43 +249,49 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { .iter() .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); - match (mock_response_position, &self.external_resolver) { - 
(Some(response_position), _) => { - let mock = self - .mocked_responses - .get_mut(response_position) - .expect("Invalid position of mocked response"); - - mock.last_called_params = Some(foreign_call.inputs.clone()); - - let result = mock.result.values.clone(); - - if let Some(times_left) = &mut mock.times_left { - *times_left -= 1; - if *times_left == 0 { - self.mocked_responses.remove(response_position); - } - } + if let Some(response_position) = mock_response_position { + // If the program has registered a mocked response to this oracle call then we prefer responding + // with that. + + let mock = self + .mocked_responses + .get_mut(response_position) + .expect("Invalid position of mocked response"); + + mock.last_called_params = Some(foreign_call.inputs.clone()); + + let result = mock.result.values.clone(); - Ok(result.into()) + if let Some(times_left) = &mut mock.times_left { + *times_left -= 1; + if *times_left == 0 { + self.mocked_responses.remove(response_position); + } } - (None, Some(external_resolver)) => { - let encoded_params: Vec<_> = - foreign_call.inputs.iter().map(build_json_rpc_arg).collect(); - let req = - external_resolver.build_request(foreign_call_name, &encoded_params); + Ok(result.into()) + } else if let Some(external_resolver) = &self.external_resolver { + // If the user has registered an external resolver then we forward any remaining oracle calls there. 
- let response = external_resolver.send_request(req)?; + let encoded_params: Vec<_> = + foreign_call.inputs.iter().map(build_json_rpc_arg).collect(); - let parsed_response: ForeignCallResult = response.result()?; + let req = external_resolver.build_request(foreign_call_name, &encoded_params); - Ok(parsed_response) - } - (None, None) => panic!( - "No mock for foreign call {}({:?})", - foreign_call_name, &foreign_call.inputs - ), + let response = external_resolver.send_request(req)?; + + let parsed_response: ForeignCallResult = response.result()?; + + Ok(parsed_response) + } else { + // If there's no registered mock oracle response and no registered resolver then we cannot + // return a correct response to the ACVM. The best we can do is to return an empty response, + // this allows us to ignore any foreign calls which exist solely to pass information from inside + // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress. + // + // We optimistically return an empty response for all oracle calls as the ACVM will error + // should a response have been required. 
+ Ok(ForeignCallResult::default()) } } } diff --git a/noir/noir-repo/tooling/noir_js/scripts/compile_test_programs.sh b/noir/noir-repo/tooling/noir_js/scripts/compile_test_programs.sh index b9f12b5deb1c..0642159aa695 100755 --- a/noir/noir-repo/tooling/noir_js/scripts/compile_test_programs.sh +++ b/noir/noir-repo/tooling/noir_js/scripts/compile_test_programs.sh @@ -3,4 +3,5 @@ rm -rf ./test/noir_compiled_examples/**/target nargo --program-dir ./test/noir_compiled_examples/assert_lt compile --force nargo --program-dir ./test/noir_compiled_examples/assert_msg_runtime compile --force +nargo --program-dir ./test/noir_compiled_examples/fold_fibonacci compile --force nargo --program-dir ./test/noir_compiled_examples/assert_raw_payload compile --force diff --git a/noir/noir-repo/tooling/noir_js/src/program.ts b/noir/noir-repo/tooling/noir_js/src/program.ts index 8d80ec3a247e..86aa0f60ddfc 100644 --- a/noir/noir-repo/tooling/noir_js/src/program.ts +++ b/noir/noir-repo/tooling/noir_js/src/program.ts @@ -2,7 +2,7 @@ import { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; import { generateWitness } from './witness_generation.js'; import initAbi, { abiDecode, InputMap, InputValue } from '@noir-lang/noirc_abi'; -import initACVM, { compressWitness, ForeignCallHandler } from '@noir-lang/acvm_js'; +import initACVM, { compressWitnessStack, ForeignCallHandler } from '@noir-lang/acvm_js'; export class Noir { constructor( @@ -55,9 +55,10 @@ export class Noir { foreignCallHandler?: ForeignCallHandler, ): Promise<{ witness: Uint8Array; returnValue: InputValue }> { await this.init(); - const witness = await generateWitness(this.circuit, inputs, foreignCallHandler); - const { return_value: returnValue } = abiDecode(this.circuit.abi, witness); - return { witness: compressWitness(witness), returnValue }; + const witness_stack = await generateWitness(this.circuit, inputs, foreignCallHandler); + const main_witness = witness_stack[0].witness; + const { return_value: returnValue 
} = abiDecode(this.circuit.abi, main_witness); + return { witness: compressWitnessStack(witness_stack), returnValue }; } /** diff --git a/noir/noir-repo/tooling/noir_js/src/witness_generation.ts b/noir/noir-repo/tooling/noir_js/src/witness_generation.ts index a22b06870403..7d018c81d53a 100644 --- a/noir/noir-repo/tooling/noir_js/src/witness_generation.ts +++ b/noir/noir-repo/tooling/noir_js/src/witness_generation.ts @@ -1,12 +1,12 @@ import { abiDecodeError, abiEncode, InputMap } from '@noir-lang/noirc_abi'; import { base64Decode } from './base64_decode.js'; import { - WitnessMap, + WitnessStack, ForeignCallHandler, ForeignCallInput, createBlackBoxSolver, WasmBlackBoxFunctionSolver, - executeCircuitWithBlackBoxSolver, + executeProgramWithBlackBoxSolver, ExecutionError, } from '@noir-lang/acvm_js'; import { Abi, CompiledCircuit } from '@noir-lang/types'; @@ -63,14 +63,14 @@ export async function generateWitness( compiledProgram: CompiledCircuit, inputs: InputMap, foreignCallHandler: ForeignCallHandler = defaultForeignCallHandler, -): Promise { +): Promise { // Throws on ABI encoding error const witnessMap = abiEncode(compiledProgram.abi, inputs); // Execute the circuit to generate the rest of the witnesses and serialize // them into a Uint8Array. 
try { - const solvedWitness = await executeCircuitWithBlackBoxSolver( + const solvedWitness = await executeProgramWithBlackBoxSolver( await getSolver(), base64Decode(compiledProgram.bytecode), witnessMap, diff --git a/noir/noir-repo/tooling/noir_js/test/node/e2e.test.ts b/noir/noir-repo/tooling/noir_js/test/node/e2e.test.ts index 979841c47e6f..dbb9abcc964c 100644 --- a/noir/noir-repo/tooling/noir_js/test/node/e2e.test.ts +++ b/noir/noir-repo/tooling/noir_js/test/node/e2e.test.ts @@ -1,10 +1,12 @@ import { expect } from 'chai'; import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt.json' assert { type: 'json' }; +import fold_fibonacci_json from '../noir_compiled_examples/fold_fibonacci/target/fold_fibonacci.json' assert { type: 'json' }; import { Noir } from '@noir-lang/noir_js'; import { BarretenbergBackend as Backend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg'; import { CompiledCircuit } from '@noir-lang/types'; const assert_lt_program = assert_lt_json as CompiledCircuit; +const fold_fibonacci_program = fold_fibonacci_json as CompiledCircuit; it('end-to-end proof creation and verification (outer)', async () => { // Noir.Js part @@ -149,3 +151,24 @@ it('[BUG] -- bb.js null function or function signature mismatch (outer-inner) ', const isValidInner = await prover.verifyProof(_proofInner); expect(isValidInner).to.be.true; }); + +it('end-to-end proof creation and verification for multiple ACIR circuits (inner)', async () => { + // Noir.Js part + const inputs = { + x: '10', + }; + + const program = new Noir(fold_fibonacci_program); + + const { witness } = await program.execute(inputs); + + // bb.js part + // + // Proof creation + const backend = new Backend(fold_fibonacci_program); + const proof = await backend.generateProof(witness); + + // Proof verification + const isValid = await backend.verifyProof(proof); + expect(isValid).to.be.true; +}); diff --git 
a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts index b7aa4f3135cb..54a42d40b60b 100644 --- a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts +++ b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts @@ -1,5 +1,6 @@ import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt.json' assert { type: 'json' }; import assert_msg_json from '../noir_compiled_examples/assert_msg_runtime/target/assert_msg_runtime.json' assert { type: 'json' }; +import fold_fibonacci_json from '../noir_compiled_examples/fold_fibonacci/target/fold_fibonacci.json' assert { type: 'json' }; import assert_raw_payload_json from '../noir_compiled_examples/assert_raw_payload/target/assert_raw_payload.json' assert { type: 'json' }; import { Noir, ErrorWithPayload } from '@noir-lang/noir_js'; @@ -8,6 +9,7 @@ import { expect } from 'chai'; const assert_lt_program = assert_lt_json as CompiledCircuit; const assert_msg_runtime = assert_msg_json as CompiledCircuit; +const fold_fibonacci_program = fold_fibonacci_json as CompiledCircuit; it('returns the return value of the circuit', async () => { const inputs = { @@ -49,3 +51,45 @@ it('circuit with a raw assert payload should fail with the decoded payload', asy }); } }); + +it('successfully executes a program with multiple acir circuits', async () => { + const inputs = { + x: '10', + }; + try { + await new Noir(fold_fibonacci_program).execute(inputs); + } catch (error) { + const knownError = error as Error; + expect(knownError.message).to.equal('Circuit execution failed: Expected x < y but got 10 < 5'); + } +}); + +it('circuit with a raw assert payload should fail with the decoded payload', async () => { + const inputs = { + x: '7', + y: '5', + }; + try { + await new Noir(assert_raw_payload_json).execute(inputs); + } catch (error) { + const knownError = error as ErrorWithPayload; + const invalidXYErrorSelector = 
Object.keys(assert_raw_payload_json.abi.error_types)[0]; + expect(knownError.rawAssertionPayload!.selector).to.equal(invalidXYErrorSelector); + expect(knownError.decodedAssertionPayload).to.deep.equal({ + x: '0x07', + y: '0x05', + }); + } +}); + +it('successfully executes a program with multiple acir circuits', async () => { + const inputs = { + x: '10', + }; + try { + await new Noir(fold_fibonacci_program).execute(inputs); + } catch (error) { + const knownError = error as Error; + expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); + } +}); diff --git a/noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/Nargo.toml b/noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/Nargo.toml new file mode 100644 index 000000000000..6d8214689b06 --- /dev/null +++ b/noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "fold_fibonacci" +type = "bin" +authors = [""] +compiler_version = ">=0.28.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/src/main.nr b/noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/src/main.nr new file mode 100644 index 000000000000..e150a586086f --- /dev/null +++ b/noir/noir-repo/tooling/noir_js/test/noir_compiled_examples/fold_fibonacci/src/main.nr @@ -0,0 +1,12 @@ +fn main(x: u32) { + assert(fibonacci(x) == 55); +} + +#[fold] +fn fibonacci(x: u32) -> u32 { + if x <= 1 { + x + } else { + fibonacci(x - 1) + fibonacci(x - 2) + } +} From e91c238cd36d378cbb2d67148d883e5124a9c5da Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 May 2024 02:09:17 +0000 Subject: [PATCH 6/9] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "4a453f4c7" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "4a453f4c7" git-subrepo: version: 
"0.4.6" origin: "???" commit: "???" [skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 284bf01d6ff5..410f0811804a 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = d6e0824b144875a9a0fd1561047affa319d64b83 - parent = c9b32061b2849442516ff0395b69d9a230191234 + commit = 4a453f4c7b3b01410968abc95ff3ba13d5d2da8b + parent = 3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0 method = merge cmdver = 0.4.6 From ca29cea33adda120adc90b3a32163625271af319 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 May 2024 02:09:45 +0000 Subject: [PATCH 7/9] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af58631..0e8fc6ef1186 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.37.0", directory="noir-projects/noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index 13404b373243..02e153ec117d 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.37.0", 
directory="noir-projects/noir-protocol-circuits/crates/types" } From c9b7d4a9cdd46085b211071da51c3f2bebc32e6d Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 May 2024 02:09:45 +0000 Subject: [PATCH 8/9] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index ff3032ad3295..bd8b16035438 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ commit = 8128c63b218dcc1cc7d18db5e19b3e0a9f63d81b method = merge cmdver = 0.4.6 - parent = 380de6c91037690ea6ff7130b7094b3f50a9963e + parent = bb778834de0000e8189fde83f71e4d7173be3801 From 980a62e1fffe3f9533a1c7e8f157b11a21fe6b61 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 4 May 2024 02:09:49 +0000 Subject: [PATCH 9/9] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "500bdad25" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "500bdad25" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index bd8b16035438..d96c91ee0f84 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 8128c63b218dcc1cc7d18db5e19b3e0a9f63d81b + commit = 500bdad257ae68720a8d9b7ea72db491a6f373c8 method = merge cmdver = 0.4.6 - parent = bb778834de0000e8189fde83f71e4d7173be3801 + parent = 12851e40fee04f468855961f75dd31a0b344a1f4 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 0e8fc6ef1186..7a1f1af58631 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.37.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index 02e153ec117d..13404b373243 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.37.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" }