chore: make public data update requests, note hashes, and unencrypted logs readonly in TS (#6658)

Continue making fields in TS readonly.

We do so by forcing side effect counters to be set in public, and then waiting until the public kernel tail to:
- recombine the non-revertible and revertible side effects and compute the hints (in TS)
- provide the hints to the public kernel tail
- use the hints in the public kernel tail while recombining, to sort the public data update requests
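
For orientation, here is a minimal TypeScript sketch of the hint computation described above. The names and shapes are hypothetical (the real code works over fixed-length, zero-padded arrays); it only illustrates the merge, sort-by-counter, and record-indexes idea.

// Hypothetical sketch, not the actual aztec-packages TS API.
interface SideEffect {
  counter: number;
}

interface SortedHints<T> {
  sortedArray: T[];
  // For each index i in the merged input, the position of merged[i] in sortedArray.
  sortedIndexHints: number[];
}

function computeCombineHints<T extends SideEffect>(
  nonRevertible: T[],
  revertible: T[],
): SortedHints<T> {
  // Recombine the non-revertible and revertible side effects...
  const merged = [...nonRevertible, ...revertible];
  // ...then sort by the counters that public execution was forced to set.
  const sortedArray = [...merged].sort((a, b) => a.counter - b.counter);
  // Record where each merged element ends up, so the kernel can verify the
  // claimed ordering instead of sorting in-circuit.
  const sortedIndexHints = merged.map((item) => sortedArray.indexOf(item));
  return { sortedArray, sortedIndexHints };
}

// Example: two side effects from each phase, out of order by counter.
const hints = computeCombineHints([{ counter: 4 }, { counter: 1 }], [{ counter: 3 }, { counter: 2 }]);
console.log(hints.sortedIndexHints); // [3, 0, 2, 1]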
just-mitch authored May 29, 2024
1 parent cd1974a commit 1230e56
Showing 40 changed files with 490 additions and 200 deletions.
4 changes: 2 additions & 2 deletions l1-contracts/src/core/libraries/ConstantsGen.sol
@@ -113,7 +113,7 @@ library Constants {
uint256 internal constant CONTENT_COMMITMENT_LENGTH = 4;
uint256 internal constant CONTRACT_INSTANCE_LENGTH = 5;
uint256 internal constant CONTRACT_STORAGE_READ_LENGTH = 2;
uint256 internal constant CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH = 2;
uint256 internal constant CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH = 3;
uint256 internal constant ETH_ADDRESS_LENGTH = 1;
uint256 internal constant FUNCTION_DATA_LENGTH = 2;
uint256 internal constant FUNCTION_LEAF_PREIMAGE_LENGTH = 5;
@@ -187,7 +187,7 @@ library Constants {
+ (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX)
+ (SCOPED_KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH * MAX_KEY_VALIDATION_REQUESTS_PER_TX)
+ (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX);
uint256 internal constant PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2;
uint256 internal constant PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 3;
uint256 internal constant COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX
+ MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 6
+ (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH;
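
A hedged TypeScript sketch of why PUBLIC_DATA_UPDATE_REQUEST_LENGTH grows from 2 to 3: the request now carries a side effect counter alongside the leaf slot and new value (see the Noir diffs below). The class shape and method names here are illustrative, not the actual TS implementation.

type Fr = bigint;

class PublicDataUpdateRequest {
  constructor(
    public readonly leafSlot: Fr,   // matches leaf_slot in the Noir struct
    public readonly newValue: Fr,   // matches new_value
    public readonly counter: number // the newly added side effect counter
  ) {}

  // Serializes to exactly PUBLIC_DATA_UPDATE_REQUEST_LENGTH (= 3) field elements.
  toFields(): Fr[] {
    return [this.leafSlot, this.newValue, BigInt(this.counter)];
  }
}

const req = new PublicDataUpdateRequest(11n, 100n, 1);
console.log(req.toFields().length); // 3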
Changed file: public kernel tail circuit (Noir)
@@ -5,13 +5,18 @@ use dep::reset_kernel_lib::{
};
use dep::types::{
abis::{
accumulated_data::CombinedAccumulatedData, kernel_circuit_public_inputs::KernelCircuitPublicInputs,
public_kernel_data::PublicKernelData
accumulated_data::{CombinedAccumulatedData, CombineHints},
kernel_circuit_public_inputs::KernelCircuitPublicInputs, public_kernel_data::PublicKernelData,
public_data_update_request::PublicDataUpdateRequest, side_effect::Ordered
},
constants::{
MAX_NEW_NOTE_HASHES_PER_TX, MAX_PUBLIC_DATA_HINTS, MAX_NULLIFIER_READ_REQUESTS_PER_TX,
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX
},
constants::{MAX_PUBLIC_DATA_HINTS, MAX_NULLIFIER_READ_REQUESTS_PER_TX},
data::public_data_hint::PublicDataHint,
merkle_tree::{conditionally_assert_check_membership, MembershipWitness},
partial_state_reference::PartialStateReference, utils::{arrays::array_length}, address::AztecAddress
partial_state_reference::PartialStateReference,
utils::{arrays::{array_length, assert_sorted_array}}, address::AztecAddress
};

struct PublicKernelTailCircuitPrivateInputs {
@@ -21,6 +26,7 @@ struct PublicKernelTailCircuitPrivateInputs {
public_data_hints: [PublicDataHint; MAX_PUBLIC_DATA_HINTS],
public_data_read_request_hints: PublicDataReadRequestHints,
start_state: PartialStateReference,
combine_hints: CombineHints
}

impl PublicKernelTailCircuitPrivateInputs {
@@ -46,10 +52,10 @@ impl PublicKernelTailCircuitPrivateInputs {

fn propagate_accumulated_data(self) -> CombinedAccumulatedData {
let previous_public_inputs = self.previous_kernel.public_inputs;
// TODO: Sort the combined data.
CombinedAccumulatedData::combine(
previous_public_inputs.end_non_revertible,
previous_public_inputs.end
previous_public_inputs.end,
self.combine_hints
)
}

@@ -98,7 +104,9 @@ mod tests {
use dep::types::{
abis::{
kernel_circuit_public_inputs::KernelCircuitPublicInputs, public_kernel_data::PublicKernelData,
nullifier::ScopedNullifier, nullifier_leaf_preimage::NullifierLeafPreimage
nullifier::ScopedNullifier, nullifier_leaf_preimage::NullifierLeafPreimage,
accumulated_data::{CombinedAccumulatedData, CombineHints},
public_data_update_request::PublicDataUpdateRequest, note_hash::NoteHash, log_hash::LogHash
},
address::AztecAddress,
constants::{
@@ -110,7 +118,7 @@
},
hash::{silo_nullifier, sha256_to_field},
public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage,
tests::{fixture_builder::FixtureBuilder, merkle_tree_utils::NonEmptyMerkleTree},
tests::{fixture_builder::FixtureBuilder, merkle_tree_utils::NonEmptyMerkleTree, sort::sort_get_sorted_hints},
traits::is_empty, partial_state_reference::PartialStateReference, utils::arrays::array_merge,
merkle_tree::MembershipWitness
};
@@ -292,13 +300,52 @@ mod tests {
let mut previous_kernel = self.previous_kernel.to_public_kernel_data(false);
previous_kernel.public_inputs.end = self.previous_revertible.to_public_accumulated_data();

// Note: note hashes are a bit odd here: whereas we'd like to use `combined` and then
// sort the result, we cannot because we lose the side effect counters when we combine.
let merged = array_merge(
previous_kernel.public_inputs.end_non_revertible.new_note_hashes,
previous_kernel.public_inputs.end.new_note_hashes
);
let sorted = sort_get_sorted_hints(merged, |a: NoteHash, b: NoteHash| a.counter() < b.counter());
let sorted_note_hashes = sorted.sorted_array;
let sorted_note_hashes_indexes = sorted.sorted_index_hints;

let merged = array_merge(
previous_kernel.public_inputs.end_non_revertible.unencrypted_logs_hashes,
previous_kernel.public_inputs.end.unencrypted_logs_hashes
);
let sorted = sort_get_sorted_hints(merged, |a: LogHash, b: LogHash| a.counter() < b.counter());
let sorted_unencrypted_logs_hashes = sorted.sorted_array;
let sorted_unencrypted_logs_hashes_indexes = sorted.sorted_index_hints;

let merged = array_merge(
previous_kernel.public_inputs.end_non_revertible.public_data_update_requests,
previous_kernel.public_inputs.end.public_data_update_requests
);
let sorted = sort_get_sorted_hints(
merged,
|a: PublicDataUpdateRequest, b: PublicDataUpdateRequest| a.counter() < b.counter()
);
let sorted_public_data_update_requests = sorted.sorted_array;
let sorted_public_data_update_requests_indexes = sorted.sorted_index_hints;

let combine_hints = CombineHints {
sorted_note_hashes,
sorted_note_hashes_indexes,
sorted_unencrypted_logs_hashes,
sorted_unencrypted_logs_hashes_indexes,
sorted_public_data_update_requests,
sorted_public_data_update_requests_indexes
};

let kernel = PublicKernelTailCircuitPrivateInputs {
previous_kernel,
nullifier_read_request_hints: self.nullifier_read_request_hints_builder.to_hints(),
nullifier_non_existent_read_request_hints: self.nullifier_non_existent_read_request_hints_builder.to_hints(),
public_data_hints: self.public_data_hints.storage,
public_data_read_request_hints: self.public_data_read_request_hints_builder.to_hints(),
start_state: self.start_state
start_state: self.start_state,
combine_hints
};

kernel.public_kernel_tail()
Changed file: public kernel teardown circuit tests (Noir)
@@ -548,23 +548,32 @@ mod tests {
}

#[test]
unconstrained fn correctly_updates_revert_code() {
unconstrained fn correctly_updates_revert_code_0() {
let mut builder = PublicKernelTeardownCircuitPrivateInputsBuilder::new();
let public_inputs = builder.execute();
assert_eq(public_inputs.revert_code, 0);
}

#[test]
unconstrained fn correctly_updates_revert_code_1() {
// Case where we carry forward a revert code from app logic
let mut builder = PublicKernelTeardownCircuitPrivateInputsBuilder::new();
builder.previous_kernel.revert_code = 1;
let public_inputs = builder.execute();
assert_eq(public_inputs.revert_code, 1);
}

#[test]
unconstrained fn correctly_updates_revert_code_2() {
// Case where there is a new error in teardown
let mut builder = PublicKernelTeardownCircuitPrivateInputsBuilder::new();
builder.public_call.public_inputs.revert_code = 1;
let public_inputs = builder.execute();
assert_eq(public_inputs.revert_code, 2);
}

#[test]
unconstrained fn correctly_updates_revert_code_3() {
// Case where there is an error in both app logic and teardown
let mut builder = PublicKernelTeardownCircuitPrivateInputsBuilder::new();
builder.previous_kernel.revert_code = 1;
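
The four test cases above follow a simple two-bit pattern. A hedged TypeScript reading of it (the value 3 for the "both reverted" case is an assumption; the diff is truncated before that assertion):

// Bit 0: app logic reverted (carried forward), bit 1: teardown reverted.
function combineRevertCodes(previousRevertCode: 0 | 1, teardownReverted: boolean): number {
  return previousRevertCode | (teardownReverted ? 2 : 0);
}

console.log(combineRevertCodes(0, false)); // 0: nothing reverted
console.log(combineRevertCodes(1, false)); // 1: carried forward from app logic
console.log(combineRevertCodes(0, true));  // 2: new error in teardown
console.log(combineRevertCodes(1, true));  // 3: both (assumed)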
Changed file: public data write test fixtures (Noir)
@@ -138,10 +138,10 @@ mod tests {
}

global data_writes = [
PublicDataUpdateRequest { leaf_slot: 22, new_value: 200 },
PublicDataUpdateRequest { leaf_slot: 11, new_value: 100 },
PublicDataUpdateRequest { leaf_slot: 33, new_value: 300 },
PublicDataUpdateRequest { leaf_slot: 44, new_value: 400 }
PublicDataUpdateRequest { leaf_slot: 22, new_value: 200, counter: 0 },
PublicDataUpdateRequest { leaf_slot: 11, new_value: 100, counter: 1 },
PublicDataUpdateRequest { leaf_slot: 33, new_value: 300, counter: 2 },
PublicDataUpdateRequest { leaf_slot: 44, new_value: 400, counter: 3 }
];

global leaf_data_hints = [
Changed file: base rollup circuit (Noir)
@@ -273,7 +273,7 @@ impl BaseRollupInputs {
);

let new_value = compute_public_data_tree_value(existing_update.new_value - tx_fee);
let protocol_update_request = PublicDataUpdateRequest { leaf_slot, new_value };
let protocol_update_request = PublicDataUpdateRequest { leaf_slot, new_value, counter: 0 };
(protocol_update_request, existing_update_index as u64)
} else {
// Otherwise, build a new one to be inserted into the protocol update requests at the end of the array.
@@ -284,7 +284,7 @@
assert(!balance.lt(tx_fee), "Not enough balance for fee payer to pay for transaction");

let new_value = compute_public_data_tree_value(balance - tx_fee);
let protocol_update_request = PublicDataUpdateRequest { leaf_slot, new_value };
let protocol_update_request = PublicDataUpdateRequest { leaf_slot, new_value, counter: 0 };
(protocol_update_request, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX)
}
} else {
@@ -505,6 +505,7 @@ mod tests {
kernel_data.public_inputs.end.public_data_update_requests[i] = PublicDataUpdateRequest {
leaf_slot : leaf.slot,
new_value : leaf.value,
counter : 0
};
}
}
Changed file: accumulated_data module (Noir)
@@ -5,7 +5,7 @@ mod public_accumulated_data;
mod public_accumulated_data_builder;

use crate::abis::accumulated_data::{
combined_accumulated_data::CombinedAccumulatedData,
combined_accumulated_data::CombinedAccumulatedData, combined_accumulated_data::CombineHints,
private_accumulated_data::PrivateAccumulatedData,
private_accumulated_data_builder::PrivateAccumulatedDataBuilder,
public_accumulated_data::PublicAccumulatedData,
Changed file: combined_accumulated_data (Noir)
@@ -3,15 +3,26 @@ use crate::{
abis::{
accumulated_data::public_accumulated_data::PublicAccumulatedData, note_hash::NoteHash,
nullifier::Nullifier, public_data_update_request::PublicDataUpdateRequest,
log_hash::{LogHash, NoteLogHash}, gas::Gas
log_hash::{LogHash, NoteLogHash}, gas::Gas, side_effect::Ordered
},
constants::{
MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX,
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, COMBINED_ACCUMULATED_DATA_LENGTH
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, COMBINED_ACCUMULATED_DATA_LENGTH,
MAX_UNENCRYPTED_LOGS_PER_TX
},
utils::{arrays::array_merge, reader::Reader}, traits::{Empty, Serialize, Deserialize}
utils::{arrays::{array_merge, assert_sorted_array}, reader::Reader},
traits::{Empty, Serialize, Deserialize}
};

struct CombineHints {
sorted_note_hashes: [NoteHash; MAX_NEW_NOTE_HASHES_PER_TX],
sorted_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX],
sorted_unencrypted_logs_hashes: [LogHash; MAX_UNENCRYPTED_LOGS_PER_TX],
sorted_unencrypted_logs_hashes_indexes: [u64; MAX_UNENCRYPTED_LOGS_PER_TX],
sorted_public_data_update_requests: [PublicDataUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
sorted_public_data_update_requests_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
}

struct CombinedAccumulatedData {
new_note_hashes: [Field; MAX_NEW_NOTE_HASHES_PER_TX],
new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_TX],
@@ -32,8 +43,35 @@ struct CombinedAccumulatedData {
gas_used: Gas,
}

fn asc_sort_by_counters<T>(a: T, b: T) -> bool where T: Ordered {
a.counter() <= b.counter()
}

impl CombinedAccumulatedData {
pub fn combine(non_revertible: PublicAccumulatedData, revertible: PublicAccumulatedData) -> Self {
pub fn combine(
non_revertible: PublicAccumulatedData,
revertible: PublicAccumulatedData,
combine_hints: CombineHints
) -> Self {
let merged_note_hashes = array_merge(non_revertible.new_note_hashes, revertible.new_note_hashes);
assert_sorted_array(
merged_note_hashes,
combine_hints.sorted_note_hashes,
combine_hints.sorted_note_hashes_indexes,
asc_sort_by_counters
);

let merged_public_data_update_requests = array_merge(
non_revertible.public_data_update_requests,
revertible.public_data_update_requests
);
assert_sorted_array(
merged_public_data_update_requests,
combine_hints.sorted_public_data_update_requests,
combine_hints.sorted_public_data_update_requests_indexes,
asc_sort_by_counters
);

// TODO(Miranda): Hash here or elsewhere?
let note_encrypted_logs_hash = compute_tx_note_logs_hash(
array_merge(
@@ -47,20 +85,27 @@ impl CombinedAccumulatedData {
revertible.encrypted_logs_hashes
)
);
let unencrypted_logs_hash = compute_tx_logs_hash(
array_merge(
non_revertible.unencrypted_logs_hashes,
revertible.unencrypted_logs_hashes
)

let merged_unencrypted_logs_hashes = array_merge(
non_revertible.unencrypted_logs_hashes,
revertible.unencrypted_logs_hashes
);
assert_sorted_array(
merged_unencrypted_logs_hashes,
combine_hints.sorted_unencrypted_logs_hashes,
combine_hints.sorted_unencrypted_logs_hashes_indexes,
asc_sort_by_counters
);
let unencrypted_logs_hash = compute_tx_logs_hash(combine_hints.sorted_unencrypted_logs_hashes);

let note_encrypted_log_preimages_length = non_revertible.note_encrypted_logs_hashes.fold(0, |a, b: LogHash| a + b.length)
+ revertible.note_encrypted_logs_hashes.fold(0, |a, b: LogHash| a + b.length);
let encrypted_log_preimages_length = non_revertible.encrypted_logs_hashes.fold(0, |a, b: LogHash| a + b.length)
+ revertible.encrypted_logs_hashes.fold(0, |a, b: LogHash| a + b.length);
let unencrypted_log_preimages_length = non_revertible.unencrypted_logs_hashes.fold(0, |a, b: LogHash| a + b.length)
+ revertible.unencrypted_logs_hashes.fold(0, |a, b: LogHash| a + b.length);
CombinedAccumulatedData {
new_note_hashes: array_merge(non_revertible.new_note_hashes, revertible.new_note_hashes).map(|n: NoteHash| n.value),
new_note_hashes: combine_hints.sorted_note_hashes.map(|n: NoteHash| n.value),
new_nullifiers: array_merge(non_revertible.new_nullifiers, revertible.new_nullifiers).map(|n: Nullifier| n.value),
new_l2_to_l1_msgs: array_merge(
non_revertible.new_l2_to_l1_msgs,
@@ -72,10 +117,7 @@ impl CombinedAccumulatedData {
note_encrypted_log_preimages_length,
encrypted_log_preimages_length,
unencrypted_log_preimages_length,
public_data_update_requests: array_merge(
non_revertible.public_data_update_requests,
revertible.public_data_update_requests
),
public_data_update_requests: combine_hints.sorted_public_data_update_requests,
gas_used: revertible.gas_used + non_revertible.gas_used
}
}
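
To make the split of work concrete, here is a hedged TypeScript analogue of what assert_sorted_array enforces above: the prover supplies the sorted array and index hints, and the circuit only checks consistency rather than sorting. The hint direction (indexHints[i] is the position of merged[i] in sorted) and the plain, unpadded arrays are assumptions of this sketch; the real Noir helper also handles fixed-length padded arrays.

interface Ordered {
  counter: number;
}

function assertSortedByCounter<T extends Ordered>(merged: T[], sorted: T[], indexHints: number[]): void {
  if (merged.length !== sorted.length || indexHints.length !== merged.length) {
    throw new Error('length mismatch');
  }
  // Distinct hints plus equal lengths make the mapping a permutation.
  if (new Set(indexHints).size !== indexHints.length) {
    throw new Error('index hints must be distinct');
  }
  for (let i = 0; i < merged.length; i++) {
    // Each merged element must sit exactly where its hint claims.
    if (sorted[indexHints[i]] !== merged[i]) {
      throw new Error(`hint ${i} does not point at the matching element`);
    }
  }
  for (let i = 1; i < sorted.length; i++) {
    // The claimed result must be non-decreasing by counter.
    if (sorted[i - 1].counter > sorted[i].counter) {
      throw new Error(`sorted[${i - 1}] and sorted[${i}] are out of order`);
    }
  }
}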
Changed file: public_call_stack_item tests (Noir)
@@ -69,7 +69,7 @@ mod tests {
let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: true, function_data };

// Value from public_call_stack_item.test.ts "Computes a callstack item request hash" test
let test_data_call_stack_item_request_hash = 0x11998b1d33b8ba1c8fa7a6c2f5bc76b31bbaa80400554465c335ba31559ac1f9;
let test_data_call_stack_item_request_hash = 0x09b460df8be10a6bd56588c93b478243fdf5cc92db59d9b1670ce2a044fab6d6;
assert_eq(call_stack_item.hash(), test_data_call_stack_item_request_hash);
}

@@ -87,7 +87,7 @@
let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: false, function_data };

// Value from public_call_stack_item.test.ts "Computes a callstack item hash" test
let test_data_call_stack_item_hash = 0x2b7f8b68d96d0011ecc576459899e9451fbd880568ccc7a071d9cf04e59abb65;
let test_data_call_stack_item_hash = 0x0931a8de516f3f49dff48fbdea57f01b706dc67cbd1fd8bde97d26c4a53afd0a;
assert_eq(call_stack_item.hash(), test_data_call_stack_item_hash);
}
}
Changed file: public_circuit_public_inputs tests (Noir)
@@ -188,6 +188,6 @@ fn empty_hash() {
let hash = inputs.hash();

// Value from public_circuit_public_inputs.test.ts "computes empty item hash" test
let test_data_empty_hash = 0x1e4351db0c9aa20836e7009bc3e6a4555c92622c5e9cb3b49e2ec0fbbf59d0bd;
let test_data_empty_hash = 0x05061edabeae1057b6f6216afc118e46ea0b77044d4c57ed12e5712dab01b8d4;
assert_eq(hash, test_data_empty_hash);
}