feat: VK tree (#6914)
Implements the VK tree in circuits, the rollup contract, and TS.

- Adds VK generation to the protocol circuits bootstrap, with multiple caching levels (disk, remote cache, bootstrap fast)
- Adds the VK tree root to the TX constants
- Adds VK tree membership checks in all circuits that verify previous circuits, including checks on the VK index via ALLOWED_PREVIOUS_CIRCUITS
- Adds the VK tree root to the public inputs of the root rollup
- Adds the VK tree root to the rollup contract
- Exports the VKs and the VK tree from noir-protocol-circuits-types
- Refactors TS to use the VKs from noir-protocol-circuits-types and removes the mocked VKs
- Updates TS to pass the VK tree root on contract deployment and private kernel init
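The membership check described above boils down to: each circuit that verifies a previous proof receives that proof's VK together with its leaf index and sibling path, recomputes the VK tree root, compares it with the root carried in the TX constants, and checks the index against its ALLOWED_PREVIOUS_CIRCUITS list. A minimal TypeScript sketch of that shape (the hash and helper names here are illustrative stand-ins, not the actual aztec-packages API):

```typescript
// Hypothetical sketch of the VK tree membership check; names and the hash are illustrative.
import { createHash } from 'crypto';

type Fr = bigint;

// Stand-in pairing hash (NOT the protocol's Pedersen/Poseidon primitive).
const hashPair = (l: Fr, r: Fr): Fr =>
  BigInt(
    '0x' +
      createHash('sha256')
        .update(l.toString(16).padStart(64, '0') + r.toString(16).padStart(64, '0'))
        .digest('hex'),
  ) >> 8n;

// Recompute a Merkle root from a leaf, its index and its sibling path.
function rootFromSiblingPath(leaf: Fr, index: number, siblingPath: Fr[]): Fr {
  let node = leaf;
  for (let i = 0; i < siblingPath.length; i++) {
    node = (index >> i) & 1 ? hashPair(siblingPath[i], node) : hashPair(node, siblingPath[i]);
  }
  return node;
}

// Shape of the check each circuit performs on the VK of the proof it verifies.
function checkPreviousVk(
  vkHash: Fr,                        // hash of the previous circuit's verification key
  vkIndex: number,                   // its leaf index in the VK tree
  siblingPath: Fr[],                 // membership witness of length VK_TREE_HEIGHT (5)
  vkTreeRoot: Fr,                    // root carried in the TX constants
  allowedPreviousCircuits: number[], // e.g. [PRIVATE_KERNEL_INIT_INDEX, PRIVATE_KERNEL_INNER_INDEX]
): void {
  if (!allowedPreviousCircuits.includes(vkIndex)) {
    throw new Error(`circuit index ${vkIndex} is not an allowed previous circuit`);
  }
  if (rootFromSiblingPath(vkHash, vkIndex, siblingPath) !== vkTreeRoot) {
    throw new Error('VK is not a member of the VK tree');
  }
}
```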
sirasistant authored Jul 8, 2024
1 parent 0414eb5 commit 8631237
Showing 136 changed files with 3,295 additions and 922 deletions.
32 changes: 27 additions & 5 deletions .github/workflows/ci.yml
@@ -98,7 +98,11 @@ jobs:
# prepare images locally, tagged by commit hash
- name: "Build E2E Image"
timeout-minutes: 40
run: earthly-ci ./yarn-project+export-e2e-test-images
run: |
earthly-ci \
--secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \
--secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \
./yarn-project+export-e2e-test-images
# We base our e2e list used in e2e-x86 off the targets in ./yarn-project/end-to-end
# (Note ARM uses just 2 tests as a smoketest)
- name: Create list of non-bench end-to-end jobs
@@ -377,7 +381,11 @@ jobs:
- name: "Format noir-projects"
working-directory: ./noir-projects/
timeout-minutes: 40
run: earthly-ci --no-output ./+format
run: |
earthly-ci --no-output \
--secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \
--secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \
./+format
noir-test:
needs: [setup, changes]
@@ -430,7 +438,11 @@ jobs:
concurrency_key: noir-projects-x86
- name: "Noir Projects"
timeout-minutes: 40
run: earthly-ci --no-output ./noir-projects/+test
run: |
earthly-ci --no-output \
--secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \
--secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \
./noir-projects/+test
avm-format:
needs: [setup, changes]
@@ -515,7 +527,14 @@ jobs:
- name: "Docs Preview"
if: github.event.number
timeout-minutes: 40
run: earthly-ci --no-output ./docs/+deploy-preview --ENV=staging --PR=${{ github.event.number }} --AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} --NETLIFY_AUTH_TOKEN=${{ secrets.NETLIFY_AUTH_TOKEN }} --NETLIFY_SITE_ID=${{ secrets.NETLIFY_SITE_ID }}
run: |
earthly-ci --no-output \
--secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \
--secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \
./docs/+deploy-preview --ENV=staging --PR=${{ github.event.number }} \
--AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} \
--NETLIFY_AUTH_TOKEN=${{ secrets.NETLIFY_AUTH_TOKEN }} \
--NETLIFY_SITE_ID=${{ secrets.NETLIFY_SITE_ID }}
bb-bench:
runs-on: ubuntu-20.04
@@ -617,7 +636,10 @@ jobs:
working-directory: ./noir-projects/
timeout-minutes: 40
run: |
earthly-ci --artifact +gates-report/gates_report.json
earthly-ci \
--secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \
--secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \
--artifact +gates-report/gates_report.json
mv gates_report.json ../protocol_circuits_report.json
- name: Compare gates reports
22 changes: 18 additions & 4 deletions l1-contracts/src/core/Rollup.sol
@@ -45,17 +45,25 @@ contract Rollup is IRollup {
// See https://github.com/AztecProtocol/aztec-packages/issues/1614
uint256 public lastWarpedBlockTs;

bytes32 public vkTreeRoot;

using EnumerableSet for EnumerableSet.AddressSet;

EnumerableSet.AddressSet private sequencers;

constructor(IRegistry _registry, IAvailabilityOracle _availabilityOracle, IERC20 _gasToken) {
constructor(
IRegistry _registry,
IAvailabilityOracle _availabilityOracle,
IERC20 _gasToken,
bytes32 _vkTreeRoot
) {
verifier = new MockVerifier();
REGISTRY = _registry;
AVAILABILITY_ORACLE = _availabilityOracle;
GAS_TOKEN = _gasToken;
INBOX = new Inbox(address(this), Constants.L1_TO_L2_MSG_SUBTREE_HEIGHT);
OUTBOX = new Outbox(address(this));
vkTreeRoot = _vkTreeRoot;
VERSION = 1;
}

@@ -85,6 +93,10 @@ contract Rollup is IRollup {
verifier = IVerifier(_verifier);
}

function setVkTreeRoot(bytes32 _vkTreeRoot) external {
vkTreeRoot = _vkTreeRoot;
}

/**
* @notice Process an incoming L2 block and progress the state
* @param _header - The L2 block header
@@ -113,17 +125,19 @@ contract Rollup is IRollup {
}

bytes32[] memory publicInputs =
new bytes32[](2 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH);
new bytes32[](3 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH);
// the archive tree root
publicInputs[0] = _archive;
// this is the _next_ available leaf in the archive tree
// normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed)
// but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N
publicInputs[1] = bytes32(header.globalVariables.blockNumber + 1);

publicInputs[2] = vkTreeRoot;

bytes32[] memory headerFields = HeaderLib.toFields(header);
for (uint256 i = 0; i < headerFields.length; i++) {
publicInputs[i + 2] = headerFields[i];
publicInputs[i + 3] = headerFields[i];
}

// the block proof is recursive, which means it comes with an aggregation object
@@ -135,7 +149,7 @@
assembly {
part := calldataload(add(_aggregationObject.offset, mul(i, 32)))
}
publicInputs[i + 2 + Constants.HEADER_LENGTH] = part;
publicInputs[i + 3 + Constants.HEADER_LENGTH] = part;
}

if (!verifier.verify(_proof, publicInputs)) {
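For reference, the public-inputs layout verified above can be assembled client-side roughly as follows (a sketch only; the function signature and field types are placeholders rather than the repo's actual TS interfaces): slot 0 holds the archive root, slot 1 the next archive leaf index, slot 2 the new VK tree root, followed by the header fields and aggregation object shifted by one slot.

```typescript
// Sketch of the public-inputs layout expected by the updated verification above.
// Array lengths stand in for HEADER_LENGTH and AGGREGATION_OBJECT_LENGTH.
function assembleRollupPublicInputs(
  archive: bigint,             // archive tree root
  blockNumber: number,         // header.globalVariables.blockNumber
  vkTreeRoot: bigint,          // new in this PR
  headerFields: bigint[],      // equivalent of HeaderLib.toFields(header)
  aggregationObject: bigint[],
): bigint[] {
  const publicInputs: bigint[] = [];
  publicInputs[0] = archive;                 // the archive tree root
  publicInputs[1] = BigInt(blockNumber + 1); // next available archive leaf
  publicInputs[2] = vkTreeRoot;              // slot added by this change
  headerFields.forEach((f, i) => (publicInputs[i + 3] = f));                            // was i + 2
  aggregationObject.forEach((f, i) => (publicInputs[i + 3 + headerFields.length] = f)); // was i + 2 + HEADER_LENGTH
  return publicInputs;
}
```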
34 changes: 26 additions & 8 deletions l1-contracts/src/core/libraries/ConstantsGen.sol
@@ -49,13 +49,12 @@ library Constants {
uint256 internal constant MAX_UNENCRYPTED_LOGS_PER_TX = 8;
uint256 internal constant MAX_PUBLIC_DATA_HINTS = 128;
uint256 internal constant NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP = 16;
uint256 internal constant VK_TREE_HEIGHT = 3;
uint256 internal constant VK_TREE_HEIGHT = 5;
uint256 internal constant FUNCTION_TREE_HEIGHT = 5;
uint256 internal constant NOTE_HASH_TREE_HEIGHT = 32;
uint256 internal constant PUBLIC_DATA_TREE_HEIGHT = 40;
uint256 internal constant NULLIFIER_TREE_HEIGHT = 20;
uint256 internal constant L1_TO_L2_MSG_TREE_HEIGHT = 16;
uint256 internal constant ROLLUP_VK_TREE_HEIGHT = 8;
uint256 internal constant ARTIFACT_FUNCTION_TREE_MAX_HEIGHT = 5;
uint256 internal constant NULLIFIER_TREE_ID = 0;
uint256 internal constant NOTE_HASH_TREE_ID = 1;
@@ -71,6 +70,25 @@ library Constants {
uint256 internal constant PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH = 34;
uint256 internal constant L1_TO_L2_MSG_SUBTREE_HEIGHT = 4;
uint256 internal constant L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH = 12;
uint256 internal constant PRIVATE_KERNEL_INIT_INDEX = 0;
uint256 internal constant PRIVATE_KERNEL_INNER_INDEX = 1;
uint256 internal constant PRIVATE_KERNEL_RESET_FULL_INDEX = 2;
uint256 internal constant PRIVATE_KERNEL_RESET_BIG_INDEX = 3;
uint256 internal constant PRIVATE_KERNEL_RESET_MEDIUM_INDEX = 4;
uint256 internal constant PRIVATE_KERNEL_RESET_SMALL_INDEX = 5;
uint256 internal constant PRIVATE_KERNEL_TAIL_INDEX = 10;
uint256 internal constant PRIVATE_KERNEL_TAIL_TO_PUBLIC_INDEX = 11;
uint256 internal constant EMPTY_NESTED_INDEX = 12;
uint256 internal constant PRIVATE_KERNEL_EMPTY_INDEX = 13;
uint256 internal constant PUBLIC_KERNEL_SETUP_INDEX = 14;
uint256 internal constant PUBLIC_KERNEL_APP_LOGIC_INDEX = 15;
uint256 internal constant PUBLIC_KERNEL_TEARDOWN_INDEX = 16;
uint256 internal constant PUBLIC_KERNEL_TAIL_INDEX = 17;
uint256 internal constant BASE_PARITY_INDEX = 18;
uint256 internal constant ROOT_PARITY_INDEX = 19;
uint256 internal constant BASE_ROLLUP_INDEX = 20;
uint256 internal constant MERGE_ROLLUP_INDEX = 21;
uint256 internal constant ROOT_ROLLUP_INDEX = 22;
uint256 internal constant FUNCTION_SELECTOR_NUM_BYTES = 4;
uint256 internal constant ARGS_HASH_CHUNK_LENGTH = 16;
uint256 internal constant ARGS_HASH_CHUNK_COUNT = 16;
@@ -164,16 +182,16 @@ library Constants {
uint256 internal constant VALIDATION_REQUESTS_LENGTH = 1026;
uint256 internal constant PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 3;
uint256 internal constant COMBINED_ACCUMULATED_DATA_LENGTH = 333;
uint256 internal constant COMBINED_CONSTANT_DATA_LENGTH = 40;
uint256 internal constant COMBINED_CONSTANT_DATA_LENGTH = 41;
uint256 internal constant PUBLIC_CALL_STACK_ITEM_COMPRESSED_LENGTH = 16;
uint256 internal constant CALL_REQUEST_LENGTH = 7;
uint256 internal constant PRIVATE_ACCUMULATED_DATA_LENGTH = 1168;
uint256 internal constant PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 2243;
uint256 internal constant PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 2244;
uint256 internal constant PUBLIC_ACCUMULATED_DATA_LENGTH = 983;
uint256 internal constant PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3258;
uint256 internal constant KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 383;
uint256 internal constant CONSTANT_ROLLUP_DATA_LENGTH = 14;
uint256 internal constant BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 31;
uint256 internal constant PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3259;
uint256 internal constant KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 384;
uint256 internal constant CONSTANT_ROLLUP_DATA_LENGTH = 11;
uint256 internal constant BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 28;
uint256 internal constant ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH = 9;
uint256 internal constant GET_NOTES_ORACLE_RETURN_LENGTH = 674;
uint256 internal constant NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP = 2048;
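The new *_INDEX constants pin each protocol circuit's VK to a fixed leaf of the VK tree; with VK_TREE_HEIGHT = 5 the tree has 2^5 = 32 leaves, so the highest index used (ROOT_ROLLUP_INDEX = 22) fits with room to spare and unused slots remain empty leaves. A minimal sketch of assembling such a tree, with the pairing hash supplied by the caller as a stand-in for the protocol's hash:

```typescript
// Sketch: place each circuit's VK hash at its fixed index in a 2^VK_TREE_HEIGHT-leaf tree.
// Indices mirror the constants above; the pairing hash is supplied by the caller.
const VK_TREE_HEIGHT = 5;

function computeVkTreeRoot(
  vkHashesByIndex: Map<number, bigint>,        // e.g. 20 (BASE_ROLLUP_INDEX) -> hash of its VK
  hashPair: (l: bigint, r: bigint) => bigint,
): bigint {
  let level: bigint[] = Array.from({ length: 1 << VK_TREE_HEIGHT }, (_, i) => vkHashesByIndex.get(i) ?? 0n);
  while (level.length > 1) {
    const next: bigint[] = [];
    for (let i = 0; i < level.length; i += 2) next.push(hashPair(level[i], level[i + 1]));
    level = next;
  }
  return level[0];
}
```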
2 changes: 1 addition & 1 deletion l1-contracts/test/Rollup.t.sol
@@ -40,7 +40,7 @@ contract RollupTest is DecoderBase {
registry = new Registry();
availabilityOracle = new AvailabilityOracle();
portalERC20 = new PortalERC20();
rollup = new Rollup(registry, availabilityOracle, IERC20(address(portalERC20)));
rollup = new Rollup(registry, availabilityOracle, IERC20(address(portalERC20)), bytes32(0));
inbox = Inbox(address(rollup.INBOX()));
outbox = Outbox(address(rollup.OUTBOX()));

3 changes: 2 additions & 1 deletion l1-contracts/test/portals/TokenPortal.t.sol
@@ -59,7 +59,8 @@ contract TokenPortalTest is Test {
function setUp() public {
registry = new Registry();
portalERC20 = new PortalERC20();
rollup = new Rollup(registry, new AvailabilityOracle(), IERC20(address(portalERC20)));
rollup =
new Rollup(registry, new AvailabilityOracle(), IERC20(address(portalERC20)), bytes32(0));
inbox = rollup.INBOX();
outbox = rollup.OUTBOX();

3 changes: 2 additions & 1 deletion l1-contracts/test/portals/UniswapPortal.t.sol
@@ -52,7 +52,8 @@ contract UniswapPortalTest is Test {

registry = new Registry();
PortalERC20 portalERC20 = new PortalERC20();
rollup = new Rollup(registry, new AvailabilityOracle(), IERC20(address(portalERC20)));
rollup =
new Rollup(registry, new AvailabilityOracle(), IERC20(address(portalERC20)), bytes32(0));
registry.upgrade(address(rollup), address(rollup.INBOX()), address(rollup.OUTBOX()));
portalERC20.mint(address(rollup), 1000000);

4 changes: 4 additions & 0 deletions noir-projects/Earthfile
@@ -5,6 +5,8 @@ source:

# Install nargo
COPY ../noir/+nargo/nargo /usr/bin/nargo
# Install bb
COPY ../barretenberg/cpp/+preset-release/bin/bb /usr/src/barretenberg/cpp/build/bin/bb

WORKDIR /usr/src/noir-projects

@@ -25,6 +27,8 @@ build-contracts:

build-protocol-circuits:
FROM +source
RUN --secret AWS_ACCESS_KEY_ID --secret AWS_SECRET_ACCESS_KEY mkdir -p ~/.aws && \
bash -c 'echo -e "[default]\naws_access_key_id=$AWS_ACCESS_KEY_ID\naws_secret_access_key=$AWS_SECRET_ACCESS_KEY" > ~/.aws/credentials'
RUN cd noir-protocol-circuits && NARGO=nargo ./bootstrap.sh
SAVE ARTIFACT noir-protocol-circuits

42 changes: 40 additions & 2 deletions noir-projects/noir-protocol-circuits/bootstrap.sh
@@ -16,8 +16,46 @@ if [ -n "$CMD" ]; then
fi

yarn
node ./index.js
node ./generate_variants.js

echo "Compiling protocol circuits..."
NARGO=${NARGO:-../../noir/noir-repo/target/release/nargo}
$NARGO compile --silence-warnings --use-legacy
$NARGO compile --silence-warnings --use-legacy

mkdir -p "./target/keys"

AVAILABLE_MEMORY=0

case "$(uname)" in
Linux*)
# Check available memory on Linux
AVAILABLE_MEMORY=$(awk '/MemTotal/ { printf $2 }' /proc/meminfo)
;;
*)
echo "Parallel vk generation not supported on this operating system"
;;
esac
# This value may be too low.
# If vk generation fails with more free memory than this value, it should be increased.
MIN_PARALLEL_VK_GENERATION_MEMORY=500000000

if [[ AVAILABLE_MEMORY -lt MIN_PARALLEL_VK_GENERATION_MEMORY ]]; then
echo "System does not have enough memory for parallel vk generation, falling back to sequential"

for pathname in "./target"/*.json; do
node ./scripts/generate_vk_json.js "$pathname" "./target/keys"
done

else

echo "Generating vks in parallel..."
for pathname in "./target"/*.json; do
node ./scripts/generate_vk_json.js "$pathname" "./target/keys" &
done

for job in $(jobs -p); do
wait $job || exit 1
done

fi

@@ -3,14 +3,19 @@ use dep::types::{constants::NUM_MSGS_PER_BASE_PARITY, merkle_tree::MerkleTree, u

struct BaseParityInputs {
msgs: [Field; NUM_MSGS_PER_BASE_PARITY],
vk_tree_root: Field,
}

impl BaseParityInputs {
pub fn base_parity_circuit(self) -> ParityPublicInputs {
let sha_tree = Sha256MerkleTree::new(self.msgs);
let pedersen_tree = MerkleTree::new(self.msgs);

ParityPublicInputs { sha_root: sha_tree.get_root(), converted_root: pedersen_tree.get_root() }
ParityPublicInputs {
sha_root: sha_tree.get_root(),
converted_root: pedersen_tree.get_root(),
vk_tree_root: self.vk_tree_root
}
}
}

@@ -24,7 +29,7 @@ fn test_sha_root_matches_frontier_tree() {
0x2806c860af67e9cd50000378411b8c4c4db172ceb2daa862b259b689ccbdc1
];

let base_parity_inputs = BaseParityInputs { msgs };
let base_parity_inputs = BaseParityInputs { msgs, vk_tree_root: 42 };
let public_inputs = base_parity_inputs.base_parity_circuit();

// 31 byte truncated root hash
@@ -2,32 +2,36 @@ use dep::types::{traits::{Empty, Serialize, Deserialize}};

struct ParityPublicInputs {
sha_root: Field,
converted_root: Field,
converted_root: Field,
vk_tree_root: Field,
}

impl Empty for ParityPublicInputs {
fn empty() -> Self {
ParityPublicInputs {
sha_root: 0,
converted_root: 0,
vk_tree_root: 0,
}
}
}

impl Serialize<2> for ParityPublicInputs {
fn serialize(self) -> [Field; 2] {
let mut fields = [0; 2];
impl Serialize<3> for ParityPublicInputs {
fn serialize(self) -> [Field; 3] {
let mut fields = [0; 3];
fields[0] = self.sha_root;
fields[1] = self.converted_root;
fields[2] = self.vk_tree_root;
fields
}
}

impl Deserialize<2> for ParityPublicInputs {
fn deserialize(fields: [Field; 2]) -> Self {
impl Deserialize<3> for ParityPublicInputs {
fn deserialize(fields: [Field; 3]) -> Self {
ParityPublicInputs {
sha_root: fields[0],
converted_root: fields[1],
vk_tree_root: fields[2],
}
}
}
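A TypeScript-side mirror of the new three-field layout might look like the following (illustrative only; the actual serializers live elsewhere in the repo and may differ):

```typescript
// Illustrative mirror of ParityPublicInputs::serialize / deserialize with the added vk_tree_root.
interface ParityPublicInputs {
  shaRoot: bigint;
  convertedRoot: bigint;
  vkTreeRoot: bigint;
}

const serializeParityPublicInputs = (p: ParityPublicInputs): bigint[] =>
  [p.shaRoot, p.convertedRoot, p.vkTreeRoot];

const deserializeParityPublicInputs = (fields: bigint[]): ParityPublicInputs => ({
  shaRoot: fields[0],
  convertedRoot: fields[1],
  vkTreeRoot: fields[2],
});
```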