Skip to content

Commit

Permalink
chore: Merge provernet to master (#8373)
Browse files Browse the repository at this point in the history
This PR brings the provernet changes back to master

---------

Co-authored-by: Santiago Palladino <santiago@aztecprotocol.com>
Co-authored-by: Alex Gherghisan <alexghr@users.noreply.github.com>
Co-authored-by: spypsy <spypsy@users.noreply.github.com>
  • Loading branch information
4 people authored Sep 4, 2024
1 parent 176bce6 commit e1dc987
Show file tree
Hide file tree
Showing 29 changed files with 776 additions and 15 deletions.
42 changes: 39 additions & 3 deletions .github/workflows/devnet-deploys.yml
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,8 @@ env:
TF_VAR_FORK_MNEMONIC: ${{ secrets.FORK_MNEMONIC }}
TF_VAR_INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }}
TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets.DEVNET_API_KEY }}
TF_VAR_MAINNET_FORK_CPU_UNITS: 2048
TF_VAR_MAINNET_FORK_MEMORY_UNITS: 4096

# Faucet
TF_VAR_FAUCET_ACCOUNT_INDEX: 9
Expand Down Expand Up @@ -123,6 +125,12 @@ jobs:
min_txs_per_block: ${{ steps.set_network_vars.outputs.min_txs_per_block }}
bot_flush_setup_txs: ${{ steps.set_network_vars.outputs.bot_flush_setup_txs }}
bot_max_pending_txs: ${{ steps.set_network_vars.outputs.bot_max_pending_txs }}
mainnet_fork_cpu_units: ${{ steps.set_network_vars.outputs.mainnet_fork_cpu_units }}
mainnet_fork_memory_units: ${{ steps.set_network_vars.outputs.mainnet_fork_memory_units }}
bot_skip_simulation: ${{ steps.set_network_vars.outputs.bot_skip_simulation }}
bot_l2_gas_limit: ${{ steps.set_network_vars.outputs.bot_l2_gas_limit }}
bot_da_gas_limit: ${{ steps.set_network_vars.outputs.bot_da_gas_limit }}
bot_count: ${{ steps.set_network_vars.outputs.bot_count }}
steps:
- name: Set network vars
shell: bash
Expand All @@ -135,7 +143,7 @@ jobs:
echo "branch_name=devnet" >> $GITHUB_OUTPUT
echo "network_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT
echo "network_fork_admin_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT
echo "agents_per_prover=4" >> $GITHUB_OUTPUT
echo "agents_per_prover=2" >> $GITHUB_OUTPUT
echo "bot_interval=180" >> $GITHUB_OUTPUT
echo "node_tcp_range_start=40100" >> $GITHUB_OUTPUT
echo "node_udp_range_start=45100" >> $GITHUB_OUTPUT
Expand All @@ -147,9 +155,15 @@ jobs:
echo "faucet_lb_priority=601" >> $GITHUB_OUTPUT
echo "min_txs_per_block=1" >> $GITHUB_OUTPUT
echo "max_txs_per_block=64" >> $GITHUB_OUTPUT
echo "bot_follow_chain=NONE" >> $GITHUB_OUTPUT
echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT
echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT
echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT
echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT
echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT
echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT
echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT
echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT
echo "bot_count=1" >> $GITHUB_OUTPUT
elif [ "$BRANCH_NAME" = "provernet" ]
then
echo "deploy_tag=provernet" >> $GITHUB_OUTPUT
Expand All @@ -171,14 +185,20 @@ jobs:
echo "bot_follow_chain=NONE" >> $GITHUB_OUTPUT
echo "bot_flush_setup_txs=true" >> $GITHUB_OUTPUT
echo "bot_max_pending_txs=32" >> $GITHUB_OUTPUT
echo "mainnet_fork_cpu_units=8192" >> $GITHUB_OUTPUT
echo "mainnet_fork_memory_units=32768" >> $GITHUB_OUTPUT
echo "bot_skip_simulation=true" >> $GITHUB_OUTPUT
echo "bot_l2_gas_limit=1000000000" >> $GITHUB_OUTPUT
echo "bot_da_gas_limit=1000000000" >> $GITHUB_OUTPUT
echo "bot_count=1" >> $GITHUB_OUTPUT
elif [ "$BRANCH_NAME" = "alphanet" ]
then
echo "deploy_tag=alphanet" >> $GITHUB_OUTPUT
echo "branch_name=alphanet" >> $GITHUB_OUTPUT
echo "network_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT
echo "network_fork_admin_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT
echo "agents_per_prover=1" >> $GITHUB_OUTPUT
echo "bot_interval=30" >> $GITHUB_OUTPUT
echo "bot_interval=10" >> $GITHUB_OUTPUT
echo "node_tcp_range_start=40000" >> $GITHUB_OUTPUT
echo "node_udp_range_start=45000" >> $GITHUB_OUTPUT
echo "prover_node_tcp_range_start=41000" >> $GITHUB_OUTPUT
Expand All @@ -192,6 +212,12 @@ jobs:
echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT
echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT
echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT
echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT
echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT
echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT
echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT
echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT
echo "bot_count=1" >> $GITHUB_OUTPUT
else
echo "Unrecognized Branch!!"
exit 1
Expand Down Expand Up @@ -462,6 +488,12 @@ jobs:
TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.prover_node_lb_priority_range_start }}
TF_VAR_SEQ_MIN_TX_PER_BLOCK: 1
TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }}
TF_VAR_MAINNET_FORK_CPU_UNITS: ${{ needs.set-network.outputs.mainnet_fork_cpu_units }}
TF_VAR_MAINNET_FORK_MEMORY_UNITS: ${{ needs.set-network.outputs.mainnet_fork_memory_units }}
TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }}
TF_VAR_BOT_L2_GAS_LIMIT: ${{ needs.set-network.outputs.bot_l2_gas_limit }}
TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }}
TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }}
steps:
- uses: actions/checkout@v4
with:
Expand Down Expand Up @@ -679,6 +711,10 @@ jobs:
TF_VAR_BOT_FOLLOW_CHAIN: ${{ needs.set-network.outputs.bot_follow_chain }}
TF_VAR_PROVING_ENABLED: true
TF_VAR_BOT_NO_START: false
TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }}
TF_VAR_BOT_L2_GAS_LIMIT: ${{ needs.set-network.outputs.bot_l2_gas_limit }}
TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }}
TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }}
steps:
- uses: actions/checkout@v4
with:
Expand Down
4 changes: 2 additions & 2 deletions iac/mainnet-fork/terraform/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -102,8 +102,8 @@ resource "aws_ecs_task_definition" "aztec_mainnet_fork" {
family = "${var.DEPLOY_TAG}-mainnet-fork"
requires_compatibilities = ["FARGATE"]
network_mode = "awsvpc"
cpu = "2048"
memory = "4096"
cpu = var.MAINNET_FORK_CPU_UNITS
memory = var.MAINNET_FORK_MEMORY_UNITS
execution_role_arn = data.terraform_remote_state.setup_iac.outputs.ecs_task_execution_role_arn

volume {
Expand Down
10 changes: 10 additions & 0 deletions iac/mainnet-fork/terraform/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,13 @@ variable "DEPLOY_TAG" {
variable "L1_CHAIN_ID" {
type = string
}

# CPU units for the mainnet-fork ECS task; consumed as `cpu` on the
# aws_ecs_task_definition in iac/mainnet-fork/terraform/main.tf.
variable "MAINNET_FORK_CPU_UNITS" {
  type    = string
  default = "2048"
}

# Memory units for the mainnet-fork ECS task; consumed as `memory` on the
# same task definition. NOTE(review): Fargate interprets this as MiB — confirm.
variable "MAINNET_FORK_MEMORY_UNITS" {
  type    = string
  default = "4096"
}
5 changes: 4 additions & 1 deletion yarn-project/archiver/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,10 @@
"name": "@aztec/archiver",
"version": "0.1.0",
"type": "module",
"exports": "./dest/index.js",
"exports": {
".": "./dest/index.js",
"./data-retrieval": "./dest/archiver/data_retrieval.js"
},
"typedocOptions": {
"entryPoints": [
"./src/index.ts"
Expand Down
24 changes: 23 additions & 1 deletion yarn-project/archiver/src/archiver/data_retrieval.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
import { type Body, type InboxLeaf } from '@aztec/circuit-types';
import { type AppendOnlyTreeSnapshot, Fr, type Header } from '@aztec/circuits.js';
import { type AppendOnlyTreeSnapshot, Fr, type Header, type Proof } from '@aztec/circuits.js';
import { type EthAddress } from '@aztec/foundation/eth-address';
import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
import { RollupAbi } from '@aztec/l1-artifacts';

import { type Hex, type PublicClient, getAbiItem } from 'viem';

import {
getBlockProofFromSubmitProofTx,
getL2BlockProposedLogs,
getMessageSentLogs,
getTxsPublishedLogs,
Expand Down Expand Up @@ -163,3 +164,24 @@ export async function retrieveL2ProofVerifiedEvents(
txHash: log.transactionHash,
}));
}

/**
 * Retrieves submitted proofs from the rollup contract by scanning L2ProofVerified events
 * and decoding the proof from each submitting tx's calldata.
 * @param publicClient - The viem public client used for event and transaction retrieval.
 * @param rollupAddress - Address of the rollup contract to scan.
 * @param searchStartBlock - First L1 block (inclusive) to search.
 * @param searchEndBlock - Optional last L1 block (inclusive) to search.
 * @returns The retrieved proofs plus the last L1 block processed.
 */
export async function retrieveL2ProofsFromRollup(
  publicClient: PublicClient,
  rollupAddress: EthAddress,
  searchStartBlock: bigint,
  searchEndBlock?: bigint,
): Promise<DataRetrieval<{ proof: Proof; proverId: Fr; l2BlockNumber: bigint; txHash: `0x${string}` }>> {
  const logs = await retrieveL2ProofVerifiedEvents(publicClient, rollupAddress, searchStartBlock, searchEndBlock);
  // With no events found, report the block just before the window so callers resume from searchStartBlock.
  const lastProcessedL1BlockNumber = logs.length > 0 ? logs.at(-1)!.l1BlockNumber : searchStartBlock - 1n;

  // Each proof lookup is an independent getTransaction call, so fetch them in
  // parallel instead of awaiting sequentially; Promise.all preserves log order.
  const retrievedData = await Promise.all(
    logs.map(async ({ txHash, proverId, l2BlockNumber }) => {
      const proofData = await getBlockProofFromSubmitProofTx(publicClient, txHash, l2BlockNumber, proverId);
      return { proof: proofData.proof, proverId: proofData.proverId, l2BlockNumber, txHash };
    }),
  );

  return {
    retrievedData,
    lastProcessedL1BlockNumber,
  };
}
56 changes: 55 additions & 1 deletion yarn-project/archiver/src/archiver/eth_log_handlers.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Body, InboxLeaf } from '@aztec/circuit-types';
import { AppendOnlyTreeSnapshot, Header } from '@aztec/circuits.js';
import { AppendOnlyTreeSnapshot, Header, Proof } from '@aztec/circuits.js';
import { type EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import { numToUInt32BE } from '@aztec/foundation/serialize';
Expand Down Expand Up @@ -257,3 +257,57 @@ export function getMessageSentLogs(
toBlock: toBlock + 1n, // the toBlock argument in getLogs is exclusive
});
}

/** Proof data decoded from the calldata of an L1 `submitBlockRootProof` transaction. */
export type SubmitBlockProof = {
  // L2 block header deserialized from the calldata.
  header: Header;
  // Archive root field from the calldata (the `archiveHex` argument).
  archiveRoot: Fr;
  // Id of the prover that submitted the proof.
  proverId: Fr;
  // Raw aggregation object bytes, as submitted.
  aggregationObject: Buffer;
  // The submitted proof itself.
  proof: Proof;
};

/**
 * Extracts a submitted block proof (header, archive root, prover id, aggregation
 * object and proof) from the calldata of an L1 `submitBlockRootProof` transaction.
 * Assumes that the proof was published from an EOA.
 * TODO: Add retries and error management.
 * @param publicClient - The viem public client to use for transaction retrieval.
 * @param txHash - Hash of the tx that submitted the proof.
 * @param l2BlockNum - Expected L2 block number; checked against the decoded header.
 * @param expectedProverId - Expected prover id; checked against the calldata.
 * @returns The proof data from the calldata, deserialized.
 * @throws If the tx called a different function, or if the block number or prover id do not match.
 */
export async function getBlockProofFromSubmitProofTx(
  publicClient: PublicClient,
  txHash: `0x${string}`,
  l2BlockNum: bigint,
  expectedProverId: Fr,
): Promise<SubmitBlockProof> {
  const { input: data } = await publicClient.getTransaction({ hash: txHash });
  const { functionName, args } = decodeFunctionData({
    abi: RollupAbi,
    data,
  });

  if (functionName !== 'submitBlockRootProof') {
    throw new Error(`Unexpected method called ${functionName}`);
  }
  const [headerHex, archiveHex, proverIdHex, aggregationObjectHex, proofHex] = args!;

  const header = Header.fromBuffer(Buffer.from(hexToBytes(headerHex)));
  const proverId = Fr.fromString(proverIdHex);

  // Sanity-check the decoded calldata against what the caller expected.
  const blockNumberFromHeader = header.globalVariables.blockNumber.toBigInt();
  if (blockNumberFromHeader !== l2BlockNum) {
    throw new Error(`Block number mismatch: expected ${l2BlockNum} but got ${blockNumberFromHeader}`);
  }
  if (!proverId.equals(expectedProverId)) {
    throw new Error(`Prover ID mismatch: expected ${expectedProverId} but got ${proverId}`);
  }

  return {
    header,
    proverId,
    aggregationObject: Buffer.from(hexToBytes(aggregationObjectHex)),
    archiveRoot: Fr.fromString(archiveHex),
    proof: Proof.fromBuffer(Buffer.from(hexToBytes(proofHex))),
  };
}
1 change: 1 addition & 0 deletions yarn-project/aztec/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
"@aztec/noir-protocol-circuits-types": "workspace:^",
"@aztec/p2p": "workspace:^",
"@aztec/p2p-bootstrap": "workspace:^",
"@aztec/proof-verifier": "workspace:^",
"@aztec/protocol-contracts": "workspace:^",
"@aztec/prover-client": "workspace:^",
"@aztec/prover-node": "workspace:^",
Expand Down
10 changes: 10 additions & 0 deletions yarn-project/aztec/src/cli/aztec_start_options.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import {
isBooleanConfigValue,
} from '@aztec/foundation/config';
import { bootnodeConfigMappings, p2pConfigMappings } from '@aztec/p2p';
import { proofVerifierConfigMappings } from '@aztec/proof-verifier';
import { proverClientConfigMappings } from '@aztec/prover-client';
import { proverNodeConfigMappings } from '@aztec/prover-node';
import { allPxeConfigMappings } from '@aztec/pxe';
Expand Down Expand Up @@ -303,6 +304,15 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = {
},
...getOptions('bot', botConfigMappings),
],
'PROOF VERIFIER': [
{
flag: '--proof-verifier',
description: 'Starts Aztec Proof Verifier with options',
defaultValue: undefined,
envVar: undefined,
},
...getOptions('proofVerifier', proofVerifierConfigMappings),
],
TXE: [
{
flag: '--txe',
Expand Down
5 changes: 4 additions & 1 deletion yarn-project/aztec/src/cli/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,9 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge
if (options.node) {
const { startNode } = await import('./cmds/start_node.js');
services = await startNode(options, signalHandlers, userLog);
} else if (options.proofVerifier) {
const { startProofVerifier } = await import('./cmds/start_proof_verifier.js');
services = await startProofVerifier(options, signalHandlers, userLog);
} else if (options.bot) {
const { startBot } = await import('./cmds/start_bot.js');
services = await startBot(options, signalHandlers, userLog);
Expand All @@ -101,7 +104,7 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge
userLog(`Cannot run a standalone sequencer without a node`);
process.exit(1);
} else {
userLog(`No module specified to start ${JSON.stringify(options, null, 2)}`);
userLog(`No module specified to start`);
process.exit(1);
}
}
Expand Down
26 changes: 26 additions & 0 deletions yarn-project/aztec/src/cli/cmds/start_proof_verifier.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import { type ServerList } from '@aztec/foundation/json-rpc/server';
import { type LogFn } from '@aztec/foundation/log';
import { ProofVerifier, proofVerifierConfigMappings } from '@aztec/proof-verifier';
import { createAndStartTelemetryClient, telemetryClientConfigMappings } from '@aztec/telemetry-client/start';

import { extractRelevantOptions } from '../util.js';

export async function startProofVerifier(
options: any,
signalHandlers: (() => Promise<void>)[],
userLog: LogFn,
): Promise<ServerList> {
const services: ServerList = [];

const config = extractRelevantOptions(options, proofVerifierConfigMappings, 'proofVerifier');

const telemetryConfig = extractRelevantOptions(options, telemetryClientConfigMappings, 'tel');
const telemetry = await createAndStartTelemetryClient(telemetryConfig);
const proofVerifier = await ProofVerifier.new(config, telemetry);

userLog('Starting proof verifier');
proofVerifier.start();

signalHandlers.push(() => proofVerifier.stop());
return services;
}
3 changes: 3 additions & 0 deletions yarn-project/aztec/terraform/bot/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,9 @@ resource "aws_ecs_task_definition" "aztec-bot" {
{ name = "NETWORK", value = var.DEPLOY_TAG },
{ name = "BOT_FLUSH_SETUP_TRANSACTIONS", value = tostring(var.BOT_FLUSH_SETUP_TRANSACTIONS) },
{ name = "BOT_MAX_PENDING_TXS", value = tostring(var.BOT_MAX_PENDING_TXS) },
{ name = "BOT_SKIP_PUBLIC_SIMULATION", value = tostring(var.BOT_SKIP_PUBLIC_SIMULATION) },
{ name = "BOT_L2_GAS_LIMIT", value = var.BOT_L2_GAS_LIMIT },
{ name = "BOT_DA_GAS_LIMIT", value = var.BOT_DA_GAS_LIMIT },
{ name = "LOG_JSON", value = "1" }
]
logConfiguration = {
Expand Down
15 changes: 15 additions & 0 deletions yarn-project/aztec/terraform/bot/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -66,3 +66,18 @@ variable "BOT_MAX_PENDING_TXS" {
type = number
default = 1
}

# Whether the bot skips public simulation of txs; forwarded to the container as
# the BOT_SKIP_PUBLIC_SIMULATION env var (see aztec/terraform/bot/main.tf).
variable "BOT_SKIP_PUBLIC_SIMULATION" {
  type    = bool
  default = false
}

# L2 gas limit for bot txs; forwarded as the BOT_L2_GAS_LIMIT env var.
# NOTE(review): no default, so callers must supply it — the devnet workflow
# passes an empty string for non-provernet networks; confirm that is intended.
variable "BOT_L2_GAS_LIMIT" {
  type = string
}

# DA gas limit for bot txs; forwarded as the BOT_DA_GAS_LIMIT env var.
# NOTE(review): no default, so callers must supply it — may be an empty string.
variable "BOT_DA_GAS_LIMIT" {
  type = string
}


Loading

0 comments on commit e1dc987

Please sign in to comment.