diff --git a/Cargo.lock b/Cargo.lock
index 50f0784d9fa2..8cf76eb7c1cb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -9211,6 +9211,7 @@ dependencies = [
  "ethabi",
  "hex",
  "num_enum 0.7.2",
+ "secrecy",
  "serde",
  "serde_json",
  "serde_with",
diff --git a/core/bin/zksync_server/src/main.rs b/core/bin/zksync_server/src/main.rs
index 84898d6da067..c518684b0e38 100644
--- a/core/bin/zksync_server/src/main.rs
+++ b/core/bin/zksync_server/src/main.rs
@@ -11,6 +11,7 @@ use zksync_config::{
     },
     fri_prover_group::FriProverGroupConfig,
     house_keeper::HouseKeeperConfig,
+    secrets::DataAvailabilitySecrets,
     BasicWitnessInputProducerConfig, ContractsConfig, DatabaseSecrets, ExperimentalVmConfig,
     ExternalPriceApiClientConfig, FriProofCompressorConfig, FriProverConfig,
     FriProverGatewayConfig, FriWitnessGeneratorConfig, FriWitnessVectorGeneratorConfig,
@@ -126,6 +127,7 @@ fn main() -> anyhow::Result<()> {
             consensus: config::read_consensus_secrets().context("read_consensus_secrets()")?,
             database: DatabaseSecrets::from_env().ok(),
             l1: L1Secrets::from_env().ok(),
+            data_availability: DataAvailabilitySecrets::from_env().ok(),
         },
     };
diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs
index 14db83b9f25a..4600b0f9e543 100644
--- a/core/bin/zksync_server/src/node_builder.rs
+++ b/core/bin/zksync_server/src/node_builder.rs
@@ -4,8 +4,8 @@ use anyhow::Context;
 use zksync_config::{
     configs::{
-        da_client::DAClient, eth_sender::PubdataSendingMode, wallets::Wallets, GeneralConfig,
-        Secrets,
+        da_client::DAClientConfig, eth_sender::PubdataSendingMode,
+        secrets::DataAvailabilitySecrets, wallets::Wallets, GeneralConfig, Secrets,
     },
     ContractsConfig, GenesisConfig,
 };
@@ -509,11 +509,14 @@ impl MainNodeBuilder {
             return Ok(self);
         };
 
-        match da_client_config.client {
-            DAClient::Avail(config) => {
-                self.node.add_layer(AvailWiringLayer::new(config));
+        let secrets = try_load_config!(self.secrets.data_availability);
+
+        match (da_client_config, secrets) {
+            (DAClientConfig::Avail(config), DataAvailabilitySecrets::Avail(secret)) => {
+                self.node.add_layer(AvailWiringLayer::new(config, secret));
             }
-            DAClient::ObjectStore(config) => {
+
+            (DAClientConfig::ObjectStore(config), _) => {
                 self.node
                     .add_layer(ObjectStorageClientWiringLayer::new(config));
             }
diff --git a/core/lib/basic_types/Cargo.toml b/core/lib/basic_types/Cargo.toml
index 84411405c2a4..616b959b0783 100644
--- a/core/lib/basic_types/Cargo.toml
+++ b/core/lib/basic_types/Cargo.toml
@@ -23,6 +23,7 @@ num_enum.workspace = true
 anyhow.workspace = true
 url = { workspace = true, features = ["serde"] }
 serde_with.workspace = true
+secrecy.workspace = true
 
 [dev-dependencies]
 bincode.workspace = true
diff --git a/core/lib/basic_types/src/lib.rs b/core/lib/basic_types/src/lib.rs
index 6e73d9f5facd..8b6a7f949dd1 100644
--- a/core/lib/basic_types/src/lib.rs
+++ b/core/lib/basic_types/src/lib.rs
@@ -28,6 +28,7 @@ pub mod commitment;
 pub mod network;
 pub mod protocol_version;
 pub mod prover_dal;
+pub mod seed_phrase;
 pub mod settlement;
 pub mod tee_types;
 pub mod url;
diff --git a/core/lib/basic_types/src/seed_phrase.rs b/core/lib/basic_types/src/seed_phrase.rs
new file mode 100644
index 000000000000..332bfd585945
--- /dev/null
+++ b/core/lib/basic_types/src/seed_phrase.rs
@@ -0,0 +1,20 @@
+use std::str::FromStr;
+
+use secrecy::{ExposeSecret, Secret};
+
+#[derive(Debug, Clone)]
+pub struct SeedPhrase(pub Secret<String>);
+
+impl PartialEq for SeedPhrase {
+    fn eq(&self, other: &Self) -> bool {
+        self.0.expose_secret().eq(other.0.expose_secret())
+    }
+}
+
+impl FromStr for SeedPhrase {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(SeedPhrase(s.parse()?))
+    }
+}
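The new `SeedPhrase` wrapper keeps the mnemonic behind `secrecy::Secret`, so it can be passed around and compared in tests without ending up in logs. A minimal usage sketch (not part of the diff; it assumes the `zksync_basic_types`, `secrecy`, and `anyhow` crates are available):

```rust
use secrecy::ExposeSecret;
use zksync_basic_types::seed_phrase::SeedPhrase;

fn main() -> anyhow::Result<()> {
    // Parsing only wraps the string; validating it as a BIP-39 mnemonic happens
    // later, inside the Avail SDK (`Mnemonic::parse` in `sdk.rs`).
    let phrase: SeedPhrase =
        "bottom drive obey lake curtain smoke basket hold race lonely fit walk".parse()?;

    // `Debug` is derived on the wrapper, so formatting it goes through `Secret`'s
    // redacting `Debug` impl instead of printing the mnemonic.
    println!("{phrase:?}");

    // The raw value is only reachable through an explicit `expose_secret()` call.
    assert!(phrase.0.expose_secret().starts_with("bottom"));
    Ok(())
}
```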
} +} + +impl FromStr for SeedPhrase { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + Ok(SeedPhrase(s.parse()?)) + } +} diff --git a/core/lib/config/src/configs/da_client/avail.rs b/core/lib/config/src/configs/da_client/avail.rs index e8d119787912..590dc5fef18a 100644 --- a/core/lib/config/src/configs/da_client/avail.rs +++ b/core/lib/config/src/configs/da_client/avail.rs @@ -1,11 +1,16 @@ use serde::Deserialize; +use zksync_basic_types::seed_phrase::SeedPhrase; #[derive(Clone, Debug, PartialEq, Deserialize)] pub struct AvailConfig { pub api_node_url: String, pub bridge_api_url: String, - pub seed: String, pub app_id: u32, pub timeout: usize, pub max_retries: usize, } + +#[derive(Clone, Debug, PartialEq)] +pub struct AvailSecrets { + pub seed_phrase: Option, +} diff --git a/core/lib/config/src/configs/da_client/mod.rs b/core/lib/config/src/configs/da_client/mod.rs index 38337438c10e..406305a77b16 100644 --- a/core/lib/config/src/configs/da_client/mod.rs +++ b/core/lib/config/src/configs/da_client/mod.rs @@ -1,5 +1,3 @@ -use serde::Deserialize; - use crate::{AvailConfig, ObjectStoreConfig}; pub mod avail; @@ -8,13 +6,7 @@ pub const AVAIL_CLIENT_CONFIG_NAME: &str = "Avail"; pub const OBJECT_STORE_CLIENT_CONFIG_NAME: &str = "ObjectStore"; #[derive(Debug, Clone, PartialEq)] -pub struct DAClientConfig { - pub client: DAClient, -} - -#[derive(Debug, Clone, PartialEq, Deserialize)] -#[serde(tag = "client")] -pub enum DAClient { +pub enum DAClientConfig { Avail(AvailConfig), ObjectStore(ObjectStoreConfig), } diff --git a/core/lib/config/src/configs/secrets.rs b/core/lib/config/src/configs/secrets.rs index 71197f5d9306..779bad370659 100644 --- a/core/lib/config/src/configs/secrets.rs +++ b/core/lib/config/src/configs/secrets.rs @@ -1,7 +1,7 @@ use anyhow::Context; use zksync_basic_types::url::SensitiveUrl; -use crate::configs::consensus::ConsensusSecrets; +use crate::configs::{consensus::ConsensusSecrets, da_client::avail::AvailSecrets}; #[derive(Debug, Clone, PartialEq)] pub struct DatabaseSecrets { @@ -15,11 +15,17 @@ pub struct L1Secrets { pub l1_rpc_url: SensitiveUrl, } +#[derive(Debug, Clone, PartialEq)] +pub enum DataAvailabilitySecrets { + Avail(AvailSecrets), +} + #[derive(Debug, Clone, PartialEq)] pub struct Secrets { pub consensus: Option, pub database: Option, pub l1: Option, + pub data_availability: Option, } impl DatabaseSecrets { diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index 5a5a54304425..dbcccfe52ca1 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -1,11 +1,13 @@ use std::num::NonZeroUsize; use rand::{distributions::Distribution, Rng}; +use secrecy::Secret; use zksync_basic_types::{ basic_fri_types::CircuitIdRoundTuple, commitment::L1BatchCommitmentMode, network::Network, protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, + seed_phrase::SeedPhrase, vm::FastVmMode, L1BatchNumber, L1ChainId, L2ChainId, }; @@ -14,7 +16,7 @@ use zksync_crypto_primitives::K256PrivateKey; use crate::{ configs::{ - self, da_client::DAClient::Avail, eth_sender::PubdataSendingMode, + self, da_client::DAClientConfig::Avail, eth_sender::PubdataSendingMode, external_price_api_client::ForcedPriceClientConfig, }, AvailConfig, @@ -863,6 +865,7 @@ impl Distribution for EncodeDist { consensus: self.sample_opt(|| self.sample(rng)), database: self.sample_opt(|| self.sample(rng)), l1: self.sample_opt(|| self.sample(rng)), + data_availability: self.sample_opt(|| self.sample(rng)), } } } @@ -934,16 
diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs
index 5a5a54304425..dbcccfe52ca1 100644
--- a/core/lib/config/src/testonly.rs
+++ b/core/lib/config/src/testonly.rs
@@ -1,11 +1,13 @@
 use std::num::NonZeroUsize;
 
 use rand::{distributions::Distribution, Rng};
+use secrecy::Secret;
 use zksync_basic_types::{
     basic_fri_types::CircuitIdRoundTuple,
     commitment::L1BatchCommitmentMode,
     network::Network,
     protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch},
+    seed_phrase::SeedPhrase,
     vm::FastVmMode,
     L1BatchNumber, L1ChainId, L2ChainId,
 };
@@ -14,7 +16,7 @@ use zksync_crypto_primitives::K256PrivateKey;
 
 use crate::{
     configs::{
-        self, da_client::DAClient::Avail, eth_sender::PubdataSendingMode,
+        self, da_client::DAClientConfig::Avail, eth_sender::PubdataSendingMode,
         external_price_api_client::ForcedPriceClientConfig,
     },
     AvailConfig,
@@ -863,6 +865,7 @@ impl Distribution<configs::Secrets> for EncodeDist {
             consensus: self.sample_opt(|| self.sample(rng)),
             database: self.sample_opt(|| self.sample(rng)),
             l1: self.sample_opt(|| self.sample(rng)),
+            data_availability: self.sample_opt(|| self.sample(rng)),
         }
     }
 }
@@ -934,16 +937,21 @@ impl Distribution for EncodeDist {
 impl Distribution<configs::da_client::DAClientConfig> for EncodeDist {
     fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> configs::da_client::DAClientConfig {
-        configs::da_client::DAClientConfig {
-            client: Avail(AvailConfig {
-                api_node_url: self.sample(rng),
-                bridge_api_url: self.sample(rng),
-                seed: self.sample(rng),
-                app_id: self.sample(rng),
-                timeout: self.sample(rng),
-                max_retries: self.sample(rng),
-            }),
-        }
+        Avail(AvailConfig {
+            api_node_url: self.sample(rng),
+            bridge_api_url: self.sample(rng),
+            app_id: self.sample(rng),
+            timeout: self.sample(rng),
+            max_retries: self.sample(rng),
+        })
+    }
+}
+
+impl Distribution<configs::secrets::DataAvailabilitySecrets> for EncodeDist {
+    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> configs::secrets::DataAvailabilitySecrets {
+        configs::secrets::DataAvailabilitySecrets::Avail(configs::da_client::avail::AvailSecrets {
+            seed_phrase: Some(SeedPhrase(Secret::new(self.sample(rng)))),
+        })
     }
 }
diff --git a/core/lib/env_config/src/da_client.rs b/core/lib/env_config/src/da_client.rs
index f2da3b83f18a..0fc3ad216f87 100644
--- a/core/lib/env_config/src/da_client.rs
+++ b/core/lib/env_config/src/da_client.rs
@@ -1,5 +1,11 @@
-use zksync_config::configs::da_client::{
-    DAClient, DAClientConfig, AVAIL_CLIENT_CONFIG_NAME, OBJECT_STORE_CLIENT_CONFIG_NAME,
+use std::env;
+
+use zksync_config::configs::{
+    da_client::{
+        avail::AvailSecrets, DAClientConfig, AVAIL_CLIENT_CONFIG_NAME,
+        OBJECT_STORE_CLIENT_CONFIG_NAME,
+    },
+    secrets::DataAvailabilitySecrets,
 };
 
 use crate::{envy_load, FromEnv};
@@ -7,15 +13,33 @@ use crate::{envy_load, FromEnv};
 impl FromEnv for DAClientConfig {
     fn from_env() -> anyhow::Result<Self> {
         let client_tag = std::env::var("DA_CLIENT")?;
-        let client = match client_tag.as_str() {
-            AVAIL_CLIENT_CONFIG_NAME => DAClient::Avail(envy_load("da_avail_config", "DA_")?),
+        let config = match client_tag.as_str() {
+            AVAIL_CLIENT_CONFIG_NAME => Self::Avail(envy_load("da_avail_config", "DA_")?),
             OBJECT_STORE_CLIENT_CONFIG_NAME => {
-                DAClient::ObjectStore(envy_load("da_object_store", "DA_")?)
+                Self::ObjectStore(envy_load("da_object_store", "DA_")?)
+            }
+            _ => anyhow::bail!("Unknown DA client name: {}", client_tag),
+        };
+
+        Ok(config)
+    }
+}
+
+impl FromEnv for DataAvailabilitySecrets {
+    fn from_env() -> anyhow::Result<Self> {
+        let client_tag = std::env::var("DA_CLIENT")?;
+        let secrets = match client_tag.as_str() {
+            AVAIL_CLIENT_CONFIG_NAME => {
+                let seed_phrase = env::var("DA_SECRETS_SEED_PHRASE")
+                    .ok()
+                    .map(|s| s.parse())
+                    .transpose()?;
+                Self::Avail(AvailSecrets { seed_phrase })
             }
             _ => anyhow::bail!("Unknown DA client name: {}", client_tag),
         };
 
-        Ok(Self { client })
+        Ok(secrets)
     }
 }
 
@@ -23,10 +47,10 @@ impl FromEnv for DAClientConfig {
 mod tests {
     use zksync_config::{
         configs::{
-            da_client::{DAClient, DAClient::ObjectStore},
+            da_client::{DAClientConfig, DAClientConfig::ObjectStore},
             object_store::ObjectStoreMode::GCS,
         },
-        AvailConfig, DAClientConfig, ObjectStoreConfig,
+        AvailConfig, ObjectStoreConfig,
     };
 
     use super::*;
@@ -35,15 +59,13 @@ mod tests {
     static MUTEX: EnvMutex = EnvMutex::new();
 
     fn expected_object_store_da_client_config(url: String, max_retries: u16) -> DAClientConfig {
-        DAClientConfig {
-            client: ObjectStore(ObjectStoreConfig {
-                mode: GCS {
-                    bucket_base_url: url,
-                },
-                max_retries,
-                local_mirror_path: None,
-            }),
-        }
+        ObjectStore(ObjectStoreConfig {
+            mode: GCS {
+                bucket_base_url: url,
+            },
+            max_retries,
+            local_mirror_path: None,
+        })
     }
 
     #[test]
@@ -67,21 +89,17 @@ mod tests {
     fn expected_avail_da_layer_config(
         api_node_url: &str,
         bridge_api_url: &str,
-        seed: &str,
         app_id: u32,
         timeout: usize,
         max_retries: usize,
     ) -> DAClientConfig {
-        DAClientConfig {
-            client: DAClient::Avail(AvailConfig {
-                api_node_url: api_node_url.to_string(),
-                bridge_api_url: bridge_api_url.to_string(),
-                seed: seed.to_string(),
-                app_id,
-                timeout,
-                max_retries,
-            }),
-        }
+        DAClientConfig::Avail(AvailConfig {
+            api_node_url: api_node_url.to_string(),
+            bridge_api_url: bridge_api_url.to_string(),
+            app_id,
+            timeout,
+            max_retries,
+        })
     }
 
     #[test]
@@ -91,7 +109,6 @@ mod tests {
             DA_CLIENT="Avail"
             DA_API_NODE_URL="localhost:12345"
             DA_BRIDGE_API_URL="localhost:54321"
-            DA_SEED="bottom drive obey lake curtain smoke basket hold race lonely fit walk"
             DA_APP_ID="1"
             DA_TIMEOUT="2"
             DA_MAX_RETRIES="3"
@@ -105,11 +122,32 @@ mod tests {
             expected_avail_da_layer_config(
                 "localhost:12345",
                 "localhost:54321",
-                "bottom drive obey lake curtain smoke basket hold race lonely fit walk",
                 "1".parse::<u32>().unwrap(),
                 "2".parse::<usize>().unwrap(),
                 "3".parse::<usize>().unwrap(),
             )
         );
     }
+
+    #[test]
+    fn from_env_avail_secrets() {
+        let mut lock = MUTEX.lock();
+        let config = r#"
+            DA_CLIENT="Avail"
+            DA_SECRETS_SEED_PHRASE="bottom drive obey lake curtain smoke basket hold race lonely fit walk"
+        "#;
+
+        lock.set_env(config);
+
+        let actual = match DataAvailabilitySecrets::from_env().unwrap() {
+            DataAvailabilitySecrets::Avail(avail) => avail.seed_phrase,
+        };
+
+        assert_eq!(
+            actual.unwrap(),
+            "bottom drive obey lake curtain smoke basket hold race lonely fit walk"
+                .parse()
+                .unwrap()
+        );
+    }
 }
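With this split, the environment now carries two groups of `DA_*` variables: the client settings (`DA_API_NODE_URL`, `DA_APP_ID`, and so on) and the secret (`DA_SECRETS_SEED_PHRASE`), both selected by `DA_CLIENT`. A rough sketch of loading both halves, mirroring the tests above (illustrative only; in the server binary this goes through `from_env().ok()` in `main.rs`):

```rust
use zksync_config::configs::{da_client::DAClientConfig, secrets::DataAvailabilitySecrets};
use zksync_env_config::FromEnv;

// Load the DA client config and its secrets from the same `DA_*` environment
// namespace; both loaders dispatch on `DA_CLIENT`.
fn load_da_from_env() -> anyhow::Result<(DAClientConfig, DataAvailabilitySecrets)> {
    let config = DAClientConfig::from_env()?;
    // The seed phrase now comes from `DA_SECRETS_SEED_PHRASE` instead of the
    // removed `DA_SEED` field on the client config.
    let secrets = DataAvailabilitySecrets::from_env()?;
    Ok((config, secrets))
}
```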
diff --git a/core/lib/protobuf_config/src/da_client.rs b/core/lib/protobuf_config/src/da_client.rs
index 2009d32db17c..1499e88efb4c 100644
--- a/core/lib/protobuf_config/src/da_client.rs
+++ b/core/lib/protobuf_config/src/da_client.rs
@@ -1,7 +1,7 @@
 use anyhow::Context;
 use zksync_config::{
     configs::{
-        da_client::DAClient::{Avail, ObjectStore},
+        da_client::DAClientConfig::{Avail, ObjectStore},
         {self},
     },
     AvailConfig,
@@ -24,7 +24,6 @@ impl ProtoRepr for proto::DataAvailabilityClient {
                 bridge_api_url: required(&conf.bridge_api_url)
                     .context("bridge_api_url")?
                     .clone(),
-                seed: required(&conf.seed).context("seed")?.clone(),
                 app_id: *required(&conf.app_id).context("app_id")?,
                 timeout: *required(&conf.timeout).context("timeout")? as usize,
                 max_retries: *required(&conf.max_retries).context("max_retries")? as usize,
@@ -34,17 +33,16 @@
             }
         };
 
-        Ok(configs::DAClientConfig { client })
+        Ok(client)
     }
 
     fn build(this: &Self::Type) -> Self {
-        match &this.client {
+        match &this {
             Avail(config) => Self {
                 config: Some(proto::data_availability_client::Config::Avail(
                     proto::AvailConfig {
                         api_node_url: Some(config.api_node_url.clone()),
                         bridge_api_url: Some(config.bridge_api_url.clone()),
-                        seed: Some(config.seed.clone()),
                         app_id: Some(config.app_id),
                         timeout: Some(config.timeout as u64),
                         max_retries: Some(config.max_retries as u64),
diff --git a/core/lib/protobuf_config/src/proto/config/da_client.proto b/core/lib/protobuf_config/src/proto/config/da_client.proto
index ef58fbcecb4f..d01bda2c8470 100644
--- a/core/lib/protobuf_config/src/proto/config/da_client.proto
+++ b/core/lib/protobuf_config/src/proto/config/da_client.proto
@@ -7,10 +7,10 @@ import "zksync/config/object_store.proto";
 message AvailConfig {
   optional string api_node_url = 1;
   optional string bridge_api_url = 2;
-  optional string seed = 3;
   optional uint32 app_id = 4;
   optional uint64 timeout = 5;
   optional uint64 max_retries = 6;
+  reserved 3; reserved "seed";
 }
 
 message DataAvailabilityClient {
diff --git a/core/lib/protobuf_config/src/proto/config/secrets.proto b/core/lib/protobuf_config/src/proto/config/secrets.proto
index b711d81d5754..17b915b3f087 100644
--- a/core/lib/protobuf_config/src/proto/config/secrets.proto
+++ b/core/lib/protobuf_config/src/proto/config/secrets.proto
@@ -19,9 +19,19 @@ message ConsensusSecrets {
   optional string attester_key = 3; // required for attester nodes; AttesterSecretKey
 }
 
+message AvailSecret {
+  optional string seed_phrase = 1;
+}
+
+message DataAvailabilitySecrets {
+  oneof da_secrets {
+    AvailSecret avail = 1;
+  }
+}
+
 message Secrets {
   optional DatabaseSecrets database = 1; // optional secrets for database
   optional L1Secrets l1 = 2; // optional secrets for l1 communication
   optional ConsensusSecrets consensus = 3; // optional secrets for consensus
+  optional DataAvailabilitySecrets da = 4; // optional secrets for data availability
 }
-
diff --git a/core/lib/protobuf_config/src/secrets.rs b/core/lib/protobuf_config/src/secrets.rs
index 7d10bef88a55..587351480078 100644
--- a/core/lib/protobuf_config/src/secrets.rs
+++ b/core/lib/protobuf_config/src/secrets.rs
@@ -2,15 +2,22 @@ use std::str::FromStr;
 
 use anyhow::Context;
 use secrecy::ExposeSecret;
-use zksync_basic_types::url::SensitiveUrl;
+use zksync_basic_types::{seed_phrase::SeedPhrase, url::SensitiveUrl};
 use zksync_config::configs::{
     consensus::{AttesterSecretKey, ConsensusSecrets, NodeSecretKey, ValidatorSecretKey},
-    secrets::Secrets,
+    da_client::avail::AvailSecrets,
+    secrets::{DataAvailabilitySecrets, Secrets},
     DatabaseSecrets, L1Secrets,
 };
 use zksync_protobuf::{required, ProtoRepr};
 
-use crate::{proto::secrets as proto, read_optional_repr};
+use crate::{
+    proto::{
+        secrets as proto,
+        secrets::{data_availability_secrets::DaSecrets, AvailSecret},
+    },
+    read_optional_repr,
+};
 
 impl ProtoRepr for proto::Secrets {
     type Type = Secrets;
@@ -20,6 +27,7 @@
             consensus: read_optional_repr(&self.consensus),
             database: read_optional_repr(&self.database),
             l1: read_optional_repr(&self.l1),
+            data_availability: read_optional_repr(&self.da),
         })
     }
 
@@ -28,6 +36,7 @@
             database: this.database.as_ref().map(ProtoRepr::build),
             l1: this.l1.as_ref().map(ProtoRepr::build),
             consensus: this.consensus.as_ref().map(ProtoRepr::build),
+            da: this.data_availability.as_ref().map(ProtoRepr::build),
         }
     }
 }
@@ -87,6 +96,53 @@ impl ProtoRepr for proto::L1Secrets {
     }
 }
 
+impl ProtoRepr for proto::DataAvailabilitySecrets {
+    type Type = DataAvailabilitySecrets;
+
+    fn read(&self) -> anyhow::Result<Self::Type> {
+        let secrets = required(&self.da_secrets).context("config")?;
+
+        let client = match secrets {
+            DaSecrets::Avail(avail_secret) => DataAvailabilitySecrets::Avail(AvailSecrets {
+                seed_phrase: Some(
+                    SeedPhrase::from_str(
+                        required(&avail_secret.seed_phrase).context("seed_phrase")?,
+                    )
+                    .unwrap(),
+                ),
+            }),
+        };
+
+        Ok(client)
+    }
+
+    fn build(this: &Self::Type) -> Self {
+        let secrets = match &this {
+            DataAvailabilitySecrets::Avail(config) => {
+                let seed_phrase = if config.seed_phrase.is_some() {
+                    Some(
+                        config
+                            .clone()
+                            .seed_phrase
+                            .unwrap()
+                            .0
+                            .expose_secret()
+                            .to_string(),
+                    )
+                } else {
+                    None
+                };
+
+                Some(DaSecrets::Avail(AvailSecret { seed_phrase }))
+            }
+        };
+
+        Self {
+            da_secrets: secrets,
+        }
+    }
+}
+
 impl ProtoRepr for proto::ConsensusSecrets {
     type Type = ConsensusSecrets;
     fn read(&self) -> anyhow::Result<Self::Type> {
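The `build()` arm above clones the whole `AvailSecrets` just to read the seed phrase. A functionally equivalent sketch using `as_ref().map()` avoids the clone/unwrap chain; `avail_seed_for_proto` is a hypothetical helper, not something this PR adds:

```rust
use secrecy::ExposeSecret;
use zksync_config::configs::secrets::DataAvailabilitySecrets;

// Hypothetical: extract the plaintext seed phrase for proto serialization
// without cloning the whole secrets struct.
fn avail_seed_for_proto(secrets: &DataAvailabilitySecrets) -> Option<String> {
    match secrets {
        DataAvailabilitySecrets::Avail(avail) => avail
            .seed_phrase
            .as_ref()
            .map(|phrase| phrase.0.expose_secret().to_string()),
    }
}
```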
diff --git a/core/node/da_clients/src/avail/client.rs b/core/node/da_clients/src/avail/client.rs
index 021906d73a01..7718691bf185 100644
--- a/core/node/da_clients/src/avail/client.rs
+++ b/core/node/da_clients/src/avail/client.rs
@@ -2,7 +2,8 @@ use std::{fmt::Debug, sync::Arc};
 
 use async_trait::async_trait;
 use jsonrpsee::ws_client::WsClientBuilder;
-use zksync_config::AvailConfig;
+use subxt_signer::ExposeSecret;
+use zksync_config::configs::da_client::avail::{AvailConfig, AvailSecrets};
 use zksync_da_client::{
     types::{DAError, DispatchResponse, InclusionData},
     DataAvailabilityClient,
@@ -18,8 +19,11 @@ pub struct AvailClient {
 }
 
 impl AvailClient {
-    pub async fn new(config: AvailConfig) -> anyhow::Result<Self> {
-        let sdk_client = RawAvailClient::new(config.app_id, config.seed.clone()).await?;
+    pub async fn new(config: AvailConfig, secrets: AvailSecrets) -> anyhow::Result<Self> {
+        let seed_phrase = secrets
+            .seed_phrase
+            .ok_or_else(|| anyhow::anyhow!("seed phrase"))?;
+        let sdk_client = RawAvailClient::new(config.app_id, seed_phrase.0.expose_secret()).await?;
 
         Ok(Self {
             config,
diff --git a/core/node/da_clients/src/avail/sdk.rs b/core/node/da_clients/src/avail/sdk.rs
index 5e67540fcc69..002422109d05 100644
--- a/core/node/da_clients/src/avail/sdk.rs
+++ b/core/node/da_clients/src/avail/sdk.rs
@@ -40,7 +40,7 @@ struct BoundedVec<_0>(pub Vec<_0>);
 impl RawAvailClient {
     pub(crate) const MAX_BLOB_SIZE: usize = 512 * 1024; // 512kb
 
-    pub(crate) async fn new(app_id: u32, seed: String) -> anyhow::Result<Self> {
+    pub(crate) async fn new(app_id: u32, seed: &str) -> anyhow::Result<Self> {
         let mnemonic = Mnemonic::parse(seed)?;
         let keypair = Keypair::from_phrase(&mnemonic, None)?;
 
diff --git a/core/node/node_framework/src/implementations/layers/da_clients/avail.rs b/core/node/node_framework/src/implementations/layers/da_clients/avail.rs
index 7c3d82b6d25b..06f5dbb72eb3 100644
--- a/core/node/node_framework/src/implementations/layers/da_clients/avail.rs
+++ b/core/node/node_framework/src/implementations/layers/da_clients/avail.rs
@@ -1,4 +1,4 @@
-use zksync_config::AvailConfig;
+use zksync_config::{configs::da_client::avail::AvailSecrets, AvailConfig};
 use zksync_da_client::DataAvailabilityClient;
 use zksync_da_clients::avail::AvailClient;
 
@@ -11,11 +11,12 @@ use crate::{
 #[derive(Debug)]
 pub struct AvailWiringLayer {
     config: AvailConfig,
+    secrets: AvailSecrets,
 }
 
 impl AvailWiringLayer {
-    pub fn new(config: AvailConfig) -> Self {
-        Self { config }
+    pub fn new(config: AvailConfig, secrets: AvailSecrets) -> Self {
+        Self { config, secrets }
     }
 }
 
@@ -36,7 +37,7 @@ impl WiringLayer for AvailWiringLayer {
 
     async fn wire(self, _input: Self::Input) -> Result<Self::Output, WiringError> {
         let client: Box<dyn DataAvailabilityClient> =
-            Box::new(AvailClient::new(self.config).await?);
+            Box::new(AvailClient::new(self.config, self.secrets).await?);
 
         Ok(Self::Output {
             client: DAClientResource(client),
diff --git a/docs/guides/external-node/00_quick_start.md b/docs/guides/external-node/00_quick_start.md
index 75d8ba891512..287a4d2d47c0 100644
--- a/docs/guides/external-node/00_quick_start.md
+++ b/docs/guides/external-node/00_quick_start.md
@@ -34,8 +34,7 @@ cd docker-compose-examples
 docker compose --file testnet-external-node-docker-compose.yml down --volumes
 ```
 
-You can see the status of the node (after recovery) in
-[local grafana dashboard](http://localhost:3000/dashboards).
+You can see the status of the node (after recovery) in [local grafana dashboard](http://localhost:3000/dashboards).
 
 Those commands start ZKsync node locally inside docker.
diff --git a/prover/Cargo.lock b/prover/Cargo.lock
index 88c0d1114fc4..3fb71bd3f500 100644
--- a/prover/Cargo.lock
+++ b/prover/Cargo.lock
@@ -7330,6 +7330,7 @@ dependencies = [
  "ethabi",
  "hex",
  "num_enum 0.7.2",
+ "secrecy",
  "serde",
  "serde_json",
  "serde_with",
diff --git a/prover/crates/lib/prover_dal/.sqlx/query-1297f0977132185d6bd4501f490f1cdac8b194f09926c133985479c533a651f2.json b/prover/crates/lib/prover_dal/.sqlx/query-1297f0977132185d6bd4501f490f1cdac8b194f09926c133985479c533a651f2.json
new file mode 100644
index 000000000000..c99572bcc8e5
--- /dev/null
+++ b/prover/crates/lib/prover_dal/.sqlx/query-1297f0977132185d6bd4501f490f1cdac8b194f09926c133985479c533a651f2.json
@@ -0,0 +1,18 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n UPDATE prover_jobs_fri\n SET\n status = $1\n WHERE\n l1_batch_number = $2\n AND sequence_number = $3\n AND aggregation_round = $4\n AND circuit_id = $5\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Text",
+        "Int8",
+        "Int4",
+        "Int2",
+        "Int2"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "1297f0977132185d6bd4501f490f1cdac8b194f09926c133985479c533a651f2"
+}
diff --git a/prover/crates/lib/prover_dal/.sqlx/query-1926cf432237684de2383179a6d0d001cdf5bc7ba988b742571ec90a938434e3.json b/prover/crates/lib/prover_dal/.sqlx/query-1926cf432237684de2383179a6d0d001cdf5bc7ba988b742571ec90a938434e3.json
deleted file mode 100644
index 4015a22ff3fd..000000000000
--- a/prover/crates/lib/prover_dal/.sqlx/query-1926cf432237684de2383179a6d0d001cdf5bc7ba988b742571ec90a938434e3.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "UPDATE leaf_aggregation_witness_jobs_fri \n SET status = $1, attempts = $2\n WHERE l1_batch_number = $3\n AND circuit_id = $4",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Text",
-        "Int2",
-        "Int8",
-        "Int2"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "1926cf432237684de2383179a6d0d001cdf5bc7ba988b742571ec90a938434e3"
-}
diff --git a/prover/crates/lib/prover_dal/.sqlx/query-97adb49780c9edde6a3cfda09dadbd694e1781e013247d090a280a1f894de464.json b/prover/crates/lib/prover_dal/.sqlx/query-29f7a564a8373f7e44840e8e9e7d0cd5c6b1122c35d7ffdbbba30327ca3fb5a8.json
similarity
index 53% rename from prover/crates/lib/prover_dal/.sqlx/query-97adb49780c9edde6a3cfda09dadbd694e1781e013247d090a280a1f894de464.json rename to prover/crates/lib/prover_dal/.sqlx/query-29f7a564a8373f7e44840e8e9e7d0cd5c6b1122c35d7ffdbbba30327ca3fb5a8.json index ce9e492a7d4a..05163dcfa2e6 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-97adb49780c9edde6a3cfda09dadbd694e1781e013247d090a280a1f894de464.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-29f7a564a8373f7e44840e8e9e7d0cd5c6b1122c35d7ffdbbba30327ca3fb5a8.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n protocol_version AS \"protocol_version!\",\n protocol_version_patch AS \"protocol_version_patch!\",\n COUNT(*) FILTER (WHERE status = 'queued') as queued,\n COUNT(*) FILTER (WHERE status = 'in_progress') as in_progress\n FROM\n prover_jobs_fri\n WHERE\n status IN ('queued', 'in_progress')\n AND protocol_version IS NOT NULL\n GROUP BY\n protocol_version,\n protocol_version_patch\n ", + "query": "\n SELECT\n protocol_version AS \"protocol_version!\",\n protocol_version_patch AS \"protocol_version_patch!\",\n COUNT(*) FILTER (\n WHERE\n status = 'queued'\n ) AS queued,\n COUNT(*) FILTER (\n WHERE\n status = 'in_progress'\n ) AS in_progress\n FROM\n prover_jobs_fri\n WHERE\n status IN ('queued', 'in_progress')\n AND protocol_version IS NOT NULL\n GROUP BY\n protocol_version,\n protocol_version_patch\n ", "describe": { "columns": [ { @@ -34,5 +34,5 @@ null ] }, - "hash": "97adb49780c9edde6a3cfda09dadbd694e1781e013247d090a280a1f894de464" + "hash": "29f7a564a8373f7e44840e8e9e7d0cd5c6b1122c35d7ffdbbba30327ca3fb5a8" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-2d1461e068e43fd16714610b383cf8c93630d529ec96e67aac078f18196f61a5.json b/prover/crates/lib/prover_dal/.sqlx/query-2d1461e068e43fd16714610b383cf8c93630d529ec96e67aac078f18196f61a5.json new file mode 100644 index 000000000000..50d121213fb9 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-2d1461e068e43fd16714610b383cf8c93630d529ec96e67aac078f18196f61a5.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = $1,\n attempts = $2\n WHERE\n l1_batch_number = $3\n AND sequence_number = $4\n AND aggregation_round = $5\n AND circuit_id = $6\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int2", + "Int8", + "Int4", + "Int2", + "Int2" + ] + }, + "nullable": [] + }, + "hash": "2d1461e068e43fd16714610b383cf8c93630d529ec96e67aac078f18196f61a5" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-75c1affbca0901edd5d0e2f12ef4d935674a5aff2f34421d753b4d1a9dea5b12.json b/prover/crates/lib/prover_dal/.sqlx/query-35a76415cb746d03da31481edc65adefab0bf3abf6853a6d36123c8adcaf813b.json similarity index 70% rename from prover/crates/lib/prover_dal/.sqlx/query-75c1affbca0901edd5d0e2f12ef4d935674a5aff2f34421d753b4d1a9dea5b12.json rename to prover/crates/lib/prover_dal/.sqlx/query-35a76415cb746d03da31481edc65adefab0bf3abf6853a6d36123c8adcaf813b.json index 14463ecbe426..bf8db798e7d4 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-75c1affbca0901edd5d0e2f12ef4d935674a5aff2f34421d753b4d1a9dea5b12.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-35a76415cb746d03da31481edc65adefab0bf3abf6853a6d36123c8adcaf813b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW()\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (status = 'in_progress' OR status = 
'failed')\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW()\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -43,5 +43,5 @@ true ] }, - "hash": "75c1affbca0901edd5d0e2f12ef4d935674a5aff2f34421d753b4d1a9dea5b12" + "hash": "35a76415cb746d03da31481edc65adefab0bf3abf6853a6d36123c8adcaf813b" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-548414f8148740c991c345e5fd46ea738d209eb07e7a6bcbdb33e25b3347a08c.json b/prover/crates/lib/prover_dal/.sqlx/query-3727d5614d2fe2a4d96f880eb72cd48c95ca5b4520dde415a2b5ff32ece47c86.json similarity index 70% rename from prover/crates/lib/prover_dal/.sqlx/query-548414f8148740c991c345e5fd46ea738d209eb07e7a6bcbdb33e25b3347a08c.json rename to prover/crates/lib/prover_dal/.sqlx/query-3727d5614d2fe2a4d96f880eb72cd48c95ca5b4520dde415a2b5ff32ece47c86.json index 8f5b046b974f..d7eb6a32b421 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-548414f8148740c991c345e5fd46ea738d209eb07e7a6bcbdb33e25b3347a08c.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-3727d5614d2fe2a4d96f880eb72cd48c95ca5b4520dde415a2b5ff32ece47c86.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW()\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (status = 'in_progress' OR status = 'failed')\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW()\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -43,5 +43,5 @@ true ] }, - "hash": "548414f8148740c991c345e5fd46ea738d209eb07e7a6bcbdb33e25b3347a08c" + "hash": "3727d5614d2fe2a4d96f880eb72cd48c95ca5b4520dde415a2b5ff32ece47c86" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-c19fc4c8e4b3a3ef4f9c0f4c22ed68c598eada8e60938a8e4b5cd32b53f5a574.json b/prover/crates/lib/prover_dal/.sqlx/query-37ad15f54f4a6f4f79c71a857f3a8d4cc59246dda91b19526e73f27a17c8e3da.json similarity index 69% rename from prover/crates/lib/prover_dal/.sqlx/query-c19fc4c8e4b3a3ef4f9c0f4c22ed68c598eada8e60938a8e4b5cd32b53f5a574.json rename to prover/crates/lib/prover_dal/.sqlx/query-37ad15f54f4a6f4f79c71a857f3a8d4cc59246dda91b19526e73f27a17c8e3da.json index 3c4c8d7a29f3..c97fe7f4042b 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-c19fc4c8e4b3a3ef4f9c0f4c22ed68c598eada8e60938a8e4b5cd32b53f5a574.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-37ad15f54f4a6f4f79c71a857f3a8d4cc59246dda91b19526e73f27a17c8e3da.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW()\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (status = 'in_progress' OR status = 'failed')\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW()\n WHERE\n 
l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -43,5 +43,5 @@ true ] }, - "hash": "c19fc4c8e4b3a3ef4f9c0f4c22ed68c598eada8e60938a8e4b5cd32b53f5a574" + "hash": "37ad15f54f4a6f4f79c71a857f3a8d4cc59246dda91b19526e73f27a17c8e3da" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-39f60c638d445c5dbf23e01fd89a468057599be1e6c6c96a947c33df53a68224.json b/prover/crates/lib/prover_dal/.sqlx/query-39f60c638d445c5dbf23e01fd89a468057599be1e6c6c96a947c33df53a68224.json deleted file mode 100644 index 5cec4d7d7d03..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-39f60c638d445c5dbf23e01fd89a468057599be1e6c6c96a947c33df53a68224.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n recursion_tip_witness_jobs_fri (\n l1_batch_number,\n status,\n number_of_final_node_jobs,\n created_at,\n updated_at\n )\n VALUES\n ($1, 'waiting_for_proofs',1, NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET status = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text" - ] - }, - "nullable": [] - }, - "hash": "39f60c638d445c5dbf23e01fd89a468057599be1e6c6c96a947c33df53a68224" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-3a9ffd4d88f2cfac22835aac2512e61157bf58aec70903623afc9da24d46a336.json b/prover/crates/lib/prover_dal/.sqlx/query-3a9ffd4d88f2cfac22835aac2512e61157bf58aec70903623afc9da24d46a336.json deleted file mode 100644 index 063ae8fc90a3..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-3a9ffd4d88f2cfac22835aac2512e61157bf58aec70903623afc9da24d46a336.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n node_aggregation_witness_jobs_fri (\n l1_batch_number,\n circuit_id,\n status,\n created_at,\n updated_at\n )\n VALUES\n ($1, $2, 'waiting_for_proofs', NOW(), NOW())\n ON CONFLICT (l1_batch_number, circuit_id, depth) DO\n UPDATE\n SET status = $3\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int2", - "Text" - ] - }, - "nullable": [] - }, - "hash": "3a9ffd4d88f2cfac22835aac2512e61157bf58aec70903623afc9da24d46a336" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-3bb8fbd9e83703887e0a3c196031b931c0d8dbc6835dfac20107ea7412ce9fbb.json b/prover/crates/lib/prover_dal/.sqlx/query-3bb8fbd9e83703887e0a3c196031b931c0d8dbc6835dfac20107ea7412ce9fbb.json deleted file mode 100644 index 693905084151..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-3bb8fbd9e83703887e0a3c196031b931c0d8dbc6835dfac20107ea7412ce9fbb.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n proof_compression_jobs_fri (\n l1_batch_number,\n status,\n created_at,\n updated_at\n )\n VALUES\n ($1, $2, NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET status = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text" - ] - }, - "nullable": [] - }, - "hash": "3bb8fbd9e83703887e0a3c196031b931c0d8dbc6835dfac20107ea7412ce9fbb" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-434f7cb51a7d22948cd26e962679a67936d572f8046d3a1c7a4f100ff209d81d.json b/prover/crates/lib/prover_dal/.sqlx/query-434f7cb51a7d22948cd26e962679a67936d572f8046d3a1c7a4f100ff209d81d.json deleted file mode 100644 index 7615523f92f1..000000000000 --- 
a/prover/crates/lib/prover_dal/.sqlx/query-434f7cb51a7d22948cd26e962679a67936d572f8046d3a1c7a4f100ff209d81d.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE prover_jobs_fri SET status = $1\n WHERE l1_batch_number = $2\n AND sequence_number = $3\n AND aggregation_round = $4\n AND circuit_id = $5", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Int8", - "Int4", - "Int2", - "Int2" - ] - }, - "nullable": [] - }, - "hash": "434f7cb51a7d22948cd26e962679a67936d572f8046d3a1c7a4f100ff209d81d" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-73266a8526c6adc315900e2e95441976a264759c4060c1a38e466ee2052fc17d.json b/prover/crates/lib/prover_dal/.sqlx/query-73266a8526c6adc315900e2e95441976a264759c4060c1a38e466ee2052fc17d.json new file mode 100644 index 000000000000..f8b141a8dac9 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-73266a8526c6adc315900e2e95441976a264759c4060c1a38e466ee2052fc17d.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n recursion_tip_witness_jobs_fri (l1_batch_number, status, number_of_final_node_jobs, created_at, updated_at)\n VALUES\n ($1, 'waiting_for_proofs', 1, NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET\n status = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "73266a8526c6adc315900e2e95441976a264759c4060c1a38e466ee2052fc17d" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-9730c8225ff2cf3111185e81f602a4a98ec63eb942c73ce4448d0957346047cd.json b/prover/crates/lib/prover_dal/.sqlx/query-9730c8225ff2cf3111185e81f602a4a98ec63eb942c73ce4448d0957346047cd.json new file mode 100644 index 000000000000..d23ed8d9fc8a --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-9730c8225ff2cf3111185e81f602a4a98ec63eb942c73ce4448d0957346047cd.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE leaf_aggregation_witness_jobs_fri\n SET\n status = $1,\n attempts = $2\n WHERE\n l1_batch_number = $3\n AND circuit_id = $4\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int2", + "Int8", + "Int2" + ] + }, + "nullable": [] + }, + "hash": "9730c8225ff2cf3111185e81f602a4a98ec63eb942c73ce4448d0957346047cd" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-a817f0fec85388b3e2510ce259208a01b63ae4aa03c983c3a52c802d585e5a80.json b/prover/crates/lib/prover_dal/.sqlx/query-a817f0fec85388b3e2510ce259208a01b63ae4aa03c983c3a52c802d585e5a80.json new file mode 100644 index 000000000000..93532150f7f8 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-a817f0fec85388b3e2510ce259208a01b63ae4aa03c983c3a52c802d585e5a80.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n proof_compression_jobs_fri (l1_batch_number, status, created_at, updated_at)\n VALUES\n ($1, $2, NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET\n status = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "a817f0fec85388b3e2510ce259208a01b63ae4aa03c983c3a52c802d585e5a80" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-aabcfa9005b8e1d84cfa083a47a700302981be0afef31a8864613484f8521f9e.json b/prover/crates/lib/prover_dal/.sqlx/query-aabcfa9005b8e1d84cfa083a47a700302981be0afef31a8864613484f8521f9e.json deleted file mode 100644 index 3d60050c92ed..000000000000 --- 
a/prover/crates/lib/prover_dal/.sqlx/query-aabcfa9005b8e1d84cfa083a47a700302981be0afef31a8864613484f8521f9e.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE prover_jobs_fri \n SET status = $1, attempts = $2\n WHERE l1_batch_number = $3\n AND sequence_number =$4\n AND aggregation_round = $5\n AND circuit_id = $6", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Int2", - "Int8", - "Int4", - "Int2", - "Int2" - ] - }, - "nullable": [] - }, - "hash": "aabcfa9005b8e1d84cfa083a47a700302981be0afef31a8864613484f8521f9e" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-c8daa62b3835c15fafb3f83deabb5a4672ad50a9de92c84d939ac4c69842e355.json b/prover/crates/lib/prover_dal/.sqlx/query-c8daa62b3835c15fafb3f83deabb5a4672ad50a9de92c84d939ac4c69842e355.json new file mode 100644 index 000000000000..cadc931fa1ca --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-c8daa62b3835c15fafb3f83deabb5a4672ad50a9de92c84d939ac4c69842e355.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n node_aggregation_witness_jobs_fri (l1_batch_number, circuit_id, status, created_at, updated_at)\n VALUES\n ($1, $2, 'waiting_for_proofs', NOW(), NOW())\n ON CONFLICT (l1_batch_number, circuit_id, depth) DO\n UPDATE\n SET\n status = $3\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int2", + "Text" + ] + }, + "nullable": [] + }, + "hash": "c8daa62b3835c15fafb3f83deabb5a4672ad50a9de92c84d939ac4c69842e355" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-63cf7038e6c48af8ed9afc7d6ea07edd87cb16a79c13e7d4291d99736e51d3b9.json b/prover/crates/lib/prover_dal/.sqlx/query-e875dcbbdaed6998dbea45d4eab5d005d8760c4809b7aef902155196873da66e.json similarity index 82% rename from prover/crates/lib/prover_dal/.sqlx/query-63cf7038e6c48af8ed9afc7d6ea07edd87cb16a79c13e7d4291d99736e51d3b9.json rename to prover/crates/lib/prover_dal/.sqlx/query-e875dcbbdaed6998dbea45d4eab5d005d8760c4809b7aef902155196873da66e.json index 208b23d939f8..4ee9278fe42a 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-63cf7038e6c48af8ed9afc7d6ea07edd87cb16a79c13e7d4291d99736e51d3b9.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-e875dcbbdaed6998dbea45d4eab5d005d8760c4809b7aef902155196873da66e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n scheduler_witness_jobs_fri (\n l1_batch_number,\n scheduler_partial_input_blob_url,\n status,\n created_at,\n updated_at\n )\n VALUES\n ($1, '', 'waiting_for_proofs', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET status = $2\n ", + "query": "\n INSERT INTO\n scheduler_witness_jobs_fri (\n l1_batch_number,\n scheduler_partial_input_blob_url,\n status,\n created_at,\n updated_at\n )\n VALUES\n ($1, '', 'waiting_for_proofs', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET\n status = $2\n ", "describe": { "columns": [], "parameters": { @@ -11,5 +11,5 @@ }, "nullable": [] }, - "hash": "63cf7038e6c48af8ed9afc7d6ea07edd87cb16a79c13e7d4291d99736e51d3b9" + "hash": "e875dcbbdaed6998dbea45d4eab5d005d8760c4809b7aef902155196873da66e" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-0eac6f7b2d799059328584029b437891598dc79b5ed11258b2c90c3f282929ad.json b/prover/crates/lib/prover_dal/.sqlx/query-eec29cbff034818f4fb5ec1e6ad38e1010d7389457b3c97e9b238a3a0291a54e.json similarity index 78% rename from prover/crates/lib/prover_dal/.sqlx/query-0eac6f7b2d799059328584029b437891598dc79b5ed11258b2c90c3f282929ad.json rename to 
prover/crates/lib/prover_dal/.sqlx/query-eec29cbff034818f4fb5ec1e6ad38e1010d7389457b3c97e9b238a3a0291a54e.json index 61518273b4d3..f8e92b1ad666 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-0eac6f7b2d799059328584029b437891598dc79b5ed11258b2c90c3f282929ad.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-eec29cbff034818f4fb5ec1e6ad38e1010d7389457b3c97e9b238a3a0291a54e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n leaf_aggregation_witness_jobs_fri (\n l1_batch_number,\n circuit_id,\n status,\n number_of_basic_circuits,\n created_at,\n updated_at\n )\n VALUES\n ($1, $2, 'waiting_for_proofs', 2, NOW(), NOW())\n ON CONFLICT (l1_batch_number, circuit_id) DO\n UPDATE\n SET status = $3\n ", + "query": "\n INSERT INTO\n leaf_aggregation_witness_jobs_fri (\n l1_batch_number,\n circuit_id,\n status,\n number_of_basic_circuits,\n created_at,\n updated_at\n )\n VALUES\n ($1, $2, 'waiting_for_proofs', 2, NOW(), NOW())\n ON CONFLICT (l1_batch_number, circuit_id) DO\n UPDATE\n SET\n status = $3\n ", "describe": { "columns": [], "parameters": { @@ -12,5 +12,5 @@ }, "nullable": [] }, - "hash": "0eac6f7b2d799059328584029b437891598dc79b5ed11258b2c90c3f282929ad" + "hash": "eec29cbff034818f4fb5ec1e6ad38e1010d7389457b3c97e9b238a3a0291a54e" } diff --git a/prover/crates/lib/prover_dal/src/cli_test_dal.rs b/prover/crates/lib/prover_dal/src/cli_test_dal.rs index 19fe0e4f57b0..d08418203378 100644 --- a/prover/crates/lib/prover_dal/src/cli_test_dal.rs +++ b/prover/crates/lib/prover_dal/src/cli_test_dal.rs @@ -21,11 +21,16 @@ impl CliTestDal<'_, '_> { sequence_number: usize, ) { sqlx::query!( - "UPDATE prover_jobs_fri SET status = $1 - WHERE l1_batch_number = $2 + r#" + UPDATE prover_jobs_fri + SET + status = $1 + WHERE + l1_batch_number = $2 AND sequence_number = $3 AND aggregation_round = $4 - AND circuit_id = $5", + AND circuit_id = $5 + "#, status.to_string(), batch_number.0 as i64, sequence_number as i64, @@ -44,7 +49,7 @@ impl CliTestDal<'_, '_> { circuit_id: u8, ) { sqlx::query!( - " + r#" INSERT INTO leaf_aggregation_witness_jobs_fri ( l1_batch_number, @@ -58,8 +63,9 @@ impl CliTestDal<'_, '_> { ($1, $2, 'waiting_for_proofs', 2, NOW(), NOW()) ON CONFLICT (l1_batch_number, circuit_id) DO UPDATE - SET status = $3 - ", + SET + status = $3 + "#, batch_number.0 as i64, circuit_id as i16, status.to_string() @@ -76,21 +82,16 @@ impl CliTestDal<'_, '_> { circuit_id: u8, ) { sqlx::query!( - " + r#" INSERT INTO - node_aggregation_witness_jobs_fri ( - l1_batch_number, - circuit_id, - status, - created_at, - updated_at - ) + node_aggregation_witness_jobs_fri (l1_batch_number, circuit_id, status, created_at, updated_at) VALUES ($1, $2, 'waiting_for_proofs', NOW(), NOW()) ON CONFLICT (l1_batch_number, circuit_id, depth) DO UPDATE - SET status = $3 - ", + SET + status = $3 + "#, batch_number.0 as i64, circuit_id as i16, status.to_string(), @@ -102,21 +103,16 @@ impl CliTestDal<'_, '_> { pub async fn insert_rt_job(&mut self, status: WitnessJobStatus, batch_number: L1BatchNumber) { sqlx::query!( - " + r#" INSERT INTO - recursion_tip_witness_jobs_fri ( - l1_batch_number, - status, - number_of_final_node_jobs, - created_at, - updated_at - ) + recursion_tip_witness_jobs_fri (l1_batch_number, status, number_of_final_node_jobs, created_at, updated_at) VALUES - ($1, 'waiting_for_proofs',1, NOW(), NOW()) + ($1, 'waiting_for_proofs', 1, NOW(), NOW()) ON CONFLICT (l1_batch_number) DO UPDATE - SET status = $2 - ", + SET + status = $2 + "#, batch_number.0 as i64, status.to_string(), ) @@ -131,7 
+127,7 @@ impl CliTestDal<'_, '_> { batch_number: L1BatchNumber, ) { sqlx::query!( - " + r#" INSERT INTO scheduler_witness_jobs_fri ( l1_batch_number, @@ -144,8 +140,9 @@ impl CliTestDal<'_, '_> { ($1, '', 'waiting_for_proofs', NOW(), NOW()) ON CONFLICT (l1_batch_number) DO UPDATE - SET status = $2 - ", + SET + status = $2 + "#, batch_number.0 as i64, status.to_string(), ) @@ -160,20 +157,16 @@ impl CliTestDal<'_, '_> { batch_number: L1BatchNumber, ) { sqlx::query!( - " + r#" INSERT INTO - proof_compression_jobs_fri ( - l1_batch_number, - status, - created_at, - updated_at - ) + proof_compression_jobs_fri (l1_batch_number, status, created_at, updated_at) VALUES ($1, $2, NOW(), NOW()) ON CONFLICT (l1_batch_number) DO UPDATE - SET status = $2 - ", + SET + status = $2 + "#, batch_number.0 as i64, status.to_string(), ) @@ -192,12 +185,17 @@ impl CliTestDal<'_, '_> { sequence_number: usize, ) { sqlx::query!( - "UPDATE prover_jobs_fri - SET status = $1, attempts = $2 - WHERE l1_batch_number = $3 - AND sequence_number =$4 + r#" + UPDATE prover_jobs_fri + SET + status = $1, + attempts = $2 + WHERE + l1_batch_number = $3 + AND sequence_number = $4 AND aggregation_round = $5 - AND circuit_id = $6", + AND circuit_id = $6 + "#, status.to_string(), attempts as i64, batch_number.0 as i64, @@ -218,10 +216,15 @@ impl CliTestDal<'_, '_> { batch_number: L1BatchNumber, ) { sqlx::query!( - "UPDATE leaf_aggregation_witness_jobs_fri - SET status = $1, attempts = $2 - WHERE l1_batch_number = $3 - AND circuit_id = $4", + r#" + UPDATE leaf_aggregation_witness_jobs_fri + SET + status = $1, + attempts = $2 + WHERE + l1_batch_number = $3 + AND circuit_id = $4 + "#, status.to_string(), attempts as i64, batch_number.0 as i64, diff --git a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs index 1a3b8de0ce4b..71d0c11728b1 100644 --- a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs @@ -528,8 +528,14 @@ impl FriProverDal<'_, '_> { SELECT protocol_version AS "protocol_version!", protocol_version_patch AS "protocol_version_patch!", - COUNT(*) FILTER (WHERE status = 'queued') as queued, - COUNT(*) FILTER (WHERE status = 'in_progress') as in_progress + COUNT(*) FILTER ( + WHERE + status = 'queued' + ) AS queued, + COUNT(*) FILTER ( + WHERE + status = 'in_progress' + ) AS in_progress FROM prover_jobs_fri WHERE diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs index 66e34f7f8e75..c7ba0f60ef3f 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs @@ -1719,7 +1719,10 @@ impl FriWitnessGeneratorDal<'_, '_> { WHERE l1_batch_number = $1 AND attempts >= $2 - AND (status = 'in_progress' OR status = 'failed') + AND ( + status = 'in_progress' + OR status = 'failed' + ) RETURNING l1_batch_number, status, @@ -1786,7 +1789,10 @@ impl FriWitnessGeneratorDal<'_, '_> { WHERE l1_batch_number = $1 AND attempts >= $2 - AND (status = 'in_progress' OR status = 'failed') + AND ( + status = 'in_progress' + OR status = 'failed' + ) RETURNING l1_batch_number, status, @@ -1827,7 +1833,10 @@ impl FriWitnessGeneratorDal<'_, '_> { WHERE l1_batch_number = $1 AND attempts >= $2 - AND (status = 'in_progress' OR status = 'failed') + AND ( + status = 'in_progress' + OR status = 'failed' + ) RETURNING l1_batch_number, status, diff --git a/zk_toolbox/Cargo.lock 
b/zk_toolbox/Cargo.lock index 296037094529..6a09886ed0b2 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -6487,6 +6487,7 @@ dependencies = [ "ethabi", "hex", "num_enum 0.7.2", + "secrecy", "serde", "serde_json", "serde_with", diff --git a/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs b/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs index 89e08418c6e5..38523db4dacc 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs @@ -121,6 +121,7 @@ fn prepare_configs( l1: Some(L1Secrets { l1_rpc_url: SensitiveUrl::from_str(&args.l1_rpc_url).context("l1_rpc_url")?, }), + data_availability: None, }; secrets.save_with_base_path(shell, en_configs_path)?; let dirs = recreate_rocksdb_dirs(shell, &config.rocks_db_path, RocksDBDirOption::ExternalNode)?;
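For the external node, `prepare_configs` above simply sets `data_availability: None`. On a main node that runs the Avail client, the assembled `Secrets` would instead carry the seed phrase, roughly like this sketch (placeholder values, not taken from the PR):

```rust
use std::str::FromStr;

use zksync_basic_types::url::SensitiveUrl;
use zksync_config::configs::{
    da_client::avail::AvailSecrets,
    secrets::{DataAvailabilitySecrets, L1Secrets, Secrets},
};

fn example_secrets() -> anyhow::Result<Secrets> {
    Ok(Secrets {
        consensus: None,
        database: None,
        l1: Some(L1Secrets {
            // Placeholder RPC endpoint.
            l1_rpc_url: SensitiveUrl::from_str("http://127.0.0.1:8545")?,
        }),
        data_availability: Some(DataAvailabilitySecrets::Avail(AvailSecrets {
            // The well-known test mnemonic used in this diff's tests.
            seed_phrase: Some(
                "bottom drive obey lake curtain smoke basket hold race lonely fit walk".parse()?,
            ),
        })),
    })
}
```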