Skip to content

Commit

Permalink
Add storage migration
Browse files Browse the repository at this point in the history
  • Loading branch information
popzxc committed Jan 24, 2025
1 parent b6d2bf2 commit 514900f
Show file tree
Hide file tree
Showing 9 changed files with 208 additions and 5 deletions.
30 changes: 29 additions & 1 deletion core/bin/contract-verifier/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use tokio::sync::watch;
use zksync_config::configs::PrometheusConfig;
use zksync_contract_verifier_lib::ContractVerifier;
use zksync_core_leftovers::temp_config_store::{load_database_secrets, load_general_config};
use zksync_dal::{ConnectionPool, Core};
use zksync_dal::{ConnectionPool, Core, CoreDal};
use zksync_queued_job_processor::JobProcessor;
use zksync_utils::wait_for_tasks::ManagedTasks;
use zksync_vlog::prometheus::PrometheusExporterConfig;
Expand All @@ -25,6 +25,32 @@ struct Opt {
secrets_path: Option<PathBuf>,
}

/// Ensures that contract verification info has been migrated to the v2 table.
///
/// Checks the migration flag in the database and, if the migration has not been
/// performed yet, runs it in batches. The batch size defaults to `BATCH_SIZE`,
/// but can be overridden via the `CONTRACT_VERIFIER_MIGRATION_BATCH_SIZE`
/// environment variable.
///
/// # Errors
///
/// Propagates database connection / query errors.
async fn perform_storage_migration(pool: &ConnectionPool<Core>) -> anyhow::Result<()> {
    const BATCH_SIZE: usize = 1000;

    // Make it possible to override just in case. A malformed override is
    // reported instead of being silently ignored.
    let batch_size = match std::env::var("CONTRACT_VERIFIER_MIGRATION_BATCH_SIZE") {
        Ok(raw) => raw.parse().unwrap_or_else(|_| {
            tracing::warn!(
                "Invalid CONTRACT_VERIFIER_MIGRATION_BATCH_SIZE value `{raw}`; \
                 falling back to the default of {BATCH_SIZE}"
            );
            BATCH_SIZE
        }),
        Err(_) => BATCH_SIZE,
    };

    let mut storage = pool.connection().await?;
    let migration_performed = storage
        .contract_verification_dal()
        .is_verification_info_migration_performed()
        .await?;
    if !migration_performed {
        tracing::info!("Running the storage migration for the contract verifier table");
        storage
            .contract_verification_dal()
            .perform_verification_info_migration(batch_size)
            .await?;
    } else {
        tracing::info!("Storage migration is not needed");
    }
    Ok(())
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
let opt = Opt::parse();
Expand All @@ -51,6 +77,8 @@ async fn main() -> anyhow::Result<()> {
.build()
.await?;

perform_storage_migration(&pool).await?;

let (stop_sender, stop_receiver) = watch::channel(false);
let contract_verifier = ContractVerifier::new(verifier_config.compilation_timeout(), pool)
.await
Expand Down
Empty file.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

129 changes: 129 additions & 0 deletions core/lib/dal/src/contract_verification_dal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -733,6 +733,135 @@ impl ContractVerificationDal<'_, '_> {

Ok(count_v2 >= count_v1)
}

/// Copies rows from the legacy `contracts_verification_info` table into
/// `contract_verification_info_v2`, computing bytecode hashes for each contract.
///
/// The migration is resumable: the number of rows already present in the v2
/// table is used as the starting offset into the legacy table (scanned in
/// address order). Rows are copied in batches of `batch_size`, each batch
/// inside its own transaction, so an interrupted run can be restarted safely.
/// Rows whose `verification_info` is NULL are skipped (they cannot be
/// migrated), and already-migrated addresses are left untouched via
/// `ON CONFLICT ... DO NOTHING`.
///
/// # Errors
///
/// Returns an error if a database query fails or if a stored verification
/// info JSON cannot be deserialized.
pub async fn perform_verification_info_migration(
    &mut self,
    batch_size: usize,
) -> anyhow::Result<()> {
    // Resume point: the number of already migrated contracts.
    let mut offset = sqlx::query!(
        r#"
        SELECT
            COUNT(*)
        FROM
            contract_verification_info_v2
        "#,
    )
    .instrument("perform_verification_info_migration#count")
    .fetch_one(self.storage)
    .await?
    .count
    .context("COUNT(*) unexpectedly returned NULL")? as usize;

    loop {
        let mut transaction = self.storage.start_transaction().await?;
        let rows = sqlx::query!(
            r#"
            SELECT
                address,
                verification_info
            FROM
                contracts_verification_info
            ORDER BY
                address
            OFFSET $1
            LIMIT $2
            "#,
            offset as i64,
            batch_size as i64,
        )
        .instrument("perform_verification_info_migration#select")
        .with_arg("offset", &offset)
        .with_arg("batch_size", &batch_size)
        .fetch_all(&mut transaction)
        .await?;

        // Terminate on the *fetched* row count, not the post-filter count:
        // a batch consisting entirely of NULL `verification_info` rows must
        // not end the migration early.
        if rows.is_empty() {
            tracing::info!("No more contracts to process");
            break;
        }
        let fetched = rows.len();

        // Rows with a NULL `verification_info` cannot be migrated; skip them.
        let contracts: Vec<(Vec<u8>, serde_json::Value)> = rows
            .into_iter()
            .filter_map(|row| row.verification_info.map(|info| (row.address, info)))
            .collect();

        tracing::info!(
            "Processing {} contracts; offset {}",
            contracts.len(),
            offset
        );

        let mut addresses = Vec::with_capacity(contracts.len());
        let mut verification_infos = Vec::with_capacity(contracts.len());
        let mut bytecode_keccak256s = Vec::with_capacity(contracts.len());
        let mut bytecode_without_metadata_keccak256s = Vec::with_capacity(contracts.len());

        for (address, info_json) in contracts {
            let verification_info =
                serde_json::from_value::<VerificationInfo>(info_json.clone())
                    .context("Failed to deserialize verification info")?;
            // A separately stored deployed bytecode marks an EVM contract;
            // EraVM artifacts don't have one.
            let bytecode_marker = if verification_info.artifacts.deployed_bytecode.is_some() {
                BytecodeMarker::Evm
            } else {
                BytecodeMarker::EraVm
            };
            let identifier = ContractIdentifier::from_bytecode(
                bytecode_marker,
                verification_info.artifacts.deployed_bytecode(),
            );

            addresses.push(address);
            verification_infos.push(info_json);
            bytecode_keccak256s.push(identifier.bytecode_sha3.as_bytes().to_vec());
            bytecode_without_metadata_keccak256s.push(
                identifier
                    .bytecode_without_metadata_sha3
                    .as_ref()
                    .map(|h| h.hash().as_bytes().to_vec())
                    .unwrap_or_default(),
            );
        }

        // Insert the batch; nothing to do if every fetched row was filtered out.
        if !addresses.is_empty() {
            sqlx::query!(
                r#"
                INSERT INTO
                contract_verification_info_v2 (
                    initial_contract_addr,
                    bytecode_keccak256,
                    bytecode_without_metadata_keccak256,
                    verification_info
                )
                SELECT
                    u.address,
                    u.bytecode_keccak256,
                    u.bytecode_without_metadata_keccak256,
                    u.verification_info
                FROM
                    UNNEST($1::BYTEA [], $2::BYTEA [], $3::BYTEA [], $4::JSON []) AS u (
                        address,
                        bytecode_keccak256,
                        bytecode_without_metadata_keccak256,
                        verification_info
                    )
                ON CONFLICT (initial_contract_addr) DO NOTHING
                "#,
                &addresses as _,
                &bytecode_keccak256s as _,
                &bytecode_without_metadata_keccak256s as _,
                &verification_infos as _,
            )
            .instrument("perform_verification_info_migration#insert")
            .with_arg("offset", &offset)
            .with_arg("batch_size", &batch_size)
            .execute(&mut transaction)
            .await?;
        }

        // Advance by the number of fetched rows (not `batch_size`) so skipped
        // NULL rows are still accounted for in the resume offset.
        offset += fetched;
        transaction.commit().await?;
    }

    Ok(())
}
}

#[cfg(test)]
Expand Down
2 changes: 1 addition & 1 deletion core/tests/ts-integration/hardhat.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import '@matterlabs/hardhat-zksync-vyper';

export default {
zksolc: {
version: '1.5.3',
version: '1.5.10',
compilerSource: 'binary',
settings: {
enableEraVMExtensions: true
Expand Down
2 changes: 1 addition & 1 deletion core/tests/ts-integration/scripts/compile-yul.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import { getZksolcUrl, saltFromUrl } from '@matterlabs/hardhat-zksync-solc';
import { getCompilersDir } from 'hardhat/internal/util/global-dir';
import path from 'path';

const COMPILER_VERSION = '1.5.3';
const COMPILER_VERSION = '1.5.10';
const IS_COMPILER_PRE_RELEASE = false;

async function compilerLocation(): Promise<string> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import { NodeMode } from '../../src/types';
// Regular expression to match ISO dates.
const DATE_REGEX = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{6})?/;

const ZKSOLC_VERSION = 'v1.5.3';
const ZKSOLC_VERSION = 'v1.5.10';
const SOLC_VERSION = '0.8.26';
const ZK_VM_SOLC_VERSION = 'zkVM-0.8.26-1.0.1';

Expand Down
2 changes: 1 addition & 1 deletion core/tests/ts-integration/tests/api/debug.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ describe('Debug methods', () => {
test('Should not fail for infinity recursion', async () => {
const bytecodePath = `${
testMaster.environment().pathToHome
}/core/tests/ts-integration/contracts/zkasm/artifacts/deep_stak.zkasm/zkasm/deep_stak.zkasm.zbin`;
}/core/tests/ts-integration/contracts/zkasm/artifacts/deep_stak.zkasm/deep_stak.zkasm/deep_stak.zkasm.zbin`;
const bytecode = fs.readFileSync(bytecodePath, 'utf-8');

const contractFactory = new zksync.ContractFactory([], bytecode, testMaster.mainAccount());
Expand Down

0 comments on commit 514900f

Please sign in to comment.