rust change
msmouse committed Sep 20, 2024
1 parent ff148d3 commit df96ff6
Showing 2 changed files with 16 additions and 19 deletions.
.github/workflows/workflow-run-replay-verify.yaml (8 changes: 4 additions & 4 deletions)
@@ -86,7 +86,7 @@ on:

jobs:
prepare:
runs-on: "runs-on,cpu=64,family=c7,hdd=500,image=aptos-ubuntu-x64,spot=false"
runs-on: ${{ inputs.RUNS_ON }}
outputs:
jobs_ids: ${{ steps.gen-jobs.outputs.job_ids }}
steps:
@@ -108,7 +108,7 @@ jobs:

- name: Prepare for build if not cached
if: steps.cache-aptos-debugger-binary.outputs.cache-hit != 'true'
-uses: aptos-labs/aptos-core/.github/actions/rust-setup@main
+uses: aptos-debugger-ee4ecab92c937d27426acce2c8e9e3da88f94c53 #aptos-labs/aptos-core/.github/actions/rust-setup@main
with:
GIT_CREDENTIALS: ${{ inputs.GIT_CREDENTIALS }}

@@ -171,15 +171,15 @@ jobs:
strategy:
fail-fast: false
matrix:
-job_id: ${{ fromJson(steps.prepare.outputs.job_ids) }}
+job_id: ${{ fromJson(needs.prepare.outputs.job_ids) }}
steps:
- name: Load cached aptos-debugger binary and replay_verify.py script
uses: actions/cache/restore@v4
with:
path: |
aptos-debugger
testsuite/replay_verify.py
-key: aptos-debugger-${{ inputs.GIT_SHA || github.sha }}
+key: aptos-debugger-ee4ecab92c937d27426acce2c8e9e3da88f94c53 #aptos-debugger-${{ inputs.GIT_SHA || github.sha }}
fail-on-cache-miss: true

- name: Load cached backup storage metadata cache dir
storage/db-tool/src/gen_replay_verify_jobs.rs (27 changes: 12 additions & 15 deletions)
@@ -12,7 +12,7 @@ use aptos_backup_cli::{
use aptos_logger::info;
use aptos_types::transaction::Version;
use clap::Parser;
-use itertools::{zip_eq, Itertools};
+use itertools::Itertools;
use std::{
io::Write,
iter::{once, zip},
@@ -50,14 +50,14 @@ pub struct Opt {
value_delimiter = ' '
)]
ranges_to_skip: Vec<String>,
#[clap(long, help = "Output job ranges to json files, evenly distributed.")]
output_json_files: Vec<PathBuf>,
#[clap(long, help = "Maximum ranges per job.")]
max_ranges_per_job: u64,
#[clap(long, help = "Output json file containing the jobs.")]
output_json_file: PathBuf,
}

impl Opt {
pub async fn run(self) -> anyhow::Result<()> {
-assert!(!self.output_json_files.is_empty());

let storage = self.storage.init_storage().await?;
let metadata_view = sync_and_load(
&self.metadata_cache_opt,
@@ -185,24 +185,21 @@ impl Opt {
job_ranges.len()
);

-let mut outputs = vec![vec![]; self.output_json_files.len()];
+let num_jobs = (job_ranges.len() as f32 / self.max_ranges_per_job as f32).ceil() as usize;
+let mut jobs = vec![vec![]; num_jobs];

let mut job_idx = -1;
-zip(job_ranges, (0..self.output_json_files.len()).cycle()).for_each(
+zip(job_ranges, (0..num_jobs).cycle()).for_each(
|((partial, first, last, desc), output_idx)| {
job_idx += 1;
let suffix = if partial { "-partial" } else { "" };
let job = format!("{output_idx}-{job_idx}{suffix} {first} {last} {desc}");
-outputs[output_idx].push(job);
+jobs[output_idx].push(job);
},
);

-zip_eq(self.output_json_files.iter(), outputs.into_iter()).try_for_each(
-|(path, jobs)| {
-info!("Writing to {:?}", path);
-info!("{}", serde_json::to_string_pretty(&jobs)?);
-std::fs::File::create(path)?.write_all(&serde_json::to_vec(&jobs)?)
-},
-)?;
+info!("{}", serde_json::to_string_pretty(&jobs)?);
+std::fs::File::create(self.output_json_file)?.write_all(&serde_json::to_vec(&jobs)?)?;

Ok(())
}
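A note on the new bucketing logic above: the number of jobs is now derived by ceiling division from the max_ranges_per_job option, and the job ranges are dealt out to the jobs round-robin via zip + cycle. Below is a minimal, std-only sketch of that behavior; it is not part of the commit, and the range strings and counts are made up for illustration.

// Standalone sketch of the round-robin bucketing introduced in gen_replay_verify_jobs.rs.
use std::iter::zip;

fn main() {
    // Pretend the tool produced five replay ranges and was asked for at most two per job.
    let job_ranges = vec!["0 999", "1000 1999", "2000 2999", "3000 3999", "4000 4999"];
    let max_ranges_per_job = 2u64;

    // Same ceiling division as in the diff: ceil(5 / 2) = 3 jobs.
    let num_jobs = (job_ranges.len() as f32 / max_ranges_per_job as f32).ceil() as usize;
    let mut jobs: Vec<Vec<&str>> = vec![vec![]; num_jobs];

    // Ranges are assigned to buckets 0, 1, 2, 0, 1, ... so no job ends up with
    // more than max_ranges_per_job ranges.
    zip(job_ranges, (0..num_jobs).cycle()).for_each(|(range, job_idx)| {
        jobs[job_idx].push(range);
    });

    // Prints [["0 999", "3000 3999"], ["1000 1999", "4000 4999"], ["2000 2999"]]
    println!("{:?}", jobs);
}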
