From 4399a4203f1eccdc1ade0dc615558d7a24d5900c Mon Sep 17 00:00:00 2001
From: ethan
Date: Sat, 20 Jul 2024 13:38:49 +0100
Subject: [PATCH 1/7] add flush to arbitrary file

---
 src/backend.rs | 14 ++++++++------
 src/cache.rs   | 29 +++++++++++++++++++++++++++++
 2 files changed, 37 insertions(+), 6 deletions(-)

diff --git a/src/backend.rs b/src/backend.rs
index 958020c..b46a440 100644
--- a/src/backend.rs
+++ b/src/backend.rs
@@ -21,14 +21,10 @@ use revm::{
 };
 use rustc_hash::FxHashMap;
 use std::{
-    collections::{hash_map::Entry, HashMap, VecDeque},
-    future::IntoFuture,
-    marker::PhantomData,
-    pin::Pin,
-    sync::{
+    collections::{hash_map::Entry, HashMap, VecDeque}, future::IntoFuture, marker::PhantomData, path::PathBuf, pin::Pin, sync::{
         mpsc::{channel as oneshot_channel, Sender as OneshotSender},
         Arc,
-    },
+    }
 };
 
 /// Logged when an error is indicative that the user is trying to fork from a non-archive node.
@@ -656,6 +652,11 @@ impl SharedBackend {
     pub fn flush_cache(&self) {
         self.cache.0.flush();
     }
+
+
+    pub fn flush_cache_to(&self, cache_path: Option<PathBuf>) {
+        self.cache.0.flush_to(cache_path);
+    }
 }
 
 impl DatabaseRef for SharedBackend {
@@ -767,4 +768,5 @@ mod tests {
         let json = JsonBlockCacheDB::load(cache_path).unwrap();
         assert!(!json.db().accounts.read().is_empty());
     }
+
 }
diff --git a/src/cache.rs b/src/cache.rs
index b4f738b..f5ae630 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -403,6 +403,35 @@ impl JsonBlockCacheDB {
 
         trace!(target: "cache", "saved json cache");
     }
+
+    #[instrument(level = "warn", skip_all, fields(path = ?self.cache_path))]
+    pub fn flush_to(&self, cache_path: Option<PathBuf>) {
+        let Some(path) = cache_path else {
+            trace!(target: "cache", "no cache path provided, skipping flush");
+            return;
+        };
+
+        trace!(target: "cache", "saving json cache");
+
+        if let Some(parent) = path.parent() {
+            let _ = fs::create_dir_all(parent);
+        }
+
+        let file = match fs::File::create(path) {
+            Ok(file) => file,
+            Err(e) => return warn!(target: "cache", %e, "Failed to open json cache for writing"),
+        };
+
+        let mut writer = BufWriter::new(file);
+        if let Err(e) = serde_json::to_writer(&mut writer, &self.data) {
+            return warn!(target: "cache", %e, "Failed to write to json cache");
+        }
+        if let Err(e) = writer.flush() {
+            return warn!(target: "cache", %e, "Failed to flush to json cache");
+        }
+
+        trace!(target: "cache", "saved json cache");
+    }
 }
 
 /// The Data the [JsonBlockCacheDB] can read and flush

From d4a8074ade6c6c47ae3a3403aca072761eca20a9 Mon Sep 17 00:00:00 2001
From: ethan
Date: Sat, 20 Jul 2024 13:44:52 +0100
Subject: [PATCH 2/7] add documentation

---
 src/cache.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/cache.rs b/src/cache.rs
index f5ae630..8dcc4d0 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -404,6 +404,7 @@ impl JsonBlockCacheDB {
         trace!(target: "cache", "saved json cache");
     }
 
+    /// Flushes the DB to disk
     #[instrument(level = "warn", skip_all, fields(path = ?self.cache_path))]
     pub fn flush_to(&self, cache_path: Option<PathBuf>) {
         let Some(path) = cache_path else {

From 44ba87849d65e7913f0340c29ec8eaa7d8878891 Mon Sep 17 00:00:00 2001
From: ethan
Date: Sat, 20 Jul 2024 19:34:26 +0100
Subject: [PATCH 3/7] formatting

---
 src/backend.rs | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/src/backend.rs b/src/backend.rs
index b46a440..a7d43b6 100644
--- a/src/backend.rs
+++ b/src/backend.rs
@@ -21,10 +21,15 @@ use revm::{
 };
 use rustc_hash::FxHashMap;
 use std::{
-    collections::{hash_map::Entry, HashMap, VecDeque}, future::IntoFuture, marker::PhantomData, path::PathBuf, pin::Pin, sync::{
+    collections::{hash_map::Entry, HashMap, VecDeque},
+    future::IntoFuture,
+    marker::PhantomData,
+    path::PathBuf,
+    pin::Pin,
+    sync::{
         mpsc::{channel as oneshot_channel, Sender as OneshotSender},
         Arc,
-    }
+    },
 };
 
 /// Logged when an error is indicative that the user is trying to fork from a non-archive node.
@@ -653,7 +658,6 @@ impl SharedBackend {
         self.cache.0.flush();
     }
 
-
     pub fn flush_cache_to(&self, cache_path: Option<PathBuf>) {
         self.cache.0.flush_to(cache_path);
     }
 }
@@ -768,5 +772,4 @@ mod tests {
         let json = JsonBlockCacheDB::load(cache_path).unwrap();
         assert!(!json.db().accounts.read().is_empty());
     }
-
 }

From 0bccc4804fe1208867e6244a6afb29eb1dfa59d0 Mon Sep 17 00:00:00 2001
From: ethan
Date: Mon, 22 Jul 2024 15:07:18 +0100
Subject: [PATCH 4/7] refactoring

---
 src/backend.rs |  2 +-
 src/cache.rs   | 28 +++-------------------------
 2 files changed, 4 insertions(+), 26 deletions(-)

diff --git a/src/backend.rs b/src/backend.rs
index a7d43b6..1754c35 100644
--- a/src/backend.rs
+++ b/src/backend.rs
@@ -658,7 +658,7 @@ impl SharedBackend {
         self.cache.0.flush();
     }
 
-    pub fn flush_cache_to(&self, cache_path: Option<PathBuf>) {
+    pub fn flush_cache_to(&self, cache_path: PathBuf) {
         self.cache.0.flush_to(cache_path);
     }
 }
diff --git a/src/cache.rs b/src/cache.rs
index 8dcc4d0..37c38ad 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -382,35 +382,13 @@ impl JsonBlockCacheDB {
     #[instrument(level = "warn", skip_all, fields(path = ?self.cache_path))]
     pub fn flush(&self) {
         let Some(path) = &self.cache_path else { return };
-        trace!(target: "cache", "saving json cache");
-
-        if let Some(parent) = path.parent() {
-            let _ = fs::create_dir_all(parent);
-        }
-
-        let file = match fs::File::create(path) {
-            Ok(file) => file,
-            Err(e) => return warn!(target: "cache", %e, "Failed to open json cache for writing"),
-        };
-
-        let mut writer = BufWriter::new(file);
-        if let Err(e) = serde_json::to_writer(&mut writer, &self.data) {
-            return warn!(target: "cache", %e, "Failed to write to json cache");
-        }
-        if let Err(e) = writer.flush() {
-            return warn!(target: "cache", %e, "Failed to flush to json cache");
-        }
-
-        trace!(target: "cache", "saved json cache");
+        self.flush_to(path.clone());
     }
 
     /// Flushes the DB to disk
     #[instrument(level = "warn", skip_all, fields(path = ?self.cache_path))]
-    pub fn flush_to(&self, cache_path: Option<PathBuf>) {
-        let Some(path) = cache_path else {
-            trace!(target: "cache", "no cache path provided, skipping flush");
-            return;
-        };
+    pub fn flush_to(&self, cache_path: PathBuf) {
+        let path: PathBuf = cache_path;
 
         trace!(target: "cache", "saving json cache");
 

From fa6d8fa26fcad358b99a3af4600e34a1bedb7a07 Mon Sep 17 00:00:00 2001
From: ethan
Date: Mon, 22 Jul 2024 15:15:11 +0100
Subject: [PATCH 5/7] docs

---
 src/backend.rs | 1 +
 src/cache.rs   | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/backend.rs b/src/backend.rs
index 1754c35..851a01f 100644
--- a/src/backend.rs
+++ b/src/backend.rs
@@ -658,6 +658,7 @@ impl SharedBackend {
         self.cache.0.flush();
     }
 
+    /// Flushes the DB to a specific file
     pub fn flush_cache_to(&self, cache_path: PathBuf) {
         self.cache.0.flush_to(cache_path);
     }
 }
diff --git a/src/cache.rs b/src/cache.rs
index 37c38ad..70d0cda 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -385,7 +385,7 @@ impl JsonBlockCacheDB {
         self.flush_to(path.clone());
     }
 
-    /// Flushes the DB to disk
+    /// Flushes the DB to a specific file
     #[instrument(level = "warn", skip_all, fields(path = ?self.cache_path))]
     pub fn flush_to(&self, cache_path: PathBuf) {
         let path: PathBuf = cache_path;

From d1d7aadcf669f00f858a193da8d4dc9c97159b13 Mon Sep 17 00:00:00 2001
From: ethan
Date: Mon, 22 Jul 2024 15:17:21 +0100
Subject: [PATCH 6/7] eliminate redundant code

---
 src/cache.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/cache.rs b/src/cache.rs
index 70d0cda..d2c4efd 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -386,7 +386,6 @@ impl JsonBlockCacheDB {
     }
 
     /// Flushes the DB to a specific file
-    #[instrument(level = "warn", skip_all, fields(path = ?self.cache_path))]
     pub fn flush_to(&self, cache_path: PathBuf) {
         let path: PathBuf = cache_path;
 

From c43ea62611e14feeadab79a4656f346afd58ac14 Mon Sep 17 00:00:00 2001
From: ethan
Date: Tue, 23 Jul 2024 15:22:01 +0100
Subject: [PATCH 7/7] change to &Path

---
 src/backend.rs | 4 ++--
 src/cache.rs   | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/backend.rs b/src/backend.rs
index 851a01f..f98e254 100644
--- a/src/backend.rs
+++ b/src/backend.rs
@@ -24,7 +24,7 @@ use std::{
     collections::{hash_map::Entry, HashMap, VecDeque},
     future::IntoFuture,
     marker::PhantomData,
-    path::PathBuf,
+    path::Path,
     pin::Pin,
     sync::{
         mpsc::{channel as oneshot_channel, Sender as OneshotSender},
         Arc,
@@ -659,7 +659,7 @@ impl SharedBackend {
     }
 
     /// Flushes the DB to a specific file
-    pub fn flush_cache_to(&self, cache_path: PathBuf) {
+    pub fn flush_cache_to(&self, cache_path: &Path) {
         self.cache.0.flush_to(cache_path);
     }
 }
diff --git a/src/cache.rs b/src/cache.rs
index d2c4efd..ad1217e 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -10,7 +10,7 @@ use std::{
     collections::{BTreeSet, HashMap},
     fs,
     io::{BufWriter, Write},
-    path::PathBuf,
+    path::{Path, PathBuf},
     sync::Arc,
 };
 use url::Url;
@@ -382,12 +382,12 @@ impl JsonBlockCacheDB {
     #[instrument(level = "warn", skip_all, fields(path = ?self.cache_path))]
     pub fn flush(&self) {
         let Some(path) = &self.cache_path else { return };
-        self.flush_to(path.clone());
+        self.flush_to(path.as_path());
     }
 
     /// Flushes the DB to a specific file
-    pub fn flush_to(&self, cache_path: PathBuf) {
-        let path: PathBuf = cache_path;
+    pub fn flush_to(&self, cache_path: &Path) {
+        let path: &Path = cache_path;
 
         trace!(target: "cache", "saving json cache");
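
Usage sketch (editorial addition, not part of the patch series): after PATCH 7/7, SharedBackend::flush_cache_to takes a &Path and forwards to JsonBlockCacheDB::flush_to, which attempts to create any missing parent directories and then writes the cache as JSON, logging a warning instead of returning an error when something fails. The snippet below shows how a caller might snapshot the cache to a file of its own choosing; the foundry_fork_db import path, the helper name, and the example file name are assumptions for illustration, and only flush_cache_to itself comes from the diffs above.

    use std::path::Path;

    use foundry_fork_db::SharedBackend;

    /// Write the in-memory JSON block cache to a caller-chosen file, leaving the
    /// backend's default cache path (the one used by flush_cache) untouched.
    fn snapshot_fork_cache(backend: &SharedBackend, snapshot: &Path) {
        // Added in this series: flush_cache_to(&Path) delegates to
        // JsonBlockCacheDB::flush_to, which creates parent directories and
        // serializes the cache with serde_json, warning on I/O errors.
        backend.flush_cache_to(snapshot);
    }

    // Example call site (hypothetical path):
    // snapshot_fork_cache(&backend, Path::new("cache/mainnet-snapshot.json"));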