From 4195662a12d965b75adeda9277cd95af028dc6c5 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 13:14:26 +0200 Subject: [PATCH 1/8] chore: add path to io errors --- .../rattler_repodata_gateway/src/fetch/mod.rs | 31 ++++++++++--------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index 6f5724687..75c88d3e0 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -57,17 +57,17 @@ pub enum FetchRepoDataError { #[error("repodata not found")] NotFound(#[from] RepoDataNotFoundError), - #[error("failed to create temporary file for repodata.json")] - FailedToCreateTemporaryFile(#[source] std::io::Error), + #[error("failed to create temporary file for repodata.json at: '{0}'")] + FailedToCreateTemporaryFile(PathBuf, #[source] std::io::Error), #[error("failed to persist temporary repodata.json file")] FailedToPersistTemporaryFile(#[from] tempfile::PersistError), - #[error("failed to get metadata from repodata.json file")] - FailedToGetMetadata(#[source] std::io::Error), + #[error("failed to get metadata from repodata.json file at: '{0}'")] + FailedToGetMetadata(PathBuf, #[source] std::io::Error), - #[error("failed to write cache state")] - FailedToWriteCacheState(#[source] std::io::Error), + #[error("failed to write cache state to: '{0}'")] + FailedToWriteCacheState(PathBuf, #[source] std::io::Error), #[error("there is no cache available")] NoCacheAvailable, @@ -267,7 +267,7 @@ async fn repodata_from_file( new_cache_state .to_path(&cache_state_path) .map(|_| new_cache_state) - .map_err(FetchRepoDataError::FailedToWriteCacheState) + .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) }) .await??; @@ -432,7 +432,7 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(FetchRepoDataError::FailedToWriteCacheState) + .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) }) .await??; @@ -522,7 +522,7 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(FetchRepoDataError::FailedToWriteCacheState) + .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) }) .await??; @@ -562,13 +562,14 @@ pub async fn fetch_repo_data( let repo_data_destination_path = repo_data_json_path.clone(); let repo_data_json_metadata = tokio::task::spawn_blocking(move || { let file = temp_file - .persist(repo_data_destination_path) + .persist(repo_data_destination_path.clone()) .map_err(FetchRepoDataError::FailedToPersistTemporaryFile)?; // Determine the last modified date and size of the repodata.json file. We store these values in // the cache to link the cache to the corresponding repodata.json file. 
- file.metadata() - .map_err(FetchRepoDataError::FailedToGetMetadata) + file.metadata().map_err(|e| { + FetchRepoDataError::FailedToGetMetadata(repo_data_destination_path.clone(), e) + }) }) .await??; @@ -579,7 +580,7 @@ pub async fn fetch_repo_data( cache_headers, cache_last_modified: repo_data_json_metadata .modified() - .map_err(FetchRepoDataError::FailedToGetMetadata)?, + .map_err(|e| FetchRepoDataError::FailedToGetMetadata(repo_data_json_path.clone(), e))?, cache_size: repo_data_json_metadata.len(), blake2_hash: Some(blake2_hash), blake2_hash_nominal: Some(blake2_hash), @@ -593,7 +594,7 @@ pub async fn fetch_repo_data( new_cache_state .to_path(&cache_state_path) .map(|_| new_cache_state) - .map_err(FetchRepoDataError::FailedToWriteCacheState) + .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) }) .await??; @@ -648,7 +649,7 @@ async fn stream_and_decode_to_file( // Construct a temporary file let temp_file = - NamedTempFile::new_in(temp_dir).map_err(FetchRepoDataError::FailedToCreateTemporaryFile)?; + NamedTempFile::new_in(temp_dir).map_err(|e| FetchRepoDataError::FailedToCreateTemporaryFile(temp_dir.to_path_buf(), e))?; // Clone the file handle and create a hashing writer so we can compute a hash while the content // is being written to disk. From 218963f5f77911738b89350ae9dedb39d19f3ce0 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 13:32:08 +0200 Subject: [PATCH 2/8] fmt --- crates/rattler_repodata_gateway/src/fetch/mod.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index 75c88d3e0..e686dc795 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -432,7 +432,9 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) + .map_err(|e| { + FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e) + }) }) .await??; @@ -648,8 +650,8 @@ async fn stream_and_decode_to_file( ); // Construct a temporary file - let temp_file = - NamedTempFile::new_in(temp_dir).map_err(|e| FetchRepoDataError::FailedToCreateTemporaryFile(temp_dir.to_path_buf(), e))?; + let temp_file = NamedTempFile::new_in(temp_dir) + .map_err(|e| FetchRepoDataError::FailedToCreateTemporaryFile(temp_dir.to_path_buf(), e))?; // Clone the file handle and create a hashing writer so we can compute a hash while the content // is being written to disk. 
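The pattern in patches 1-2 above is to carry the offending path in each error variant so that io::Error messages point at a concrete file. A minimal, standalone sketch of that pattern, assuming thiserror; `ExampleError` and `read_settings` are illustrative names, not part of this crate:

    use std::path::{Path, PathBuf};
    use thiserror::Error;

    #[derive(Debug, Error)]
    enum ExampleError {
        // The path is stored as the first field and interpolated into the message.
        #[error("failed to read settings file at: '{}'", .0.display())]
        FailedToRead(PathBuf, #[source] std::io::Error),
    }

    fn read_settings(path: &Path) -> Result<String, ExampleError> {
        // Once the variant has two fields, the bare constructor no longer fits
        // `map_err`, which is why the patch switches to closures at each call site.
        std::fs::read_to_string(path)
            .map_err(|e| ExampleError::FailedToRead(path.to_path_buf(), e))
    }
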
From 01f93d0eead8c7f05cf0c2b9e0179ef3048ef643 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 14:34:51 +0200 Subject: [PATCH 3/8] feat: use fs_err instead of fs --- Cargo.toml | 2 +- crates/rattler_repodata_gateway/Cargo.toml | 1 + crates/rattler_repodata_gateway/src/fetch/cache/mod.rs | 5 +++-- crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs | 10 +++++++--- crates/rattler_repodata_gateway/src/fetch/mod.rs | 9 ++++++--- crates/rattler_repodata_gateway/src/sparse/mod.rs | 8 +++++--- 6 files changed, 23 insertions(+), 12 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d1792b93d..7824b0543 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -61,7 +61,7 @@ digest = "0.10.7" dirs = "5.0.1" dunce = "1.0.4" enum_dispatch = "0.3.13" -fs-err = "2.11.0" +fs-err = { version = "2.11.0", features = ["tokio"] } fslock = "0.2.1" futures = "0.3.30" futures-util = "0.3.30" diff --git a/crates/rattler_repodata_gateway/Cargo.toml b/crates/rattler_repodata_gateway/Cargo.toml index e4133417c..31eb062fa 100644 --- a/crates/rattler_repodata_gateway/Cargo.toml +++ b/crates/rattler_repodata_gateway/Cargo.toml @@ -11,6 +11,7 @@ license.workspace = true readme.workspace = true [dependencies] +fs-err = { workspace = true} anyhow = { workspace = true } async-fd-lock = { workspace = true } async-compression = { workspace = true, features = ["gzip", "tokio", "bzip2", "zstd"] } diff --git a/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs b/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs index 6f4e448a3..78f0a21ac 100644 --- a/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs @@ -4,7 +4,8 @@ pub use cache_headers::CacheHeaders; use rattler_digest::{serde::SerializableHash, Blake2b256}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_with::serde_as; -use std::{fs, fs::File, path::Path, str::FromStr, time::SystemTime}; +use std::{path::Path, str::FromStr, time::SystemTime}; +use fs_err as fs; use url::Url; /// Representation of the `.info.json` file alongside a `repodata.json` file. @@ -72,7 +73,7 @@ impl RepoDataState { /// Save the cache state to the specified file. pub fn to_path(&self, path: &Path) -> Result<(), std::io::Error> { - let file = File::create(path)?; + let file = fs::File::create(path)?; Ok(serde_json::to_writer_pretty(file, self)?) 
} } diff --git a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs index 468a088c2..f002ca575 100644 --- a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs @@ -101,6 +101,8 @@ use std::str::FromStr; use std::sync::Arc; use tempfile::NamedTempFile; use url::Url; +use fs_err as fs; +use fs_err::tokio as tokio_fs; pub use crate::fetch::cache::{JLAPFooter, JLAPState, RepoDataState}; use crate::reporter::ResponseReporterExt; @@ -538,7 +540,7 @@ fn apply_jlap_patches( // Read the contents of the current repodata to a string let repo_data_contents = - std::fs::read_to_string(repo_data_path).map_err(JLAPError::FileSystem)?; + fs::read_to_string(repo_data_path).map_err(JLAPError::FileSystem)?; // Parse the JSON so we can manipulate it tracing::info!("parsing cached repodata.json as JSON"); @@ -637,6 +639,8 @@ mod test { use tempfile::TempDir; use url::Url; + use fs_err::tokio as tokio_fs; + const FAKE_STATE_DATA_INITIAL: &str = r#"{ "url": "https://repo.example.com/pkgs/main/osx-64/repodata.json.zst", "etag": "W/\"49aa6d9ea6f3285efe657780a7c8cd58\"", @@ -851,14 +855,14 @@ mod test { if let Some(content) = server_jlap { // Add files we need to request to the server - tokio::fs::write(subdir_path.path().join("repodata.jlap"), content) + tokio_fs::write(subdir_path.path().join("repodata.jlap"), content) .await .unwrap(); } if let Some(content) = server_repo_data { // Add files we need to request to the server - tokio::fs::write(subdir_path.path().join("repodata.json"), content) + tokio_fs::write(subdir_path.path().join("repodata.json"), content) .await .unwrap(); } diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index e686dc795..52fde7a35 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -24,6 +24,9 @@ use tokio_util::io::StreamReader; use tracing::instrument; use url::Url; +// use fs-err for better error reporting +use fs_err::tokio as tokio_fs; + mod cache; pub mod jlap; @@ -231,7 +234,7 @@ async fn repodata_from_file( lock_file: LockedFile, ) -> Result { // copy file from subdir_url to out_path - if let Err(e) = tokio::fs::copy(&subdir_url.to_file_path().unwrap(), &out_path).await { + if let Err(e) = tokio_fs::copy(&subdir_url.to_file_path().unwrap(), &out_path).await { return if e.kind() == ErrorKind::NotFound { Err(FetchRepoDataError::NotFound( RepoDataNotFoundError::FileSystemError(e), @@ -244,7 +247,7 @@ async fn repodata_from_file( // create a dummy cache state let new_cache_state = RepoDataState { url: subdir_url.clone(), - cache_size: tokio::fs::metadata(&out_path) + cache_size: tokio_fs::metadata(&out_path) .await .map_err(FetchRepoDataError::IoError)? .len(), @@ -655,7 +658,7 @@ async fn stream_and_decode_to_file( // Clone the file handle and create a hashing writer so we can compute a hash while the content // is being written to disk. - let file = tokio::fs::File::from_std(temp_file.as_file().try_clone().unwrap()); + let file = tokio_fs::File::from_std(fs_err::File::from_parts(temp_file.as_file().try_clone().unwrap(), temp_file.path())); let mut hashing_file_writer = HashingWriter::<_, Blake2b256>::new(file); // Decode, hash and write the data to the file. 
diff --git a/crates/rattler_repodata_gateway/src/sparse/mod.rs b/crates/rattler_repodata_gateway/src/sparse/mod.rs index a9d90ed0e..a707a2b43 100644 --- a/crates/rattler_repodata_gateway/src/sparse/mod.rs +++ b/crates/rattler_repodata_gateway/src/sparse/mod.rs @@ -23,6 +23,7 @@ use serde::{ use serde_json::value::RawValue; use superslice::Ext; use thiserror::Error; +use fs_err as fs; /// A struct to enable loading records from a `repodata.json` file on demand. /// Since most of the time you don't need all the records from the @@ -102,7 +103,7 @@ impl SparseRepoData { path: impl AsRef, patch_function: Option, ) -> Result { - let file = std::fs::File::open(path)?; + let file = fs::File::open(path)?; let memory_map = unsafe { memmap2::Mmap::map(&file) }?; Ok(SparseRepoData { inner: SparseRepoDataInner::Memmapped( @@ -482,6 +483,7 @@ mod test { use super::{load_repo_data_recursively, PackageFilename, SparseRepoData}; use crate::utils::test::fetch_repo_data; + use fs_err as fs; fn test_dir() -> PathBuf { Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data") @@ -510,7 +512,7 @@ mod test { .await .into_iter() .map(|(channel, subdir, path)| { - let bytes = std::fs::read(path).unwrap(); + let bytes = fs::read(path).unwrap(); (channel, subdir, bytes.into()) }) .collect() @@ -658,7 +660,7 @@ mod test { test_dir().join("channels/conda-forge/noarch/repodata.json"), test_dir().join("channels/conda-forge/linux-64/repodata.json"), ] { - let str = std::fs::read_to_string(&path).unwrap(); + let str = fs::read_to_string(&path).unwrap(); let repo_data: RepoData = serde_json::from_str(&str).unwrap(); records.push(repo_data); } From b54c7b0020d25e4f9094b4a299febcf52ec0d504 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 14:35:37 +0200 Subject: [PATCH 4/8] Revert "chore: add path to io errors" This reverts commit 4195662a12d965b75adeda9277cd95af028dc6c5. 
--- .../rattler_repodata_gateway/src/fetch/mod.rs | 33 +++++++++---------- 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index 52fde7a35..d6449b3a7 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -60,17 +60,17 @@ pub enum FetchRepoDataError { #[error("repodata not found")] NotFound(#[from] RepoDataNotFoundError), - #[error("failed to create temporary file for repodata.json at: '{0}'")] - FailedToCreateTemporaryFile(PathBuf, #[source] std::io::Error), + #[error("failed to create temporary file for repodata.json")] + FailedToCreateTemporaryFile(#[source] std::io::Error), #[error("failed to persist temporary repodata.json file")] FailedToPersistTemporaryFile(#[from] tempfile::PersistError), - #[error("failed to get metadata from repodata.json file at: '{0}'")] - FailedToGetMetadata(PathBuf, #[source] std::io::Error), + #[error("failed to get metadata from repodata.json file")] + FailedToGetMetadata(#[source] std::io::Error), - #[error("failed to write cache state to: '{0}'")] - FailedToWriteCacheState(PathBuf, #[source] std::io::Error), + #[error("failed to write cache state")] + FailedToWriteCacheState(#[source] std::io::Error), #[error("there is no cache available")] NoCacheAvailable, @@ -270,7 +270,7 @@ async fn repodata_from_file( new_cache_state .to_path(&cache_state_path) .map(|_| new_cache_state) - .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) + .map_err(FetchRepoDataError::FailedToWriteCacheState) }) .await??; @@ -435,9 +435,7 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(|e| { - FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e) - }) + .map_err(FetchRepoDataError::FailedToWriteCacheState) }) .await??; @@ -527,7 +525,7 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) + .map_err(FetchRepoDataError::FailedToWriteCacheState) }) .await??; @@ -567,14 +565,13 @@ pub async fn fetch_repo_data( let repo_data_destination_path = repo_data_json_path.clone(); let repo_data_json_metadata = tokio::task::spawn_blocking(move || { let file = temp_file - .persist(repo_data_destination_path.clone()) + .persist(repo_data_destination_path) .map_err(FetchRepoDataError::FailedToPersistTemporaryFile)?; // Determine the last modified date and size of the repodata.json file. We store these values in // the cache to link the cache to the corresponding repodata.json file. 
- file.metadata().map_err(|e| { - FetchRepoDataError::FailedToGetMetadata(repo_data_destination_path.clone(), e) - }) + file.metadata() + .map_err(FetchRepoDataError::FailedToGetMetadata) }) .await??; @@ -585,7 +582,7 @@ pub async fn fetch_repo_data( cache_headers, cache_last_modified: repo_data_json_metadata .modified() - .map_err(|e| FetchRepoDataError::FailedToGetMetadata(repo_data_json_path.clone(), e))?, + .map_err(FetchRepoDataError::FailedToGetMetadata)?, cache_size: repo_data_json_metadata.len(), blake2_hash: Some(blake2_hash), blake2_hash_nominal: Some(blake2_hash), @@ -599,7 +596,7 @@ pub async fn fetch_repo_data( new_cache_state .to_path(&cache_state_path) .map(|_| new_cache_state) - .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) + .map_err(FetchRepoDataError::FailedToWriteCacheState) }) .await??; @@ -654,7 +651,7 @@ async fn stream_and_decode_to_file( // Construct a temporary file let temp_file = NamedTempFile::new_in(temp_dir) - .map_err(|e| FetchRepoDataError::FailedToCreateTemporaryFile(temp_dir.to_path_buf(), e))?; + .map_err(FetchRepoDataError::FailedToCreateTemporaryFile)?; // Clone the file handle and create a hashing writer so we can compute a hash while the content // is being written to disk. From 5cd1615566d5ad2bed5ffb37518cfc5d40bfcf35 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 14:36:00 +0200 Subject: [PATCH 5/8] Revert "fmt" This reverts commit 218963f5f77911738b89350ae9dedb39d19f3ce0. --- crates/rattler_repodata_gateway/src/fetch/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index d6449b3a7..24ce5d63c 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -435,7 +435,7 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(FetchRepoDataError::FailedToWriteCacheState) + .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) }) .await??; @@ -650,8 +650,8 @@ async fn stream_and_decode_to_file( ); // Construct a temporary file - let temp_file = NamedTempFile::new_in(temp_dir) - .map_err(FetchRepoDataError::FailedToCreateTemporaryFile)?; + let temp_file = + NamedTempFile::new_in(temp_dir).map_err(FetchRepoDataError::FailedToCreateTemporaryFile)?; // Clone the file handle and create a hashing writer so we can compute a hash while the content // is being written to disk. 
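Patches 4-5 revert that per-variant path plumbing because patch 3 switches the crate to fs-err, a drop-in wrapper around std::fs (and around tokio::fs via its `tokio` feature) whose errors already name the file involved. A small illustrative sketch of the difference, not part of the patch series; the file name is made up and the exact message wording is fs-err's, only paraphrased in the comment:

    use fs_err as fs;

    fn main() {
        // std::fs::read_to_string would fail with just
        // "No such file or directory (os error 2)"; fs_err::read_to_string fails
        // with a message that also names `missing-repodata.json`.
        if let Err(err) = fs::read_to_string("missing-repodata.json") {
            eprintln!("{err}");
        }
    }
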
From 6fa279a8d9e80f091e0e681e17bc67cc5bd15aa7 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 14:43:38 +0200 Subject: [PATCH 6/8] fmt & lint --- crates/rattler_repodata_gateway/src/fetch/cache/mod.rs | 2 +- crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs | 6 ++---- crates/rattler_repodata_gateway/src/fetch/mod.rs | 9 +++++++-- crates/rattler_repodata_gateway/src/sparse/mod.rs | 2 +- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs b/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs index 78f0a21ac..3cd28368d 100644 --- a/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/cache/mod.rs @@ -1,11 +1,11 @@ mod cache_headers; pub use cache_headers::CacheHeaders; +use fs_err as fs; use rattler_digest::{serde::SerializableHash, Blake2b256}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_with::serde_as; use std::{path::Path, str::FromStr, time::SystemTime}; -use fs_err as fs; use url::Url; /// Representation of the `.info.json` file alongside a `repodata.json` file. diff --git a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs index f002ca575..98ad950fe 100644 --- a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs @@ -81,6 +81,7 @@ use blake2::digest::Output; use blake2::digest::{FixedOutput, Update}; +use fs_err as fs; use rattler_digest::{ parse_digest_from_hex, serde::SerializableHash, Blake2b256, Blake2b256Hash, Blake2bMac256, }; @@ -101,8 +102,6 @@ use std::str::FromStr; use std::sync::Arc; use tempfile::NamedTempFile; use url::Url; -use fs_err as fs; -use fs_err::tokio as tokio_fs; pub use crate::fetch::cache::{JLAPFooter, JLAPState, RepoDataState}; use crate::reporter::ResponseReporterExt; @@ -539,8 +538,7 @@ fn apply_jlap_patches( } // Read the contents of the current repodata to a string - let repo_data_contents = - fs::read_to_string(repo_data_path).map_err(JLAPError::FileSystem)?; + let repo_data_contents = fs::read_to_string(repo_data_path).map_err(JLAPError::FileSystem)?; // Parse the JSON so we can manipulate it tracing::info!("parsing cached repodata.json as JSON"); diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index 24ce5d63c..815c31d63 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -435,7 +435,9 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(cache_state_path, e)) + .map_err(|e| { + FetchRepoDataError::FailedToWriteCacheState(e) + }) }) .await??; @@ -655,7 +657,10 @@ async fn stream_and_decode_to_file( // Clone the file handle and create a hashing writer so we can compute a hash while the content // is being written to disk. - let file = tokio_fs::File::from_std(fs_err::File::from_parts(temp_file.as_file().try_clone().unwrap(), temp_file.path())); + let file = tokio_fs::File::from_std(fs_err::File::from_parts( + temp_file.as_file().try_clone().unwrap(), + temp_file.path(), + )); let mut hashing_file_writer = HashingWriter::<_, Blake2b256>::new(file); // Decode, hash and write the data to the file. 
diff --git a/crates/rattler_repodata_gateway/src/sparse/mod.rs b/crates/rattler_repodata_gateway/src/sparse/mod.rs index a707a2b43..bc543af88 100644 --- a/crates/rattler_repodata_gateway/src/sparse/mod.rs +++ b/crates/rattler_repodata_gateway/src/sparse/mod.rs @@ -11,6 +11,7 @@ use std::{ }; use bytes::Bytes; +use fs_err as fs; use futures::{stream, StreamExt, TryFutureExt, TryStreamExt}; use itertools::Itertools; use rattler_conda_types::{ @@ -23,7 +24,6 @@ use serde::{ use serde_json::value::RawValue; use superslice::Ext; use thiserror::Error; -use fs_err as fs; /// A struct to enable loading records from a `repodata.json` file on demand. /// Since most of the time you don't need all the records from the From 6314cff93e4f7ba9cfadc3df5d2e52346baa962c Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 14:45:45 +0200 Subject: [PATCH 7/8] fmt --- crates/rattler_repodata_gateway/src/fetch/mod.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index 815c31d63..54f70564a 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -435,9 +435,7 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(|e| { - FetchRepoDataError::FailedToWriteCacheState(e) - }) + .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(e)) }) .await??; From ffe7ee5ce6db1d75b637619497fda732f1c7018d Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Mon, 23 Sep 2024 14:55:39 +0200 Subject: [PATCH 8/8] another fmt and clippy --- crates/rattler_repodata_gateway/src/fetch/mod.rs | 2 +- crates/rattler_repodata_gateway/src/sparse/mod.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index 54f70564a..4fa1dfa98 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -435,7 +435,7 @@ pub async fn fetch_repo_data( cache_state .to_path(&cache_state_path) .map(|_| cache_state) - .map_err(|e| FetchRepoDataError::FailedToWriteCacheState(e)) + .map_err(FetchRepoDataError::FailedToWriteCacheState) }) .await??; diff --git a/crates/rattler_repodata_gateway/src/sparse/mod.rs b/crates/rattler_repodata_gateway/src/sparse/mod.rs index bc543af88..dea1363ab 100644 --- a/crates/rattler_repodata_gateway/src/sparse/mod.rs +++ b/crates/rattler_repodata_gateway/src/sparse/mod.rs @@ -103,7 +103,7 @@ impl SparseRepoData { path: impl AsRef, patch_function: Option, ) -> Result { - let file = fs::File::open(path)?; + let file = fs::File::open(path.as_ref().to_owned())?; let memory_map = unsafe { memmap2::Mmap::map(&file) }?; Ok(SparseRepoData { inner: SparseRepoDataInner::Memmapped(