Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

chore: start using fs-err in repodata_gateway #877

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ digest = "0.10.7"
dirs = "5.0.1"
dunce = "1.0.4"
enum_dispatch = "0.3.13"
fs-err = "2.11.0"
fs-err = { version = "2.11.0", features = ["tokio"] }
fslock = "0.2.1"
futures = "0.3.30"
futures-util = "0.3.30"
Expand Down
1 change: 1 addition & 0 deletions crates/rattler_repodata_gateway/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ license.workspace = true
readme.workspace = true

[dependencies]
fs-err = { workspace = true}
anyhow = { workspace = true }
async-fd-lock = { workspace = true }
async-compression = { workspace = true, features = ["gzip", "tokio", "bzip2", "zstd"] }
Expand Down
5 changes: 3 additions & 2 deletions crates/rattler_repodata_gateway/src/fetch/cache/mod.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
mod cache_headers;

pub use cache_headers::CacheHeaders;
use fs_err as fs;
use rattler_digest::{serde::SerializableHash, Blake2b256};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_with::serde_as;
use std::{fs, fs::File, path::Path, str::FromStr, time::SystemTime};
use std::{path::Path, str::FromStr, time::SystemTime};
use url::Url;

/// Representation of the `.info.json` file alongside a `repodata.json` file.
Expand Down Expand Up @@ -72,7 +73,7 @@ impl RepoDataState {

/// Save the cache state to the specified file.
pub fn to_path(&self, path: &Path) -> Result<(), std::io::Error> {
let file = File::create(path)?;
let file = fs::File::create(path)?;
Ok(serde_json::to_writer_pretty(file, self)?)
}
}
Expand Down
10 changes: 6 additions & 4 deletions crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@

use blake2::digest::Output;
use blake2::digest::{FixedOutput, Update};
use fs_err as fs;
use rattler_digest::{
parse_digest_from_hex, serde::SerializableHash, Blake2b256, Blake2b256Hash, Blake2bMac256,
};
Expand Down Expand Up @@ -537,8 +538,7 @@ fn apply_jlap_patches(
}

// Read the contents of the current repodata to a string
let repo_data_contents =
std::fs::read_to_string(repo_data_path).map_err(JLAPError::FileSystem)?;
let repo_data_contents = fs::read_to_string(repo_data_path).map_err(JLAPError::FileSystem)?;

// Parse the JSON so we can manipulate it
tracing::info!("parsing cached repodata.json as JSON");
Expand Down Expand Up @@ -637,6 +637,8 @@ mod test {
use tempfile::TempDir;
use url::Url;

use fs_err::tokio as tokio_fs;

const FAKE_STATE_DATA_INITIAL: &str = r#"{
"url": "https://repo.example.com/pkgs/main/osx-64/repodata.json.zst",
"etag": "W/\"49aa6d9ea6f3285efe657780a7c8cd58\"",
Expand Down Expand Up @@ -851,14 +853,14 @@ mod test {

if let Some(content) = server_jlap {
// Add files we need to request to the server
tokio::fs::write(subdir_path.path().join("repodata.jlap"), content)
tokio_fs::write(subdir_path.path().join("repodata.jlap"), content)
.await
.unwrap();
}

if let Some(content) = server_repo_data {
// Add files we need to request to the server
tokio::fs::write(subdir_path.path().join("repodata.json"), content)
tokio_fs::write(subdir_path.path().join("repodata.json"), content)
.await
.unwrap();
}
Expand Down
12 changes: 9 additions & 3 deletions crates/rattler_repodata_gateway/src/fetch/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ use tokio_util::io::StreamReader;
use tracing::instrument;
use url::Url;

// use fs-err for better error reporting
use fs_err::tokio as tokio_fs;

mod cache;
pub mod jlap;

Expand Down Expand Up @@ -231,7 +234,7 @@ async fn repodata_from_file(
lock_file: LockedFile,
) -> Result<CachedRepoData, FetchRepoDataError> {
// copy file from subdir_url to out_path
if let Err(e) = tokio::fs::copy(&subdir_url.to_file_path().unwrap(), &out_path).await {
if let Err(e) = tokio_fs::copy(&subdir_url.to_file_path().unwrap(), &out_path).await {
return if e.kind() == ErrorKind::NotFound {
Err(FetchRepoDataError::NotFound(
RepoDataNotFoundError::FileSystemError(e),
Expand All @@ -244,7 +247,7 @@ async fn repodata_from_file(
// create a dummy cache state
let new_cache_state = RepoDataState {
url: subdir_url.clone(),
cache_size: tokio::fs::metadata(&out_path)
cache_size: tokio_fs::metadata(&out_path)
.await
.map_err(FetchRepoDataError::IoError)?
.len(),
Expand Down Expand Up @@ -652,7 +655,10 @@ async fn stream_and_decode_to_file(

// Clone the file handle and create a hashing writer so we can compute a hash while the content
// is being written to disk.
let file = tokio::fs::File::from_std(temp_file.as_file().try_clone().unwrap());
let file = tokio_fs::File::from_std(fs_err::File::from_parts(
temp_file.as_file().try_clone().unwrap(),
temp_file.path(),
));
let mut hashing_file_writer = HashingWriter::<_, Blake2b256>::new(file);

// Decode, hash and write the data to the file.
Expand Down
8 changes: 5 additions & 3 deletions crates/rattler_repodata_gateway/src/sparse/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ use std::{
};

use bytes::Bytes;
use fs_err as fs;
use futures::{stream, StreamExt, TryFutureExt, TryStreamExt};
use itertools::Itertools;
use rattler_conda_types::{
Expand Down Expand Up @@ -102,7 +103,7 @@ impl SparseRepoData {
path: impl AsRef<Path>,
patch_function: Option<fn(&mut PackageRecord)>,
) -> Result<Self, io::Error> {
let file = std::fs::File::open(path)?;
let file = fs::File::open(path.as_ref().to_owned())?;
let memory_map = unsafe { memmap2::Mmap::map(&file) }?;
Ok(SparseRepoData {
inner: SparseRepoDataInner::Memmapped(
Expand Down Expand Up @@ -482,6 +483,7 @@ mod test {

use super::{load_repo_data_recursively, PackageFilename, SparseRepoData};
use crate::utils::test::fetch_repo_data;
use fs_err as fs;

fn test_dir() -> PathBuf {
Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data")
Expand Down Expand Up @@ -510,7 +512,7 @@ mod test {
.await
.into_iter()
.map(|(channel, subdir, path)| {
let bytes = std::fs::read(path).unwrap();
let bytes = fs::read(path).unwrap();
(channel, subdir, bytes.into())
})
.collect()
Expand Down Expand Up @@ -658,7 +660,7 @@ mod test {
test_dir().join("channels/conda-forge/noarch/repodata.json"),
test_dir().join("channels/conda-forge/linux-64/repodata.json"),
] {
let str = std::fs::read_to_string(&path).unwrap();
let str = fs::read_to_string(&path).unwrap();
let repo_data: RepoData = serde_json::from_str(&str).unwrap();
records.push(repo_data);
}
Expand Down
Loading