Skip to content

Commit

Permalink
Return computed hashes from metadata requests
Browse files Browse the repository at this point in the history
  • Loading branch information
charliermarsh committed Apr 10, 2024
1 parent d8f5d37 commit da47a09
Show file tree
Hide file tree
Showing 9 changed files with 100 additions and 60 deletions.
14 changes: 8 additions & 6 deletions crates/uv-distribution/src/distribution_database.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ use uv_types::BuildContext;

use crate::archive::Archive;
use crate::locks::Locks;
use crate::{Error, LocalWheel, Reporter, SourceDistributionBuilder};
use crate::{ArchiveMetadata, Error, LocalWheel, Reporter, SourceDistributionBuilder};

/// A cached high-level interface to convert distributions (a requirement resolved to a location)
/// to a wheel or wheel metadata.
Expand Down Expand Up @@ -109,7 +109,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
&self,
dist: &Dist,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
match dist {
Dist::Built(built) => self.get_wheel_metadata(built, hashes).await,
Dist::Source(source) => {
Expand Down Expand Up @@ -343,16 +343,18 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
&self,
dist: &BuiltDist,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
match self.client.wheel_metadata(dist).boxed().await {
Ok(metadata) => Ok(metadata),
Ok(metadata) => Ok(ArchiveMetadata::from(metadata)),
Err(err) if err.is_http_streaming_unsupported() => {
warn!("Streaming unsupported when fetching metadata for {dist}; downloading wheel directly ({err})");

// If the request failed due to an error that could be resolved by
// downloading the wheel directly, try that.
let wheel = self.get_wheel(dist, hashes).await?;
Ok(wheel.metadata()?)
let metadata = wheel.metadata()?;
let hashes = wheel.hashes;
Ok(ArchiveMetadata { metadata, hashes })
}
Err(err) => Err(err.into()),
}
Expand All @@ -366,7 +368,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
&self,
source: &BuildableSource<'_>,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
let no_build = match self.build_context.no_build() {
NoBuild::All => true,
NoBuild::None => false,
Expand Down
19 changes: 19 additions & 0 deletions crates/uv-distribution/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ pub use download::LocalWheel;
pub use error::Error;
pub use git::{is_same_reference, to_precise};
pub use index::{BuiltWheelIndex, RegistryWheelIndex};
use pypi_types::{HashDigest, Metadata23};
pub use reporter::Reporter;
pub use source::SourceDistributionBuilder;

Expand All @@ -16,3 +17,21 @@ mod index;
mod locks;
mod reporter;
mod source;

/// The metadata associated with an archive (a source or built distribution).
#[derive(Debug, Clone)]
pub struct ArchiveMetadata {
/// The [`Metadata23`] for the underlying distribution.
pub metadata: Metadata23,
/// The hashes of the source or built archive. May be empty when no hashes
/// were computed for the archive (see the [`From<Metadata23>`] impl below).
pub hashes: Vec<HashDigest>,
}

impl From<Metadata23> for ArchiveMetadata {
fn from(metadata: Metadata23) -> Self {
Self {
metadata,
hashes: vec![],
}
}
}
54 changes: 36 additions & 18 deletions crates/uv-distribution/src/source/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ use crate::error::Error;
use crate::git::{fetch_git_archive, resolve_precise};
use crate::source::built_wheel_metadata::BuiltWheelMetadata;
use crate::source::revision::Revision;
use crate::Reporter;
use crate::{ArchiveMetadata, Reporter};

mod built_wheel_metadata;
mod revision;
Expand Down Expand Up @@ -215,7 +215,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
&self,
source: &BuildableSource<'_>,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
let metadata = match &source {
BuildableSource::Dist(SourceDist::Registry(dist)) => {
let url = match &dist.file.url {
Expand Down Expand Up @@ -419,7 +419,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
cache_shard: &CacheShard,
subdirectory: Option<&'data Path>,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
// Fetch the revision for the source distribution.
let revision = self
.url_revision(source, filename, url, cache_shard, hashes)
Expand All @@ -442,7 +442,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let metadata_entry = cache_shard.entry(METADATA);
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(metadata);
return Ok(ArchiveMetadata {
metadata,
hashes: revision.into_hashes(),
});
}

// Otherwise, we either need to build the metadata or the wheel.
Expand All @@ -463,7 +466,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;

return Ok(metadata);
return Ok(ArchiveMetadata {
metadata,
hashes: revision.into_hashes(),
});
}

let task = self
Expand All @@ -488,7 +494,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}
}

Ok(metadata)
Ok(ArchiveMetadata {
metadata,
hashes: revision.into_hashes(),
})
}

/// Return the [`Revision`] for a remote URL, refreshing it if necessary.
Expand Down Expand Up @@ -632,7 +641,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>,
resource: &PathSourceUrl<'_>,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Path(resource.url).root(),
Expand Down Expand Up @@ -660,7 +669,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let metadata_entry = cache_shard.entry(METADATA);
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(metadata);
return Ok(ArchiveMetadata {
metadata,
hashes: revision.into_hashes(),
});
}

let source_entry = cache_shard.entry("source");
Expand All @@ -680,7 +692,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;

return Ok(metadata);
return Ok(ArchiveMetadata {
metadata,
hashes: revision.into_hashes(),
});
}

// Otherwise, we need to build a wheel.
Expand All @@ -705,7 +720,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;

Ok(metadata)
Ok(ArchiveMetadata {
metadata,
hashes: revision.into_hashes(),
})
}

/// Return the [`Revision`] for a local archive, refreshing it if necessary.
Expand Down Expand Up @@ -826,7 +844,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>,
resource: &PathSourceUrl<'_>,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
// Before running the build, check that the hashes match.
if !hashes.is_empty() {
return Err(Error::HashesNotSupportedSourceTree(source.to_string()));
Expand All @@ -850,7 +868,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let metadata_entry = cache_shard.entry(METADATA);
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(metadata);
return Ok(ArchiveMetadata::from(metadata));
}

// If the backend supports `prepare_metadata_for_build_wheel`, use it.
Expand All @@ -868,7 +886,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;

return Ok(metadata);
return Ok(ArchiveMetadata::from(metadata));
}

// Otherwise, we need to build a wheel.
Expand All @@ -893,7 +911,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;

Ok(metadata)
Ok(ArchiveMetadata::from(metadata))
}

/// Return the [`Revision`] for a local source tree, refreshing it if necessary.
Expand Down Expand Up @@ -1000,7 +1018,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>,
resource: &GitSourceUrl<'_>,
hashes: &[HashDigest],
) -> Result<Metadata23, Error> {
) -> Result<ArchiveMetadata, Error> {
// Before running the build, check that the hashes match.
if !hashes.is_empty() {
return Err(Error::HashesNotSupportedGit(source.to_string()));
Expand Down Expand Up @@ -1039,7 +1057,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
{
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(metadata);
return Ok(ArchiveMetadata::from(metadata));
}
}

Expand All @@ -1058,7 +1076,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;

return Ok(metadata);
return Ok(ArchiveMetadata::from(metadata));
}

// Otherwise, we need to build a wheel.
Expand All @@ -1083,7 +1101,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;

Ok(metadata)
Ok(ArchiveMetadata::from(metadata))
}

/// Download and unzip a source distribution into the cache from an HTTP response.
Expand Down
14 changes: 7 additions & 7 deletions crates/uv-requirements/src/lookahead.rs
Original file line number Diff line number Diff line change
Expand Up @@ -139,24 +139,24 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
// Fetch the metadata for the distribution.
let requires_dist = {
let id = dist.package_id();
if let Some(metadata) = self
if let Some(archive) = self
.index
.get_metadata(&id)
.as_deref()
.and_then(|response| {
if let MetadataResponse::Found(metadata) = response {
Some(metadata)
if let MetadataResponse::Found(archive, ..) = response {
Some(archive)
} else {
None
}
})
{
// If the metadata is already in the index, return it.
metadata.requires_dist.clone()
archive.metadata.requires_dist.clone()
} else {
// Run the PEP 517 build process to extract metadata from the source distribution.
let hashes = self.hashes.get(dist.name()).unwrap_or_default();
let metadata = self
let archive = self
.database
.get_or_build_wheel_metadata(&dist, hashes)
.await
Expand All @@ -165,11 +165,11 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
Dist::Source(source) => format!("Failed to download and build: {source}"),
})?;

let requires_dist = metadata.requires_dist.clone();
let requires_dist = archive.metadata.requires_dist.clone();

// Insert the metadata into the index.
self.index
.insert_metadata(id, MetadataResponse::Found(metadata));
.insert_metadata(id, MetadataResponse::Found(archive));

requires_dist
}
Expand Down
14 changes: 7 additions & 7 deletions crates/uv-requirements/src/source_tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,30 +100,30 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
// Fetch the metadata for the distribution.
let metadata = {
let id = PackageId::from_url(source.url());
if let Some(metadata) = self
if let Some(archive) = self
.index
.get_metadata(&id)
.as_deref()
.and_then(|response| {
if let MetadataResponse::Found(metadata) = response {
Some(metadata)
if let MetadataResponse::Found(archive) = response {
Some(archive)
} else {
None
}
})
{
// If the metadata is already in the index, return it.
metadata.clone()
archive.metadata.clone()
} else {
// Run the PEP 517 build process to extract metadata from the source distribution.
let source = BuildableSource::Url(source);
let metadata = self.database.build_wheel_metadata(&source, &[]).await?;
let archive = self.database.build_wheel_metadata(&source, &[]).await?;

// Insert the metadata into the index.
self.index
.insert_metadata(id, MetadataResponse::Found(metadata.clone()));
.insert_metadata(id, MetadataResponse::Found(archive.clone()));

metadata
archive.metadata
}
};

Expand Down
14 changes: 7 additions & 7 deletions crates/uv-requirements/src/unnamed.rs
Original file line number Diff line number Diff line change
Expand Up @@ -250,24 +250,24 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
// Fetch the metadata for the distribution.
let name = {
let id = PackageId::from_url(source.url());
if let Some(metadata) = index.get_metadata(&id).as_deref().and_then(|response| {
if let MetadataResponse::Found(metadata) = response {
Some(metadata)
if let Some(archive) = index.get_metadata(&id).as_deref().and_then(|response| {
if let MetadataResponse::Found(archive) = response {
Some(archive)
} else {
None
}
}) {
// If the metadata is already in the index, return it.
metadata.name.clone()
archive.metadata.name.clone()
} else {
// Run the PEP 517 build process to extract metadata from the source distribution.
let source = BuildableSource::Url(source);
let metadata = database.build_wheel_metadata(&source, &[]).await?;
let archive = database.build_wheel_metadata(&source, &[]).await?;

let name = metadata.name.clone();
let name = archive.metadata.name.clone();

// Insert the metadata into the index.
index.insert_metadata(id, MetadataResponse::Found(metadata));
index.insert_metadata(id, MetadataResponse::Found(archive));

name
}
Expand Down
Loading

0 comments on commit da47a09

Please sign in to comment.