Skip to content

Commit

Permalink
Generate hashes for --find-links entries (#5544)
Browse files · Browse the repository at this point in the history
## Summary

Closes #3874.
  • Loading branch information
charliermarsh authored Jul 29, 2024
1 parent 51b7e9b commit 41c1fc0
Show file tree
Hide file tree
Showing 4 changed files with 338 additions and 284 deletions.
18 changes: 10 additions & 8 deletions crates/uv-distribution/src/distribution_database.rs
Original file line number Diff line number Diff line change
Expand Up @@ -428,14 +428,16 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
// For hash _validation_, callers are expected to enforce the policy when retrieving the
// wheel.
// TODO(charlie): Request the hashes via a separate method, to reduce the coupling in this API.
if hashes.is_generate() && matches!(dist, BuiltDist::DirectUrl(_) | BuiltDist::Path(_)) {
let wheel = self.get_wheel(dist, hashes).await?;
let metadata = wheel.metadata()?;
let hashes = wheel.hashes;
return Ok(ArchiveMetadata {
metadata: Metadata::from_metadata23(metadata),
hashes,
});
if hashes.is_generate() {
if dist.file().map_or(true, |file| file.hashes.is_empty()) {
let wheel = self.get_wheel(dist, hashes).await?;
let metadata = wheel.metadata()?;
let hashes = wheel.hashes;
return Ok(ArchiveMetadata {
metadata: Metadata::from_metadata23(metadata),
hashes,
});
}
}

let result = self
Expand Down
149 changes: 71 additions & 78 deletions crates/uv-resolver/src/resolution/graph.rs
Original file line number Diff line number Diff line change
@@ -1,24 +1,24 @@
use indexmap::IndexSet;
use petgraph::{
graph::{Graph, NodeIndex},
Directed,
};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};

use distribution_types::{
Dist, DistributionMetadata, Name, ResolutionDiagnostic, ResolvedDist, VersionId,
VersionOrUrlRef,
};
use indexmap::IndexSet;
use pep440_rs::{Version, VersionSpecifier};
use pep508_rs::{MarkerEnvironment, MarkerTree};
use petgraph::{
graph::{Graph, NodeIndex},
Directed,
};
use pypi_types::{HashDigest, ParsedUrlError, Requirement, VerbatimParsedUrl, Yanked};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use uv_configuration::{Constraints, Overrides};
use uv_distribution::Metadata;
use uv_git::GitResolver;
use uv_normalize::{ExtraName, GroupName, PackageName};

use crate::pins::FilePins;
use crate::preferences::Preferences;
use crate::pubgrub::PubGrubDistribution;
use crate::python_requirement::PythonTarget;
use crate::redirect::url_to_precise;
use crate::resolution::AnnotatedDist;
Expand Down Expand Up @@ -274,44 +274,19 @@ impl ResolutionGraph {
// Create the distribution.
let dist = Dist::from_url(name.clone(), url_to_precise(url.clone(), git))?;

// Extract the hashes, preserving those that were already present in the
// lockfile if necessary.
let hashes = if let Some(digests) = preferences
.match_hashes(name, version)
.filter(|digests| !digests.is_empty())
{
digests.to_vec()
} else if let Some(metadata_response) = index.distributions().get(&dist.version_id()) {
if let MetadataResponse::Found(ref archive) = *metadata_response {
let mut digests = archive.hashes.clone();
digests.sort_unstable();
digests
} else {
vec![]
}
} else {
vec![]
};
let version_id = VersionId::from_url(&url.verbatim);

// Extract the hashes.
let hashes = Self::get_hashes(&version_id, name, version, preferences, index);

// Extract the metadata.
let metadata = {
let dist = PubGrubDistribution::from_url(name, url);

let response = index
.distributions()
.get(&dist.version_id())
.unwrap_or_else(|| {
panic!(
"Every package should have metadata: {:?}",
dist.version_id()
)
});
let response = index.distributions().get(&version_id).unwrap_or_else(|| {
panic!("Every package should have metadata: {version_id:?}")
});

let MetadataResponse::Found(archive) = &*response else {
panic!(
"Every package should have metadata: {:?}",
dist.version_id()
)
panic!("Every package should have metadata: {version_id:?}")
};

archive.metadata.clone()
Expand All @@ -324,6 +299,8 @@ impl ResolutionGraph {
.expect("Every package should be pinned")
.clone();

let version_id = dist.version_id();

// Track yanks for any registry distributions.
match dist.yanked() {
None | Some(Yanked::Bool(false)) => {}
Expand All @@ -341,49 +318,17 @@ impl ResolutionGraph {
}
}

// Extract the hashes, preserving those that were already present in the
// lockfile if necessary.
let hashes = if let Some(digests) = preferences
.match_hashes(name, version)
.filter(|digests| !digests.is_empty())
{
digests.to_vec()
} else if let Some(versions_response) = index.packages().get(name) {
if let VersionsResponse::Found(ref version_maps) = *versions_response {
version_maps
.iter()
.find_map(|version_map| version_map.hashes(version))
.map(|mut digests| {
digests.sort_unstable();
digests
})
.unwrap_or_default()
} else {
vec![]
}
} else {
vec![]
};
// Extract the hashes.
let hashes = Self::get_hashes(&version_id, name, version, preferences, index);

// Extract the metadata.
let metadata = {
let dist = PubGrubDistribution::from_registry(name, version);

let response = index
.distributions()
.get(&dist.version_id())
.unwrap_or_else(|| {
panic!(
"Every package should have metadata: {:?}",
dist.version_id()
)
});
let response = index.distributions().get(&version_id).unwrap_or_else(|| {
panic!("Every package should have metadata: {version_id:?}")
});

let MetadataResponse::Found(archive) = &*response else {
panic!(
"Every package should have metadata: {:?}",
dist.version_id()
)
panic!("Every package should have metadata: {version_id:?}")
};

archive.metadata.clone()
Expand All @@ -393,6 +338,54 @@ impl ResolutionGraph {
})
}

/// Identify the hashes for the [`VersionId`], preserving any hashes that were provided by the
/// lockfile.
fn get_hashes(
    version_id: &VersionId,
    name: &PackageName,
    version: &Version,
    preferences: &Preferences,
    index: &InMemoryIndex,
) -> Vec<HashDigest> {
    // Highest priority: hashes carried over from the lockfile, if any are present.
    if let Some(digests) = preferences
        .match_hashes(name, version)
        .filter(|digests| !digests.is_empty())
    {
        return digests.to_vec();
    }

    // Next: hashes reported by the registry, which are served at the package level.
    if let Some(response) = index.packages().get(name) {
        if let VersionsResponse::Found(ref version_maps) = *response {
            let digests = version_maps
                .iter()
                .find_map(|version_map| version_map.hashes(version))
                .unwrap_or_default();
            if !digests.is_empty() {
                let mut digests = digests;
                digests.sort_unstable();
                return digests;
            }
        }
    }

    // Last resort: hashes attached to the resolved distribution itself (i.e., the specific
    // wheel or source distribution).
    if let Some(response) = index.distributions().get(version_id) {
        if let MetadataResponse::Found(ref archive) = *response {
            if !archive.hashes.is_empty() {
                let mut digests = archive.hashes.clone();
                digests.sort_unstable();
                return digests;
            }
        }
    }

    Vec::new()
}

/// Returns an iterator over the distinct packages in the graph.
fn dists(&self) -> impl Iterator<Item = &AnnotatedDist> {
self.petgraph
Expand Down
4 changes: 2 additions & 2 deletions crates/uv/tests/common/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -879,9 +879,9 @@ pub fn run_and_format<T: AsRef<str>>(
if let Some(windows_filters) = windows_filters {
// The optional leading +/- is for install logs, the optional next line is for lockfiles
let windows_only_deps = [
("( [+-] )?colorama==\\d+(\\.[\\d+])+\n( # via .*\n)?"),
("( [+-] )?colorama==\\d+(\\.[\\d+])+( \\\\\n --hash=.*)?\n( # via .*\n)?"),
("( [+-] )?colorama==\\d+(\\.[\\d+])+(\\s+# via .*)?\n"),
("( [+-] )?tzdata==\\d+(\\.[\\d+])+\n( # via .*\n)?"),
("( [+-] )?tzdata==\\d+(\\.[\\d+])+( \\\\\n --hash=.*)?\n( # via .*\n)?"),
("( [+-] )?tzdata==\\d+(\\.[\\d+])+(\\s+# via .*)?\n"),
];
let mut removed_packages = 0;
Expand Down
Loading

0 comments on commit 41c1fc0

Please sign in to comment.