Skip to content

Commit

Permalink
fix: use two in memory indexes, for resolve and builds (#1969)
Browse files Browse the repository at this point in the history
This PR makes the resolver use two memory indexes, one for the
resolution process and one for the potential building of wheels. Even
though uv uses different internal resolvers, it was sometimes still
getting data that was overridden by us during wheel builds, which was
unexpected.

This was cropping up in issues like #1686. We've made a minimal version
of the lock file where this problem was occurring and added that as a
test.

---------

Co-authored-by: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com>
  • Loading branch information
tdejager and Hofer-Julian authored Sep 3, 2024
1 parent 620f9ac commit b114912
Show file tree
Hide file tree
Showing 3 changed files with 45 additions and 8 deletions.
15 changes: 12 additions & 3 deletions src/lock_file/resolve/pypi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,13 @@ pub async fn resolve_pypi(
};

// Create a shared in-memory index.
let in_memory_index = InMemoryIndex::default();
    // We need two in-memory indexes, one for the build dispatch and one for the resolver,
    // because we manually override requests for the resolver
    // but do not want to override requests for the build dispatch.
//
    // The BuildDispatch might resolve or install when building wheels, which will mostly be
    // done with build isolation. In that case we want to use fresh, untampered requests.
let build_dispatch_in_memory_index = InMemoryIndex::default();
let config_settings = ConfigSettings::default();

let env = PythonEnvironment::from_interpreter(interpreter.clone());
Expand All @@ -303,7 +309,7 @@ pub async fn resolve_pypi(
&interpreter,
&index_locations,
&flat_index,
&in_memory_index,
&build_dispatch_in_memory_index,
&git_resolver,
&context.in_flight,
IndexStrategy::default(),
Expand Down Expand Up @@ -406,6 +412,9 @@ pub async fn resolve_pypi(
package_requests: package_requests.clone(),
};

    // We need a new in-memory index for the resolver so that it does not conflict with the
    // build dispatch one, as noted in the comment above.
let resolver_in_memory_index = InMemoryIndex::default();
let python_version = PythonVersion::from_str(&interpreter_version.to_string())
.expect("could not get version from interpreter");
let resolution = Resolver::new_custom_io(
Expand All @@ -414,7 +423,7 @@ pub async fn resolve_pypi(
&context.hash_strategy,
ResolverMarkers::SpecificEnvironment(marker_environment.into()),
&PythonRequirement::from_python_version(&interpreter, &python_version),
&in_memory_index,
&resolver_in_memory_index,
&git_resolver,
provider,
EmptyInstalledPackages,
Expand Down
14 changes: 9 additions & 5 deletions src/lock_file/resolve/resolver_provider.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,13 @@ impl<'a, Context: BuildContext> ResolverProvider for CondaResolverProvider<'a, C
) -> impl Future<Output = uv_resolver::PackageVersionsResult> + 'io {
if let Some((repodata_record, identifier)) = self.conda_python_identifiers.get(package_name)
{
let version = repodata_record.version().to_string();

tracing::debug!(
"overriding PyPI package version request {}=={}",
package_name,
version
);
// If we encounter a package that was installed by conda we simply return a single
// available version in the form of a source distribution with the URL of the
// conda package.
Expand All @@ -60,11 +67,7 @@ impl<'a, Context: BuildContext> ResolverProvider for CondaResolverProvider<'a, C

let source_dist = RegistrySourceDist {
name: identifier.name.as_normalized().clone(),
version: repodata_record
.version()
.to_string()
.parse()
.expect("could not convert to pypi version"),
version: version.parse().expect("could not convert to pypi version"),
file: Box::new(file),
index: IndexUrl::Pypi(VerbatimUrl::from_url(
consts::DEFAULT_PYPI_INDEX_URL.clone(),
Expand Down Expand Up @@ -104,6 +107,7 @@ impl<'a, Context: BuildContext> ResolverProvider for CondaResolverProvider<'a, C
) -> impl Future<Output = WheelMetadataResult> + 'io {
if let Dist::Source(SourceDist::Registry(RegistrySourceDist { name, .. })) = dist {
if let Some((_, iden)) = self.conda_python_identifiers.get(name) {
tracing::debug!("overriding PyPI package metadata request {}", name);
// If this is a Source dist and the package is actually installed by conda we
// create fake metadata with no dependencies. We assume that all conda installed
// packages are properly installed including its dependencies.
Expand Down
24 changes: 24 additions & 0 deletions tests/install_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -650,6 +650,30 @@ setup(
pixi.install().await.expect("cannot install project");
}

#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[cfg_attr(not(feature = "slow_integration_tests"), ignore)]
async fn test_setuptools_override_failure() {
    // Regression test for https://github.com/prefix-dev/pixi/issues/1686:
    // installing this project used to fail when conda-overridden PyPI
    // requests leaked into wheel builds.
    let current_platform = Platform::current();
    let manifest_contents = format!(
        r#"
    [project]
    channels = ["conda-forge"]
    name = "pixi-source-problem"
    platforms = ["{platform}"]
    [dependencies]
    pip = ">=24.0,<25"
    # The transitive dependencies of viser were causing issues
    [pypi-dependencies]
    viser = "==0.2.7"
    "#,
        platform = current_platform
    );

    // Creating the project and installing it must both succeed.
    let project =
        PixiControl::from_manifest(&manifest_contents).expect("cannot instantiate pixi project");
    project.install().await.expect("cannot install project");
}

#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[cfg_attr(not(feature = "slow_integration_tests"), ignore)]
async fn test_many_linux_wheel_tag() {
Expand Down

0 comments on commit b114912

Please sign in to comment.