Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Perform source distribution builds in archive directory #7240

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
166 changes: 86 additions & 80 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -167,85 +167,85 @@ jobs:
# For Ubuntu and Windows, this requires Organization-level configuration
# See: https://docs.github.com/en/actions/using-github-hosted-runners/about-larger-runners/about-larger-runners#about-ubuntu-and-windows-larger-runners

# CI job: run the Rust test suite on an Ubuntu x-large runner.
cargo-test-linux:
timeout-minutes: 10
needs: determine_changes
# Gate: only in the upstream astral-sh/uv repo, and only when code changed
# or the ref is main (per determine_changes output).
if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
runs-on:
labels: "ubuntu-latest-xlarge"
name: "cargo test | ubuntu"
steps:
- uses: actions/checkout@v4

# mold linker speeds up the debug-build link step.
- uses: rui314/setup-mold@v1

# Cache cargo registry/target artifacts between runs.
- uses: Swatinem/rust-cache@v2

- name: "Install Rust toolchain"
run: rustup show

# Installs uv itself, then uses it to provision the Python versions
# the test suite requires.
- name: "Install required Python versions"
run: |
curl -LsSf https://astral.sh/uv/install.sh | sh
uv python install

- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest

# -j 20: parallelism tuned for the xlarge runner.
- name: "Cargo test"
run: |
cargo nextest run \
--features python-patch \
--workspace \
--status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow

# Sanity-check the freshly built debug binary end-to-end.
- name: "Smoke test"
run: |
uv="./target/debug/uv"
$uv venv
$uv pip install ruff

# CI job: run the Rust test suite on a macOS x-large runner.
# Mirrors cargo-test-linux except for the runner label and a lower
# nextest parallelism (-j 12).
cargo-test-macos:
timeout-minutes: 10
needs: determine_changes
# Gate: only in the upstream astral-sh/uv repo, and only when code changed
# or the ref is main (per determine_changes output).
if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
runs-on:
labels: "macos-latest-xlarge"
name: "cargo test | macos"
steps:
- uses: actions/checkout@v4

# NOTE(review): mold targets Linux; its effect on a macOS runner is
# not established by this file — confirm whether this step is needed here.
- uses: rui314/setup-mold@v1

# Cache cargo registry/target artifacts between runs.
- uses: Swatinem/rust-cache@v2

- name: "Install Rust toolchain"
run: rustup show

# Installs uv itself, then uses it to provision the Python versions
# the test suite requires.
- name: "Install required Python versions"
run: |
curl -LsSf https://astral.sh/uv/install.sh | sh
uv python install

- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest

# -j 12: lower parallelism than the Linux job.
- name: "Cargo test"
run: |
cargo nextest run \
--features python-patch \
--workspace \
--status-level skip --failure-output immediate-final --no-fail-fast -j 12 --final-status-level slow

# Sanity-check the freshly built debug binary end-to-end.
- name: "Smoke test"
run: |
uv="./target/debug/uv"
$uv venv
$uv pip install ruff
# cargo-test-linux:
# timeout-minutes: 10
# needs: determine_changes
# if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
# runs-on:
# labels: "ubuntu-latest-xlarge"
# name: "cargo test | ubuntu"
# steps:
# - uses: actions/checkout@v4
#
# - uses: rui314/setup-mold@v1
#
# - uses: Swatinem/rust-cache@v2
#
# - name: "Install Rust toolchain"
# run: rustup show
#
# - name: "Install required Python versions"
# run: |
# curl -LsSf https://astral.sh/uv/install.sh | sh
# uv python install
#
# - name: "Install cargo nextest"
# uses: taiki-e/install-action@v2
# with:
# tool: cargo-nextest
#
# - name: "Cargo test"
# run: |
# cargo nextest run \
# --features python-patch \
# --workspace \
# --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow
#
# - name: "Smoke test"
# run: |
# uv="./target/debug/uv"
# $uv venv
# $uv pip install ruff
#
# cargo-test-macos:
# timeout-minutes: 10
# needs: determine_changes
# if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
# runs-on:
# labels: "macos-latest-xlarge"
# name: "cargo test | macos"
# steps:
# - uses: actions/checkout@v4
#
# - uses: rui314/setup-mold@v1
#
# - uses: Swatinem/rust-cache@v2
#
# - name: "Install Rust toolchain"
# run: rustup show
#
# - name: "Install required Python versions"
# run: |
# curl -LsSf https://astral.sh/uv/install.sh | sh
# uv python install
#
# - name: "Install cargo nextest"
# uses: taiki-e/install-action@v2
# with:
# tool: cargo-nextest
#
# - name: "Cargo test"
# run: |
# cargo nextest run \
# --features python-patch \
# --workspace \
# --status-level skip --failure-output immediate-final --no-fail-fast -j 12 --final-status-level slow
#
# - name: "Smoke test"
# run: |
# uv="./target/debug/uv"
# $uv venv
# $uv pip install ruff

cargo-test-windows:
timeout-minutes: 15
Expand Down Expand Up @@ -292,8 +292,14 @@ jobs:

- name: "Cargo test"
working-directory: ${{ env.UV_WORKSPACE }}
env:
# Avoid debug build stack overflows.
UV_STACK_SIZE: 2000000 # 2 megabytes, double the default on Windows
UV_CACHE_DIR: C:\Users\runneradmin\very\long\path\to\uv\that\may\exceed\the\allowed\character\limit\for\certain\builds\with\long\paths
run: |
cargo nextest run --no-default-features --features python,pypi --workspace --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow
git config --system core.longpaths false
cargo run --no-default-features --features flate2/rust_backend venv
cargo run --no-default-features --features flate2/rust_backend pip install nuitka==2.4.8 --verbose

- name: "Smoke test"
working-directory: ${{ env.UV_WORKSPACE }}
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/setup-dev-drive.ps1
Original file line number Diff line number Diff line change
Expand Up @@ -20,5 +20,5 @@ Write-Output `
"RUSTUP_HOME=$($Drive)/.rustup" `
"CARGO_HOME=$($Drive)/.cargo" `
"UV_WORKSPACE=$($Drive)/uv" `
"UV_CACHE_DIR=$($Drive)/very/long/path/to/uv/that/may/exceed/the/allowed/character/limit/for/certain/builds/with/long/paths" `
>> $env:GITHUB_ENV

68 changes: 41 additions & 27 deletions crates/uv-distribution/src/source/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ use uv_client::{
};
use uv_configuration::{BuildKind, BuildOutput};
use uv_extract::hash::Hasher;
use uv_fs::{rename_with_retry, write_atomic, LockedFile};
use uv_fs::{write_atomic, LockedFile};
use uv_metadata::read_archive_metadata;
use uv_types::{BuildContext, SourceBuildTrait};
use zip::ZipArchive;
Expand Down Expand Up @@ -446,8 +446,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

// Build the source distribution.
let source_dist_entry = cache_shard.entry(filename);
let source_dist_path = fs::read_link(&source_dist_entry.path())
.await
.map_err(Error::CacheRead)?;
let (disk_filename, wheel_filename, metadata) = self
.build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
.build_distribution(source, &source_dist_path, subdirectory, &cache_shard)
.await?;

if let Some(task) = task {
Expand Down Expand Up @@ -506,10 +509,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// freshness, since entries have to be fresher than the revision itself.
let cache_shard = cache_shard.shard(revision.id());
let source_dist_entry = cache_shard.entry(filename);
let source_dist_path = fs::read_link(&source_dist_entry.path())
.await
.map_err(Error::CacheRead)?;

// If the metadata is static, return it.
if let Some(metadata) =
Self::read_static_metadata(source, source_dist_entry.path(), subdirectory).await?
Self::read_static_metadata(source, &source_dist_path, subdirectory).await?
{
return Ok(ArchiveMetadata {
metadata: Metadata::from_metadata23(metadata),
Expand Down Expand Up @@ -538,7 +544,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Otherwise, we need to build the metadata.
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, source_dist_entry.path(), subdirectory)
.build_metadata(source, &source_dist_path, subdirectory)
.boxed_local()
.await?
{
Expand All @@ -563,7 +569,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

// Build the source distribution.
let (_disk_filename, _wheel_filename, metadata) = self
.build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
.build_distribution(source, &source_dist_path, subdirectory, &cache_shard)
.await?;

// Store the metadata.
Expand Down Expand Up @@ -701,6 +707,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}

let source_entry = cache_shard.entry("source");
let source_path = fs::read_link(&source_entry.path())
.await
.map_err(Error::CacheRead)?;

// Otherwise, we need to build a wheel.
let task = self
Expand All @@ -709,7 +718,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));

let (disk_filename, filename, metadata) = self
.build_distribution(source, source_entry.path(), None, &cache_shard)
.build_distribution(source, &source_path, None, &cache_shard)
.await?;

if let Some(task) = task {
Expand Down Expand Up @@ -764,11 +773,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// freshness, since entries have to be fresher than the revision itself.
let cache_shard = cache_shard.shard(revision.id());
let source_entry = cache_shard.entry("source");
let source_path = fs::read_link(&source_entry.path())
.await
.map_err(Error::CacheRead)?;

// If the metadata is static, return it.
if let Some(metadata) =
Self::read_static_metadata(source, source_entry.path(), None).await?
{
if let Some(metadata) = Self::read_static_metadata(source, &source_path, None).await? {
return Ok(ArchiveMetadata {
metadata: Metadata::from_metadata23(metadata),
hashes: revision.into_hashes(),
Expand All @@ -787,7 +797,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, source_entry.path(), None)
.build_metadata(source, &source_path, None)
.boxed_local()
.await?
{
Expand Down Expand Up @@ -820,7 +830,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));

let (_disk_filename, _filename, metadata) = self
.build_distribution(source, source_entry.path(), None, &cache_shard)
.build_distribution(source, &source_path, None, &cache_shard)
.await?;

if let Some(task) = task {
Expand Down Expand Up @@ -1357,12 +1367,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
target: &Path,
hashes: HashPolicy<'_>,
) -> Result<Vec<HashDigest>, Error> {
let temp_dir = tempfile::tempdir_in(
self.build_context
.cache()
.bucket(CacheBucket::SourceDistributions),
)
.map_err(Error::CacheWrite)?;
let temp_dir = self
.build_context
.cache()
.environment()
.map_err(Error::CacheWrite)?;

let reader = response
.bytes_stream()
.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
Expand Down Expand Up @@ -1396,9 +1406,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
fs_err::tokio::create_dir_all(target.parent().expect("Cache entry to have parent"))
.await
.map_err(Error::CacheWrite)?;
rename_with_retry(extracted, target)
self.build_context
.cache()
.persist(extracted, target)
.await
.map_err(Error::CacheWrite)?;
.map_err(Error::CacheRead)?;

Ok(hashes)
}
Expand All @@ -1413,12 +1425,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
) -> Result<Vec<HashDigest>, Error> {
debug!("Unpacking for build: {}", path.display());

let temp_dir = tempfile::tempdir_in(
self.build_context
.cache()
.bucket(CacheBucket::SourceDistributions),
)
.map_err(Error::CacheWrite)?;
let temp_dir = self
.build_context
.cache()
.environment()
.map_err(Error::CacheWrite)?;

let reader = fs_err::tokio::File::open(&path)
.await
.map_err(Error::CacheRead)?;
Expand Down Expand Up @@ -1449,9 +1461,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
fs_err::tokio::create_dir_all(target.parent().expect("Cache entry to have parent"))
.await
.map_err(Error::CacheWrite)?;
rename_with_retry(extracted, &target)
self.build_context
.cache()
.persist(extracted, target)
.await
.map_err(Error::CacheWrite)?;
.map_err(Error::CacheRead)?;

Ok(hashes)
}
Expand Down
Loading