diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e7f67b39dd0b..ed6bb28c3d50 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -167,85 +167,85 @@ jobs:
   # For Ubuntu and Windows, this requires Organization-level configuration
   # See: https://docs.github.com/en/actions/using-github-hosted-runners/about-larger-runners/about-larger-runners#about-ubuntu-and-windows-larger-runners
-  cargo-test-linux:
-    timeout-minutes: 10
-    needs: determine_changes
-    if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
-    runs-on:
-      labels: "ubuntu-latest-xlarge"
-    name: "cargo test | ubuntu"
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: rui314/setup-mold@v1
-
-      - uses: Swatinem/rust-cache@v2
-
-      - name: "Install Rust toolchain"
-        run: rustup show
-
-      - name: "Install required Python versions"
-        run: |
-          curl -LsSf https://astral.sh/uv/install.sh | sh
-          uv python install
-
-      - name: "Install cargo nextest"
-        uses: taiki-e/install-action@v2
-        with:
-          tool: cargo-nextest
-
-      - name: "Cargo test"
-        run: |
-          cargo nextest run \
-            --features python-patch \
-            --workspace \
-            --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow
-
-      - name: "Smoke test"
-        run: |
-          uv="./target/debug/uv"
-          $uv venv
-          $uv pip install ruff
-
-  cargo-test-macos:
-    timeout-minutes: 10
-    needs: determine_changes
-    if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
-    runs-on:
-      labels: "macos-latest-xlarge"
-    name: "cargo test | macos"
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: rui314/setup-mold@v1
-
-      - uses: Swatinem/rust-cache@v2
-
-      - name: "Install Rust toolchain"
-        run: rustup show
-
-      - name: "Install required Python versions"
-        run: |
-          curl -LsSf https://astral.sh/uv/install.sh | sh
-          uv python install
-
-      - name: "Install cargo nextest"
-        uses: taiki-e/install-action@v2
-        with:
-          tool: cargo-nextest
-
-      - name: "Cargo test"
-        run: |
-          cargo nextest run \
-            --features python-patch \
-            --workspace \
-            --status-level skip --failure-output immediate-final --no-fail-fast -j 12 --final-status-level slow
-
-      - name: "Smoke test"
-        run: |
-          uv="./target/debug/uv"
-          $uv venv
-          $uv pip install ruff
+  # cargo-test-linux:
+  #   timeout-minutes: 10
+  #   needs: determine_changes
+  #   if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+  #   runs-on:
+  #     labels: "ubuntu-latest-xlarge"
+  #   name: "cargo test | ubuntu"
+  #   steps:
+  #     - uses: actions/checkout@v4
+  #
+  #     - uses: rui314/setup-mold@v1
+  #
+  #     - uses: Swatinem/rust-cache@v2
+  #
+  #     - name: "Install Rust toolchain"
+  #       run: rustup show
+  #
+  #     - name: "Install required Python versions"
+  #       run: |
+  #         curl -LsSf https://astral.sh/uv/install.sh | sh
+  #         uv python install
+  #
+  #     - name: "Install cargo nextest"
+  #       uses: taiki-e/install-action@v2
+  #       with:
+  #         tool: cargo-nextest
+  #
+  #     - name: "Cargo test"
+  #       run: |
+  #         cargo nextest run \
+  #           --features python-patch \
+  #           --workspace \
+  #           --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow
+  #
+  #     - name: "Smoke test"
+  #       run: |
+  #         uv="./target/debug/uv"
+  #         $uv venv
+  #         $uv pip install ruff
+  #
+  # cargo-test-macos:
+  #   timeout-minutes: 10
+  #   needs: determine_changes
+  #   if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+  #   runs-on:
+  #     labels: "macos-latest-xlarge"
+  #   name: "cargo test | macos"
+  #   steps:
+  #     - uses: actions/checkout@v4
+  #
+  #     - uses: rui314/setup-mold@v1
+  #
+  #     - uses: Swatinem/rust-cache@v2
+  #
+  #     - name: "Install Rust toolchain"
+  #       run: rustup show
+  #
+  #     - name: "Install required Python versions"
+  #       run: |
+  #         curl -LsSf https://astral.sh/uv/install.sh | sh
+  #         uv python install
+  #
+  #     - name: "Install cargo nextest"
+  #       uses: taiki-e/install-action@v2
+  #       with:
+  #         tool: cargo-nextest
+  #
+  #     - name: "Cargo test"
+  #       run: |
+  #         cargo nextest run \
+  #           --features python-patch \
+  #           --workspace \
+  #           --status-level skip --failure-output immediate-final --no-fail-fast -j 12 --final-status-level slow
+  #
+  #     - name: "Smoke test"
+  #       run: |
+  #         uv="./target/debug/uv"
+  #         $uv venv
+  #         $uv pip install ruff
 
   cargo-test-windows:
     timeout-minutes: 15
@@ -292,8 +292,14 @@ jobs:
 
       - name: "Cargo test"
         working-directory: ${{ env.UV_WORKSPACE }}
+        env:
+          # Avoid debug build stack overflows.
+          UV_STACK_SIZE: 2000000 # 2 megabytes, double the default on Windows
+          UV_CACHE_DIR: C:\Users\runneradmin\very\long\path\to\uv\that\may\exceed\the\allowed\character\limit\for\certain\builds\with\long\paths
        run: |
-          cargo nextest run --no-default-features --features python,pypi --workspace --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow
+          git config --system core.longpaths false
+          cargo run --no-default-features --features flate2/rust_backend venv
+          cargo run --no-default-features --features flate2/rust_backend pip install nuitka==2.4.8 --verbose
 
       - name: "Smoke test"
         working-directory: ${{ env.UV_WORKSPACE }}
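Two things change in the Windows job above: the test step now drives the uv binary itself (`cargo run ... venv`, `cargo run ... pip install`) against a deliberately long `UV_CACHE_DIR`, and `UV_STACK_SIZE` doubles the stack, since debug builds use much larger stack frames than release builds and Windows gives the main thread only 1 MiB by default. A minimal sketch of how an entrypoint can honor such an override, assuming a hypothetical `run` function and a 1 MiB fallback (illustrative only, not uv's actual entrypoint):

```rust
use std::thread;

fn main() {
    // Parse an optional stack-size override from the environment; fall back
    // to an assumed 1 MiB, matching the default Windows main-thread stack.
    let stack_size = std::env::var("UV_STACK_SIZE")
        .ok()
        .and_then(|value| value.parse::<usize>().ok())
        .unwrap_or(1024 * 1024);

    // Run the real work on a thread whose stack size we control; the main
    // thread's stack size is fixed at link time and cannot be changed here.
    let handle = thread::Builder::new()
        .stack_size(stack_size)
        .spawn(run)
        .expect("failed to spawn main thread");
    handle.join().expect("main thread panicked");
}

fn run() {
    // Placeholder for the CLI's actual entrypoint.
    println!("running with a configurable stack");
}
```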
diff --git a/.github/workflows/setup-dev-drive.ps1 b/.github/workflows/setup-dev-drive.ps1
index 709931107990..e8736b9b67a7 100644
--- a/.github/workflows/setup-dev-drive.ps1
+++ b/.github/workflows/setup-dev-drive.ps1
@@ -20,5 +20,5 @@ Write-Output `
     "RUSTUP_HOME=$($Drive)/.rustup" `
     "CARGO_HOME=$($Drive)/.cargo" `
     "UV_WORKSPACE=$($Drive)/uv" `
+    "UV_CACHE_DIR=$($Drive)/very/long/path/to/uv/that/may/exceed/the/allowed/character/limit/for/certain/builds/with/long/paths" `
     >> $env:GITHUB_ENV
-
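The dev-drive script gets the same treatment: `UV_CACHE_DIR` is routed through a path long enough that cache entries can approach Windows' legacy 260-character `MAX_PATH` limit, which is exactly the regression surface the new CI step probes (the workflow even disables git's long-path handling with `core.longpaths false`). A rough guard of the kind a cache might apply, with a hypothetical helper name and entry layout:

```rust
use std::path::Path;

/// Legacy Windows path limit (MAX_PATH), counting the trailing NUL.
const WINDOWS_MAX_PATH: usize = 260;

/// Hypothetical helper: flag cache entries whose absolute path would trip
/// the legacy limit on systems without long-path support enabled.
fn check_path_length(cache_root: &Path, entry: &str) -> Result<(), String> {
    let full = cache_root.join(entry);
    let len = full.as_os_str().len();
    if len >= WINDOWS_MAX_PATH {
        Err(format!(
            "path is {len} characters, over the {WINDOWS_MAX_PATH} limit: {}",
            full.display()
        ))
    } else {
        Ok(())
    }
}

fn main() {
    // Example cache root echoing the long directory used in the workflow.
    let root = Path::new(r"C:\Users\runneradmin\very\long\path\to\uv");
    match check_path_length(root, r"wheels\nuitka-2.4.8-py3-none-any.whl") {
        Ok(()) => println!("within limits"),
        Err(err) => eprintln!("warning: {err}"),
    }
}
```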
diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs
index bedf6bab40be..50672e6fb6b3 100644
--- a/crates/uv-distribution/src/source/mod.rs
+++ b/crates/uv-distribution/src/source/mod.rs
@@ -32,7 +32,7 @@ use uv_client::{
 };
 use uv_configuration::{BuildKind, BuildOutput};
 use uv_extract::hash::Hasher;
-use uv_fs::{rename_with_retry, write_atomic, LockedFile};
+use uv_fs::{write_atomic, LockedFile};
 use uv_metadata::read_archive_metadata;
 use uv_types::{BuildContext, SourceBuildTrait};
 use zip::ZipArchive;
@@ -446,8 +446,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // Build the source distribution.
         let source_dist_entry = cache_shard.entry(filename);
+        let source_dist_path = fs::read_link(&source_dist_entry.path())
+            .await
+            .map_err(Error::CacheRead)?;
 
         let (disk_filename, wheel_filename, metadata) = self
-            .build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
+            .build_distribution(source, &source_dist_path, subdirectory, &cache_shard)
             .await?;
 
         if let Some(task) = task {
@@ -506,10 +509,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // freshness, since entries have to be fresher than the revision itself.
         let cache_shard = cache_shard.shard(revision.id());
         let source_dist_entry = cache_shard.entry(filename);
+        let source_dist_path = fs::read_link(&source_dist_entry.path())
+            .await
+            .map_err(Error::CacheRead)?;
 
         // If the metadata is static, return it.
         if let Some(metadata) =
-            Self::read_static_metadata(source, source_dist_entry.path(), subdirectory).await?
+            Self::read_static_metadata(source, &source_dist_path, subdirectory).await?
         {
             return Ok(ArchiveMetadata {
                 metadata: Metadata::from_metadata23(metadata),
@@ -538,7 +544,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // Otherwise, we either need to build the metadata.
         // If the backend supports `prepare_metadata_for_build_wheel`, use it.
         if let Some(metadata) = self
-            .build_metadata(source, source_dist_entry.path(), subdirectory)
+            .build_metadata(source, &source_dist_path, subdirectory)
             .boxed_local()
             .await?
         {
@@ -563,7 +569,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
 
         // Build the source distribution.
         let (_disk_filename, _wheel_filename, metadata) = self
-            .build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
+            .build_distribution(source, &source_dist_path, subdirectory, &cache_shard)
             .await?;
 
         // Store the metadata.
@@ -701,6 +707,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }
 
         let source_entry = cache_shard.entry("source");
+        let source_path = fs::read_link(&source_entry.path())
+            .await
+            .map_err(Error::CacheRead)?;
 
         // Otherwise, we need to build a wheel.
         let task = self
@@ -709,7 +718,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .map(|reporter| reporter.on_build_start(source));
 
         let (disk_filename, filename, metadata) = self
-            .build_distribution(source, source_entry.path(), None, &cache_shard)
+            .build_distribution(source, &source_path, None, &cache_shard)
             .await?;
 
         if let Some(task) = task {
@@ -764,11 +773,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // freshness, since entries have to be fresher than the revision itself.
         let cache_shard = cache_shard.shard(revision.id());
         let source_entry = cache_shard.entry("source");
+        let source_path = fs::read_link(&source_entry.path())
+            .await
+            .map_err(Error::CacheRead)?;
 
         // If the metadata is static, return it.
-        if let Some(metadata) =
-            Self::read_static_metadata(source, source_entry.path(), None).await?
-        {
+        if let Some(metadata) = Self::read_static_metadata(source, &source_path, None).await? {
             return Ok(ArchiveMetadata {
                 metadata: Metadata::from_metadata23(metadata),
                 hashes: revision.into_hashes(),
@@ -787,7 +797,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
 
         // If the backend supports `prepare_metadata_for_build_wheel`, use it.
         if let Some(metadata) = self
-            .build_metadata(source, source_entry.path(), None)
+            .build_metadata(source, &source_path, None)
             .boxed_local()
             .await?
         {
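The pattern repeated through these hunks: a cache entry is no longer the unpacked artifact itself but a link (a symlink, or a junction on Windows) pointing at the directory that actually holds it, so every consumer resolves the entry with `fs::read_link` before reading or building, mapping failures to `Error::CacheRead`. A standalone sketch of that resolution step, with hypothetical paths and names (uv's `CacheEntry` API is not reproduced here):

```rust
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

/// Resolve a link-based cache entry to the directory it points at.
/// `read_link` returns the link target without touching its contents; a
/// missing or non-link entry surfaces as an error, which the caller would
/// map to a cache-read failure.
fn resolve_cache_pointer(entry: &Path) -> io::Result<PathBuf> {
    fs::read_link(entry)
}

fn main() {
    let entry = Path::new("cache/sdists-v4/pypi/example/source");
    match resolve_cache_pointer(entry) {
        Ok(target) => println!("building from {}", target.display()),
        Err(err) => eprintln!("cache read failed: {err}"),
    }
}
```

A payoff of this kind of indirection is that replacing the link retargets the entry without copying the artifact itself.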
@@ -820,7 +830,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .map(|reporter| reporter.on_build_start(source));
 
         let (_disk_filename, _filename, metadata) = self
-            .build_distribution(source, source_entry.path(), None, &cache_shard)
+            .build_distribution(source, &source_path, None, &cache_shard)
             .await?;
 
         if let Some(task) = task {
@@ -1357,12 +1367,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         target: &Path,
         hashes: HashPolicy<'_>,
     ) -> Result<Vec<HashDigest>, Error> {
-        let temp_dir = tempfile::tempdir_in(
-            self.build_context
-                .cache()
-                .bucket(CacheBucket::SourceDistributions),
-        )
-        .map_err(Error::CacheWrite)?;
+        let temp_dir = self
+            .build_context
+            .cache()
+            .environment()
+            .map_err(Error::CacheWrite)?;
+
         let reader = response
             .bytes_stream()
             .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
@@ -1396,9 +1406,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         fs_err::tokio::create_dir_all(target.parent().expect("Cache entry to have parent"))
             .await
             .map_err(Error::CacheWrite)?;
-        rename_with_retry(extracted, target)
+        self.build_context
+            .cache()
+            .persist(extracted, target)
             .await
-            .map_err(Error::CacheWrite)?;
+            .map_err(Error::CacheRead)?;
 
         Ok(hashes)
     }
@@ -1413,12 +1425,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
     ) -> Result<Vec<HashDigest>, Error> {
         debug!("Unpacking for build: {}", path.display());
 
-        let temp_dir = tempfile::tempdir_in(
-            self.build_context
-                .cache()
-                .bucket(CacheBucket::SourceDistributions),
-        )
-        .map_err(Error::CacheWrite)?;
+        let temp_dir = self
+            .build_context
+            .cache()
+            .environment()
+            .map_err(Error::CacheWrite)?;
+
         let reader = fs_err::tokio::File::open(&path)
             .await
             .map_err(Error::CacheRead)?;
@@ -1449,9 +1461,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         fs_err::tokio::create_dir_all(target.parent().expect("Cache entry to have parent"))
             .await
             .map_err(Error::CacheWrite)?;
-        rename_with_retry(extracted, &target)
+        self.build_context
+            .cache()
+            .persist(extracted, target)
             .await
-            .map_err(Error::CacheWrite)?;
+            .map_err(Error::CacheRead)?;
 
         Ok(hashes)
     }
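Finally, scratch space now comes from the cache's own `environment()` rather than `tempfile::tempdir_in(...bucket(CacheBucket::SourceDistributions))`, and the extracted tree is moved into place with a cache-level `persist` instead of `rename_with_retry`. Keeping the temporary directory on the same filesystem as the cache is what lets the final move be a cheap rename. A sketch of what a `persist`-style helper might do, assuming a recursive-copy fallback (uv's actual implementation may differ):

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Move a fully extracted directory into its final cache location. Prefer an
/// atomic same-filesystem rename; if that fails (e.g. a cross-device move or
/// a transient Windows sharing violation), fall back to copy-and-delete.
fn persist(extracted: &Path, target: &Path) -> io::Result<()> {
    match fs::rename(extracted, target) {
        Ok(()) => Ok(()),
        Err(_) => {
            copy_dir_all(extracted, target)?;
            fs::remove_dir_all(extracted)
        }
    }
}

/// Recursively copy `src` into `dst`, creating directories as needed.
fn copy_dir_all(src: &Path, dst: &Path) -> io::Result<()> {
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let dest = dst.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            copy_dir_all(&entry.path(), &dest)?;
        } else {
            fs::copy(entry.path(), &dest)?;
        }
    }
    Ok(())
}

fn main() -> io::Result<()> {
    // Tiny usage example: stage a file, then persist the directory.
    fs::create_dir_all("scratch/extracted")?;
    fs::write("scratch/extracted/file.txt", "hello")?;
    persist(Path::new("scratch/extracted"), Path::new("scratch/final"))?;
    Ok(())
}
```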