From b7c6abc985803bcb846364e15cc6df23f1ba09ed Mon Sep 17 00:00:00 2001
From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com>
Date: Sat, 10 Aug 2024 06:12:36 -0400
Subject: [PATCH] revert all test changes to see if this solves the problem
---
tests/conftest.py | 405 ++---------------------------
tests/functional/test_download.py | 4 +-
tests/functional/test_fast_deps.py | 215 +--------------
tests/lib/__init__.py | 2 -
tests/lib/wheel.py | 50 ++--
5 files changed, 39 insertions(+), 637 deletions(-)
diff --git a/tests/conftest.py b/tests/conftest.py
index e2f4a2feb04..da4ab5b9dfb 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,4 +1,3 @@
-import abc
import compileall
import contextlib
import fnmatch
@@ -9,7 +8,6 @@
import subprocess
import sys
import threading
-from contextlib import AbstractContextManager, contextmanager
from dataclasses import dataclass
from enum import Enum
from hashlib import sha256
@@ -19,7 +17,6 @@
TYPE_CHECKING,
Any,
AnyStr,
- BinaryIO,
Callable,
ClassVar,
ContextManager,
@@ -30,8 +27,6 @@
Optional,
Set,
Tuple,
- Type,
- Union,
)
from unittest.mock import patch
from zipfile import ZipFile
@@ -60,7 +55,6 @@
PipTestEnvironment,
ScriptFactory,
TestData,
- create_basic_wheel_for_package,
)
from tests.lib.server import MockServer, make_mock_server
from tests.lib.venv import VirtualEnvironment, VirtualEnvironmentType
@@ -657,24 +651,6 @@ def script(
return script_factory(tmpdir.joinpath("workspace"), virtualenv)
-@pytest.fixture(scope="session")
-def session_script(
- request: pytest.FixtureRequest,
- tmpdir_factory: pytest.TempPathFactory,
- virtualenv_factory: Callable[[Path], VirtualEnvironment],
- script_factory: ScriptFactory,
-) -> PipTestEnvironment:
- """PipTestEnvironment shared across the whole session.
-
- This is used by session-scoped fixtures. Tests should use the
- function-scoped ``script`` fixture instead.
- """
- virtualenv = virtualenv_factory(
- tmpdir_factory.mktemp("session_venv").joinpath("venv")
- )
- return script_factory(tmpdir_factory.mktemp("session_workspace"), virtualenv)
-
-
@pytest.fixture(scope="session")
def common_wheels() -> Path:
"""Provide a directory with latest setuptools and wheel wheels"""
@@ -757,39 +733,6 @@ class MetadataKind(Enum):
NoFile = "no-file"
-@dataclass(frozen=True)
-class FakePackageSource:
- """A test package file which may be hardcoded or generated dynamically."""
-
- source_file: Union[str, Path]
-
- @classmethod
- def shared_data_package(cls, name: str) -> "FakePackageSource":
- return cls(source_file=name)
-
- @property
- def _is_shared_data(self) -> bool:
- return isinstance(self.source_file, str)
-
- @classmethod
- def generated_wheel(cls, path: Path) -> "FakePackageSource":
- return cls(source_file=path)
-
- @property
- def filename(self) -> str:
- if self._is_shared_data:
- assert isinstance(self.source_file, str)
- return self.source_file
- assert isinstance(self.source_file, Path)
- return self.source_file.name
-
- def source_path(self, shared_data: TestData) -> Path:
- if self._is_shared_data:
- return shared_data.packages / self.filename
- assert isinstance(self.source_file, Path)
- return self.source_file
-
-
@dataclass(frozen=True)
class FakePackage:
"""Mock package structure used to generate a PyPI repository.
@@ -799,20 +742,13 @@ class FakePackage:
name: str
version: str
- source_file: FakePackageSource
+ filename: str
metadata: MetadataKind
# This will override any dependencies specified in the actual dist's METADATA.
requires_dist: Tuple[str, ...] = ()
# This will override the Name specified in the actual dist's METADATA.
metadata_name: Optional[str] = None
- @property
- def filename(self) -> str:
- return self.source_file.filename
-
- def source_path(self, shared_data: TestData) -> Path:
- return self.source_file.source_path(shared_data)
-
def metadata_filename(self) -> str:
"""This is specified by PEP 658."""
return f"{self.filename}.metadata"
@@ -850,49 +786,14 @@ def generate_metadata(self) -> bytes:
@pytest.fixture(scope="session")
-def fake_packages(session_script: PipTestEnvironment) -> Dict[str, List[FakePackage]]:
+def fake_packages() -> Dict[str, List[FakePackage]]:
"""The package database we generate for testing PEP 658 support."""
- large_compilewheel_metadata_first = create_basic_wheel_for_package(
- session_script,
- "compilewheel",
- "2.0",
- extra_files={"asdf.txt": b"a" * 10_000},
- metadata_first=True,
- )
- # This wheel must be larger than 10KB to trigger the lazy wheel behavior we want
- # to test.
- assert large_compilewheel_metadata_first.stat().st_size > 10_000
-
- large_translationstring_metadata_last = create_basic_wheel_for_package(
- session_script,
- "translationstring",
- "0.1",
- extra_files={"asdf.txt": b"a" * 10_000},
- metadata_first=False,
- )
- assert large_translationstring_metadata_last.stat().st_size > 10_000
-
return {
"simple": [
- FakePackage(
- "simple",
- "1.0",
- FakePackageSource.shared_data_package("simple-1.0.tar.gz"),
- MetadataKind.Sha256,
- ),
- FakePackage(
- "simple",
- "2.0",
- FakePackageSource.shared_data_package("simple-2.0.tar.gz"),
- MetadataKind.No,
- ),
+ FakePackage("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256),
+ FakePackage("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No),
# This will raise a hashing error.
- FakePackage(
- "simple",
- "3.0",
- FakePackageSource.shared_data_package("simple-3.0.tar.gz"),
- MetadataKind.WrongHash,
- ),
+ FakePackage("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash),
],
"simple2": [
# Override the dependencies here in order to force pip to download
@@ -900,22 +801,17 @@ def fake_packages(session_script: PipTestEnvironment) -> Dict[str, List[FakePack
FakePackage(
"simple2",
"1.0",
- FakePackageSource.shared_data_package("simple2-1.0.tar.gz"),
+ "simple2-1.0.tar.gz",
MetadataKind.Unhashed,
("simple==1.0",),
),
# This will raise an error when pip attempts to fetch the metadata file.
- FakePackage(
- "simple2",
- "2.0",
- FakePackageSource.shared_data_package("simple2-2.0.tar.gz"),
- MetadataKind.NoFile,
- ),
+ FakePackage("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile),
# This has a METADATA file with a mismatched name.
FakePackage(
"simple2",
"3.0",
- FakePackageSource.shared_data_package("simple2-3.0.tar.gz"),
+ "simple2-3.0.tar.gz",
MetadataKind.Sha256,
metadata_name="not-simple2",
),
@@ -926,9 +822,7 @@ def fake_packages(session_script: PipTestEnvironment) -> Dict[str, List[FakePack
FakePackage(
"colander",
"0.9.9",
- FakePackageSource.shared_data_package(
- "colander-0.9.9-py2.py3-none-any.whl"
- ),
+ "colander-0.9.9-py2.py3-none-any.whl",
MetadataKind.No,
),
],
@@ -938,47 +832,25 @@ def fake_packages(session_script: PipTestEnvironment) -> Dict[str, List[FakePack
FakePackage(
"compilewheel",
"1.0",
- FakePackageSource.shared_data_package(
- "compilewheel-1.0-py2.py3-none-any.whl"
- ),
+ "compilewheel-1.0-py2.py3-none-any.whl",
MetadataKind.Unhashed,
("simple==1.0",),
),
- # This inserts a wheel larger than the default fast-deps request size with
- # .dist-info metadata at the front.
- FakePackage(
- "compilewheel",
- "2.0",
- FakePackageSource.generated_wheel(large_compilewheel_metadata_first),
- MetadataKind.No,
- ),
],
"has-script": [
# Ensure we check PEP 658 metadata hashing errors for wheel files.
FakePackage(
"has-script",
"1.0",
- FakePackageSource.shared_data_package(
- "has.script-1.0-py2.py3-none-any.whl"
- ),
+ "has.script-1.0-py2.py3-none-any.whl",
MetadataKind.WrongHash,
),
],
"translationstring": [
- # This inserts a wheel larger than the default fast-deps request size with
- # .dist-info metadata at the back.
- FakePackage(
- "translationstring",
- "0.1",
- FakePackageSource.generated_wheel(
- large_translationstring_metadata_last
- ),
- MetadataKind.No,
- ),
FakePackage(
"translationstring",
"1.1",
- FakePackageSource.shared_data_package("translationstring-1.1.tar.gz"),
+ "translationstring-1.1.tar.gz",
MetadataKind.No,
),
],
@@ -987,9 +859,7 @@ def fake_packages(session_script: PipTestEnvironment) -> Dict[str, List[FakePack
FakePackage(
"priority",
"1.0",
- FakePackageSource.shared_data_package(
- "priority-1.0-py2.py3-none-any.whl"
- ),
+ "priority-1.0-py2.py3-none-any.whl",
MetadataKind.NoFile,
),
],
@@ -998,9 +868,7 @@ def fake_packages(session_script: PipTestEnvironment) -> Dict[str, List[FakePack
FakePackage(
"requires-simple-extra",
"0.1",
- FakePackageSource.shared_data_package(
- "requires_simple_extra-0.1-py2.py3-none-any.whl"
- ),
+ "requires_simple_extra-0.1-py2.py3-none-any.whl",
MetadataKind.Sha256,
metadata_name="Requires_Simple.Extra",
),
@@ -1053,11 +921,9 @@ def html_index_for_packages(
download_links.append(
f'    <a href="{package_link.filename}">{package_link.filename}</a><br/>\n'  # noqa: E501
)
- # (3.2) Copy over the corresponding file in `shared_data.packages`, or the
- # generated wheel path if provided.
- source_path = package_link.source_path(shared_data)
+ # (3.2) Copy over the corresponding file in `shared_data.packages`.
shutil.copy(
- source_path,
+ shared_data.packages / package_link.filename,
pkg_subdir / package_link.filename,
)
# (3.3) Write a metadata file, if applicable.
@@ -1093,7 +959,6 @@ class OneTimeDownloadHandler(http.server.SimpleHTTPRequestHandler):
"""Serve files from the current directory, but error if a file is downloaded more
than once."""
- # NB: Needs to be set on per-function subclass.
_seen_paths: ClassVar[Set[str]] = set()
def do_GET(self) -> None:
@@ -1139,241 +1004,3 @@ class Handler(OneTimeDownloadHandler):
finally:
httpd.shutdown()
server_thread.join()
-
-
-class RangeHandler(Enum):
- """All the modes of handling range requests we want pip to handle."""
-
- Always200OK = "always-200-ok"
- NoNegativeRange = "no-negative-range"
- SneakilyCoerceNegativeRange = "sneakily-coerce-negative-range"
- SupportsNegativeRange = "supports-negative-range"
- NegativeRangeOverflowing = "negative-range-overflowing"
-
- def supports_range(self) -> bool:
- return self in [
- type(self).NoNegativeRange,
- type(self).SneakilyCoerceNegativeRange,
- type(self).SupportsNegativeRange,
- type(self).NegativeRangeOverflowing,
- ]
-
- def supports_negative_range(self) -> bool:
- return self in [
- type(self).SupportsNegativeRange,
- type(self).NegativeRangeOverflowing,
- ]
-
- def sneakily_coerces_negative_range(self) -> bool:
- return self == type(self).SneakilyCoerceNegativeRange
-
- def overflows_negative_range(self) -> bool:
- return self == type(self).NegativeRangeOverflowing
-
-
-class ContentRangeDownloadHandler(
- http.server.SimpleHTTPRequestHandler, metaclass=abc.ABCMeta
-):
- """Extend the basic ``http.server`` to support content ranges."""
-
- @abc.abstractproperty
- def range_handler(self) -> RangeHandler: ...
-
- # NB: Needs to be set on per-function subclasses.
- get_request_counts: ClassVar[Dict[str, int]] = {}
- positive_range_request_paths: ClassVar[Set[str]] = set()
- negative_range_request_paths: ClassVar[Set[str]] = set()
- head_request_paths: ClassVar[Set[str]] = set()
- ok_response_counts: ClassVar[Dict[str, int]] = {}
-
- @contextmanager
- def _translate_path(self) -> Iterator[Optional[Tuple[BinaryIO, str, int]]]:
- # Only test fast-deps, not PEP 658.
- if self.path.endswith(".metadata"):
- self.send_error(http.HTTPStatus.NOT_FOUND, "File not found")
- yield None
- return
-
- path = self.translate_path(self.path)
- if os.path.isdir(path):
- path = os.path.join(path, "index.html")
-
- ctype = self.guess_type(path)
- try:
- with open(path, "rb") as f:
- fs = os.fstat(f.fileno())
- full_file_length = fs[6]
-
- yield (f, ctype, full_file_length)
- except OSError:
- self.send_error(http.HTTPStatus.NOT_FOUND, "File not found")
- yield None
- return
-
- def _send_basic_headers(self, ctype: str) -> None:
- self.send_header("Content-Type", ctype)
- if self.range_handler.supports_range():
- self.send_header("Accept-Ranges", "bytes")
- # NB: callers must call self.end_headers()!
-
- def _send_full_file_headers(self, ctype: str, full_file_length: int) -> None:
- self.send_response(http.HTTPStatus.OK)
- self.ok_response_counts.setdefault(self.path, 0)
- self.ok_response_counts[self.path] += 1
- self._send_basic_headers(ctype)
- self.send_header("Content-Length", str(full_file_length))
- self.end_headers()
-
- def do_HEAD(self) -> None:
- self.head_request_paths.add(self.path)
-
- with self._translate_path() as x:
- if x is None:
- return
- (_, ctype, full_file_length) = x
- self._send_full_file_headers(ctype, full_file_length)
-
- def do_GET(self) -> None:
- self.get_request_counts.setdefault(self.path, 0)
- self.get_request_counts[self.path] += 1
-
- with self._translate_path() as x:
- if x is None:
- return
- (f, ctype, full_file_length) = x
- range_arg = self.headers.get("Range", None)
- if range_arg is not None:
- m = re.match(r"bytes=([0-9]+)?-([0-9]+)", range_arg)
- if m is not None:
- if m.group(1) is None:
- self.negative_range_request_paths.add(self.path)
- else:
- self.positive_range_request_paths.add(self.path)
- # If no range given, return the whole file.
- if range_arg is None or not self.range_handler.supports_range():
- self._send_full_file_headers(ctype, full_file_length)
- self.copyfile(f, self.wfile) # type: ignore[misc]
- return
- # Otherwise, return the requested contents.
- assert m is not None
- # This is a "start-end" range.
- if m.group(1) is not None:
- start = int(m.group(1))
- end = int(m.group(2))
- assert start <= end
- was_out_of_bounds = (end + 1) > full_file_length
- else:
- # This is a "-end" range.
- if self.range_handler.sneakily_coerces_negative_range():
- end = int(m.group(2))
- self.send_response(http.HTTPStatus.PARTIAL_CONTENT)
- self._send_basic_headers(ctype)
- self.send_header("Content-Length", str(end + 1))
- self.send_header(
- "Content-Range", f"bytes 0-{end}/{full_file_length}"
- )
- self.end_headers()
- f.seek(0)
- self.wfile.write(f.read(end + 1))
- return
- if not self.range_handler.supports_negative_range():
- self.send_response(http.HTTPStatus.NOT_IMPLEMENTED)
- self._send_basic_headers(ctype)
- self.end_headers()
- return
- end = full_file_length - 1
- start = end - int(m.group(2)) + 1
- was_out_of_bounds = start < 0
- if was_out_of_bounds:
- if self.range_handler.overflows_negative_range():
- self._send_full_file_headers(ctype, full_file_length)
- self.copyfile(f, self.wfile) # type: ignore[misc]
- return
- self.send_response(http.HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
- self._send_basic_headers(ctype)
- self.send_header("Content-Range", f"bytes */{full_file_length}")
- self.end_headers()
- return
- sent_length = end - start + 1
- self.send_response(http.HTTPStatus.PARTIAL_CONTENT)
- self._send_basic_headers(ctype)
- self.send_header("Content-Length", str(sent_length))
- self.send_header("Content-Range", f"bytes {start}-{end}/{full_file_length}")
- self.end_headers()
- f.seek(start)
- self.wfile.write(f.read(sent_length))
-
-
-@pytest.fixture(scope="session")
-def html_index_no_metadata(
- html_index_for_packages: Path,
- tmpdir_factory: pytest.TempPathFactory,
-) -> Path:
- """Return an index like ``html_index_for_packages`` without any PEP 658 metadata.
-
- While we already return a 404 in ``ContentRangeDownloadHandler`` for ``.metadata``
- paths, we need to also remove ``data-dist-info-metadata`` attrs on ```` tags,
- otherwise pip will error after attempting to retrieve the metadata files."""
- new_html_dir = tmpdir_factory.mktemp("fake_index_html_content_no_metadata")
- new_html_dir.rmdir()
- shutil.copytree(html_index_for_packages, new_html_dir)
- for index_page in new_html_dir.rglob("index.html"):
- prev_index = index_page.read_text()
- no_metadata_index = re.sub(r'data-dist-info-metadata="[^"]+"', "", prev_index)
- index_page.write_text(no_metadata_index)
- return new_html_dir
-
-
-HTMLIndexWithRangeServer = Callable[
- [RangeHandler],
- "AbstractContextManager[Type[ContentRangeDownloadHandler]]",
-]
-
-
-@pytest.fixture
-def html_index_with_range_server(
- html_index_no_metadata: Path,
- port: int = 8000,
-) -> HTMLIndexWithRangeServer:
- """Serve files from a generated pypi index, with support for range requests.
-
- Provide `-i http://localhost:` to pip invocations to point them at
- this server.
- """
-
- class InDirectoryServer(http.server.ThreadingHTTPServer):
- def finish_request(self, request: Any, client_address: Any) -> None:
- self.RequestHandlerClass(
- request, client_address, self, directory=str(html_index_no_metadata) # type: ignore[call-arg,arg-type]
- )
-
- @contextmanager
- def inner(
- range_handler: RangeHandler,
- ) -> Iterator[Type[ContentRangeDownloadHandler]]:
- class Handler(ContentRangeDownloadHandler):
- @property
- def range_handler(self) -> RangeHandler:
- return range_handler
-
- get_request_counts: ClassVar[Dict[str, int]] = {}
- positive_range_request_paths: ClassVar[Set[str]] = set()
- negative_range_request_paths: ClassVar[Set[str]] = set()
- head_request_paths: ClassVar[Set[str]] = set()
- ok_response_counts: ClassVar[Dict[str, int]] = {}
-
- with InDirectoryServer(("", port), Handler) as httpd:
- server_thread = threading.Thread(target=httpd.serve_forever)
- server_thread.start()
-
- try:
- yield Handler
- finally:
- httpd.shutdown()
- server_thread.join(timeout=3.0)
- if server_thread.is_alive():
- raise RuntimeError(
- "failed to shutdown http server within 3 seconds"
- )
-
- return inner
diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py
index b2a82b68d93..d469e71c360 100644
--- a/tests/functional/test_download.py
+++ b/tests/functional/test_download.py
@@ -1306,7 +1306,7 @@ def run_for_generated_index(
["colander-0.9.9-py2.py3-none-any.whl", "translationstring-1.1.tar.gz"],
),
(
- "compilewheel==1.0",
+ "compilewheel",
["compilewheel-1.0-py2.py3-none-any.whl", "simple-1.0.tar.gz"],
),
],
@@ -1339,7 +1339,7 @@ def test_download_metadata(
"/colander/colander-0.9.9-py2.py3-none-any.whl",
),
(
- "compilewheel==1.0",
+ "compilewheel",
[
"compilewheel-1.0-py2.py3-none-any.whl",
"simple-1.0.tar.gz",
diff --git a/tests/functional/test_fast_deps.py b/tests/functional/test_fast_deps.py
index 0e7e45eabc4..5a910b89763 100644
--- a/tests/functional/test_fast_deps.py
+++ b/tests/functional/test_fast_deps.py
@@ -4,14 +4,12 @@
import pathlib
import re
from os.path import basename
-from pathlib import Path
-from typing import Iterable, List
+from typing import Iterable
import pytest
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.utils.misc import hash_file
-from tests.conftest import HTMLIndexWithRangeServer, RangeHandler
from tests.lib import PipTestEnvironment, TestData, TestPipResult
@@ -22,7 +20,6 @@ def pip(script: PipTestEnvironment, command: str, requirement: str) -> TestPipRe
"--no-cache-dir",
"--use-feature=fast-deps",
requirement,
- # TODO: remove this when fast-deps is on by default.
allow_stderr_warning=True,
)
@@ -48,31 +45,6 @@ def test_install_from_pypi(
assert_installed(script, expected)
-@pytest.mark.network
-@pytest.mark.parametrize(
- "requirement, url, expected",
- [
- (
- "wcwidth==0.2.1",
- "https://files.pythonhosted.org/packages/6c/a6/cdb485093ad4017d874d7a2e6a736d02720258f57876548eea2bf04c76f0/wcwidth-0.2.1-py2.py3-none-any.whl",
- "multiple .dist-info directories found",
- ),
- ],
-)
-def test_invalid_wheel_parse_error(
- requirement: str, url: str, expected: str, script: PipTestEnvironment
-) -> None:
- """Check for both the full download URL and the reason for the error."""
- result = script.pip(
- "install",
- "--use-feature=fast-deps",
- requirement,
- expect_error=True,
- )
- assert url in result.stderr
- assert expected in result.stderr
-
-
@pytest.mark.network
@pytest.mark.parametrize(
"requirement, expected",
@@ -164,188 +136,3 @@ def test_hash_mismatch_existing_download_for_metadata_only_wheel(
hash_file(str(idna_wheel))[0].hexdigest()
== "b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
)
-
-
-@pytest.mark.parametrize("range_handler", list(RangeHandler))
-def test_download_range(
- script: PipTestEnvironment,
- tmpdir: Path,
- html_index_with_range_server: HTMLIndexWithRangeServer,
- range_handler: RangeHandler,
-) -> None:
- """Execute `pip download` against a generated PyPI index."""
- download_dir = tmpdir / "download_dir"
-
- def run_for_generated_index(args: List[str]) -> TestPipResult:
- """
- Produce a PyPI directory structure pointing to the specified packages, then
- execute `pip download -i ...` pointing to our generated index.
- """
- pip_args = [
- "download",
- "--use-feature=fast-deps",
- "-d",
- str(download_dir),
- "-i",
- "http://localhost:8000",
- *args,
- ]
- return script.pip(*pip_args, allow_stderr_warning=True)
-
- with html_index_with_range_server(range_handler) as handler:
- run_for_generated_index(
- ["colander", "compilewheel==2.0", "has-script", "translationstring==0.1"]
- )
- generated_files = os.listdir(download_dir)
- assert fnmatch.filter(generated_files, "colander*.whl")
- assert fnmatch.filter(generated_files, "compilewheel*.whl")
- assert fnmatch.filter(generated_files, "has.script*.whl")
- assert fnmatch.filter(generated_files, "translationstring*.whl")
-
- colander_wheel_path = "/colander/colander-0.9.9-py2.py3-none-any.whl"
- compile_wheel_path = "/compilewheel/compilewheel-2.0-py2.py3-none-any.whl"
- has_script_path = "/has-script/has.script-1.0-py2.py3-none-any.whl"
- translationstring_path = (
- "/translationstring/translationstring-0.1-py2.py3-none-any.whl"
- )
-
- if range_handler == RangeHandler.Always200OK:
- assert not handler.head_request_paths
- assert not handler.positive_range_request_paths
- assert {colander_wheel_path} == handler.negative_range_request_paths
- # Tries a range request, finds it's unsupported, so doesn't try it again.
- assert handler.get_request_counts[colander_wheel_path] == 2
- assert handler.ok_response_counts[colander_wheel_path] == 2
- assert handler.get_request_counts[compile_wheel_path] == 1
- assert handler.ok_response_counts[compile_wheel_path] == 1
- assert handler.get_request_counts[has_script_path] == 1
- assert handler.ok_response_counts[has_script_path] == 1
- assert handler.get_request_counts[translationstring_path] == 1
- assert handler.ok_response_counts[translationstring_path] == 1
- elif range_handler == RangeHandler.NoNegativeRange:
- assert {
- colander_wheel_path,
- compile_wheel_path,
- has_script_path,
- translationstring_path,
- } == handler.head_request_paths
- assert {
- colander_wheel_path,
- compile_wheel_path,
- has_script_path,
- translationstring_path,
- } == handler.positive_range_request_paths
- # Tries this first, finds that negative offsets are unsupported, so doesn't
- # try it again.
- assert {colander_wheel_path} == handler.negative_range_request_paths
- # Two more for the first wheel, because it has the failing negative
- # byte request and is larger than the initial chunk size.
- assert handler.get_request_counts[colander_wheel_path] == 4
- assert handler.ok_response_counts[colander_wheel_path] == 2
- # The .dist-info dir at the start requires an additional ranged GET vs
- # translationstring.
- assert handler.get_request_counts[compile_wheel_path] == 3
- assert handler.ok_response_counts[compile_wheel_path] == 2
- # The entire file should have been pulled in with a single ranged GET.
- assert handler.get_request_counts[has_script_path] == 2
- assert handler.ok_response_counts[has_script_path] == 2
- # The entire .dist-info dir should have been pulled in with a single
- # ranged GET. The second GET is for the end of the download, pulling down
- # the entire file contents.
- assert handler.get_request_counts[translationstring_path] == 2
- assert handler.ok_response_counts[translationstring_path] == 2
- elif range_handler == RangeHandler.SneakilyCoerceNegativeRange:
- assert {
- colander_wheel_path,
- compile_wheel_path,
- has_script_path,
- translationstring_path,
- } == handler.head_request_paths
- assert {
- colander_wheel_path,
- compile_wheel_path,
- has_script_path,
- translationstring_path,
- } == handler.positive_range_request_paths
- # Tries this first, finds that negative offsets are unsupported, so doesn't
- # try it again.
- assert {colander_wheel_path} == handler.negative_range_request_paths
- # Two more for the first wheel, because it has the failing negative
- # byte request and is larger than the initial chunk size.
- assert handler.get_request_counts[colander_wheel_path] == 4
- assert handler.ok_response_counts[colander_wheel_path] == 2
- # The .dist-info dir at the start requires an additional ranged GET vs
- # translationstring.
- assert handler.get_request_counts[compile_wheel_path] == 3
- assert handler.ok_response_counts[compile_wheel_path] == 2
- # The entire file should have been pulled in with a single ranged GET.
- assert handler.get_request_counts[has_script_path] == 2
- assert handler.ok_response_counts[has_script_path] == 2
- # The entire .dist-info dir should have been pulled in with a single
- # ranged GET. The second GET is for the end of the download, pulling down
- # the entire file contents.
- assert handler.get_request_counts[translationstring_path] == 2
- assert handler.ok_response_counts[translationstring_path] == 2
- elif range_handler == RangeHandler.SupportsNegativeRange:
- # The negative byte index worked, so no head requests.
- assert not handler.head_request_paths
- # The negative range request was in bounds and pulled in the entire
- # .dist-info directory (at the end of the zip) for translationstring==0.1,
- # so we didn't need another range request for it. compilewheel==2.0 has the
- # .dist-info dir at the start of the zip, so we still need another request
- # for that.
- assert {
- colander_wheel_path,
- has_script_path,
- compile_wheel_path,
- } == handler.positive_range_request_paths
- assert {
- colander_wheel_path,
- compile_wheel_path,
- has_script_path,
- translationstring_path,
- } == handler.negative_range_request_paths
- assert handler.get_request_counts[colander_wheel_path] == 3
- assert handler.ok_response_counts[colander_wheel_path] == 1
- # One more than translationstring, because the .dist-info dir is at the
- # front of the wheel.
- assert handler.get_request_counts[compile_wheel_path] == 3
- assert handler.ok_response_counts[compile_wheel_path] == 1
- # One more than NoNegativeRange, because the negative byte index failed.
- assert handler.get_request_counts[has_script_path] == 3
- assert handler.ok_response_counts[has_script_path] == 1
- assert handler.get_request_counts[translationstring_path] == 2
- assert handler.ok_response_counts[translationstring_path] == 1
- else:
- assert range_handler == RangeHandler.NegativeRangeOverflowing
- # The negative byte index worked, so no head requests.
- assert not handler.head_request_paths
- # The negative range request was in bounds and pulled in the entire
- # .dist-info directory (at the end of the zip) for translationstring==0.1,
- # so we didn't need another range request for it. compilewheel==2.0 has the
- # .dist-info dir at the start of the zip, so we still need another request
- # for that.
- assert {
- colander_wheel_path,
- compile_wheel_path,
- } == handler.positive_range_request_paths
- assert {
- colander_wheel_path,
- compile_wheel_path,
- has_script_path,
- translationstring_path,
- has_script_path,
- } == handler.negative_range_request_paths
- assert handler.get_request_counts[colander_wheel_path] == 3
- assert handler.ok_response_counts[colander_wheel_path] == 1
- # One more than translationstring, because the .dist-info dir is at the
- # front of the wheel.
- assert handler.get_request_counts[compile_wheel_path] == 3
- assert handler.ok_response_counts[compile_wheel_path] == 1
- # One *less* request for has-script than SupportsNegativeRange, because the
- # server returned a full 200 OK response when the negative byte range was
- # larger than the actual file size.
- assert handler.get_request_counts[has_script_path] == 2
- assert handler.ok_response_counts[has_script_path] == 2
- assert handler.get_request_counts[translationstring_path] == 2
- assert handler.ok_response_counts[translationstring_path] == 1
diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py
index df980cc01f1..e318f7155d2 100644
--- a/tests/lib/__init__.py
+++ b/tests/lib/__init__.py
@@ -1178,7 +1178,6 @@ def create_basic_wheel_for_package(
extras: Optional[Dict[str, List[str]]] = None,
requires_python: Optional[str] = None,
extra_files: Optional[Dict[str, Union[bytes, str]]] = None,
- metadata_first: bool = True,
) -> pathlib.Path:
if depends is None:
depends = []
@@ -1225,7 +1224,6 @@ def hello():
extra_files=extra_files,
# Have an empty RECORD because we don't want to be checking hashes.
record="",
- metadata_first=metadata_first,
)
wheel_builder.save_to(archive_path)
diff --git a/tests/lib/wheel.py b/tests/lib/wheel.py
index 41098c60cb8..342abbacaad 100644
--- a/tests/lib/wheel.py
+++ b/tests/lib/wheel.py
@@ -199,19 +199,22 @@ def record_file_maker_wrapper(
version: str,
files: Iterable[File],
record: Defaulted[Optional[AnyStr]],
-) -> Optional[File]:
- records: List[Record] = [
- Record(file.name, digest(file.contents), str(len(file.contents)))
- for file in files
- ]
+) -> Iterable[File]:
+ records: List[Record] = []
+ for file in files:
+ records.append(
+ Record(file.name, digest(file.contents), str(len(file.contents)))
+ )
+ yield file
if record is None:
- return None
+ return
record_path = dist_info_path(name, version, "RECORD")
if record is not _default:
- return File(record_path, ensure_binary(record))
+ yield File(record_path, ensure_binary(record))
+ return
records.append(Record(record_path, "", ""))
@@ -221,7 +224,7 @@ def record_file_maker_wrapper(
writer.writerow(r)
contents = buf.getvalue().encode("utf-8")
- return File(record_path, contents)
+ yield File(record_path, contents)
def wheel_name(
@@ -299,7 +302,6 @@ def make_wheel(
console_scripts: Defaulted[List[str]] = _default,
entry_points: Defaulted[Dict[str, List[str]]] = _default,
record: Defaulted[Optional[AnyStr]] = _default,
- metadata_first: bool = True,
) -> WheelBuilder:
"""
Helper function for generating test wheels which are compliant by default.
@@ -360,14 +362,13 @@ def make_wheel(
:param entry_points:
:param record: if provided and None, then no RECORD file is generated;
else if a string then sets the content of the RECORD file
- :param metadata_first: Put the .dist-info metadata at the front of the zip file.
"""
pythons = ["py2", "py3"]
abis = ["none"]
platforms = ["any"]
tags = list(itertools.product(pythons, abis, platforms))
- metadata_files = [
+ possible_files = [
make_metadata_file(name, version, metadata, metadata_updates, metadata_body),
make_wheel_metadata_file(
name, version, wheel_metadata, tags, wheel_metadata_updates
@@ -375,31 +376,20 @@ def make_wheel(
make_entry_points_file(name, version, entry_points, console_scripts),
]
- non_metadata_files = []
-
if extra_files is not _default:
- non_metadata_files.extend(make_files(extra_files))
+ possible_files.extend(make_files(extra_files))
if extra_metadata_files is not _default:
- metadata_files.extend(make_metadata_files(name, version, extra_metadata_files))
+ possible_files.extend(make_metadata_files(name, version, extra_metadata_files))
if extra_data_files is not _default:
- non_metadata_files.extend(make_data_files(name, version, extra_data_files))
+ possible_files.extend(make_data_files(name, version, extra_data_files))
- actual_metadata_files = list(filter(None, metadata_files))
-
- if metadata_first:
- actual_files = actual_metadata_files + non_metadata_files
- else:
- actual_files = non_metadata_files + actual_metadata_files
-
- record_file = record_file_maker_wrapper(name, version, actual_files, record)
- if record_file:
- if metadata_first:
- actual_files.insert(0, record_file)
- else:
- actual_files.append(record_file)
+ actual_files = filter(None, possible_files)
+ files_and_record_file = record_file_maker_wrapper(
+ name, version, actual_files, record
+ )
wheel_file_name = wheel_name(name, version, pythons, abis, platforms)
- return WheelBuilder(wheel_file_name, actual_files)
+ return WheelBuilder(wheel_file_name, files_and_record_file)