diff --git a/CHANGES.md b/CHANGES.md
index dc3525ca1..6b89dfc3e 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,9 @@
+# 6.0.0
+
+## New Features
+
+- Add PEP691 simple index support `PR #1154`
+
# 5.3.0 (2022-07-29)
## New Features
diff --git a/setup.cfg b/setup.cfg
index e45b0b50d..497885d56 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -16,7 +16,7 @@ project_urls =
Source Code = https://github.com/pypa/bandersnatch
Change Log = https://github.com/pypa/bandersnatch/blob/master/CHANGES.md
url = https://github.com/pypa/bandersnatch/
-version = 5.3.0
+version = 6.0.0.dev0
[options]
install_requires =
diff --git a/src/bandersnatch/__init__.py b/src/bandersnatch/__init__.py
index 8eca49fe7..f639b3985 100644
--- a/src/bandersnatch/__init__.py
+++ b/src/bandersnatch/__init__.py
@@ -19,10 +19,10 @@ def version_str(self) -> str:
__version_info__ = _VersionInfo(
- major=5,
- minor=3,
+ major=6,
+ minor=0,
micro=0,
- releaselevel="",
+ releaselevel="dev0",
serial=0, # Not currently in use with Bandersnatch versioning
)
__version__ = __version_info__.version_str
diff --git a/src/bandersnatch/configuration.py b/src/bandersnatch/configuration.py
index 20c97ac18..00688fdcd 100644
--- a/src/bandersnatch/configuration.py
+++ b/src/bandersnatch/configuration.py
@@ -7,6 +7,8 @@
from pathlib import Path
from typing import Any, Dict, List, NamedTuple, Optional, Type
+from .simple import SimpleFormat, get_format_value
+
logger = logging.getLogger("bandersnatch")
@@ -22,6 +24,7 @@ class SetConfigValues(NamedTuple):
compare_method: str
download_mirror: str
download_mirror_no_fallback: bool
+ simple_format: SimpleFormat
class Singleton(type): # pragma: no cover
@@ -75,7 +78,6 @@ def load_configuration(self) -> None:
self.config.read(config_file)
-# 11-15, 84-89, 98-99, 117-118, 124-126, 144-149
def validate_config_values( # noqa: C901
config: configparser.ConfigParser,
) -> SetConfigValues:
@@ -205,6 +207,12 @@ def validate_config_values( # noqa: C901
+ "is not set in config."
)
+ try:
+ simple_format = get_format_value(config.get("mirror", "simple-format"))
+ except configparser.NoOptionError:
+ logger.debug("Storing all Simple Formats by default ...")
+ simple_format = SimpleFormat.ALL
+
return SetConfigValues(
json_save,
root_uri,
@@ -217,4 +225,5 @@ def validate_config_values( # noqa: C901
compare_method,
download_mirror,
download_mirror_no_fallback,
+ simple_format,
)
diff --git a/src/bandersnatch/default.conf b/src/bandersnatch/default.conf
index c9fc2fbe3..ef7794f32 100644
--- a/src/bandersnatch/default.conf
+++ b/src/bandersnatch/default.conf
@@ -49,6 +49,10 @@ workers = 3
; Recommended setting: the default of false for full pip/pypi compatibility.
hash-index = false
+; Format(s) in which the simple API pages are stored
+; Since PEP 691 both HTML and JSON are supported; valid values: ALL, HTML, JSON
+simple-format = ALL
+
; Whether to stop a sync quickly after an error is found or whether to continue
; syncing but not marking the sync as successful. Value should be "true" or
; "false".
diff --git a/src/bandersnatch/mirror.py b/src/bandersnatch/mirror.py
index 352339799..82654c786 100644
--- a/src/bandersnatch/mirror.py
+++ b/src/bandersnatch/mirror.py
@@ -2,7 +2,6 @@
import configparser
import datetime
import hashlib
-import html
import logging
import os
import sys
@@ -14,7 +13,6 @@
from urllib.parse import unquote, urlparse
from filelock import Timeout
-from packaging.utils import canonicalize_name
from . import utils
from .configuration import validate_config_values
@@ -22,6 +20,7 @@
from .filter import LoadedFilters
from .master import Master
from .package import Package
+from .simple import SimpleAPI, SimpleFormat, SimpleFormats
from .storage import storage_backend_plugins
LOG_PLUGINS = True
@@ -38,9 +37,6 @@ class Mirror:
# of it when starting to sync.
now = None
- # PEP620 Simple API Version
- pypi_repository_version = "1.0"
-
def __init__(self, master: Master, workers: int = 3):
self.master = master
self.filters = LoadedFilters(load_all=True)
@@ -195,13 +191,14 @@ def __init__(
diff_append_epoch: bool = False,
diff_full_path: Optional[Union[Path, str]] = None,
flock_timeout: int = 1,
- diff_file_list: Optional[List] = None,
+ diff_file_list: Optional[List[Path]] = None,
*,
cleanup: bool = False,
release_files_save: bool = True,
compare_method: Optional[str] = None,
download_mirror: Optional[str] = None,
download_mirror_no_fallback: Optional[bool] = False,
+ simple_format: Union[SimpleFormat, str] = "ALL",
) -> None:
super().__init__(master=master, workers=workers)
self.cleanup = cleanup
@@ -231,7 +228,7 @@ def __init__(
# This is generally not necessary, but was added for the official internal
# PyPI mirror, which requires serving packages from
# https://files.pythonhosted.org
- self.root_uri: Optional[str] = root_uri or ""
+ self.root_uri = root_uri or ""
self.diff_file = diff_file
self.diff_append_epoch = diff_append_epoch
self.diff_full_path = diff_full_path
@@ -244,6 +241,15 @@ def __init__(
self.diff_file_list = diff_file_list or []
if self.workers > 10:
raise ValueError("Downloading with more than 10 workers is not allowed.")
+ # Use self. variables to pass as some defaults are defined ...
+ self.simple_api = SimpleAPI(
+ self.storage_backend,
+ simple_format,
+ self.diff_file_list,
+ self.digest_name,
+ hash_index,
+ self.root_uri,
+ )
self._bootstrap(flock_timeout)
self._finish_lock = RLock()
@@ -331,7 +337,7 @@ async def process_package(self, package: Package) -> None:
await self.sync_release_files(package)
await loop.run_in_executor(
- self.storage_backend.executor, self.sync_simple_page, package
+ self.storage_backend.executor, self.sync_simple_pages, package
)
# XMLRPC PyPI Endpoint stores raw_name so we need to provide it
await loop.run_in_executor(
@@ -345,7 +351,9 @@ async def process_package(self, package: Package) -> None:
def finalize_sync(self, sync_index_page: bool = True) -> None:
if sync_index_page:
- self.sync_index_page()
+ self.simple_api.sync_index_page(
+ self.need_index_sync, self.webdir, self.synced_serial
+ )
if self.need_wrapup:
self.wrapup_successful_sync()
return None
@@ -455,59 +463,6 @@ def normalized_legacy_simple_directory() -> Path:
f"Unable to cleanup non PEP 503 dir {deprecated_dir}"
)
- # TODO: This can return SwiftPath types now
- def get_simple_dirs(self, simple_dir: Path) -> List[Path]:
- """Return a list of simple index directories that should be searched
- for package indexes when compiling the main index page."""
- if self.hash_index:
- # We are using index page directory hashing, so the directory
- # format is /simple/f/foo/. We want to return a list of dirs
- # like "simple/f".
- subdirs = [simple_dir / x for x in simple_dir.iterdir() if x.is_dir()]
- else:
- # This is the traditional layout of /simple/foo/. We should
- # return a single directory, "simple".
- subdirs = [simple_dir]
- return subdirs
-
- def find_package_indexes_in_dir(self, simple_dir: Path) -> List[str]:
- """Given a directory that contains simple packages indexes, return
- a sorted list of normalized package names. This presumes every
- directory within is a simple package index directory."""
- simple_path = self.storage_backend.PATH_BACKEND(str(simple_dir))
- return sorted(
- {
- canonicalize_name(str(x.parent.relative_to(simple_path)))
- for x in simple_path.glob("**/index.html")
- if str(x.parent.relative_to(simple_path)) != "."
- }
- )
-
- def sync_index_page(self) -> None:
- if not self.need_index_sync:
- return
- logger.info("Generating global index page.")
- simple_dir = self.webdir / "simple"
- with self.storage_backend.rewrite(str(simple_dir / "index.html")) as f:
- f.write("\n")
- f.write("\n")
- f.write("
\n")
- f.write(
- ' \n'
- )
- f.write(" Simple Index\n")
- f.write(" \n")
- f.write(" \n")
- # This will either be the simple dir, or if we are using index
- # directory hashing, a list of subdirs to process.
- for subdir in self.get_simple_dirs(simple_dir):
- for pkg in self.find_package_indexes_in_dir(subdir):
- # We're really trusty that this is all encoded in UTF-8. :/
- f.write(f' {pkg}
\n')
- f.write(" \n")
- self.diff_file_list.append(simple_dir / "index.html")
-
def wrapup_successful_sync(self) -> None:
if self.errors:
return
@@ -739,99 +694,78 @@ async def sync_release_files(self, package: Package) -> None:
self.altered_packages[package.name] = downloaded_files
- def gen_html_file_tags(self, release: Dict) -> str:
- file_tags = ""
-
- # data-requires-python: requires_python
- if "requires_python" in release and release["requires_python"] is not None:
- file_tags += (
- f' data-requires-python="{html.escape(release["requires_python"])}"'
- )
-
- # data-yanked: yanked_reason
- if "yanked" in release and release["yanked"]:
- if "yanked_reason" in release and release["yanked_reason"]:
- file_tags += f' data-yanked="{html.escape(release["yanked_reason"])}"'
- else:
- file_tags += ' data-yanked=""'
-
- return file_tags
-
- def generate_simple_page(self, package: Package) -> str:
- # Generate the header of our simple page.
- simple_page_content = (
- "\n"
- "\n"
- " \n"
- ' \n'
- " Links for {1}\n"
- " \n"
- " \n"
- " Links for {1}
\n"
- ).format(self.pypi_repository_version, package.raw_name)
-
- release_files = package.release_files
- logger.debug(f"There are {len(release_files)} releases for {package.name}")
- # Lets sort based on the filename rather than the whole URL
- # Typing is hard here as we allow Any/Dict[Any, Any] for JSON
- release_files.sort(key=lambda x: x["filename"]) # type: ignore
-
- digest_name = self.digest_name
-
- simple_page_content += "\n".join(
- [
- ' {}
'.format(
- self._file_url_to_local_url(r["url"]),
- digest_name,
- r["digests"][digest_name],
- self.gen_html_file_tags(r),
- r["filename"],
- )
- for r in release_files
- ]
- )
-
- simple_page_content += (
- f"\n \n\n"
- )
-
- return simple_page_content
-
- def sync_simple_page(self, package: Package) -> None:
+ def sync_simple_pages(self, package: Package) -> None:
logger.info(
- f"Storing index page: {package.name} - in {self.simple_directory(package)}"
+ f"Storing index page(s): {package.name} - in "
+ + str(self.simple_directory(package))
)
- simple_page_content = self.generate_simple_page(package)
+ simple_pages_content = self.simple_api.generate_simple_pages(package)
+
if not self.simple_directory(package).exists():
self.simple_directory(package).mkdir(parents=True)
if self.keep_index_versions > 0:
- self._save_simple_page_version(simple_page_content, package)
+ self._save_simple_page_version(package, simple_pages_content)
else:
- simple_page = self.simple_directory(package) / "index.html"
- with self.storage_backend.rewrite(simple_page, "w", encoding="utf-8") as f:
- f.write(simple_page_content)
+ self.write_simple_pages(package, simple_pages_content)
+
+ def write_simple_pages(self, package: Package, content: SimpleFormats) -> None:
+ logger.debug(f"Attempting to write PEP691 simple pages for {package.name}")
+ if content.html:
+ for html_page in ("index.html", "index.v1_html"):
+ simple_page = self.simple_directory(package) / html_page
+ with self.storage_backend.rewrite(
+ simple_page, "w", encoding="utf-8"
+ ) as f:
+ f.write(content.html)
+ self.diff_file_list.append(simple_page)
+ if content.json:
+ simple_json_page = self.simple_directory(package) / "index.v1_json"
+ with self.storage_backend.rewrite(
+ simple_json_page, "w", encoding="utf-8"
+ ) as f:
+ f.write(content.json)
self.diff_file_list.append(simple_page)
def _save_simple_page_version(
- self, simple_page_content: str, package: Package
+ self, package: Package, content: SimpleFormats
) -> None:
+ logger.debug(
+ "Attempting to write PEP691 versioned simple pages for " + package.name
+ )
versions_path = self._prepare_versions_path(package)
timestamp = utils.make_time_stamp()
- version_file_name = f"index_{package.serial}_{timestamp}.html"
- full_version_path = versions_path / version_file_name
- # TODO: Change based on storage backend
- with self.storage_backend.rewrite(
- full_version_path, "w", encoding="utf-8"
- ) as f:
- f.write(simple_page_content)
- self.diff_file_list.append(full_version_path)
+ version_file_names = (
+ ("index.html", f"index_{package.serial}_{timestamp}.html", content.html),
+ (
+ "index.v1_html",
+ f"index_{package.serial}_{timestamp}.v1_html",
+ content.html,
+ ),
+ (
+ "index.v1_json",
+ f"index_{package.serial}_{timestamp}.v1_json",
+ content.json,
+ ),
+ )
+ for link_name, version_file, page_content in version_file_names:
+ if not page_content:
+ logger.debug(f"No {link_name} content for {package.name}. Skipping.")
+ continue
+ full_version_path = versions_path / version_file
+ with self.storage_backend.rewrite(
+ full_version_path, "w", encoding="utf-8"
+ ) as f:
+ f.write(page_content)
+
+ self.diff_file_list.append(full_version_path)
- symlink_path = self.simple_directory(package) / "index.html"
- if symlink_path.exists() or symlink_path.is_symlink():
- symlink_path.unlink()
+ symlink_path = self.simple_directory(package) / link_name
+            # "or" is deliberate: exists() is False for a dangling symlink and
+            # is_symlink() is False for a regular file; either case must be unlinked
+ if symlink_path.exists() or symlink_path.is_symlink():
+ symlink_path.unlink()
- symlink_path.symlink_to(full_version_path)
+ symlink_path.symlink_to(full_version_path)
def _prepare_versions_path(self, package: Package) -> Path:
versions_path = (
@@ -841,20 +775,18 @@ def _prepare_versions_path(self, package: Package) -> Path:
if not versions_path.exists():
versions_path.mkdir()
else:
- version_files = list(sorted(versions_path.iterdir()))
- version_files_to_remove = len(version_files) - self.keep_index_versions + 1
- for i in range(version_files_to_remove):
- version_files[i].unlink()
+ for ext in (".html", ".v1_html", ".v1_json"):
+ version_files = sorted(
+ p for p in versions_path.iterdir() if p.name.endswith(ext)
+ )
+ version_files_to_remove = (
+ len(version_files) - self.keep_index_versions + 1
+ )
+ for i in range(version_files_to_remove):
+ version_files[i].unlink()
return versions_path
- def _file_url_to_local_url(self, url: str) -> str:
- parsed = urlparse(url)
- if not parsed.path.startswith("/packages"):
- raise RuntimeError(f"Got invalid download URL: {url}")
- prefix = self.root_uri if self.root_uri else "../.."
- return prefix + parsed.path
-
# TODO: This can also return SwiftPath instances now...
def _file_url_to_local_path(self, url: str) -> Path:
path = urlparse(url).path
@@ -874,7 +806,7 @@ async def download_file(
chunk_size: int = 64 * 1024,
urlpath: str = "",
) -> Optional[Path]:
- if urlparse != "":
+ if urlpath != "":
path = self._file_url_to_local_path(urlpath)
else:
path = self._file_url_to_local_path(url)
@@ -1049,6 +981,7 @@ async def mirror(
release_files_save=config_values.release_files_save,
download_mirror=config_values.download_mirror,
download_mirror_no_fallback=config_values.download_mirror_no_fallback,
+ simple_format=config_values.simple_format,
)
changed_packages = await mirror.synchronize(
specific_packages, sync_simple_index=sync_simple_index
diff --git a/src/bandersnatch/simple.py b/src/bandersnatch/simple.py
new file mode 100644
index 000000000..4fe35f9dc
--- /dev/null
+++ b/src/bandersnatch/simple.py
@@ -0,0 +1,272 @@
+import html
+import json
+import logging
+from enum import Enum, auto
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Dict, List, NamedTuple, Optional, Union
+from urllib.parse import urlparse
+
+from packaging.utils import canonicalize_name
+
+from .package import Package
+
+if TYPE_CHECKING:
+ from .storage import Storage
+
+
+class SimpleFormats(NamedTuple):
+ html: str
+ json: str
+
+
+class SimpleFormat(Enum):
+ ALL = auto()
+ HTML = auto()
+ JSON = auto()
+
+
+logger = logging.getLogger(__name__)
+
+
+class InvalidSimpleFormat(KeyError):
+ """We don't have a valid format choice from configuration"""
+
+ pass
+
+
+def get_format_value(format: str) -> SimpleFormat:
+ try:
+ return SimpleFormat[format.upper()]
+ except KeyError:
+        valid_formats = sorted(v.name for v in SimpleFormat)
+ raise InvalidSimpleFormat(
+ f"{format.upper()} is not a valid Simple API format. "
+ + f"Valid Options: {valid_formats}"
+ )
+
+
+class SimpleAPI:
+ """Handle all Simple API file generation"""
+
+ # PEP620 Simple API Version
+ pypi_repository_version = "1.0"
+ # PEP691 Simple API Version
+ pypi_simple_api_version = "1.0"
+
+ def __init__(
+ self,
+ storage_backend: "Storage",
+ format: Union[SimpleFormat, str],
+ diff_file_list: List[Path],
+ digest_name: str,
+ hash_index: bool,
+ root_uri: Optional[str],
+ ) -> None:
+ self.diff_file_list = diff_file_list
+ self.digest_name = digest_name
+ self.format = get_format_value(format) if isinstance(format, str) else format
+ self.hash_index = hash_index
+ self.root_uri = root_uri
+ self.storage_backend = storage_backend
+
+ def html_enabled(self) -> bool:
+ return self.format in {SimpleFormat.ALL, SimpleFormat.HTML}
+
+ def json_enabled(self) -> bool:
+ return self.format in {SimpleFormat.ALL, SimpleFormat.JSON}
+
+ def find_package_indexes_in_dir(self, simple_dir: Path) -> List[str]:
+ """Given a directory that contains simple packages indexes, return
+ a sorted list of normalized package names. This presumes every
+ directory within is a simple package index directory."""
+ simple_path = self.storage_backend.PATH_BACKEND(str(simple_dir))
+ return sorted(
+ {
+ canonicalize_name(str(x.parent.relative_to(simple_path)))
+ for x in simple_path.glob("**/index.html")
+ if str(x.parent.relative_to(simple_path)) != "."
+ }
+ )
+
+ def gen_html_file_tags(self, release: Dict) -> str:
+ file_tags = ""
+
+ # data-requires-python: requires_python
+ if "requires_python" in release and release["requires_python"] is not None:
+ file_tags += (
+ f' data-requires-python="{html.escape(release["requires_python"])}"'
+ )
+
+ # data-yanked: yanked_reason
+ if "yanked" in release and release["yanked"]:
+ if "yanked_reason" in release and release["yanked_reason"]:
+ file_tags += f' data-yanked="{html.escape(release["yanked_reason"])}"'
+ else:
+ file_tags += ' data-yanked=""'
+
+ return file_tags
+
+ # TODO: This can return SwiftPath types now
+ def get_simple_dirs(self, simple_dir: Path) -> List[Path]:
+ """Return a list of simple index directories that should be searched
+ for package indexes when compiling the main index page."""
+ if self.hash_index:
+ # We are using index page directory hashing, so the directory
+ # format is /simple/f/foo/. We want to return a list of dirs
+ # like "simple/f".
+ subdirs = [simple_dir / x for x in simple_dir.iterdir() if x.is_dir()]
+ else:
+ # This is the traditional layout of /simple/foo/. We should
+ # return a single directory, "simple".
+ subdirs = [simple_dir]
+ return subdirs
+
+ def _file_url_to_local_url(self, url: str) -> str:
+ parsed = urlparse(url)
+ if not parsed.path.startswith("/packages"):
+ raise RuntimeError(f"Got invalid download URL: {url}")
+ prefix = self.root_uri if self.root_uri else "../.."
+ return prefix + parsed.path
+
+ def generate_html_simple_page(self, package: Package) -> str:
+ # Generate the header of our simple page.
+        simple_page_content = (
+            "<!DOCTYPE html>\n"
+            "<html>\n"
+            "  <head>\n"
+            '    <meta name="pypi:repository-version" content="{0}">\n'
+            "    <title>Links for {1}</title>\n"
+            "  </head>\n"
+            "  <body>\n"
+            "    <h1>Links for {1}</h1>\n"
+        ).format(self.pypi_repository_version, package.raw_name)
+
+ release_files = package.release_files
+ logger.debug(f"There are {len(release_files)} releases for {package.name}")
+ # Lets sort based on the filename rather than the whole URL
+ # Typing is hard here as we allow Any/Dict[Any, Any] for JSON
+ release_files.sort(key=lambda x: x["filename"]) # type: ignore
+
+ digest_name = self.digest_name
+
+ simple_page_content += "\n".join(
+ [
+                '    <a href="{}#{}={}"{}>{}</a><br/>'.format(
+ self._file_url_to_local_url(r["url"]),
+ digest_name,
+ r["digests"][digest_name],
+ self.gen_html_file_tags(r),
+ r["filename"],
+ )
+ for r in release_files
+ ]
+ )
+
+        simple_page_content += (
+            f"\n  </body>\n</html><!--SERIAL {package.last_serial}-->"
+        )
+
+ return simple_page_content
+
+ def generate_json_simple_page(
+ self, package: Package, *, pretty: bool = False
+ ) -> str:
+ package_json: Dict[str, Any] = {
+ "files": [],
+ "meta": {
+ "api-version": self.pypi_simple_api_version,
+ "_last-serial": str(package.last_serial),
+ },
+ "name": package.name,
+ }
+
+ release_files = package.release_files
+ release_files.sort(key=lambda x: x["filename"]) # type: ignore
+
+ # Add release files into the JSON dict
+ for r in release_files:
+ package_json["files"].append(
+ {
+ "filename": r["filename"],
+ "hashes": {
+ digest_name: digest_hash
+ for digest_name, digest_hash in r["digests"].items()
+ },
+ "requires-python": r.get("requires_python", ""),
+ "url": self._file_url_to_local_url(r["url"]),
+ "yanked": r.get("yanked", False),
+ }
+ )
+
+ if pretty:
+ return json.dumps(package_json, indent=4)
+ return json.dumps(package_json)
+
+ def generate_simple_pages(self, package: Package) -> SimpleFormats:
+ simple_html_content = ""
+ simple_json_content = ""
+ if self.format in {SimpleFormat.ALL, SimpleFormat.HTML}:
+ simple_html_content = self.generate_html_simple_page(package)
+ logger.debug(f"Generated simple HTML format for {package.name}")
+ if self.format in {SimpleFormat.ALL, SimpleFormat.JSON}:
+ simple_json_content = self.generate_json_simple_page(package)
+ logger.debug(f"Generated simple JSON format for {package.name}")
+ assert simple_html_content or simple_json_content
+ return SimpleFormats(simple_html_content, simple_json_content)
+
+ def sync_index_page(
+ self, need_index_sync: bool, webdir: Path, serial: int, *, pretty: bool = False
+ ) -> None:
+ if not need_index_sync:
+ return
+
+ logger.info("Generating global index page.")
+ simple_dir = webdir / "simple"
+ simple_html_path = simple_dir / "index.html"
+ simple_html_version_path = simple_dir / "index.v1_html"
+ simple_json_path = simple_dir / "index.v1_json"
+
+ simple_json: Dict[str, Any] = {
+ "meta": {"_last-serial": serial, "api-version": "1.0"},
+ "projects": [],
+ }
+
+ with self.storage_backend.rewrite(str(simple_html_path)) as f:
+ f.write("\n")
+ f.write("\n")
+ f.write(" \n")
+ f.write(
+ ' \n'
+ )
+ f.write(" Simple Index\n")
+ f.write(" \n")
+ f.write(" \n")
+ # This will either be the simple dir, or if we are using index
+ # directory hashing, a list of subdirs to process.
+ for subdir in self.get_simple_dirs(simple_dir):
+ for pkg in self.find_package_indexes_in_dir(subdir):
+                    # We're really trusting that this is all encoded in UTF-8. :/
+                    f.write(f'    <a href="{pkg}/">{pkg}</a><br/>\n')
+                    if self.json_enabled():
+ simple_json["projects"].append({"name": pkg})
+ f.write(" \n")
+
+ if self.html_enabled():
+ self.diff_file_list.append(simple_html_path)
+ self.storage_backend.copy_file(simple_html_path, simple_html_version_path)
+ self.diff_file_list.append(simple_html_version_path)
+ else:
+ self.storage_backend.delete_file(simple_html_path)
+ logger.debug(
+ f"Deleting simple {simple_html_path} as HTML format is disabled"
+ )
+
+    # TODO: If memory usage gets too high we can write out json as we go like HTML
+ if self.json_enabled():
+ with self.storage_backend.rewrite(str(simple_json_path)) as f:
+ if pretty:
+ json.dump(simple_json, f, indent=4)
+ else:
+ json.dump(simple_json, f)
+ self.diff_file_list.append(simple_json_path)
diff --git a/src/bandersnatch/tests/__init__.py b/src/bandersnatch/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/bandersnatch/tests/conftest.py b/src/bandersnatch/tests/conftest.py
index bcaa2fc9b..229fe0e99 100644
--- a/src/bandersnatch/tests/conftest.py
+++ b/src/bandersnatch/tests/conftest.py
@@ -69,6 +69,8 @@ def package_json() -> Dict[str, Any]:
"md5_digest": "b6bcb391b040c4468262706faf9d3cce",
"size": 0,
"upload_time_iso_8601": "2000-02-02T01:23:45.123456Z",
+ "python_requires": ">=3.6",
+ "yanked": False,
},
{
"url": "https://pypi.example.com/packages/2.7/f/foo/foo.whl",
@@ -80,6 +82,7 @@ def package_json() -> Dict[str, Any]:
"md5_digest": "6bd3ddc295176f4dca196b5eb2c4d858",
"size": 12345,
"upload_time_iso_8601": "2000-03-03T01:23:45.123456Z",
+ "yanked": False,
},
]
},
diff --git a/src/bandersnatch/tests/plugins/test_allowlist_name.py b/src/bandersnatch/tests/plugins/test_allowlist_name.py
index 7ea545219..df8c14f76 100644
--- a/src/bandersnatch/tests/plugins/test_allowlist_name.py
+++ b/src/bandersnatch/tests/plugins/test_allowlist_name.py
@@ -4,13 +4,12 @@
from tempfile import TemporaryDirectory
from unittest import TestCase
-from mock_config import mock_config
-
import bandersnatch.filter
import bandersnatch.storage
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.mock_config import mock_config
class TestAllowListProject(TestCase):
diff --git a/src/bandersnatch/tests/plugins/test_blocklist_name.py b/src/bandersnatch/tests/plugins/test_blocklist_name.py
index 91a373965..983d8b257 100644
--- a/src/bandersnatch/tests/plugins/test_blocklist_name.py
+++ b/src/bandersnatch/tests/plugins/test_blocklist_name.py
@@ -3,12 +3,11 @@
from tempfile import TemporaryDirectory
from unittest import TestCase
-from mock_config import mock_config
-
import bandersnatch.filter
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.mock_config import mock_config
class TestBlockListProject(TestCase):
diff --git a/src/bandersnatch/tests/plugins/test_filename.py b/src/bandersnatch/tests/plugins/test_filename.py
index af163850b..605eef6e7 100644
--- a/src/bandersnatch/tests/plugins/test_filename.py
+++ b/src/bandersnatch/tests/plugins/test_filename.py
@@ -3,12 +3,11 @@
from tempfile import TemporaryDirectory
from unittest import TestCase
-from mock_config import mock_config
-
import bandersnatch.filter
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch_filter_plugins import filename_name
diff --git a/src/bandersnatch/tests/plugins/test_latest_release.py b/src/bandersnatch/tests/plugins/test_latest_release.py
index 20e2b1ad3..fc990458b 100644
--- a/src/bandersnatch/tests/plugins/test_latest_release.py
+++ b/src/bandersnatch/tests/plugins/test_latest_release.py
@@ -3,12 +3,11 @@
from tempfile import TemporaryDirectory
from unittest import TestCase
-from mock_config import mock_config
-
import bandersnatch.filter
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch_filter_plugins import latest_name
diff --git a/src/bandersnatch/tests/plugins/test_metadata_plugins.py b/src/bandersnatch/tests/plugins/test_metadata_plugins.py
index 2e1b53d25..be4573f54 100644
--- a/src/bandersnatch/tests/plugins/test_metadata_plugins.py
+++ b/src/bandersnatch/tests/plugins/test_metadata_plugins.py
@@ -4,12 +4,11 @@
from typing import cast
from unittest import TestCase
-from mock_config import mock_config
-
import bandersnatch.filter
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch_filter_plugins.metadata_filter import SizeProjectMetadataFilter
diff --git a/src/bandersnatch/tests/plugins/test_prerelease_name.py b/src/bandersnatch/tests/plugins/test_prerelease_name.py
index 3193ff8fe..b2babc924 100644
--- a/src/bandersnatch/tests/plugins/test_prerelease_name.py
+++ b/src/bandersnatch/tests/plugins/test_prerelease_name.py
@@ -4,12 +4,11 @@
from tempfile import TemporaryDirectory
from unittest import TestCase
-from mock_config import mock_config
-
import bandersnatch.filter
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch_filter_plugins import prerelease_name
diff --git a/src/bandersnatch/tests/plugins/test_regex_name.py b/src/bandersnatch/tests/plugins/test_regex_name.py
index 9f4818c30..1c60141bd 100644
--- a/src/bandersnatch/tests/plugins/test_regex_name.py
+++ b/src/bandersnatch/tests/plugins/test_regex_name.py
@@ -4,12 +4,11 @@
from tempfile import TemporaryDirectory
from unittest import TestCase
-from mock_config import mock_config
-
import bandersnatch.filter
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch_filter_plugins import regex_name
diff --git a/src/bandersnatch/tests/plugins/test_storage_plugin_s3.py b/src/bandersnatch/tests/plugins/test_storage_plugin_s3.py
index d16be54bb..4f7d6daa2 100644
--- a/src/bandersnatch/tests/plugins/test_storage_plugin_s3.py
+++ b/src/bandersnatch/tests/plugins/test_storage_plugin_s3.py
@@ -1,8 +1,8 @@
from datetime import datetime
-from mock_config import mock_config
from s3path import S3Path
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch_storage_plugins import s3
diff --git a/src/bandersnatch/tests/plugins/test_storage_plugins.py b/src/bandersnatch/tests/plugins/test_storage_plugins.py
index 6b91583c9..6124b48c0 100644
--- a/src/bandersnatch/tests/plugins/test_storage_plugins.py
+++ b/src/bandersnatch/tests/plugins/test_storage_plugins.py
@@ -14,13 +14,12 @@
from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Tuple, Union
from unittest import TestCase, mock
-from mock_config import mock_config
-
import bandersnatch.storage
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
from bandersnatch.storage import PATH_TYPES
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch_storage_plugins import filesystem, swift
if TYPE_CHECKING:
diff --git a/src/bandersnatch/tests/test_configuration.py b/src/bandersnatch/tests/test_configuration.py
index 416d484cc..2a0fc3402 100644
--- a/src/bandersnatch/tests/test_configuration.py
+++ b/src/bandersnatch/tests/test_configuration.py
@@ -11,6 +11,7 @@
Singleton,
validate_config_values,
)
+from bandersnatch.simple import SimpleFormat
class TestBandersnatchConf(TestCase):
@@ -63,6 +64,7 @@ def test_single_config__default__mirror__setting_attributes(self) -> None:
"json",
"master",
"release-files",
+ "simple-format",
"stop-on-error",
"storage-backend",
"timeout",
@@ -139,6 +141,7 @@ def test_validate_config_values(self) -> None:
"hash",
"",
False,
+ SimpleFormat.ALL,
)
no_options_configparser = configparser.ConfigParser()
no_options_configparser["mirror"] = {}
@@ -159,6 +162,7 @@ def test_validate_config_values_release_files_false_sets_root_uri(self) -> None:
"hash",
"",
False,
+ SimpleFormat.ALL,
)
release_files_false_configparser = configparser.ConfigParser()
release_files_false_configparser["mirror"] = {"release-files": "false"}
@@ -181,6 +185,7 @@ def test_validate_config_values_download_mirror_false_sets_no_fallback(
"hash",
"",
False,
+ SimpleFormat.ALL,
)
release_files_false_configparser = configparser.ConfigParser()
release_files_false_configparser["mirror"] = {
diff --git a/src/bandersnatch/tests/test_filter.py b/src/bandersnatch/tests/test_filter.py
index 2660479f1..4932db05c 100644
--- a/src/bandersnatch/tests/test_filter.py
+++ b/src/bandersnatch/tests/test_filter.py
@@ -4,9 +4,8 @@
from tempfile import TemporaryDirectory
from unittest import TestCase
-from mock_config import mock_config
-
from bandersnatch.configuration import BandersnatchConfig
+from bandersnatch.tests.mock_config import mock_config
from bandersnatch.filter import ( # isort:skip
Filter,
diff --git a/src/bandersnatch/tests/test_main.py b/src/bandersnatch/tests/test_main.py
index 1b692f8cf..b9ba3fa2a 100644
--- a/src/bandersnatch/tests/test_main.py
+++ b/src/bandersnatch/tests/test_main.py
@@ -14,6 +14,7 @@
import bandersnatch.storage
from bandersnatch.configuration import Singleton
from bandersnatch.main import main
+from bandersnatch.simple import SimpleFormat
if TYPE_CHECKING:
from bandersnatch.mirror import BandersnatchMirror
@@ -94,6 +95,7 @@ def test_main_reads_config_values(mirror_mock: mock.MagicMock, tmpdir: Path) ->
"compare_method": "hash",
"download_mirror": "",
"download_mirror_no_fallback": False,
+ "simple_format": SimpleFormat.ALL,
} == kwargs
diff --git a/src/bandersnatch/tests/test_mirror.py b/src/bandersnatch/tests/test_mirror.py
index 23d7e0307..ec257e5af 100644
--- a/src/bandersnatch/tests/test_mirror.py
+++ b/src/bandersnatch/tests/test_mirror.py
@@ -14,6 +14,7 @@
from bandersnatch.master import Master
from bandersnatch.mirror import BandersnatchMirror
from bandersnatch.package import Package
+from bandersnatch.tests.test_simple_fixtures import SIXTYNINE_METADATA
from bandersnatch.utils import WINDOWS, make_time_stamp
EXPECTED_REL_HREFS = (
@@ -225,7 +226,9 @@ async def test_mirror_empty_master_gets_index(mirror: BandersnatchMirror) -> Non
local-stats{0}days
packages
simple
-simple{0}index.html""".format(
+simple{0}index.html
+simple{0}index.v1_html
+simple{0}index.v1_json""".format(
sep
) == utils.find(
mirror.webdir
@@ -273,7 +276,11 @@ async def test_mirror_empty_resume_from_todo_list(mirror: BandersnatchMirror) ->
web{0}simple
web{0}simple{0}foobar
web{0}simple{0}foobar{0}index.html
-web{0}simple{0}index.html""".format(
+web{0}simple{0}foobar{0}index.v1_html
+web{0}simple{0}foobar{0}index.v1_json
+web{0}simple{0}index.html
+web{0}simple{0}index.v1_html
+web{0}simple{0}index.v1_json""".format(
sep
)
if WINDOWS:
@@ -311,7 +318,9 @@ async def test_mirror_sync_package_skip_index(mirror: BandersnatchMirror) -> Non
packages{0}2.7{0}f{0}foo{0}foo.whl
packages{0}any{0}f{0}foo{0}foo.zip
pypi{0}foo{0}json
-simple{0}foo{0}index.html""".format(
+simple{0}foo{0}index.html
+simple{0}foo{0}index.v1_html
+simple{0}foo{0}index.v1_json""".format(
sep
) == utils.find(
mirror.webdir, dirs=False
@@ -334,7 +343,11 @@ async def test_mirror_sync_package(mirror: BandersnatchMirror) -> None:
packages{0}any{0}f{0}foo{0}foo.zip
pypi{0}foo{0}json
simple{0}foo{0}index.html
-simple{0}index.html""".format(
+simple{0}foo{0}index.v1_html
+simple{0}foo{0}index.v1_json
+simple{0}index.html
+simple{0}index.v1_html
+simple{0}index.v1_json""".format(
sep
) == utils.find(
mirror.webdir, dirs=False
@@ -371,7 +384,11 @@ async def test_mirror_sync_package_error_no_early_exit(
web{0}packages{0}2.7{0}f{0}foo{0}foo.whl
web{0}packages{0}any{0}f{0}foo{0}foo.zip
web{0}simple{0}foo{0}index.html
-web{0}simple{0}index.html""".format(
+web{0}simple{0}foo{0}index.v1_html
+web{0}simple{0}foo{0}index.v1_json
+web{0}simple{0}index.html
+web{0}simple{0}index.v1_html
+web{0}simple{0}index.v1_json""".format(
sep
)
if WINDOWS:
@@ -445,7 +462,11 @@ async def test_mirror_sync_package_with_hash(
packages{0}2.7{0}f{0}foo{0}foo.whl
packages{0}any{0}f{0}foo{0}foo.zip
simple{0}f{0}foo{0}index.html
-simple{0}index.html""".format(
+simple{0}f{0}foo{0}index.v1_html
+simple{0}f{0}foo{0}index.v1_json
+simple{0}index.html
+simple{0}index.v1_html
+simple{0}index.v1_json""".format(
sep
) == utils.find(
mirror_hash_index.webdir, dirs=False
@@ -487,7 +508,11 @@ async def test_mirror_sync_package_download_mirror(
packages{0}any{0}f{0}foo{0}foo.zip
pypi{0}foo{0}json
simple{0}foo{0}index.html
-simple{0}index.html""".format(
+simple{0}foo{0}index.v1_html
+simple{0}foo{0}index.v1_json
+simple{0}index.html
+simple{0}index.v1_html
+simple{0}index.v1_json""".format(
sep
) == utils.find(
mirror.webdir, dirs=False
@@ -528,7 +553,11 @@ async def test_mirror_sync_package_download_mirror_fallback(
packages{0}any{0}f{0}foo{0}foo.zip
pypi{0}foo{0}json
simple{0}foo{0}index.html
-simple{0}index.html""".format(
+simple{0}foo{0}index.v1_html
+simple{0}foo{0}index.v1_json
+simple{0}index.html
+simple{0}index.v1_html
+simple{0}index.v1_json""".format(
sep
) == utils.find(
mirror.webdir, dirs=False
@@ -634,7 +663,9 @@ def test_find_package_indexes_in_dir_threaded(mirror: BandersnatchMirror) -> Non
with (mirror_base / "web/simple/index.html").open("w") as index:
index.write("")
- packages = local_mirror.find_package_indexes_in_dir(mirror_base / "web/simple")
+ packages = local_mirror.simple_api.find_package_indexes_in_dir(
+ mirror_base / "web/simple"
+ )
assert "index.html" not in packages # This should never be in the list
assert len(packages) == 6 # We expect 6 packages with 6 dirs created
assert packages[0] == "click" # Check sorted - click should be first
@@ -807,9 +838,9 @@ async def test_package_sync_with_normalized_simple_page(
@pytest.mark.asyncio
async def test_package_sync_simple_page_root_uri(mirror: BandersnatchMirror) -> None:
mirror.packages_to_sync = {"foo": 1}
- mirror.root_uri = "https://files.pythonhosted.org"
+ mirror.simple_api.root_uri = "https://files.pythonhosted.org"
await mirror.sync_packages()
- mirror.root_uri = None
+ mirror.simple_api.root_uri = None
expected_root_uri_hrefs = (
' None:
mirror.packages_to_sync = {"foo": 1}
mirror.keep_index_versions = 1
- package = Package("foo", serial=1)
await mirror.sync_packages()
assert not mirror.errors
simple_path = Path("web/simple/foo")
versions_path = simple_path / "versions"
- version_files = os.listdir(versions_path)
- assert len(version_files) == 1
- assert version_files[0] == f"index_{package.serial}_{make_time_stamp()}.html"
+ version_files = sorted(list(versions_path.iterdir()))
+ assert len(version_files) == 3 # html, v1_html, v1_json
+ assert version_files[0].name == f"index_1_{make_time_stamp()}.html"
+ assert version_files[2].name == f"index_1_{make_time_stamp()}.v1_json"
link_path = simple_path / "index.html"
assert link_path.is_symlink()
- assert os.path.basename(os.readlink(str(link_path))) == version_files[0]
+ assert link_path.resolve().name == version_files[0].name
@pytest.mark.asyncio
@@ -1164,12 +1199,14 @@ async def test_keep_index_versions_stores_different_prior_versions(
assert not mirror.errors
version_files = sorted(os.listdir(versions_path))
- assert len(version_files) == 2
+ assert len(version_files) == 6
assert version_files[0].startswith("index_1_2018-10-27")
- assert version_files[1].startswith("index_1_2018-10-28")
- link_path = simple_path / "index.html"
- assert os.path.islink(link_path)
- assert os.path.basename(os.readlink(str(link_path))) == version_files[1]
+ assert version_files[3].startswith("index_1_2018-10-28")
+ html_link_path = simple_path / "index.html"
+ json_link_path = simple_path / "index.v1_json"
+ assert html_link_path.is_symlink()
+ assert json_link_path.is_symlink()
+ assert html_link_path.resolve().name == version_files[3]
@pytest.mark.asyncio
@@ -1188,7 +1225,7 @@ async def test_keep_index_versions_removes_old_versions(
await mirror.sync_packages()
version_files = sorted(f for f in versions_path.iterdir())
- assert len(version_files) == 2
+ assert len(version_files) == 4 # Old + new html + v1_html/json
assert version_files[0].name.startswith("index_1_2018-10-27")
assert version_files[1].name.startswith("index_1_2018-10-28")
link_path = simple_path / "index.html"
@@ -1221,5 +1258,15 @@ def test_determine_packages_to_sync(mirror: BandersnatchMirror) -> None:
assert target_serial == 69
+def test_write_simple_pages(mirror: BandersnatchMirror) -> None:
+ package = Package("69")
+ package._metadata = SIXTYNINE_METADATA
+ with TemporaryDirectory() as td:
+ td_path = Path(td)
+ package_simple_dir = td_path / "simple" / package.name
+ package_simple_dir.mkdir(parents=True)
+ mirror.homedir = mirror.storage_backend.PATH_BACKEND(str(td_path))
+    # TODO(review): test sets up fixtures but never calls write_simple_pages
+
+
if __name__ == "__main__":
pytest.main(sys.argv)
diff --git a/src/bandersnatch/tests/test_simple.py b/src/bandersnatch/tests/test_simple.py
new file mode 100644
index 000000000..77fe8a443
--- /dev/null
+++ b/src/bandersnatch/tests/test_simple.py
@@ -0,0 +1,81 @@
+from configparser import ConfigParser
+from os import sep
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+import pytest
+
+from bandersnatch import utils
+from bandersnatch.package import Package
+from bandersnatch.simple import InvalidSimpleFormat, SimpleAPI, SimpleFormat
+from bandersnatch.storage import Storage
+from bandersnatch.tests.test_simple_fixtures import (
+ EXPECTED_SIMPLE_GLOBAL_JSON_PRETTY,
+ EXPECTED_SIMPLE_SIXTYNINE_JSON,
+ EXPECTED_SIMPLE_SIXTYNINE_JSON_PRETTY,
+ SIXTYNINE_METADATA,
+)
+from bandersnatch_storage_plugins.filesystem import FilesystemStorage
+
+
+def test_format_invalid() -> None:
+ with pytest.raises(InvalidSimpleFormat):
+ SimpleAPI(Storage(), "l33t", [], "digest", False, None)
+
+
+def test_format_valid() -> None:
+ s = SimpleAPI(Storage(), "ALL", [], "digest", False, None)
+ assert s.format == SimpleFormat.ALL
+
+
+def test_json_package_page() -> None:
+ s = SimpleAPI(Storage(), SimpleFormat.JSON, [], "digest", False, None)
+ p = Package("69")
+ p._metadata = SIXTYNINE_METADATA
+ assert EXPECTED_SIMPLE_SIXTYNINE_JSON == s.generate_json_simple_page(p)
+ # Only testing pretty so it's easier for humans ...
+ assert EXPECTED_SIMPLE_SIXTYNINE_JSON_PRETTY == s.generate_json_simple_page(
+ p, pretty=True
+ )
+
+
+def test_json_index_page() -> None:
+ c = ConfigParser()
+ c.add_section("mirror")
+ c["mirror"]["workers"] = "1"
+ s = SimpleAPI(
+ FilesystemStorage(config=c), SimpleFormat.ALL, [], "digest", False, None
+ )
+ with TemporaryDirectory() as td:
+ td_path = Path(td)
+ simple_dir = td_path / "simple"
+ sixtynine_dir = simple_dir / "69"
+ foo_dir = simple_dir / "foo"
+ for a_dir in (sixtynine_dir, foo_dir):
+ a_dir.mkdir(parents=True)
+
+ sixtynine_html = sixtynine_dir / "index.html"
+ foo_html = foo_dir / "index.html"
+ for a_file in (sixtynine_html, foo_html):
+ a_file.touch()
+
+ s.sync_index_page(True, td_path, 12345, pretty=True)
+ # See we get the files we expect on the file system
+ # index.html is needed to trigger the global index finding the package
+ assert """\
+simple
+simple{0}69
+simple{0}69{0}index.html
+simple{0}foo
+simple{0}foo{0}index.html
+simple{0}index.html
+simple{0}index.v1_html
+simple{0}index.v1_json""".format(
+ sep
+ ) == utils.find(
+ td_path
+ )
+ # Check format of JSON
+ assert (simple_dir / "index.v1_json").open(
+ "r"
+ ).read() == EXPECTED_SIMPLE_GLOBAL_JSON_PRETTY
diff --git a/src/bandersnatch/tests/test_simple_fixtures.py b/src/bandersnatch/tests/test_simple_fixtures.py
new file mode 100644
index 000000000..eeb6ff7d3
--- /dev/null
+++ b/src/bandersnatch/tests/test_simple_fixtures.py
@@ -0,0 +1,160 @@
+# flake8: noqa
+
+SIXTYNINE_METADATA = {
+ "info": {
+ "author": "Cooper Lees",
+ "author_email": "me@cooperlees.com",
+ "bugtrack_url": None,
+ "classifiers": [
+ "Development Status :: 3 - Alpha",
+ "License :: OSI Approved :: BSD License",
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3.6",
+ ],
+ "description": "# 69",
+ "description_content_type": "",
+ "docs_url": None,
+ "download_url": "",
+ "downloads": {"last_day": -1, "last_month": -1, "last_week": -1},
+ "home_page": "http://github.com/cooperlees/69",
+ "keywords": "",
+ "license": "BSD",
+ "maintainer": "",
+ "maintainer_email": "",
+ "name": "69",
+ "package_url": "https://pypi.org/project/69/",
+ "platform": "",
+ "project_url": "https://pypi.org/project/69/",
+ "project_urls": {"Homepage": "http://github.com/cooperlees/69"},
+ "release_url": "https://pypi.org/project/69/6.9/",
+ "requires_dist": None,
+ "requires_python": ">=3.6",
+ "summary": "Handy module for 2",
+ "version": "6.9",
+ "yanked": False,
+ "yanked_reason": None,
+ },
+ "last_serial": 10333928,
+ "releases": {
+ "0.69": [
+ {
+ "comment_text": "",
+ "digests": {
+ "md5": "4328d962656395fbd3e730c9d30bb48c",
+ "sha256": "5c11f48399f9b1bca802751513f1f97bff6ce97e6facb576b7729e1351453c10",
+ },
+ "downloads": -1,
+ "filename": "69-0.69.tar.gz",
+ "has_sig": False,
+ "md5_digest": "4328d962656395fbd3e730c9d30bb48c",
+ "packagetype": "sdist",
+ "python_version": "source",
+ "requires_python": ">=3.6",
+ "size": 1078,
+ "upload_time": "2018-05-17T03:37:19",
+ "upload_time_iso_8601": "2018-05-17T03:37:19.330556Z",
+ "url": "https://files.pythonhosted.org/packages/d3/cc/95dc5434362bd333a1fec275231775d748315b26edf1e7e568e6f8660238/69-0.69.tar.gz",
+ "yanked": False,
+ "yanked_reason": None,
+ }
+ ],
+ "6.9": [
+ {
+ "comment_text": "",
+ "digests": {
+ "md5": "ff4bf804ef3722a1fd8853a8a32513d4",
+ "sha256": "0c8deb7c8574787283c3fc08b714ee63fd6752a38d13515a9d8508798d428597",
+ },
+ "downloads": -1,
+ "filename": "69-6.9.tar.gz",
+ "has_sig": False,
+ "md5_digest": "ff4bf804ef3722a1fd8853a8a32513d4",
+ "packagetype": "sdist",
+ "python_version": "source",
+ "requires_python": ">=3.6",
+ "size": 1077,
+ "upload_time": "2018-05-17T03:47:45",
+ "upload_time_iso_8601": "2018-05-17T03:47:45.953704Z",
+ "url": "https://files.pythonhosted.org/packages/7b/6e/7c4ce77c6ca092e94e19b78282b459e7f8270362da655cbc6a75eeb9cdd7/69-6.9.tar.gz",
+ "yanked": False,
+ "yanked_reason": None,
+ }
+ ],
+ },
+ "urls": [
+ {
+ "comment_text": "",
+ "digests": {
+ "md5": "ff4bf804ef3722a1fd8853a8a32513d4",
+ "sha256": "0c8deb7c8574787283c3fc08b714ee63fd6752a38d13515a9d8508798d428597",
+ },
+ "downloads": -1,
+ "filename": "69-6.9.tar.gz",
+ "has_sig": False,
+ "md5_digest": "ff4bf804ef3722a1fd8853a8a32513d4",
+ "packagetype": "sdist",
+ "python_version": "source",
+ "requires_python": ">=3.6",
+ "size": 1077,
+ "upload_time": "2018-05-17T03:47:45",
+ "upload_time_iso_8601": "2018-05-17T03:47:45.953704Z",
+ "url": "https://files.pythonhosted.org/packages/7b/6e/7c4ce77c6ca092e94e19b78282b459e7f8270362da655cbc6a75eeb9cdd7/69-6.9.tar.gz",
+ "yanked": False,
+ "yanked_reason": None,
+ }
+ ],
+ "vulnerabilities": [],
+}
+
+EXPECTED_SIMPLE_SIXTYNINE_JSON = """\
+{"files": [{"filename": "69-0.69.tar.gz", "hashes": {"md5": "4328d962656395fbd3e730c9d30bb48c", "sha256": "5c11f48399f9b1bca802751513f1f97bff6ce97e6facb576b7729e1351453c10"}, "requires-python": ">=3.6", "url": "../../packages/d3/cc/95dc5434362bd333a1fec275231775d748315b26edf1e7e568e6f8660238/69-0.69.tar.gz", "yanked": false}, {"filename": "69-6.9.tar.gz", "hashes": {"md5": "ff4bf804ef3722a1fd8853a8a32513d4", "sha256": "0c8deb7c8574787283c3fc08b714ee63fd6752a38d13515a9d8508798d428597"}, "requires-python": ">=3.6", "url": "../../packages/7b/6e/7c4ce77c6ca092e94e19b78282b459e7f8270362da655cbc6a75eeb9cdd7/69-6.9.tar.gz", "yanked": false}], "meta": {"api-version": "1.0", "_last-serial": "10333928"}, "name": "69"}\
+"""
+
+EXPECTED_SIMPLE_SIXTYNINE_JSON_PRETTY = """\
+{
+ "files": [
+ {
+ "filename": "69-0.69.tar.gz",
+ "hashes": {
+ "md5": "4328d962656395fbd3e730c9d30bb48c",
+ "sha256": "5c11f48399f9b1bca802751513f1f97bff6ce97e6facb576b7729e1351453c10"
+ },
+ "requires-python": ">=3.6",
+ "url": "../../packages/d3/cc/95dc5434362bd333a1fec275231775d748315b26edf1e7e568e6f8660238/69-0.69.tar.gz",
+ "yanked": false
+ },
+ {
+ "filename": "69-6.9.tar.gz",
+ "hashes": {
+ "md5": "ff4bf804ef3722a1fd8853a8a32513d4",
+ "sha256": "0c8deb7c8574787283c3fc08b714ee63fd6752a38d13515a9d8508798d428597"
+ },
+ "requires-python": ">=3.6",
+ "url": "../../packages/7b/6e/7c4ce77c6ca092e94e19b78282b459e7f8270362da655cbc6a75eeb9cdd7/69-6.9.tar.gz",
+ "yanked": false
+ }
+ ],
+ "meta": {
+ "api-version": "1.0",
+ "_last-serial": "10333928"
+ },
+ "name": "69"
+}\
+"""
+
+EXPECTED_SIMPLE_GLOBAL_JSON_PRETTY = """\
+{
+ "meta": {
+ "_last-serial": 12345,
+ "api-version": "1.0"
+ },
+ "projects": [
+ {
+ "name": "69"
+ },
+ {
+ "name": "foo"
+ }
+ ]
+}\
+"""
diff --git a/src/bandersnatch/tests/test_sync.py b/src/bandersnatch/tests/test_sync.py
index eaf331977..90a9e5fcd 100644
--- a/src/bandersnatch/tests/test_sync.py
+++ b/src/bandersnatch/tests/test_sync.py
@@ -26,7 +26,11 @@ async def test_sync_specific_packages(mirror: BandersnatchMirror) -> None:
packages{0}any{0}f{0}foo{0}foo.zip
pypi{0}foo{0}json
simple{0}foo{0}index.html
-simple{0}index.html""".format(
+simple{0}foo{0}index.v1_html
+simple{0}foo{0}index.v1_json
+simple{0}index.html
+simple{0}index.v1_html
+simple{0}index.v1_json""".format(
sep
) == utils.find(
mirror.webdir, dirs=False
diff --git a/src/bandersnatch/unittest.conf b/src/bandersnatch/unittest.conf
index 0edf53fe1..9e7f63c83 100644
--- a/src/bandersnatch/unittest.conf
+++ b/src/bandersnatch/unittest.conf
@@ -42,6 +42,10 @@ workers = 3
; Recommended setting: the default of false for full pip/pypi compatibility.
hash-index = false
+; Format the simple API index pages should be generated in
+; Since PEP 691 both HTML and JSON are supported (HTML, JSON, or ALL)
+simple-format = ALL
+
; Whether to stop a sync quickly after an error is found or whether to continue
; syncing but not marking the sync as successful. Value should be "true" or
; "false".
diff --git a/test_runner.py b/test_runner.py
index a7a935463..f2b9fbc6f 100644
--- a/test_runner.py
+++ b/test_runner.py
@@ -8,6 +8,7 @@
then check for expected outputs to exist
"""
+import json
from configparser import ConfigParser
from os import environ
from pathlib import Path
@@ -42,6 +43,7 @@
def check_ci(suppress_errors: bool = False) -> int:
black_index = MIRROR_BASE / "simple/b/black/index.html"
pyaib_index = MIRROR_BASE / "simple/p/pyaib/index.html"
+ pyaib_json_index = MIRROR_BASE / "simple/p/pyaib/index.v1_json"
pyaib_json = MIRROR_BASE / "json/pyaib"
pyaib_tgz = (
MIRROR_BASE
@@ -77,6 +79,13 @@ def check_ci(suppress_errors: bool = False) -> int:
print(f"{EOP} {A_BLACK_WHL} exists ... delete failed?")
return 74
+ if not suppress_errors and not pyaib_json_index.exists():
+ print(f"{EOP} {pyaib_json_index} does not exist ...")
+ return 75
+ else:
+ with pyaib_json_index.open("r") as fp:
+ json.load(fp) # Check it's valid JSON
+
rmtree(MIRROR_ROOT)
print("Bandersnatch PyPI CI finished successfully!")