From bd623d616eb6b3a9a94f078392f6fed388f3215a Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Mon, 30 Aug 2021 18:26:35 +0300
Subject: [PATCH 01/11] Reuse `temporary_filename` in upload code

---
 warehouse/forklift/legacy.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
index b0d27483c6b4..c1282b0c00f1 100644
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -1321,7 +1321,7 @@ def file_upload(request):
         # Also buffer the entire signature file to disk.
         if "gpg_signature" in request.POST:
             has_signature = True
-            with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp:
+            with open(temporary_filename + ".asc", "wb") as fp:
                 signature_size = 0
                 for chunk in iter(
                     lambda: request.POST["gpg_signature"].file.read(8096), b""
@@ -1332,7 +1332,7 @@ def file_upload(request):
                     fp.write(chunk)
 
             # Check whether signature is ASCII armored
-            with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp:
+            with open(temporary_filename + ".asc", "rb") as fp:
                 if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"):
                     raise _exc_with_message(
                         HTTPBadRequest, "PGP signature isn't ASCII armored."
@@ -1412,7 +1412,7 @@ def file_upload(request):
         storage = request.find_service(IFileStorage, name="primary")
         storage.store(
             file_.path,
-            os.path.join(tmpdir, filename),
+            temporary_filename,
             meta={
                 "project": file_.release.project.normalized_name,
                 "version": file_.release.version,
@@ -1423,7 +1423,7 @@ def file_upload(request):
         if has_signature:
             storage.store(
                 file_.pgp_path,
-                os.path.join(tmpdir, filename + ".asc"),
+                temporary_filename + ".asc",
                 meta={
                     "project": file_.release.project.normalized_name,
                     "version": file_.release.version,

From ef5ebbd91ff95750d12a41babb6e4262a7611519 Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Tue, 31 Aug 2021 07:19:58 +0300
Subject: [PATCH 02/11] Extract METADATA on wheel upload as *.whl.metadata (#8254)

This allows clients to download just the .metadata file for dependency
resolution instead of the full wheel, as happens today:
https://github.com/pypa/warehouse/issues/8254#issuecomment-908327875

The filename convention and download location are covered by PEP 658:
https://www.python.org/dev/peps/pep-0658/#specification
---
 warehouse/forklift/legacy.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
index c1282b0c00f1..e24d52af15c7 100644
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -1308,6 +1308,7 @@ def file_upload(request):
 
         # Check that if it's a binary wheel, it's on a supported platform
         if filename.endswith(".whl"):
+            has_wheel_metadata = True
             wheel_info = _wheel_file_re.match(filename)
             plats = wheel_info.group("plat").split(".")
             for plat in plats:
@@ -1317,6 +1318,14 @@ def file_upload(request):
                         "Binary wheel '{filename}' has an unsupported "
                         "platform tag '{plat}'.".format(filename=filename, plat=plat),
                     )
+            # Extract .metadata file
+            # https://www.python.org/dev/peps/pep-0658/#specification
+            with zipfile.ZipFile(temporary_filename) as zfp:
+                metafile = wheel_info.group("namever") + ".dist-info/METADATA"
+                with open(temporary_filename + ".metadata", "wb") as fp:
+                    fp.write(zfp.read(metafile))
+        else:
+            has_wheel_metadata = False
 
         # Also buffer the entire signature file to disk.
if "gpg_signature" in request.POST: @@ -1420,6 +1429,17 @@ def file_upload(request): "python-version": file_.python_version, }, ) + if has_wheel_metadata: + storage.store( + file_.path + ".metadata", + temporary_filename + ".metadata", + meta={ + "project": file_.release.project.normalized_name, + "version": file_.release.version, + "package-type": file_.packagetype, + "python-version": file_.python_version, + }, + ) if has_signature: storage.store( file_.pgp_path, From 6f9eaa364c3d23962613e5b4fe29a6daa37af8c6 Mon Sep 17 00:00:00 2001 From: Anatoli Babenia Date: Thu, 2 Sep 2021 23:29:43 +0300 Subject: [PATCH 03/11] Fix tests by using .zip content for a .whl Tests were failing, because wheel upload test used .tar.gz content, and zipfile could not open that content to extract METADATA (which was also absent). The fix adds two helpers to avoid repeated code. --- tests/unit/forklift/test_legacy.py | 53 +++++++++++++++++++++++------- 1 file changed, 41 insertions(+), 12 deletions(-) diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index 6129068c5ed3..9b7b6ad9d0e5 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -72,19 +72,26 @@ def _get_tar_testdata(compression_type=""): return temp_f.getvalue() +def _get_whl_testdata(name="fake_package", version="1.0"): + temp_f = io.BytesIO() + with zipfile.ZipFile(file=temp_f, mode="w") as zfp: + zfp.writestr(f"{name}-{version}.dist-info/METADATA", "Fake metadata") + return temp_f.getvalue() + + +def _storage_hash(data): + return hashlib.blake2b(data, digest_size=256 // 8).hexdigest() + + _TAR_GZ_PKG_TESTDATA = _get_tar_testdata("gz") _TAR_GZ_PKG_MD5 = hashlib.md5(_TAR_GZ_PKG_TESTDATA).hexdigest() _TAR_GZ_PKG_SHA256 = hashlib.sha256(_TAR_GZ_PKG_TESTDATA).hexdigest() -_TAR_GZ_PKG_STORAGE_HASH = hashlib.blake2b( - _TAR_GZ_PKG_TESTDATA, digest_size=256 // 8 -).hexdigest() +_TAR_GZ_PKG_STORAGE_HASH = _storage_hash(_TAR_GZ_PKG_TESTDATA) _TAR_BZ2_PKG_TESTDATA = _get_tar_testdata("bz2") _TAR_BZ2_PKG_MD5 = hashlib.md5(_TAR_BZ2_PKG_TESTDATA).hexdigest() _TAR_BZ2_PKG_SHA256 = hashlib.sha256(_TAR_BZ2_PKG_TESTDATA).hexdigest() -_TAR_BZ2_PKG_STORAGE_HASH = hashlib.blake2b( - _TAR_BZ2_PKG_TESTDATA, digest_size=256 // 8 -).hexdigest() +_TAR_BZ2_PKG_STORAGE_HASH = _storage_hash(_TAR_BZ2_PKG_TESTDATA) class TestExcWithMessage: @@ -2761,6 +2768,8 @@ def test_upload_succeeds_with_wheel( RoleFactory.create(user=user, project=project) filename = f"{project.name}-{release.version}-cp34-none-{plat}.whl" + filebody = _get_whl_testdata(project.name) + file_storage_hash = _storage_hash(filebody) pyramid_config.testing_securitypolicy(identity=user) db_request.user = user @@ -2772,10 +2781,10 @@ def test_upload_succeeds_with_wheel( "version": release.version, "filetype": "bdist_wheel", "pyversion": "cp34", - "md5_digest": _TAR_GZ_PKG_MD5, + "md5_digest": hashlib.md5(filebody).hexdigest(), "content": pretend.stub( filename=filename, - file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + file=io.BytesIO(filebody), type="application/tar", ), } @@ -2784,7 +2793,10 @@ def test_upload_succeeds_with_wheel( @pretend.call_recorder def storage_service_store(path, file_path, *, meta): with open(file_path, "rb") as fp: - assert fp.read() == _TAR_GZ_PKG_TESTDATA + if file_path.endswith(".metadata"): + assert fp.read() == b"Fake metadata" + else: + assert fp.read() == filebody storage_service = pretend.stub(store=storage_service_store) @@ -2808,9 +2820,9 @@ def storage_service_store(path, file_path, *, meta): pretend.call( "/".join( 
                     [
-                        _TAR_GZ_PKG_STORAGE_HASH[:2],
-                        _TAR_GZ_PKG_STORAGE_HASH[2:4],
-                        _TAR_GZ_PKG_STORAGE_HASH[4:],
+                        file_storage_hash[:2],
+                        file_storage_hash[2:4],
+                        file_storage_hash[4:],
                         filename,
                     ]
                 ),
@@ -2821,6 +2833,23 @@ def storage_service_store(path, file_path, *, meta):
                     "package-type": "bdist_wheel",
                     "python-version": "cp34",
                 },
+            ),
+            pretend.call(
+                "/".join(
+                    [
+                        file_storage_hash[:2],
+                        file_storage_hash[2:4],
+                        file_storage_hash[4:],
+                        filename + '.metadata',
+                    ]
+                ),
+                mock.ANY,
+                meta={
+                    "project": project.normalized_name,
+                    "version": release.version,
+                    "package-type": "bdist_wheel",
+                    "python-version": "cp34",
+                },
             )
         ]
 

From db93dd2bd719545b43c30ec32490245f6d528e19 Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Fri, 3 Sep 2021 00:21:25 +0300
Subject: [PATCH 04/11] Fix one more test that decided to fail

I changed the tested content-type to one more appropriate for .zip;
Twine sends application/octet-stream.
---
 tests/unit/forklift/test_legacy.py | 38 +++++++++++++++++++++++-------
 1 file changed, 30 insertions(+), 8 deletions(-)

diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py
index 9b7b6ad9d0e5..1cd425db5a4b 100644
--- a/tests/unit/forklift/test_legacy.py
+++ b/tests/unit/forklift/test_legacy.py
@@ -2785,7 +2785,7 @@ def test_upload_succeeds_with_wheel(
                 "content": pretend.stub(
                     filename=filename,
                     file=io.BytesIO(filebody),
-                    type="application/tar",
+                    type="application/octet-stream",
                 ),
             }
         )
@@ -2903,6 +2903,8 @@ def test_upload_succeeds_with_wheel_after_sdist(
         RoleFactory.create(user=user, project=project)
 
         filename = f"{project.name}-{release.version}-cp34-none-any.whl"
+        filebody = _get_whl_testdata(project.name, release.version)
+        file_storage_hash = _storage_hash(filebody)
 
         pyramid_config.testing_securitypolicy(identity=user)
         db_request.user = user
@@ -2914,11 +2916,11 @@ def test_upload_succeeds_with_wheel_after_sdist(
                 "version": release.version,
                 "filetype": "bdist_wheel",
                 "pyversion": "cp34",
-                "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+                "md5_digest": hashlib.md5(filebody).hexdigest(),
                 "content": pretend.stub(
                     filename=filename,
-                    file=io.BytesIO(b"A fake file."),
-                    type="application/tar",
+                    file=io.BytesIO(filebody),
+                    type="application/zip",
                 ),
             }
         )
@@ -2926,7 +2928,10 @@ def test_upload_succeeds_with_wheel_after_sdist(
         @pretend.call_recorder
         def storage_service_store(path, file_path, *, meta):
             with open(file_path, "rb") as fp:
-                assert fp.read() == b"A fake file."
+                if file_path.endswith(".metadata"):
+                    assert fp.read() == b"Fake metadata"
+                else:
+                    assert fp.read() == filebody
 
         storage_service = pretend.stub(store=storage_service_store)
         db_request.find_service = pretend.call_recorder(
@@ -2949,9 +2954,9 @@ def storage_service_store(path, file_path, *, meta):
             pretend.call(
                 "/".join(
                     [
-                        "4e",
-                        "6e",
-                        "fa4c0ee2bbad071b4f5b5ea68f1aea89fa716e7754eb13e2314d45a5916e",
+                        file_storage_hash[:2],
+                        file_storage_hash[2:4],
+                        file_storage_hash[4:],
                         filename,
                     ]
                 ),
@@ -2962,6 +2967,23 @@ def storage_service_store(path, file_path, *, meta):
                     "package-type": "bdist_wheel",
                     "python-version": "cp34",
                 },
+            ),
+            pretend.call(
+                "/".join(
+                    [
+                        file_storage_hash[:2],
+                        file_storage_hash[2:4],
+                        file_storage_hash[4:],
+                        filename + '.metadata',
+                    ]
+                ),
+                mock.ANY,
+                meta={
+                    "project": project.normalized_name,
+                    "version": release.version,
+                    "package-type": "bdist_wheel",
+                    "python-version": "cp34",
+                },
             )
         ]
 

From 3508eea3d89f84a0efc9f0606476c71e0e74066d Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Fri, 3 Sep 2021 01:26:28 +0300
Subject: [PATCH 05/11] Black formatting fix

---
 tests/unit/forklift/test_legacy.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py
index 1cd425db5a4b..62aea04bdd07 100644
--- a/tests/unit/forklift/test_legacy.py
+++ b/tests/unit/forklift/test_legacy.py
@@ -2840,7 +2840,7 @@ def storage_service_store(path, file_path, *, meta):
                         file_storage_hash[:2],
                         file_storage_hash[2:4],
                         file_storage_hash[4:],
-                        filename + '.metadata',
+                        filename + ".metadata",
                     ]
                 ),
                 mock.ANY,
@@ -2850,7 +2850,7 @@ def storage_service_store(path, file_path, *, meta):
                     "package-type": "bdist_wheel",
                     "python-version": "cp34",
                 },
-            )
+            ),
         ]
 
         # Ensure that a File object has been created.
@@ -2974,7 +2974,7 @@ def storage_service_store(path, file_path, *, meta):
                         file_storage_hash[:2],
                         file_storage_hash[2:4],
                         file_storage_hash[4:],
-                        filename + '.metadata',
+                        filename + ".metadata",
                     ]
                 ),
                 mock.ANY,
@@ -2984,7 +2984,7 @@ def storage_service_store(path, file_path, *, meta):
                     "package-type": "bdist_wheel",
                     "python-version": "cp34",
                 },
-            )
+            ),
         ]
 
         # Ensure that a File object has been created.

From 4004c9cdbfb5df1e449df22f88061a59ed5a9eac Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Fri, 17 Sep 2021 14:28:03 +0300
Subject: [PATCH 06/11] Move extraction code into extract_wheel_metadata()

This will be useful for backfill scripts.
---
 warehouse/forklift/legacy.py | 24 ++++++++++++++++++------
 1 file changed, 18 insertions(+), 6 deletions(-)

diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
index e24d52af15c7..d5164026906b 100644
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -790,6 +790,21 @@ def _is_duplicate_file(db_session, filename, hashes):
     return None
 
 
+def extract_wheel_metadata(path):
+    """
+    Extract the METADATA file from a wheel and return its content. The
+    name of the .whl file is used to find the corresponding .dist-info dir.
+
+    See https://www.python.org/dev/peps/pep-0658/#specification
+    """
+    global _wheel_file_re
+    filename = os.path.basename(path)
+    namever = _wheel_file_re.match(filename).group("namever")
+    metafile = namever + ".dist-info/METADATA"
+    with zipfile.ZipFile(path) as zfp:
+        return zfp.read(metafile)
+
+
 @view_config(
     route_name="forklift.legacy.file_upload",
     uses_session=True,
@@ -1318,12 +1333,9 @@ def file_upload(request):
                         "Binary wheel '{filename}' has an unsupported "
                         "platform tag '{plat}'.".format(filename=filename, plat=plat),
                     )
-            # Extract .metadata file
-            # https://www.python.org/dev/peps/pep-0658/#specification
-            with zipfile.ZipFile(temporary_filename) as zfp:
-                metafile = wheel_info.group("namever") + ".dist-info/METADATA"
-                with open(temporary_filename + ".metadata", "wb") as fp:
-                    fp.write(zfp.read(metafile))
+            wheel_metadata = extract_wheel_metadata(temporary_filename)
+            with open(temporary_filename + ".metadata", "wb") as fp:
+                fp.write(wheel_metadata)
         else:
             has_wheel_metadata = False

From b880692eeb4a89744e3e0e25a1ff32417805c184 Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Fri, 17 Sep 2021 15:46:53 +0300
Subject: [PATCH 07/11] Generate metadata hash as base64 digest of blake2s-128

---
 warehouse/forklift/legacy.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
index d5164026906b..bccc0d76d8cd 100644
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -10,6 +10,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import base64
 import email
 import hashlib
 import hmac
@@ -1323,7 +1324,6 @@ def file_upload(request):
 
         # Check that if it's a binary wheel, it's on a supported platform
         if filename.endswith(".whl"):
-            has_wheel_metadata = True
             wheel_info = _wheel_file_re.match(filename)
             plats = wheel_info.group("plat").split(".")
             for plat in plats:
@@ -1336,8 +1336,11 @@ def file_upload(request):
             wheel_metadata = extract_wheel_metadata(temporary_filename)
             with open(temporary_filename + ".metadata", "wb") as fp:
                 fp.write(wheel_metadata)
+            metadata_hash = base64.b64encode(
+                hashlib.blake2s(wheel_metadata, digest_size=128 // 8).digest()
+            ).decode("utf-8")
         else:
-            has_wheel_metadata = False
+            metadata_hash = None
 
         # Also buffer the entire signature file to disk.
if "gpg_signature" in request.POST: @@ -1441,7 +1444,7 @@ def file_upload(request): "python-version": file_.python_version, }, ) - if has_wheel_metadata: + if metadata_hash is not None: storage.store( file_.path + ".metadata", temporary_filename + ".metadata", From 77022a61e7ae5a602107e32673ef570f6835699d Mon Sep 17 00:00:00 2001 From: Anatoli Babenia Date: Sat, 18 Sep 2021 10:39:12 +0300 Subject: [PATCH 08/11] Store `metadata_hash` in DB --- warehouse/forklift/legacy.py | 1 + ...9fe2_add_a_metadata_hash_column_to_file.py | 45 +++++++++++++++++++ warehouse/packaging/models.py | 1 + 3 files changed, 47 insertions(+) create mode 100644 warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index bccc0d76d8cd..cc0aae045494 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -1381,6 +1381,7 @@ def file_upload(request): md5_digest=file_hashes["md5"], sha256_digest=file_hashes["sha256"], blake2_256_digest=file_hashes["blake2_256"], + metadata_hash=metadata_hash, # Figure out what our filepath is going to be, we're going to use a # directory structure based on the hash of the file contents. This # will ensure that the contents of the file cannot change without diff --git a/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py b/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py new file mode 100644 index 000000000000..b21a66360e04 --- /dev/null +++ b/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Add a metadata_hash column to File + +Revision ID: 9b9778779fe2 +Revises: 1dbb95161e5a +Create Date: 2021-09-18 07:34:31.828437 +""" + +from alembic import op +import sqlalchemy as sa + + +revision = '9b9778779fe2' +down_revision = '1dbb95161e5a' + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. +# This is because the migrations will be ran automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('release_files', sa.Column('metadata_hash', sa.Text(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
+    op.drop_column('release_files', 'metadata_hash')
+    # ### end Alembic commands ###
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
index a3a64593a3a4..10503d315fb5 100644
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -666,6 +666,7 @@ def __table_args__(cls):  # noqa
     md5_digest = Column(Text, unique=True, nullable=False)
     sha256_digest = Column(CIText, unique=True, nullable=False)
     blake2_256_digest = Column(CIText, unique=True, nullable=False)
+    metadata_hash = Column(Text, unique=False, nullable=True)
 
     upload_time = Column(DateTime(timezone=False), server_default=func.now())
     uploaded_via = Column(Text)

From a9d0ce446e400c974c0e00d5913a790fb18d2fbe Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Sat, 18 Sep 2021 10:46:11 +0300
Subject: [PATCH 09/11] Fix autogenerated migration to pass linter tests

---
 ...9fe2_add_a_metadata_hash_column_to_file.py | 22 +++++--------------
 1 file changed, 5 insertions(+), 17 deletions(-)

diff --git a/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py b/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py
index b21a66360e04..62d6e8658ee5 100644
--- a/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py
+++ b/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py
@@ -17,29 +17,17 @@
 Create Date: 2021-09-18 07:34:31.828437
 """
 
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 
 
-revision = '9b9778779fe2'
-down_revision = '1dbb95161e5a'
+revision = "9b9778779fe2"
+down_revision = "1dbb95161e5a"
 
-# Note: It is VERY important to ensure that a migration does not lock for a
-#       long period of time and to ensure that each individual migration does
-#       not break compatibility with the *previous* version of the code base.
-#       This is because the migrations will be ran automatically as part of the
-#       deployment process, but while the previous version of the code is still
-#       up and running. Thus backwards incompatible changes must be broken up
-#       over multiple migrations inside of multiple pull requests in order to
-#       phase them in over multiple deploys.
 
 def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('release_files', sa.Column('metadata_hash', sa.Text(), nullable=True))
-    # ### end Alembic commands ###
+    op.add_column("release_files", sa.Column("metadata_hash", sa.Text(), nullable=True))
 
 
 def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('release_files', 'metadata_hash')
-    # ### end Alembic commands ###
+    op.drop_column("release_files", "metadata_hash")

From aff2dfad0f27a2ea2beadec436ca3c6a9e7a660a Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Mon, 6 Dec 2021 18:47:42 +0300
Subject: [PATCH 10/11] Rebase Alembic migration

---
 .../9b9778779fe2_add_a_metadata_hash_column_to_file.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py b/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py
index 62d6e8658ee5..0367955e38ad 100644
--- a/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py
+++ b/warehouse/migrations/versions/9b9778779fe2_add_a_metadata_hash_column_to_file.py
@@ -13,7 +13,7 @@
 Add a metadata_hash column to File
 
 Revision ID: 9b9778779fe2
-Revises: 1dbb95161e5a
+Revises: d582fb87b94c
 Create Date: 2021-09-18 07:34:31.828437
 """
 
@@ -22,7 +22,7 @@ from alembic import op
 
 
 revision = "9b9778779fe2"
-down_revision = "1dbb95161e5a"
+down_revision = "d582fb87b94c"
 
 

From 86cc1414be8088944b1428539d2bbb7d1eaf49e7 Mon Sep 17 00:00:00 2001
From: Anatoli Babenia
Date: Fri, 28 Apr 2023 10:23:09 +0300
Subject: [PATCH 11/11] Remove `global` keyword as it alters usage
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Éric
---
 warehouse/forklift/legacy.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
index cc0aae045494..b5ec696094b1 100644
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -798,7 +798,6 @@ def extract_wheel_metadata(path):
 
     See https://www.python.org/dev/peps/pep-0658/#specification
     """
-    global _wheel_file_re
     filename = os.path.basename(path)
     namever = _wheel_file_re.match(filename).group("namever")
     metafile = namever + ".dist-info/METADATA"
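
For illustration, a minimal, runnable sketch of the scheme this series implements, using only the standard library: it builds a wheel in memory the way the `_get_whl_testdata` helper from PATCH 03 does, reads METADATA back out of it as `extract_wheel_metadata()` from PATCH 06 does, and derives `metadata_hash` as PATCH 07 does (base64 of a 128-bit blake2s digest). The package name, version, and METADATA body are made-up test values, not anything prescribed by the patches.

import base64
import hashlib
import io
import zipfile

# Build an in-memory wheel containing only a METADATA file
# (hypothetical name/version, mirroring _get_whl_testdata).
name, version = "fake_package", "1.0"
buf = io.BytesIO()
with zipfile.ZipFile(buf, mode="w") as zfp:
    zfp.writestr(f"{name}-{version}.dist-info/METADATA", "Fake metadata")

# Read METADATA back out of the wheel; per PEP 658 the .dist-info
# directory name is derived from the wheel's name and version.
with zipfile.ZipFile(io.BytesIO(buf.getvalue())) as zfp:
    wheel_metadata = zfp.read(f"{name}-{version}.dist-info/METADATA")

# Derive metadata_hash as PATCH 07 does: base64 of a 128-bit (16-byte)
# blake2s digest of the raw METADATA bytes.
metadata_hash = base64.b64encode(
    hashlib.blake2s(wheel_metadata, digest_size=128 // 8).digest()
).decode("utf-8")

print(metadata_hash)  # a 24-character base64 string ending in "=="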