From c6f8cd4ac985d4215ad6419116de84c1041813d6 Mon Sep 17 00:00:00 2001 From: Olivier Grisel Date: Fri, 13 Oct 2023 11:28:53 +0200 Subject: [PATCH] Official support for python 3.12 and code clean-up after EOLed Python 3.6 and 3.7. (#517) Co-authored-by: Hugo van Kemenade --- .github/scripts/flake8_diff.sh | 95 -- .github/workflows/testing.yml | 60 +- CHANGES.md | 8 +- README.md | 19 +- ci/install_coverage_subprocess_pth.py | 2 +- cloudpickle/__init__.py | 17 +- cloudpickle/cloudpickle.py | 1206 ++++++++++++----- cloudpickle/cloudpickle_fast.py | 870 +----------- cloudpickle/compat.py | 18 - dev-requirements.txt | 6 +- setup.py | 12 +- tests/cloudpickle_file_test.py | 6 +- tests/cloudpickle_test.py | 244 ++-- tests/cloudpickle_testpkg/setup.py | 4 +- tests/generate_old_pickles.py | 4 + .../cpython_310/class_with_type_hints.pkl | Bin 0 -> 828 bytes .../cpython_310/cloudpickle_version.txt | 1 + .../cpython_310/function_with_type_hints.pkl | Bin 0 -> 999 bytes .../cpython_310/nested_function.pkl | Bin 0 -> 590 bytes .../old_pickles/cpython_310/simple_class.pkl | Bin 0 -> 687 bytes tests/old_pickles/cpython_310/simple_enum.pkl | Bin 0 -> 279 bytes tests/old_pickles/cpython_310/simple_func.pkl | Bin 0 -> 510 bytes .../old_pickles/cpython_310/simple_module.pkl | Bin 0 -> 515 bytes .../cpython_311/class_with_type_hints.pkl | Bin 0 -> 854 bytes .../cpython_311/cloudpickle_version.txt | 1 + .../cpython_311/function_with_type_hints.pkl | Bin 0 -> 1094 bytes .../cpython_311/nested_function.pkl | Bin 0 -> 611 bytes .../old_pickles/cpython_311/simple_class.pkl | Bin 0 -> 713 bytes tests/old_pickles/cpython_311/simple_enum.pkl | Bin 0 -> 286 bytes tests/old_pickles/cpython_311/simple_func.pkl | Bin 0 -> 528 bytes .../old_pickles/cpython_311/simple_module.pkl | Bin 0 -> 533 bytes .../cpython_36/class_with_type_hints.pkl | Bin 944 -> 0 bytes .../cpython_36/function_with_type_hints.pkl | Bin 1124 -> 0 bytes .../cpython_36/nested_function.pkl | Bin 513 -> 0 bytes tests/old_pickles/cpython_36/simple_class.pkl | Bin 681 -> 0 bytes tests/old_pickles/cpython_36/simple_enum.pkl | Bin 225 -> 0 bytes tests/old_pickles/cpython_36/simple_func.pkl | Bin 485 -> 0 bytes .../old_pickles/cpython_36/simple_module.pkl | Bin 473 -> 0 bytes .../cpython_37/class_with_type_hints.pkl | Bin 822 -> 0 bytes .../cpython_37/function_with_type_hints.pkl | Bin 991 -> 0 bytes .../cpython_37/nested_function.pkl | Bin 513 -> 0 bytes tests/old_pickles/cpython_37/simple_class.pkl | Bin 681 -> 0 bytes tests/old_pickles/cpython_37/simple_enum.pkl | Bin 225 -> 0 bytes tests/old_pickles/cpython_37/simple_func.pkl | Bin 485 -> 0 bytes .../old_pickles/cpython_37/simple_module.pkl | Bin 473 -> 0 bytes .../cpython_38/class_with_type_hints.pkl | Bin 846 -> 814 bytes .../cpython_38/cloudpickle_version.txt | 1 + .../cpython_38/function_with_type_hints.pkl | Bin 1017 -> 985 bytes .../cpython_38/nested_function.pkl | Bin 590 -> 560 bytes tests/old_pickles/cpython_38/simple_class.pkl | Bin 705 -> 673 bytes tests/old_pickles/cpython_38/simple_enum.pkl | Bin 279 -> 279 bytes tests/old_pickles/cpython_38/simple_func.pkl | Bin 533 -> 501 bytes .../cpython_39/class_with_type_hints.pkl | Bin 0 -> 821 bytes .../cpython_39/cloudpickle_version.txt | 1 + .../cpython_39/function_with_type_hints.pkl | Bin 0 -> 992 bytes .../cpython_39/nested_function.pkl | Bin 0 -> 567 bytes tests/old_pickles/cpython_39/simple_class.pkl | Bin 0 -> 680 bytes tests/old_pickles/cpython_39/simple_enum.pkl | Bin 0 -> 279 bytes tests/old_pickles/cpython_39/simple_func.pkl | Bin 0 -> 508 
bytes .../old_pickles/cpython_39/simple_module.pkl | Bin 0 -> 513 bytes .../pypy_36/class_with_type_hints.pkl | Bin 927 -> 0 bytes .../pypy_36/function_with_type_hints.pkl | Bin 1111 -> 0 bytes tests/old_pickles/pypy_36/simple_class.pkl | Bin 669 -> 0 bytes tests/old_pickles/pypy_36/simple_enum.pkl | Bin 225 -> 0 bytes tests/old_pickles/pypy_36/simple_func.pkl | Bin 454 -> 0 bytes tests/old_pickles/pypy_36/simple_module.pkl | Bin 473 -> 0 bytes tests/test_backward_compat.py | 5 +- tests/testutils.py | 2 +- tox.ini | 4 +- 69 files changed, 1021 insertions(+), 1565 deletions(-) delete mode 100644 .github/scripts/flake8_diff.sh delete mode 100644 cloudpickle/compat.py create mode 100644 tests/old_pickles/cpython_310/class_with_type_hints.pkl create mode 100644 tests/old_pickles/cpython_310/cloudpickle_version.txt create mode 100644 tests/old_pickles/cpython_310/function_with_type_hints.pkl create mode 100644 tests/old_pickles/cpython_310/nested_function.pkl create mode 100644 tests/old_pickles/cpython_310/simple_class.pkl create mode 100644 tests/old_pickles/cpython_310/simple_enum.pkl create mode 100644 tests/old_pickles/cpython_310/simple_func.pkl create mode 100644 tests/old_pickles/cpython_310/simple_module.pkl create mode 100644 tests/old_pickles/cpython_311/class_with_type_hints.pkl create mode 100644 tests/old_pickles/cpython_311/cloudpickle_version.txt create mode 100644 tests/old_pickles/cpython_311/function_with_type_hints.pkl create mode 100644 tests/old_pickles/cpython_311/nested_function.pkl create mode 100644 tests/old_pickles/cpython_311/simple_class.pkl create mode 100644 tests/old_pickles/cpython_311/simple_enum.pkl create mode 100644 tests/old_pickles/cpython_311/simple_func.pkl create mode 100644 tests/old_pickles/cpython_311/simple_module.pkl delete mode 100644 tests/old_pickles/cpython_36/class_with_type_hints.pkl delete mode 100644 tests/old_pickles/cpython_36/function_with_type_hints.pkl delete mode 100644 tests/old_pickles/cpython_36/nested_function.pkl delete mode 100644 tests/old_pickles/cpython_36/simple_class.pkl delete mode 100644 tests/old_pickles/cpython_36/simple_enum.pkl delete mode 100644 tests/old_pickles/cpython_36/simple_func.pkl delete mode 100644 tests/old_pickles/cpython_36/simple_module.pkl delete mode 100644 tests/old_pickles/cpython_37/class_with_type_hints.pkl delete mode 100644 tests/old_pickles/cpython_37/function_with_type_hints.pkl delete mode 100644 tests/old_pickles/cpython_37/nested_function.pkl delete mode 100644 tests/old_pickles/cpython_37/simple_class.pkl delete mode 100644 tests/old_pickles/cpython_37/simple_enum.pkl delete mode 100644 tests/old_pickles/cpython_37/simple_func.pkl delete mode 100644 tests/old_pickles/cpython_37/simple_module.pkl create mode 100644 tests/old_pickles/cpython_38/cloudpickle_version.txt create mode 100644 tests/old_pickles/cpython_39/class_with_type_hints.pkl create mode 100644 tests/old_pickles/cpython_39/cloudpickle_version.txt create mode 100644 tests/old_pickles/cpython_39/function_with_type_hints.pkl create mode 100644 tests/old_pickles/cpython_39/nested_function.pkl create mode 100644 tests/old_pickles/cpython_39/simple_class.pkl create mode 100644 tests/old_pickles/cpython_39/simple_enum.pkl create mode 100644 tests/old_pickles/cpython_39/simple_func.pkl create mode 100644 tests/old_pickles/cpython_39/simple_module.pkl delete mode 100644 tests/old_pickles/pypy_36/class_with_type_hints.pkl delete mode 100644 tests/old_pickles/pypy_36/function_with_type_hints.pkl delete mode 100644 
tests/old_pickles/pypy_36/simple_class.pkl delete mode 100644 tests/old_pickles/pypy_36/simple_enum.pkl delete mode 100644 tests/old_pickles/pypy_36/simple_func.pkl delete mode 100644 tests/old_pickles/pypy_36/simple_module.pkl diff --git a/.github/scripts/flake8_diff.sh b/.github/scripts/flake8_diff.sh deleted file mode 100644 index ced33f722..000000000 --- a/.github/scripts/flake8_diff.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/bash - -# Adapted from -# scikit-learn/scikit-learn/blob/master/build_tools/circle/linting.sh -# This script is used in github-actions to check that PRs do not add obvious -# flake8 violations. It runs flake8 --diff on the diff between the branch and -# the common ancestor. - -set -e -# pipefail is necessary to propagate exit codes -set -o pipefail - -PROJECT=cloudpipe/cloudpickle -PROJECT_URL=https://github.com/$PROJECT.git - -GIT_ROOT=$(git rev-parse --show-toplevel) -cd $GIT_ROOT - -# Find the remote with the project name (upstream in most cases) -REMOTE=$(git remote -v | grep $PROJECT | cut -f1 | head -1 || echo '') - -# Add a temporary remote if needed. For example this is necessary when -# github-actions is configured to run in a fork. In this case 'origin' is the -# fork and not the reference repo we want to diff against. -if [[ -z "$REMOTE" ]]; then - TMP_REMOTE=tmp_reference_upstream - REMOTE=$TMP_REMOTE - git remote add $REMOTE $PROJECT_URL -fi - -echo "Remotes:" -echo '--------------------------------------------------------------------------------' -git remote --verbose - -# find the common ancestor between $LOCAL_BRANCH_REF and $REMOTE/master -if [[ -z "$LOCAL_BRANCH_REF" ]]; then - LOCAL_BRANCH_REF=$(git rev-parse --abbrev-ref HEAD) -fi -echo -e "\nLast 2 commits in $LOCAL_BRANCH_REF:" -echo '--------------------------------------------------------------------------------' -git --no-pager log -2 $LOCAL_BRANCH_REF - -REMOTE_MASTER_REF="$REMOTE/master" -# Make sure that $REMOTE_MASTER_REF is a valid reference -echo -e "\nFetching $REMOTE_MASTER_REF" -echo '--------------------------------------------------------------------------------' -git fetch $REMOTE master:refs/remotes/$REMOTE_MASTER_REF -LOCAL_BRANCH_SHORT_HASH=$(git rev-parse --short $LOCAL_BRANCH_REF) -REMOTE_MASTER_SHORT_HASH=$(git rev-parse --short $REMOTE_MASTER_REF) - -COMMIT=$(git merge-base $LOCAL_BRANCH_REF $REMOTE_MASTER_REF) || \ - echo "No common ancestor found for $(git show $LOCAL_BRANCH_REF -q) and $(git show $REMOTE_MASTER_REF -q)" - -if [ -z "$COMMIT" ]; then - exit 1 -fi - -COMMIT_SHORT_HASH=$(git rev-parse --short $COMMIT) - -echo -e "\nCommon ancestor between $LOCAL_BRANCH_REF ($LOCAL_BRANCH_SHORT_HASH)"\ - "and $REMOTE_MASTER_REF ($REMOTE_MASTER_SHORT_HASH) is $COMMIT_SHORT_HASH:" -echo '--------------------------------------------------------------------------------' -git --no-pager show --no-patch $COMMIT_SHORT_HASH - -COMMIT_RANGE="$COMMIT_SHORT_HASH..$LOCAL_BRANCH_SHORT_HASH" - -if [[ -n "$TMP_REMOTE" ]]; then - git remote remove $TMP_REMOTE -fi - -echo -e '\nRunning flake8 on the diff in the range' "$COMMIT_RANGE" \ - "($(git rev-list $COMMIT_RANGE | wc -l) commit(s)):" -echo '--------------------------------------------------------------------------------' - -MODIFIED_FILES="$(git diff --name-only $COMMIT_RANGE || echo "no_match")" - -check_files() { - files="$1" - shift - options="$*" - if [ -n "$files" ]; then - # Conservative approach: diff without context (--unified=0) so that code - # that was not changed does not create failures - # The github terminal is 127 
characters wide - git diff --unified=0 $COMMIT_RANGE -- $files | flake8 --diff --show-source \ - --max-complexity=40 --max-line-length=127 $options - fi -} - -if [[ "$MODIFIED_FILES" == "no_match" ]] || [[ "$MODIFIED_FILES" == "" ]]; then - echo "No file has been modified" -else - check_files "$(echo "$MODIFIED_FILES" | grep -v ^examples)" - echo -e "No problem detected by flake8\n" -fi diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index eeedf68fa..c9b7981bf 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -10,42 +10,34 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Set up Python 3.11 - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: 3.11 - - name: Install flake8 + - name: Install ruff shell: bash run: | python -V - python -m pip install "flake8<6.0.0" - # XXX: flake8 --diff is deprecated, broken and was removed - # in flake8 6.0.0: we should instead black-ify the full repo - # and run a full flake8 check at each PR. - - name: Run flake8 on diff with upstream/master + python -m pip install ruff + - name: Run ruff shell: bash run: | - bash ./.github/scripts/flake8_diff.sh + ruff . build: strategy: matrix: os: [ubuntu-latest, windows-latest, macos-latest] - # TODO: add "3.12-dev" to the list - python_version: [3.7, 3.8, 3.9, "3.10", "3.11", "pypy-3.9"] + python_version: ["3.8", "3.9", "3.10", "3.11", "3.12", "pypy-3.9"] exclude: # Do not test all minor versions on all platforms, especially if they # are not the oldest/newest supported versions - - os: windows-latest - python_version: 3.7 - os: windows-latest python_version: 3.8 # as of 4/02/2020, psutil won't build under PyPy + Windows - os: windows-latest python_version: "pypy-3.9" - - os: macos-latest - python_version: 3.7 - os: macos-latest python_version: 3.8 - os: macos-latest @@ -56,11 +48,12 @@ jobs: runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python_version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python_version }} + allow-prereleases: true - name: Install project and dependencies shell: bash run: | @@ -72,11 +65,6 @@ jobs: - name: Display Python version shell: bash run: python -c "import sys; print(sys.version)" - - name: Look for syntax errors/undefined names - shell: bash - run: | - python -m flake8 . 
--count --verbose --select=E901,E999,F821,F822,F823 \ - --show-source --statistics - name: Test with pytest shell: bash run: | @@ -85,7 +73,7 @@ coverage combine --append coverage xml -i - name: Publish coverage results - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} file: ./coverage.xml @@ -101,9 +89,9 @@ matrix: python_version: ["3.10"] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python_version }} - name: Install project and dependencies @@ -119,9 +107,11 @@ # FIXME ipv6-related failures on Ubuntu github actions CI # https://github.com/dask/distributed/issues/4514 export DISABLE_IPV6=1 - # test_decide_worker_coschedule_order_neighbors is skipped because of: - # https://github.com/dask/distributed/issues/8255 - export PYTEST_ADDOPTS=("-m" "not avoid_ci" "-k" "not test_decide_worker_coschedule_order_neighbors") + # - test_decide_worker_coschedule_order_neighbors is skipped because of: + # https://github.com/dask/distributed/issues/8255 + # - test_client_worker is skipped because it's a time-related test that + # randomly fails on CI but seems unrelated to cloudpickle. + export PYTEST_ADDOPTS=("-m" "not avoid_ci" "-k" "not test_decide_worker_coschedule_order_neighbors and not test_client_worker") source ./.github/scripts/test_downstream_project.sh joblib-downstream-build: @@ -135,9 +125,9 @@ matrix: python_version: ["3.10"] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python_version }} - name: Install project and dependencies @@ -163,9 +153,9 @@ matrix: python_version: ["3.10"] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python_version }} - name: Install downstream project and dependencies @@ -188,9 +178,9 @@ matrix: python_version: ["3.11"] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python_version }} - name: Install project and tests dependencies @@ -204,7 +194,7 @@ run: | RAY_PACKAGE_DIR=$(python -c "import os, ray; print(os.path.dirname(ray.__file__), flush=True)") cp cloudpickle/cloudpickle.py $RAY_PACKAGE_DIR/cloudpickle/cloudpickle.py - cp cloudpickle/compat.py $RAY_PACKAGE_DIR/cloudpickle/compat.py + rm -rf $RAY_PACKAGE_DIR/cloudpickle/compat.py cp cloudpickle/cloudpickle_fast.py $RAY_PACKAGE_DIR/cloudpickle/cloudpickle_fast.py - name: Fetch the Ray test suite from github run: | diff --git a/CHANGES.md b/CHANGES.md index c037af409..2041d0dbb 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,12 @@ -2.3.0 (development) +3.0.0 (development) =================== +- Officially support Python 3.12 and drop support for Python 3.6 and 3.7. + Dropping support for older Python versions made it possible to simplify the + code base significantly, hopefully making it easier to contribute to and + maintain the project. + ([PR #515](https://github.com/cloudpipe/cloudpickle/pull/515)) + - Fix pickling of dataclasses and their instances.
([issue #386](https://github.com/cloudpipe/cloudpickle/issues/386), [PR #513](https://github.com/cloudpipe/cloudpickle/pull/513)) diff --git a/README.md b/README.md index 701b3d1dc..12df59038 100644 --- a/README.md +++ b/README.md @@ -84,12 +84,13 @@ that pickles such constructs **by value**. Another case where the importability assumption is expected to break is when developing a module in a distributed execution environment: the worker processes may not have access to the said module, for example if they live on a -different machine than the process in which the module is being developed. -By itself, `cloudpickle` cannot detect such "locally importable" modules and -switch to serialization by value; instead, it relies on its default mode, -which is serialization by reference. However, since `cloudpickle 2.0.0`, one -can explicitly specify modules for which serialization by value should be used, -using the `register_pickle_by_value(module)`/`/unregister_pickle(module)` API: +different machine than the process in which the module is being developed. By +itself, `cloudpickle` cannot detect such "locally importable" modules and +switch to serialization by value; instead, it relies on its default mode, which +is serialization by reference. However, since `cloudpickle 2.0.0`, one can +explicitly specify modules for which serialization by value should be used, +using the +`register_pickle_by_value(module)`/`unregister_pickle_by_value(module)` API: ```python >>> import cloudpickle @@ -130,14 +131,14 @@ Running the tests or alternatively for a specific environment: - tox -e py37 + tox -e py312 -- With `py.test` to only run the tests for your current version of +- With `pytest` to only run the tests for your current version of Python: pip install -r dev-requirements.txt - PYTHONPATH='.:tests' py.test + PYTHONPATH='.:tests' pytest History ------- diff --git a/ci/install_coverage_subprocess_pth.py b/ci/install_coverage_subprocess_pth.py index 6a273e4c3..927820b97 100644 --- a/ci/install_coverage_subprocess_pth.py +++ b/ci/install_coverage_subprocess_pth.py @@ -5,7 +5,7 @@ import os.path as op from sysconfig import get_path -FILE_CONTENT = u"""\ +FILE_CONTENT = """\ import coverage; coverage.process_startup() """ diff --git a/cloudpickle/__init__.py b/cloudpickle/__init__.py index 48f8993f6..c88e58658 100644 --- a/cloudpickle/__init__.py +++ b/cloudpickle/__init__.py @@ -1,8 +1,15 @@ from cloudpickle.cloudpickle import * # noqa -from cloudpickle.cloudpickle_fast import CloudPickler, dumps, dump # noqa -# Conform to the convention used by python serialization libraries, which -# expose their Pickler subclass at top-level under the "Pickler" name. -Pickler = CloudPickler +__version__ = "3.0.0.dev0" -__version__ = '2.3.0.dev0' +__all__ = [ # noqa + "__version__", + "Pickler", + "CloudPickler", + "dumps", + "loads", + "dump", + "load", + "register_pickle_by_value", + "unregister_pickle_by_value", +] diff --git a/cloudpickle/cloudpickle.py b/cloudpickle/cloudpickle.py index 317be6915..b4c9f44fb 100644 --- a/cloudpickle/cloudpickle.py +++ b/cloudpickle/cloudpickle.py @@ -1,16 +1,25 @@ -""" -This class is defined to override standard pickle functionality +"""Pickler class to extend the standard pickle.Pickler functionality + +The main objective is to make it natural to perform distributed computing on +clusters (such as PySpark, Dask, Ray...) with interactively defined code +(functions, classes, ...) written in notebooks or console.
-The goals of it follow: --Serialize lambdas and nested functions to compiled byte code --Deal with main module correctly --Deal with other non-serializable objects +In particular this pickler adds the following features: +- serialize interactively-defined or locally-defined functions, classes, + enums, typevars, lambdas and nested functions to compiled byte code; +- deal with some other non-serializable objects in an ad-hoc manner where + applicable. -It does not include an unpickler, as standard python unpickling suffices. +This pickler is therefore meant to be used for the communication between short +lived Python processes running the same version of Python and libraries. In +particular, it is not meant to be used for long term storage of Python objects. + +It does not include an unpickler, as standard Python unpickling suffices. This module was extracted from the `cloud` package, developed by `PiCloud, Inc. `_. +Copyright (c) 2012-now, CloudPickle developers and contributors. Copyright (c) 2012, Regents of the University of California. Copyright (c) 2009 `PiCloud, Inc. `_. All rights reserved. @@ -41,40 +50,34 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ +import _collections_abc +from collections import ChainMap, OrderedDict +import abc import builtins +import copyreg +import dataclasses import dis +from enum import Enum +import io +import itertools +import logging import opcode +import pickle +from pickle import _getattribute import platform +import struct import sys -import types -import weakref -import uuid import threading +import types import typing +import uuid import warnings +import weakref -from .compat import pickle -from collections import OrderedDict -from typing import ClassVar, Generic, Union, Tuple, Callable -from pickle import _getattribute -from importlib._bootstrap import _find_spec - -try: # pragma: no branch - import typing_extensions as _typing_extensions - from typing_extensions import Literal, Final -except ImportError: - _typing_extensions = Literal = Final = None - -if sys.version_info >= (3, 8): - from types import CellType -else: - def f(): - a = 1 - - def g(): - return a - return g - CellType = type(f().__closure__[0]) +# The following import is required to be imported in the cloudpickle +# namespace to be able to load pickle files generated with older versions of +# cloudpickle. See: tests/test_backward_compat.py +from types import CellType # noqa: F401 # cloudpickle is meant for inter process communication: we expect all @@ -201,20 +204,7 @@ def _whichmodule(obj, name): - Errors arising during module introspection are ignored, as those errors are considered unwanted side effects. """ - if sys.version_info[:2] < (3, 7) and isinstance(obj, typing.TypeVar): # pragma: no branch # noqa - # Workaround bug in old Python versions: prior to Python 3.7, - # T.__module__ would always be set to "typing" even when the TypeVar T - # would be defined in a different module. - if name is not None and getattr(typing, name, None) is obj: - # Built-in TypeVar defined in typing such as AnyStr - return 'typing' - else: - # User defined or third-party TypeVar: __module__ attribute is - # irrelevant, thus trigger a exhaustive search for obj in all - # modules. 
- module_name = None - else: - module_name = getattr(obj, '__module__', None) + module_name = getattr(obj, '__module__', None) if module_name is not None: return module_name @@ -316,22 +306,20 @@ def _lookup_module_and_qualname(obj, name=None): def _extract_code_globals(co): - """ - Find all globals names read or written to by codeblock co - """ + """Find all global names read or written to by codeblock co.""" out_names = _extract_code_globals_cache.get(co) if out_names is None: # We use a dict with None values instead of a set to get a - # deterministic order (assuming Python 3.6+) and avoid introducing - # non-deterministic pickle bytes as a results. + # deterministic order and avoid introducing non-deterministic pickle + # bytes as a result. out_names = {name: None for name in _walk_global_ops(co)} - # Declaring a function inside another one using the "def ..." - # syntax generates a constant code object corresponding to the one - # of the nested function's As the nested function may itself need - # global variables, we need to introspect its code, extract its - # globals, (look for code object in it's co_consts attribute..) and - # add the result to code_globals + # Declaring a function inside another one using the "def ..." syntax + # generates a constant code object corresponding to the one of the + # nested function. As the nested function may itself need global + # variables, we need to introspect its code, extract its globals (look + # for code objects in its co_consts attribute) and add the result to + # code_globals if co.co_consts: for const in co.co_consts: if isinstance(const, types.CodeType): @@ -343,8 +331,7 @@ def _extract_code_globals(co): def _find_imported_submodules(code, top_level_dependencies): - """ - Find currently imported submodules used by a function. + """Find currently imported submodules used by a function. Submodules used by a function need to be detected and referenced for the function to work correctly at depickling time. Because submodules can be @@ -389,101 +376,6 @@ def func(): return subimports -def cell_set(cell, value): - """Set the value of a closure cell. - - The point of this function is to set the cell_contents attribute of a cell - after its creation. This operation is necessary in case the cell contains a - reference to the function the cell belongs to, as when calling the - function's constructor - ``f = types.FunctionType(code, globals, name, argdefs, closure)``, - closure will not be able to contain the yet-to-be-created f. - - In Python3.7, cell_contents is writeable, so setting the contents of a cell - can be done simply using - >>> cell.cell_contents = value - - In earlier Python3 versions, the cell_contents attribute of a cell is read - only, but this limitation can be worked around by leveraging the Python 3 - ``nonlocal`` keyword. - - In Python2 however, this attribute is read only, and there is no - ``nonlocal`` keyword. For this reason, we need to come up with more - complicated hacks to set this attribute. - - The chosen approach is to create a function with a STORE_DEREF opcode, - which sets the content of a closure variable. Typically: - - >>> def inner(value): - ... lambda: cell # the lambda makes cell a closure - ... cell = value # cell is a closure, so this triggers a STORE_DEREF - - (Note that in Python2, A STORE_DEREF can never be triggered from an inner - function. The function g for example here - >>> def f(var): - ... def g(): - ... var += 1 - ...
return g - - will not modify the closure variable ``var```inplace, but instead try to - load a local variable var and increment it. As g does not assign the local - variable ``var`` any initial value, calling f(1)() will fail at runtime.) - - Our objective is to set the value of a given cell ``cell``. So we need to - somewhat reference our ``cell`` object into the ``inner`` function so that - this object (and not the smoke cell of the lambda function) gets affected - by the STORE_DEREF operation. - - In inner, ``cell`` is referenced as a cell variable (an enclosing variable - that is referenced by the inner function). If we create a new function - cell_set with the exact same code as ``inner``, but with ``cell`` marked as - a free variable instead, the STORE_DEREF will be applied on its closure - - ``cell``, which we can specify explicitly during construction! The new - cell_set variable thus actually sets the contents of a specified cell! - - Note: we do not make use of the ``nonlocal`` keyword to set the contents of - a cell in early python3 versions to limit possible syntax errors in case - test and checker libraries decide to parse the whole file. - """ - - if sys.version_info[:2] >= (3, 7): # pragma: no branch - cell.cell_contents = value - else: - _cell_set = types.FunctionType( - _cell_set_template_code, {}, '_cell_set', (), (cell,),) - _cell_set(value) - - -def _make_cell_set_template_code(): - def _cell_set_factory(value): - lambda: cell - cell = value - - co = _cell_set_factory.__code__ - - _cell_set_template_code = types.CodeType( - co.co_argcount, - co.co_kwonlyargcount, # Python 3 only argument - co.co_nlocals, - co.co_stacksize, - co.co_flags, - co.co_code, - co.co_consts, - co.co_names, - co.co_varnames, - co.co_filename, - co.co_name, - co.co_firstlineno, - co.co_lnotab, - co.co_cellvars, # co_freevars is initialized with co_cellvars - (), # co_cellvars is made empty - ) - return _cell_set_template_code - - -if sys.version_info[:2] < (3, 7): - _cell_set_template_code = _make_cell_set_template_code() - # relevant opcodes STORE_GLOBAL = opcode.opmap['STORE_GLOBAL'] DELETE_GLOBAL = opcode.opmap['DELETE_GLOBAL'] @@ -509,9 +401,7 @@ def _builtin_type(name): def _walk_global_ops(code): - """ - Yield referenced name for all global-referencing instructions in *code*. - """ + """Yield referenced names for global-referencing instructions in code.""" for instr in dis.get_instructions(code): op = instr.opcode if op in GLOBAL_OPS: @@ -519,7 +409,7 @@ def _walk_global_ops(code): def _extract_class_dict(cls): - """Retrieve a copy of the dict of a class without the inherited methods""" + """Retrieve a copy of the dict of a class without the inherited methods.""" clsdict = dict(cls.__dict__) # copy dict proxy to a dict if len(cls.__bases__) == 1: inherited_dict = cls.__bases__[0].__dict__ @@ -540,88 +430,17 @@ def _extract_class_dict(cls): return clsdict -if sys.version_info[:2] < (3, 7): # pragma: no branch - def _is_parametrized_type_hint(obj): - # This is very cheap but might generate false positives. So try to - # narrow it down is good as possible.
- type_module = getattr(type(obj), '__module__', None) - from_typing_extensions = type_module == 'typing_extensions' - from_typing = type_module == 'typing' - - # general typing Constructs - is_typing = getattr(obj, '__origin__', None) is not None - - # typing_extensions.Literal - is_literal = ( - (getattr(obj, '__values__', None) is not None) - and from_typing_extensions - ) - - # typing_extensions.Final - is_final = ( - (getattr(obj, '__type__', None) is not None) - and from_typing_extensions - ) - - # typing.ClassVar - is_classvar = ( - (getattr(obj, '__type__', None) is not None) and from_typing - ) - - # typing.Union/Tuple for old Python 3.5 - is_union = getattr(obj, '__union_params__', None) is not None - is_tuple = getattr(obj, '__tuple_params__', None) is not None - is_callable = ( - getattr(obj, '__result__', None) is not None and - getattr(obj, '__args__', None) is not None - ) - return any((is_typing, is_literal, is_final, is_classvar, is_union, - is_tuple, is_callable)) - - def _create_parametrized_type_hint(origin, args): - return origin[args] -else: - _is_parametrized_type_hint = None - _create_parametrized_type_hint = None - - -def parametrized_type_hint_getinitargs(obj): - # The distorted type check sematic for typing construct becomes: - # ``type(obj) is type(TypeHint)``, which means "obj is a - # parametrized TypeHint" - if type(obj) is type(Literal): # pragma: no branch - initargs = (Literal, obj.__values__) - elif type(obj) is type(Final): # pragma: no branch - initargs = (Final, obj.__type__) - elif type(obj) is type(ClassVar): - initargs = (ClassVar, obj.__type__) - elif type(obj) is type(Generic): - initargs = (obj.__origin__, obj.__args__) - elif type(obj) is type(Union): - initargs = (Union, obj.__args__) - elif type(obj) is type(Tuple): - initargs = (Tuple, obj.__args__) - elif type(obj) is type(Callable): - (*args, result) = obj.__args__ - if len(args) == 1 and args[0] is Ellipsis: - args = Ellipsis - else: - args = list(args) - initargs = (Callable, (args, result)) - else: # pragma: no cover - raise pickle.PicklingError( - f"Cloudpickle Error: Unknown type {type(obj)}" - ) - return initargs - - -# Tornado support - def is_tornado_coroutine(func): - """ - Return whether *func* is a Tornado coroutine function. + """Return whether `func` is a Tornado coroutine function. + Running coroutines are not supported. """ + warnings.warn( + "is_tornado_coroutine is deprecated in cloudpickle 3.0 and will be " + "removed in cloudpickle 4.0. Use tornado.gen.is_coroutine_function " + "directly instead.", + category=DeprecationWarning, + ) if 'tornado.gen' not in sys.modules: return False gen = sys.modules['tornado.gen'] @@ -631,16 +450,6 @@ def is_tornado_coroutine(func): return gen.is_coroutine_function(func) -def _rebuild_tornado_coroutine(func): - from tornado import gen - return gen.coroutine(func) - - -# including pickles unloading functions in this namespace -load = pickle.load -loads = pickle.loads - - def subimport(name): # We cannot do simply: `return __import__(name)`: Indeed, if ``name`` is # the name of a submodule, __import__ will return the top-level root module @@ -657,19 +466,11 @@ def dynamic_subimport(name, vars): return mod -def _gen_ellipsis(): - return Ellipsis - - -def _gen_not_implemented(): - return NotImplemented - - def _get_cell_contents(cell): try: return cell.cell_contents except ValueError: - # sentinel used by ``_fill_function`` which will leave the cell empty + # Handle empty cells explicitly with a sentinel value. 
return _empty_cell_value @@ -691,78 +492,12 @@ def instance(cls): @instance class _empty_cell_value: - """sentinel for empty closures - """ + """Sentinel for empty closures.""" @classmethod def __reduce__(cls): return cls.__name__ -def _fill_function(*args): - """Fills in the rest of function data into the skeleton function object - - The skeleton itself is create by _make_skel_func(). - """ - if len(args) == 2: - func = args[0] - state = args[1] - elif len(args) == 5: - # Backwards compat for cloudpickle v0.4.0, after which the `module` - # argument was introduced - func = args[0] - keys = ['globals', 'defaults', 'dict', 'closure_values'] - state = dict(zip(keys, args[1:])) - elif len(args) == 6: - # Backwards compat for cloudpickle v0.4.1, after which the function - # state was passed as a dict to the _fill_function it-self. - func = args[0] - keys = ['globals', 'defaults', 'dict', 'module', 'closure_values'] - state = dict(zip(keys, args[1:])) - else: - raise ValueError(f'Unexpected _fill_value arguments: {args!r}') - - # - At pickling time, any dynamic global variable used by func is - # serialized by value (in state['globals']). - # - At unpickling time, func's __globals__ attribute is initialized by - # first retrieving an empty isolated namespace that will be shared - # with other functions pickled from the same original module - # by the same CloudPickler instance and then updated with the - # content of state['globals'] to populate the shared isolated - # namespace with all the global variables that are specifically - # referenced for this function. - func.__globals__.update(state['globals']) - - func.__defaults__ = state['defaults'] - func.__dict__ = state['dict'] - if 'annotations' in state: - func.__annotations__ = state['annotations'] - if 'doc' in state: - func.__doc__ = state['doc'] - if 'name' in state: - func.__name__ = state['name'] - if 'module' in state: - func.__module__ = state['module'] - if 'qualname' in state: - func.__qualname__ = state['qualname'] - if 'kwdefaults' in state: - func.__kwdefaults__ = state['kwdefaults'] - # _cloudpickle_subimports is a set of submodules that must be loaded for - # the pickled function to work correctly at unpickling time. Now that these - # submodules are depickled (hence imported), they can be removed from the - # object's state (the object state only served as a reference holder to - # these submodules) - if '_cloudpickle_submodules' in state: - state.pop('_cloudpickle_submodules') - - cells = func.__closure__ - if cells is not None: - for cell, value in zip(cells, state['closure_values']): - if value is not _empty_cell_value: - cell_set(cell, value) - - return func - - def _make_function(code, globals, name, argdefs, closure): # Setting __builtins__ in globals is needed for nogil CPython. globals["__builtins__"] = __builtins__ @@ -781,37 +516,10 @@ def _make_empty_cell(): def _make_cell(value=_empty_cell_value): cell = _make_empty_cell() if value is not _empty_cell_value: - cell_set(cell, value) + cell.cell_contents = value return cell -def _make_skel_func(code, cell_count, base_globals=None): - """ Creates a skeleton function object that contains just the provided - code and the correct number of cells in func_closure. All other - func attributes (e.g. func_globals) are empty. - """ - # This function is deprecated and should be removed in cloudpickle 1.7 - warnings.warn( - "A pickle file created using an old (<=1.4.1) version of cloudpickle " - "is currently being loaded. 
This is not supported by cloudpickle and " - "will break in cloudpickle 1.7", category=UserWarning - ) - # This is backward-compatibility code: for cloudpickle versions between - # 0.5.4 and 0.7, base_globals could be a string or None. base_globals - # should now always be a dictionary. - if base_globals is None or isinstance(base_globals, str): - base_globals = {} - - base_globals['__builtins__'] = __builtins__ - - closure = ( - tuple(_make_empty_cell() for _ in range(cell_count)) - if cell_count >= 0 else - None - ) - return types.FunctionType(code, base_globals, None, None, closure) - - def _make_skeleton_class(type_constructor, name, bases, type_kwargs, class_tracker_id, extra): """Build dynamic class with an empty __dict__ to be filled once memoized @@ -831,24 +539,6 @@ class id will also reuse this class definition. return _lookup_class_or_track(class_tracker_id, skeleton_class) -def _rehydrate_skeleton_class(skeleton_class, class_dict): - """Put attributes from `class_dict` back on `skeleton_class`. - - See CloudPickler.save_dynamic_class for more info. - """ - registry = None - for attrname, attr in class_dict.items(): - if attrname == "_abc_impl": - registry = attr - else: - setattr(skeleton_class, attrname, attr) - if registry is not None: - for subclass in registry: - skeleton_class.register(subclass) - - return skeleton_class - - def _make_skeleton_enum(bases, name, qualname, members, module, class_tracker_id, extra): """Build dynamic enum with an empty __dict__ to be filled once memoized @@ -885,11 +575,7 @@ def _make_typevar(name, bound, constraints, covariant, contravariant, name, *constraints, bound=bound, covariant=covariant, contravariant=contravariant ) - if class_tracker_id is not None: - return _lookup_class_or_track(class_tracker_id, tv) - else: # pragma: nocover - # Only for Python 3.5.3 compat. - return tv + return _lookup_class_or_track(class_tracker_id, tv) def _decompose_typevar(obj): @@ -946,3 +632,775 @@ def _make_dict_items(obj, is_ordered=False): return OrderedDict(obj).items() else: return obj.items() + + +# COLLECTION OF OBJECTS __getnewargs__-LIKE METHODS +# ------------------------------------------------- + +def _class_getnewargs(obj): + type_kwargs = {} + if "__module__" in obj.__dict__: + type_kwargs["__module__"] = obj.__module__ + + __dict__ = obj.__dict__.get('__dict__', None) + if isinstance(__dict__, property): + type_kwargs['__dict__'] = __dict__ + + return (type(obj), obj.__name__, _get_bases(obj), type_kwargs, + _get_or_create_tracker_id(obj), None) + + +def _enum_getnewargs(obj): + members = {e.name: e.value for e in obj} + return (obj.__bases__, obj.__name__, obj.__qualname__, members, + obj.__module__, _get_or_create_tracker_id(obj), None) + + +# COLLECTION OF OBJECTS RECONSTRUCTORS +# ------------------------------------ +def _file_reconstructor(retval): + return retval + + +# COLLECTION OF OBJECTS STATE GETTERS +# ----------------------------------- + +def _function_getstate(func): + # - Put func's dynamic attributes (stored in func.__dict__) in state. These + # attributes will be restored at unpickling time using + # f.__dict__.update(state) + # - Put func's members into slotstate. 
Such attributes will be restored at + # unpickling time by iterating over slotstate and calling setattr(func, + # slotname, slotvalue) + slotstate = { + "__name__": func.__name__, + "__qualname__": func.__qualname__, + "__annotations__": func.__annotations__, + "__kwdefaults__": func.__kwdefaults__, + "__defaults__": func.__defaults__, + "__module__": func.__module__, + "__doc__": func.__doc__, + "__closure__": func.__closure__, + } + + f_globals_ref = _extract_code_globals(func.__code__) + f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in + func.__globals__} + + if func.__closure__ is not None: + closure_values = list(map(_get_cell_contents, func.__closure__)) + else: + closure_values = () + + # Extract currently-imported submodules used by func. Storing these modules + # in a smoke _cloudpickle_subimports attribute of the object's state will + # trigger the side effect of importing these modules at unpickling time + # (which is necessary for func to work correctly once depickled) + slotstate["_cloudpickle_submodules"] = _find_imported_submodules( + func.__code__, itertools.chain(f_globals.values(), closure_values)) + slotstate["__globals__"] = f_globals + + state = func.__dict__ + return state, slotstate + + +def _class_getstate(obj): + clsdict = _extract_class_dict(obj) + clsdict.pop('__weakref__', None) + + if issubclass(type(obj), abc.ABCMeta): + # If obj is an instance of an ABCMeta subclass, don't pickle the + # cache/negative caches populated during isinstance/issubclass + # checks, but pickle the list of registered subclasses of obj. + clsdict.pop('_abc_cache', None) + clsdict.pop('_abc_negative_cache', None) + clsdict.pop('_abc_negative_cache_version', None) + registry = clsdict.pop('_abc_registry', None) + if registry is None: + # The abc caches and registered subclasses of a + # class are bundled into the single _abc_impl attribute + clsdict.pop('_abc_impl', None) + (registry, _, _, _) = abc._get_dump(obj) + + clsdict["_abc_impl"] = [subclass_weakref() + for subclass_weakref in registry] + else: + # In the above if clause, registry is a set of weakrefs -- in + # this case, registry is a WeakSet + clsdict["_abc_impl"] = [type_ for type_ in registry] + + if "__slots__" in clsdict: + # pickle string length optimization: member descriptors of obj are + # created automatically from obj's __slots__ attribute, no need to + # save them in obj's state + if isinstance(obj.__slots__, str): + clsdict.pop(obj.__slots__) + else: + for k in obj.__slots__: + clsdict.pop(k, None) + + clsdict.pop('__dict__', None) # unpicklable property object + + return (clsdict, {}) + + +def _enum_getstate(obj): + clsdict, slotstate = _class_getstate(obj) + + members = {e.name: e.value for e in obj} + # Cleanup the clsdict that will be passed to _make_skeleton_enum: + # Those attributes are already handled by the metaclass. + for attrname in ["_generate_next_value_", "_member_names_", + "_member_map_", "_member_type_", + "_value2member_map_"]: + clsdict.pop(attrname, None) + for member in members: + clsdict.pop(member) + # Special handling of Enum subclasses + return clsdict, slotstate + + +# COLLECTIONS OF OBJECTS REDUCERS +# ------------------------------- +# A reducer is a function taking a single argument (obj), and that returns a +# tuple with all the necessary data to re-construct obj. Apart from a few +# exceptions (list, dict, bytes, int, etc.), a reducer is necessary to +# correctly pickle an object. 
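To make the reducer contract above concrete, here is a minimal sketch using the standard `copyreg` registration path that the `Pickler.dispatch_table` defined later in this patch chains to; the `Interval` class and `_interval_reduce` function are hypothetical illustrations, not part of cloudpickle:

```python
import copyreg
import pickle


class Interval:
    def __init__(self, low, high):
        self.low, self.high = low, high


def _interval_reduce(obj):
    # A reducer returns (reconstructor, args): pickle will call
    # Interval(obj.low, obj.high) again at unpickling time.
    return Interval, (obj.low, obj.high)


# Register the reducer in the interpreter-wide copyreg dispatch table,
# which cloudpickle's Pickler consults after its own reducers.
copyreg.pickle(Interval, _interval_reduce)

restored = pickle.loads(pickle.dumps(Interval(1, 5)))
assert (restored.low, restored.high) == (1, 5)
```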
+# While many built-in objects (Exception objects, instances of the "object" +# class, etc.) are shipped with their own built-in reducer (invoked using +# obj.__reduce__), some are not. The following methods were created to "fill +# these holes". + +def _code_reduce(obj): + """code object reducer.""" + # If you are not sure about the order of arguments, take a look at help + # of the specific type from types, for example: + # >>> from types import CodeType + # >>> help(CodeType) + if hasattr(obj, "co_exceptiontable"): + # Python 3.11 and later: there are some new attributes + # related to the enhanced exceptions. + args = ( + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, + obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable, + obj.co_freevars, obj.co_cellvars, + ) + elif hasattr(obj, "co_linetable"): + # Python 3.10 and later: obj.co_lnotab is deprecated and constructor + # expects obj.co_linetable instead. + args = ( + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, + obj.co_firstlineno, obj.co_linetable, obj.co_freevars, + obj.co_cellvars + ) + elif hasattr(obj, "co_nmeta"): # pragma: no cover + # "nogil" Python: modified attributes from 3.9 + args = ( + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_framesize, + obj.co_ndefaultargs, obj.co_nmeta, + obj.co_flags, obj.co_code, obj.co_consts, + obj.co_varnames, obj.co_filename, obj.co_name, + obj.co_firstlineno, obj.co_lnotab, obj.co_exc_handlers, + obj.co_jump_table, obj.co_freevars, obj.co_cellvars, + obj.co_free2reg, obj.co_cell2reg + ) + else: + # Backward compat for 3.8 and 3.9 + args = ( + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, + obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, + obj.co_cellvars + ) + return types.CodeType, args + + +def _cell_reduce(obj): + """Cell (containing values of a function's free variables) reducer.""" + try: + obj.cell_contents + except ValueError: # cell is empty + return _make_empty_cell, () + else: + return _make_cell, (obj.cell_contents, ) + + +def _classmethod_reduce(obj): + orig_func = obj.__func__ + return type(obj), (orig_func,) + + +def _file_reduce(obj): + """Save a file.""" + import io + + if not hasattr(obj, "name") or not hasattr(obj, "mode"): + raise pickle.PicklingError( + "Cannot pickle files that do not map to an actual file" + ) + if obj is sys.stdout: + return getattr, (sys, "stdout") + if obj is sys.stderr: + return getattr, (sys, "stderr") + if obj is sys.stdin: + raise pickle.PicklingError("Cannot pickle standard input") + if obj.closed: + raise pickle.PicklingError("Cannot pickle closed files") + if hasattr(obj, "isatty") and obj.isatty(): + raise pickle.PicklingError( + "Cannot pickle files that map to tty objects" + ) + if "r" not in obj.mode and "+" not in obj.mode: + raise pickle.PicklingError( + "Cannot pickle files that are not opened for reading: %s" + % obj.mode + ) + + name = obj.name + + retval = io.StringIO() + + try: + # Read the whole file + curloc = obj.tell() + obj.seek(0) + contents = obj.read() + obj.seek(curloc) +
except OSError as e: + raise pickle.PicklingError( + "Cannot pickle file %s as it cannot be read" % name + ) from e + retval.write(contents) + retval.seek(curloc) + + retval.name = name + return _file_reconstructor, (retval,) + + +def _getset_descriptor_reduce(obj): + return getattr, (obj.__objclass__, obj.__name__) + + +def _mappingproxy_reduce(obj): + return types.MappingProxyType, (dict(obj),) + + +def _memoryview_reduce(obj): + return bytes, (obj.tobytes(),) + + +def _module_reduce(obj): + if _should_pickle_by_reference(obj): + return subimport, (obj.__name__,) + else: + # Some external libraries can populate the "__builtins__" entry of a + # module's `__dict__` with unpicklable objects (see #316). For that + # reason, we do not attempt to pickle the "__builtins__" entry, and + # restore a default value for it at unpickling time. + state = obj.__dict__.copy() + state.pop('__builtins__', None) + return dynamic_subimport, (obj.__name__, state) + + +def _method_reduce(obj): + return (types.MethodType, (obj.__func__, obj.__self__)) + + +def _logger_reduce(obj): + return logging.getLogger, (obj.name,) + + +def _root_logger_reduce(obj): + return logging.getLogger, () + + +def _property_reduce(obj): + return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__) + + +def _weakset_reduce(obj): + return weakref.WeakSet, (list(obj),) + + +def _dynamic_class_reduce(obj): + """Save a class that can't be referenced as a module attribute. + + This method is used to serialize classes that are defined inside + functions, or that otherwise can't be serialized as attribute lookups + from importable modules. + """ + if Enum is not None and issubclass(obj, Enum): + return ( + _make_skeleton_enum, _enum_getnewargs(obj), _enum_getstate(obj), + None, None, _class_setstate + ) + else: + return ( + _make_skeleton_class, _class_getnewargs(obj), _class_getstate(obj), + None, None, _class_setstate + ) + + +def _class_reduce(obj): + """Select the reducer depending on the dynamic nature of the class obj.""" + if obj is type(None): # noqa + return type, (None,) + elif obj is type(Ellipsis): + return type, (Ellipsis,) + elif obj is type(NotImplemented): + return type, (NotImplemented,) + elif obj in _BUILTIN_TYPE_NAMES: + return _builtin_type, (_BUILTIN_TYPE_NAMES[obj],) + elif not _should_pickle_by_reference(obj): + return _dynamic_class_reduce(obj) + return NotImplemented + + +def _dict_keys_reduce(obj): + # Safer not to ship the full dict as sending the rest might + # be unintended and could potentially cause leaking of + # sensitive information + return _make_dict_keys, (list(obj), ) + + +def _dict_values_reduce(obj): + # Safer not to ship the full dict as sending the rest might + # be unintended and could potentially cause leaking of + # sensitive information + return _make_dict_values, (list(obj), ) + + +def _dict_items_reduce(obj): + return _make_dict_items, (dict(obj), ) + + +def _odict_keys_reduce(obj): + # Safer not to ship the full dict as sending the rest might + # be unintended and could potentially cause leaking of + # sensitive information + return _make_dict_keys, (list(obj), True) + + +def _odict_values_reduce(obj): + # Safer not to ship the full dict as sending the rest might + # be unintended and could potentially cause leaking of + # sensitive information + return _make_dict_values, (list(obj), True) + + +def _odict_items_reduce(obj): + return _make_dict_items, (dict(obj), True) + + +def _dataclass_field_base_reduce(obj): + return _get_dataclass_field_type_sentinel, (obj.name,) + + +# COLLECTIONS 
OF OBJECTS STATE SETTERS +# ------------------------------------ +# state setters are called at unpickling time, once the object is created and +# it has to be restored to how it was at pickling time. + + +def _function_setstate(obj, state): + """Update the state of a dynamic function. + + As __closure__ and __globals__ are read-only attributes of a function, we + cannot rely on the native setstate routine of pickle.load_build, which calls + setattr on items of the slotstate. Instead, we have to modify them in place. + """ + state, slotstate = state + obj.__dict__.update(state) + + obj_globals = slotstate.pop("__globals__") + obj_closure = slotstate.pop("__closure__") + # _cloudpickle_subimports is a set of submodules that must be loaded for + # the pickled function to work correctly at unpickling time. Now that these + # submodules are depickled (hence imported), they can be removed from the + # object's state (the object state only served as a reference holder to + # these submodules) + slotstate.pop("_cloudpickle_submodules") + + obj.__globals__.update(obj_globals) + obj.__globals__["__builtins__"] = __builtins__ + + if obj_closure is not None: + for i, cell in enumerate(obj_closure): + try: + value = cell.cell_contents + except ValueError: # cell is empty + continue + obj.__closure__[i].cell_contents = value + + for k, v in slotstate.items(): + setattr(obj, k, v) + + +def _class_setstate(obj, state): + state, slotstate = state + registry = None + for attrname, attr in state.items(): + if attrname == "_abc_impl": + registry = attr + else: + setattr(obj, attrname, attr) + if registry is not None: + for subclass in registry: + obj.register(subclass) + + return obj + + +# COLLECTION OF DATACLASS UTILITIES +# --------------------------------- +# There are some internal sentinel values whose identity must be preserved when +# unpickling dataclass fields. Each sentinel value has a unique name that we can +# use to retrieve its identity at unpickling time.
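The identity constraint these sentinels must satisfy can be illustrated with a short, self-contained sketch (the `Point` dataclass is a hypothetical example; `_field_type` and `_FIELD` are private `dataclasses` internals):

```python
import dataclasses


@dataclasses.dataclass
class Point:
    x: int = 0


field = dataclasses.fields(Point)[0]
# The stdlib discriminates regular fields from ClassVar/InitVar pseudo-fields
# with an `is` comparison against these module-level singletons, so a
# reconstructed copy of the sentinel would not be recognized; pickling it by
# name and looking it up again at unpickling time preserves identity.
assert field._field_type is dataclasses._FIELD
```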
+ + +_DATACLASSE_FIELD_TYPE_SENTINELS = { + dataclasses._FIELD.name: dataclasses._FIELD, + dataclasses._FIELD_CLASSVAR.name: dataclasses._FIELD_CLASSVAR, + dataclasses._FIELD_INITVAR.name: dataclasses._FIELD_INITVAR, +} + + +def _get_dataclass_field_type_sentinel(name): + return _DATACLASSE_FIELD_TYPE_SENTINELS[name] + + +class Pickler(pickle.Pickler): + # set of reducers defined and used by cloudpickle (private) + _dispatch_table = {} + _dispatch_table[classmethod] = _classmethod_reduce + _dispatch_table[io.TextIOWrapper] = _file_reduce + _dispatch_table[logging.Logger] = _logger_reduce + _dispatch_table[logging.RootLogger] = _root_logger_reduce + _dispatch_table[memoryview] = _memoryview_reduce + _dispatch_table[property] = _property_reduce + _dispatch_table[staticmethod] = _classmethod_reduce + _dispatch_table[CellType] = _cell_reduce + _dispatch_table[types.CodeType] = _code_reduce + _dispatch_table[types.GetSetDescriptorType] = _getset_descriptor_reduce + _dispatch_table[types.ModuleType] = _module_reduce + _dispatch_table[types.MethodType] = _method_reduce + _dispatch_table[types.MappingProxyType] = _mappingproxy_reduce + _dispatch_table[weakref.WeakSet] = _weakset_reduce + _dispatch_table[typing.TypeVar] = _typevar_reduce + _dispatch_table[_collections_abc.dict_keys] = _dict_keys_reduce + _dispatch_table[_collections_abc.dict_values] = _dict_values_reduce + _dispatch_table[_collections_abc.dict_items] = _dict_items_reduce + _dispatch_table[type(OrderedDict().keys())] = _odict_keys_reduce + _dispatch_table[type(OrderedDict().values())] = _odict_values_reduce + _dispatch_table[type(OrderedDict().items())] = _odict_items_reduce + _dispatch_table[abc.abstractmethod] = _classmethod_reduce + _dispatch_table[abc.abstractclassmethod] = _classmethod_reduce + _dispatch_table[abc.abstractstaticmethod] = _classmethod_reduce + _dispatch_table[abc.abstractproperty] = _property_reduce + _dispatch_table[dataclasses._FIELD_BASE] = _dataclass_field_base_reduce + + dispatch_table = ChainMap(_dispatch_table, copyreg.dispatch_table) + + # function reducers are defined as instance methods of cloudpickle.Pickler + # objects, as they rely on a cloudpickle.Pickler attribute (globals_ref) + def _dynamic_function_reduce(self, func): + """Reduce a function that is not pickleable via attribute lookup.""" + newargs = self._function_getnewargs(func) + state = _function_getstate(func) + return (_make_function, newargs, state, None, None, + _function_setstate) + + def _function_reduce(self, obj): + """Reducer for function objects. + + If obj is a top-level attribute of a file-backed module, this reducer + returns NotImplemented, making the cloudpickle.Pickler fall back to + traditional pickle.Pickler routines to save obj. Otherwise, it reduces + obj using a custom cloudpickle reducer designed specifically to handle + dynamic functions. + """ + if _should_pickle_by_reference(obj): + return NotImplemented + else: + return self._dynamic_function_reduce(obj) + + def _function_getnewargs(self, func): + code = func.__code__ + + # base_globals represents the future global namespace of func at + # unpickling time. 
Looking it up and storing it in + # cloudpickle.Pickler.globals_ref allows functions sharing the same + # globals at pickling time to also share them once unpickled, on one + # condition: since globals_ref is an attribute of a cloudpickle.Pickler + # instance, and a new cloudpickle.Pickler is created each time + # cloudpickle.dump or cloudpickle.dumps is called, functions also need + # to be saved within the same invocation of + # cloudpickle.dump/cloudpickle.dumps (for example: + # cloudpickle.dumps([f1, f2])). There is no such limitation when using + # cloudpickle.Pickler.dump, as long as the multiple invocations are + # bound to the same cloudpickle.Pickler instance. + base_globals = self.globals_ref.setdefault(id(func.__globals__), {}) + + if base_globals == {}: + # Add module attributes used to resolve relative imports + # instructions inside func. + for k in ["__package__", "__name__", "__path__", "__file__"]: + if k in func.__globals__: + base_globals[k] = func.__globals__[k] + + # Do not bind the free variables before the function is created to + # avoid infinite recursion. + if func.__closure__ is None: + closure = None + else: + closure = tuple( + _make_empty_cell() for _ in range(len(code.co_freevars))) + + return code, base_globals, None, None, closure + + def dump(self, obj): + try: + return super().dump(obj) + except RuntimeError as e: + if len(e.args) > 0 and "recursion" in e.args[0]: + msg = ( + "Could not pickle object as excessively deep recursion " + "required." + ) + raise pickle.PicklingError(msg) from e + else: + raise + + def __init__(self, file, protocol=None, buffer_callback=None): + if protocol is None: + protocol = DEFAULT_PROTOCOL + super().__init__( + file, protocol=protocol, buffer_callback=buffer_callback + ) + # map functions' __globals__ attribute ids to ensure that functions + # sharing the same global namespace at pickling time also share + # their global namespace at unpickling time. + self.globals_ref = {} + self.proto = int(protocol) + + if not PYPY: + # pickle.Pickler is the C implementation of the CPython pickler and + # therefore we rely on the reducer_override method to customize the + # pickler behavior. + + # `cloudpickle.Pickler.dispatch` is only left for backward + # compatibility - note that when using protocol 5, + # `cloudpickle.Pickler.dispatch` is not an extension of + # `pickle._Pickler.dispatch` dictionary, because `cloudpickle.Pickler` + # subclasses the C-implemented `pickle.Pickler`, which does not expose + # a `dispatch` attribute. Earlier versions of `cloudpickle.Pickler` + # used `cloudpickle.Pickler.dispatch` as a class-level attribute + # storing all reducers implemented by cloudpickle, but the attribute + # name was not a great choice because it would collide with a + # similarly named attribute in the pure-Python `pickle._Pickler` + # implementation in the standard library. + dispatch = dispatch_table + + # Implementation of the reducer_override callback, in order to + # efficiently serialize dynamic functions and classes by subclassing + # the C-implemented `pickle.Pickler`. + # TODO: decorrelate reducer_override (which is tied to CPython's + # implementation - would it make sense to backport it to pypy?) and + # pickle's protocol 5 (which is implementation agnostic). Currently, the + # availability of both notions coincides in CPython's pickle, but it may + # not be the case anymore when pypy implements protocol 5. + + def reducer_override(self, obj): + """Type-agnostic reducing callback for functions and classes.
+
+    def dump(self, obj):
+        try:
+            return super().dump(obj)
+        except RuntimeError as e:
+            if len(e.args) > 0 and "recursion" in e.args[0]:
+                msg = (
+                    "Could not pickle object as excessively deep recursion "
+                    "required."
+                )
+                raise pickle.PicklingError(msg) from e
+            else:
+                raise
+
+    def __init__(self, file, protocol=None, buffer_callback=None):
+        if protocol is None:
+            protocol = DEFAULT_PROTOCOL
+        super().__init__(
+            file, protocol=protocol, buffer_callback=buffer_callback
+        )
+        # map functions' __globals__ attribute ids, to ensure that functions
+        # sharing the same global namespace at pickling time also share
+        # their global namespace at unpickling time.
+        self.globals_ref = {}
+        self.proto = int(protocol)
+
+    if not PYPY:
+        # pickle.Pickler is the C implementation of the CPython pickler and
+        # therefore we rely on the reducer_override method to customize the
+        # pickler behavior.
+
+        # `cloudpickle.Pickler.dispatch` is only left for backward
+        # compatibility - note that when using protocol 5,
+        # `cloudpickle.Pickler.dispatch` is not an extension of
+        # `pickle._Pickler.dispatch` dictionary, because `cloudpickle.Pickler`
+        # subclasses the C-implemented `pickle.Pickler`, which does not expose
+        # a `dispatch` attribute. Earlier versions of `cloudpickle.Pickler`
+        # used `cloudpickle.Pickler.dispatch` as a class-level attribute
+        # storing all reducers implemented by cloudpickle, but the attribute
+        # name was not a great choice because it would collide with a
+        # similarly named attribute in the pure-Python `pickle._Pickler`
+        # implementation in the standard library.
+        dispatch = dispatch_table
+
+        # Implementation of the reducer_override callback, in order to
+        # efficiently serialize dynamic functions and classes by subclassing
+        # the C-implemented `pickle.Pickler`.
+        # TODO: decouple reducer_override (which is tied to CPython's
+        # implementation - would it make sense to backport it to pypy?) from
+        # pickle's protocol 5, which is implementation agnostic. Currently,
+        # the availability of both notions coincides in CPython's pickle, but
+        # it may not be the case anymore when pypy implements protocol 5.
+
+        def reducer_override(self, obj):
+            """Type-agnostic reducing callback for functions and classes.
+
+            For performance reasons, subclasses of the C `pickle.Pickler`
+            class cannot register custom reducers for functions and classes
+            in the dispatch_table attribute. Reducers for such types must
+            instead be implemented via the special `reducer_override` method.
+
+            Note that this method will be called for any object except a few
+            builtin types (int, list, dict, etc.), which differs from
+            reducers in the Pickler's dispatch_table, each of them being
+            invoked for objects of a specific type only.
+
+            This property comes in handy for classes: although most classes
+            are instances of the ``type`` metaclass, some of them can be
+            instances of other custom metaclasses (such as enum.EnumMeta for
+            example). In particular, the metaclass will likely not be known
+            in advance, and thus cannot be special-cased using an entry in
+            the dispatch_table. reducer_override, among other things, allows
+            us to register a reducer that will be called for any class,
+            independently of its type.
+
+            Notes:
+
+            * reducer_override takes priority over dispatch_table-registered
+              reducers.
+            * reducer_override can be used to fix other limitations of
+              cloudpickle for types that suffer from type-specific reducers,
+              such as Exceptions. See
+              https://github.com/cloudpipe/cloudpickle/issues/248
+            """
+            t = type(obj)
+            try:
+                is_anyclass = issubclass(t, type)
+            except TypeError:  # t is not a class (old Boost; see SF #502085)
+                is_anyclass = False
+
+            if is_anyclass:
+                return _class_reduce(obj)
+            elif isinstance(obj, types.FunctionType):
+                return self._function_reduce(obj)
+            else:
+                # fallback to save_global, including the Pickler's
+                # dispatch_table
+                return NotImplemented
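As a concrete illustration of the docstring above: a dynamically defined Enum is an instance of `enum.EnumMeta` rather than of `type`, so no single `dispatch_table` entry could match it, but `reducer_override` still catches it through the `issubclass(t, type)` check. A short sketch:

    import enum
    import pickle
    import cloudpickle

    def make_color():
        # type(Color) is enum.EnumMeta, a custom metaclass.
        class Color(enum.Enum):
            RED = 1
            GREEN = 2
        return Color

    Color = make_color()
    Clone = pickle.loads(cloudpickle.dumps(Color))
    assert Clone.RED.value == 1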
+
+    else:
+        # When reducer_override is not available, hack the pure-Python
+        # Pickler's types.FunctionType and type savers. Note: the type saver
+        # must override Pickler.save_global, because pickle.py contains a
+        # hard-coded call to save_global when pickling meta-classes.
+        dispatch = pickle.Pickler.dispatch.copy()
+
+        def _save_reduce_pickle5(self, func, args, state=None, listitems=None,
+                                 dictitems=None, state_setter=None, obj=None):
+            save = self.save
+            write = self.write
+            self.save_reduce(
+                func, args, state=None, listitems=listitems,
+                dictitems=dictitems, obj=obj
+            )
+            # backport of the Python 3.8 state_setter pickle operations
+            save(state_setter)
+            save(obj)  # simple BINGET opcode as obj is already memoized.
+            save(state)
+            write(pickle.TUPLE2)
+            # Trigger a state_setter(obj, state) function call.
+            write(pickle.REDUCE)
+            # The purpose of state_setter is to carry out an in-place
+            # modification of obj. We do not care what the method might
+            # return, so its output is eventually removed from the stack.
+            write(pickle.POP)
+
+        def save_global(self, obj, name=None, pack=struct.pack):
+            """Main dispatch method.
+
+            The name of this method is somewhat misleading: all types get
+            dispatched here.
+            """
+            if obj is type(None):  # noqa
+                return self.save_reduce(type, (None,), obj=obj)
+            elif obj is type(Ellipsis):
+                return self.save_reduce(type, (Ellipsis,), obj=obj)
+            elif obj is type(NotImplemented):
+                return self.save_reduce(type, (NotImplemented,), obj=obj)
+            elif obj in _BUILTIN_TYPE_NAMES:
+                return self.save_reduce(
+                    _builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj)
+
+            if name is not None:
+                super().save_global(obj, name=name)
+            elif not _should_pickle_by_reference(obj, name=name):
+                self._save_reduce_pickle5(*_dynamic_class_reduce(obj), obj=obj)
+            else:
+                super().save_global(obj, name=name)
+        dispatch[type] = save_global
+
+        def save_function(self, obj, name=None):
+            """Registered with the dispatch to handle all function types.
+
+            Determines what kind of function obj is (e.g. lambda, defined at
+            interactive prompt, etc.) and handles the pickling appropriately.
+            """
+            if _should_pickle_by_reference(obj, name=name):
+                return super().save_global(obj, name=name)
+            elif PYPY and isinstance(obj.__code__, builtin_code_type):
+                return self.save_pypy_builtin_func(obj)
+            else:
+                return self._save_reduce_pickle5(
+                    *self._dynamic_function_reduce(obj), obj=obj
+                )
+
+        def save_pypy_builtin_func(self, obj):
+            """Save pypy equivalent of builtin functions.
+
+            PyPy does not have the concept of builtin-functions. Instead,
+            builtin-functions are simple function instances, but with a
+            builtin-code attribute.
+            Most of the time, builtin functions should be pickled by
+            attribute. But PyPy has flaky support for __qualname__, so some
+            builtin functions such as float.__new__ will be classified as
+            dynamic. For this reason only, we created this special routine.
+            Because builtin-functions are not expected to have closure or
+            globals, there is no additional hack (compared to the one already
+            implemented in pickle) to protect ourselves from reference
+            cycles. A simple (reconstructor, newargs, obj.__dict__) tuple is
+            passed to save_reduce. Note also that PyPy improved its support
+            for __qualname__ in v3.6, so this routine should be removed when
+            cloudpickle supports only PyPy 3.6 and later.
+            """
+            rv = (types.FunctionType, (obj.__code__, {}, obj.__name__,
+                                       obj.__defaults__, obj.__closure__),
+                  obj.__dict__)
+            self.save_reduce(*rv, obj=obj)
+
+        dispatch[types.FunctionType] = save_function
+
+
+# Shorthands similar to pickle.dump/pickle.dumps
+
+def dump(obj, file, protocol=None, buffer_callback=None):
+    """Serialize obj as bytes streamed into file
+
+    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
+    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication
+    speed between processes running the same Python version.
+
+    Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
+    compatibility with older versions of Python (although this is not always
+    guaranteed to work because cloudpickle relies on some internal
+    implementation details that can change from one Python version to the
+    next).
+    """
+    Pickler(
+        file, protocol=protocol, buffer_callback=buffer_callback
+    ).dump(obj)
+
+
+def dumps(obj, protocol=None, buffer_callback=None):
+    """Serialize obj as a string of bytes allocated in memory
+
+    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
+    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication
+    speed between processes running the same Python version.
+ + Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure + compatibility with older versions of Python (although this is not always + guaranteed to work because cloudpickle relies on some internal + implementation details that can change from one Python version to the + next). + """ + with io.BytesIO() as file: + cp = Pickler( + file, protocol=protocol, buffer_callback=buffer_callback + ) + cp.dump(obj) + return file.getvalue() + + +# Include pickles unloading functions in this namespace for convenience. +load, loads = pickle.load, pickle.loads + +# Backward compat alias. +CloudPickler = Pickler diff --git a/cloudpickle/cloudpickle_fast.py b/cloudpickle/cloudpickle_fast.py index 30a0ce4f1..561adb966 100644 --- a/cloudpickle/cloudpickle_fast.py +++ b/cloudpickle/cloudpickle_fast.py @@ -1,868 +1,14 @@ -""" -New, fast version of the CloudPickler. +"""Compatibility module. -This new CloudPickler class can now extend the fast C Pickler instead of the -previous Python implementation of the Pickler class. Because this functionality -is only available for Python versions 3.8+, a lot of backward-compatibility -code is also removed. +It can be necessary to load files generated by previous versions of cloudpickle +that rely on symbols being defined under the `cloudpickle.cloudpickle_fast` +namespace. -Note that the C Pickler subclassing API is CPython-specific. Therefore, some -guards present in cloudpickle.py that were written to handle PyPy specificities -are not present in cloudpickle_fast.py +See: tests/test_backward_compat.py """ -import _collections_abc -import abc -import copyreg -import dataclasses -import io -import itertools -import logging -import sys -import struct -import types -import weakref -import typing - -from enum import Enum -from collections import ChainMap, OrderedDict - -from .compat import pickle, Pickler -from .cloudpickle import ( - _extract_code_globals, _BUILTIN_TYPE_NAMES, DEFAULT_PROTOCOL, - _find_imported_submodules, _get_cell_contents, _should_pickle_by_reference, - _builtin_type, _get_or_create_tracker_id, _make_skeleton_class, - _make_skeleton_enum, _extract_class_dict, dynamic_subimport, subimport, - _typevar_reduce, _get_bases, _make_cell, _make_empty_cell, CellType, - _is_parametrized_type_hint, PYPY, cell_set, - parametrized_type_hint_getinitargs, _create_parametrized_type_hint, - builtin_code_type, - _make_dict_keys, _make_dict_values, _make_dict_items, _make_function, -) - - -if pickle.HIGHEST_PROTOCOL >= 5: - # Shorthands similar to pickle.dump/pickle.dumps - - def dump(obj, file, protocol=None, buffer_callback=None): - """Serialize obj as bytes streamed into file - - protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to - pickle.HIGHEST_PROTOCOL. This setting favors maximum communication - speed between processes running the same Python version. - - Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure - compatibility with older versions of Python. - """ - CloudPickler( - file, protocol=protocol, buffer_callback=buffer_callback - ).dump(obj) - - def dumps(obj, protocol=None, buffer_callback=None): - """Serialize obj as a string of bytes allocated in memory - - protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to - pickle.HIGHEST_PROTOCOL. This setting favors maximum communication - speed between processes running the same Python version. - - Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure - compatibility with older versions of Python. 
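The shim keeps old pickle payloads loadable: any attribute that used to live in `cloudpickle_fast` is forwarded to `cloudpickle.cloudpickle` by the module-level `__getattr__` added further down in this hunk. A sketch of the effect:

    import cloudpickle.cloudpickle as cp
    import cloudpickle.cloudpickle_fast as cp_fast

    # Names referenced by old pickle streams still resolve...
    assert cp_fast._make_function is cp._make_function
    # ...including the backward-compat CloudPickler alias.
    assert cp_fast.CloudPickler is cp.Pickler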
- """ - with io.BytesIO() as file: - cp = CloudPickler( - file, protocol=protocol, buffer_callback=buffer_callback - ) - cp.dump(obj) - return file.getvalue() - -else: - # Shorthands similar to pickle.dump/pickle.dumps - def dump(obj, file, protocol=None): - """Serialize obj as bytes streamed into file - - protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to - pickle.HIGHEST_PROTOCOL. This setting favors maximum communication - speed between processes running the same Python version. - - Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure - compatibility with older versions of Python. - """ - CloudPickler(file, protocol=protocol).dump(obj) - - def dumps(obj, protocol=None): - """Serialize obj as a string of bytes allocated in memory - - protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to - pickle.HIGHEST_PROTOCOL. This setting favors maximum communication - speed between processes running the same Python version. - - Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure - compatibility with older versions of Python. - """ - with io.BytesIO() as file: - cp = CloudPickler(file, protocol=protocol) - cp.dump(obj) - return file.getvalue() - - -load, loads = pickle.load, pickle.loads - - -# COLLECTION OF OBJECTS __getnewargs__-LIKE METHODS -# ------------------------------------------------- - -def _class_getnewargs(obj): - type_kwargs = {} - if "__module__" in obj.__dict__: - type_kwargs["__module__"] = obj.__module__ - - __dict__ = obj.__dict__.get('__dict__', None) - if isinstance(__dict__, property): - type_kwargs['__dict__'] = __dict__ - - return (type(obj), obj.__name__, _get_bases(obj), type_kwargs, - _get_or_create_tracker_id(obj), None) - - -def _enum_getnewargs(obj): - members = {e.name: e.value for e in obj} - return (obj.__bases__, obj.__name__, obj.__qualname__, members, - obj.__module__, _get_or_create_tracker_id(obj), None) - - -# COLLECTION OF OBJECTS RECONSTRUCTORS -# ------------------------------------ -def _file_reconstructor(retval): - return retval - - -# COLLECTION OF OBJECTS STATE GETTERS -# ----------------------------------- -def _function_getstate(func): - # - Put func's dynamic attributes (stored in func.__dict__) in state. These - # attributes will be restored at unpickling time using - # f.__dict__.update(state) - # - Put func's members into slotstate. Such attributes will be restored at - # unpickling time by iterating over slotstate and calling setattr(func, - # slotname, slotvalue) - slotstate = { - "__name__": func.__name__, - "__qualname__": func.__qualname__, - "__annotations__": func.__annotations__, - "__kwdefaults__": func.__kwdefaults__, - "__defaults__": func.__defaults__, - "__module__": func.__module__, - "__doc__": func.__doc__, - "__closure__": func.__closure__, - } - - f_globals_ref = _extract_code_globals(func.__code__) - f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in - func.__globals__} - - closure_values = ( - list(map(_get_cell_contents, func.__closure__)) - if func.__closure__ is not None else () - ) - - # Extract currently-imported submodules used by func. 
Storing these modules - # in a smoke _cloudpickle_subimports attribute of the object's state will - # trigger the side effect of importing these modules at unpickling time - # (which is necessary for func to work correctly once depickled) - slotstate["_cloudpickle_submodules"] = _find_imported_submodules( - func.__code__, itertools.chain(f_globals.values(), closure_values)) - slotstate["__globals__"] = f_globals - - state = func.__dict__ - return state, slotstate - - -def _class_getstate(obj): - clsdict = _extract_class_dict(obj) - clsdict.pop('__weakref__', None) - - if issubclass(type(obj), abc.ABCMeta): - # If obj is an instance of an ABCMeta subclass, don't pickle the - # cache/negative caches populated during isinstance/issubclass - # checks, but pickle the list of registered subclasses of obj. - clsdict.pop('_abc_cache', None) - clsdict.pop('_abc_negative_cache', None) - clsdict.pop('_abc_negative_cache_version', None) - registry = clsdict.pop('_abc_registry', None) - if registry is None: - # in Python3.7+, the abc caches and registered subclasses of a - # class are bundled into the single _abc_impl attribute - clsdict.pop('_abc_impl', None) - (registry, _, _, _) = abc._get_dump(obj) - - clsdict["_abc_impl"] = [subclass_weakref() - for subclass_weakref in registry] - else: - # In the above if clause, registry is a set of weakrefs -- in - # this case, registry is a WeakSet - clsdict["_abc_impl"] = [type_ for type_ in registry] - - if "__slots__" in clsdict: - # pickle string length optimization: member descriptors of obj are - # created automatically from obj's __slots__ attribute, no need to - # save them in obj's state - if isinstance(obj.__slots__, str): - clsdict.pop(obj.__slots__) - else: - for k in obj.__slots__: - clsdict.pop(k, None) - - clsdict.pop('__dict__', None) # unpicklable property object - - return (clsdict, {}) - - -def _enum_getstate(obj): - clsdict, slotstate = _class_getstate(obj) - - members = {e.name: e.value for e in obj} - # Cleanup the clsdict that will be passed to _rehydrate_skeleton_class: - # Those attributes are already handled by the metaclass. - for attrname in ["_generate_next_value_", "_member_names_", - "_member_map_", "_member_type_", - "_value2member_map_"]: - clsdict.pop(attrname, None) - for member in members: - clsdict.pop(member) - # Special handling of Enum subclasses - return clsdict, slotstate - - -# COLLECTIONS OF OBJECTS REDUCERS -# ------------------------------- -# A reducer is a function taking a single argument (obj), and that returns a -# tuple with all the necessary data to re-construct obj. Apart from a few -# exceptions (list, dict, bytes, int, etc.), a reducer is necessary to -# correctly pickle an object. -# While many built-in objects (Exceptions objects, instances of the "object" -# class, etc), are shipped with their own built-in reducer (invoked using -# obj.__reduce__), some do not. The following methods were created to "fill -# these holes". - -def _code_reduce(obj): - """codeobject reducer""" - # If you are not sure about the order of arguments, take a look at help - # of the specific type from types, for example: - # >>> from types import CodeType - # >>> help(CodeType) - if hasattr(obj, "co_exceptiontable"): # pragma: no branch - # Python 3.11 and later: there are some new attributes - # related to the enhanced exceptions. 
- args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, - obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, - obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, - obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable, - obj.co_freevars, obj.co_cellvars, - ) - elif hasattr(obj, "co_linetable"): # pragma: no branch - # Python 3.10 and later: obj.co_lnotab is deprecated and constructor - # expects obj.co_linetable instead. - args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, - obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, - obj.co_varnames, obj.co_filename, obj.co_name, - obj.co_firstlineno, obj.co_linetable, obj.co_freevars, - obj.co_cellvars - ) - elif hasattr(obj, "co_nmeta"): # pragma: no cover - # "nogil" Python: modified attributes from 3.9 - args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_framesize, - obj.co_ndefaultargs, obj.co_nmeta, - obj.co_flags, obj.co_code, obj.co_consts, - obj.co_varnames, obj.co_filename, obj.co_name, - obj.co_firstlineno, obj.co_lnotab, obj.co_exc_handlers, - obj.co_jump_table, obj.co_freevars, obj.co_cellvars, - obj.co_free2reg, obj.co_cell2reg - ) - elif hasattr(obj, "co_posonlyargcount"): - # Backward compat for 3.9 and older - args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, - obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, - obj.co_varnames, obj.co_filename, obj.co_name, - obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, - obj.co_cellvars - ) - else: - # Backward compat for even older versions of Python - args = ( - obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals, - obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, - obj.co_names, obj.co_varnames, obj.co_filename, - obj.co_name, obj.co_firstlineno, obj.co_lnotab, - obj.co_freevars, obj.co_cellvars - ) - return types.CodeType, args - - -def _cell_reduce(obj): - """Cell (containing values of a function's free variables) reducer""" - try: - obj.cell_contents - except ValueError: # cell is empty - return _make_empty_cell, () - else: - return _make_cell, (obj.cell_contents, ) - - -def _classmethod_reduce(obj): - orig_func = obj.__func__ - return type(obj), (orig_func,) - - -def _file_reduce(obj): - """Save a file""" - import io - - if not hasattr(obj, "name") or not hasattr(obj, "mode"): - raise pickle.PicklingError( - "Cannot pickle files that do not map to an actual file" - ) - if obj is sys.stdout: - return getattr, (sys, "stdout") - if obj is sys.stderr: - return getattr, (sys, "stderr") - if obj is sys.stdin: - raise pickle.PicklingError("Cannot pickle standard input") - if obj.closed: - raise pickle.PicklingError("Cannot pickle closed files") - if hasattr(obj, "isatty") and obj.isatty(): - raise pickle.PicklingError( - "Cannot pickle files that map to tty objects" - ) - if "r" not in obj.mode and "+" not in obj.mode: - raise pickle.PicklingError( - "Cannot pickle files that are not opened for reading: %s" - % obj.mode - ) - - name = obj.name - - retval = io.StringIO() - - try: - # Read the whole file - curloc = obj.tell() - obj.seek(0) - contents = obj.read() - obj.seek(curloc) - except IOError as e: - raise pickle.PicklingError( - "Cannot pickle file %s as it cannot be read" % name - ) from e - retval.write(contents) - retval.seek(curloc) - - retval.name = name - return _file_reconstructor, (retval,) - - -def 
_getset_descriptor_reduce(obj): - return getattr, (obj.__objclass__, obj.__name__) - - -def _mappingproxy_reduce(obj): - return types.MappingProxyType, (dict(obj),) - - -def _memoryview_reduce(obj): - return bytes, (obj.tobytes(),) - - -def _module_reduce(obj): - if _should_pickle_by_reference(obj): - return subimport, (obj.__name__,) - else: - # Some external libraries can populate the "__builtins__" entry of a - # module's `__dict__` with unpicklable objects (see #316). For that - # reason, we do not attempt to pickle the "__builtins__" entry, and - # restore a default value for it at unpickling time. - state = obj.__dict__.copy() - state.pop('__builtins__', None) - return dynamic_subimport, (obj.__name__, state) - - -def _method_reduce(obj): - return (types.MethodType, (obj.__func__, obj.__self__)) - - -def _logger_reduce(obj): - return logging.getLogger, (obj.name,) - - -def _root_logger_reduce(obj): - return logging.getLogger, () - - -def _property_reduce(obj): - return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__) - - -def _weakset_reduce(obj): - return weakref.WeakSet, (list(obj),) - - -def _dynamic_class_reduce(obj): - """ - Save a class that can't be stored as module global. - - This method is used to serialize classes that are defined inside - functions, or that otherwise can't be serialized as attribute lookups - from global modules. - """ - if Enum is not None and issubclass(obj, Enum): - return ( - _make_skeleton_enum, _enum_getnewargs(obj), _enum_getstate(obj), - None, None, _class_setstate - ) - else: - return ( - _make_skeleton_class, _class_getnewargs(obj), _class_getstate(obj), - None, None, _class_setstate - ) - - -def _class_reduce(obj): - """Select the reducer depending on the dynamic nature of the class obj""" - if obj is type(None): # noqa - return type, (None,) - elif obj is type(Ellipsis): - return type, (Ellipsis,) - elif obj is type(NotImplemented): - return type, (NotImplemented,) - elif obj in _BUILTIN_TYPE_NAMES: - return _builtin_type, (_BUILTIN_TYPE_NAMES[obj],) - elif not _should_pickle_by_reference(obj): - return _dynamic_class_reduce(obj) - return NotImplemented - - -def _dict_keys_reduce(obj): - # Safer not to ship the full dict as sending the rest might - # be unintended and could potentially cause leaking of - # sensitive information - return _make_dict_keys, (list(obj), ) - - -def _dict_values_reduce(obj): - # Safer not to ship the full dict as sending the rest might - # be unintended and could potentially cause leaking of - # sensitive information - return _make_dict_values, (list(obj), ) - - -def _dict_items_reduce(obj): - return _make_dict_items, (dict(obj), ) - - -def _odict_keys_reduce(obj): - # Safer not to ship the full dict as sending the rest might - # be unintended and could potentially cause leaking of - # sensitive information - return _make_dict_keys, (list(obj), True) - - -def _odict_values_reduce(obj): - # Safer not to ship the full dict as sending the rest might - # be unintended and could potentially cause leaking of - # sensitive information - return _make_dict_values, (list(obj), True) - - -def _odict_items_reduce(obj): - return _make_dict_items, (dict(obj), True) - - -def _dataclass_field_base_reduce(obj): - return _get_dataclass_field_type_sentinel, (obj.name,) - - -# COLLECTIONS OF OBJECTS STATE SETTERS -# ------------------------------------ -# state setters are called at unpickling time, once the object is created and -# it has to be updated to how it was at unpickling time. 
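The dict-view reducers above deliberately serialize only what the view itself exposes, per the "safer not to ship the full dict" comments. A small sketch of the resulting behavior:

    import pickle
    import cloudpickle

    d = {"user": "alice", "token": "s3cr3t"}
    payload = cloudpickle.dumps(d.keys())

    # The keys round-trip as a real dict_keys view...
    assert list(pickle.loads(payload)) == ["user", "token"]
    # ...but the values never enter the pickle stream.
    assert b"s3cr3t" not in payload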
- - -def _function_setstate(obj, state): - """Update the state of a dynamic function. - - As __closure__ and __globals__ are readonly attributes of a function, we - cannot rely on the native setstate routine of pickle.load_build, that calls - setattr on items of the slotstate. Instead, we have to modify them inplace. - """ - state, slotstate = state - obj.__dict__.update(state) - - obj_globals = slotstate.pop("__globals__") - obj_closure = slotstate.pop("__closure__") - # _cloudpickle_subimports is a set of submodules that must be loaded for - # the pickled function to work correctly at unpickling time. Now that these - # submodules are depickled (hence imported), they can be removed from the - # object's state (the object state only served as a reference holder to - # these submodules) - slotstate.pop("_cloudpickle_submodules") - - obj.__globals__.update(obj_globals) - obj.__globals__["__builtins__"] = __builtins__ - - if obj_closure is not None: - for i, cell in enumerate(obj_closure): - try: - value = cell.cell_contents - except ValueError: # cell is empty - continue - cell_set(obj.__closure__[i], value) - - for k, v in slotstate.items(): - setattr(obj, k, v) - - -def _class_setstate(obj, state): - state, slotstate = state - registry = None - for attrname, attr in state.items(): - if attrname == "_abc_impl": - registry = attr - else: - setattr(obj, attrname, attr) - if registry is not None: - for subclass in registry: - obj.register(subclass) - - return obj - - -# COLLECTION OF DATACLASS UTILITIES -# --------------------------------- -# There are some internal sentinel values whose identity must be preserved when -# unpickling dataclass fields. Each sentinel value has a unique name that we can -# use to retrieve its identity at unpickling time. - - -_DATACLASSE_FIELD_TYPE_SENTINELS = { - dataclasses._FIELD.name: dataclasses._FIELD, - dataclasses._FIELD_CLASSVAR.name: dataclasses._FIELD_CLASSVAR, - dataclasses._FIELD_INITVAR.name: dataclasses._FIELD_INITVAR, -} - - -def _get_dataclass_field_type_sentinel(name): - return _DATACLASSE_FIELD_TYPE_SENTINELS[name] - - -class CloudPickler(Pickler): - # set of reducers defined and used by cloudpickle (private) - _dispatch_table = {} - _dispatch_table[classmethod] = _classmethod_reduce - _dispatch_table[io.TextIOWrapper] = _file_reduce - _dispatch_table[logging.Logger] = _logger_reduce - _dispatch_table[logging.RootLogger] = _root_logger_reduce - _dispatch_table[memoryview] = _memoryview_reduce - _dispatch_table[property] = _property_reduce - _dispatch_table[staticmethod] = _classmethod_reduce - _dispatch_table[CellType] = _cell_reduce - _dispatch_table[types.CodeType] = _code_reduce - _dispatch_table[types.GetSetDescriptorType] = _getset_descriptor_reduce - _dispatch_table[types.ModuleType] = _module_reduce - _dispatch_table[types.MethodType] = _method_reduce - _dispatch_table[types.MappingProxyType] = _mappingproxy_reduce - _dispatch_table[weakref.WeakSet] = _weakset_reduce - _dispatch_table[typing.TypeVar] = _typevar_reduce - _dispatch_table[_collections_abc.dict_keys] = _dict_keys_reduce - _dispatch_table[_collections_abc.dict_values] = _dict_values_reduce - _dispatch_table[_collections_abc.dict_items] = _dict_items_reduce - _dispatch_table[type(OrderedDict().keys())] = _odict_keys_reduce - _dispatch_table[type(OrderedDict().values())] = _odict_values_reduce - _dispatch_table[type(OrderedDict().items())] = _odict_items_reduce - _dispatch_table[abc.abstractmethod] = _classmethod_reduce - _dispatch_table[abc.abstractclassmethod] = 
_classmethod_reduce - _dispatch_table[abc.abstractstaticmethod] = _classmethod_reduce - _dispatch_table[abc.abstractproperty] = _property_reduce - _dispatch_table[dataclasses._FIELD_BASE] = _dataclass_field_base_reduce - - dispatch_table = ChainMap(_dispatch_table, copyreg.dispatch_table) - - # function reducers are defined as instance methods of CloudPickler - # objects, as they rely on a CloudPickler attribute (globals_ref) - def _dynamic_function_reduce(self, func): - """Reduce a function that is not pickleable via attribute lookup.""" - newargs = self._function_getnewargs(func) - state = _function_getstate(func) - return (_make_function, newargs, state, None, None, - _function_setstate) - - def _function_reduce(self, obj): - """Reducer for function objects. - - If obj is a top-level attribute of a file-backed module, this - reducer returns NotImplemented, making the CloudPickler fallback to - traditional _pickle.Pickler routines to save obj. Otherwise, it reduces - obj using a custom cloudpickle reducer designed specifically to handle - dynamic functions. - - As opposed to cloudpickle.py, There no special handling for builtin - pypy functions because cloudpickle_fast is CPython-specific. - """ - if _should_pickle_by_reference(obj): - return NotImplemented - else: - return self._dynamic_function_reduce(obj) - - def _function_getnewargs(self, func): - code = func.__code__ - - # base_globals represents the future global namespace of func at - # unpickling time. Looking it up and storing it in - # CloudpiPickler.globals_ref allow functions sharing the same globals - # at pickling time to also share them once unpickled, at one condition: - # since globals_ref is an attribute of a CloudPickler instance, and - # that a new CloudPickler is created each time pickle.dump or - # pickle.dumps is called, functions also need to be saved within the - # same invocation of cloudpickle.dump/cloudpickle.dumps (for example: - # cloudpickle.dumps([f1, f2])). There is no such limitation when using - # CloudPickler.dump, as long as the multiple invocations are bound to - # the same CloudPickler. - base_globals = self.globals_ref.setdefault(id(func.__globals__), {}) - - if base_globals == {}: - # Add module attributes used to resolve relative imports - # instructions inside func. - for k in ["__package__", "__name__", "__path__", "__file__"]: - if k in func.__globals__: - base_globals[k] = func.__globals__[k] - - # Do not bind the free variables before the function is created to - # avoid infinite recursion. - if func.__closure__ is None: - closure = None - else: - closure = tuple( - _make_empty_cell() for _ in range(len(code.co_freevars))) - - return code, base_globals, None, None, closure - - def dump(self, obj): - try: - return Pickler.dump(self, obj) - except RuntimeError as e: - if len(e.args) > 0 and "recursion" in e.args[0]: - msg = ( - "Could not pickle object as excessively deep recursion " - "required." - ) - raise pickle.PicklingError(msg) from e - else: - raise - - if pickle.HIGHEST_PROTOCOL >= 5: - def __init__(self, file, protocol=None, buffer_callback=None): - if protocol is None: - protocol = DEFAULT_PROTOCOL - Pickler.__init__( - self, file, protocol=protocol, buffer_callback=buffer_callback - ) - # map functions __globals__ attribute ids, to ensure that functions - # sharing the same global namespace at pickling time also share - # their global namespace at unpickling time. 
- self.globals_ref = {} - self.proto = int(protocol) - else: - def __init__(self, file, protocol=None): - if protocol is None: - protocol = DEFAULT_PROTOCOL - Pickler.__init__(self, file, protocol=protocol) - # map functions __globals__ attribute ids, to ensure that functions - # sharing the same global namespace at pickling time also share - # their global namespace at unpickling time. - self.globals_ref = {} - assert hasattr(self, 'proto') - - if pickle.HIGHEST_PROTOCOL >= 5 and not PYPY: - # Pickler is the C implementation of the CPython pickler and therefore - # we rely on reduce_override method to customize the pickler behavior. - - # `CloudPickler.dispatch` is only left for backward compatibility - note - # that when using protocol 5, `CloudPickler.dispatch` is not an - # extension of `Pickler.dispatch` dictionary, because CloudPickler - # subclasses the C-implemented Pickler, which does not expose a - # `dispatch` attribute. Earlier versions of the protocol 5 CloudPickler - # used `CloudPickler.dispatch` as a class-level attribute storing all - # reducers implemented by cloudpickle, but the attribute name was not a - # great choice given the meaning of `CloudPickler.dispatch` when - # `CloudPickler` extends the pure-python pickler. - dispatch = dispatch_table - - # Implementation of the reducer_override callback, in order to - # efficiently serialize dynamic functions and classes by subclassing - # the C-implemented Pickler. - # TODO: decorrelate reducer_override (which is tied to CPython's - # implementation - would it make sense to backport it to pypy? - and - # pickle's protocol 5 which is implementation agnostic. Currently, the - # availability of both notions coincide on CPython's pickle and the - # pickle5 backport, but it may not be the case anymore when pypy - # implements protocol 5 - - def reducer_override(self, obj): - """Type-agnostic reducing callback for function and classes. - - For performance reasons, subclasses of the C _pickle.Pickler class - cannot register custom reducers for functions and classes in the - dispatch_table. Reducer for such types must instead implemented in - the special reducer_override method. - - Note that method will be called for any object except a few - builtin-types (int, lists, dicts etc.), which differs from reducers - in the Pickler's dispatch_table, each of them being invoked for - objects of a specific type only. - - This property comes in handy for classes: although most classes are - instances of the ``type`` metaclass, some of them can be instances - of other custom metaclasses (such as enum.EnumMeta for example). In - particular, the metaclass will likely not be known in advance, and - thus cannot be special-cased using an entry in the dispatch_table. - reducer_override, among other things, allows us to register a - reducer that will be called for any class, independently of its - type. - - - Notes: - - * reducer_override has the priority over dispatch_table-registered - reducers. - * reducer_override can be used to fix other limitations of - cloudpickle for other types that suffered from type-specific - reducers, such as Exceptions. 
See - https://github.com/cloudpipe/cloudpickle/issues/248 - """ - if sys.version_info[:2] < (3, 7) and _is_parametrized_type_hint(obj): # noqa # pragma: no branch - return ( - _create_parametrized_type_hint, - parametrized_type_hint_getinitargs(obj) - ) - t = type(obj) - try: - is_anyclass = issubclass(t, type) - except TypeError: # t is not a class (old Boost; see SF #502085) - is_anyclass = False - - if is_anyclass: - return _class_reduce(obj) - elif isinstance(obj, types.FunctionType): - return self._function_reduce(obj) - else: - # fallback to save_global, including the Pickler's - # dispatch_table - return NotImplemented - - else: - # When reducer_override is not available, hack the pure-Python - # Pickler's types.FunctionType and type savers. Note: the type saver - # must override Pickler.save_global, because pickle.py contains a - # hard-coded call to save_global when pickling meta-classes. - dispatch = Pickler.dispatch.copy() - - def _save_reduce_pickle5(self, func, args, state=None, listitems=None, - dictitems=None, state_setter=None, obj=None): - save = self.save - write = self.write - self.save_reduce( - func, args, state=None, listitems=listitems, - dictitems=dictitems, obj=obj - ) - # backport of the Python 3.8 state_setter pickle operations - save(state_setter) - save(obj) # simple BINGET opcode as obj is already memoized. - save(state) - write(pickle.TUPLE2) - # Trigger a state_setter(obj, state) function call. - write(pickle.REDUCE) - # The purpose of state_setter is to carry-out an - # inplace modification of obj. We do not care about what the - # method might return, so its output is eventually removed from - # the stack. - write(pickle.POP) - - def save_global(self, obj, name=None, pack=struct.pack): - """ - Save a "global". - - The name of this method is somewhat misleading: all types get - dispatched here. - """ - if obj is type(None): # noqa - return self.save_reduce(type, (None,), obj=obj) - elif obj is type(Ellipsis): - return self.save_reduce(type, (Ellipsis,), obj=obj) - elif obj is type(NotImplemented): - return self.save_reduce(type, (NotImplemented,), obj=obj) - elif obj in _BUILTIN_TYPE_NAMES: - return self.save_reduce( - _builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj) - - if sys.version_info[:2] < (3, 7) and _is_parametrized_type_hint(obj): # noqa # pragma: no branch - # Parametrized typing constructs in Python < 3.7 are not - # compatible with type checks and ``isinstance`` semantics. For - # this reason, it is easier to detect them using a - # duck-typing-based check (``_is_parametrized_type_hint``) than - # to populate the Pickler's dispatch with type-specific savers. - self.save_reduce( - _create_parametrized_type_hint, - parametrized_type_hint_getinitargs(obj), - obj=obj - ) - elif name is not None: - Pickler.save_global(self, obj, name=name) - elif not _should_pickle_by_reference(obj, name=name): - self._save_reduce_pickle5(*_dynamic_class_reduce(obj), obj=obj) - else: - Pickler.save_global(self, obj, name=name) - dispatch[type] = save_global - - def save_function(self, obj, name=None): - """ Registered with the dispatch to handle all function types. +from . import cloudpickle - Determines what kind of function obj is (e.g. lambda, defined at - interactive prompt, etc) and handles the pickling appropriately. 
- """ - if _should_pickle_by_reference(obj, name=name): - return Pickler.save_global(self, obj, name=name) - elif PYPY and isinstance(obj.__code__, builtin_code_type): - return self.save_pypy_builtin_func(obj) - else: - return self._save_reduce_pickle5( - *self._dynamic_function_reduce(obj), obj=obj - ) - def save_pypy_builtin_func(self, obj): - """Save pypy equivalent of builtin functions. - PyPy does not have the concept of builtin-functions. Instead, - builtin-functions are simple function instances, but with a - builtin-code attribute. - Most of the time, builtin functions should be pickled by attribute. - But PyPy has flaky support for __qualname__, so some builtin - functions such as float.__new__ will be classified as dynamic. For - this reason only, we created this special routine. Because - builtin-functions are not expected to have closure or globals, - there is no additional hack (compared the one already implemented - in pickle) to protect ourselves from reference cycles. A simple - (reconstructor, newargs, obj.__dict__) tuple is save_reduced. Note - also that PyPy improved their support for __qualname__ in v3.6, so - this routing should be removed when cloudpickle supports only PyPy - 3.6 and later. - """ - rv = (types.FunctionType, (obj.__code__, {}, obj.__name__, - obj.__defaults__, obj.__closure__), - obj.__dict__) - self.save_reduce(*rv, obj=obj) +def __getattr__(name): + return getattr(cloudpickle, name) - dispatch[types.FunctionType] = save_function diff --git a/cloudpickle/compat.py b/cloudpickle/compat.py deleted file mode 100644 index 5e9b52773..000000000 --- a/cloudpickle/compat.py +++ /dev/null @@ -1,18 +0,0 @@ -import sys - - -if sys.version_info < (3, 8): - try: - import pickle5 as pickle # noqa: F401 - from pickle5 import Pickler # noqa: F401 - except ImportError: - import pickle # noqa: F401 - - # Use the Python pickler for old CPython versions - from pickle import _Pickler as Pickler # noqa: F401 -else: - import pickle # noqa: F401 - - # Pickler will the C implementation in CPython and the Python - # implementation in PyPy - from pickle import Pickler # noqa: F401 diff --git a/dev-requirements.txt b/dev-requirements.txt index 53e56a1c0..aa5db9500 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,10 +1,8 @@ -# Dependencies for running the tests with py.test -flake8 +# Dependencies for running the tests with pytest +ruff pytest pytest-cov psutil -# To test on older Python versions -pickle5 >=0.0.11 ; python_version == '3.7' and python_implementation == 'CPython' # To be able to test tornado coroutines tornado # To be able to test numpy specific things diff --git a/setup.py b/setup.py index a96140d6d..8b1021aec 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- import os import re @@ -12,7 +11,7 @@ # Function to parse __version__ in `cloudpickle/__init__.py` def find_version(): here = os.path.abspath(os.path.dirname(__file__)) - with open(os.path.join(here, 'cloudpickle', '__init__.py'), 'r') as fp: + with open(os.path.join(here, 'cloudpickle', '__init__.py')) as fp: version_file = fp.read() version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) @@ -21,7 +20,7 @@ def find_version(): raise RuntimeError("Unable to find version string.") -dist = setup( +setup( name='cloudpickle', version=find_version(), description='Extended pickling support for Python objects', @@ -39,10 +38,11 @@ def find_version(): 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 
'Operating System :: MacOS :: MacOS X', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Libraries :: Python Modules', @@ -50,5 +50,5 @@ def find_version(): 'Topic :: System :: Distributed Computing', ], test_suite='tests', - python_requires='>=3.6', + python_requires='>=3.8', ) diff --git a/tests/cloudpickle_file_test.py b/tests/cloudpickle_file_test.py index 25fd9844c..b742d174a 100644 --- a/tests/cloudpickle_file_test.py +++ b/tests/cloudpickle_file_test.py @@ -3,11 +3,11 @@ import sys import tempfile import unittest +import pickle import pytest import cloudpickle -from cloudpickle.compat import pickle class CloudPickleFileTests(unittest.TestCase): @@ -25,7 +25,7 @@ def tearDown(self): def test_empty_file(self): # Empty file open(self.tmpfilepath, 'w').close() - with open(self.tmpfilepath, 'r') as f: + with open(self.tmpfilepath) as f: self.assertEqual('', pickle.loads(cloudpickle.dumps(f)).read()) os.remove(self.tmpfilepath) @@ -43,7 +43,7 @@ def test_r_mode(self): with open(self.tmpfilepath, 'w') as f: f.write(self.teststring) # Open for reading - with open(self.tmpfilepath, 'r') as f: + with open(self.tmpfilepath) as f: new_f = pickle.loads(cloudpickle.dumps(f)) self.assertEqual(self.teststring, new_f.read()) os.remove(self.tmpfilepath) diff --git a/tests/cloudpickle_test.py b/tests/cloudpickle_test.py index dc60d782f..0d1d32402 100644 --- a/tests/cloudpickle_test.py +++ b/tests/cloudpickle_test.py @@ -26,6 +26,7 @@ import enum import typing from functools import wraps +import pickle import pytest @@ -38,19 +39,12 @@ np = None spp = None -try: - # Ditto for Tornado - import tornado -except ImportError: - tornado = None - import cloudpickle -from cloudpickle.compat import pickle from cloudpickle import register_pickle_by_value from cloudpickle import unregister_pickle_by_value from cloudpickle import list_registry_pickle_by_value from cloudpickle.cloudpickle import _should_pickle_by_reference -from cloudpickle.cloudpickle import _make_empty_cell, cell_set +from cloudpickle.cloudpickle import _make_empty_cell from cloudpickle.cloudpickle import _extract_class_dict, _whichmodule from cloudpickle.cloudpickle import _lookup_module_and_qualname @@ -59,8 +53,6 @@ from .testutils import assert_run_python_script from .testutils import subprocess_worker -from _cloudpickle_testpkg import relative_imports_factory - _TEST_GLOBAL_VARIABLE = "default_value" _TEST_GLOBAL_VARIABLE2 = "another_value" @@ -133,7 +125,7 @@ def tearDown(self): @pytest.mark.skipif( platform.python_implementation() != "CPython" or - (sys.version_info >= (3, 8, 0) and sys.version_info < (3, 8, 2)), + sys.version_info < (3, 8, 2), reason="Underlying bug fixed upstream starting Python 3.8.2") def test_reducer_override_reference_cycle(self): # Early versions of Python 3.8 introduced a reference cycle between a @@ -207,17 +199,6 @@ def foo(): self.assertTrue("exit" in foo.__code__.co_names) cloudpickle.dumps(foo) - def test_buffer(self): - try: - buffer_obj = buffer("Hello") - buffer_clone = pickle_depickle(buffer_obj, protocol=self.protocol) - self.assertEqual(buffer_clone, str(buffer_obj)) - buffer_obj = buffer("Hello", 2, 3) 
- buffer_clone = pickle_depickle(buffer_obj, protocol=self.protocol) - self.assertEqual(buffer_clone, str(buffer_obj)) - except NameError: # Python 3 does no longer support buffers - pass - def test_memoryview(self): buffer_obj = memoryview(b"Hello") self.assertEqual(pickle_depickle(buffer_obj, protocol=self.protocol), @@ -245,19 +226,19 @@ def test_odict_keys(self): keys = collections.OrderedDict([("a", 1), ("b", 2)]).keys() results = pickle_depickle(keys) self.assertEqual(results, keys) - assert type(keys) == type(results) + assert type(keys) is type(results) def test_odict_values(self): values = collections.OrderedDict([("a", 1), ("b", 2)]).values() results = pickle_depickle(values) self.assertEqual(list(results), list(values)) - assert type(values) == type(results) + assert type(values) is type(results) def test_odict_items(self): items = collections.OrderedDict([("a", 1), ("b", 2)]).items() results = pickle_depickle(items) self.assertEqual(results, items) - assert type(items) == type(results) + assert type(items) is type(results) def test_sliced_and_non_contiguous_memoryview(self): buffer_obj = memoryview(b"Hello!" * 3)[2:15:2] @@ -275,8 +256,8 @@ def test_lambda(self): def test_nested_lambdas(self): a, b = 1, 2 - f1 = lambda x: x + a - f2 = lambda x: f1(x) // b + f1 = lambda x: x + a # noqa: E731 + f2 = lambda x: f1(x) // b # noqa: E731 self.assertEqual(pickle_depickle(f2, protocol=self.protocol)(1), 1) def test_recursive_closure(self): @@ -298,8 +279,7 @@ def g(n): def test_closure_none_is_preserved(self): def f(): - """a function with no closure cells - """ + """A function with no closure cells""" self.assertTrue( f.__closure__ is None, @@ -397,10 +377,12 @@ def some_function(x, y): return (x + y) / LOCAL_CONSTANT # pickle the function definition - self.assertEqual(pickle_depickle(some_function, protocol=self.protocol)(41, 1), 1) - self.assertEqual(pickle_depickle(some_function, protocol=self.protocol)(81, 3), 2) + result = pickle_depickle(some_function, protocol=self.protocol)(41, 1) + assert result == 1 + result = pickle_depickle(some_function, protocol=self.protocol)(81, 3) + assert result == 2 - hidden_constant = lambda: LOCAL_CONSTANT + hidden_constant = lambda: LOCAL_CONSTANT # noqa: E731 class SomeClass: """Overly complicated class with nested references to symbols""" @@ -476,12 +458,11 @@ def test_load_namespace(self): def test_generator(self): def some_generator(cnt): - for i in range(cnt): - yield i + yield from range(cnt) gen2 = pickle_depickle(some_generator, protocol=self.protocol) - assert type(gen2(3)) == type(some_generator(3)) + assert isinstance(gen2(3), type(some_generator(3))) assert list(gen2(3)) == list(range(3)) def test_classmethod(self): @@ -489,6 +470,7 @@ class A: @staticmethod def test_sm(): return "sm" + @classmethod def test_cm(cls): return "cm" @@ -528,7 +510,7 @@ def test_module(self): pickle_clone = pickle_depickle(pickle, protocol=self.protocol) self.assertEqual(pickle, pickle_clone) - def test_dynamic_module(self): + def _check_dynamic_module(self, mod): mod = types.ModuleType('mod') code = ''' x = 1 @@ -565,6 +547,18 @@ def method(self, x): finally: sys.modules.pop('mod', None) + def test_dynamic_module(self): + mod = types.ModuleType('mod') + assert mod.__package__ is None + self._check_dynamic_module(mod) + + def test_dynamic_module_no_package(self): + # non-regression test for #116 + mod = types.ModuleType('mod') + del mod.__package__ + assert not hasattr(mod, '__package__') + self._check_dynamic_module(mod) + def 
test_module_locals_behavior(self): # Makes sure that a local function defined in another module is # correctly serialized. This notably checks that the globals are @@ -573,7 +567,7 @@ def test_module_locals_behavior(self): pickled_func_path = os.path.join(self.tmpdir, 'local_func_g.pkl') child_process_script = ''' - from cloudpickle.compat import pickle + import pickle import gc with open("{pickled_func_path}", 'rb') as f: func = pickle.load(f) @@ -664,7 +658,7 @@ def test_load_dynamic_module_in_grandchild_process(self): child_process_module_file = os.path.join( self.tmpdir, 'dynamic_module_from_child_process.pkl') child_process_script = ''' - from cloudpickle.compat import pickle + import pickle import textwrap import cloudpickle @@ -684,7 +678,7 @@ def test_load_dynamic_module_in_grandchild_process(self): # The script ran by the process created by the child process child_of_child_process_script = """ ''' - from cloudpickle.compat import pickle + import pickle with open('{child_process_module_file}','rb') as fid: mod = pickle.load(fid) ''' """ @@ -739,7 +733,7 @@ def my_small_function(x, y): assert b'math' not in b def test_module_importability(self): - from cloudpickle.compat import pickle + import pickle import os.path import collections import collections.abc @@ -760,15 +754,13 @@ def test_module_importability(self): # their parent modules are considered importable by cloudpickle. # See the mod_with_dynamic_submodule documentation for more # details of this use case. - import _cloudpickle_testpkg.mod.dynamic_submodule as m + m = pytest.importorskip("_cloudpickle_testpkg.mod.dynamic_submodule") # noqa F841 assert _should_pickle_by_reference(m) assert pickle_depickle(m, protocol=self.protocol) is m # Check for similar behavior for a module that cannot be imported by # attribute lookup. from _cloudpickle_testpkg.mod import dynamic_submodule_two as m2 - # Note: import _cloudpickle_testpkg.mod.dynamic_submodule_two as m2 - # works only for Python 3.7+ assert _should_pickle_by_reference(m2) assert pickle_depickle(m2, protocol=self.protocol) is m2 @@ -933,7 +925,7 @@ def test_builtin_classmethod_descriptor(self): # __func__ attribute instead. We do not test the the identity of # the functions as __func__ attributes of classmethods are not # pickleable and must be reconstructed at depickling time. 
- assert type(depickled_clsdict_meth) == type(clsdict_clsmethod) + assert type(depickled_clsdict_meth) is type(clsdict_clsmethod) assert depickled_clsdict_meth.__func__( float, arg) == clsdict_clsmethod.__func__(float, arg) @@ -984,11 +976,10 @@ def test_builtin_staticmethod(self): assert depickled_clsdict_meth.__func__ is clsdict_staticmethod.__func__ type(depickled_clsdict_meth) is type(clsdict_staticmethod) - @pytest.mark.skipif(tornado is None, - reason="test needs Tornado installed") def test_tornado_coroutine(self): # Pickling a locally defined coroutine function - from tornado import gen, ioloop + gen = pytest.importorskip('tornado.gen') + ioloop = pytest.importorskip('tornado.ioloop') @gen.coroutine def f(x, y): @@ -997,16 +988,19 @@ def f(x, y): @gen.coroutine def g(y): - res = yield f(0.01, y) + res = yield f(0.01, y) # noqa: F821 raise gen.Return(res + 1) + with pytest.warns(DeprecationWarning): + assert cloudpickle.is_tornado_coroutine(g) + data = cloudpickle.dumps([g, g], protocol=self.protocol) - f = g = None + del f, g g2, g3 = pickle.loads(data) - self.assertTrue(g2 is g3) - loop = ioloop.IOLoop.current() + assert g2 is g3 + loop = ioloop.IOLoop(make_current=False) res = loop.run_sync(functools.partial(g2, 5)) - self.assertEqual(res, 7) + assert res == 7 @pytest.mark.skipif( (3, 11, 0, 'beta') <= sys.version_info < (3, 11, 0, 'beta', 4), @@ -1043,12 +1037,14 @@ def test_submodule(self): # Choose any module NOT imported by __init__ of its parent package # examples in standard library include: - # - http.cookies, unittest.mock, curses.textpad, xml.etree.ElementTree - - global xml # imitate performing this import at top of file + # http.cookies, unittest.mock, curses.textpad, xml.etree.ElementTree + import xml import xml.etree.ElementTree + def example(): - x = xml.etree.ElementTree.Comment # potential AttributeError + _ = xml.etree.ElementTree.Comment # noqa: F821 + + example() # smoke test s = cloudpickle.dumps(example, protocol=self.protocol) @@ -1060,16 +1056,19 @@ def example(): # deserialise f = pickle.loads(s) - f() # perform test for error + f() # smoke test def test_submodule_closure(self): - # Same as test_submodule except the package is not a global + # Same as test_submodule except the xml package has not been imported def scope(): import xml.etree.ElementTree + def example(): - x = xml.etree.ElementTree.Comment # potential AttributeError + _ = xml.etree.ElementTree.Comment # potential AttributeError return example + example = scope() + example() # smoke test s = cloudpickle.dumps(example, protocol=self.protocol) @@ -1079,13 +1078,13 @@ def example(): del sys.modules[item] f = cloudpickle.loads(s) - f() # test + f() # smoke test def test_multiprocess(self): # running a function pickled by another process (a la dask.distributed) def scope(): def example(): - x = xml.etree.ElementTree.Comment + _ = xml.etree.ElementTree.Comment return example global xml import xml.etree.ElementTree @@ -1096,7 +1095,7 @@ def example(): # choose "subprocess" rather than "multiprocessing" because the latter # library uses fork to preserve the parent environment. 
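The surrounding submodule tests all lean on the `_find_imported_submodules` / `_cloudpickle_submodules` mechanism: submodules reachable from a pickled function are recorded in its state, so unpickling re-imports them. A condensed sketch:

    import pickle
    import cloudpickle
    import xml.etree.ElementTree

    def get_comment():
        # xml.etree.ElementTree is recorded as a required submodule, so
        # loading the pickle re-imports it, even in a fresh interpreter
        # where only the pickle payload is available.
        return xml.etree.ElementTree.Comment

    payload = cloudpickle.dumps(get_comment)
    assert pickle.loads(payload)() is xml.etree.ElementTree.Comment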
command = ("import base64; " - "from cloudpickle.compat import pickle; " + "import pickle; " "pickle.loads(base64.b32decode('" + base64.b32encode(s).decode('ascii') + "'))()") @@ -1106,11 +1105,13 @@ def test_import(self): # like test_multiprocess except subpackage modules referenced directly # (unlike test_submodule) global etree + def scope(): import xml.etree as foobar + def example(): - x = etree.Comment - x = foobar.ElementTree + _ = etree.Comment + _ = foobar.ElementTree return example example = scope() import xml.etree.ElementTree as etree @@ -1118,15 +1119,18 @@ def example(): s = cloudpickle.dumps(example, protocol=self.protocol) command = ("import base64; " - "from cloudpickle.compat import pickle; " - "pickle.loads(base64.b32decode('" + + "from pickle import loads; " + "loads(base64.b32decode('" + base64.b32encode(s).decode('ascii') + "'))()") assert not subprocess.call([sys.executable, '-c', command]) def test_multiprocessing_lock_raises(self): lock = multiprocessing.Lock() - with pytest.raises(RuntimeError, match="only be shared between processes through inheritance"): + with pytest.raises( + RuntimeError, + match="only be shared between processes through inheritance" + ): cloudpickle.dumps(lock) def test_cell_manipulation(self): @@ -1136,11 +1140,8 @@ def test_cell_manipulation(self): cell.cell_contents ob = object() - cell_set(cell, ob) - self.assertTrue( - cell.cell_contents is ob, - msg='cell contents not set correctly', - ) + cell.cell_contents = ob + assert cell.cell_contents is ob def check_logger(self, name): logger = logging.getLogger(name) @@ -1497,33 +1498,13 @@ def foo(): finally: sys.modules.pop("_faulty_module", None) - def test_dynamic_pytest_module(self): - # Test case for pull request https://github.com/cloudpipe/cloudpickle/pull/116 - - # This test does not longer make sense with pytest >= 7.2 - py = pytest.importorskip("py") - if not hasattr(py, "builtin"): - pytest.skip("py.builtin is not available") - - def f(): - s = py.builtin.set([1]) - return s.pop() - - # some setup is required to allow pytest apimodules to be correctly - # serializable. 
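The removed test below registered an extra reducer on `CloudPickler.dispatch_table`; that extension pattern still works with the `ChainMap`-based `dispatch_table` defined earlier in this patch. A sketch, where `Opaque` and `_opaque_reduce` are made-up names for illustration:

    import pickle
    import cloudpickle

    class Opaque:
        # Hypothetical stand-in for a type needing a custom reducer.
        def __init__(self, name):
            self.name = name

    def _opaque_reduce(obj):
        # Rebuild the instance from its name alone.
        return Opaque, (obj.name,)

    # Writes go to the private first map of the ChainMap.
    cloudpickle.Pickler.dispatch_table[Opaque] = _opaque_reduce
    clone = pickle.loads(cloudpickle.dumps(Opaque("x")))
    assert clone.name == "x"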
- from cloudpickle import CloudPickler - from cloudpickle import cloudpickle_fast as cp_fast - CloudPickler.dispatch_table[type(py.builtin)] = cp_fast._module_reduce - - g = cloudpickle.loads(cloudpickle.dumps(f, protocol=self.protocol)) - - result = g() - self.assertEqual(1, result) - def test_function_module_name(self): - func = lambda x: x - cloned = pickle_depickle(func, protocol=self.protocol) - self.assertEqual(cloned.__module__, func.__module__) + def local_func(x): + return x + + for func in [local_func, lambda x: x]: + cloned = pickle_depickle(func, protocol=self.protocol) + self.assertEqual(cloned.__module__, func.__module__) def test_function_qualname(self): def func(x): @@ -1558,8 +1539,6 @@ def read_write_value(self): def read_write_value(self, value): self._read_write_value = value - - my_object = MyObject() assert my_object.read_only_value == 1 @@ -1583,7 +1562,6 @@ def read_write_value(self, value): assert depickled_obj.read_write_value == 3 type(depickled_obj).read_only_value.__doc__ == "A read-only attribute" - def test_namedtuple(self): MyTuple = collections.namedtuple('MyTuple', ['a', 'b', 'c']) t1 = MyTuple(1, 2, 3) @@ -2018,14 +1996,14 @@ def process_data(): growth = w.memsize() - reference_size # For some reason, the memory growth after processing 100MB of - # data is ~10MB on MacOS, and ~1MB on Linux, so the upper bound on + # data is ~50MB on MacOS, and ~1MB on Linux, so the upper bound on # memory growth we use is only tight for MacOS. However, - # - 10MB is still 10x lower than the expected memory growth in case + # - 50MB is still 2x lower than the expected memory growth in case # of a leak (which would be the total size of the processed data, # 100MB) # - the memory usage growth does not increase if using 10000 # iterations instead of 100 as used now (100x more data) - assert growth < 1.5e7, growth + assert growth < 5e7, growth """.format(protocol=self.protocol) assert_run_python_script(code) @@ -2230,6 +2208,8 @@ def test_relative_import_inside_function(self): # Make sure relative imports inside round-tripped functions is not # broken. This was a bug in cloudpickle versions <= 0.5.3 and was # re-introduced in 0.8.0. 
@@ -1558,8 +1539,6 @@ def read_write_value(self):
             def read_write_value(self, value):
                 self._read_write_value = value
-
-
         my_object = MyObject()
         assert my_object.read_only_value == 1
@@ -1583,7 +1562,6 @@ def read_write_value(self, value):
         assert depickled_obj.read_write_value == 3
         type(depickled_obj).read_only_value.__doc__ == "A read-only attribute"

-
     def test_namedtuple(self):
         MyTuple = collections.namedtuple('MyTuple', ['a', 'b', 'c'])
         t1 = MyTuple(1, 2, 3)
@@ -2018,14 +1996,14 @@ def process_data():
             growth = w.memsize() - reference_size

             # For some reason, the memory growth after processing 100MB of
-            # data is ~10MB on MacOS, and ~1MB on Linux, so the upper bound on
+            # data is ~50MB on MacOS, and ~1MB on Linux, so the upper bound on
             # memory growth we use is only tight for MacOS. However,
-            # - 10MB is still 10x lower than the expected memory growth in case
+            # - 50MB is still 2x lower than the expected memory growth in case
             # of a leak (which would be the total size of the processed data,
             # 100MB)
             # - the memory usage growth does not increase if using 10000
             # iterations instead of 100 as used now (100x more data)
-            assert growth < 1.5e7, growth
+            assert growth < 5e7, growth
        """.format(protocol=self.protocol)
         assert_run_python_script(code)
@@ -2230,6 +2208,8 @@ def test_relative_import_inside_function(self):
         # Make sure relative imports inside round-tripped functions is not
         # broken. This was a bug in cloudpickle versions <= 0.5.3 and was
         # re-introduced in 0.8.0.
+        _cloudpickle_testpkg = pytest.importorskip("_cloudpickle_testpkg")
+        relative_imports_factory = _cloudpickle_testpkg.relative_imports_factory
         f, g = relative_imports_factory()
         for func, source in zip([f, g], ["module", "package"]):
             # Make sure relative imports are initially working
@@ -2279,7 +2259,8 @@ def f(a, /, b=1):
     def test___reduce___returns_string(self):
         # Non regression test for objects with a __reduce__ method returning a
         # string, meaning "save by attribute using save_global"
-        from _cloudpickle_testpkg import some_singleton
+        _cloudpickle_testpkg = pytest.importorskip("_cloudpickle_testpkg")
+        some_singleton = _cloudpickle_testpkg.some_singleton
         assert some_singleton.__reduce__() == "some_singleton"
         depickled_singleton = pickle_depickle(
             some_singleton, protocol=self.protocol)
@@ -2348,9 +2329,9 @@ def test_pickle_dynamic_typevar_memoization(self):
         assert depickled_T1 is depickled_T2

     def test_pickle_importable_typevar(self):
-        from _cloudpickle_testpkg import T
-        T1 = pickle_depickle(T, protocol=self.protocol)
-        assert T1 is T
+        _cloudpickle_testpkg = pytest.importorskip("_cloudpickle_testpkg")
+        T1 = pickle_depickle(_cloudpickle_testpkg.T, protocol=self.protocol)
+        assert T1 is _cloudpickle_testpkg.T

         # Standard Library TypeVar
         from typing import AnyStr
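As background for the TypeVar hunks above: a TypeVar created at runtime cannot be found by import, so cloudpickle serializes it by value, whereas a TypeVar importable from an installed package (such as _cloudpickle_testpkg.T) is pickled by reference and depickles to the very same object. A small sketch of the by-value side:

    import pickle
    import typing

    import cloudpickle

    # Defined at runtime: no importable module/qualname, so pickled by value.
    T = typing.TypeVar('T')
    payload = cloudpickle.dumps((T, T))
    T1, T2 = pickle.loads(payload)
    # The pickle memo collapses both references to a single depickled object,
    # which is the behavior test_pickle_dynamic_typevar_memoization relies on.
    assert T1 is T2
    assert T1.__name__ == 'T'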
@@ -2370,28 +2351,21 @@ class C(typing.Generic[T]):

         with subprocess_worker(protocol=self.protocol) as worker:

-            def check_generic(generic, origin, type_value, use_args):
+            def check_generic(generic, origin, type_value):
                 assert generic.__origin__ is origin

                 assert len(origin.__orig_bases__) == 1
                 ob = origin.__orig_bases__[0]
                 assert ob.__origin__ is typing.Generic

-                if use_args:
-                    assert len(generic.__args__) == 1
-                    assert generic.__args__[0] is type_value
-                else:
-                    assert len(generic.__parameters__) == 1
-                    assert generic.__parameters__[0] is type_value
+                assert len(generic.__args__) == 1
+                assert generic.__args__[0] is type_value
                 assert len(ob.__parameters__) == 1

                 return "ok"

-            # backward-compat for old Python 3.5 versions that sometimes relies
-            # on __parameters__
-            use_args = getattr(C[int], '__args__', ()) != ()
-            assert check_generic(C[int], C, int, use_args) == "ok"
-            assert worker.run(check_generic, C[int], C, int, use_args) == "ok"
+            assert check_generic(C[int], C, int) == "ok"
+            assert worker.run(check_generic, C[int], C, int) == "ok"

     def test_generic_subclass(self):
         T = typing.TypeVar('T')
@@ -2459,18 +2433,6 @@ def check_annotations(obj, expected_type, expected_type_str):
                 worker.run(check_annotations, obj, type_, "type_") == "ok"
             )

-    def test_generic_extensions_literal(self):
-        typing_extensions = pytest.importorskip('typing_extensions')
-        for obj in [typing_extensions.Literal, typing_extensions.Literal['a']]:
-            depickled_obj = pickle_depickle(obj, protocol=self.protocol)
-            assert depickled_obj == obj
-
-    def test_generic_extensions_final(self):
-        typing_extensions = pytest.importorskip('typing_extensions')
-        for obj in [typing_extensions.Final, typing_extensions.Final[int]]:
-            depickled_obj = pickle_depickle(obj, protocol=self.protocol)
-            assert depickled_obj == obj
-
     def test_class_annotations(self):
         class C:
             pass
@@ -2675,11 +2637,11 @@ def test_pickle_constructs_from_installed_packages_registered_for_pickling_by_va
         ):
             for package_or_module in ["package", "module"]:
                 if package_or_module == "package":
-                    import _cloudpickle_testpkg as m
+                    m = pytest.importorskip("_cloudpickle_testpkg")
                     f = m.package_function_with_global
                     _original_global = m.global_variable
                 elif package_or_module == "module":
-                    import _cloudpickle_testpkg.mod as m
+                    m = pytest.importorskip("_cloudpickle_testpkg.mod")
                     f = m.module_function_with_global
                     _original_global = m.global_variable
             try:
@@ -2707,8 +2669,8 @@ def test_pickle_various_versions_of_the_same_function_with_different_pickling_me
         # pickled in a different way - by value and/or by reference) can
         # peacefully co-exist (e.g. without globals interaction) in a remote
         # worker.
-        import _cloudpickle_testpkg
-        from _cloudpickle_testpkg import package_function_with_global as f
+        _cloudpickle_testpkg = pytest.importorskip("_cloudpickle_testpkg")
+        f = _cloudpickle_testpkg.package_function_with_global
         _original_global = _cloudpickle_testpkg.global_variable

         def _create_registry():
@@ -2745,10 +2707,6 @@ def _call_from_registry(k):
             if "_cloudpickle_testpkg" in list_registry_pickle_by_value():
                 unregister_pickle_by_value(_cloudpickle_testpkg)

-    @pytest.mark.skipif(
-        sys.version_info < (3, 7),
-        reason="Determinism can only be guaranteed for Python 3.7+"
-    )
     def test_deterministic_pickle_bytes_for_function(self):
         # Ensure that functions with references to several global names are
         # pickled to fixed bytes that do not depend on the PYTHONHASHSEED of
@@ -2864,7 +2822,7 @@ def test_lookup_module_and_qualname_dynamic_typevar():

 def test_lookup_module_and_qualname_importable_typevar():
-    import _cloudpickle_testpkg
+    _cloudpickle_testpkg = pytest.importorskip("_cloudpickle_testpkg")
     T = _cloudpickle_testpkg.T
     module_and_name = _lookup_module_and_qualname(T, name=T.__name__)
     assert module_and_name is not None
@@ -2883,8 +2841,8 @@ def test_lookup_module_and_qualname_stdlib_typevar():

 def test_register_pickle_by_value():
-    import _cloudpickle_testpkg as pkg
-    import _cloudpickle_testpkg.mod as mod
+    pkg = pytest.importorskip("_cloudpickle_testpkg")
+    mod = pytest.importorskip("_cloudpickle_testpkg.mod")

     assert list_registry_pickle_by_value() == set()
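The registration API exercised by the last hunks above is public cloudpickle API. A short sketch, using xml.dom purely as a stand-in for an installed package like _cloudpickle_testpkg:

    import cloudpickle
    import xml.dom as demo_module

    cloudpickle.register_pickle_by_value(demo_module)
    assert 'xml.dom' in cloudpickle.list_registry_pickle_by_value()
    # While registered, functions and classes from the module are embedded
    # in the payload instead of being referenced by import path, so the
    # loading side does not need the package installed.
    cloudpickle.unregister_pickle_by_value(demo_module)
    assert 'xml.dom' not in cloudpickle.list_registry_pickle_by_value()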
diff --git a/tests/cloudpickle_testpkg/setup.py b/tests/cloudpickle_testpkg/setup.py
index a503b8d37..5cb49f907 100644
--- a/tests/cloudpickle_testpkg/setup.py
+++ b/tests/cloudpickle_testpkg/setup.py
@@ -4,7 +4,7 @@
 from distutils.core import setup


-dist = setup(
+setup(
     name='cloudpickle_testpkg',
     version='0.0.0',
     description='Package used only for cloudpickle testing purposes',
@@ -12,5 +12,5 @@
     author_email='cloudpipe@googlegroups.com',
     license='BSD 3-Clause License',
     packages=['_cloudpickle_testpkg'],
-    python_requires='>=3.5',
+    python_requires='>=3.8',
 )
diff --git a/tests/generate_old_pickles.py b/tests/generate_old_pickles.py
index c5a1d1c44..d91aad6ef 100644
--- a/tests/generate_old_pickles.py
+++ b/tests/generate_old_pickles.py
@@ -89,3 +89,7 @@ def add(x: MyClass[int], y: MyClass[int]):
     # Locally defined closure
     nested_function = nested_function_factory()
     dump_obj(nested_function, "nested_function.pkl")
+
+    # Store the cloudpickle version used to generate the pickles
+    version_file = PICKLE_DIRECTORY / "cloudpickle_version.txt"
+    version_file.write_text(cloudpickle.__version__)

[Binary GIT patch data omitted from this excerpt: the patch adds new pickle fixtures (simple_func.pkl, simple_class.pkl, simple_enum.pkl, simple_module.pkl, nested_function.pkl, function_with_type_hints.pkl, class_with_type_hints.pkl) plus a cloudpickle_version.txt recording the generating release ("2.2.1") under tests/old_pickles/cpython_39, cpython_310 and cpython_311; regenerates the cpython_38 fixtures; and deletes the fixtures for the EOLed cpython_36, cpython_37 and pypy_36 runtimes. The excerpt is truncated inside the final pypy_36 deletion.]
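For completeness, a rough sketch of how the regenerated fixtures are meant to be consumed; the real logic lives in tests/test_backward_compat.py, and the directory name below is illustrative:

    import pickle
    from pathlib import Path

    # Old payloads produced by cloudpickle 2.2.1 should still load under the
    # current cloudpickle and Python, using only the stock pickle module.
    fixture = Path('tests/old_pickles/cpython_311/simple_func.pkl')
    simple_func = pickle.loads(fixture.read_bytes())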