diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index c9b7981b..6a56b47e 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -15,15 +15,15 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: 3.11
-      - name: Install ruff
+      - name: Install pre-commit
        shell: bash
        run: |
           python -V
-          python -m pip install ruff
-      - name: Run ruff
+          python -m pip install pre-commit
+      - name: Run pre-commit linters
        shell: bash
        run: |
-          ruff .
+          pre-commit run --files *
 
   build:
     strategy:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..d07f8329
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,16 @@
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.5.0
+  hooks:
+  - id: check-yaml
+  - id: end-of-file-fixer
+  - id: trailing-whitespace
+- repo: https://github.com/psf/black
+  rev: 23.9.1
+  hooks:
+  - id: black
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  rev: v0.0.292
+  hooks:
+  - id: ruff
+    args: ["--fix", "--show-source"]
diff --git a/CHANGES.md b/CHANGES.md
index 2041d0db..e9d3f45f 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,5 @@
-3.0.0 (development)
-===================
+3.0.0
+=====
 
 - Officially support Python 3.12 and drop support for Python 3.6 and 3.7.
   Dropping support for older Python versions made it possible to simplify the
@@ -11,6 +11,12 @@
   ([issue #386](https://github.com/cloudpipe/cloudpickle/issues/386),
   [PR #513](https://github.com/cloudpipe/cloudpickle/pull/513))
 
+- Any color you like as long as it's black.
+  ([PR #521](https://github.com/cloudpipe/cloudpickle/pull/521))
+
+- Drop `setup.py` and `setuptools` in favor of `pyproject.toml` and `flit`.
+  ([PR #521](https://github.com/cloudpipe/cloudpickle/pull/521))
+
 2.2.1
 =====
 
@@ -140,7 +146,7 @@
 
 - Fix a bug affecting cloudpickle when non-modules objects are added into
   sys.modules ([PR #326](https://github.com/cloudpipe/cloudpickle/pull/326)).
-  
+
 - Fix a regression in cloudpickle and python3.8 causing an error when trying
   to pickle property objects.
   ([PR #329](https://github.com/cloudpipe/cloudpickle/pull/329)).
diff --git a/cloudpickle/__init__.py b/cloudpickle/__init__.py
index c88e5865..58a8d086 100644
--- a/cloudpickle/__init__.py
+++ b/cloudpickle/__init__.py
@@ -1,6 +1,9 @@
-from cloudpickle.cloudpickle import *  # noqa
+from . import cloudpickle
+from .cloudpickle import *  # noqa
 
-__version__ = "3.0.0.dev0"
+__doc__ = cloudpickle.__doc__
+
+__version__ = "3.0.0"
 
 __all__ = [  # noqa
     "__version__",
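[Editor's note, not part of the patch: the `__init__.py` hunk above re-exports the submodule docstring and finalizes the 3.0.0 version string; the new flit metadata further down derives the package summary and version from exactly these two attributes. A minimal smoke-test sketch of the re-exported API, assuming cloudpickle 3.0.0 is installed:]

    import pickle

    import cloudpickle

    # The package now mirrors the submodule docstring and final version.
    assert cloudpickle.__doc__ == cloudpickle.cloudpickle.__doc__
    assert cloudpickle.__version__ == "3.0.0"

    # Locally defined functions are pickled by value...
    double = lambda x: 2 * x
    assert pickle.loads(cloudpickle.dumps(double))(21) == 42

    # ...while importable module attributes are pickled by reference,
    # so loading gives back the very same object.
    import math

    assert pickle.loads(cloudpickle.dumps(math.sqrt)) is math.sqrt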
""" if not isinstance(module, types.ModuleType): - raise ValueError( - f"Input should be a module object, got {str(module)} instead" - ) + raise ValueError(f"Input should be a module object, got {str(module)} instead") # In the future, cloudpickle may need a way to access any module registered # for pickling by value in order to introspect relative imports inside # functions pickled by value. (see @@ -160,7 +159,7 @@ def register_pickle_by_value(module): if module.__name__ not in sys.modules: raise ValueError( f"{module} was not imported correctly, have you used an " - f"`import` statement to access it?" + "`import` statement to access it?" ) _PICKLE_BY_VALUE_MODULES.add(module.__name__) @@ -168,9 +167,7 @@ def register_pickle_by_value(module): def unregister_pickle_by_value(module): """Unregister that the input module should be pickled by value.""" if not isinstance(module, types.ModuleType): - raise ValueError( - f"Input should be a module object, got {str(module)} instead" - ) + raise ValueError(f"Input should be a module object, got {str(module)} instead") if module.__name__ not in _PICKLE_BY_VALUE_MODULES: raise ValueError(f"{module} is not registered for pickle by value") else: @@ -204,7 +201,7 @@ def _whichmodule(obj, name): - Errors arising during module introspection are ignored, as those errors are considered unwanted side effects. """ - module_name = getattr(obj, '__module__', None) + module_name = getattr(obj, "__module__", None) if module_name is not None: return module_name @@ -215,9 +212,9 @@ def _whichmodule(obj, name): # Some modules such as coverage can inject non-module objects inside # sys.modules if ( - module_name == '__main__' or - module is None or - not isinstance(module, types.ModuleType) + module_name == "__main__" + or module is None + or not isinstance(module, types.ModuleType) ): continue try: @@ -231,17 +228,17 @@ def _whichmodule(obj, name): def _should_pickle_by_reference(obj, name=None): """Test whether an function or a class should be pickled by reference - Pickling by reference means by that the object (typically a function or a - class) is an attribute of a module that is assumed to be importable in the - target Python environment. Loading will therefore rely on importing the - module and then calling `getattr` on it to access the function or class. - - Pickling by reference is the only option to pickle functions and classes - in the standard library. In cloudpickle the alternative option is to - pickle by value (for instance for interactively or locally defined - functions and classes or for attributes of modules that have been - explicitly registered to be pickled by value. - """ + Pickling by reference means by that the object (typically a function or a + class) is an attribute of a module that is assumed to be importable in the + target Python environment. Loading will therefore rely on importing the + module and then calling `getattr` on it to access the function or class. + + Pickling by reference is the only option to pickle functions and classes + in the standard library. In cloudpickle the alternative option is to + pickle by value (for instance for interactively or locally defined + functions and classes or for attributes of modules that have been + explicitly registered to be pickled by value. 
+ """ if isinstance(obj, types.FunctionType) or issubclass(type(obj), type): module_and_name = _lookup_module_and_qualname(obj, name=name) if module_and_name is None: @@ -260,19 +257,18 @@ def _should_pickle_by_reference(obj, name=None): return obj.__name__ in sys.modules else: raise TypeError( - "cannot check importability of {} instances".format( - type(obj).__name__) + "cannot check importability of {} instances".format(type(obj).__name__) ) def _lookup_module_and_qualname(obj, name=None): if name is None: - name = getattr(obj, '__qualname__', None) + name = getattr(obj, "__qualname__", None) if name is None: # pragma: no cover # This used to be needed for Python 2.7 support but is probably not # needed anymore. However we keep the __name__ introspection in case # users of cloudpickle rely on this old behavior for unknown reasons. - name = getattr(obj, '__name__', None) + name = getattr(obj, "__name__", None) module_name = _whichmodule(obj, name) @@ -359,10 +355,13 @@ def func(): subimports = [] # check if any known dependency is an imported package for x in top_level_dependencies: - if (isinstance(x, types.ModuleType) and - hasattr(x, '__package__') and x.__package__): + if ( + isinstance(x, types.ModuleType) + and hasattr(x, "__package__") + and x.__package__ + ): # check if the package has any currently loaded sub-imports - prefix = x.__name__ + '.' + prefix = x.__name__ + "." # A concurrent thread could mutate sys.modules, # make sure we iterate over a copy to avoid exceptions for name in list(sys.modules): @@ -370,16 +369,16 @@ def func(): # sys.modules. if name is not None and name.startswith(prefix): # check whether the function can address the sub-module - tokens = set(name[len(prefix):].split('.')) + tokens = set(name[len(prefix) :].split(".")) if not tokens - set(code.co_names): subimports.append(sys.modules[name]) return subimports # relevant opcodes -STORE_GLOBAL = opcode.opmap['STORE_GLOBAL'] -DELETE_GLOBAL = opcode.opmap['DELETE_GLOBAL'] -LOAD_GLOBAL = opcode.opmap['LOAD_GLOBAL'] +STORE_GLOBAL = opcode.opmap["STORE_GLOBAL"] +DELETE_GLOBAL = opcode.opmap["DELETE_GLOBAL"] +LOAD_GLOBAL = opcode.opmap["LOAD_GLOBAL"] GLOBAL_OPS = (STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL) HAVE_ARGUMENT = dis.HAVE_ARGUMENT EXTENDED_ARG = dis.EXTENDED_ARG @@ -441,9 +440,9 @@ def is_tornado_coroutine(func): "directly instead.", category=DeprecationWarning, ) - if 'tornado.gen' not in sys.modules: + if "tornado.gen" not in sys.modules: return False - gen = sys.modules['tornado.gen'] + gen = sys.modules["tornado.gen"] if not hasattr(gen, "is_coroutine_function"): # Tornado version is too old return False @@ -462,7 +461,7 @@ def subimport(name): def dynamic_subimport(name, vars): mod = types.ModuleType(name) mod.__dict__.update(vars) - mod.__dict__['__builtins__'] = builtins.__dict__ + mod.__dict__["__builtins__"] = builtins.__dict__ return mod @@ -493,6 +492,7 @@ def instance(cls): @instance class _empty_cell_value: """Sentinel for empty closures.""" + @classmethod def __reduce__(cls): return cls.__name__ @@ -508,7 +508,7 @@ def _make_empty_cell(): if False: # trick the compiler into creating an empty cell in our lambda cell = None - raise AssertionError('this route should not be executed') + raise AssertionError("this route should not be executed") return (lambda: cell).__closure__[0] @@ -520,8 +520,9 @@ def _make_cell(value=_empty_cell_value): return cell -def _make_skeleton_class(type_constructor, name, bases, type_kwargs, - class_tracker_id, extra): +def _make_skeleton_class( + type_constructor, 
name, bases, type_kwargs, class_tracker_id, extra +): """Build dynamic class with an empty __dict__ to be filled once memoized If class_tracker_id is not None, try to lookup an existing class definition @@ -533,14 +534,14 @@ class id will also reuse this class definition. forward compatibility shall the need arise. """ skeleton_class = types.new_class( - name, bases, {'metaclass': type_constructor}, - lambda ns: ns.update(type_kwargs) + name, bases, {"metaclass": type_constructor}, lambda ns: ns.update(type_kwargs) ) return _lookup_class_or_track(class_tracker_id, skeleton_class) -def _make_skeleton_enum(bases, name, qualname, members, module, - class_tracker_id, extra): +def _make_skeleton_enum( + bases, name, qualname, members, module, class_tracker_id, extra +): """Build dynamic enum with an empty __dict__ to be filled once memoized The creation of the enum class is inspired by the code of @@ -569,19 +570,24 @@ class id will also reuse this enum definition. return _lookup_class_or_track(class_tracker_id, enum_class) -def _make_typevar(name, bound, constraints, covariant, contravariant, - class_tracker_id): +def _make_typevar(name, bound, constraints, covariant, contravariant, class_tracker_id): tv = typing.TypeVar( - name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant + name, + *constraints, + bound=bound, + covariant=covariant, + contravariant=contravariant, ) return _lookup_class_or_track(class_tracker_id, tv) def _decompose_typevar(obj): return ( - obj.__name__, obj.__bound__, obj.__constraints__, - obj.__covariant__, obj.__contravariant__, + obj.__name__, + obj.__bound__, + obj.__constraints__, + obj.__covariant__, + obj.__contravariant__, _get_or_create_tracker_id(obj), ) @@ -600,16 +606,16 @@ def _typevar_reduce(obj): def _get_bases(typ): - if '__orig_bases__' in getattr(typ, '__dict__', {}): + if "__orig_bases__" in getattr(typ, "__dict__", {}): # For generic types (see PEP 560) # Note that simply checking `hasattr(typ, '__orig_bases__')` is not # correct. Subclasses of a fully-parameterized generic class does not # have `__orig_bases__` defined, but `hasattr(typ, '__orig_bases__')` # will return True because it's defined in the base class. 
- bases_attr = '__orig_bases__' + bases_attr = "__orig_bases__" else: # For regular class objects - bases_attr = '__bases__' + bases_attr = "__bases__" return getattr(typ, bases_attr) @@ -637,23 +643,37 @@ def _make_dict_items(obj, is_ordered=False): # COLLECTION OF OBJECTS __getnewargs__-LIKE METHODS # ------------------------------------------------- + def _class_getnewargs(obj): type_kwargs = {} if "__module__" in obj.__dict__: type_kwargs["__module__"] = obj.__module__ - __dict__ = obj.__dict__.get('__dict__', None) + __dict__ = obj.__dict__.get("__dict__", None) if isinstance(__dict__, property): - type_kwargs['__dict__'] = __dict__ + type_kwargs["__dict__"] = __dict__ - return (type(obj), obj.__name__, _get_bases(obj), type_kwargs, - _get_or_create_tracker_id(obj), None) + return ( + type(obj), + obj.__name__, + _get_bases(obj), + type_kwargs, + _get_or_create_tracker_id(obj), + None, + ) def _enum_getnewargs(obj): members = {e.name: e.value for e in obj} - return (obj.__bases__, obj.__name__, obj.__qualname__, members, - obj.__module__, _get_or_create_tracker_id(obj), None) + return ( + obj.__bases__, + obj.__name__, + obj.__qualname__, + members, + obj.__module__, + _get_or_create_tracker_id(obj), + None, + ) # COLLECTION OF OBJECTS RECONSTRUCTORS @@ -665,6 +685,7 @@ def _file_reconstructor(retval): # COLLECTION OF OBJECTS STATE GETTERS # ----------------------------------- + def _function_getstate(func): # - Put func's dynamic attributes (stored in func.__dict__) in state. These # attributes will be restored at unpickling time using @@ -684,8 +705,7 @@ def _function_getstate(func): } f_globals_ref = _extract_code_globals(func.__code__) - f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in - func.__globals__} + f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in func.__globals__} if func.__closure__ is not None: closure_values = list(map(_get_cell_contents, func.__closure__)) @@ -697,7 +717,8 @@ def _function_getstate(func): # trigger the side effect of importing these modules at unpickling time # (which is necessary for func to work correctly once depickled) slotstate["_cloudpickle_submodules"] = _find_imported_submodules( - func.__code__, itertools.chain(f_globals.values(), closure_values)) + func.__code__, itertools.chain(f_globals.values(), closure_values) + ) slotstate["__globals__"] = f_globals state = func.__dict__ @@ -706,24 +727,23 @@ def _function_getstate(func): def _class_getstate(obj): clsdict = _extract_class_dict(obj) - clsdict.pop('__weakref__', None) + clsdict.pop("__weakref__", None) if issubclass(type(obj), abc.ABCMeta): # If obj is an instance of an ABCMeta subclass, don't pickle the # cache/negative caches populated during isinstance/issubclass # checks, but pickle the list of registered subclasses of obj. 
- clsdict.pop('_abc_cache', None) - clsdict.pop('_abc_negative_cache', None) - clsdict.pop('_abc_negative_cache_version', None) - registry = clsdict.pop('_abc_registry', None) + clsdict.pop("_abc_cache", None) + clsdict.pop("_abc_negative_cache", None) + clsdict.pop("_abc_negative_cache_version", None) + registry = clsdict.pop("_abc_registry", None) if registry is None: # The abc caches and registered subclasses of a # class are bundled into the single _abc_impl attribute - clsdict.pop('_abc_impl', None) + clsdict.pop("_abc_impl", None) (registry, _, _, _) = abc._get_dump(obj) - clsdict["_abc_impl"] = [subclass_weakref() - for subclass_weakref in registry] + clsdict["_abc_impl"] = [subclass_weakref() for subclass_weakref in registry] else: # In the above if clause, registry is a set of weakrefs -- in # this case, registry is a WeakSet @@ -739,7 +759,7 @@ def _class_getstate(obj): for k in obj.__slots__: clsdict.pop(k, None) - clsdict.pop('__dict__', None) # unpicklable property object + clsdict.pop("__dict__", None) # unpicklable property object return (clsdict, {}) @@ -750,9 +770,13 @@ def _enum_getstate(obj): members = {e.name: e.value for e in obj} # Cleanup the clsdict that will be passed to _make_skeleton_enum: # Those attributes are already handled by the metaclass. - for attrname in ["_generate_next_value_", "_member_names_", - "_member_map_", "_member_type_", - "_value2member_map_"]: + for attrname in [ + "_generate_next_value_", + "_member_names_", + "_member_map_", + "_member_type_", + "_value2member_map_", + ]: clsdict.pop(attrname, None) for member in members: clsdict.pop(member) @@ -771,6 +795,7 @@ def _enum_getstate(obj): # obj.__reduce__), some do not. The following methods were created to "fill # these holes". + def _code_reduce(obj): """code object reducer.""" # If you are not sure about the order of arguments, take a look at help @@ -781,45 +806,90 @@ def _code_reduce(obj): # Python 3.11 and later: there are some new attributes # related to the enhanced exceptions. args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, - obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, - obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, - obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable, - obj.co_freevars, obj.co_cellvars, + obj.co_argcount, + obj.co_posonlyargcount, + obj.co_kwonlyargcount, + obj.co_nlocals, + obj.co_stacksize, + obj.co_flags, + obj.co_code, + obj.co_consts, + obj.co_names, + obj.co_varnames, + obj.co_filename, + obj.co_name, + obj.co_qualname, + obj.co_firstlineno, + obj.co_linetable, + obj.co_exceptiontable, + obj.co_freevars, + obj.co_cellvars, ) elif hasattr(obj, "co_linetable"): # Python 3.10 and later: obj.co_lnotab is deprecated and constructor # expects obj.co_linetable instead. 
args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, - obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, - obj.co_varnames, obj.co_filename, obj.co_name, - obj.co_firstlineno, obj.co_linetable, obj.co_freevars, - obj.co_cellvars + obj.co_argcount, + obj.co_posonlyargcount, + obj.co_kwonlyargcount, + obj.co_nlocals, + obj.co_stacksize, + obj.co_flags, + obj.co_code, + obj.co_consts, + obj.co_names, + obj.co_varnames, + obj.co_filename, + obj.co_name, + obj.co_firstlineno, + obj.co_linetable, + obj.co_freevars, + obj.co_cellvars, ) elif hasattr(obj, "co_nmeta"): # pragma: no cover # "nogil" Python: modified attributes from 3.9 args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_framesize, - obj.co_ndefaultargs, obj.co_nmeta, - obj.co_flags, obj.co_code, obj.co_consts, - obj.co_varnames, obj.co_filename, obj.co_name, - obj.co_firstlineno, obj.co_lnotab, obj.co_exc_handlers, - obj.co_jump_table, obj.co_freevars, obj.co_cellvars, - obj.co_free2reg, obj.co_cell2reg + obj.co_argcount, + obj.co_posonlyargcount, + obj.co_kwonlyargcount, + obj.co_nlocals, + obj.co_framesize, + obj.co_ndefaultargs, + obj.co_nmeta, + obj.co_flags, + obj.co_code, + obj.co_consts, + obj.co_varnames, + obj.co_filename, + obj.co_name, + obj.co_firstlineno, + obj.co_lnotab, + obj.co_exc_handlers, + obj.co_jump_table, + obj.co_freevars, + obj.co_cellvars, + obj.co_free2reg, + obj.co_cell2reg, ) else: # Backward compat for 3.8 and 3.9 args = ( - obj.co_argcount, obj.co_posonlyargcount, - obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, - obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, - obj.co_varnames, obj.co_filename, obj.co_name, - obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, - obj.co_cellvars + obj.co_argcount, + obj.co_posonlyargcount, + obj.co_kwonlyargcount, + obj.co_nlocals, + obj.co_stacksize, + obj.co_flags, + obj.co_code, + obj.co_consts, + obj.co_names, + obj.co_varnames, + obj.co_filename, + obj.co_name, + obj.co_firstlineno, + obj.co_lnotab, + obj.co_freevars, + obj.co_cellvars, ) return types.CodeType, args @@ -831,7 +901,7 @@ def _cell_reduce(obj): except ValueError: # cell is empty return _make_empty_cell, () else: - return _make_cell, (obj.cell_contents, ) + return _make_cell, (obj.cell_contents,) def _classmethod_reduce(obj): @@ -856,13 +926,10 @@ def _file_reduce(obj): if obj.closed: raise pickle.PicklingError("Cannot pickle closed files") if hasattr(obj, "isatty") and obj.isatty(): - raise pickle.PicklingError( - "Cannot pickle files that map to tty objects" - ) + raise pickle.PicklingError("Cannot pickle files that map to tty objects") if "r" not in obj.mode and "+" not in obj.mode: raise pickle.PicklingError( - "Cannot pickle files that are not opened for reading: %s" - % obj.mode + "Cannot pickle files that are not opened for reading: %s" % obj.mode ) name = obj.name @@ -907,7 +974,7 @@ def _module_reduce(obj): # reason, we do not attempt to pickle the "__builtins__" entry, and # restore a default value for it at unpickling time. 
state = obj.__dict__.copy() - state.pop('__builtins__', None) + state.pop("__builtins__", None) return dynamic_subimport, (obj.__name__, state) @@ -940,13 +1007,21 @@ def _dynamic_class_reduce(obj): """ if Enum is not None and issubclass(obj, Enum): return ( - _make_skeleton_enum, _enum_getnewargs(obj), _enum_getstate(obj), - None, None, _class_setstate + _make_skeleton_enum, + _enum_getnewargs(obj), + _enum_getstate(obj), + None, + None, + _class_setstate, ) else: return ( - _make_skeleton_class, _class_getnewargs(obj), _class_getstate(obj), - None, None, _class_setstate + _make_skeleton_class, + _class_getnewargs(obj), + _class_getstate(obj), + None, + None, + _class_setstate, ) @@ -969,18 +1044,18 @@ def _dict_keys_reduce(obj): # Safer not to ship the full dict as sending the rest might # be unintended and could potentially cause leaking of # sensitive information - return _make_dict_keys, (list(obj), ) + return _make_dict_keys, (list(obj),) def _dict_values_reduce(obj): # Safer not to ship the full dict as sending the rest might # be unintended and could potentially cause leaking of # sensitive information - return _make_dict_values, (list(obj), ) + return _make_dict_values, (list(obj),) def _dict_items_reduce(obj): - return _make_dict_items, (dict(obj), ) + return _make_dict_items, (dict(obj),) def _odict_keys_reduce(obj): @@ -1116,8 +1191,7 @@ def _dynamic_function_reduce(self, func): """Reduce a function that is not pickleable via attribute lookup.""" newargs = self._function_getnewargs(func) state = _function_getstate(func) - return (_make_function, newargs, state, None, None, - _function_setstate) + return (_make_function, newargs, state, None, None, _function_setstate) def _function_reduce(self, obj): """Reducer for function objects. @@ -1162,8 +1236,7 @@ def _function_getnewargs(self, func): if func.__closure__ is None: closure = None else: - closure = tuple( - _make_empty_cell() for _ in range(len(code.co_freevars))) + closure = tuple(_make_empty_cell() for _ in range(len(code.co_freevars))) return code, base_globals, None, None, closure @@ -1172,10 +1245,7 @@ def dump(self, obj): return super().dump(obj) except RuntimeError as e: if len(e.args) > 0 and "recursion" in e.args[0]: - msg = ( - "Could not pickle object as excessively deep recursion " - "required." - ) + msg = "Could not pickle object as excessively deep recursion required." raise pickle.PicklingError(msg) from e else: raise @@ -1183,9 +1253,7 @@ def dump(self, obj): def __init__(self, file, protocol=None, buffer_callback=None): if protocol is None: protocol = DEFAULT_PROTOCOL - super().__init__( - file, protocol=protocol, buffer_callback=buffer_callback - ) + super().__init__(file, protocol=protocol, buffer_callback=buffer_callback) # map functions __globals__ attribute ids, to ensure that functions # sharing the same global namespace at pickling time also share # their global namespace at unpickling time. @@ -1272,13 +1340,25 @@ def reducer_override(self, obj): # hard-coded call to save_global when pickling meta-classes. 
dispatch = pickle.Pickler.dispatch.copy() - def _save_reduce_pickle5(self, func, args, state=None, listitems=None, - dictitems=None, state_setter=None, obj=None): + def _save_reduce_pickle5( + self, + func, + args, + state=None, + listitems=None, + dictitems=None, + state_setter=None, + obj=None, + ): save = self.save write = self.write self.save_reduce( - func, args, state=None, listitems=listitems, - dictitems=dictitems, obj=obj + func, + args, + state=None, + listitems=listitems, + dictitems=dictitems, + obj=obj, ) # backport of the Python 3.8 state_setter pickle operations save(state_setter) @@ -1307,7 +1387,8 @@ def save_global(self, obj, name=None, pack=struct.pack): return self.save_reduce(type, (NotImplemented,), obj=obj) elif obj in _BUILTIN_TYPE_NAMES: return self.save_reduce( - _builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj) + _builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj + ) if name is not None: super().save_global(obj, name=name) @@ -1315,10 +1396,11 @@ def save_global(self, obj, name=None, pack=struct.pack): self._save_reduce_pickle5(*_dynamic_class_reduce(obj), obj=obj) else: super().save_global(obj, name=name) + dispatch[type] = save_global def save_function(self, obj, name=None): - """ Registered with the dispatch to handle all function types. + """Registered with the dispatch to handle all function types. Determines what kind of function obj is (e.g. lambda, defined at interactive prompt, etc) and handles the pickling appropriately. @@ -1350,9 +1432,11 @@ def save_pypy_builtin_func(self, obj): this routing should be removed when cloudpickle supports only PyPy 3.6 and later. """ - rv = (types.FunctionType, (obj.__code__, {}, obj.__name__, - obj.__defaults__, obj.__closure__), - obj.__dict__) + rv = ( + types.FunctionType, + (obj.__code__, {}, obj.__name__, obj.__defaults__, obj.__closure__), + obj.__dict__, + ) self.save_reduce(*rv, obj=obj) dispatch[types.FunctionType] = save_function @@ -1360,6 +1444,7 @@ def save_pypy_builtin_func(self, obj): # Shorthands similar to pickle.dump/pickle.dumps + def dump(obj, file, protocol=None, buffer_callback=None): """Serialize obj as bytes streamed into file @@ -1373,9 +1458,7 @@ def dump(obj, file, protocol=None, buffer_callback=None): implementation details that can change from one Python version to the next). """ - Pickler( - file, protocol=protocol, buffer_callback=buffer_callback - ).dump(obj) + Pickler(file, protocol=protocol, buffer_callback=buffer_callback).dump(obj) def dumps(obj, protocol=None, buffer_callback=None): @@ -1392,9 +1475,7 @@ def dumps(obj, protocol=None, buffer_callback=None): next). 
""" with io.BytesIO() as file: - cp = Pickler( - file, protocol=protocol, buffer_callback=buffer_callback - ) + cp = Pickler(file, protocol=protocol, buffer_callback=buffer_callback) cp.dump(obj) return file.getvalue() diff --git a/cloudpickle/cloudpickle_fast.py b/cloudpickle/cloudpickle_fast.py index 561adb96..52d6732e 100644 --- a/cloudpickle/cloudpickle_fast.py +++ b/cloudpickle/cloudpickle_fast.py @@ -11,4 +11,3 @@ def __getattr__(name): return getattr(cloudpickle, name) - diff --git a/dev-requirements.txt b/dev-requirements.txt index aa5db950..933e6909 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,5 +1,7 @@ -# Dependencies for running the tests with pytest +# Linting tools ruff +pre-commit +# Dependencies for running the tests with pytest pytest pytest-cov psutil diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..8bf56a9f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,39 @@ +[build-system] +requires = ["flit_core"] +build-backend = "flit_core.buildapi" + +[tool.flit.metadata] +module = "cloudpickle" +author = "The cloudpickle developer team" +author-email='cloudpipe@googlegroups.com' +home-page = "https://github.com/cloudpipe/cloudpickle" +description-file = "README.md" +requires-python = ">=3.8" +license = "BSD-3-Clause" +classifiers = [ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: BSD License', + 'Operating System :: POSIX', + 'Operating System :: Microsoft :: Windows', + 'Operating System :: MacOS :: MacOS X', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: Implementation :: CPython', + 'Programming Language :: Python :: Implementation :: PyPy', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Topic :: Scientific/Engineering', + 'Topic :: System :: Distributed Computing', +] + +[tool.black] +line-length = 88 +target_version = ['py38', 'py39', 'py310', 'py311', 'py312'] +preview = true + +[tool.ruff] +line-length = 88 +target-version = "py38" diff --git a/setup.py b/setup.py deleted file mode 100644 index 8b1021ae..00000000 --- a/setup.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python -import os -import re - -try: - from setuptools import setup -except ImportError: - from distutils.core import setup - - -# Function to parse __version__ in `cloudpickle/__init__.py` -def find_version(): - here = os.path.abspath(os.path.dirname(__file__)) - with open(os.path.join(here, 'cloudpickle', '__init__.py')) as fp: - version_file = fp.read() - version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", - version_file, re.M) - if version_match: - return version_match.group(1) - raise RuntimeError("Unable to find version string.") - - -setup( - name='cloudpickle', - version=find_version(), - description='Extended pickling support for Python objects', - author='Cloudpipe', - author_email='cloudpipe@googlegroups.com', - url='https://github.com/cloudpipe/cloudpickle', - license='BSD-3-Clause', - packages=['cloudpickle'], - long_description=open('README.md').read(), - long_description_content_type="text/markdown", - classifiers=[ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: POSIX', - 'Operating System :: Microsoft :: Windows', - 'Operating System :: 
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 8b1021ae..00000000
--- a/setup.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-import os
-import re
-
-try:
-    from setuptools import setup
-except ImportError:
-    from distutils.core import setup
-
-
-# Function to parse __version__ in `cloudpickle/__init__.py`
-def find_version():
-    here = os.path.abspath(os.path.dirname(__file__))
-    with open(os.path.join(here, 'cloudpickle', '__init__.py')) as fp:
-        version_file = fp.read()
-    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
-                              version_file, re.M)
-    if version_match:
-        return version_match.group(1)
-    raise RuntimeError("Unable to find version string.")
-
-
-setup(
-    name='cloudpickle',
-    version=find_version(),
-    description='Extended pickling support for Python objects',
-    author='Cloudpipe',
-    author_email='cloudpipe@googlegroups.com',
-    url='https://github.com/cloudpipe/cloudpickle',
-    license='BSD-3-Clause',
-    packages=['cloudpickle'],
-    long_description=open('README.md').read(),
-    long_description_content_type="text/markdown",
-    classifiers=[
-        'Development Status :: 4 - Beta',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: BSD License',
-        'Operating System :: POSIX',
-        'Operating System :: Microsoft :: Windows',
-        'Operating System :: MacOS :: MacOS X',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-        'Programming Language :: Python :: 3.12',
-        'Programming Language :: Python :: Implementation :: CPython',
-        'Programming Language :: Python :: Implementation :: PyPy',
-        'Topic :: Software Development :: Libraries :: Python Modules',
-        'Topic :: Scientific/Engineering',
-        'Topic :: System :: Distributed Computing',
-    ],
-    test_suite='tests',
-    python_requires='>=3.8',
-)
diff --git a/tests/cloudpickle_file_test.py b/tests/cloudpickle_file_test.py
index b742d174..df46ab6a 100644
--- a/tests/cloudpickle_file_test.py
+++ b/tests/cloudpickle_file_test.py
@@ -16,22 +16,22 @@ class CloudPickleFileTests(unittest.TestCase):
 
     def setUp(self):
         self.tmpdir = tempfile.mkdtemp()
-        self.tmpfilepath = os.path.join(self.tmpdir, 'testfile')
-        self.teststring = 'Hello world!'
+        self.tmpfilepath = os.path.join(self.tmpdir, "testfile")
+        self.teststring = "Hello world!"
 
     def tearDown(self):
         shutil.rmtree(self.tmpdir)
 
     def test_empty_file(self):
         # Empty file
-        open(self.tmpfilepath, 'w').close()
+        open(self.tmpfilepath, "w").close()
         with open(self.tmpfilepath) as f:
-            self.assertEqual('', pickle.loads(cloudpickle.dumps(f)).read())
+            self.assertEqual("", pickle.loads(cloudpickle.dumps(f)).read())
         os.remove(self.tmpfilepath)
 
     def test_closed_file(self):
         # Write & close
-        with open(self.tmpfilepath, 'w') as f:
+        with open(self.tmpfilepath, "w") as f:
             f.write(self.teststring)
         with pytest.raises(pickle.PicklingError) as excinfo:
             cloudpickle.dumps(f)
@@ -40,7 +40,7 @@ def test_closed_file(self):
 
     def test_r_mode(self):
         # Write & close
-        with open(self.tmpfilepath, 'w') as f:
+        with open(self.tmpfilepath, "w") as f:
             f.write(self.teststring)
         # Open for reading
         with open(self.tmpfilepath) as f:
@@ -49,16 +49,15 @@ def test_r_mode(self):
         os.remove(self.tmpfilepath)
 
     def test_w_mode(self):
-        with open(self.tmpfilepath, 'w') as f:
+        with open(self.tmpfilepath, "w") as f:
             f.write(self.teststring)
             f.seek(0)
-            self.assertRaises(pickle.PicklingError,
-                              lambda: cloudpickle.dumps(f))
+            self.assertRaises(pickle.PicklingError, lambda: cloudpickle.dumps(f))
         os.remove(self.tmpfilepath)
 
     def test_plus_mode(self):
         # Write, then seek to 0
-        with open(self.tmpfilepath, 'w+') as f:
+        with open(self.tmpfilepath, "w+") as f:
             f.write(self.teststring)
             f.seek(0)
             new_f = pickle.loads(cloudpickle.dumps(f))
@@ -67,7 +66,7 @@ def test_plus_mode(self):
 
     def test_seek(self):
         # Write, then seek to arbitrary position
-        with open(self.tmpfilepath, 'w+') as f:
+        with open(self.tmpfilepath, "w+") as f:
             f.write(self.teststring)
             f.seek(4)
             unpickled = pickle.loads(cloudpickle.dumps(f))
@@ -83,9 +82,8 @@ def test_pickling_special_file_handles(self):
         # Warning: if you want to run your tests with nose, add -s option
         for out in sys.stdout, sys.stderr:  # Regression test for SPARK-3415
             self.assertEqual(out, pickle.loads(cloudpickle.dumps(out)))
-        self.assertRaises(pickle.PicklingError,
-                          lambda: cloudpickle.dumps(sys.stdin))
+        self.assertRaises(pickle.PicklingError, lambda: cloudpickle.dumps(sys.stdin))
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/tests/cloudpickle_test.py b/tests/cloudpickle_test.py
index 0d1d3240..4041bf72 100644
--- a/tests/cloudpickle_test.py
+++ b/tests/cloudpickle_test.py
@@ -59,7 +59,6 @@
 
 
 class RaiserOnPickle:
-
     def __init__(self, exc):
         self.exc = exc
 
@@ -90,11 +89,13 @@ def _maybe_remove(list_, item):
 
 
 def
test_extract_class_dict(): class A(int): """A docstring""" + def method(self): return "a" class B: """B docstring""" + B_CONSTANT = 42 def method(self): @@ -114,7 +115,6 @@ def method_c(self): class CloudPickleTest(unittest.TestCase): - protocol = cloudpickle.DEFAULT_PROTOCOL def setUp(self): @@ -124,9 +124,9 @@ def tearDown(self): shutil.rmtree(self.tmpdir) @pytest.mark.skipif( - platform.python_implementation() != "CPython" or - sys.version_info < (3, 8, 2), - reason="Underlying bug fixed upstream starting Python 3.8.2") + platform.python_implementation() != "CPython" or sys.version_info < (3, 8, 2), + reason="Underlying bug fixed upstream starting Python 3.8.2", + ) def test_reducer_override_reference_cycle(self): # Early versions of Python 3.8 introduced a reference cycle between a # Pickler and it's reducer_override method. Because a Pickler @@ -160,6 +160,7 @@ def test_attrgetter(self): class C: def __getattr__(self, item): return item + d = C() getter = attrgetter("a") getter2 = pickle_depickle(getter, protocol=self.protocol) @@ -190,8 +191,9 @@ def __reduce__(self): global exit exit = Unpicklable() - self.assertRaises(Exception, lambda: cloudpickle.dumps( - exit, protocol=self.protocol)) + self.assertRaises( + Exception, lambda: cloudpickle.dumps(exit, protocol=self.protocol) + ) def foo(): sys.exit(0) @@ -201,8 +203,9 @@ def foo(): def test_memoryview(self): buffer_obj = memoryview(b"Hello") - self.assertEqual(pickle_depickle(buffer_obj, protocol=self.protocol), - buffer_obj.tobytes()) + self.assertEqual( + pickle_depickle(buffer_obj, protocol=self.protocol), buffer_obj.tobytes() + ) def test_dict_keys(self): keys = {"a": 1, "b": 2}.keys() @@ -242,17 +245,18 @@ def test_odict_items(self): def test_sliced_and_non_contiguous_memoryview(self): buffer_obj = memoryview(b"Hello!" * 3)[2:15:2] - self.assertEqual(pickle_depickle(buffer_obj, protocol=self.protocol), - buffer_obj.tobytes()) + self.assertEqual( + pickle_depickle(buffer_obj, protocol=self.protocol), buffer_obj.tobytes() + ) def test_large_memoryview(self): buffer_obj = memoryview(b"Hello!" * int(1e7)) - self.assertEqual(pickle_depickle(buffer_obj, protocol=self.protocol), - buffer_obj.tobytes()) + self.assertEqual( + pickle_depickle(buffer_obj, protocol=self.protocol), buffer_obj.tobytes() + ) def test_lambda(self): - self.assertEqual( - pickle_depickle(lambda: 1, protocol=self.protocol)(), 1) + self.assertEqual(pickle_depickle(lambda: 1, protocol=self.protocol)(), 1) def test_nested_lambdas(self): a, b = 1, 2 @@ -264,11 +268,13 @@ def test_recursive_closure(self): def f1(): def g(): return g + return g def f2(base): def g(n): return base if n <= 1 else n * g(n - 1) + return g g1 = pickle_depickle(f1(), protocol=self.protocol) @@ -283,14 +289,14 @@ def f(): self.assertTrue( f.__closure__ is None, - msg='f actually has closure cells!', + msg="f actually has closure cells!", ) g = pickle_depickle(f, protocol=self.protocol) self.assertTrue( g.__closure__ is None, - msg='g now has closure cells even though f does not', + msg="g now has closure cells even though f does not", ) def test_empty_cell_preserved(self): @@ -324,13 +330,13 @@ def g(): self.assertEqual(g(), 2) def test_dynamically_generated_class_that_uses_super(self): - class Base: def method(self): return 1 class Derived(Base): "Derived Docstring" + def method(self): return super().method() + 1 @@ -350,9 +356,7 @@ def method(self): self.assertEqual(d.method(), 2) def test_cycle_in_classdict_globals(self): - class C: - def it_works(self): return "woohoo!" 
@@ -386,6 +390,7 @@ def some_function(x, y): class SomeClass: """Overly complicated class with nested references to symbols""" + def __init__(self, value): self.value = value @@ -405,15 +410,15 @@ def some_method(self, x): # pickle the class instances self.assertEqual(pickle_depickle(SomeClass(1)).one(), 1) self.assertEqual(pickle_depickle(SomeClass(5)).some_method(41), 7) - new_instance = subprocess_pickle_echo(SomeClass(5), - protocol=self.protocol) + new_instance = subprocess_pickle_echo(SomeClass(5), protocol=self.protocol) self.assertEqual(new_instance.some_method(41), 7) # pickle the method instances self.assertEqual(pickle_depickle(SomeClass(1).one)(), 1) self.assertEqual(pickle_depickle(SomeClass(5).some_method)(41), 7) - new_method = subprocess_pickle_echo(SomeClass(5).some_method, - protocol=self.protocol) + new_method = subprocess_pickle_echo( + SomeClass(5).some_method, protocol=self.protocol + ) self.assertEqual(new_method(41), 7) def test_partial(self): @@ -421,30 +426,29 @@ def test_partial(self): partial_clone = pickle_depickle(partial_obj, protocol=self.protocol) self.assertEqual(partial_clone(4), 1) - @pytest.mark.skipif(platform.python_implementation() == 'PyPy', - reason="Skip numpy and scipy tests on PyPy") + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", + reason="Skip numpy and scipy tests on PyPy", + ) def test_ufunc(self): # test a numpy ufunc (universal function), which is a C-based function # that is applied on a numpy array if np: # simple ufunc: np.add - self.assertEqual(pickle_depickle(np.add, protocol=self.protocol), - np.add) + self.assertEqual(pickle_depickle(np.add, protocol=self.protocol), np.add) else: # skip if numpy is not available pass if spp: # custom ufunc: scipy.special.iv - self.assertEqual(pickle_depickle(spp.iv, protocol=self.protocol), - spp.iv) + self.assertEqual(pickle_depickle(spp.iv, protocol=self.protocol), spp.iv) else: # skip if scipy is not available pass def test_loads_namespace(self): obj = 1, 2, 3, 4 - returned_obj = cloudpickle.loads(cloudpickle.dumps( - obj, protocol=self.protocol)) + returned_obj = cloudpickle.loads(cloudpickle.dumps(obj, protocol=self.protocol)) self.assertEqual(obj, returned_obj) def test_load_namespace(self): @@ -456,7 +460,6 @@ def test_load_namespace(self): self.assertEqual(obj, returned_obj) def test_generator(self): - def some_generator(cnt): yield from range(cnt) @@ -495,7 +498,7 @@ def test_cm(cls): def test_method_descriptors(self): f = pickle_depickle(str.upper) - self.assertEqual(f('abc'), 'ABC') + self.assertEqual(f("abc"), "ABC") def test_instancemethods_without_self(self): class F: @@ -511,8 +514,8 @@ def test_module(self): self.assertEqual(pickle, pickle_clone) def _check_dynamic_module(self, mod): - mod = types.ModuleType('mod') - code = ''' + mod = types.ModuleType("mod") + code = """ x = 1 def f(y): return x + y @@ -520,14 +523,14 @@ def f(y): class Foo: def method(self, x): return f(x) - ''' + """ exec(textwrap.dedent(code), mod.__dict__) mod2 = pickle_depickle(mod, protocol=self.protocol) self.assertEqual(mod.x, mod2.x) self.assertEqual(mod.f(5), mod2.f(5)) self.assertEqual(mod.Foo().method(5), mod2.Foo().method(5)) - if platform.python_implementation() != 'PyPy': + if platform.python_implementation() != "PyPy": # XXX: this fails with excessive recursion on PyPy. 
mod3 = subprocess_pickle_echo(mod, protocol=self.protocol) self.assertEqual(mod.x, mod3.x) @@ -541,22 +544,22 @@ def method(self, x): # Ensure proper pickling of mod's functions when module "looks" like a # file-backed module even though it is not: try: - sys.modules['mod'] = mod + sys.modules["mod"] = mod depickled_f = pickle_depickle(mod.f, protocol=self.protocol) self.assertEqual(mod.f(5), depickled_f(5)) finally: - sys.modules.pop('mod', None) + sys.modules.pop("mod", None) def test_dynamic_module(self): - mod = types.ModuleType('mod') + mod = types.ModuleType("mod") assert mod.__package__ is None self._check_dynamic_module(mod) def test_dynamic_module_no_package(self): # non-regression test for #116 - mod = types.ModuleType('mod') + mod = types.ModuleType("mod") del mod.__package__ - assert not hasattr(mod, '__package__') + assert not hasattr(mod, "__package__") self._check_dynamic_module(mod) def test_module_locals_behavior(self): @@ -564,26 +567,26 @@ def test_module_locals_behavior(self): # correctly serialized. This notably checks that the globals are # accessible and that there is no issue with the builtins (see #211) - pickled_func_path = os.path.join(self.tmpdir, 'local_func_g.pkl') + pickled_func_path = os.path.join(self.tmpdir, "local_func_g.pkl") - child_process_script = ''' + child_process_script = """ import pickle import gc with open("{pickled_func_path}", 'rb') as f: func = pickle.load(f) assert func(range(10)) == 45 - ''' + """ child_process_script = child_process_script.format( - pickled_func_path=_escape(pickled_func_path)) + pickled_func_path=_escape(pickled_func_path) + ) try: - from .testutils import make_local_function g = make_local_function() - with open(pickled_func_path, 'wb') as f: + with open(pickled_func_path, "wb") as f: cloudpickle.dump(g, f, protocol=self.protocol) assert_run_python_script(textwrap.dedent(child_process_script)) @@ -599,13 +602,13 @@ def test_dynamic_module_with_unpicklable_builtin(self): # when pickling dynamic modules. class UnpickleableObject: def __reduce__(self): - raise ValueError('Unpicklable object') + raise ValueError("Unpicklable object") mod = types.ModuleType("mod") - exec('f = lambda x: abs(x)', mod.__dict__) + exec("f = lambda x: abs(x)", mod.__dict__) assert mod.f(-1) == 1 - assert '__builtins__' in mod.__dict__ + assert "__builtins__" in mod.__dict__ unpicklable_obj = UnpickleableObject() with pytest.raises(ValueError): @@ -616,18 +619,17 @@ def __reduce__(self): # The __builtins__ entry of mod's __dict__ can either be the # __builtins__ module, or the __builtins__ module's __dict__. #316 # happens only in the latter case. 
- if isinstance(mod.__dict__['__builtins__'], dict): - mod.__dict__['__builtins__']['unpickleable_obj'] = unpicklable_obj - elif isinstance(mod.__dict__['__builtins__'], types.ModuleType): - mod.__dict__['__builtins__'].unpickleable_obj = unpicklable_obj + if isinstance(mod.__dict__["__builtins__"], dict): + mod.__dict__["__builtins__"]["unpickleable_obj"] = unpicklable_obj + elif isinstance(mod.__dict__["__builtins__"], types.ModuleType): + mod.__dict__["__builtins__"].unpickleable_obj = unpicklable_obj depickled_mod = pickle_depickle(mod, protocol=self.protocol) - assert '__builtins__' in depickled_mod.__dict__ + assert "__builtins__" in depickled_mod.__dict__ - if isinstance(depickled_mod.__dict__['__builtins__'], dict): + if isinstance(depickled_mod.__dict__["__builtins__"], dict): assert "abs" in depickled_mod.__builtins__ - elif isinstance( - depickled_mod.__dict__['__builtins__'], types.ModuleType): + elif isinstance(depickled_mod.__dict__["__builtins__"], types.ModuleType): assert hasattr(depickled_mod.__builtins__, "abs") assert depickled_mod.f(-1) == 1 @@ -643,10 +645,10 @@ def test_load_dynamic_module_in_grandchild_process(self): # the child process and reloaded in another one. # We create a new dynamic module - mod = types.ModuleType('mod') - code = ''' + mod = types.ModuleType("mod") + code = """ x = 1 - ''' + """ exec(textwrap.dedent(code), mod.__dict__) # This script will be ran in a separate child process. It will import @@ -654,10 +656,12 @@ def test_load_dynamic_module_in_grandchild_process(self): # Finally, it will create a child process that will load the re-pickled # dynamic module. parent_process_module_file = os.path.join( - self.tmpdir, 'dynamic_module_from_parent_process.pkl') + self.tmpdir, "dynamic_module_from_parent_process.pkl" + ) child_process_module_file = os.path.join( - self.tmpdir, 'dynamic_module_from_child_process.pkl') - child_process_script = ''' + self.tmpdir, "dynamic_module_from_child_process.pkl" + ) + child_process_script = """ import pickle import textwrap @@ -674,7 +678,7 @@ def test_load_dynamic_module_in_grandchild_process(self): cloudpickle.dump(mod, f, protocol={protocol}) assert_run_python_script(textwrap.dedent(child_of_child_process_script)) - ''' + """ # The script ran by the process created by the child process child_of_child_process_script = """ ''' @@ -687,16 +691,18 @@ def test_load_dynamic_module_in_grandchild_process(self): # for the first child process, the script to be executed by its # own child process. child_of_child_process_script = child_of_child_process_script.format( - child_process_module_file=child_process_module_file) + child_process_module_file=child_process_module_file + ) child_process_script = child_process_script.format( parent_process_module_file=_escape(parent_process_module_file), child_process_module_file=_escape(child_process_module_file), child_of_child_process_script=_escape(child_of_child_process_script), - protocol=self.protocol) + protocol=self.protocol, + ) try: - with open(parent_process_module_file, 'wb') as fid: + with open(parent_process_module_file, "wb") as fid: cloudpickle.dump(mod, fid, protocol=self.protocol) assert_run_python_script(textwrap.dedent(child_process_script)) @@ -726,11 +732,11 @@ def my_small_function(x, y): # module are not included so as to keep the pickle payload as # lightweight as possible. 
- assert b'my_small_function' in b - assert b'nested_function' in b + assert b"my_small_function" in b + assert b"nested_function" in b - assert b'unwanted_function' not in b - assert b'math' not in b + assert b"unwanted_function" not in b + assert b"math" not in b def test_module_importability(self): import pickle @@ -743,24 +749,28 @@ def test_module_importability(self): assert _should_pickle_by_reference(collections) # package assert _should_pickle_by_reference(collections.abc) # module in package - dynamic_module = types.ModuleType('dynamic_module') + dynamic_module = types.ModuleType("dynamic_module") assert not _should_pickle_by_reference(dynamic_module) - if platform.python_implementation() == 'PyPy': + if platform.python_implementation() == "PyPy": import _codecs + assert _should_pickle_by_reference(_codecs) # #354: Check that modules created dynamically during the import of # their parent modules are considered importable by cloudpickle. # See the mod_with_dynamic_submodule documentation for more # details of this use case. - m = pytest.importorskip("_cloudpickle_testpkg.mod.dynamic_submodule") # noqa F841 + m = pytest.importorskip( + "_cloudpickle_testpkg.mod.dynamic_submodule" + ) # noqa F841 assert _should_pickle_by_reference(m) assert pickle_depickle(m, protocol=self.protocol) is m # Check for similar behavior for a module that cannot be imported by # attribute lookup. from _cloudpickle_testpkg.mod import dynamic_submodule_two as m2 + assert _should_pickle_by_reference(m2) assert pickle_depickle(m2, protocol=self.protocol) is m2 @@ -768,11 +778,12 @@ def test_module_importability(self): with pytest.raises(ImportError): import _cloudpickle_testpkg.mod.submodule_three # noqa from _cloudpickle_testpkg.mod import submodule_three as m3 + assert not _should_pickle_by_reference(m3) # This module cannot be pickled using attribute lookup (as it does not # have a `__module__` attribute like classes and functions. - assert not hasattr(m3, '__module__') + assert not hasattr(m3, "__module__") depickled_m3 = pickle_depickle(m3, protocol=self.protocol) assert depickled_m3 is not m3 assert m3.f(1) == depickled_m3.f(1) @@ -780,6 +791,7 @@ def test_module_importability(self): # Do the same for an importable dynamic submodule inside a dynamic # module inside a file-backed module. 
import _cloudpickle_testpkg.mod.dynamic_submodule.dynamic_subsubmodule as sm # noqa + assert _should_pickle_by_reference(sm) assert pickle_depickle(sm, protocol=self.protocol) is sm @@ -788,8 +800,7 @@ def test_module_importability(self): _should_pickle_by_reference(object()) def test_Ellipsis(self): - self.assertEqual(Ellipsis, - pickle_depickle(Ellipsis, protocol=self.protocol)) + self.assertEqual(Ellipsis, pickle_depickle(Ellipsis, protocol=self.protocol)) def test_NotImplemented(self): ExcClone = pickle_depickle(NotImplemented, protocol=self.protocol) @@ -815,6 +826,7 @@ def test_builtin_function(self): assert pickle_depickle(zip, protocol=self.protocol) is zip from os import mkdir + # builtin function from a "regular" module assert pickle_depickle(mkdir, protocol=self.protocol) is mkdir @@ -844,16 +856,19 @@ def test_builtin_classicmethod(self): bound_classicmethod = obj.hex # builtin_function_or_method unbound_classicmethod = type(obj).hex # method_descriptor - clsdict_classicmethod = type(obj).__dict__['hex'] # method_descriptor + clsdict_classicmethod = type(obj).__dict__["hex"] # method_descriptor assert unbound_classicmethod is clsdict_classicmethod depickled_bound_meth = pickle_depickle( - bound_classicmethod, protocol=self.protocol) + bound_classicmethod, protocol=self.protocol + ) depickled_unbound_meth = pickle_depickle( - unbound_classicmethod, protocol=self.protocol) + unbound_classicmethod, protocol=self.protocol + ) depickled_clsdict_meth = pickle_depickle( - clsdict_classicmethod, protocol=self.protocol) + clsdict_classicmethod, protocol=self.protocol + ) # No identity on the bound methods they are bound to different float # instances @@ -867,10 +882,10 @@ def test_builtin_classmethod(self): bound_clsmethod = obj.fromhex # builtin_function_or_method unbound_clsmethod = type(obj).fromhex # builtin_function_or_method - depickled_bound_meth = pickle_depickle( - bound_clsmethod, protocol=self.protocol) + depickled_bound_meth = pickle_depickle(bound_clsmethod, protocol=self.protocol) depickled_unbound_meth = pickle_depickle( - unbound_clsmethod, protocol=self.protocol) + unbound_clsmethod, protocol=self.protocol + ) # float.fromhex takes a string as input. arg = "0x1" @@ -883,15 +898,13 @@ def test_builtin_classmethod(self): @pytest.mark.skipif( ( - sys.version_info >= (3, 10, 8) and - platform.python_implementation() == 'CPython' + sys.version_info >= (3, 10, 8) + and platform.python_implementation() == "CPython" ), - reason=( "CPython dropped support for pickling classmethod_descriptor," "https://github.com/python/cpython/issues/95196" - ) - + ), ) def test_builtin_classmethod_descriptor(self): # `classmethod_descriptor` is the analogue `classmethod` (used for @@ -907,19 +920,19 @@ def test_builtin_classmethod_descriptor(self): # has always been broken. obj = 1.5 # float object - clsdict_clsmethod = type( - obj).__dict__['fromhex'] # classmethod_descriptor + clsdict_clsmethod = type(obj).__dict__["fromhex"] # classmethod_descriptor depickled_clsdict_meth = pickle_depickle( - clsdict_clsmethod, protocol=self.protocol) + clsdict_clsmethod, protocol=self.protocol + ) # float.fromhex takes a string as input. arg = "0x1" - if platform.python_implementation() == 'CPython': + if platform.python_implementation() == "CPython": # Roundtripping a classmethod_descriptor results in a # builtin_function_or_method (CPython upstream issue). 
assert depickled_clsdict_meth(arg) == clsdict_clsmethod(float, arg) - if platform.python_implementation() == 'PyPy': + if platform.python_implementation() == "PyPy": # builtin-classmethods are simple classmethod in PyPy (not # callable). We test equality of types and the functionality of the # __func__ attribute instead. We do not test the the identity of @@ -927,21 +940,23 @@ def test_builtin_classmethod_descriptor(self): # pickleable and must be reconstructed at depickling time. assert type(depickled_clsdict_meth) is type(clsdict_clsmethod) assert depickled_clsdict_meth.__func__( - float, arg) == clsdict_clsmethod.__func__(float, arg) + float, arg + ) == clsdict_clsmethod.__func__(float, arg) def test_builtin_slotmethod(self): obj = 1.5 # float object bound_slotmethod = obj.__repr__ # method-wrapper unbound_slotmethod = type(obj).__repr__ # wrapper_descriptor - clsdict_slotmethod = type(obj).__dict__['__repr__'] # ditto + clsdict_slotmethod = type(obj).__dict__["__repr__"] # ditto - depickled_bound_meth = pickle_depickle( - bound_slotmethod, protocol=self.protocol) + depickled_bound_meth = pickle_depickle(bound_slotmethod, protocol=self.protocol) depickled_unbound_meth = pickle_depickle( - unbound_slotmethod, protocol=self.protocol) + unbound_slotmethod, protocol=self.protocol + ) depickled_clsdict_meth = pickle_depickle( - clsdict_slotmethod, protocol=self.protocol) + clsdict_slotmethod, protocol=self.protocol + ) # No identity tests on the bound slotmethod are they are bound to # different float instances @@ -951,22 +966,26 @@ def test_builtin_slotmethod(self): @pytest.mark.skipif( platform.python_implementation() == "PyPy", - reason="No known staticmethod example in the pypy stdlib") + reason="No known staticmethod example in the pypy stdlib", + ) def test_builtin_staticmethod(self): obj = "foo" # str object bound_staticmethod = obj.maketrans # builtin_function_or_method unbound_staticmethod = type(obj).maketrans # ditto - clsdict_staticmethod = type(obj).__dict__['maketrans'] # staticmethod + clsdict_staticmethod = type(obj).__dict__["maketrans"] # staticmethod assert bound_staticmethod is unbound_staticmethod depickled_bound_meth = pickle_depickle( - bound_staticmethod, protocol=self.protocol) + bound_staticmethod, protocol=self.protocol + ) depickled_unbound_meth = pickle_depickle( - unbound_staticmethod, protocol=self.protocol) + unbound_staticmethod, protocol=self.protocol + ) depickled_clsdict_meth = pickle_depickle( - clsdict_staticmethod, protocol=self.protocol) + clsdict_staticmethod, protocol=self.protocol + ) assert depickled_bound_meth is bound_staticmethod assert depickled_unbound_meth is unbound_staticmethod @@ -978,8 +997,8 @@ def test_builtin_staticmethod(self): def test_tornado_coroutine(self): # Pickling a locally defined coroutine function - gen = pytest.importorskip('tornado.gen') - ioloop = pytest.importorskip('tornado.ioloop') + gen = pytest.importorskip("tornado.gen") + ioloop = pytest.importorskip("tornado.ioloop") @gen.coroutine def f(x, y): @@ -1003,14 +1022,14 @@ def g(y): assert res == 7 @pytest.mark.skipif( - (3, 11, 0, 'beta') <= sys.version_info < (3, 11, 0, 'beta', 4), - reason="https://github.com/python/cpython/issues/92932" + (3, 11, 0, "beta") <= sys.version_info < (3, 11, 0, "beta", 4), + reason="https://github.com/python/cpython/issues/92932", ) def test_extended_arg(self): # Functions with more than 65535 global vars prefix some global # variable references with the EXTENDED_ARG opcode. 
nvars = 65537 + 258 - names = ['g%d' % i for i in range(1, nvars)] + names = ["g%d" % i for i in range(1, nvars)] r = random.Random(42) d = {name: r.randrange(100) for name in names} # def f(x): @@ -1022,9 +1041,9 @@ def test_extended_arg(self): def f(): x = {tup} return zlib.crc32(bytes(bytearray(x))) - """.format(tup=', '.join(names)) + """.format(tup=", ".join(names)) exec(textwrap.dedent(code), d, d) - f = d['f'] + f = d["f"] res = f() data = cloudpickle.dumps([f, f], protocol=self.protocol) d = f = None @@ -1051,7 +1070,7 @@ def example(): # refresh the environment, i.e., unimport the dependency del xml for item in list(sys.modules): - if item.split('.')[0] == 'xml': + if item.split(".")[0] == "xml": del sys.modules[item] # deserialise @@ -1065,6 +1084,7 @@ def scope(): def example(): _ = xml.etree.ElementTree.Comment # potential AttributeError + return example example = scope() @@ -1074,7 +1094,7 @@ def example(): # refresh the environment (unimport dependency) for item in list(sys.modules): - if item.split('.')[0] == 'xml': + if item.split(".")[0] == "xml": del sys.modules[item] f = cloudpickle.loads(s) @@ -1085,21 +1105,24 @@ def test_multiprocess(self): def scope(): def example(): _ = xml.etree.ElementTree.Comment + return example + global xml import xml.etree.ElementTree + example = scope() s = cloudpickle.dumps(example, protocol=self.protocol) # choose "subprocess" rather than "multiprocessing" because the latter # library uses fork to preserve the parent environment. - command = ("import base64; " - "import pickle; " - "pickle.loads(base64.b32decode('" + - base64.b32encode(s).decode('ascii') + - "'))()") - assert not subprocess.call([sys.executable, '-c', command]) + command = ( + "import base64; import pickle; pickle.loads(base64.b32decode('" + + base64.b32encode(s).decode("ascii") + + "'))()" + ) + assert not subprocess.call([sys.executable, "-c", command]) def test_import(self): # like test_multiprocess except subpackage modules referenced directly @@ -1112,24 +1135,25 @@ def scope(): def example(): _ = etree.Comment _ = foobar.ElementTree + return example + example = scope() import xml.etree.ElementTree as etree s = cloudpickle.dumps(example, protocol=self.protocol) - command = ("import base64; " - "from pickle import loads; " - "loads(base64.b32decode('" + - base64.b32encode(s).decode('ascii') + - "'))()") - assert not subprocess.call([sys.executable, '-c', command]) + command = ( + "import base64; from pickle import loads; loads(base64.b32decode('" + + base64.b32encode(s).decode("ascii") + + "'))()" + ) + assert not subprocess.call([sys.executable, "-c", command]) def test_multiprocessing_lock_raises(self): lock = multiprocessing.Lock() with pytest.raises( - RuntimeError, - match="only be shared between processes through inheritance" + RuntimeError, match="only be shared between processes through inheritance" ): cloudpickle.dumps(lock) @@ -1156,20 +1180,21 @@ def check_logger(self, name): logging.basicConfig(level=logging.INFO) logger = cloudpickle.loads(base64.b32decode(b'{}')) logger.info('hello') - """.format(base64.b32encode(dumped).decode('ascii')) - proc = subprocess.Popen([sys.executable, "-W ignore", "-c", code], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) + """.format(base64.b32encode(dumped).decode("ascii")) + proc = subprocess.Popen( + [sys.executable, "-W ignore", "-c", code], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) out, _ = proc.communicate() self.assertEqual(proc.wait(), 0) - self.assertEqual(out.strip().decode(), - 
f'INFO:{logger.name}:hello') + self.assertEqual(out.strip().decode(), f"INFO:{logger.name}:hello") def test_logger(self): # logging.RootLogger object self.check_logger(None) # logging.Logger object - self.check_logger('cloudpickle.dummy_test_logger') + self.check_logger("cloudpickle.dummy_test_logger") def test_getset_descriptor(self): assert isinstance(float.real, types.GetSetDescriptorType) @@ -1180,7 +1205,7 @@ def test_abc_cache_not_pickled(self): # cloudpickle issue #302: make sure that cloudpickle does not pickle # the caches populated during instance/subclass checks of abc.ABCMeta # instances. - MyClass = abc.ABCMeta('MyClass', (), {}) + MyClass = abc.ABCMeta("MyClass", (), {}) class MyUnrelatedClass: pass @@ -1203,7 +1228,6 @@ class MyRelatedClass: assert issubclass(MyRelatedClass, depickled_class) def test_abc(self): - class AbstractClass(abc.ABC): @abc.abstractmethod def some_method(self): @@ -1226,20 +1250,20 @@ def some_property(): class ConcreteClass(AbstractClass): def some_method(self): - return 'it works!' + return "it works!" @classmethod def some_classmethod(cls): assert cls == ConcreteClass - return 'it works!' + return "it works!" @staticmethod def some_staticmethod(): - return 'it works!' + return "it works!" @property def some_property(self): - return 'it works!' + return "it works!" # This abstract class is locally defined so we can safely register # tuple in it to verify the unpickled class also registers tuple. @@ -1247,49 +1271,47 @@ def some_property(self): concrete_instance = ConcreteClass() depickled_base = pickle_depickle(AbstractClass, protocol=self.protocol) - depickled_class = pickle_depickle(ConcreteClass, - protocol=self.protocol) + depickled_class = pickle_depickle(ConcreteClass, protocol=self.protocol) depickled_instance = pickle_depickle(concrete_instance) assert issubclass(tuple, AbstractClass) assert issubclass(tuple, depickled_base) - self.assertEqual(depickled_class().some_method(), 'it works!') - self.assertEqual(depickled_instance.some_method(), 'it works!') + self.assertEqual(depickled_class().some_method(), "it works!") + self.assertEqual(depickled_instance.some_method(), "it works!") - self.assertEqual(depickled_class.some_classmethod(), 'it works!') - self.assertEqual(depickled_instance.some_classmethod(), 'it works!') + self.assertEqual(depickled_class.some_classmethod(), "it works!") + self.assertEqual(depickled_instance.some_classmethod(), "it works!") - self.assertEqual(depickled_class().some_staticmethod(), 'it works!') - self.assertEqual(depickled_instance.some_staticmethod(), 'it works!') + self.assertEqual(depickled_class().some_staticmethod(), "it works!") + self.assertEqual(depickled_instance.some_staticmethod(), "it works!") - self.assertEqual(depickled_class().some_property, 'it works!') - self.assertEqual(depickled_instance.some_property, 'it works!') + self.assertEqual(depickled_class().some_property, "it works!") + self.assertEqual(depickled_instance.some_property, "it works!") self.assertRaises(TypeError, depickled_base) class DepickledBaseSubclass(depickled_base): def some_method(self): - return 'it works for realz!' + return "it works for realz!" @classmethod def some_classmethod(cls): assert cls == DepickledBaseSubclass - return 'it works for realz!' + return "it works for realz!" @staticmethod def some_staticmethod(): - return 'it works for realz!' + return "it works for realz!" @property def some_property(): - return 'it works for realz!' + return "it works for realz!"
- self.assertEqual(DepickledBaseSubclass().some_method(), - 'it works for realz!') + self.assertEqual(DepickledBaseSubclass().some_method(), "it works for realz!") class IncompleteBaseSubclass(depickled_base): def some_method(self): - return 'this class lacks some concrete methods' + return "this class lacks some concrete methods" self.assertRaises(TypeError, IncompleteBaseSubclass) @@ -1316,20 +1338,20 @@ def some_property(self): class ConcreteClass(AbstractClass): def some_method(self): - return 'it works!' + return "it works!" @classmethod def some_classmethod(cls): assert cls == ConcreteClass - return 'it works!' + return "it works!" @staticmethod def some_staticmethod(): - return 'it works!' + return "it works!" @property def some_property(self): - return 'it works!' + return "it works!" # This abstract class is locally defined so we can safely register # tuple in it to verify the unpickled class also registers tuple. @@ -1337,49 +1359,47 @@ def some_property(self): concrete_instance = ConcreteClass() depickled_base = pickle_depickle(AbstractClass, protocol=self.protocol) - depickled_class = pickle_depickle(ConcreteClass, - protocol=self.protocol) + depickled_class = pickle_depickle(ConcreteClass, protocol=self.protocol) depickled_instance = pickle_depickle(concrete_instance) assert issubclass(tuple, AbstractClass) assert issubclass(tuple, depickled_base) - self.assertEqual(depickled_class().some_method(), 'it works!') - self.assertEqual(depickled_instance.some_method(), 'it works!') + self.assertEqual(depickled_class().some_method(), "it works!") + self.assertEqual(depickled_instance.some_method(), "it works!") - self.assertEqual(depickled_class.some_classmethod(), 'it works!') - self.assertEqual(depickled_instance.some_classmethod(), 'it works!') + self.assertEqual(depickled_class.some_classmethod(), "it works!") + self.assertEqual(depickled_instance.some_classmethod(), "it works!") - self.assertEqual(depickled_class().some_staticmethod(), 'it works!') - self.assertEqual(depickled_instance.some_staticmethod(), 'it works!') + self.assertEqual(depickled_class().some_staticmethod(), "it works!") + self.assertEqual(depickled_instance.some_staticmethod(), "it works!") - self.assertEqual(depickled_class().some_property, 'it works!') - self.assertEqual(depickled_instance.some_property, 'it works!') + self.assertEqual(depickled_class().some_property, "it works!") + self.assertEqual(depickled_instance.some_property, "it works!") self.assertRaises(TypeError, depickled_base) class DepickledBaseSubclass(depickled_base): def some_method(self): - return 'it works for realz!' + return "it works for realz!" @classmethod def some_classmethod(cls): assert cls == DepickledBaseSubclass - return 'it works for realz!' + return "it works for realz!" @staticmethod def some_staticmethod(): - return 'it works for realz!' + return "it works for realz!" @property def some_property(self): - return 'it works for realz!' + return "it works for realz!"
- self.assertEqual(DepickledBaseSubclass().some_method(), - 'it works for realz!') + self.assertEqual(DepickledBaseSubclass().some_method(), "it works for realz!") class IncompleteBaseSubclass(depickled_base): def some_method(self): - return 'this class lacks some concrete methods' + return "this class lacks some concrete methods" self.assertRaises(TypeError, IncompleteBaseSubclass) @@ -1414,7 +1434,7 @@ def test_non_module_object_passing_whichmodule_test(self): # carried out on the entries of sys.modules, causing cloudpickle to # then error in unexpected ways def func(x): - return x ** 2 + return x**2 # Trigger a loop during the execution of whichmodule(func) by # explicitly setting the function's module to None @@ -1430,7 +1450,7 @@ def __getattr__(self, name): # entries of sys.modules is not carried out, but manipulating # this instance thinking it really is a module later on in the # pickling process of func errors out - if name == 'func': + if name == "func": return func else: raise AttributeError @@ -1446,17 +1466,17 @@ def __getattr__(self, name): _ = non_module_object.some_attr try: - sys.modules['NonModuleObject'] = non_module_object + sys.modules["NonModuleObject"] = non_module_object func_module_name = _whichmodule(func, None) - assert func_module_name != 'NonModuleObject' + assert func_module_name != "NonModuleObject" assert func_module_name is None depickled_func = pickle_depickle(func, protocol=self.protocol) assert depickled_func(2) == 4 finally: - sys.modules.pop('NonModuleObject') + sys.modules.pop("NonModuleObject") def test_unrelated_faulty_module(self): # Check that pickling a dynamically defined function or class does not @@ -1464,7 +1484,8 @@ def test_unrelated_faulty_module(self): # as long as those faulty modules are unrelated to the class or # function we are currently pickling.
for base_class in (object, types.ModuleType): - for module_name in ['_missing_module', None]: + for module_name in ["_missing_module", None]: + class FaultyModule(base_class): def __getattr__(self, name): # This throws an exception while looking up within @@ -1483,7 +1504,7 @@ def foo(): foo.__module__ = module_name if base_class is types.ModuleType: # noqa - faulty_module = FaultyModule('_faulty_module') + faulty_module = FaultyModule("_faulty_module") else: faulty_module = FaultyModule() sys.modules["_faulty_module"] = faulty_module @@ -1509,13 +1530,14 @@ def local_func(x): def test_function_qualname(self): def func(x): return x + # Default __qualname__ attribute (Python 3 only) - if hasattr(func, '__qualname__'): + if hasattr(func, "__qualname__"): cloned = pickle_depickle(func, protocol=self.protocol) self.assertEqual(cloned.__qualname__, func.__qualname__) # Mutated __qualname__ attribute - func.__qualname__ = '' + func.__qualname__ = "" cloned = pickle_depickle(func, protocol=self.protocol) self.assertEqual(cloned.__qualname__, func.__qualname__) @@ -1563,12 +1585,13 @@ def read_write_value(self, value): type(depickled_obj).read_only_value.__doc__ == "A read-only attribute" def test_namedtuple(self): - MyTuple = collections.namedtuple('MyTuple', ['a', 'b', 'c']) + MyTuple = collections.namedtuple("MyTuple", ["a", "b", "c"]) t1 = MyTuple(1, 2, 3) t2 = MyTuple(3, 2, 1) depickled_t1, depickled_MyTuple, depickled_t2 = pickle_depickle( - [t1, MyTuple, t2], protocol=self.protocol) + [t1, MyTuple, t2], protocol=self.protocol + ) assert isinstance(depickled_t1, MyTuple) assert depickled_t1 == t1 @@ -1586,7 +1609,8 @@ class MyTuple(typing.NamedTuple): t2 = MyTuple(3, 2, 1) depickled_t1, depickled_MyTuple, depickled_t2 = pickle_depickle( - [t1, MyTuple, t2], protocol=self.protocol) + [t1, MyTuple, t2], protocol=self.protocol + ) assert isinstance(depickled_t1, MyTuple) assert depickled_t1 == t1 @@ -1715,9 +1739,8 @@ def f1(): new_global_var = new_cloned_f1() assert new_global_var == "default_value", new_global_var """ - for clone_func in ['local_clone', 'subprocess_pickle_echo']: - code = code_template.format(protocol=self.protocol, - clone_func=clone_func) + for clone_func in ["local_clone", "subprocess_pickle_echo"]: + code = code_template.format(protocol=self.protocol, clone_func=clone_func) assert_run_python_script(textwrap.dedent(code)) def test_closure_interacting_with_a_global_variable(self): @@ -1725,6 +1748,7 @@ def test_closure_interacting_with_a_global_variable(self): assert _TEST_GLOBAL_VARIABLE == "default_value" orig_value = _TEST_GLOBAL_VARIABLE try: + def f0(): global _TEST_GLOBAL_VARIABLE _TEST_GLOBAL_VARIABLE = "changed_by_f0" @@ -1733,8 +1757,7 @@ def f1(): return _TEST_GLOBAL_VARIABLE # pickle f0 and f1 inside the same pickle_string - cloned_f0, cloned_f1 = pickle_depickle([f0, f1], - protocol=self.protocol) + cloned_f0, cloned_f1 = pickle_depickle([f0, f1], protocol=self.protocol) # cloned_f0 and cloned_f1 now share a global namespace that is # isolated from any previously existing namespace @@ -1960,8 +1983,10 @@ class A: """.format(protocol=self.protocol) assert_run_python_script(code) - @pytest.mark.skipif(platform.python_implementation() == 'PyPy', - reason="Skip PyPy because memory grows too much") + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", + reason="Skip PyPy because memory grows too much", + ) def test_interactive_remote_function_calls_no_memory_leak(self): code = """if __name__ == "__main__": from testutils import subprocess_worker @@ 
-2015,10 +2040,10 @@ def test_pickle_reraise(self): cloudpickle.dumps(obj, protocol=self.protocol) def test_unhashable_function(self): - d = {'a': 1} + d = {"a": 1} depickled_method = pickle_depickle(d.get, protocol=self.protocol) - self.assertEqual(depickled_method('a'), 1) - self.assertEqual(depickled_method('b'), None) + self.assertEqual(depickled_method("a"), 1) + self.assertEqual(depickled_method("b"), None) def test_itertools_count(self): counter = itertools.count(1, step=2) @@ -2067,7 +2092,7 @@ def test_wraps_preserves_function_annotations(self): def f(x): pass - f.__annotations__ = {'x': 1, 'return': float} + f.__annotations__ = {"x": 1, "return": float} @wraps(f) def g(x): @@ -2083,6 +2108,7 @@ def test_type_hint(self): def test_instance_with_slots(self): for slots in [["registered_attribute"], "registered_attribute"]: + class ClassWithSlots: __slots__ = slots @@ -2090,8 +2116,7 @@ def __init__(self): self.registered_attribute = 42 initial_obj = ClassWithSlots() - depickled_obj = pickle_depickle( - initial_obj, protocol=self.protocol) + depickled_obj = pickle_depickle(initial_obj, protocol=self.protocol) assert depickled_obj.__class__.__slots__ == slots @@ -2110,9 +2135,10 @@ def __init__(self): depickled_obj = cloudpickle.loads(s) assert depickled_obj.unregistered_attribute == 1 - - @unittest.skipIf(not hasattr(types, "MappingProxyType"), - "Old versions of Python do not have this type.") + @unittest.skipIf( + not hasattr(types, "MappingProxyType"), + "Old versions of Python do not have this type.", + ) def test_mappingproxy(self): mp = types.MappingProxyType({"some_key": "some value"}) assert mp == pickle_depickle(mp, protocol=self.protocol) @@ -2120,7 +2146,7 @@ def test_mappingproxy(self): def test_dataclass(self): dataclasses = pytest.importorskip("dataclasses") - DataClass = dataclasses.make_dataclass('DataClass', [('x', int)]) + DataClass = dataclasses.make_dataclass("DataClass", [("x", int)]) data = DataClass(x=42) pickle_depickle(DataClass, protocol=self.protocol) @@ -2132,6 +2158,7 @@ class StringEnum(str, enum.Enum): class Color(StringEnum): """3-element color space""" + RED = "1" GREEN = "2" BLUE = "3" @@ -2140,7 +2167,8 @@ def is_green(self): return self is Color.GREEN green1, green2, ClonedColor = pickle_depickle( - [Color.GREEN, Color.GREEN, Color], protocol=self.protocol) + [Color.GREEN, Color.GREEN, Color], protocol=self.protocol + ) assert green1 is green2 assert green1 is ClonedColor.GREEN assert green1 is not ClonedColor.BLUE @@ -2161,7 +2189,8 @@ def test_locally_defined_intenum(self): green1, green2, ClonedDynamicColor = pickle_depickle( [DynamicColor.GREEN, DynamicColor.GREEN, DynamicColor], - protocol=self.protocol) + protocol=self.protocol, + ) assert green1 is green2 assert green1 is ClonedDynamicColor.GREEN @@ -2228,10 +2257,12 @@ def f(a, *, b=1): for func in (f, depickled_f): assert func(2) == 3 - assert func.__kwdefaults__ == {'b': 1} + assert func.__kwdefaults__ == {"b": 1} - @pytest.mark.skipif(not hasattr(types.CodeType, "co_posonlyargcount"), - reason="Requires positional-only argument syntax") + @pytest.mark.skipif( + not hasattr(types.CodeType, "co_posonlyargcount"), + reason="Requires positional-only argument syntax", + ) def test_interactively_defined_func_with_positional_only_argument(self): # Fixes https://github.com/cloudpipe/cloudpickle/issues/266 # The source code of this test is bundled in a string and is run from @@ -2262,8 +2293,7 @@ def test___reduce___returns_string(self): _cloudpickle_testpkg =
pytest.importorskip("_cloudpickle_testpkg") some_singleton = _cloudpickle_testpkg.some_singleton assert some_singleton.__reduce__() == "some_singleton" - depickled_singleton = pickle_depickle( - some_singleton, protocol=self.protocol) + depickled_singleton = pickle_depickle(some_singleton, protocol=self.protocol) assert depickled_singleton is some_singleton def test_cloudpickle_extract_nested_globals(self): @@ -2271,14 +2301,17 @@ def function_factory(): def inner_function(): global _TEST_GLOBAL_VARIABLE return _TEST_GLOBAL_VARIABLE + return inner_function - globals_ = set(cloudpickle.cloudpickle._extract_code_globals( - function_factory.__code__).keys()) - assert globals_ == {'_TEST_GLOBAL_VARIABLE'} + globals_ = set( + cloudpickle.cloudpickle._extract_code_globals( + function_factory.__code__ + ).keys() + ) + assert globals_ == {"_TEST_GLOBAL_VARIABLE"} - depickled_factory = pickle_depickle(function_factory, - protocol=self.protocol) + depickled_factory = pickle_depickle(function_factory, protocol=self.protocol) inner_func = depickled_factory() assert inner_func() == _TEST_GLOBAL_VARIABLE @@ -2288,7 +2321,7 @@ def __getattribute__(self, name): return getattr(self, name) a = A() - with pytest.raises(pickle.PicklingError, match='recursion'): + with pytest.raises(pickle.PicklingError, match="recursion"): cloudpickle.dumps(a) def test_out_of_band_buffers(self): @@ -2301,18 +2334,22 @@ class LocallyDefinedClass: data_instance = LocallyDefinedClass() buffers = [] - pickle_bytes = cloudpickle.dumps(data_instance, protocol=self.protocol, - buffer_callback=buffers.append) + pickle_bytes = cloudpickle.dumps( + data_instance, protocol=self.protocol, buffer_callback=buffers.append + ) assert len(buffers) == 1 reconstructed = pickle.loads(pickle_bytes, buffers=buffers) np.testing.assert_allclose(reconstructed.data, data_instance.data) def test_pickle_dynamic_typevar(self): - T = typing.TypeVar('T') + T = typing.TypeVar("T") depickled_T = pickle_depickle(T, protocol=self.protocol) attr_list = [ - "__name__", "__bound__", "__constraints__", "__covariant__", - "__contravariant__" + "__name__", + "__bound__", + "__constraints__", + "__covariant__", + "__contravariant__", ] for attr in attr_list: assert getattr(T, attr) == getattr(depickled_T, attr) @@ -2323,9 +2360,8 @@ def test_pickle_dynamic_typevar_tracking(self): assert T is T2 def test_pickle_dynamic_typevar_memoization(self): - T = typing.TypeVar('T') - depickled_T1, depickled_T2 = pickle_depickle((T, T), - protocol=self.protocol) + T = typing.TypeVar("T") + depickled_T1, depickled_T2 = pickle_depickle((T, T), protocol=self.protocol) assert depickled_T1 is depickled_T2 def test_pickle_importable_typevar(self): @@ -2335,10 +2371,11 @@ def test_pickle_importable_typevar(self): # Standard Library TypeVar from typing import AnyStr + assert AnyStr is pickle_depickle(AnyStr, protocol=self.protocol) def test_generic_type(self): - T = typing.TypeVar('T') + T = typing.TypeVar("T") class C(typing.Generic[T]): pass @@ -2368,7 +2405,7 @@ def check_generic(generic, origin, type_value): assert worker.run(check_generic, C[int], C, int) == "ok" def test_generic_subclass(self): - T = typing.TypeVar('T') + T = typing.TypeVar("T") class Base(typing.Generic[T]): pass @@ -2391,9 +2428,7 @@ class DerivedT(Base[T]): class LeafT(DerivedT[T]): pass - klasses = [ - Base, DerivedAny, LeafAny, DerivedInt, LeafInt, DerivedT, LeafT - ] + klasses = [Base, DerivedAny, LeafAny, DerivedInt, LeafInt, DerivedT, LeafT] for klass in klasses: assert pickle_depickle(klass, 
protocol=self.protocol) is klass @@ -2411,32 +2446,28 @@ def check_mro(klass, expected_mro): def test_locally_defined_class_with_type_hints(self): with subprocess_worker(protocol=self.protocol) as worker: for type_ in _all_types_to_test(): + class MyClass: def method(self, arg: type_) -> type_: return arg - MyClass.__annotations__ = {'attribute': type_} + + MyClass.__annotations__ = {"attribute": type_} def check_annotations(obj, expected_type, expected_type_str): assert obj.__annotations__["attribute"] == expected_type - assert ( - obj.method.__annotations__["arg"] == expected_type - ) - assert ( - obj.method.__annotations__["return"] - == expected_type - ) + assert obj.method.__annotations__["arg"] == expected_type + assert obj.method.__annotations__["return"] == expected_type return "ok" obj = MyClass() assert check_annotations(obj, type_, "type_") == "ok" - assert ( - worker.run(check_annotations, obj, type_, "type_") == "ok" - ) + assert worker.run(check_annotations, obj, type_, "type_") == "ok" def test_class_annotations(self): class C: pass - C.__annotations__ = {'a': int} + + C.__annotations__ = {"a": int} C1 = pickle_depickle(C, protocol=self.protocol) assert C1.__annotations__ == C.__annotations__ @@ -2452,17 +2483,19 @@ def test_always_use_up_to_date_copyreg(self): # test that updates of copyreg.dispatch_table are taken into account by # cloudpickle import copyreg + try: + class MyClass: pass def reduce_myclass(x): - return MyClass, (), {'custom_reduce': True} + return MyClass, (), {"custom_reduce": True} copyreg.dispatch_table[MyClass] = reduce_myclass my_obj = MyClass() depickled_myobj = pickle_depickle(my_obj, protocol=self.protocol) - assert hasattr(depickled_myobj, 'custom_reduce') + assert hasattr(depickled_myobj, "custom_reduce") finally: copyreg.dispatch_table.pop(MyClass) @@ -2486,7 +2519,9 @@ def __type__(self): o = MyClass() pickle_depickle(o, protocol=self.protocol) - def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): # noqa + def test_pickle_constructs_from_module_registered_for_pickling_by_value( + self, + ): # noqa _prev_sys_path = sys.path.copy() try: # We simulate an interactive session that: @@ -2501,7 +2536,7 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): # First, remove sys.path entries that could point to # /path/to/cloudpickle/tests and be inherited by the worker - _maybe_remove(sys.path, '') + _maybe_remove(sys.path, "") _maybe_remove(sys.path, _mock_interactive_session_cwd) # Add the desired session working directory @@ -2509,9 +2544,7 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): with subprocess_worker(protocol=self.protocol) as w: # Make the module unavailable in the remote worker - w.run( - lambda p: sys.path.remove(p), _mock_interactive_session_cwd - ) + w.run(lambda p: sys.path.remove(p), _mock_interactive_session_cwd) # Import the actual file after starting the module since the # worker is started using fork on Linux, which will inherit # the parent sys.modules. On Python>3.6, the worker can be # TODO Once Python 3.6 reaches end of life, rely on mp_context # instead. import mock_local_folder.mod as mod + # The constructs whose pickling mechanism is changed using # register_pickle_by_value are functions, classes, TypeVar and # modules.
- from mock_local_folder.mod import ( - local_function, LocalT, LocalClass - ) + from mock_local_folder.mod import local_function, LocalT, LocalClass # Make sure the module/constructs are unimportable in the # worker. with pytest.raises(ImportError): w.run(lambda: __import__("mock_local_folder.mod")) with pytest.raises(ImportError): - w.run( - lambda: __import__("mock_local_folder.subfolder.mod") - ) + w.run(lambda: __import__("mock_local_folder.subfolder.mod")) for o in [mod, local_function, LocalT, LocalClass]: with pytest.raises(ImportError): @@ -2545,14 +2575,9 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): # typevar assert w.run(lambda: LocalT.__name__) == LocalT.__name__ # classes - assert ( - w.run(lambda: LocalClass().method()) - == LocalClass().method() - ) + assert w.run(lambda: LocalClass().method()) == LocalClass().method() # modules - assert ( - w.run(lambda: mod.local_function()) == local_function() - ) + assert w.run(lambda: mod.local_function()) == local_function() # Constructs from modules inside subfolders should be pickled # by value if a namespace module pointing to some parent folder @@ -2560,16 +2585,19 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): # namespace module falls into that category, but a # "mock_local_folder.mod" one does not. from mock_local_folder.subfolder.submod import ( - LocalSubmodClass, LocalSubmodT, local_submod_function + LocalSubmodClass, + LocalSubmodT, + local_submod_function, ) + # Shorter aliases to comply with line-length limits _t, _func, _class = ( - LocalSubmodT, local_submod_function, LocalSubmodClass + LocalSubmodT, + local_submod_function, + LocalSubmodClass, ) with pytest.raises(ImportError): - w.run( - lambda: __import__("mock_local_folder.subfolder.mod") - ) + w.run(lambda: __import__("mock_local_folder.subfolder.mod")) with pytest.raises(ImportError): w.run(lambda: local_submod_function) @@ -2583,6 +2611,7 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): # Test the namespace folder case import mock_local_folder + register_pickle_by_value(mock_local_folder) assert w.run(lambda: local_function()) == local_function() assert w.run(lambda: _func()) == _func() @@ -2596,6 +2625,7 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): # Test the case of registering a single module inside a # subfolder. 
import mock_local_folder.subfolder.submod + register_pickle_by_value(mock_local_folder.subfolder.submod) assert w.run(lambda: _func()) == _func() assert w.run(lambda: _t.__name__) == _t.__name__ @@ -2609,14 +2639,13 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): with pytest.raises(ImportError): w.run(lambda: __import__("mock_local_folder.mod")) - unregister_pickle_by_value( - mock_local_folder.subfolder.submod - ) + unregister_pickle_by_value(mock_local_folder.subfolder.submod) with pytest.raises(ImportError): w.run(lambda: local_submod_function) # Test the subfolder namespace module case import mock_local_folder.subfolder + register_pickle_by_value(mock_local_folder.subfolder) assert w.run(lambda: _func()) == _func() assert w.run(lambda: _t.__name__) == _t.__name__ @@ -2626,14 +2655,18 @@ def test_pickle_constructs_from_module_registered_for_pickling_by_value(self): finally: _fname = "mock_local_folder" sys.path = _prev_sys_path - for m in [_fname, f"{_fname}.mod", f"{_fname}.subfolder", - f"{_fname}.subfolder.submod"]: + for m in [ + _fname, + f"{_fname}.mod", + f"{_fname}.subfolder", + f"{_fname}.subfolder.submod", + ]: mod = sys.modules.pop(m, None) if mod and mod.__name__ in list_registry_pickle_by_value(): unregister_pickle_by_value(mod) def test_pickle_constructs_from_installed_packages_registered_for_pickling_by_value( # noqa - self + self, ): for package_or_module in ["package", "module"]: if package_or_module == "package": @@ -2663,7 +2696,7 @@ def test_pickle_constructs_from_installed_packages_registered_for_pickling_by_va unregister_pickle_by_value(m) def test_pickle_various_versions_of_the_same_function_with_different_pickling_method( # noqa - self + self, ): # Make sure that different versions of the same function (possibly # pickled in a different way - by value and/or by reference) can @@ -2694,12 +2727,8 @@ def _call_from_registry(k): register_pickle_by_value(_cloudpickle_testpkg) _cloudpickle_testpkg.global_variable = "modified global" w.run(_add_to_registry, f, "f_by_val") - assert ( - w.run(_call_from_registry, "f_by_ref") == _original_global - ) - assert ( - w.run(_call_from_registry, "f_by_val") == "modified global" - ) + assert w.run(_call_from_registry, "f_by_ref") == _original_global + assert w.run(_call_from_registry, "f_by_val") == "modified global" finally: _cloudpickle_testpkg.global_variable = _original_global @@ -2718,19 +2747,19 @@ def func_with_globals(): for i in range(5): vals.add( - subprocess_pickle_string(func_with_globals, - protocol=self.protocol, - add_env={"PYTHONHASHSEED": str(i)})) + subprocess_pickle_string( + func_with_globals, + protocol=self.protocol, + add_env={"PYTHONHASHSEED": str(i)}, + ) + ) if len(vals) > 1: # Print additional debug info on stdout with dis: for val in vals: pickletools.dis(val) - pytest.fail( - "Expected a single deterministic payload, got %d/5" % len(vals) - ) + pytest.fail("Expected a single deterministic payload, got %d/5" % len(vals)) def test_dataclass_fields_are_preserved(self): - @dataclasses.dataclass class SampleDataclass: x: int @@ -2742,9 +2771,7 @@ class SampleDataclass: ) found_fields = list(PickledSampleDataclass.__dataclass_fields__.values()) - assert set(f.name for f in found_fields) == { - "x", "y", "z" - } + assert set(f.name for f in found_fields) == {"x", "y", "z"} expected_ftypes = { "x": dataclasses._FIELD, @@ -2811,12 +2838,11 @@ def echo(*args): class Protocol2CloudPickleTest(CloudPickleTest): - protocol = 2 def test_lookup_module_and_qualname_dynamic_typevar(): 
- T = typing.TypeVar('T') + T = typing.TypeVar("T") module_and_name = _lookup_module_and_qualname(T, name=T.__name__) assert module_and_name is None @@ -2828,16 +2854,17 @@ def test_lookup_module_and_qualname_importable_typevar(): assert module_and_name is not None module, name = module_and_name assert module is _cloudpickle_testpkg - assert name == 'T' + assert name == "T" def test_lookup_module_and_qualname_stdlib_typevar(): - module_and_name = _lookup_module_and_qualname(typing.AnyStr, - name=typing.AnyStr.__name__) + module_and_name = _lookup_module_and_qualname( + typing.AnyStr, name=typing.AnyStr.__name__ + ) assert module_and_name is not None module, name = module_and_name assert module is typing - assert name == 'AnyStr' + assert name == "AnyStr" def test_register_pickle_by_value(): @@ -2870,25 +2897,29 @@ def test_register_pickle_by_value(): with pytest.raises(ValueError, match=msg): register_pickle_by_value(pkg.__name__) - dynamic_mod = types.ModuleType('dynamic_mod') + dynamic_mod = types.ModuleType("dynamic_mod") msg = ( f"{dynamic_mod} was not imported correctly, have you used an " - f"`import` statement to access it?" + "`import` statement to access it?" ) with pytest.raises(ValueError, match=re.escape(msg)): register_pickle_by_value(dynamic_mod) def _all_types_to_test(): - T = typing.TypeVar('T') + T = typing.TypeVar("T") class C(typing.Generic[T]): pass types_to_test = [ - C, C[int], - T, typing.Any, typing.Optional, - typing.Generic, typing.Union, + C, + C[int], + T, + typing.Any, + typing.Optional, + typing.Generic, + typing.Union, typing.Optional[int], typing.Generic[T], typing.Callable[[int], typing.Any], @@ -2912,5 +2943,5 @@ def test_module_level_pickler(): assert cloudpickle.Pickler is cloudpickle.CloudPickler -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_backward_compat.py b/tests/test_backward_compat.py index de811db6..1abb3da4 100644 --- a/tests/test_backward_compat.py +++ b/tests/test_backward_compat.py @@ -16,8 +16,8 @@ from .generate_old_pickles import PICKLE_DIRECTORY -def load_obj(filename, check_deprecation_warning='auto'): - if check_deprecation_warning == 'auto': +def load_obj(filename, check_deprecation_warning="auto"): + if check_deprecation_warning == "auto": check_deprecation_warning = False pickle_filepath = PICKLE_DIRECTORY / filename if not pickle_filepath.exists(): diff --git a/tests/testutils.py b/tests/testutils.py index 63c340a4..24bef9e5 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -28,17 +28,16 @@ def g(x): def _make_cwd_env(): """Helper to prepare environment for the child processes""" - cloudpickle_repo_folder = op.normpath( - op.join(op.dirname(__file__), '..')) + cloudpickle_repo_folder = op.normpath(op.join(op.dirname(__file__), "..")) env = os.environ.copy() pythonpath = "{src}{sep}tests{pathsep}{src}".format( - src=cloudpickle_repo_folder, sep=os.sep, pathsep=os.pathsep) - env['PYTHONPATH'] = pythonpath + src=cloudpickle_repo_folder, sep=os.sep, pathsep=os.pathsep + ) + env["PYTHONPATH"] = pythonpath return cloudpickle_repo_folder, env -def subprocess_pickle_string(input_data, protocol=None, timeout=TIMEOUT, - add_env=None): +def subprocess_pickle_string(input_data, protocol=None, timeout=TIMEOUT, add_env=None): """Retrieve pickle string of an object generated by a child Python process Pickle the input data into a buffer, send it to a subprocess via @@ -54,31 +53,31 @@ def subprocess_pickle_string(input_data, protocol=None, timeout=TIMEOUT, # Protect stderr from any 
warning, as we will assume an error will happen # if it is not empty. A concrete example is pytest using the imp module, # which is deprecated in python 3.8 - cmd = [sys.executable, '-W ignore', __file__, "--protocol", str(protocol)] + cmd = [sys.executable, "-W ignore", __file__, "--protocol", str(protocol)] cwd, env = _make_cwd_env() if add_env: env.update(add_env) - proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=cwd, env=env, - bufsize=4096) + proc = Popen( + cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=cwd, env=env, bufsize=4096 + ) pickle_string = dumps(input_data, protocol=protocol) try: comm_kwargs = {} - comm_kwargs['timeout'] = timeout + comm_kwargs["timeout"] = timeout out, err = proc.communicate(pickle_string, **comm_kwargs) if proc.returncode != 0 or len(err): message = "Subprocess returned %d: " % proc.returncode - message += err.decode('utf-8') + message += err.decode("utf-8") raise RuntimeError(message) return out except TimeoutExpired as e: proc.kill() out, err = proc.communicate() - message = "\n".join([out.decode('utf-8'), err.decode('utf-8')]) + message = "\n".join([out.decode("utf-8"), err.decode("utf-8")]) raise RuntimeError(message) from e -def subprocess_pickle_echo(input_data, protocol=None, timeout=TIMEOUT, - add_env=None): +def subprocess_pickle_echo(input_data, protocol=None, timeout=TIMEOUT, add_env=None): """Echo function with a child Python process Pickle the input data into a buffer, send it to a subprocess via stdin, expect the subprocess to unpickle, re-pickle that data back @@ -86,10 +85,9 @@ def subprocess_pickle_echo(input_data, protocol=None, timeout=TIMEOUT, >>> subprocess_pickle_echo([1, 'a', None]) [1, 'a', None] """ - out = subprocess_pickle_string(input_data, - protocol=protocol, - timeout=timeout, - add_env=add_env) + out = subprocess_pickle_string( + input_data, protocol=protocol, timeout=timeout, add_env=add_env + ) return loads(out) @@ -111,9 +109,9 @@ def pickle_echo(stream_in=None, stream_out=None, protocol=None): stream_out = sys.stdout # Force the use of bytes streams under Python 3 - if hasattr(stream_in, 'buffer'): + if hasattr(stream_in, "buffer"): stream_in = stream_in.buffer - if hasattr(stream_out, 'buffer'): + if hasattr(stream_out, "buffer"): stream_out = stream_out.buffer input_bytes = _read_all_bytes(stream_in) @@ -145,7 +143,8 @@ def run(self, func, *args, **kwargs): input_payload = dumps((func, args, kwargs), protocol=self.protocol) result_payload = self.pool.submit( - call_func, input_payload, self.protocol).result() + call_func, input_payload, self.protocol + ).result() result = loads(result_payload) if isinstance(result, BaseException): @@ -153,14 +152,14 @@ def run(self, func, *args, **kwargs): return result def memsize(self): - workers_pids = [p.pid if hasattr(p, "pid") else p - for p in list(self.pool._processes)] + workers_pids = [ + p.pid if hasattr(p, "pid") else p for p in list(self.pool._processes) + ] num_workers = len(workers_pids) if num_workers == 0: return 0 elif num_workers > 1: - raise RuntimeError("Unexpected number of workers: %d" - % num_workers) + raise RuntimeError("Unexpected number of workers: %d" % num_workers) return psutil.Process(workers_pids[0]).memory_info().rss def close(self): @@ -180,38 +179,40 @@ def assert_run_python_script(source_code, timeout=TIMEOUT): The script provided in the source code should return 0 and not print anything on stderr or stdout. 
""" - fd, source_file = tempfile.mkstemp(suffix='_src_test_cloudpickle.py') + fd, source_file = tempfile.mkstemp(suffix="_src_test_cloudpickle.py") os.close(fd) try: - with open(source_file, 'wb') as f: - f.write(source_code.encode('utf-8')) - cmd = [sys.executable, '-W ignore', source_file] + with open(source_file, "wb") as f: + f.write(source_code.encode("utf-8")) + cmd = [sys.executable, "-W ignore", source_file] cwd, env = _make_cwd_env() kwargs = { - 'cwd': cwd, - 'stderr': STDOUT, - 'env': env, + "cwd": cwd, + "stderr": STDOUT, + "env": env, } # If coverage is running, pass the config file to the subprocess coverage_rc = os.environ.get("COVERAGE_PROCESS_START") if coverage_rc: - kwargs['env']['COVERAGE_PROCESS_START'] = coverage_rc - kwargs['timeout'] = timeout + kwargs["env"]["COVERAGE_PROCESS_START"] = coverage_rc + kwargs["timeout"] = timeout try: try: out = check_output(cmd, **kwargs) except CalledProcessError as e: - raise RuntimeError("script errored with output:\n%s" - % e.output.decode('utf-8')) from e + raise RuntimeError( + "script errored with output:\n%s" % e.output.decode("utf-8") + ) from e if out != b"": - raise AssertionError(out.decode('utf-8')) + raise AssertionError(out.decode("utf-8")) except TimeoutExpired as e: - raise RuntimeError("script timeout, output so far:\n%s" - % e.output.decode('utf-8')) from e + raise RuntimeError( + "script timeout, output so far:\n%s" % e.output.decode("utf-8") + ) from e finally: os.unlink(source_file) -if __name__ == '__main__': - protocol = int(sys.argv[sys.argv.index('--protocol') + 1]) +if __name__ == "__main__": + protocol = int(sys.argv[sys.argv.index("--protocol") + 1]) pickle_echo(protocol=protocol)