diff --git a/metaflow/_vendor/click/_compat.py b/metaflow/_vendor/click/_compat.py
index 60cb115bc5..7aec09977e 100644
--- a/metaflow/_vendor/click/_compat.py
+++ b/metaflow/_vendor/click/_compat.py
@@ -270,7 +270,6 @@ def filename_to_ui(value):
             value = value.decode(get_filesystem_encoding(), "replace")
         return value
 
 
-
 else:
     import io
@@ -725,7 +724,6 @@ def get_winterm_size():
         ).srWindow
         return win.Right - win.Left, win.Bottom - win.Top
 
-
 else:
 
     def _get_argv_encoding():
diff --git a/metaflow/_vendor/click/_termui_impl.py b/metaflow/_vendor/click/_termui_impl.py
index 88bec37701..cd9e3d016c 100644
--- a/metaflow/_vendor/click/_termui_impl.py
+++ b/metaflow/_vendor/click/_termui_impl.py
@@ -459,7 +459,9 @@ def edit_file(self, filename):
             environ = None
         try:
             c = subprocess.Popen(
-                '{} "{}"'.format(editor, filename), env=environ, shell=True,
+                '{} "{}"'.format(editor, filename),
+                env=environ,
+                shell=True,
             )
             exit_code = c.wait()
             if exit_code != 0:
@@ -563,11 +565,11 @@ def _unquote_file(url):
 
 
 def _translate_ch_to_exc(ch):
-    if ch == u"\x03":
+    if ch == "\x03":
         raise KeyboardInterrupt()
 
-    if ch == u"\x04" and not WIN:  # Unix-like, Ctrl+D
+    if ch == "\x04" and not WIN:  # Unix-like, Ctrl+D
         raise EOFError()
 
-    if ch == u"\x1a" and WIN:  # Windows, Ctrl+Z
+    if ch == "\x1a" and WIN:  # Windows, Ctrl+Z
         raise EOFError()
@@ -614,14 +616,13 @@ def getchar(echo):
             func = msvcrt.getwch
 
         rv = func()
-        if rv in (u"\x00", u"\xe0"):
+        if rv in ("\x00", "\xe0"):
             # \x00 and \xe0 are control characters that indicate special key,
             # see above.
             rv += func()
         _translate_ch_to_exc(rv)
         return rv
 
-
 else:
     import tty
     import termios
diff --git a/metaflow/_vendor/click/globals.py b/metaflow/_vendor/click/globals.py
index 1649f9a0bf..feac2e91f5 100644
--- a/metaflow/_vendor/click/globals.py
+++ b/metaflow/_vendor/click/globals.py
@@ -36,7 +36,7 @@ def pop_context():
 
 
 def resolve_color_default(color=None):
-    """"Internal helper to get the default value of the color flag. If a
+    """ "Internal helper to get the default value of the color flag. If a
     value is passed it's returned unchanged, otherwise it's looked up from
     the current context.
     """
diff --git a/metaflow/_vendor/click/utils.py b/metaflow/_vendor/click/utils.py
index 79265e732d..1423596c66 100644
--- a/metaflow/_vendor/click/utils.py
+++ b/metaflow/_vendor/click/utils.py
@@ -234,9 +234,9 @@ def echo(message=None, file=None, nl=True, err=False, color=None):
         message = text_type(message)
 
     if nl:
-        message = message or u""
+        message = message or ""
         if isinstance(message, text_type):
-            message += u"\n"
+            message += "\n"
         else:
             message += b"\n"
 
diff --git a/metaflow/_vendor/v3_5/__init__.py b/metaflow/_vendor/v3_5/__init__.py
index 22ae0c5f40..932b79829c 100644
--- a/metaflow/_vendor/v3_5/__init__.py
+++ b/metaflow/_vendor/v3_5/__init__.py
@@ -1 +1 @@
-# Empty file
\ No newline at end of file
+# Empty file
diff --git a/metaflow/_vendor/v3_5/importlib_metadata/__init__.py b/metaflow/_vendor/v3_5/importlib_metadata/__init__.py
index 429bfa66c4..c25bc3cb30 100644
--- a/metaflow/_vendor/v3_5/importlib_metadata/__init__.py
+++ b/metaflow/_vendor/v3_5/importlib_metadata/__init__.py
@@ -30,7 +30,7 @@
     PyPy_repr,
     unique_ordered,
     str,
-    )
+)
 
 from importlib import import_module
 from itertools import starmap
@@ -39,17 +39,17 @@
 
 
 __all__ = [
-    'Distribution',
-    'DistributionFinder',
-    'PackageNotFoundError',
-    'distribution',
-    'distributions',
-    'entry_points',
-    'files',
-    'metadata',
-    'requires',
-    'version',
-    ]
+    "Distribution",
+    "DistributionFinder",
+    "PackageNotFoundError",
+    "distribution",
+    "distributions",
+    "entry_points",
+    "files",
+    "metadata",
+    "requires",
+    "version",
+]
 
 
 class PackageNotFoundError(ModuleNotFoundError):
@@ -61,13 +61,13 @@ def __str__(self):
 
     @property
     def name(self):
-        name, = self.args
+        (name,) = self.args
        return name
 
 
 class EntryPoint(
-        PyPy_repr,
-        collections.namedtuple('EntryPointBase', 'name value group')):
+    PyPy_repr, collections.namedtuple("EntryPointBase", "name value group")
+):
     """An entry point as defined by Python packaging conventions.
 
     See `the packaging docs on entry points
@@ -76,10 +76,10 @@ class EntryPoint(
     """
 
     pattern = re.compile(
-        r'(?P<module>[\w.]+)\s*'
-        r'(:\s*(?P<attr>[\w.]+)\s*)?'
-        r'((?P<extras>\[.*\])\s*)?$'
-        )
+        r"(?P<module>[\w.]+)\s*"
+        r"(:\s*(?P<attr>[\w.]+)\s*)?"
+        r"((?P<extras>\[.*\])\s*)?$"
+    )
     """
     A regular expression describing the syntax for an entry point,
     which might look like:
@@ -102,24 +102,24 @@ def load(self):
         return the named object.
         """
         match = self.pattern.match(self.value)
-        module = import_module(match.group('module'))
-        attrs = filter(None, (match.group('attr') or '').split('.'))
+        module = import_module(match.group("module"))
+        attrs = filter(None, (match.group("attr") or "").split("."))
         return functools.reduce(getattr, attrs, module)
 
     @property
     def module(self):
         match = self.pattern.match(self.value)
-        return match.group('module')
+        return match.group("module")
 
     @property
     def attr(self):
         match = self.pattern.match(self.value)
-        return match.group('attr')
+        return match.group("attr")
 
     @property
     def extras(self):
         match = self.pattern.match(self.value)
-        return list(re.finditer(r'\w+', match.group('extras') or ''))
+        return list(re.finditer(r"\w+", match.group("extras") or ""))
 
     @classmethod
     def _from_config(cls, config):
@@ -127,11 +127,11 @@ def _from_config(cls, config):
             cls(name, value, group)
             for group in config.sections()
             for name, value in config.items(group)
-            ]
+        ]
 
     @classmethod
     def _from_text(cls, text):
-        config = ConfigParser(delimiters='=')
+        config = ConfigParser(delimiters="=")
         # case sensitive: https://stackoverflow.com/q/1611799/812183
         config.optionxform = str
         try:
@@ -151,18 +151,18 @@ def __reduce__(self):
         return (
             self.__class__,
             (self.name, self.value, self.group),
-            )
+        )
 
 
 class PackagePath(pathlib.PurePosixPath):
     """A reference to a path in a package"""
 
-    def read_text(self, encoding='utf-8'):
+    def read_text(self, encoding="utf-8"):
         with self.locate().open(encoding=encoding) as stream:
             return stream.read()
 
     def read_binary(self):
-        with self.locate().open('rb') as stream:
+        with self.locate().open("rb") as stream:
             return stream.read()
 
     def locate(self):
@@ -172,10 +172,10 @@ class FileHash:
     def __init__(self, spec):
-        self.mode, _, self.value = spec.partition('=')
+        self.mode, _, self.value = spec.partition("=")
 
     def __repr__(self):
-        return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+        return "<FileHash mode: {} value: {}>".format(self.mode, self.value)
 
 
 class Distribution:
@@ -224,14 +224,13 @@ def discover(cls, **kwargs):
         :context: A ``DistributionFinder.Context`` object.
         :return: Iterable of Distribution objects for all packages.
         """
-        context = kwargs.pop('context', None)
+        context = kwargs.pop("context", None)
         if context and kwargs:
             raise ValueError("cannot accept context and kwargs")
         context = context or DistributionFinder.Context(**kwargs)
         return itertools.chain.from_iterable(
-            resolver(context)
-            for resolver in cls._discover_resolvers()
-            )
+            resolver(context) for resolver in cls._discover_resolvers()
+        )
 
     @staticmethod
     def at(path):
@@ -246,20 +245,20 @@ def _discover_resolvers():
         """Search the meta_path for resolvers."""
         declared = (
-            getattr(finder, 'find_distributions', None)
-            for finder in sys.meta_path
-            )
+            getattr(finder, "find_distributions", None) for finder in sys.meta_path
+        )
         return filter(None, declared)
 
     @classmethod
-    def _local(cls, root='.'):
+    def _local(cls, root="."):
         from pep517 import build, meta
+
         system = build.compat_system(root)
         builder = functools.partial(
             meta.build,
             source_dir=root,
             system=system,
-            )
+        )
         return PathDistribution(zipp.Path(meta.build_as_zip(builder)))
 
     @property
@@ -270,23 +269,23 @@ def metadata(self):
         metadata. See PEP 566 for details.
         """
         text = (
-            self.read_text('METADATA')
-            or self.read_text('PKG-INFO')
+            self.read_text("METADATA")
+            or self.read_text("PKG-INFO")
             # This last clause is here to support old egg-info files. Its
             # effect is to just end up using the PathDistribution's self._path
             # (which points to the egg-info file) attribute unchanged.
- or self.read_text('') - ) + or self.read_text("") + ) return email_message_from_string(text) @property def version(self): """Return the 'Version' metadata for the distribution package.""" - return self.metadata['Version'] + return self.metadata["Version"] @property def entry_points(self): - return EntryPoint._from_text(self.read_text('entry_points.txt')) + return EntryPoint._from_text(self.read_text("entry_points.txt")) @property def files(self): @@ -314,7 +313,7 @@ def _read_files_distinfo(self): """ Read the lines of RECORD """ - text = self.read_text('RECORD') + text = self.read_text("RECORD") return text and text.splitlines() def _read_files_egginfo(self): @@ -322,7 +321,7 @@ def _read_files_egginfo(self): SOURCES.txt might contain literal commas, so wrap each line in quotes. """ - text = self.read_text('SOURCES.txt') + text = self.read_text("SOURCES.txt") return text and map('"{}"'.format, text.splitlines()) @property @@ -332,27 +331,28 @@ def requires(self): return reqs and list(reqs) def _read_dist_info_reqs(self): - return self.metadata.get_all('Requires-Dist') + return self.metadata.get_all("Requires-Dist") def _read_egg_info_reqs(self): - source = self.read_text('requires.txt') + source = self.read_text("requires.txt") return source and self._deps_from_requires_text(source) @classmethod def _deps_from_requires_text(cls, source): section_pairs = cls._read_sections(source.splitlines()) sections = { - section: list(map(operator.itemgetter('line'), results)) - for section, results in - itertools.groupby(section_pairs, operator.itemgetter('section')) - } + section: list(map(operator.itemgetter("line"), results)) + for section, results in itertools.groupby( + section_pairs, operator.itemgetter("section") + ) + } return cls._convert_egg_info_reqs_to_simple_reqs(sections) @staticmethod def _read_sections(lines): section = None for line in filter(None, lines): - section_match = re.match(r'\[(.*)\]$', line) + section_match = re.match(r"\[(.*)\]$", line) if section_match: section = section_match.group(1) continue @@ -369,16 +369,17 @@ def _convert_egg_info_reqs_to_simple_reqs(sections): requirement. This method converts the former to the latter. See _test_deps_from_requires_text for an example. """ + def make_condition(name): return name and 'extra == "{name}"'.format(name=name) def parse_condition(section): - section = section or '' - extra, sep, markers = section.partition(':') + section = section or "" + extra, sep, markers = section.partition(":") if extra and markers: - markers = '({markers})'.format(markers=markers) + markers = "({markers})".format(markers=markers) conditions = list(filter(None, [markers, make_condition(extra)])) - return '; ' + ' and '.join(conditions) if conditions else '' + return "; " + " and ".join(conditions) if conditions else "" for section, deps in sections.items(): for dep in deps: @@ -419,7 +420,7 @@ def path(self): Typically refers to Python package paths and defaults to ``sys.path``. 
""" - return vars(self).get('path', sys.path) + return vars(self).get("path", sys.path) @abc.abstractmethod def find_distributions(self, context=Context()): @@ -447,7 +448,7 @@ def joinpath(self, child): def children(self): with suppress(Exception): - return os.listdir(self.root or '.') + return os.listdir(self.root or ".") with suppress(Exception): return self.zip_children() return [] @@ -457,41 +458,38 @@ def zip_children(self): names = zip_path.root.namelist() self.joinpath = zip_path.joinpath - return unique_ordered( - child.split(posixpath.sep, 1)[0] - for child in names - ) + return unique_ordered(child.split(posixpath.sep, 1)[0] for child in names) def search(self, name): return ( self.joinpath(child) for child in self.children() if name.matches(child, self.base) - ) + ) class Prepared: """ A prepared search for metadata on a possibly-named package. """ + normalized = None - suffixes = '.dist-info', '.egg-info' - exact_matches = [''][:0] + suffixes = ".dist-info", ".egg-info" + exact_matches = [""][:0] def __init__(self, name): self.name = name if name is None: return self.normalized = self.normalize(name) - self.exact_matches = [ - self.normalized + suffix for suffix in self.suffixes] + self.exact_matches = [self.normalized + suffix for suffix in self.suffixes] @staticmethod def normalize(name): """ PEP 503 normalization plus dashes as underscores. """ - return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') + return re.sub(r"[-_.]+", "-", name).lower().replace("-", "_") @staticmethod def legacy_normalize(name): @@ -499,30 +497,30 @@ def legacy_normalize(name): Normalize the package name as found in the convention in older packaging tools versions and specs. """ - return name.lower().replace('-', '_') + return name.lower().replace("-", "_") def matches(self, cand, base): low = cand.lower() pre, ext = os.path.splitext(low) - name, sep, rest = pre.partition('-') + name, sep, rest = pre.partition("-") return ( low in self.exact_matches - or ext in self.suffixes and ( - not self.normalized or - name.replace('.', '_') == self.normalized - ) + or ext in self.suffixes + and (not self.normalized or name.replace(".", "_") == self.normalized) # legacy case: - or self.is_egg(base) and low == 'egg-info' - ) + or self.is_egg(base) + and low == "egg-info" + ) def is_egg(self, base): - normalized = self.legacy_normalize(self.name or '') - prefix = normalized + '-' if normalized else '' - versionless_egg_name = normalized + '.egg' if self.name else '' + normalized = self.legacy_normalize(self.name or "") + prefix = normalized + "-" if normalized else "" + versionless_egg_name = normalized + ".egg" if self.name else "" return ( base == versionless_egg_name or base.startswith(prefix) - and base.endswith('.egg')) + and base.endswith(".egg") + ) @install @@ -549,9 +547,8 @@ def find_distributions(self, context=DistributionFinder.Context()): def _search_paths(cls, name, paths): """Find metadata directories in paths heuristically.""" return itertools.chain.from_iterable( - path.search(Prepared(name)) - for path in map(FastPath, paths) - ) + path.search(Prepared(name)) for path in map(FastPath, paths) + ) class PathDistribution(Distribution): @@ -564,9 +561,15 @@ def __init__(self, path): self._path = path def read_text(self, filename): - with suppress(FileNotFoundError, IsADirectoryError, KeyError, - NotADirectoryError, PermissionError): - return self._path.joinpath(filename).read_text(encoding='utf-8') + with suppress( + FileNotFoundError, + IsADirectoryError, + KeyError, + NotADirectoryError, + 
PermissionError, + ): + return self._path.joinpath(filename).read_text(encoding="utf-8") + read_text.__doc__ = Distribution.read_text.__doc__ def locate_file(self, path): @@ -614,15 +617,11 @@ def entry_points(): :return: EntryPoint objects for all installed packages. """ - eps = itertools.chain.from_iterable( - dist.entry_points for dist in distributions()) - by_group = operator.attrgetter('group') + eps = itertools.chain.from_iterable(dist.entry_points for dist in distributions()) + by_group = operator.attrgetter("group") ordered = sorted(eps, key=by_group) grouped = itertools.groupby(ordered, by_group) - return { - group: tuple(eps) - for group, eps in grouped - } + return {group: tuple(eps) for group, eps in grouped} def files(distribution_name): diff --git a/metaflow/_vendor/v3_5/importlib_metadata/_compat.py b/metaflow/_vendor/v3_5/importlib_metadata/_compat.py index 303d4a22e8..4751e17cf0 100644 --- a/metaflow/_vendor/v3_5/importlib_metadata/_compat.py +++ b/metaflow/_vendor/v3_5/importlib_metadata/_compat.py @@ -10,6 +10,7 @@ import builtins from configparser import ConfigParser import contextlib + FileNotFoundError = builtins.FileNotFoundError IsADirectoryError = builtins.IsADirectoryError NotADirectoryError = builtins.NotADirectoryError @@ -21,12 +22,13 @@ from itertools import imap as map # type: ignore from itertools import ifilterfalse as filterfalse import contextlib2 as contextlib + FileNotFoundError = IOError, OSError IsADirectoryError = IOError, OSError NotADirectoryError = IOError, OSError PermissionError = IOError, OSError -str = type('') +str = type("") suppress = contextlib.suppress @@ -44,16 +46,25 @@ if sys.version_info >= (3,): # pragma: nocover from importlib.abc import MetaPathFinder else: # pragma: nocover + class MetaPathFinder(object): __metaclass__ = abc.ABCMeta __metaclass__ = type __all__ = [ - 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError', - 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError', - 'NotADirectoryError', 'email_message_from_string', - ] + "install", + "NullFinder", + "MetaPathFinder", + "ModuleNotFoundError", + "pathlib", + "ConfigParser", + "map", + "suppress", + "FileNotFoundError", + "NotADirectoryError", + "email_message_from_string", +] def install(cls): @@ -77,11 +88,12 @@ def disable_stdlib_finder(): See #91 for more background for rationale on this sketchy behavior. """ + def matches(finder): - return ( - getattr(finder, '__module__', None) == '_frozen_importlib_external' - and hasattr(finder, 'find_distributions') - ) + return getattr( + finder, "__module__", None + ) == "_frozen_importlib_external" and hasattr(finder, "find_distributions") + for finder in filter(matches, sys.meta_path): # pragma: nocover del finder.find_distributions @@ -91,6 +103,7 @@ class NullFinder: A "Finder" (aka "MetaClassFinder") that never finds any modules, but may find distributions. """ + @staticmethod def find_spec(*args, **kwargs): return None @@ -112,10 +125,8 @@ def py2_message_from_string(text): # nocoverpy3 email_message_from_string = ( - py2_message_from_string - if sys.version_info < (3,) else - email.message_from_string - ) + py2_message_from_string if sys.version_info < (3,) else email.message_from_string +) class PyPy_repr: @@ -123,14 +134,16 @@ class PyPy_repr: Override repr for EntryPoint objects on PyPy to avoid __iter__ access. Ref #97, #102. 
""" - affected = hasattr(sys, 'pypy_version_info') + + affected = hasattr(sys, "pypy_version_info") def __compat_repr__(self): # pragma: nocover def make_param(name): value = getattr(self, name) - return '{name}={value!r}'.format(**locals()) - params = ', '.join(map(make_param, self._fields)) - return 'EntryPoint({params})'.format(**locals()) + return "{name}={value!r}".format(**locals()) + + params = ", ".join(map(make_param, self._fields)) + return "EntryPoint({params})".format(**locals()) if affected: # pragma: nocover __repr__ = __compat_repr__ @@ -148,5 +161,4 @@ def unique_everseen(iterable): # pragma: nocover yield element -unique_ordered = ( - unique_everseen if sys.version_info < (3, 7) else dict.fromkeys) +unique_ordered = unique_everseen if sys.version_info < (3, 7) else dict.fromkeys diff --git a/metaflow/_vendor/v3_5/zipp.py b/metaflow/_vendor/v3_5/zipp.py index 26b723c1fd..72632b0b77 100644 --- a/metaflow/_vendor/v3_5/zipp.py +++ b/metaflow/_vendor/v3_5/zipp.py @@ -12,7 +12,7 @@ OrderedDict = dict -__all__ = ['Path'] +__all__ = ["Path"] def _parents(path): @@ -93,7 +93,7 @@ def resolve_dir(self, name): as a directory (with the trailing slash). """ names = self._name_set() - dirname = name + '/' + dirname = name + "/" dir_match = name not in names and dirname in names return dirname if dir_match else name @@ -110,7 +110,7 @@ def make(cls, source): return cls(_pathlib_compat(source)) # Only allow for FastLookup when supplied zipfile is read-only - if 'r' not in source.mode: + if "r" not in source.mode: cls = CompleteDirs source.__class__ = cls @@ -240,7 +240,7 @@ def __init__(self, root, at=""): self.root = FastLookup.make(root) self.at = at - def open(self, mode='r', *args, pwd=None, **kwargs): + def open(self, mode="r", *args, pwd=None, **kwargs): """ Open this entry as text or binary following the semantics of ``pathlib.Path.open()`` by passing arguments through @@ -249,10 +249,10 @@ def open(self, mode='r', *args, pwd=None, **kwargs): if self.is_dir(): raise IsADirectoryError(self) zip_mode = mode[0] - if not self.exists() and zip_mode == 'r': + if not self.exists() and zip_mode == "r": raise FileNotFoundError(self) stream = self.root.open(self.at, zip_mode, pwd=pwd) - if 'b' in mode: + if "b" in mode: if args or kwargs: raise ValueError("encoding args invalid for binary operation") return stream @@ -279,11 +279,11 @@ def filename(self): return pathlib.Path(self.root.filename).joinpath(self.at) def read_text(self, *args, **kwargs): - with self.open('r', *args, **kwargs) as strm: + with self.open("r", *args, **kwargs) as strm: return strm.read() def read_bytes(self): - with self.open('rb') as strm: + with self.open("rb") as strm: return strm.read() def _is_child(self, path): @@ -323,7 +323,7 @@ def joinpath(self, *other): def parent(self): if not self.at: return self.filename.parent - parent_at = posixpath.dirname(self.at.rstrip('/')) + parent_at = posixpath.dirname(self.at.rstrip("/")) if parent_at: - parent_at += '/' + parent_at += "/" return self._next(parent_at) diff --git a/metaflow/_vendor/v3_6/__init__.py b/metaflow/_vendor/v3_6/__init__.py index 22ae0c5f40..932b79829c 100644 --- a/metaflow/_vendor/v3_6/__init__.py +++ b/metaflow/_vendor/v3_6/__init__.py @@ -1 +1 @@ -# Empty file \ No newline at end of file +# Empty file diff --git a/metaflow/_vendor/v3_6/importlib_metadata/__init__.py b/metaflow/_vendor/v3_6/importlib_metadata/__init__.py index 8d3b7814d5..10a743115e 100644 --- a/metaflow/_vendor/v3_6/importlib_metadata/__init__.py +++ 
b/metaflow/_vendor/v3_6/importlib_metadata/__init__.py
@@ -33,18 +33,18 @@
 
 
 __all__ = [
-    'Distribution',
-    'DistributionFinder',
-    'PackageMetadata',
-    'PackageNotFoundError',
-    'distribution',
-    'distributions',
-    'entry_points',
-    'files',
-    'metadata',
-    'packages_distributions',
-    'requires',
-    'version',
+    "Distribution",
+    "DistributionFinder",
+    "PackageMetadata",
+    "PackageNotFoundError",
+    "distribution",
+    "distributions",
+    "entry_points",
+    "files",
+    "metadata",
+    "packages_distributions",
+    "requires",
+    "version",
 ]
 
 
@@ -114,15 +114,15 @@ def read(text, filter_=None):
         lines = filter(filter_, map(str.strip, text.splitlines()))
         name = None
         for value in lines:
-            section_match = value.startswith('[') and value.endswith(']')
+            section_match = value.startswith("[") and value.endswith("]")
             if section_match:
-                name = value.strip('[]')
+                name = value.strip("[]")
                 continue
             yield Pair(name, value)
 
     @staticmethod
     def valid(line):
-        return line and not line.startswith('#')
+        return line and not line.startswith("#")
 
 
 class DeprecatedTuple:
@@ -160,9 +160,9 @@ class EntryPoint(DeprecatedTuple):
     """
 
     pattern = re.compile(
-        r'(?P<module>[\w.]+)\s*'
-        r'(:\s*(?P<attr>[\w.]+))?\s*'
-        r'(?P<extras>\[.*\])?\s*$'
+        r"(?P<module>[\w.]+)\s*"
+        r"(:\s*(?P<attr>[\w.]+))?\s*"
+        r"(?P<extras>\[.*\])?\s*$"
     )
     """
     A regular expression describing the syntax for an entry point,
@@ -180,7 +180,7 @@ class EntryPoint(DeprecatedTuple):
     following the attr, and following any extras.
     """
 
-    dist: Optional['Distribution'] = None
+    dist: Optional["Distribution"] = None
 
     def __init__(self, name, value, group):
         vars(self).update(name=name, value=value, group=group)
@@ -191,24 +191,24 @@ def load(self):
         return the named object.
         """
         match = self.pattern.match(self.value)
-        module = import_module(match.group('module'))
-        attrs = filter(None, (match.group('attr') or '').split('.'))
+        module = import_module(match.group("module"))
+        attrs = filter(None, (match.group("attr") or "").split("."))
         return functools.reduce(getattr, attrs, module)
 
     @property
     def module(self):
         match = self.pattern.match(self.value)
-        return match.group('module')
+        return match.group("module")
 
     @property
     def attr(self):
         match = self.pattern.match(self.value)
-        return match.group('attr')
+        return match.group("attr")
 
     @property
     def extras(self):
         match = self.pattern.match(self.value)
-        return list(re.finditer(r'\w+', match.group('extras') or ''))
+        return list(re.finditer(r"\w+", match.group("extras") or ""))
 
     def _for(self, dist):
         vars(self).update(dist=dist)
@@ -243,8 +243,8 @@ def __setattr__(self, name, value):
 
     def __repr__(self):
         return (
-            f'EntryPoint(name={self.name!r}, value={self.value!r}, '
-            f'group={self.group!r})'
+            f"EntryPoint(name={self.name!r}, value={self.value!r}, "
+            f"group={self.group!r})"
         )
 
     def __hash__(self):
@@ -298,16 +298,16 @@ def wrapped(self, *args, **kwargs):
         return wrapped
 
     for method_name in [
-        '__setitem__',
-        '__delitem__',
-        'append',
-        'reverse',
-        'extend',
-        'pop',
-        'remove',
-        '__iadd__',
-        'insert',
-        'sort',
+        "__setitem__",
+        "__delitem__",
+        "append",
+        "reverse",
+        "extend",
+        "pop",
+        "remove",
+        "__iadd__",
+        "insert",
+        "sort",
     ]:
         locals()[method_name] = _wrap_deprecated_method(method_name)
 
@@ -382,7 +382,7 @@ def _from_text_for(cls, text, dist):
     def _from_text(text):
         return (
             EntryPoint(name=item.value.name, value=item.value.value, group=item.name)
-            for item in Sectioned.section_pairs(text or '')
+            for item in Sectioned.section_pairs(text or "")
         )
 
 
@@ -449,7 +449,7 @@ class SelectableGroups(Deprecated, dict):
 
     @classmethod
     def load(cls, eps):
-        by_group = operator.attrgetter('group')
+        by_group = operator.attrgetter("group")
         ordered = sorted(eps, key=by_group)
         grouped = itertools.groupby(ordered, by_group)
         return cls((group, EntryPoints(eps)) for group, eps in grouped)
@@ -484,12 +484,12 @@ def select(self, **params):
 class PackagePath(pathlib.PurePosixPath):
     """A reference to a path in a package"""
 
-    def read_text(self, encoding='utf-8'):
+    def read_text(self, encoding="utf-8"):
         with self.locate().open(encoding=encoding) as stream:
             return stream.read()
 
     def read_binary(self):
-        with self.locate().open('rb') as stream:
+        with self.locate().open("rb") as stream:
             return stream.read()
 
     def locate(self):
@@ -499,10 +499,10 @@ class FileHash:
     def __init__(self, spec):
-        self.mode, _, self.value = spec.partition('=')
+        self.mode, _, self.value = spec.partition("=")
 
     def __repr__(self):
-        return f'<FileHash mode: {self.mode} value: {self.value}>'
+        return f"<FileHash mode: {self.mode} value: {self.value}>"
 
 
 class Distribution:
@@ -551,7 +551,7 @@ def discover(cls, **kwargs):
         :context: A ``DistributionFinder.Context`` object.
         :return: Iterable of Distribution objects for all packages.
         """
-        context = kwargs.pop('context', None)
+        context = kwargs.pop("context", None)
         if context and kwargs:
             raise ValueError("cannot accept context and kwargs")
         context = context or DistributionFinder.Context(**kwargs)
@@ -572,12 +572,12 @@ def at(path):
     def _discover_resolvers():
         """Search the meta_path for resolvers."""
         declared = (
-            getattr(finder, 'find_distributions', None) for finder in sys.meta_path
+            getattr(finder, "find_distributions", None) for finder in sys.meta_path
         )
         return filter(None, declared)
 
     @classmethod
-    def _local(cls, root='.'):
+    def _local(cls, root="."):
         from pep517 import build, meta
 
         system = build.compat_system(root)
@@ -596,19 +596,19 @@ def metadata(self) -> _meta.PackageMetadata:
         metadata. See PEP 566 for details.
         """
         text = (
-            self.read_text('METADATA')
-            or self.read_text('PKG-INFO')
+            self.read_text("METADATA")
+            or self.read_text("PKG-INFO")
             # This last clause is here to support old egg-info files. Its
             # effect is to just end up using the PathDistribution's self._path
             # (which points to the egg-info file) attribute unchanged.
-            or self.read_text('')
+            or self.read_text("")
         )
         return _adapters.Message(email.message_from_string(text))
 
     @property
     def name(self):
         """Return the 'Name' metadata for the distribution package."""
-        return self.metadata['Name']
+        return self.metadata["Name"]
 
     @property
     def _normalized_name(self):
@@ -618,11 +618,11 @@ def _normalized_name(self):
     @property
     def version(self):
         """Return the 'Version' metadata for the distribution package."""
-        return self.metadata['Version']
+        return self.metadata["Version"]
 
     @property
     def entry_points(self):
-        return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)
+        return EntryPoints._from_text_for(self.read_text("entry_points.txt"), self)
 
     @property
     def files(self):
@@ -653,7 +653,7 @@ def _read_files_distinfo(self):
         """
         Read the lines of RECORD
         """
-        text = self.read_text('RECORD')
+        text = self.read_text("RECORD")
         return text and text.splitlines()
 
     def _read_files_egginfo(self):
@@ -661,7 +661,7 @@ def _read_files_egginfo(self):
         SOURCES.txt might contain literal commas, so wrap each line
         in quotes.
""" - text = self.read_text('SOURCES.txt') + text = self.read_text("SOURCES.txt") return text and map('"{}"'.format, text.splitlines()) @property @@ -671,10 +671,10 @@ def requires(self): return reqs and list(reqs) def _read_dist_info_reqs(self): - return self.metadata.get_all('Requires-Dist') + return self.metadata.get_all("Requires-Dist") def _read_egg_info_reqs(self): - source = self.read_text('requires.txt') + source = self.read_text("requires.txt") return source and self._deps_from_requires_text(source) @classmethod @@ -697,12 +697,12 @@ def make_condition(name): return name and f'extra == "{name}"' def quoted_marker(section): - section = section or '' - extra, sep, markers = section.partition(':') + section = section or "" + extra, sep, markers = section.partition(":") if extra and markers: - markers = f'({markers})' + markers = f"({markers})" conditions = list(filter(None, [markers, make_condition(extra)])) - return '; ' + ' and '.join(conditions) if conditions else '' + return "; " + " and ".join(conditions) if conditions else "" def url_req_space(req): """ @@ -710,7 +710,7 @@ def url_req_space(req): Ref python/importlib_metadata#357. """ # '@' is uniquely indicative of a url_req. - return ' ' * ('@' in req) + return " " * ("@" in req) for section in sections: space = url_req_space(section.value) @@ -752,7 +752,7 @@ def path(self): Typically refers to Python installed package paths such as "site-packages" directories and defaults to ``sys.path``. """ - return vars(self).get('path', sys.path) + return vars(self).get("path", sys.path) @abc.abstractmethod def find_distributions(self, context=Context()): @@ -786,7 +786,7 @@ def joinpath(self, child): def children(self): with suppress(Exception): - return os.listdir(self.root or '.') + return os.listdir(self.root or ".") with suppress(Exception): return self.zip_children() return [] @@ -868,7 +868,7 @@ def normalize(name): """ PEP 503 normalization plus dashes as underscores. """ - return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') + return re.sub(r"[-_.]+", "-", name).lower().replace("-", "_") @staticmethod def legacy_normalize(name): @@ -876,7 +876,7 @@ def legacy_normalize(name): Normalize the package name as found in the convention in older packaging tools versions and specs. """ - return name.lower().replace('-', '_') + return name.lower().replace("-", "_") def __bool__(self): return bool(self.name) @@ -930,7 +930,7 @@ def read_text(self, filename): NotADirectoryError, PermissionError, ): - return self._path.joinpath(filename).read_text(encoding='utf-8') + return self._path.joinpath(filename).read_text(encoding="utf-8") read_text.__doc__ = Distribution.read_text.__doc__ @@ -948,9 +948,9 @@ def _normalized_name(self): def _name_from_stem(self, stem): name, ext = os.path.splitext(stem) - if ext not in ('.dist-info', '.egg-info'): + if ext not in (".dist-info", ".egg-info"): return - name, sep, rest = stem.partition('-') + name, sep, rest = stem.partition("-") return name @@ -1007,7 +1007,7 @@ def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: :return: EntryPoints or SelectableGroups for all installed packages. 
""" - norm_name = operator.attrgetter('_normalized_name') + norm_name = operator.attrgetter("_normalized_name") unique = functools.partial(unique_everseen, key=norm_name) eps = itertools.chain.from_iterable( dist.entry_points for dist in unique(distributions()) @@ -1047,17 +1047,17 @@ def packages_distributions() -> Mapping[str, List[str]]: pkg_to_dist = collections.defaultdict(list) for dist in distributions(): for pkg in _top_level_declared(dist) or _top_level_inferred(dist): - pkg_to_dist[pkg].append(dist.metadata['Name']) + pkg_to_dist[pkg].append(dist.metadata["Name"]) return dict(pkg_to_dist) def _top_level_declared(dist): - return (dist.read_text('top_level.txt') or '').split() + return (dist.read_text("top_level.txt") or "").split() def _top_level_inferred(dist): return { - f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name + f.parts[0] if len(f.parts) > 1 else f.with_suffix("").name for f in always_iterable(dist.files) if f.suffix == ".py" } diff --git a/metaflow/_vendor/v3_6/importlib_metadata/_adapters.py b/metaflow/_vendor/v3_6/importlib_metadata/_adapters.py index aa460d3eda..49cfa02e66 100644 --- a/metaflow/_vendor/v3_6/importlib_metadata/_adapters.py +++ b/metaflow/_vendor/v3_6/importlib_metadata/_adapters.py @@ -10,16 +10,16 @@ class Message(email.message.Message): map( FoldedCase, [ - 'Classifier', - 'Obsoletes-Dist', - 'Platform', - 'Project-URL', - 'Provides-Dist', - 'Provides-Extra', - 'Requires-Dist', - 'Requires-External', - 'Supported-Platform', - 'Dynamic', + "Classifier", + "Obsoletes-Dist", + "Platform", + "Project-URL", + "Provides-Dist", + "Provides-Extra", + "Requires-Dist", + "Requires-External", + "Supported-Platform", + "Dynamic", ], ) ) @@ -42,13 +42,13 @@ def __iter__(self): def _repair_headers(self): def redent(value): "Correct for RFC822 indentation" - if not value or '\n' not in value: + if not value or "\n" not in value: return value - return textwrap.dedent(' ' * 8 + value) + return textwrap.dedent(" " * 8 + value) - headers = [(key, redent(value)) for key, value in vars(self)['_headers']] + headers = [(key, redent(value)) for key, value in vars(self)["_headers"]] if self._payload: - headers.append(('Description', self.get_payload())) + headers.append(("Description", self.get_payload())) return headers @property @@ -60,9 +60,9 @@ def json(self): def transform(key): value = self.get_all(key) if key in self.multiple_use_keys else self[key] - if key == 'Keywords': - value = re.split(r'\s+', value) - tk = key.lower().replace('-', '_') + if key == "Keywords": + value = re.split(r"\s+", value) + tk = key.lower().replace("-", "_") return tk, value return dict(map(transform, map(FoldedCase, self))) diff --git a/metaflow/_vendor/v3_6/importlib_metadata/_collections.py b/metaflow/_vendor/v3_6/importlib_metadata/_collections.py index cf0954e1a3..895678a23c 100644 --- a/metaflow/_vendor/v3_6/importlib_metadata/_collections.py +++ b/metaflow/_vendor/v3_6/importlib_metadata/_collections.py @@ -18,13 +18,13 @@ class FreezableDefaultDict(collections.defaultdict): """ def __missing__(self, key): - return getattr(self, '_frozen', super().__missing__)(key) + return getattr(self, "_frozen", super().__missing__)(key) def freeze(self): self._frozen = lambda key: self.default_factory() -class Pair(collections.namedtuple('Pair', 'name value')): +class Pair(collections.namedtuple("Pair", "name value")): @classmethod def parse(cls, text): return cls(*map(str.strip, text.split("=", 1))) diff --git a/metaflow/_vendor/v3_6/importlib_metadata/_compat.py 
b/metaflow/_vendor/v3_6/importlib_metadata/_compat.py index 3680940f0b..eaaab2ffa6 100644 --- a/metaflow/_vendor/v3_6/importlib_metadata/_compat.py +++ b/metaflow/_vendor/v3_6/importlib_metadata/_compat.py @@ -2,7 +2,7 @@ import platform -__all__ = ['install', 'NullFinder', 'Protocol'] +__all__ = ["install", "NullFinder", "Protocol"] try: @@ -35,8 +35,8 @@ def disable_stdlib_finder(): def matches(finder): return getattr( - finder, '__module__', None - ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') + finder, "__module__", None + ) == "_frozen_importlib_external" and hasattr(finder, "find_distributions") for finder in filter(matches, sys.meta_path): # pragma: nocover del finder.find_distributions @@ -67,5 +67,5 @@ def pypy_partial(val): Workaround for #327. """ - is_pypy = platform.python_implementation() == 'PyPy' + is_pypy = platform.python_implementation() == "PyPy" return val + is_pypy diff --git a/metaflow/_vendor/v3_6/importlib_metadata/_meta.py b/metaflow/_vendor/v3_6/importlib_metadata/_meta.py index 37ee43e6ef..81ee991eeb 100644 --- a/metaflow/_vendor/v3_6/importlib_metadata/_meta.py +++ b/metaflow/_vendor/v3_6/importlib_metadata/_meta.py @@ -35,13 +35,13 @@ class SimplePath(Protocol): A minimal subset of pathlib.Path required by PathDistribution. """ - def joinpath(self) -> 'SimplePath': + def joinpath(self) -> "SimplePath": ... # pragma: no cover - def __truediv__(self) -> 'SimplePath': + def __truediv__(self) -> "SimplePath": ... # pragma: no cover - def parent(self) -> 'SimplePath': + def parent(self) -> "SimplePath": ... # pragma: no cover def read_text(self) -> str: diff --git a/metaflow/_vendor/v3_6/importlib_metadata/_text.py b/metaflow/_vendor/v3_6/importlib_metadata/_text.py index c88cfbb234..376210d709 100644 --- a/metaflow/_vendor/v3_6/importlib_metadata/_text.py +++ b/metaflow/_vendor/v3_6/importlib_metadata/_text.py @@ -94,6 +94,6 @@ def lower(self): def index(self, sub): return self.lower().index(sub.lower()) - def split(self, splitter=' ', maxsplit=0): + def split(self, splitter=" ", maxsplit=0): pattern = re.compile(re.escape(splitter), re.I) return pattern.split(self, maxsplit) diff --git a/metaflow/_vendor/v3_6/typing_extensions.py b/metaflow/_vendor/v3_6/typing_extensions.py index 43c05bdcd2..7b69e0da73 100644 --- a/metaflow/_vendor/v3_6/typing_extensions.py +++ b/metaflow/_vendor/v3_6/typing_extensions.py @@ -21,58 +21,54 @@ # Please keep __all__ alphabetized within each category. __all__ = [ # Super-special typing primitives. - 'ClassVar', - 'Concatenate', - 'Final', - 'LiteralString', - 'ParamSpec', - 'Self', - 'Type', - 'TypeVarTuple', - 'Unpack', - + "ClassVar", + "Concatenate", + "Final", + "LiteralString", + "ParamSpec", + "Self", + "Type", + "TypeVarTuple", + "Unpack", # ABCs (from collections.abc). - 'Awaitable', - 'AsyncIterator', - 'AsyncIterable', - 'Coroutine', - 'AsyncGenerator', - 'AsyncContextManager', - 'ChainMap', - + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "AsyncGenerator", + "AsyncContextManager", + "ChainMap", # Concrete collection types. - 'ContextManager', - 'Counter', - 'Deque', - 'DefaultDict', - 'OrderedDict', - 'TypedDict', - + "ContextManager", + "Counter", + "Deque", + "DefaultDict", + "OrderedDict", + "TypedDict", # Structural checks, a.k.a. protocols. - 'SupportsIndex', - + "SupportsIndex", # One-off things. 
- 'Annotated', - 'assert_never', - 'dataclass_transform', - 'final', - 'IntVar', - 'is_typeddict', - 'Literal', - 'NewType', - 'overload', - 'Protocol', - 'reveal_type', - 'runtime', - 'runtime_checkable', - 'Text', - 'TypeAlias', - 'TypeGuard', - 'TYPE_CHECKING', - 'Never', - 'NoReturn', - 'Required', - 'NotRequired', + "Annotated", + "assert_never", + "dataclass_transform", + "final", + "IntVar", + "is_typeddict", + "Literal", + "NewType", + "overload", + "Protocol", + "reveal_type", + "runtime", + "runtime_checkable", + "Text", + "TypeAlias", + "TypeGuard", + "TYPE_CHECKING", + "Never", + "NoReturn", + "Required", + "NotRequired", ] if PEP_560: @@ -84,8 +80,8 @@ def _no_slots_copy(dct): dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: + if "__slots__" in dict_copy: + for slot in dict_copy["__slots__"]: dict_copy.pop(slot, None) return dict_copy @@ -110,19 +106,26 @@ def _check_generic(cls, parameters, elen=_marker): num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): return - raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" - f" actual {alen}, expected {elen}") + raise TypeError( + f"Too {'many' if alen > elen else 'few'} parameters for {cls};" + f" actual {alen}, expected {elen}" + ) if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): return isinstance( t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) ) + elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) + else: + def _should_collect_from_parameters(t): return isinstance(t, typing._GenericAlias) and not t._special @@ -137,11 +140,7 @@ def _collect_type_vars(types, typevar_types=None): typevar_types = typing.TypeVar tvars = [] for t in types: - if ( - isinstance(t, typevar_types) and - t not in tvars and - not _is_unpack(t) - ): + if isinstance(t, typevar_types) and t not in tvars and not _is_unpack(t): tvars.append(t) if _should_collect_from_parameters(t): tvars.extend([t for t in t.__parameters__ if t not in tvars]) @@ -149,10 +148,11 @@ def _collect_type_vars(types, typevar_types=None): # 3.6.2+ -if hasattr(typing, 'NoReturn'): +if hasattr(typing, "NoReturn"): NoReturn = typing.NoReturn # 3.6.0-3.6.1 else: + class _NoReturn(typing._FinalTypingBase, _root=True): """Special type indicating functions that never return. Example:: @@ -165,6 +165,7 @@ def stop() -> NoReturn: This type is invalid in other positions, e.g., ``List[NoReturn]`` will fail in static type checkers. """ + __slots__ = () def __instancecheck__(self, obj): @@ -177,32 +178,34 @@ def __subclasscheck__(self, cls): # Some unconstrained type variables. These are used by the container types. # (These are not for export.) -T = typing.TypeVar('T') # Any type. -KT = typing.TypeVar('KT') # Key type. -VT = typing.TypeVar('VT') # Value type. -T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. -T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. +T = typing.TypeVar("T") # Any type. +KT = typing.TypeVar("KT") # Key type. +VT = typing.TypeVar("VT") # Value type. +T_co = typing.TypeVar("T_co", covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar("T_contra", contravariant=True) # Ditto contravariant. ClassVar = typing.ClassVar # On older versions of typing there is an internal class named "Final". 
# 3.8+ -if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): +if hasattr(typing, "Final") and sys.version_info[:2] >= (3, 7): Final = typing.Final # 3.7 elif sys.version_info[:2] >= (3, 7): - class _FinalForm(typing._SpecialForm, _root=True): + class _FinalForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only single type') + item = typing._type_check( + parameters, f"{self._name} accepts only single type" + ) return typing._GenericAlias(self, (item,)) - Final = _FinalForm('Final', - doc="""A special typing construct to indicate that a name + Final = _FinalForm( + "Final", + doc="""A special typing construct to indicate that a name cannot be re-assigned or overridden in a subclass. For example: @@ -214,9 +217,11 @@ class Connection: class FastConnector(Connection): TIMEOUT = 1 # Error reported by type checker - There is no runtime checking of these properties.""") + There is no runtime checking of these properties.""", + ) # 3.6 else: + class _Final(typing._FinalTypingBase, _root=True): """A special typing construct to indicate that a name cannot be re-assigned or overridden in a subclass. @@ -233,7 +238,7 @@ class FastConnector(Connection): There is no runtime checking of these properties. """ - __slots__ = ('__type__',) + __slots__ = ("__type__",) def __init__(self, tp=None, **kwds): self.__type__ = tp @@ -241,10 +246,13 @@ def __init__(self, tp=None, **kwds): def __getitem__(self, item): cls = type(self) if self.__type__ is None: - return cls(typing._type_check(item, - f'{cls.__name__[1:]} accepts only single type.'), - _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + return cls( + typing._type_check( + item, f"{cls.__name__[1:]} accepts only single type." + ), + _root=True, + ) + raise TypeError(f"{cls.__name__[1:]} cannot be further subscripted") def _eval_type(self, globalns, localns): new_tp = typing._eval_type(self.__type__, globalns, localns) @@ -255,7 +263,7 @@ def _eval_type(self, globalns, localns): def __repr__(self): r = super().__repr__() if self.__type__ is not None: - r += f'[{typing._type_repr(self.__type__)}]' + r += f"[{typing._type_repr(self.__type__)}]" return r def __hash__(self): @@ -314,20 +322,21 @@ def IntVar(name): # 3.8+: -if hasattr(typing, 'Literal'): +if hasattr(typing, "Literal"): Literal = typing.Literal # 3.7: elif sys.version_info[:2] >= (3, 7): - class _LiteralForm(typing._SpecialForm, _root=True): + class _LiteralForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name def __getitem__(self, parameters): return typing._GenericAlias(self, parameters) - Literal = _LiteralForm('Literal', - doc="""A type that can be used to indicate to type checkers + Literal = _LiteralForm( + "Literal", + doc="""A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the provided parameter. For example: @@ -338,9 +347,11 @@ def __getitem__(self, parameters): Literal[...] cannot be subclassed. 
There is no runtime checking verifying that the parameter is actually a value - instead of a type.""") + instead of a type.""", + ) # 3.6: else: + class _Literal(typing._FinalTypingBase, _root=True): """A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the @@ -355,7 +366,7 @@ class _Literal(typing._FinalTypingBase, _root=True): verifying that the parameter is actually a value instead of a type. """ - __slots__ = ('__values__',) + __slots__ = ("__values__",) def __init__(self, values=None, **kwds): self.__values__ = values @@ -366,7 +377,7 @@ def __getitem__(self, values): if not isinstance(values, tuple): values = (values,) return cls(values, _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + raise TypeError(f"{cls.__name__[1:]} cannot be further subscripted") def _eval_type(self, globalns, localns): return self @@ -409,9 +420,11 @@ def __subclasscheck__(self, subclass): versions of Python, see https://github.com/python/typing/issues/501. """ if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") + if sys._getframe(1).f_globals["__name__"] not in ["abc", "functools"]: + raise TypeError( + "Parameterized generics cannot be used with class " + "or instance checks" + ) return False if not self.__extra__: return super().__subclasscheck__(subclass) @@ -434,13 +447,17 @@ def __subclasscheck__(self, subclass): AsyncIterator = typing.AsyncIterator # 3.6.1+ -if hasattr(typing, 'Deque'): +if hasattr(typing, "Deque"): Deque = typing.Deque # 3.6.0 else: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): + + class Deque( + collections.deque, + typing.MutableSequence[T], + metaclass=_ExtensionsGenericMeta, + extra=collections.deque, + ): __slots__ = () def __new__(cls, *args, **kwds): @@ -448,9 +465,10 @@ def __new__(cls, *args, **kwds): return collections.deque(*args, **kwds) return typing._generic_new(collections.deque, cls, *args, **kwds) + ContextManager = typing.ContextManager # 3.6.2+ -if hasattr(typing, 'AsyncContextManager'): +if hasattr(typing, "AsyncContextManager"): AsyncContextManager = typing.AsyncContextManager # 3.6.0-3.6.1 else: @@ -472,20 +490,24 @@ def __subclasshook__(cls, C): return _check_methods_in_mro(C, "__aenter__", "__aexit__") return NotImplemented + DefaultDict = typing.DefaultDict # 3.7.2+ -if hasattr(typing, 'OrderedDict'): +if hasattr(typing, "OrderedDict"): OrderedDict = typing.OrderedDict # 3.7.0-3.7.2 elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2): OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) # 3.6 else: - class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict): + class OrderedDict( + collections.OrderedDict, + typing.MutableMapping[KT, VT], + metaclass=_ExtensionsGenericMeta, + extra=collections.OrderedDict, + ): __slots__ = () def __new__(cls, *args, **kwds): @@ -493,15 +515,19 @@ def __new__(cls, *args, **kwds): return collections.OrderedDict(*args, **kwds) return typing._generic_new(collections.OrderedDict, cls, *args, **kwds) + # 3.6.2+ -if hasattr(typing, 'Counter'): +if hasattr(typing, "Counter"): Counter = typing.Counter # 3.6.0-3.6.1 else: - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, 
extra=collections.Counter): + class Counter( + collections.Counter, + typing.Dict[T, int], + metaclass=_ExtensionsGenericMeta, + extra=collections.Counter, + ): __slots__ = () def __new__(cls, *args, **kwds): @@ -509,14 +535,18 @@ def __new__(cls, *args, **kwds): return collections.Counter(*args, **kwds) return typing._generic_new(collections.Counter, cls, *args, **kwds) + # 3.6.1+ -if hasattr(typing, 'ChainMap'): +if hasattr(typing, "ChainMap"): ChainMap = typing.ChainMap -elif hasattr(collections, 'ChainMap'): - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - +elif hasattr(collections, "ChainMap"): + + class ChainMap( + collections.ChainMap, + typing.MutableMapping[KT, VT], + metaclass=_ExtensionsGenericMeta, + extra=collections.ChainMap, + ): __slots__ = () def __new__(cls, *args, **kwds): @@ -524,16 +554,22 @@ def __new__(cls, *args, **kwds): return collections.ChainMap(*args, **kwds) return typing._generic_new(collections.ChainMap, cls, *args, **kwds) + # 3.6.1+ -if hasattr(typing, 'AsyncGenerator'): +if hasattr(typing, "AsyncGenerator"): AsyncGenerator = typing.AsyncGenerator # 3.6.0 else: - class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], - metaclass=_ExtensionsGenericMeta, - extra=collections.abc.AsyncGenerator): + + class AsyncGenerator( + AsyncIterator[T_co], + typing.Generic[T_co, T_contra], + metaclass=_ExtensionsGenericMeta, + extra=collections.abc.AsyncGenerator, + ): __slots__ = () + NewType = typing.NewType Text = typing.Text TYPE_CHECKING = typing.TYPE_CHECKING @@ -542,34 +578,60 @@ class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], def _gorg(cls): """This function exists for compatibility with old typing versions.""" assert isinstance(cls, GenericMeta) - if hasattr(cls, '_gorg'): + if hasattr(cls, "_gorg"): return cls._gorg while cls.__origin__ is not None: cls = cls.__origin__ return cls -_PROTO_WHITELIST = ['Callable', 'Awaitable', - 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', - 'ContextManager', 'AsyncContextManager'] +_PROTO_WHITELIST = [ + "Callable", + "Awaitable", + "Iterable", + "Iterator", + "AsyncIterable", + "AsyncIterator", + "Hashable", + "Sized", + "Container", + "Collection", + "Reversible", + "ContextManager", + "AsyncContextManager", +] def _get_protocol_attrs(cls): attrs = set() for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): + if base.__name__ in ("Protocol", "Generic"): continue - annotations = getattr(base, '__annotations__', {}) + annotations = getattr(base, "__annotations__", {}) for attr in list(base.__dict__.keys()) + list(annotations.keys()): - if (not attr.startswith('_abc_') and attr not in ( - '__abstractmethods__', '__annotations__', '__weakref__', - '_is_protocol', '_is_runtime_protocol', '__dict__', - '__args__', '__slots__', - '__next_in_mro__', '__parameters__', '__origin__', - '__orig_bases__', '__extra__', '__tree_hash__', - '__doc__', '__subclasshook__', '__init__', '__new__', - '__module__', '_MutableMapping__marker', '_gorg')): + if not attr.startswith("_abc_") and attr not in ( + "__abstractmethods__", + "__annotations__", + "__weakref__", + "_is_protocol", + "_is_runtime_protocol", + "__dict__", + "__args__", + "__slots__", + "__next_in_mro__", + "__parameters__", + "__origin__", + "__orig_bases__", + "__extra__", + "__tree_hash__", + "__doc__", + "__subclasshook__", + 
"__init__", + "__new__", + "__module__", + "_MutableMapping__marker", + "_gorg", + ): attrs.add(attr) return attrs @@ -579,14 +641,14 @@ def _is_callable_members_only(cls): # 3.8+ -if hasattr(typing, 'Protocol'): +if hasattr(typing, "Protocol"): Protocol = typing.Protocol # 3.7 elif PEP_560: def _no_init(self, *args, **kwargs): if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') + raise TypeError("Protocols cannot be instantiated") class _ProtocolMeta(abc.ABCMeta): # This metaclass is a bit unfortunate and exists only because of the lack @@ -594,15 +656,20 @@ class _ProtocolMeta(abc.ABCMeta): def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. - if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): + if ( + not getattr(cls, "_is_protocol", False) + or _is_callable_members_only(cls) + ) and issubclass(instance.__class__, cls): return True if cls._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): + if all( + hasattr(instance, attr) + and ( + not callable(getattr(cls, attr, None)) + or getattr(instance, attr) is not None + ) + for attr in _get_protocol_attrs(cls) + ): return True return super().__instancecheck__(instance) @@ -643,8 +710,10 @@ def meth(self) -> T: def __new__(cls, *args, **kwds): if cls is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can only be used as a base class") + raise TypeError( + "Type Protocol cannot be instantiated; " + "it can only be used as a base class" + ) return super().__new__(cls) @typing._tp_cache @@ -653,7 +722,8 @@ def __class_getitem__(cls, params): params = (params,) if not params and cls is not typing.Tuple: raise TypeError( - f"Parameter list to {cls.__qualname__}[...] cannot be empty") + f"Parameter list to {cls.__qualname__}[...] cannot be empty" + ) msg = "Parameters to generic types must be types." params = tuple(typing._type_check(p, msg) for p in params) # noqa if cls is Protocol: @@ -664,10 +734,10 @@ def __class_getitem__(cls, params): i += 1 raise TypeError( "Parameters to Protocol[...] must all be type variables." - f" Parameter {i + 1} is {params[i]}") + f" Parameter {i + 1} is {params[i]}" + ) if len(set(params)) != len(params): - raise TypeError( - "Parameters to Protocol[...] must all be unique") + raise TypeError("Parameters to Protocol[...] must all be unique") else: # Subscripting a regular Generic subclass. _check_generic(cls, params, len(cls.__parameters__)) @@ -675,13 +745,13 @@ def __class_getitem__(cls, params): def __init_subclass__(cls, *args, **kwargs): tvars = [] - if '__orig_bases__' in cls.__dict__: + if "__orig_bases__" in cls.__dict__: error = typing.Generic in cls.__orig_bases__ else: error = typing.Generic in cls.__bases__ if error: raise TypeError("Cannot inherit from plain Generic") - if '__orig_bases__' in cls.__dict__: + if "__orig_bases__" in cls.__dict__: tvars = typing._collect_type_vars(cls.__orig_bases__) # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. # If found, tvars must be a subset of it. @@ -690,14 +760,17 @@ def __init_subclass__(cls, *args, **kwargs): # and reject multiple Generic[...] and/or Protocol[...]. 
gvars = None for base in cls.__orig_bases__: - if (isinstance(base, typing._GenericAlias) and - base.__origin__ in (typing.Generic, Protocol)): + if isinstance(base, typing._GenericAlias) and base.__origin__ in ( + typing.Generic, + Protocol, + ): # for error messages the_base = base.__origin__.__name__ if gvars is not None: raise TypeError( "Cannot inherit from Generic[...]" - " and/or Protocol[...] multiple types.") + " and/or Protocol[...] multiple types." + ) gvars = base.__parameters__ if gvars is None: gvars = tvars @@ -705,50 +778,59 @@ def __init_subclass__(cls, *args, **kwargs): tvarset = set(tvars) gvarset = set(gvars) if not tvarset <= gvarset: - s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) - s_args = ', '.join(str(g) for g in gvars) - raise TypeError(f"Some type variables ({s_vars}) are" - f" not listed in {the_base}[{s_args}]") + s_vars = ", ".join(str(t) for t in tvars if t not in gvarset) + s_args = ", ".join(str(g) for g in gvars) + raise TypeError( + f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]" + ) tvars = gvars cls.__parameters__ = tuple(tvars) # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): + if not cls.__dict__.get("_is_protocol", None): cls._is_protocol = any(b is Protocol for b in cls.__bases__) # Set (or override) the protocol subclass hook. def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): + if not cls.__dict__.get("_is_protocol", None): return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + if not getattr(cls, "_is_runtime_protocol", False): + if sys._getframe(2).f_globals["__name__"] in ["abc", "functools"]: return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") + raise TypeError( + "Instance and class checks can only be used with" + " @runtime protocols" + ) if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + if sys._getframe(2).f_globals["__name__"] in ["abc", "functools"]: return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") + raise TypeError( + "Protocols with non-method members" + " don't support issubclass()" + ) if not isinstance(other, type): # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') + raise TypeError("issubclass() arg 1 must be a class") for attr in _get_protocol_attrs(cls): for base in other.__mro__: if attr in base.__dict__: if base.__dict__[attr] is None: return NotImplemented break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): + annotations = getattr(base, "__annotations__", {}) + if ( + isinstance(annotations, typing.Mapping) + and attr in annotations + and isinstance(other, _ProtocolMeta) + and other._is_protocol + ): break else: return NotImplemented return True - if '__subclasshook__' not in cls.__dict__: + + if "__subclasshook__" not in cls.__dict__: cls.__subclasshook__ = _proto_hook # We have nothing more to do for non-protocols. @@ -757,20 +839,27 @@ def _proto_hook(other): # Check consistency of bases. 
for base in cls.__bases__: - if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - f' protocols, got {repr(base)}') + if not ( + base in (object, typing.Generic) + or base.__module__ == "collections.abc" + and base.__name__ in _PROTO_WHITELIST + or isinstance(base, _ProtocolMeta) + and base._is_protocol + ): + raise TypeError( + "Protocols can only inherit from other" + f" protocols, got {repr(base)}" + ) cls.__init__ = _no_init + + # 3.6 else: from typing import _next_in_mro, _type_check # noqa def _no_init(self, *args, **kwargs): if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') + raise TypeError("Protocols cannot be instantiated") class _ProtocolMeta(GenericMeta): """Internal metaclass for Protocol. @@ -778,8 +867,18 @@ class _ProtocolMeta(GenericMeta): This exists so Protocol classes can be generic without deriving from Generic. """ - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): + + def __new__( + cls, + name, + bases, + namespace, + tvars=None, + args=None, + origin=None, + extra=None, + orig_bases=None, + ): # This is just a version copied from GenericMeta.__new__ that # includes "Protocol" special treatment. (Comments removed for brevity.) assert extra is None # Protocols should not have extra @@ -792,12 +891,15 @@ def __new__(cls, name, bases, namespace, for base in bases: if base is typing.Generic: raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ in (typing.Generic, Protocol)): + if isinstance(base, GenericMeta) and base.__origin__ in ( + typing.Generic, + Protocol, + ): if gvars is not None: raise TypeError( "Cannot inherit from Generic[...] or" - " Protocol[...] multiple times.") + " Protocol[...] multiple times." 
+ ) gvars = base.__parameters__ if gvars is None: gvars = tvars @@ -807,122 +909,166 @@ def __new__(cls, name, bases, namespace, if not tvarset <= gvarset: s_vars = ", ".join(str(t) for t in tvars if t not in gvarset) s_args = ", ".join(str(g) for g in gvars) - cls_name = "Generic" if any(b.__origin__ is typing.Generic - for b in bases) else "Protocol" - raise TypeError(f"Some type variables ({s_vars}) are" - f" not listed in {cls_name}[{s_args}]") + cls_name = ( + "Generic" + if any(b.__origin__ is typing.Generic for b in bases) + else "Protocol" + ) + raise TypeError( + f"Some type variables ({s_vars}) are" + f" not listed in {cls_name}[{s_args}]" + ) tvars = gvars initial_bases = bases - if (extra is not None and type(extra) is abc.ABCMeta and - extra not in bases): + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b - for b in bases) - if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases): + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) + if any( + isinstance(b, GenericMeta) and b is not typing.Generic for b in bases + ): bases = tuple(b for b in bases if b is not typing.Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, - _root=True) - super(GenericMeta, self).__setattr__('_gorg', - self if not origin else - _gorg(origin)) + namespace.update({"__origin__": origin, "__extra__": extra}) + self = super(GenericMeta, cls).__new__( + cls, name, bases, namespace, _root=True + ) + super(GenericMeta, self).__setattr__( + "_gorg", self if not origin else _gorg(origin) + ) self.__parameters__ = tvars - self.__args__ = tuple(... if a is typing._TypingEllipsis else - () if a is typing._TypingEmpty else - a for a in args) if args else None + self.__args__ = ( + tuple( + ... 
+ if a is typing._TypingEllipsis + else () + if a is typing._TypingEmpty + else a + for a in args + ) + if args + else None + ) self.__next_in_mro__ = _next_in_mro(self) if orig_bases is None: self.__orig_bases__ = initial_bases elif origin is not None: self._abc_registry = origin._abc_registry self._abc_cache = origin._abc_cache - if hasattr(self, '_subs_tree'): - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) + if hasattr(self, "_subs_tree"): + self.__tree_hash__ = ( + hash(self._subs_tree()) + if origin + else super(GenericMeta, self).__hash__() + ) return self def __init__(cls, *args, **kwargs): super().__init__(*args, **kwargs) - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol or - isinstance(b, _ProtocolMeta) and - b.__origin__ is Protocol - for b in cls.__bases__) + if not cls.__dict__.get("_is_protocol", None): + cls._is_protocol = any( + b is Protocol + or isinstance(b, _ProtocolMeta) + and b.__origin__ is Protocol + for b in cls.__bases__ + ) if cls._is_protocol: for base in cls.__mro__[1:]: - if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, typing.TypingMeta) and base._is_protocol or - isinstance(base, GenericMeta) and - base.__origin__ is typing.Generic): - raise TypeError(f'Protocols can only inherit from other' - f' protocols, got {repr(base)}') + if not ( + base in (object, typing.Generic) + or base.__module__ == "collections.abc" + and base.__name__ in _PROTO_WHITELIST + or isinstance(base, typing.TypingMeta) + and base._is_protocol + or isinstance(base, GenericMeta) + and base.__origin__ is typing.Generic + ): + raise TypeError( + f"Protocols can only inherit from other" + f" protocols, got {repr(base)}" + ) cls.__init__ = _no_init def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): + if not cls.__dict__.get("_is_protocol", None): return NotImplemented if not isinstance(other, type): # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') + raise TypeError("issubclass() arg 1 must be a class") for attr in _get_protocol_attrs(cls): for base in other.__mro__: if attr in base.__dict__: if base.__dict__[attr] is None: return NotImplemented break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): + annotations = getattr(base, "__annotations__", {}) + if ( + isinstance(annotations, typing.Mapping) + and attr in annotations + and isinstance(other, _ProtocolMeta) + and other._is_protocol + ): break else: return NotImplemented return True - if '__subclasshook__' not in cls.__dict__: + + if "__subclasshook__" not in cls.__dict__: cls.__subclasshook__ = _proto_hook def __instancecheck__(self, instance): # We need this method for situations where attributes are # assigned in __init__. 
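_proto_hook installs itself as __subclasshook__, so issubclass() against a runtime protocol walks the candidate's MRO looking for every protocol attribute instead of consulting registered bases. A sketch of the resulting behaviour:

    from typing_extensions import Protocol, runtime_checkable

    @runtime_checkable
    class SupportsLen(Protocol):
        def __len__(self) -> int: ...

    assert issubclass(list, SupportsLen)      # __len__ found in list.__mro__
    assert not issubclass(int, SupportsLen)   # no __len__ anywhere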
- if ((not getattr(self, '_is_protocol', False) or - _is_callable_members_only(self)) and - issubclass(instance.__class__, self)): + if ( + not getattr(self, "_is_protocol", False) + or _is_callable_members_only(self) + ) and issubclass(instance.__class__, self): return True if self._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(self, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(self)): + if all( + hasattr(instance, attr) + and ( + not callable(getattr(self, attr, None)) + or getattr(instance, attr) is not None + ) + for attr in _get_protocol_attrs(self) + ): return True return super(GenericMeta, self).__instancecheck__(instance) def __subclasscheck__(self, cls): if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") + if sys._getframe(1).f_globals["__name__"] not in ["abc", "functools"]: + raise TypeError( + "Parameterized generics cannot be used with class " + "or instance checks" + ) return False - if (self.__dict__.get('_is_protocol', None) and - not self.__dict__.get('_is_runtime_protocol', None)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: + if self.__dict__.get("_is_protocol", None) and not self.__dict__.get( + "_is_runtime_protocol", None + ): + if sys._getframe(1).f_globals["__name__"] in [ + "abc", + "functools", + "typing", + ]: return False - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if (self.__dict__.get('_is_runtime_protocol', None) and - not _is_callable_members_only(self)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: + raise TypeError( + "Instance and class checks can only be used with" + " @runtime protocols" + ) + if self.__dict__.get( + "_is_runtime_protocol", None + ) and not _is_callable_members_only(self): + if sys._getframe(1).f_globals["__name__"] in [ + "abc", + "functools", + "typing", + ]: return super(GenericMeta, self).__subclasscheck__(cls) - raise TypeError("Protocols with non-method members" - " don't support issubclass()") + raise TypeError( + "Protocols with non-method members" " don't support issubclass()" + ) return super(GenericMeta, self).__subclasscheck__(cls) @typing._tp_cache @@ -933,16 +1079,19 @@ def __getitem__(self, params): params = (params,) if not params and _gorg(self) is not typing.Tuple: raise TypeError( - f"Parameter list to {self.__qualname__}[...] cannot be empty") + f"Parameter list to {self.__qualname__}[...] cannot be empty" + ) msg = "Parameters to generic types must be types." params = tuple(_type_check(p, msg) for p in params) if self in (typing.Generic, Protocol): if not all(isinstance(p, typing.TypeVar) for p in params): raise TypeError( - f"Parameters to {repr(self)}[...] must all be type variables") + f"Parameters to {repr(self)}[...] must all be type variables" + ) if len(set(params)) != len(params): raise TypeError( - f"Parameters to {repr(self)}[...] must all be unique") + f"Parameters to {repr(self)}[...] 
must all be unique" + ) tvars = params args = params elif self in (typing.Tuple, typing.Callable): @@ -956,14 +1105,16 @@ def __getitem__(self, params): args = params prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) + return self.__class__( + self.__name__, + prepend + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__, + ) class Protocol(metaclass=_ProtocolMeta): """Base class for protocol classes. Protocol classes are defined as:: @@ -994,21 +1145,25 @@ class GenProto(Protocol[T]): def meth(self) -> T: ... """ + __slots__ = () _is_protocol = True def __new__(cls, *args, **kwds): if _gorg(cls) is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can be used only as a base class") + raise TypeError( + "Type Protocol cannot be instantiated; " + "it can be used only as a base class" + ) return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds) # 3.8+ -if hasattr(typing, 'runtime_checkable'): +if hasattr(typing, "runtime_checkable"): runtime_checkable = typing.runtime_checkable # 3.6-3.7 else: + def runtime_checkable(cls): """Mark a protocol class as a runtime protocol, so that it can be used with isinstance() and issubclass(). Raise TypeError @@ -1018,8 +1173,10 @@ def runtime_checkable(cls): one-offs in collections.abc such as Hashable. """ if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: - raise TypeError('@runtime_checkable can be only applied to protocol classes,' - f' got {cls!r}') + raise TypeError( + "@runtime_checkable can be only applied to protocol classes," + f" got {cls!r}" + ) cls._is_runtime_protocol = True return cls @@ -1029,10 +1186,11 @@ def runtime_checkable(cls): # 3.8+ -if hasattr(typing, 'SupportsIndex'): +if hasattr(typing, "SupportsIndex"): SupportsIndex = typing.SupportsIndex # 3.6-3.7 else: + @runtime_checkable class SupportsIndex(Protocol): __slots__ = () @@ -1053,71 +1211,91 @@ def __index__(self) -> int: _TypedDictMeta = typing._TypedDictMeta is_typeddict = typing.is_typeddict else: + def _check_fails(cls, other): try: - if sys._getframe(1).f_globals['__name__'] not in ['abc', - 'functools', - 'typing']: + if sys._getframe(1).f_globals["__name__"] not in [ + "abc", + "functools", + "typing", + ]: # Typed dicts are only for static structural subtyping. 
- raise TypeError('TypedDict does not support instance and class checks') + raise TypeError("TypedDict does not support instance and class checks") except (AttributeError, ValueError): pass return False def _dict_new(*args, **kwargs): if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') + raise TypeError("TypedDict.__new__(): not enough arguments") _, args = args[0], args[1:] # allow the "cls" keyword be passed return dict(*args, **kwargs) - _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' + _dict_new.__text_signature__ = "($cls, _typename, _fields=None, /, **kwargs)" def _typeddict_new(*args, total=True, **kwargs): if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') + raise TypeError("TypedDict.__new__(): not enough arguments") _, args = args[0], args[1:] # allow the "cls" keyword be passed if args: - typename, args = args[0], args[1:] # allow the "_typename" keyword be passed - elif '_typename' in kwargs: - typename = kwargs.pop('_typename') + typename, args = ( + args[0], + args[1:], + ) # allow the "_typename" keyword be passed + elif "_typename" in kwargs: + typename = kwargs.pop("_typename") import warnings - warnings.warn("Passing '_typename' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) + + warnings.warn( + "Passing '_typename' as keyword argument is deprecated", + DeprecationWarning, + stacklevel=2, + ) else: - raise TypeError("TypedDict.__new__() missing 1 required positional " - "argument: '_typename'") + raise TypeError( + "TypedDict.__new__() missing 1 required positional " + "argument: '_typename'" + ) if args: try: - fields, = args # allow the "_fields" keyword be passed + (fields,) = args # allow the "_fields" keyword be passed except ValueError: - raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - f'positional arguments but {len(args) + 2} ' - 'were given') - elif '_fields' in kwargs and len(kwargs) == 1: - fields = kwargs.pop('_fields') + raise TypeError( + "TypedDict.__new__() takes from 2 to 3 " + f"positional arguments but {len(args) + 2} " + "were given" + ) + elif "_fields" in kwargs and len(kwargs) == 1: + fields = kwargs.pop("_fields") import warnings - warnings.warn("Passing '_fields' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) + + warnings.warn( + "Passing '_fields' as keyword argument is deprecated", + DeprecationWarning, + stacklevel=2, + ) else: fields = None if fields is None: fields = kwargs elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") + raise TypeError( + "TypedDict takes either a dict or keyword arguments," " but not both" + ) - ns = {'__annotations__': dict(fields)} + ns = {"__annotations__": dict(fields)} try: # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') + ns["__module__"] = sys._getframe(1).f_globals.get("__name__", "__main__") except (AttributeError, ValueError): pass return _TypedDictMeta(typename, (), ns, total=total) - _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' - ' /, *, total=True, **kwargs)') + _typeddict_new.__text_signature__ = ( + "($cls, _typename, _fields=None," " /, *, total=True, **kwargs)" + ) class _TypedDictMeta(type): def __init__(cls, name, bases, ns, total=True): @@ -1130,11 +1308,11 @@ def __new__(cls, name, bases, ns, total=True): # TypedDict supports all three syntaxes described in its docstring. 
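At runtime a backported TypedDict is an ordinary dict; the _TypedDictMeta machinery around this hunk only attaches metadata, merging __annotations__ from the bases and splitting keys into required and optional sets. A small usage sketch, assuming typing_extensions is importable:

    from typing_extensions import TypedDict

    class Movie(TypedDict, total=False):
        title: str
        year: int

    m: Movie = {"title": "Blade Runner"}   # a plain dict at runtime
    assert isinstance(m, dict)
    assert Movie.__total__ is False
    assert Movie.__optional_keys__ == frozenset({"title", "year"})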
# Subclasses and instances of TypedDict return actual dictionaries # via _dict_new. - ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new + ns["__new__"] = _typeddict_new if name == "TypedDict" else _dict_new tp_dict = super().__new__(cls, name, (dict,), ns) annotations = {} - own_annotations = ns.get('__annotations__', {}) + own_annotations = ns.get("__annotations__", {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" own_annotations = { n: typing._type_check(tp, msg) for n, tp in own_annotations.items() @@ -1143,9 +1321,9 @@ def __new__(cls, name, bases, ns, total=True): optional_keys = set() for base in bases: - annotations.update(base.__dict__.get('__annotations__', {})) - required_keys.update(base.__dict__.get('__required_keys__', ())) - optional_keys.update(base.__dict__.get('__optional_keys__', ())) + annotations.update(base.__dict__.get("__annotations__", {})) + required_keys.update(base.__dict__.get("__required_keys__", ())) + optional_keys.update(base.__dict__.get("__optional_keys__", ())) annotations.update(own_annotations) if PEP_560: @@ -1175,16 +1353,15 @@ def __new__(cls, name, bases, ns, total=True): tp_dict.__annotations__ = annotations tp_dict.__required_keys__ = frozenset(required_keys) tp_dict.__optional_keys__ = frozenset(optional_keys) - if not hasattr(tp_dict, '__total__'): + if not hasattr(tp_dict, "__total__"): tp_dict.__total__ = total return tp_dict __instancecheck__ = __subclasscheck__ = _check_fails - TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) + TypedDict = _TypedDictMeta("TypedDict", (dict,), {}) TypedDict.__module__ = __name__ - TypedDict.__doc__ = \ - """A simple typed name space. At runtime it is equivalent to a plain dict. + TypedDict.__doc__ = """A simple typed name space. At runtime it is equivalent to a plain dict. TypedDict creates a dictionary type that expects all of its instances to have a certain set of keys, with each key @@ -1231,6 +1408,7 @@ class Film(TypedDict): """ return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + if hasattr(typing, "Required"): get_type_hints = typing.get_type_hints elif PEP_560: @@ -1306,13 +1484,14 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False): # Python 3.9+ has PEP 593 (Annotated) -if hasattr(typing, 'Annotated'): +if hasattr(typing, "Annotated"): Annotated = typing.Annotated # Not exported and not a public API, but needed for get_origin() and get_args() # to work. _AnnotatedAlias = typing._AnnotatedAlias # 3.7-3.8 elif PEP_560: + class _AnnotatedAlias(typing._GenericAlias, _root=True): """Runtime representation of an annotated type. @@ -1321,6 +1500,7 @@ class _AnnotatedAlias(typing._GenericAlias, _root=True): instantiating is the same as instantiating the underlying type, binding it to types is also the same. 
""" + def __init__(self, origin, metadata): if isinstance(origin, _AnnotatedAlias): metadata = origin.__metadata__ + metadata @@ -1334,13 +1514,13 @@ def copy_with(self, params): return _AnnotatedAlias(new_type, self.__metadata__) def __repr__(self): - return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " - f"{', '.join(repr(a) for a in self.__metadata__)}]") + return ( + f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]" + ) def __reduce__(self): - return operator.getitem, ( - Annotated, (self.__origin__,) + self.__metadata__ - ) + return operator.getitem, (Annotated, (self.__origin__,) + self.__metadata__) def __eq__(self, other): if not isinstance(other, _AnnotatedAlias): @@ -1393,9 +1573,11 @@ def __new__(cls, *args, **kwargs): @typing._tp_cache def __class_getitem__(cls, params): if not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be used " - "with at least two arguments (a type and an " - "annotation).") + raise TypeError( + "Annotated[...] should be used " + "with at least two arguments (a type and an " + "annotation)." + ) allowed_special_forms = (ClassVar, Final) if get_origin(params[0]) in allowed_special_forms: origin = params[0] @@ -1406,15 +1588,15 @@ def __class_getitem__(cls, params): return _AnnotatedAlias(origin, metadata) def __init_subclass__(cls, *args, **kwargs): - raise TypeError( - f"Cannot subclass {cls.__module__}.Annotated" - ) + raise TypeError(f"Cannot subclass {cls.__module__}.Annotated") + + # 3.6 else: def _is_dunder(name): """Returns True if name is a __dunder_variable_name__.""" - return len(name) > 4 and name.startswith('__') and name.endswith('__') + return len(name) > 4 and name.startswith("__") and name.endswith("__") # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality # checks, argument expansion etc. are done on the _subs_tre. As a result we @@ -1439,7 +1621,7 @@ def _tree_repr(self, tree): else: tp_repr = origin[0]._tree_repr(origin) metadata_reprs = ", ".join(repr(arg) for arg in metadata) - return f'{cls}[{tp_repr}, {metadata_reprs}]' + return f"{cls}[{tp_repr}, {metadata_reprs}]" def _subs_tree(self, tvars=None, args=None): # noqa if self is Annotated: @@ -1455,8 +1637,10 @@ def _subs_tree(self, tvars=None, args=None): # noqa def _get_cons(self): """Return the class used to create instance of this type.""" if self.__origin__ is None: - raise TypeError("Cannot get the underlying type of a " - "non-specialized Annotated type.") + raise TypeError( + "Cannot get the underlying type of a " + "non-specialized Annotated type." + ) tree = self._subs_tree() while isinstance(tree, tuple) and tree[0] is Annotated: tree = tree[1] @@ -1472,13 +1656,15 @@ def __getitem__(self, params): if self.__origin__ is not None: # specializing an instantiated type return super().__getitem__(params) elif not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be instantiated " - "with at least two arguments (a type and an " - "annotation).") + raise TypeError( + "Annotated[...] should be instantiated " + "with at least two arguments (a type and an " + "annotation)." 
+ ) else: if ( - isinstance(params[0], typing._TypingBase) and - type(params[0]).__name__ == "_ClassVar" + isinstance(params[0], typing._TypingBase) + and type(params[0]).__name__ == "_ClassVar" ): tp = params[0] else: @@ -1511,7 +1697,7 @@ def __getattr__(self, attr): raise AttributeError(attr) def __setattr__(self, attr, value): - if _is_dunder(attr) or attr.startswith('_abc_'): + if _is_dunder(attr) or attr.startswith("_abc_"): super().__setattr__(attr, value) elif self.__origin__ is None: raise AttributeError(attr) @@ -1556,6 +1742,7 @@ class Annotated(metaclass=AnnotatedMeta): OptimizedList[int] == Annotated[List[int], runtime.Optimize()] """ + # Python 3.8 has get_origin() and get_args() but those implementations aren't # Annotated-aware, so we can't use those. Python 3.9's versions don't support # ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. @@ -1592,8 +1779,16 @@ def get_origin(tp): """ if isinstance(tp, _AnnotatedAlias): return Annotated - if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias, - ParamSpecArgs, ParamSpecKwargs)): + if isinstance( + tp, + ( + typing._GenericAlias, + GenericAlias, + _BaseGenericAlias, + ParamSpecArgs, + ParamSpecKwargs, + ), + ): return tp.__origin__ if tp is typing.Generic: return typing.Generic @@ -1623,13 +1818,14 @@ def get_args(tp): # 3.10+ -if hasattr(typing, 'TypeAlias'): +if hasattr(typing, "TypeAlias"): TypeAlias = typing.TypeAlias # 3.9 elif sys.version_info[:2] >= (3, 9): + class _TypeAliasForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name @_TypeAliasForm def TypeAlias(self, parameters): @@ -1644,14 +1840,18 @@ def TypeAlias(self, parameters): It's invalid when used anywhere except as in the example above. """ raise TypeError(f"{self} is not subscriptable") + + # 3.7-3.8 elif sys.version_info[:2] >= (3, 7): + class _TypeAliasForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name - TypeAlias = _TypeAliasForm('TypeAlias', - doc="""Special marker indicating that an assignment should + TypeAlias = _TypeAliasForm( + "TypeAlias", + doc="""Special marker indicating that an assignment should be recognized as a proper type alias definition by type checkers. @@ -1660,14 +1860,16 @@ def __repr__(self): Predicate: TypeAlias = Callable[..., bool] It's invalid when used anywhere except as in the example - above.""") + above.""", + ) # 3.6 else: + class _TypeAliasMeta(typing.TypingMeta): """Metaclass for TypeAlias""" def __repr__(self): - return 'typing_extensions.TypeAlias' + return "typing_extensions.TypeAlias" class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True): """Special marker indicating that an assignment should @@ -1680,6 +1882,7 @@ class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=Tr It's invalid when used anywhere except as in the example above. 
""" + __slots__ = () def __instancecheck__(self, obj): @@ -1689,19 +1892,21 @@ def __subclasscheck__(self, cls): raise TypeError("TypeAlias cannot be used with issubclass().") def __repr__(self): - return 'typing_extensions.TypeAlias' + return "typing_extensions.TypeAlias" TypeAlias = _TypeAliasBase(_root=True) # Python 3.10+ has PEP 612 -if hasattr(typing, 'ParamSpecArgs'): +if hasattr(typing, "ParamSpecArgs"): ParamSpecArgs = typing.ParamSpecArgs ParamSpecKwargs = typing.ParamSpecKwargs # 3.6-3.9 else: + class _Immutable: """Mixin to indicate that object should not be copied.""" + __slots__ = () def __copy__(self): @@ -1722,6 +1927,7 @@ class ParamSpecArgs(_Immutable): This type is meant for runtime introspection and has no special meaning to static type checkers. """ + def __init__(self, origin): self.__origin__ = origin @@ -1745,6 +1951,7 @@ class ParamSpecKwargs(_Immutable): This type is meant for runtime introspection and has no special meaning to static type checkers. """ + def __init__(self, origin): self.__origin__ = origin @@ -1756,12 +1963,12 @@ def __eq__(self, other): return NotImplemented return self.__origin__ == other.__origin__ + # 3.10+ -if hasattr(typing, 'ParamSpec'): +if hasattr(typing, "ParamSpec"): ParamSpec = typing.ParamSpec # 3.6-3.9 else: - # Inherits from list as a workaround for Callable checks in Python < 3.9.2. class ParamSpec(list): """Parameter specification variable. @@ -1827,25 +2034,25 @@ def __init__(self, name, *, bound=None, covariant=False, contravariant=False): self.__covariant__ = bool(covariant) self.__contravariant__ = bool(contravariant) if bound: - self.__bound__ = typing._type_check(bound, 'Bound must be a type.') + self.__bound__ = typing._type_check(bound, "Bound must be a type.") else: self.__bound__ = None # for pickling: try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + def_mod = sys._getframe(1).f_globals.get("__name__", "__main__") except (AttributeError, ValueError): def_mod = None - if def_mod != 'typing_extensions': + if def_mod != "typing_extensions": self.__module__ = def_mod def __repr__(self): if self.__covariant__: - prefix = '+' + prefix = "+" elif self.__contravariant__: - prefix = '-' + prefix = "-" else: - prefix = '~' + prefix = "~" return prefix + self.__name__ def __hash__(self): @@ -1869,10 +2076,9 @@ def _get_type_vars(self, tvars): # 3.6-3.9 -if not hasattr(typing, 'Concatenate'): +if not hasattr(typing, "Concatenate"): # Inherits from list as a workaround for Callable checks in Python < 3.9.2. class _ConcatenateGenericAlias(list): - # Trick Generic into looking into this for __parameters__. 
if PEP_560: __class__ = typing._GenericAlias @@ -1891,8 +2097,10 @@ def __init__(self, origin, args): def __repr__(self): _type_repr = typing._type_repr - return (f'{_type_repr(self.__origin__)}' - f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + return ( + f"{_type_repr(self.__origin__)}" + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]' + ) def __hash__(self): return hash((self.__origin__, self.__args__)) @@ -1904,7 +2112,9 @@ def __call__(self, *args, **kwargs): @property def __parameters__(self): return tuple( - tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + tp + for tp in self.__args__ + if isinstance(tp, (typing.TypeVar, ParamSpec)) ) if not PEP_560: @@ -1922,19 +2132,21 @@ def _concatenate_getitem(self, parameters): if not isinstance(parameters, tuple): parameters = (parameters,) if not isinstance(parameters[-1], ParamSpec): - raise TypeError("The last parameter to Concatenate should be a " - "ParamSpec variable.") + raise TypeError( + "The last parameter to Concatenate should be a " "ParamSpec variable." + ) msg = "Concatenate[arg, ...]: each arg must be a type." parameters = tuple(typing._type_check(p, msg) for p in parameters) return _ConcatenateGenericAlias(self, parameters) # 3.10+ -if hasattr(typing, 'Concatenate'): +if hasattr(typing, "Concatenate"): Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa # 3.9 elif sys.version_info[:2] >= (3, 9): + @_TypeAliasForm def Concatenate(self, parameters): """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a @@ -1948,17 +2160,20 @@ def Concatenate(self, parameters): See PEP 612 for detailed information. """ return _concatenate_getitem(self, parameters) + + # 3.7-8 elif sys.version_info[:2] >= (3, 7): + class _ConcatenateForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name def __getitem__(self, parameters): return _concatenate_getitem(self, parameters) Concatenate = _ConcatenateForm( - 'Concatenate', + "Concatenate", doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a higher order function which adds, removes or transforms parameters of a callable. @@ -1968,18 +2183,20 @@ def __getitem__(self, parameters): Callable[Concatenate[int, P], int] See PEP 612 for detailed information. - """) + """, + ) # 3.6 else: + class _ConcatenateAliasMeta(typing.TypingMeta): """Metaclass for Concatenate.""" def __repr__(self): - return 'typing_extensions.Concatenate' + return "typing_extensions.Concatenate" - class _ConcatenateAliasBase(typing._FinalTypingBase, - metaclass=_ConcatenateAliasMeta, - _root=True): + class _ConcatenateAliasBase( + typing._FinalTypingBase, metaclass=_ConcatenateAliasMeta, _root=True + ): """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a higher order function which adds, removes or transforms parameters of a callable. @@ -1990,6 +2207,7 @@ class _ConcatenateAliasBase(typing._FinalTypingBase, See PEP 612 for detailed information. 
""" + __slots__ = () def __instancecheck__(self, obj): @@ -1999,7 +2217,7 @@ def __subclasscheck__(self, cls): raise TypeError("Concatenate cannot be used with issubclass().") def __repr__(self): - return 'typing_extensions.Concatenate' + return "typing_extensions.Concatenate" def __getitem__(self, parameters): return _concatenate_getitem(self, parameters) @@ -2007,13 +2225,14 @@ def __getitem__(self, parameters): Concatenate = _ConcatenateAliasBase(_root=True) # 3.10+ -if hasattr(typing, 'TypeGuard'): +if hasattr(typing, "TypeGuard"): TypeGuard = typing.TypeGuard # 3.9 elif sys.version_info[:2] >= (3, 9): + class _TypeGuardForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name @_TypeGuardForm def TypeGuard(self, parameters): @@ -2059,22 +2278,25 @@ def is_str(val: Union[str, float]): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). """ - item = typing._type_check(parameters, f'{self} accepts only single type.') + item = typing._type_check(parameters, f"{self} accepts only single type.") return typing._GenericAlias(self, (item,)) + + # 3.7-3.8 elif sys.version_info[:2] >= (3, 7): - class _TypeGuardForm(typing._SpecialForm, _root=True): + class _TypeGuardForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type') + item = typing._type_check( + parameters, f"{self._name} accepts only a single type" + ) return typing._GenericAlias(self, (item,)) TypeGuard = _TypeGuardForm( - 'TypeGuard', + "TypeGuard", doc="""Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument. At runtime, functions marked this way should return a boolean. @@ -2116,9 +2338,11 @@ def is_str(val: Union[str, float]): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). - """) + """, + ) # 3.6 else: + class _TypeGuard(typing._FinalTypingBase, _root=True): """Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument. @@ -2163,7 +2387,7 @@ def is_str(val: Union[str, float]): PEP 647 (User-Defined Type Guards). """ - __slots__ = ('__type__',) + __slots__ = ("__type__",) def __init__(self, tp=None, **kwds): self.__type__ = tp @@ -2171,10 +2395,13 @@ def __init__(self, tp=None, **kwds): def __getitem__(self, item): cls = type(self) if self.__type__ is None: - return cls(typing._type_check(item, - f'{cls.__name__[1:]} accepts only a single type.'), - _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + return cls( + typing._type_check( + item, f"{cls.__name__[1:]} accepts only a single type." 
+ ), + _root=True, + ) + raise TypeError(f"{cls.__name__[1:]} cannot be further subscripted") def _eval_type(self, globalns, localns): new_tp = typing._eval_type(self.__type__, globalns, localns) @@ -2185,7 +2412,7 @@ def _eval_type(self, globalns, localns): def __repr__(self): r = super().__repr__() if self.__type__ is not None: - r += f'[{typing._type_repr(self.__type__)}]' + r += f"[{typing._type_repr(self.__type__)}]" return r def __hash__(self): @@ -2204,7 +2431,7 @@ def __eq__(self, other): if sys.version_info[:2] >= (3, 7): # Vendored from cpython typing._SpecialFrom class _SpecialForm(typing._Final, _root=True): - __slots__ = ('_name', '__doc__', '_getitem') + __slots__ = ("_name", "__doc__", "_getitem") def __init__(self, getitem): self._getitem = getitem @@ -2212,7 +2439,7 @@ def __init__(self, getitem): self.__doc__ = getitem.__doc__ def __getattr__(self, item): - if item in {'__name__', '__qualname__'}: + if item in {"__name__", "__qualname__"}: return self._name raise AttributeError(item) @@ -2221,7 +2448,7 @@ def __mro_entries__(self, bases): raise TypeError(f"Cannot subclass {self!r}") def __repr__(self): - return f'typing_extensions.{self._name}' + return f"typing_extensions.{self._name}" def __reduce__(self): return self._name @@ -2249,6 +2476,7 @@ def __getitem__(self, parameters): if hasattr(typing, "LiteralString"): LiteralString = typing.LiteralString elif sys.version_info[:2] >= (3, 7): + @_SpecialForm def LiteralString(self, params): """Represents an arbitrary literal string. @@ -2267,7 +2495,9 @@ def query(sql: LiteralString) -> ...: """ raise TypeError(f"{self} is not subscriptable") + else: + class _LiteralString(typing._FinalTypingBase, _root=True): """Represents an arbitrary literal string. @@ -2299,6 +2529,7 @@ def __subclasscheck__(self, cls): if hasattr(typing, "Self"): Self = typing.Self elif sys.version_info[:2] >= (3, 7): + @_SpecialForm def Self(self, params): """Used to spell the type of "self" in classes. @@ -2315,7 +2546,9 @@ def parse(self, data: bytes) -> Self: """ raise TypeError(f"{self} is not subscriptable") + else: + class _Self(typing._FinalTypingBase, _root=True): """Used to spell the type of "self" in classes. @@ -2344,6 +2577,7 @@ def __subclasscheck__(self, cls): if hasattr(typing, "Never"): Never = typing.Never elif sys.version_info[:2] >= (3, 7): + @_SpecialForm def Never(self, params): """The bottom type, a type that has no members. @@ -2369,7 +2603,9 @@ def int_or_str(arg: int | str) -> None: """ raise TypeError(f"{self} is not subscriptable") + else: + class _Never(typing._FinalTypingBase, _root=True): """The bottom type, a type that has no members. @@ -2404,13 +2640,14 @@ def __subclasscheck__(self, cls): Never = _Never(_root=True) -if hasattr(typing, 'Required'): +if hasattr(typing, "Required"): Required = typing.Required NotRequired = typing.NotRequired elif sys.version_info[:2] >= (3, 9): + class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name @_ExtensionsSpecialForm def Required(self, parameters): @@ -2429,7 +2666,7 @@ class Movie(TypedDict, total=False): There is no runtime checking that a required key is actually provided when instantiating a related TypedDict. 
""" - item = typing._type_check(parameters, f'{self._name} accepts only single type') + item = typing._type_check(parameters, f"{self._name} accepts only single type") return typing._GenericAlias(self, (item,)) @_ExtensionsSpecialForm @@ -2446,21 +2683,23 @@ class Movie(TypedDict): year=1999, ) """ - item = typing._type_check(parameters, f'{self._name} accepts only single type') + item = typing._type_check(parameters, f"{self._name} accepts only single type") return typing._GenericAlias(self, (item,)) elif sys.version_info[:2] >= (3, 7): + class _RequiredForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name def __getitem__(self, parameters): - item = typing._type_check(parameters, - '{} accepts only single type'.format(self._name)) + item = typing._type_check( + parameters, "{} accepts only single type".format(self._name) + ) return typing._GenericAlias(self, (item,)) Required = _RequiredForm( - 'Required', + "Required", doc="""A special typing construct to mark a key of a total=False TypedDict as required. For example: @@ -2475,9 +2714,10 @@ class Movie(TypedDict, total=False): There is no runtime checking that a required key is actually provided when instantiating a related TypedDict. - """) + """, + ) NotRequired = _RequiredForm( - 'NotRequired', + "NotRequired", doc="""A special typing construct to mark a key of a TypedDict as potentially missing. For example: @@ -2489,11 +2729,12 @@ class Movie(TypedDict): title='The Matrix', # typechecker error if key is omitted year=1999, ) - """) + """, + ) else: # NOTE: Modeled after _Final's implementation when _FinalTypingBase available class _MaybeRequired(typing._FinalTypingBase, _root=True): - __slots__ = ('__type__',) + __slots__ = ("__type__",) def __init__(self, tp=None, **kwds): self.__type__ = tp @@ -2501,11 +2742,13 @@ def __init__(self, tp=None, **kwds): def __getitem__(self, item): cls = type(self) if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) + return cls( + typing._type_check( + item, "{} accepts only single type.".format(cls.__name__[1:]) + ), + _root=True, + ) + raise TypeError("{} cannot be further subscripted".format(cls.__name__[1:])) def _eval_type(self, globalns, localns): new_tp = typing._eval_type(self.__type__, globalns, localns) @@ -2516,7 +2759,7 @@ def _eval_type(self, globalns, localns): def __repr__(self): r = super().__repr__() if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) + r += "[{}]".format(typing._type_repr(self.__type__)) return r def __hash__(self): @@ -2565,9 +2808,10 @@ class Movie(TypedDict): if sys.version_info[:2] >= (3, 9): + class _UnpackSpecialForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar @@ -2576,35 +2820,37 @@ class _UnpackAlias(typing._GenericAlias, _root=True): def Unpack(self, parameters): """A special typing construct to unpack a variadic type. For example: - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... 
+ def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... """ - item = typing._type_check(parameters, f'{self._name} accepts only single type') + item = typing._type_check(parameters, f"{self._name} accepts only single type") return _UnpackAlias(self, (item,)) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) elif sys.version_info[:2] >= (3, 7): + class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar class _UnpackForm(typing._SpecialForm, _root=True): def __repr__(self): - return 'typing_extensions.' + self._name + return "typing_extensions." + self._name def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only single type') + item = typing._type_check( + parameters, f"{self._name} accepts only single type" + ) return _UnpackAlias(self, (item,)) Unpack = _UnpackForm( - 'Unpack', + "Unpack", doc="""A special typing construct to unpack a variadic type. For example: Shape = TypeVarTuple('Shape') @@ -2614,7 +2860,8 @@ def add_batch_axis( x: Array[Unpack[Shape]] ) -> Array[Batch, Unpack[Shape]]: ... - """) + """, + ) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) @@ -2624,15 +2871,16 @@ def _is_unpack(obj): class _Unpack(typing._FinalTypingBase, _root=True): """A special typing construct to unpack a variadic type. For example: - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... """ - __slots__ = ('__type__',) + + __slots__ = ("__type__",) __class__ = typing.TypeVar def __init__(self, tp=None, **kwds): @@ -2641,10 +2889,11 @@ def __init__(self, tp=None, **kwds): def __getitem__(self, item): cls = type(self) if self.__type__ is None: - return cls(typing._type_check(item, - 'Unpack accepts only single type.'), - _root=True) - raise TypeError('Unpack cannot be further subscripted') + return cls( + typing._type_check(item, "Unpack accepts only single type."), + _root=True, + ) + raise TypeError("Unpack cannot be further subscripted") def _eval_type(self, globalns, localns): new_tp = typing._eval_type(self.__type__, globalns, localns) @@ -2655,7 +2904,7 @@ def _eval_type(self, globalns, localns): def __repr__(self): r = super().__repr__() if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) + r += "[{}]".format(typing._type_repr(self.__type__)) return r def __hash__(self): @@ -2733,10 +2982,10 @@ def __init__(self, name): # for pickling: try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + def_mod = sys._getframe(1).f_globals.get("__name__", "__main__") except (AttributeError, ValueError): def_mod = None - if def_mod != 'typing_extensions': + if def_mod != "typing_extensions": self.__module__ = def_mod self.__unpacked__ = Unpack[self] @@ -2754,7 +3003,7 @@ def __reduce__(self): return self.__name__ def __init_subclass__(self, *args, **kwds): - if '_root' not in kwds: + if "_root" not in kwds: raise TypeError("Cannot subclass special typing classes") if not PEP_560: @@ -2767,6 +3016,7 @@ def _get_type_vars(self, tvars): if hasattr(typing, "reveal_type"): reveal_type = typing.reveal_type else: + def reveal_type(__obj: T) -> T: """Reveal the inferred type of a variable. 
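Unlike the checker-only form, the runtime fallback typing_extensions ships for reveal_type (its body is elided between these hunks) prints the argument's runtime type to stderr and returns the argument unchanged, so it is safe to leave in executed code:

    from typing_extensions import reveal_type

    x = reveal_type(len("four"))   # stderr: Runtime type is 'int'
    assert x == 4                  # the value passes through untouched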
@@ -2790,6 +3040,7 @@ def reveal_type(__obj: T) -> T: if hasattr(typing, "assert_never"): assert_never = typing.assert_never else: + def assert_never(__arg: Never) -> Never: """Assert to the type checker that a line of code is unreachable. @@ -2813,17 +3064,17 @@ def int_or_str(arg: int | str) -> None: raise AssertionError("Expected code to be unreachable") -if hasattr(typing, 'dataclass_transform'): +if hasattr(typing, "dataclass_transform"): dataclass_transform = typing.dataclass_transform else: + def dataclass_transform( *, eq_default: bool = True, order_default: bool = False, kw_only_default: bool = False, field_descriptors: typing.Tuple[ - typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], - ... + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], ... ] = (), ) -> typing.Callable[[T], T]: """Decorator that marks a function, class, or metaclass as providing @@ -2885,6 +3136,7 @@ class CustomerModel(ModelBase): See PEP 681 for details. """ + def decorator(cls_or_fn): cls_or_fn.__dataclass_transform__ = { "eq_default": eq_default, @@ -2893,6 +3145,7 @@ def decorator(cls_or_fn): "field_descriptors": field_descriptors, } return cls_or_fn + return decorator diff --git a/metaflow/_vendor/v3_6/zipp.py b/metaflow/_vendor/v3_6/zipp.py index 26b723c1fd..72632b0b77 100644 --- a/metaflow/_vendor/v3_6/zipp.py +++ b/metaflow/_vendor/v3_6/zipp.py @@ -12,7 +12,7 @@ OrderedDict = dict -__all__ = ['Path'] +__all__ = ["Path"] def _parents(path): @@ -93,7 +93,7 @@ def resolve_dir(self, name): as a directory (with the trailing slash). """ names = self._name_set() - dirname = name + '/' + dirname = name + "/" dir_match = name not in names and dirname in names return dirname if dir_match else name @@ -110,7 +110,7 @@ def make(cls, source): return cls(_pathlib_compat(source)) # Only allow for FastLookup when supplied zipfile is read-only - if 'r' not in source.mode: + if "r" not in source.mode: cls = CompleteDirs source.__class__ = cls @@ -240,7 +240,7 @@ def __init__(self, root, at=""): self.root = FastLookup.make(root) self.at = at - def open(self, mode='r', *args, pwd=None, **kwargs): + def open(self, mode="r", *args, pwd=None, **kwargs): """ Open this entry as text or binary following the semantics of ``pathlib.Path.open()`` by passing arguments through @@ -249,10 +249,10 @@ def open(self, mode='r', *args, pwd=None, **kwargs): if self.is_dir(): raise IsADirectoryError(self) zip_mode = mode[0] - if not self.exists() and zip_mode == 'r': + if not self.exists() and zip_mode == "r": raise FileNotFoundError(self) stream = self.root.open(self.at, zip_mode, pwd=pwd) - if 'b' in mode: + if "b" in mode: if args or kwargs: raise ValueError("encoding args invalid for binary operation") return stream @@ -279,11 +279,11 @@ def filename(self): return pathlib.Path(self.root.filename).joinpath(self.at) def read_text(self, *args, **kwargs): - with self.open('r', *args, **kwargs) as strm: + with self.open("r", *args, **kwargs) as strm: return strm.read() def read_bytes(self): - with self.open('rb') as strm: + with self.open("rb") as strm: return strm.read() def _is_child(self, path): @@ -323,7 +323,7 @@ def joinpath(self, *other): def parent(self): if not self.at: return self.filename.parent - parent_at = posixpath.dirname(self.at.rstrip('/')) + parent_at = posixpath.dirname(self.at.rstrip("/")) if parent_at: - parent_at += '/' + parent_at += "/" return self._next(parent_at) diff --git a/metaflow/cli.py b/metaflow/cli.py index ad582043a6..76bea63f1b 100644 --- a/metaflow/cli.py 
+++ b/metaflow/cli.py @@ -1,8 +1,8 @@ +from datetime import datetime +from functools import wraps import inspect import sys import traceback -from datetime import datetime -from functools import wraps from metaflow._vendor import click @@ -234,7 +234,6 @@ def output_dot(obj): ) @click.pass_obj def dump(obj, input_path, private=None, max_value_size=None, include=None, file=None): - output = {} kwargs = { "show_private": private, @@ -737,7 +736,6 @@ def resume( decospecs=None, run_id_file=None, ): - before_run(obj, tags, decospecs + obj.environment.decospecs()) if origin_run_id is None: @@ -822,7 +820,6 @@ def run( user_namespace=None, **kwargs ): - if user_namespace is not None: namespace(user_namespace or None) before_run(obj, tags, decospecs + obj.environment.decospecs()) @@ -982,104 +979,100 @@ def start( else: echo = echo_always - ctx.obj.version = metaflow_version.get_version() - version = ctx.obj.version + obj = ctx.obj + obj.version = metaflow_version.get_version() + flow = obj.flow + version = obj.version if use_r(): version = metaflow_r_version() echo("Metaflow %s" % version, fg="magenta", bold=True, nl=False) - echo(" executing *%s*" % ctx.obj.flow.name, fg="magenta", nl=False) + echo(" executing *%s*" % flow.name, fg="magenta", nl=False) echo(" for *%s*" % resolve_identity(), fg="magenta") cli_args._set_top_kwargs(ctx.params) - ctx.obj.echo = echo - ctx.obj.echo_always = echo_always - ctx.obj.is_quiet = quiet - ctx.obj.graph = FlowGraph(ctx.obj.flow.__class__) - ctx.obj.logger = logger - ctx.obj.check = _check - ctx.obj.pylint = pylint - ctx.obj.top_cli = cli - ctx.obj.package_suffixes = package_suffixes.split(",") - ctx.obj.reconstruct_cli = _reconstruct_cli - - ctx.obj.environment = [ + obj.echo = echo + obj.echo_always = echo_always + obj.is_quiet = quiet + obj.graph = FlowGraph(obj.flow.__class__) + obj.logger = logger + obj.check = _check + obj.pylint = pylint + obj.top_cli = cli + obj.package_suffixes = package_suffixes.split(",") + obj.reconstruct_cli = _reconstruct_cli + + obj.environment = [ e for e in ENVIRONMENTS + [MetaflowEnvironment] if e.TYPE == environment - ][0](ctx.obj.flow) - ctx.obj.environment.validate_environment(echo, datastore) + ][0](obj.flow) + obj.environment.validate_environment(echo, datastore) - ctx.obj.event_logger = LOGGING_SIDECARS[event_logger]( - flow=ctx.obj.flow, env=ctx.obj.environment + obj.event_logger = LOGGING_SIDECARS[event_logger]( + flow=obj.flow, env=obj.environment ) - ctx.obj.event_logger.start() + obj.event_logger.start() - ctx.obj.monitor = MONITOR_SIDECARS[monitor]( - flow=ctx.obj.flow, env=ctx.obj.environment - ) - ctx.obj.monitor.start() + obj.monitor = MONITOR_SIDECARS[monitor](flow=obj.flow, env=obj.environment) + obj.monitor.start() - ctx.obj.metadata = [m for m in METADATA_PROVIDERS if m.TYPE == metadata][0]( - ctx.obj.environment, ctx.obj.flow, ctx.obj.event_logger, ctx.obj.monitor + obj.metadata = [m for m in METADATA_PROVIDERS if m.TYPE == metadata][0]( + obj.environment, flow, obj.event_logger, obj.monitor ) - ctx.obj.datastore_impl = [d for d in DATASTORES if d.TYPE == datastore][0] + obj.datastore_impl = [d for d in DATASTORES if d.TYPE == datastore][0] if datastore_root is None: - datastore_root = ctx.obj.datastore_impl.get_datastore_root_from_config( - ctx.obj.echo - ) + datastore_root = obj.datastore_impl.get_datastore_root_from_config(obj.echo) if datastore_root is None: raise CommandException( "Could not find the location of the datastore -- did you correctly set the " "METAFLOW_DATASTORE_SYSROOT_%s environment 
variable?" % datastore.upper() ) - ctx.obj.datastore_impl.datastore_root = datastore_root + obj.datastore_impl.datastore_root = datastore_root - FlowDataStore.default_storage_impl = ctx.obj.datastore_impl - ctx.obj.flow_datastore = FlowDataStore( - ctx.obj.flow.name, - ctx.obj.environment, - ctx.obj.metadata, - ctx.obj.event_logger, - ctx.obj.monitor, + FlowDataStore.default_storage_impl = obj.datastore_impl + obj.flow_datastore = FlowDataStore( + flow.name, + obj.environment, + obj.metadata, + obj.event_logger, + obj.monitor, ) # It is important to initialize flow decorators early as some of the # things they provide may be used by some of the objects initialized after. decorators._init_flow_decorators( - ctx.obj.flow, - ctx.obj.graph, - ctx.obj.environment, - ctx.obj.flow_datastore, - ctx.obj.metadata, - ctx.obj.logger, + flow, + obj.graph, + obj.environment, + obj.flow_datastore, + obj.metadata, + obj.logger, echo, deco_options, ) if decospecs: - decorators._attach_decorators(ctx.obj.flow, decospecs) + decorators._attach_decorators(flow, decospecs) # initialize current and parameter context for deploy-time parameters - current._set_env(flow=ctx.obj.flow, is_running=False) - parameters.set_parameter_context( - ctx.obj.flow.name, ctx.obj.echo, ctx.obj.flow_datastore - ) + current._set_env(flow=flow, is_running=False) + parameters.set_parameter_context(flow.name, ctx.obj.echo, obj.flow_datastore) if ctx.invoked_subcommand not in ("run", "resume"): # run/resume are special cases because they can add more decorators with --with, # so they have to take care of themselves. - decorators._attach_decorators(ctx.obj.flow, ctx.obj.environment.decospecs()) + decorators._attach_decorators(flow, obj.environment.decospecs()) decorators._init_step_decorators( - ctx.obj.flow, - ctx.obj.graph, - ctx.obj.environment, - ctx.obj.flow_datastore, - ctx.obj.logger, + flow, + obj.graph, + obj.environment, + obj.flow_datastore, + obj.logger, ) # TODO (savin): Enable lazy instantiation of package - ctx.obj.package = None + obj.package = None if ctx.invoked_subcommand is None: ctx.invoke(check) diff --git a/metaflow/cli_args.py b/metaflow/cli_args.py index 40918f984f..ff3b77da5d 100644 --- a/metaflow/cli_args.py +++ b/metaflow/cli_args.py @@ -55,7 +55,6 @@ def step_command( @staticmethod def _options(mapping): for k, v in mapping.items(): - # None or False arguments are ignored # v needs to be explicitly False, not falsy, e.g. 0 is an acceptable value if v is None or v is False: diff --git a/metaflow/client/filecache.py b/metaflow/client/filecache.py index 8c7d945d58..65a0e8422b 100644 --- a/metaflow/client/filecache.py +++ b/metaflow/client/filecache.py @@ -83,7 +83,6 @@ def get_logs_stream( def get_log_legacy( self, ds_type, location, logtype, attempt, flow_name, run_id, step_name, task_id ): - ds_cls = self._get_datastore_storage_impl(ds_type) ds_root = ds_cls.path_join(*ds_cls.path_split(location)[:-5]) cache_id = self._flow_ds_id(ds_type, ds_root, flow_name) diff --git a/metaflow/cmd/configure_cmd.py b/metaflow/cmd/configure_cmd.py index c5457f2911..1b9d0fd645 100644 --- a/metaflow/cmd/configure_cmd.py +++ b/metaflow/cmd/configure_cmd.py @@ -723,7 +723,6 @@ def verify_gcp_credentials(ctx): ) @click.pass_context def azure(ctx, profile): - # Greet the user! echo( "Welcome to Metaflow! Follow the prompts to configure your installation.\n", @@ -765,7 +764,6 @@ def azure(ctx, profile): ) @click.pass_context def gcp(ctx, profile): - # Greet the user! echo( "Welcome to Metaflow! 
Follow the prompts to configure your installation.\n", @@ -807,7 +805,6 @@ def gcp(ctx, profile): ) @click.pass_context def aws(ctx, profile): - # Greet the user! echo( "Welcome to Metaflow! Follow the prompts to configure your " "installation.\n", @@ -855,7 +852,6 @@ def aws(ctx, profile): ) @click.pass_context def kubernetes(ctx, profile): - check_kubernetes_client(ctx) # Greet the user! diff --git a/metaflow/datastore/content_addressed_store.py b/metaflow/datastore/content_addressed_store.py index e88ca1275d..e0533565ff 100644 --- a/metaflow/datastore/content_addressed_store.py +++ b/metaflow/datastore/content_addressed_store.py @@ -133,7 +133,7 @@ def load_blobs(self, keys, force_raw=False): load_paths.append((key, path)) with self._storage_impl.load_bytes([p for _, p in load_paths]) as loaded: - for (path_key, file_path, meta) in loaded: + for path_key, file_path, meta in loaded: key = self._storage_impl.path_split(path_key)[-1] # At this point, we either return the object as is (if raw) or # decode it according to the encoding version diff --git a/metaflow/datastore/datastore_set.py b/metaflow/datastore/datastore_set.py index 1b977079ac..924ef6202e 100644 --- a/metaflow/datastore/datastore_set.py +++ b/metaflow/datastore/datastore_set.py @@ -22,7 +22,6 @@ def __init__( prefetch_data_artifacts=None, allow_not_done=False, ): - task_datastores = flow_datastore.get_latest_task_datastores( run_id, steps=steps, pathspecs=pathspecs, allow_not_done=allow_not_done ) diff --git a/metaflow/datastore/flow_datastore.py b/metaflow/datastore/flow_datastore.py index f2707c1016..bdec1adf62 100644 --- a/metaflow/datastore/flow_datastore.py +++ b/metaflow/datastore/flow_datastore.py @@ -187,7 +187,6 @@ def get_task_datastore( mode="r", allow_not_done=False, ): - return TaskDataStore( self, run_id, diff --git a/metaflow/datastore/task_datastore.py b/metaflow/datastore/task_datastore.py index 3f003173a0..450e83c0aa 100644 --- a/metaflow/datastore/task_datastore.py +++ b/metaflow/datastore/task_datastore.py @@ -99,7 +99,6 @@ def __init__( mode="r", allow_not_done=False, ): - self._storage_impl = flow_datastore._storage_impl self.TYPE = self._storage_impl.TYPE self._ca_store = flow_datastore.ca_store @@ -361,7 +360,7 @@ def load_artifacts(self, names): # We assume that if we have one "old" style artifact, all of them are # like that which is an easy assumption to make since artifacts are all # stored by the same implementation of the datastore for a given task. - for (key, blob) in self._ca_store.load_blobs(to_load.keys()): + for key, blob in self._ca_store.load_blobs(to_load.keys()): names = to_load[key] for name in names: # We unpickle everytime to have fully distinct objects (the user diff --git a/metaflow/decorators.py b/metaflow/decorators.py index ebdd77ebd7..3383e9eb3f 100644 --- a/metaflow/decorators.py +++ b/metaflow/decorators.py @@ -174,7 +174,6 @@ def __str__(self): class FlowDecorator(Decorator): - _flow_decorators = [] options = {} diff --git a/metaflow/flowspec.py b/metaflow/flowspec.py index 827d7a34c1..e213d6b7a4 100644 --- a/metaflow/flowspec.py +++ b/metaflow/flowspec.py @@ -17,6 +17,7 @@ from .graph import FlowGraph from .unbounded_foreach import UnboundedForeachInput + # For Python 3 compatibility try: basestring @@ -60,7 +61,7 @@ class FlowSpec(object): """ # Attributes that are not saved in the datastore when checkpointing. 
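Two hunks above are behavioural rather than cosmetic. In metaflow/cli.py, start() now binds ctx.obj to a local obj (and obj.flow to flow) once, replacing dozens of repeated attribute lookups. In metaflow/graph.py, _parse gains an early return after typing a step as "end", since an end step has no transition left to parse. The graph fix, reduced to its shape with illustrative names:

    import ast

    def classify(func_ast: ast.FunctionDef) -> str:
        tail = func_ast.body[-1]
        if func_ast.name == "end":
            return "end"                     # early out: nothing more to parse
        if not isinstance(tail, ast.Expr):   # ensure the tail is an expression
            return "unknown"
        return "linear"

    tree = ast.parse("def end(self):\n    pass")
    assert classify(tree.body[0]) == "end"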
- # Name starting with '__', methods, functions and Parameters do not need + # Names starting with '__', methods, functions and Parameters do not need # to be listed. _EPHEMERAL = { "_EPHEMERAL", @@ -191,12 +192,13 @@ def _set_constants(self, graph, kwargs): } self._graph_info = graph_info - def _get_parameters(self): - for var in dir(self): - if var[0] == "_" or var in self._NON_PARAMETERS: + @classmethod + def _get_parameters(cls): + for var in dir(cls): + if var[0] == "_" or var in cls._NON_PARAMETERS: continue try: - val = getattr(self, var) + val = getattr(cls, var) except: continue if isinstance(val, Parameter): @@ -228,11 +230,11 @@ def __getattr__(self, name: str): else: raise AttributeError("Flow %s has no attribute '%s'" % (self.name, name)) - def cmd(self, cmdline, input={}, output=[]): + def cmd(self, cmdline, input=None, output=None): """ [Legacy function - do not use] """ - return cmd_with_io.cmd(cmdline, input=input, output=output) + return cmd_with_io.cmd(cmdline, input=input or {}, output=output or []) @property def index(self) -> Optional[int]: diff --git a/metaflow/graph.py b/metaflow/graph.py index 4ea41e0114..2343071ffd 100644 --- a/metaflow/graph.py +++ b/metaflow/graph.py @@ -72,7 +72,6 @@ def _expr_str(self, expr): return "%s.%s" % (expr.value.id, expr.attr) def _parse(self, func_ast): - self.num_args = len(func_ast.args.args) tail = func_ast.body[-1] @@ -80,8 +79,9 @@ def _parse(self, func_ast): if self.name == "end": # TYPE: end self.type = "end" + return - # ensure that the tail an expression + # ensure that the tail is an expression if not isinstance(tail, ast.Expr): return @@ -262,7 +262,6 @@ def node_specs(): ) def output_steps(self): - steps_info = {} graph_structure = [] diff --git a/metaflow/includefile.py b/metaflow/includefile.py index 66396c88c8..77f9e1008b 100644 --- a/metaflow/includefile.py +++ b/metaflow/includefile.py @@ -429,7 +429,6 @@ def _get_handler(url): class UploaderV2: - file_type = "uploader-v2" @classmethod diff --git a/metaflow/metaflow_version.py b/metaflow/metaflow_version.py index 9a36dc79ae..d995f7ec6b 100644 --- a/metaflow/metaflow_version.py +++ b/metaflow/metaflow_version.py @@ -60,7 +60,6 @@ def find_git_on_windows(): def call_git_describe(abbrev=7): """return the string output of git describe""" try: - # first, make sure we are actually in a Metaflow repo, # not some other repo with open(devnull, "w") as fnull: diff --git a/metaflow/multicore_utils.py b/metaflow/multicore_utils.py index 1ac81039c1..51566ef493 100644 --- a/metaflow/multicore_utils.py +++ b/metaflow/multicore_utils.py @@ -59,7 +59,6 @@ def _spawn(func, arg, dir): def parallel_imap_unordered(func, iterable, max_parallel=None, dir=None): - if max_parallel is None: max_parallel = cpu_count() @@ -67,7 +66,6 @@ def parallel_imap_unordered(func, iterable, max_parallel=None, dir=None): pids = [_spawn(func, arg, dir) for arg in islice(args_iter, max_parallel)] while pids: - pid, output_file = pids.pop() if os.waitpid(pid, 0)[1]: raise MulticoreException("Child failed") diff --git a/metaflow/parameters.py b/metaflow/parameters.py index a32a6c2e47..d120b2930c 100644 --- a/metaflow/parameters.py +++ b/metaflow/parameters.py @@ -77,7 +77,6 @@ def __init__( return_str=True, print_representation=None, ): - self.fun = fun self.field = field self.parameter_name = parameter_name diff --git a/metaflow/plugins/airflow/airflow.py b/metaflow/plugins/airflow/airflow.py index d806d93dbd..b2d52bd9fb 100644 --- a/metaflow/plugins/airflow/airflow.py +++ 
b/metaflow/plugins/airflow/airflow.py @@ -54,7 +54,6 @@ class Airflow(object): - TOKEN_STORAGE_ROOT = "mf.airflow" def __init__( diff --git a/metaflow/plugins/airflow/airflow_utils.py b/metaflow/plugins/airflow/airflow_utils.py index 39ce14b3ec..091837a040 100644 --- a/metaflow/plugins/airflow/airflow_utils.py +++ b/metaflow/plugins/airflow/airflow_utils.py @@ -228,7 +228,6 @@ def json_dump(val): class AirflowDAGArgs(object): - # `_arg_types` is a dictionary which represents the types of the arguments of an Airflow `DAG`. # `_arg_types` is used when parsing types back from the configuration json. # It doesn't cover all the arguments but covers many of the important one which can come from the cli. diff --git a/metaflow/plugins/argo/argo_client.py b/metaflow/plugins/argo/argo_client.py index 0f64ce0b02..84a7195ea0 100644 --- a/metaflow/plugins/argo/argo_client.py +++ b/metaflow/plugins/argo/argo_client.py @@ -12,7 +12,6 @@ class ArgoClientException(MetaflowException): class ArgoClient(object): def __init__(self, namespace=None): - self._kubernetes_client = KubernetesClient() self._namespace = namespace or "default" self._group = "argoproj.io" diff --git a/metaflow/plugins/aws/batch/batch_client.py b/metaflow/plugins/aws/batch/batch_client.py index b1b5bf4ee6..82100753bf 100644 --- a/metaflow/plugins/aws/batch/batch_client.py +++ b/metaflow/plugins/aws/batch/batch_client.py @@ -522,7 +522,6 @@ def __init__(self, ex): class RunningJob(object): - NUM_RETRIES = 8 def __init__(self, id, client): diff --git a/metaflow/plugins/aws/step_functions/step_functions.py b/metaflow/plugins/aws/step_functions/step_functions.py index 319afa4192..8b143d13f0 100644 --- a/metaflow/plugins/aws/step_functions/step_functions.py +++ b/metaflow/plugins/aws/step_functions/step_functions.py @@ -227,7 +227,6 @@ def get_existing_deployment(cls, name): return None def _compile(self): - # Visit every node of the flow and recursively build the state machine. 
def _visit(node, workflow, exit_node=None): if node.parallel_foreach: diff --git a/metaflow/plugins/aws/step_functions/step_functions_cli.py b/metaflow/plugins/aws/step_functions/step_functions_cli.py index 0ac26e1159..a63c2edf44 100644 --- a/metaflow/plugins/aws/step_functions/step_functions_cli.py +++ b/metaflow/plugins/aws/step_functions/step_functions_cli.py @@ -311,7 +311,6 @@ def make_flow( def resolve_token( name, token_prefix, obj, authorize, given_token, generate_new_token, is_project ): - # 1) retrieve the previous deployment, if one exists workflow = StepFunctions.get_existing_deployment(name) if workflow is None: diff --git a/metaflow/plugins/azure/azure_utils.py b/metaflow/plugins/azure/azure_utils.py index 0f3f465a17..8bf2505191 100644 --- a/metaflow/plugins/azure/azure_utils.py +++ b/metaflow/plugins/azure/azure_utils.py @@ -198,7 +198,6 @@ def __init__(self, token): self._credential = None def get_token(self, *_scopes, **_kwargs): - if (self._cached_token.expires_on - time.time()) < 300: from azure.identity import DefaultAzureCredential diff --git a/metaflow/plugins/cards/card_cli.py b/metaflow/plugins/cards/card_cli.py index 2c0c801774..fceb305481 100644 --- a/metaflow/plugins/cards/card_cli.py +++ b/metaflow/plugins/cards/card_cli.py @@ -655,7 +655,6 @@ def list( as_json=False, file=None, ): - card_id = id if pathspec is None: list_many_cards( diff --git a/metaflow/plugins/cards/card_decorator.py b/metaflow/plugins/cards/card_decorator.py index efa13a1ec9..7e56824b43 100644 --- a/metaflow/plugins/cards/card_decorator.py +++ b/metaflow/plugins/cards/card_decorator.py @@ -89,7 +89,6 @@ def _increment_step_counter(cls): def step_init( self, flow, graph, step_name, decorators, environment, flow_datastore, logger ): - self._flow_datastore = flow_datastore self._environment = environment self._logger = logger @@ -200,7 +199,6 @@ def _options(mapping): yield to_unicode(value) def _create_top_level_args(self): - top_level_options = { "quiet": True, "metadata": self._metadata.TYPE, diff --git a/metaflow/plugins/cards/card_modules/basic.py b/metaflow/plugins/cards/card_modules/basic.py index b078a8f82d..124c9eb25d 100644 --- a/metaflow/plugins/cards/card_modules/basic.py +++ b/metaflow/plugins/cards/card_modules/basic.py @@ -511,7 +511,6 @@ def render(self): class ErrorCard(MetaflowCard): - type = "error" def __init__(self, options={}, components=[], graph=None): @@ -556,7 +555,6 @@ def render(self, task, stack_trace=None): class DefaultCardJSON(MetaflowCard): - type = "default_json" def __init__(self, options=dict(only_repr=True), components=[], graph=None): @@ -577,7 +575,6 @@ def render(self, task): class DefaultCard(MetaflowCard): - ALLOW_USER_COMPONENTS = True type = "default" @@ -613,7 +610,6 @@ def render(self, task): class BlankCard(MetaflowCard): - ALLOW_USER_COMPONENTS = True type = "blank" diff --git a/metaflow/plugins/cards/card_modules/chevron/renderer.py b/metaflow/plugins/cards/card_modules/chevron/renderer.py index 28368cd7ee..b8da7bba0a 100644 --- a/metaflow/plugins/cards/card_modules/chevron/renderer.py +++ b/metaflow/plugins/cards/card_modules/chevron/renderer.py @@ -274,7 +274,6 @@ def render( # If the scope is a callable (as described in # https://mustache.github.io/mustache.5.html) if isinstance(scope, Callable): - # Generate template text from tags text = unicode("", "utf-8") tags = [] diff --git a/metaflow/plugins/cards/card_modules/convert_to_native_type.py b/metaflow/plugins/cards/card_modules/convert_to_native_type.py index d6cabec503..2ecff4697c 100644 
--- a/metaflow/plugins/cards/card_modules/convert_to_native_type.py +++ b/metaflow/plugins/cards/card_modules/convert_to_native_type.py @@ -100,7 +100,6 @@ def __call__(self, task, graph=None): return task_dict def _create_task_data_dict(self, task): - task_data_dict = {} type_inferred_objects = {"images": {}, "tables": {}} for data in task: diff --git a/metaflow/plugins/cards/card_modules/renderer_tools.py b/metaflow/plugins/cards/card_modules/renderer_tools.py index 6746e8fee0..16adf79902 100644 --- a/metaflow/plugins/cards/card_modules/renderer_tools.py +++ b/metaflow/plugins/cards/card_modules/renderer_tools.py @@ -40,6 +40,7 @@ def render_safely(func): This is a decorator that can be added to any `MetaflowCardComponent.render` The goal is to render subcomponents safely and ensure that they are JSON serializable. """ + # expects a renderer func def ret_func(self, *args, **kwargs): return _render_component_safely(self, func, True, *args, **kwargs) diff --git a/metaflow/plugins/cards/exception.py b/metaflow/plugins/cards/exception.py index 0b9188b447..96a59c4d4e 100644 --- a/metaflow/plugins/cards/exception.py +++ b/metaflow/plugins/cards/exception.py @@ -23,7 +23,6 @@ def __init__(self, card_name): class TypeRequiredException(MetaflowException): - headline = "Card type missing exception" def __init__(self): @@ -62,7 +61,6 @@ def __init__(self, pathspec, card_type=None, card_hash=None, card_id=None): class TaskNotFoundException(MetaflowException): - headline = "Cannot resolve task for pathspec" def __init__( @@ -84,7 +82,6 @@ def __init__( class IncorrectCardArgsException(MetaflowException): - headline = "Incorrect arguments to @card decorator" def __init__(self, card_type, args): @@ -93,7 +90,6 @@ def __init__(self, card_type, args): class UnrenderableCardException(MetaflowException): - headline = "Unable to render @card" def __init__(self, card_type, args): @@ -105,7 +101,6 @@ def __init__(self, card_type, args): class UnresolvableDatastoreException(MetaflowException): - headline = "Cannot resolve datastore type from `Task.metadata`" def __init__(self, task): diff --git a/metaflow/plugins/catch_decorator.py b/metaflow/plugins/catch_decorator.py index 54030e9a7f..c650d1f6ca 100644 --- a/metaflow/plugins/catch_decorator.py +++ b/metaflow/plugins/catch_decorator.py @@ -65,7 +65,6 @@ def _set_var(self, flow, val): def task_exception( self, exception, step, flow, graph, retry_count, max_user_code_retries ): - # Only "catch" exceptions after all retries are exhausted if retry_count < max_user_code_retries: return False @@ -100,7 +99,6 @@ def step_task_retry_count(self): def task_decorate( self, step_func, func, graph, retry_count, max_user_code_retries, ubf_context ): - # if the user code has failed max_user_code_retries times, @catch # runs a piece of fallback code instead. This way we can continue # running the flow downstream, as we have a proper entry for this task. 
diff --git a/metaflow/plugins/datastores/azure_storage.py b/metaflow/plugins/datastores/azure_storage.py index c25ed74e18..2694f71d15 100644 --- a/metaflow/plugins/datastores/azure_storage.py +++ b/metaflow/plugins/datastores/azure_storage.py @@ -366,7 +366,6 @@ def save_bytes(self, path_and_bytes_iter, overwrite=False, len_hint=0): @handle_executor_exceptions def load_bytes(self, keys): - tmpdir = mkdtemp(dir=self._tmproot, prefix="metaflow.azure.load_bytes.") try: futures = [ diff --git a/metaflow/plugins/datatools/local.py b/metaflow/plugins/datatools/local.py index f326f6e041..146ed226a3 100644 --- a/metaflow/plugins/datatools/local.py +++ b/metaflow/plugins/datatools/local.py @@ -23,7 +23,6 @@ class LocalObject(object): """ def __init__(self, url, path): - # all fields of S3Object should return a unicode object def ensure_unicode(x): return None if x is None else to_unicode(x) diff --git a/metaflow/plugins/datatools/s3/s3.py b/metaflow/plugins/datatools/s3/s3.py index 6bcc24ff7a..730a22476d 100644 --- a/metaflow/plugins/datatools/s3/s3.py +++ b/metaflow/plugins/datatools/s3/s3.py @@ -142,7 +142,6 @@ def __init__( range_info: Optional[RangeInfo] = None, last_modified: int = None, ): - # all fields of S3Object should return a unicode object prefix, url, path = map(ensure_unicode, (prefix, url, path)) diff --git a/metaflow/plugins/datatools/s3/s3op.py b/metaflow/plugins/datatools/s3/s3op.py index 95f67b8fa3..5494391a5b 100644 --- a/metaflow/plugins/datatools/s3/s3op.py +++ b/metaflow/plugins/datatools/s3/s3op.py @@ -67,7 +67,6 @@ def __init__( range=None, idx=None, ): - self.bucket = bucket self.path = path self.url = url @@ -385,7 +384,6 @@ def start_workers(mode, urls, num_workers, inject_failure, s3config): def process_urls(mode, urls, verbose, inject_failure, num_workers, s3config): - if verbose: print("%sing %d files.." 
% (mode.capitalize(), len(urls)), file=sys.stderr) @@ -729,7 +727,6 @@ def lst( s3sessionvars=None, s3clientparams=None, ): - s3config = S3Config( s3role, json.loads(s3sessionvars) if s3sessionvars else None, @@ -1004,7 +1001,6 @@ def get( s3clientparams=None, inject_failure=0, ): - s3config = S3Config( s3role, json.loads(s3sessionvars) if s3sessionvars else None, @@ -1149,7 +1145,6 @@ def info( s3clientparams=None, inject_failure=0, ): - s3config = S3Config( s3role, json.loads(s3sessionvars) if s3sessionvars else None, diff --git a/metaflow/plugins/env_escape/configurations/test_lib_impl/test_lib.py b/metaflow/plugins/env_escape/configurations/test_lib_impl/test_lib.py index 420d148bc4..a7d88ff54a 100644 --- a/metaflow/plugins/env_escape/configurations/test_lib_impl/test_lib.py +++ b/metaflow/plugins/env_escape/configurations/test_lib_impl/test_lib.py @@ -10,7 +10,6 @@ class SomeException(MyBaseException): class TestClass1(object): - cls_object = 25 def __init__(self, value): diff --git a/metaflow/plugins/env_escape/server.py b/metaflow/plugins/env_escape/server.py index 11ef2cf4d6..98b3ea843d 100644 --- a/metaflow/plugins/env_escape/server.py +++ b/metaflow/plugins/env_escape/server.py @@ -61,7 +61,6 @@ class Server(object): def __init__(self, config_dir, max_pickle_version): - self._max_pickle_version = data_transferer.defaultProtocol = max_pickle_version try: mappings = importlib.import_module(".server_mappings", package=config_dir) diff --git a/metaflow/plugins/env_escape/stub.py b/metaflow/plugins/env_escape/stub.py index 00364e6155..5f5d3966cc 100644 --- a/metaflow/plugins/env_escape/stub.py +++ b/metaflow/plugins/env_escape/stub.py @@ -78,6 +78,7 @@ def __repr__(self): def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" + # Compatibility 2/3. Remove when only 3 support class metaclass(type): def __new__(cls, name, this_bases, d): @@ -262,7 +263,6 @@ def create_class( setattr_overrides, class_methods, ): - class_dict = {"__slots__": ()} for name, doc in class_methods.items(): method_type = NORMAL_METHOD diff --git a/metaflow/plugins/kubernetes/kubernetes.py b/metaflow/plugins/kubernetes/kubernetes.py index 1ff06f56ca..c44dfc5ffc 100644 --- a/metaflow/plugins/kubernetes/kubernetes.py +++ b/metaflow/plugins/kubernetes/kubernetes.py @@ -152,7 +152,6 @@ def create_job( env=None, tolerations=None, ): - if env is None: env = {} diff --git a/metaflow/plugins/kubernetes/kubernetes_job.py b/metaflow/plugins/kubernetes/kubernetes_job.py index bd1e7cb96b..4b2194d9a3 100644 --- a/metaflow/plugins/kubernetes/kubernetes_job.py +++ b/metaflow/plugins/kubernetes/kubernetes_job.py @@ -266,7 +266,6 @@ def annotation(self, name, value): class RunningJob(object): - # State Machine implementation for the lifecycle behavior documented in # https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/ # @@ -386,7 +385,6 @@ def kill(self): client = self._client.get() if not self.is_done: if self.is_running: - # Case 1. 
from kubernetes.stream import stream diff --git a/metaflow/plugins/metadata/local.py b/metaflow/plugins/metadata/local.py index 792572219f..d393c64ac3 100644 --- a/metaflow/plugins/metadata/local.py +++ b/metaflow/plugins/metadata/local.py @@ -487,7 +487,6 @@ def _new_task( def _make_path( flow_name=None, run_id=None, step_name=None, task_id=None, create_on_absent=True ): - from metaflow.plugins.datastores.local_storage import LocalStorage if LocalStorage.datastore_root is None: diff --git a/metaflow/plugins/metadata/service.py b/metaflow/plugins/metadata/service.py index a66ca1c6c1..aba6005399 100644 --- a/metaflow/plugins/metadata/service.py +++ b/metaflow/plugins/metadata/service.py @@ -346,7 +346,6 @@ def _get_or_create( tags=None, sys_tags=None, ): - if tags is None: tags = set() if sys_tags is None: diff --git a/metaflow/plugins/parallel_decorator.py b/metaflow/plugins/parallel_decorator.py index c93549926e..f856a38af8 100644 --- a/metaflow/plugins/parallel_decorator.py +++ b/metaflow/plugins/parallel_decorator.py @@ -16,7 +16,6 @@ def __init__(self, attributes=None, statically_defined=False): def runtime_step_cli( self, cli_args, retry_count, max_user_code_retries, ubf_context ): - if ubf_context == UBF_CONTROL: num_parallel = cli_args.task.ubf_iter.num_parallel cli_args.command_options["num-parallel"] = str(num_parallel) diff --git a/metaflow/plugins/project_decorator.py b/metaflow/plugins/project_decorator.py index 37ff93efe6..09927fcd16 100644 --- a/metaflow/plugins/project_decorator.py +++ b/metaflow/plugins/project_decorator.py @@ -83,7 +83,6 @@ def get_top_level_options(self): def format_name(flow_name, project_name, deploy_prod, given_branch, user_name): - if not project_name: # an empty string is not a valid project name raise MetaflowException( diff --git a/metaflow/procpoll.py b/metaflow/procpoll.py index 08358a47d2..9f9b9adfd3 100644 --- a/metaflow/procpoll.py +++ b/metaflow/procpoll.py @@ -31,7 +31,7 @@ def remove(self, fd): self._poll.unregister(fd) def poll(self, timeout): - for (fd, event) in self._poll.poll(timeout): + for fd, event in self._poll.poll(timeout): yield ProcPollEvent( fd=fd, can_read=bool(event & select.POLLIN), diff --git a/metaflow/pylint_wrapper.py b/metaflow/pylint_wrapper.py index fa3da7e023..3b13f30978 100644 --- a/metaflow/pylint_wrapper.py +++ b/metaflow/pylint_wrapper.py @@ -27,7 +27,8 @@ def __init__(self, fname): def has_pylint(self): return self._run is not None - def run(self, logger=None, warnings=False, pylint_config=[]): + def run(self, logger=None, warnings=False, pylint_config=None): + pylint_config = pylint_config or [] args = [self._fname] if not warnings: args.append("--errors-only") diff --git a/metaflow/runtime.py b/metaflow/runtime.py index 335312232c..7fb435c069 100644 --- a/metaflow/runtime.py +++ b/metaflow/runtime.py @@ -75,7 +75,6 @@ def __init__( max_num_splits=MAX_NUM_SPLITS, max_log_size=MAX_LOG_SIZE, ): - if run_id is None: self._run_id = metadata.new_run_id() else: @@ -151,7 +150,6 @@ def __init__( deco.runtime_init(flow, graph, package, self._run_id) def _new_task(self, step, input_paths=None, **kwargs): - if input_paths is None: may_clone = True else: @@ -228,7 +226,6 @@ def execute(self): # main scheduling loop exception = None while self._run_queue or self._active_tasks[0] > 0: - # 1. are any of the current workers finished? finished_tasks = list(self._poll_workers()) # 2. 
push new tasks triggered by the finished tasks to the queue @@ -495,7 +492,6 @@ def siblings(foreach_stack): ) def _queue_task_foreach(self, task, next_steps): - # CHECK: this condition should be enforced by the linter but # let's assert that the assumption holds if len(next_steps) > 1: @@ -709,7 +705,6 @@ def __init__( join_type=None, task_id=None, ): - self.step = step self.flow_name = flow.name self.run_id = run_id @@ -1139,11 +1134,9 @@ def __init__(self, task): self.env = {} def get_args(self): - # TODO: Make one with dict_to_cli_options; see cli_args.py for more detail def _options(mapping): for k, v in mapping.items(): - # None or False arguments are ignored # v needs to be explicitly False, not falsy, e.g. 0 is an acceptable value if v is None or v is False: @@ -1176,7 +1169,6 @@ def __str__(self): class Worker(object): def __init__(self, task, max_logs_size): - self.task = task self._proc = self._launch() diff --git a/metaflow/sidecar/sidecar_subprocess.py b/metaflow/sidecar/sidecar_subprocess.py index 0b2ee99dd3..3428372752 100644 --- a/metaflow/sidecar/sidecar_subprocess.py +++ b/metaflow/sidecar/sidecar_subprocess.py @@ -67,7 +67,6 @@ def __init__(self, worker_type): self.start() def start(self): - if ( self._worker_type is not None and self._worker_type.startswith(NULL_SIDECAR_PREFIX) diff --git a/metaflow/task.py b/metaflow/task.py index 5e392a2f98..82927a4363 100644 --- a/metaflow/task.py +++ b/metaflow/task.py @@ -62,7 +62,6 @@ def _exec_step_function(self, step_function, input_obj=None): step_function(input_obj) def _init_parameters(self, parameter_ds, passdown=True): - cls = self.flow.__class__ def _set_cls_var(_, __): @@ -381,7 +380,6 @@ def run_step( retry_count, max_user_code_retries, ): - if run_id and task_id: self.metadata.register_run_id(run_id) self.metadata.register_task_id(run_id, step_name, task_id, retry_count) @@ -551,7 +549,6 @@ def run_step( ) for deco in decorators: - deco.task_pre_step( step_name, output, diff --git a/metaflow/util.py b/metaflow/util.py index 4e2c9d92f5..db842ddacd 100644 --- a/metaflow/util.py +++ b/metaflow/util.py @@ -244,7 +244,6 @@ class of the given object. 
def compress_list(lst, separator=",", rangedelim=":", zlibmarker="!", zlibmin=500): - bad_items = [x for x in lst if separator in x or rangedelim in x or zlibmarker in x] if bad_items: raise MetaflowInternalError( diff --git a/test/core/metaflow_test/__init__.py b/test/core/metaflow_test/__init__.py index 0c5012f043..d90f273363 100644 --- a/test/core/metaflow_test/__init__.py +++ b/test/core/metaflow_test/__init__.py @@ -1,5 +1,4 @@ import sys -import os from metaflow.exception import MetaflowException from metaflow import current diff --git a/test/core/metaflow_test/cli_check.py b/test/core/metaflow_test/cli_check.py index f6edc00a3a..054e945e6f 100644 --- a/test/core/metaflow_test/cli_check.py +++ b/test/core/metaflow_test/cli_check.py @@ -102,7 +102,6 @@ def artifact_dict_if_exists(self, step, name): def assert_log(self, step, logtype, value, exact_match=True): log = self.get_log(step, logtype) if (exact_match and log != value) or (not exact_match and value not in log): - raise AssertLogFailed( "Task '%s/%s' expected %s log '%s' but got '%s'" % (self.run_id, step, logtype, repr(value), repr(log)) diff --git a/test/core/metaflow_test/formatter.py b/test/core/metaflow_test/formatter.py index a68d134092..343fdc89dd 100644 --- a/test/core/metaflow_test/formatter.py +++ b/test/core/metaflow_test/formatter.py @@ -78,7 +78,6 @@ def _choose_step(self, name, node): ) def _flow_lines(self): - tags = [] for step in self.steps: tags.extend(tag.split("(")[0] for tag in step.tags) diff --git a/test/core/run_tests.py b/test/core/run_tests.py index dec44bcabe..3a829169eb 100644 --- a/test/core/run_tests.py +++ b/test/core/run_tests.py @@ -168,7 +168,6 @@ def run_cmd(mode): def run_all(ok_tests, ok_contexts, ok_graphs, debug, num_parallel, inherit_env): - tests = [ test for test in sorted(iter_tests(), key=lambda x: x.PRIORITY) @@ -207,7 +206,6 @@ def run_test_cases(args): formatter = FlowFormatter(graph, test) if formatter.valid: - for context in contexts["contexts"]: if context.get("disable_parallel", False) and any( "num_parallel" in node for node in graph["graph"].values() @@ -290,7 +288,6 @@ def cli( debug=False, inherit_env=False, ): - parse = lambda x: {t.lower() for t in x.split(",") if t} failed = run_all( diff --git a/test/core/tests/card_default_editable_with_id.py b/test/core/tests/card_default_editable_with_id.py index c46c52248a..483e156ce3 100644 --- a/test/core/tests/card_default_editable_with_id.py +++ b/test/core/tests/card_default_editable_with_id.py @@ -47,7 +47,6 @@ def check_results(self, flow, checker): cli_check_dict = checker.artifact_dict(step.name, "random_number") for task_pathspec in cli_check_dict: - task_id = task_pathspec.split("/")[-1] cards_info = checker.list_cards(step.name, task_id) number = cli_check_dict[task_pathspec]["random_number"] diff --git a/test/core/tests/catch_retry.py b/test/core/tests/catch_retry.py index 6d9231da46..a13bdf3375 100644 --- a/test/core/tests/catch_retry.py +++ b/test/core/tests/catch_retry.py @@ -60,7 +60,6 @@ def step_all(self): raise TestRetry() def check_results(self, flow, checker): - checker.assert_log( "start", "stdout", "stdout testing logs 3\n", exact_match=False ) @@ -69,7 +68,6 @@ def check_results(self, flow, checker): ) for step in flow: - if step.name == "start": checker.assert_artifact("start", "test_attempt", 3) try: diff --git a/test/core/tests/tag_catch.py b/test/core/tests/tag_catch.py index a326a46684..de4ecd0ed9 100644 --- a/test/core/tests/tag_catch.py +++ b/test/core/tests/tag_catch.py @@ -62,7 +62,6 @@ def 
step_all(self): os.kill(os.getpid(), signal.SIGKILL) def check_results(self, flow, checker): - checker.assert_log( "start", "stdout", "stdout testing logs 3\n", exact_match=False ) @@ -71,7 +70,6 @@ def check_results(self, flow, checker): ) for step in flow: - if step.name == "start": checker.assert_artifact("start", "test_attempt", 3) try: diff --git a/test/data/s3/s3_data.py b/test/data/s3/s3_data.py index 0584c6ff14..da5c84451c 100644 --- a/test/data/s3/s3_data.py +++ b/test/data/s3/s3_data.py @@ -133,7 +133,6 @@ class RandomFile(object): - cached_digests = {} cached_files = {} diff --git a/test/unit/test_compute_resource_attributes.py b/test/unit/test_compute_resource_attributes.py index adb21c521b..fd3089cf79 100644 --- a/test/unit/test_compute_resource_attributes.py +++ b/test/unit/test_compute_resource_attributes.py @@ -6,7 +6,6 @@ def test_compute_resource_attributes(): - # use default if nothing is set assert compute_resource_attributes([], MockDeco("batch", {}), {"cpu": "1"}) == { "cpu": "1"