mypy: use more f-strings (#12714)
Done largely using https://github.com/ikamensh/flynt.
I went over this pretty closely since I wasn't familiar with the tool.
I made a couple of changes and left out a couple of instances that were
harder to parse.
hauntsaninja committed May 1, 2022
1 parent fc335cb commit 3c1a762
Showing 47 changed files with 190 additions and 236 deletions.
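For reference, flynt rewrites %-interpolation and .format() calls into f-strings in place; it is installed with pip install flynt and invoked as flynt <path>. The spot-checks below mirror a few of the conversions in this diff and assert that the old and new forms render identically — a quick sanity sketch, with stand-in values that are not taken from mypy:

    # Each pair mirrors a rewrite from this commit (mypy/build.py, mypy/checkexpr.py).
    key, value = "data_dir", "/tmp/mypy"
    assert "{:24}{}".format(key + ":", value) == f"{key + ':':24}{value}"

    mod = "os.path"
    assert "mypy: can't find module '%s'" % mod == f"mypy: can't find module '{mod}'"

    n, t0, t1 = 42, 0.0, 1.234
    assert ("Loaded graph with %d nodes (%.3f sec)" % (n, t1 - t0)
            == f"Loaded graph with {n} nodes ({t1 - t0:.3f} sec)")

    fullname = "builtins.int"
    assert ("PlaceholderNode %r leaked to checker" % fullname
            == f"PlaceholderNode {fullname!r} leaked to checker")

Note the first pair: f-string format specs still take an arbitrary expression before the colon, so "{:24}".format(key + ":") becomes {key + ':':24}.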
41 changes: 20 additions & 21 deletions mypy/build.py
@@ -665,7 +665,7 @@ def dump_stats(self) -> None:
if self.options.dump_build_stats:
print("Stats:")
for key, value in sorted(self.stats_summary().items()):
print("{:24}{}".format(key + ":", value))
print(f"{key + ':':24}{value}")

def use_fine_grained_cache(self) -> bool:
return self.cache_enabled and self.options.use_fine_grained_cache
@@ -1083,7 +1083,7 @@ def read_deps_cache(manager: BuildManager,
except FileNotFoundError:
matched = False
if not matched:
- manager.log('Invalid or missing fine-grained deps cache: {}'.format(meta['path']))
+ manager.log(f"Invalid or missing fine-grained deps cache: {meta['path']}")
return None

return module_deps_metas
@@ -1485,8 +1485,7 @@ def write_cache(id: str, path: str, tree: MypyFile,

# Obtain file paths.
meta_json, data_json, _ = get_cache_names(id, path, manager.options)
- manager.log('Writing {} {} {} {}'.format(
-     id, path, meta_json, data_json))
+ manager.log(f'Writing {id} {path} {meta_json} {data_json}')

# Update tree.path so that in bazel mode it's made relative (since
# sometimes paths leak out).
@@ -1590,7 +1589,7 @@ def delete_cache(id: str, path: str, manager: BuildManager) -> None:
# tracked separately.
meta_path, data_path, _ = get_cache_names(id, path, manager.options)
cache_paths = [meta_path, data_path]
- manager.log('Deleting {} {} {}'.format(id, path, " ".join(x for x in cache_paths if x)))
+ manager.log(f"Deleting {id} {path} {' '.join(x for x in cache_paths if x)}")

for filename in cache_paths:
try:
@@ -2490,7 +2489,7 @@ def find_module_and_diagnose(manager: BuildManager,
and not options.custom_typeshed_dir):
raise CompileError([
f'mypy: "{os.path.relpath(result)}" shadows library module "{id}"',
- 'note: A user-defined top-level module with name "%s" is not supported' % id
+ f'note: A user-defined top-level module with name "{id}" is not supported'
])
return (result, follow_imports)
else:
@@ -2523,7 +2522,7 @@ def find_module_and_diagnose(manager: BuildManager,
# If we can't find a root source it's always fatal.
# TODO: This might hide non-fatal errors from
# root sources processed earlier.
raise CompileError(["mypy: can't find module '%s'" % id])
raise CompileError([f"mypy: can't find module '{id}'"])
else:
raise ModuleNotFound

@@ -2670,21 +2669,21 @@ def log_configuration(manager: BuildManager, sources: List[BuildSource]) -> None
]

for conf_name, conf_value in configuration_vars:
manager.log("{:24}{}".format(conf_name + ":", conf_value))
manager.log(f"{conf_name + ':':24}{conf_value}")

for source in sources:
manager.log("{:24}{}".format("Found source:", source))
manager.log(f"{'Found source:':24}{source}")

# Complete list of searched paths can get very long, put them under TRACE
for path_type, paths in manager.search_paths._asdict().items():
if not paths:
manager.trace("No %s" % path_type)
manager.trace(f"No {path_type}")
continue

manager.trace("%s:" % path_type)
manager.trace(f"{path_type}:")

for pth in paths:
manager.trace(" %s" % pth)
manager.trace(f" {pth}")


# The driver
@@ -2720,7 +2719,7 @@ def dispatch(sources: List[BuildSource],
if not graph:
print("Nothing to do?!", file=stdout)
return graph
manager.log("Loaded graph with %d nodes (%.3f sec)" % (len(graph), t1 - t0))
manager.log(f"Loaded graph with {len(graph)} nodes ({t1 - t0:.3f} sec)")
if manager.options.dump_graph:
dump_graph(graph, stdout)
return graph
@@ -3009,7 +3008,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
scc.append('builtins')
if manager.options.verbosity >= 2:
for id in scc:
manager.trace("Priorities for %s:" % id,
manager.trace(f"Priorities for {id}:",
" ".join("%s:%d" % (x, graph[id].priorities[x])
for x in graph[id].dependencies
if x in ascc and x in graph[id].priorities))
@@ -3059,19 +3058,19 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
# (on some platforms).
if oldest_in_scc < newest_in_deps:
fresh = False
fresh_msg = "out of date by %.0f seconds" % (newest_in_deps - oldest_in_scc)
fresh_msg = f"out of date by {newest_in_deps - oldest_in_scc:.0f} seconds"
else:
fresh_msg = "fresh"
elif undeps:
fresh_msg = "stale due to changed suppression (%s)" % " ".join(sorted(undeps))
fresh_msg = f"stale due to changed suppression ({' '.join(sorted(undeps))})"
elif stale_scc:
fresh_msg = "inherently stale"
if stale_scc != ascc:
fresh_msg += " (%s)" % " ".join(sorted(stale_scc))
fresh_msg += f" ({' '.join(sorted(stale_scc))})"
if stale_deps:
fresh_msg += " with stale deps (%s)" % " ".join(sorted(stale_deps))
fresh_msg += f" with stale deps ({' '.join(sorted(stale_deps))})"
else:
fresh_msg = "stale due to deps (%s)" % " ".join(sorted(stale_deps))
fresh_msg = f"stale due to deps ({' '.join(sorted(stale_deps))})"

# Initialize transitive_error for all SCC members from union
# of transitive_error of dependencies.
@@ -3371,7 +3370,7 @@ def topsort(data: Dict[T, Set[T]]) -> Iterable[Set[T]]:
data = {item: (dep - ready)
for item, dep in data.items()
if item not in ready}
- assert not data, "A cyclic dependency exists amongst %r" % data
+ assert not data, f"A cyclic dependency exists amongst {data!r}"


def missing_stubs_file(cache_dir: str) -> str:
@@ -3388,7 +3387,7 @@ def record_missing_stub_packages(cache_dir: str, missing_stub_packages: Set[str]
if missing_stub_packages:
with open(fnam, 'w') as f:
for pkg in sorted(missing_stub_packages):
- f.write('%s\n' % pkg)
+ f.write(f'{pkg}\n')
else:
if os.path.isfile(fnam):
os.remove(fnam)
8 changes: 3 additions & 5 deletions mypy/checker.py
@@ -886,7 +886,7 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: Optional[str])
self.msg.unimported_type_becomes_any("Return type", ret_type, fdef)
for idx, arg_type in enumerate(fdef.type.arg_types):
if has_any_from_unimported_type(arg_type):
prefix = f"Argument {idx + 1} to \"{fdef.name}\""
prefix = f'Argument {idx + 1} to "{fdef.name}"'
self.msg.unimported_type_becomes_any(prefix, arg_type, fdef)
check_for_explicit_any(fdef.type, self.options, self.is_typeshed_stub,
self.msg, context=fdef)
@@ -1918,9 +1918,7 @@ def check_final_enum(self, defn: ClassDef, base: TypeInfo) -> None:
for sym in base.names.values():
if self.is_final_enum_value(sym):
self.fail(
- 'Cannot extend enum with existing members: "{}"'.format(
-     base.name,
- ),
+ f'Cannot extend enum with existing members: "{base.name}"',
defn,
)
break
@@ -2571,7 +2569,7 @@ def check_compatibility_super(self, lvalue: RefExpr, lvalue_type: Optional[Type]
return self.check_subtype(compare_type, base_type, rvalue,
message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT,
'expression has type',
'base class "%s" defined the type as' % base.name,
f'base class "{base.name}" defined the type as',
code=codes.ASSIGNMENT)
return True

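One detail in the first checker.py hunk above: the original f"Argument {idx + 1} to \"{fdef.name}\"" was already valid Python — before 3.12, backslashes are banned only inside an f-string's braces, not in its literal text — so swapping the outer quotes is purely a readability cleanup. A small illustration with a made-up name:

    name = "f"
    # Both spellings are legal pre-3.12; the rewrite just drops the escape noise.
    assert f"Argument 1 to \"{name}\"" == f'Argument 1 to "{name}"'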
2 changes: 1 addition & 1 deletion mypy/checkexpr.py
@@ -263,7 +263,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
result = self.object_type()
else:
if isinstance(node, PlaceholderNode):
- assert False, 'PlaceholderNode %r leaked to checker' % node.fullname
+ assert False, f'PlaceholderNode {node.fullname!r} leaked to checker'
# Unknown reference; use any type implicitly to avoid
# generating extra type errors.
result = AnyType(TypeOfAny.from_error)
2 changes: 1 addition & 1 deletion mypy/checkstrformat.py
@@ -718,7 +718,7 @@ def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier],
self.chk.check_subtype(rep_type, expected_type, replacements,
message_registry.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
'expression has type',
- 'placeholder with key \'%s\' has type' % specifier.key,
+ f'placeholder with key \'{specifier.key}\' has type',
code=codes.STRING_FORMATTING)
if specifier.conv_type == 's':
self.check_s_special_cases(expr, rep_type, expr)
10 changes: 5 additions & 5 deletions mypy/config_parser.py
@@ -211,10 +211,10 @@ def parse_config_file(options: Options, set_strict_flags: Callable[[], None],

if 'mypy' not in parser:
if filename or file_read not in defaults.SHARED_CONFIG_FILES:
print("%s: No [mypy] section in config file" % file_read, file=stderr)
print(f"{file_read}: No [mypy] section in config file", file=stderr)
else:
section = parser['mypy']
- prefix = '{}: [{}]: '.format(file_read, 'mypy')
+ prefix = f"{file_read}: [mypy]: "
updates, report_dirs = parse_section(
prefix, options, set_strict_flags, section, config_types, stderr)
for k, v in updates.items():
@@ -322,7 +322,7 @@ def destructure_overrides(toml_data: Dict[str, Any]) -> Dict[str, Any]:
for module in modules:
module_overrides = override.copy()
del module_overrides['module']
- old_config_name = 'mypy-%s' % module
+ old_config_name = f'mypy-{module}'
if old_config_name not in result:
result[old_config_name] = module_overrides
else:
@@ -447,7 +447,7 @@ def convert_to_boolean(value: Optional[Any]) -> bool:
if not isinstance(value, str):
value = str(value)
if value.lower() not in configparser.RawConfigParser.BOOLEAN_STATES:
- raise ValueError('Not a boolean: %s' % value)
+ raise ValueError(f'Not a boolean: {value}')
return configparser.RawConfigParser.BOOLEAN_STATES[value.lower()]


@@ -552,7 +552,7 @@ def get_config_module_names(filename: Optional[str], modules: List[str]) -> str:
return ''

if not is_toml(filename):
return ", ".join("[mypy-%s]" % module for module in modules)
return ", ".join(f"[mypy-{module}]" for module in modules)

return "module = ['%s']" % ("', '".join(sorted(modules)))

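The last context line of this file's diff, return "module = ['%s']" % ("', '".join(sorted(modules))), stays on %-formatting — plausibly one of the instances the commit message calls harder to parse, since the expression already uses both quote characters and a pre-3.12 f-string cannot reuse its outer quote inside the braces. A hypothetical conversion, not part of this commit, would hoist the join into a variable:

    modules = ["pkg.b", "pkg.a"]
    old = "module = ['%s']" % ("', '".join(sorted(modules)))
    joined = "', '".join(sorted(modules))  # hoisted so the f-string needs only one quote style
    new = f"module = ['{joined}']"
    assert old == new == "module = ['pkg.a', 'pkg.b']"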
8 changes: 4 additions & 4 deletions mypy/dmypy/client.py
@@ -273,7 +273,7 @@ def do_run(args: argparse.Namespace) -> None:
response = request(args.status_file, 'run', version=__version__, args=args.flags)
# If the daemon signals that a restart is necessary, do it
if 'restart' in response:
- print('Restarting: {}'.format(response['restart']))
+ print(f"Restarting: {response['restart']}")
restart_server(args, allow_sources=True)
response = request(args.status_file, 'run', version=__version__, args=args.flags)

@@ -300,7 +300,7 @@ def do_status(args: argparse.Namespace) -> None:
if args.verbose or 'error' in response:
show_stats(response)
if 'error' in response:
fail("Daemon is stuck; consider %s kill" % sys.argv[0])
fail(f"Daemon is stuck; consider {sys.argv[0]} kill")
print("Daemon is up and running")


@@ -311,7 +311,7 @@ def do_stop(args: argparse.Namespace) -> None:
response = request(args.status_file, 'stop', timeout=5)
if 'error' in response:
show_stats(response)
fail("Daemon is stuck; consider %s kill" % sys.argv[0])
fail(f"Daemon is stuck; consider {sys.argv[0]} kill")
else:
print("Daemon stopped")

@@ -389,7 +389,7 @@ def check_output(response: Dict[str, Any], verbose: bool,
try:
out, err, status_code = response['out'], response['err'], response['status']
except KeyError:
fail("Response: %s" % str(response))
fail(f"Response: {str(response)}")
sys.stdout.write(out)
sys.stdout.flush()
sys.stderr.write(err)
2 changes: 1 addition & 1 deletion mypy/dmypy_server.py
@@ -264,7 +264,7 @@ def run_command(self, command: str, data: Dict[str, object]) -> Dict[str, object
key = 'cmd_' + command
method = getattr(self.__class__, key, None)
if method is None:
return {'error': "Unrecognized command '%s'" % command}
return {'error': f"Unrecognized command '{command}'"}
else:
if command not in {'check', 'recheck', 'run'}:
# Only the above commands use some error formatting.
2 changes: 1 addition & 1 deletion mypy/dmypy_util.py
@@ -27,5 +27,5 @@ def receive(connection: IPCBase) -> Any:
except Exception as e:
raise OSError("Data received is not valid JSON") from e
if not isinstance(data, dict):
raise OSError("Data received is not a dict (%s)" % str(type(data)))
raise OSError(f"Data received is not a dict ({type(data)})")
return data
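Note the contrast with dmypy/client.py above: this hunk drops the explicit str() call because f-string substitution already applies str() to each interpolated value, while client.py keeps a now-redundant f"Response: {str(response)}". A quick check that the call adds nothing:

    data = [1, 2]
    # %-style needed an explicit conversion; the f-string does it implicitly.
    assert f"({type(data)})" == "(%s)" % str(type(data)) == "(<class 'list'>)"
    assert f"{str(data)}" == f"{data}"  # str() inside the braces is redundant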
2 changes: 1 addition & 1 deletion mypy/fastparse.py
@@ -56,7 +56,7 @@
if sys.version_info >= (3, 8):
import ast as ast3
assert 'kind' in ast3.Constant._fields, \
"This 3.8.0 alpha (%s) is too old; 3.8.0a3 required" % sys.version.split()[0]
f"This 3.8.0 alpha ({sys.version.split()[0]}) is too old; 3.8.0a3 required"
# TODO: Num, Str, Bytes, NameConstant, Ellipsis are deprecated in 3.8.
# TODO: Index, ExtSlice are deprecated in 3.9.
from ast import (
21 changes: 10 additions & 11 deletions mypy/main.py
@@ -791,9 +791,9 @@ def add_invertible_flag(flag: str,
description='Generate a report in the specified format.')
for report_type in sorted(defaults.REPORTER_NAMES):
if report_type not in {'memory-xml'}:
- report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
+ report_group.add_argument(f"--{report_type.replace('_', '-')}-report",
metavar='DIR',
- dest='special-opts:%s_report' % report_type)
+ dest=f'special-opts:{report_type}_report')

other_group = parser.add_argument_group(
title='Miscellaneous')
@@ -918,7 +918,7 @@ def add_invertible_flag(flag: str,
# Don't explicitly test if "config_file is not None" for this check.
# This lets `--config-file=` (an empty string) be used to disable all config files.
if config_file and not os.path.exists(config_file):
parser.error("Cannot find config file '%s'" % config_file)
parser.error(f"Cannot find config file '{config_file}'")

options = Options()

@@ -989,8 +989,7 @@ def set_strict_flags() -> None:

invalid_codes = (enabled_codes | disabled_codes) - valid_error_codes
if invalid_codes:
parser.error("Invalid error code(s): %s" %
', '.join(sorted(invalid_codes)))
parser.error(f"Invalid error code(s): {', '.join(sorted(invalid_codes))}")

options.disabled_error_codes |= {error_codes[code] for code in disabled_codes}
options.enabled_error_codes |= {error_codes[code] for code in enabled_codes}
@@ -1090,17 +1089,17 @@ def process_package_roots(fscache: Optional[FileSystemCache],
package_root = []
for root in options.package_root:
if os.path.isabs(root):
parser.error("Package root cannot be absolute: %r" % root)
parser.error(f"Package root cannot be absolute: {root!r}")
drive, root = os.path.splitdrive(root)
if drive and drive != current_drive:
parser.error("Package root must be on current drive: %r" % (drive + root))
parser.error(f"Package root must be on current drive: {drive + root!r}")
# Empty package root is always okay.
if root:
root = os.path.relpath(root) # Normalize the heck out of it.
if not root.endswith(os.sep):
root = root + os.sep
if root.startswith(dotdotslash):
parser.error("Package root cannot be above current directory: %r" % root)
parser.error(f"Package root cannot be above current directory: {root!r}")
if root in trivial_paths:
root = ''
package_root.append(root)
@@ -1119,9 +1118,9 @@ def process_cache_map(parser: argparse.ArgumentParser,
for i in range(0, n, 3):
source, meta_file, data_file = special_opts.cache_map[i:i + 3]
if source in options.cache_map:
parser.error("Duplicate --cache-map source %s)" % source)
parser.error(f"Duplicate --cache-map source {source})")
if not source.endswith('.py') and not source.endswith('.pyi'):
parser.error("Invalid --cache-map source %s (triple[0] must be *.py[i])" % source)
parser.error(f"Invalid --cache-map source {source} (triple[0] must be *.py[i])")
if not meta_file.endswith('.meta.json'):
parser.error("Invalid --cache-map meta_file %s (triple[1] must be *.meta.json)" %
meta_file)
@@ -1140,7 +1139,7 @@ def maybe_write_junit_xml(td: float, serious: bool, messages: List[str], options

def fail(msg: str, stderr: TextIO, options: Options) -> NoReturn:
"""Fail with a serious error."""
- stderr.write('%s\n' % msg)
+ stderr.write(f'{msg}\n')
maybe_write_junit_xml(0.0, serious=True, messages=[msg], options=options)
sys.exit(2)

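The parser.error call for meta_file in the process_cache_map hunk above stays on %-formatting, presumably because the statement is split across two lines. A hypothetical f-string version, not in this commit, could stay within the line limit via implicit string concatenation:

    meta_file = "foo.txt"
    old = ("Invalid --cache-map meta_file %s (triple[1] must be *.meta.json)" %
           meta_file)
    new = (f"Invalid --cache-map meta_file {meta_file} "
           "(triple[1] must be *.meta.json)")
    assert old == new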
10 changes: 5 additions & 5 deletions mypy/memprofile.py
@@ -33,23 +33,23 @@ def collect_memory_stats() -> Tuple[Dict[str, int],
n = type(obj).__name__
if hasattr(obj, '__dict__'):
# Keep track of which class a particular __dict__ is associated with.
- inferred[id(obj.__dict__)] = '%s (__dict__)' % n
+ inferred[id(obj.__dict__)] = f'{n} (__dict__)'
if isinstance(obj, (Node, Type)): # type: ignore
if hasattr(obj, '__dict__'):
for x in obj.__dict__.values():
if isinstance(x, list):
# Keep track of which node a list is associated with.
- inferred[id(x)] = '%s (list)' % n
+ inferred[id(x)] = f'{n} (list)'
if isinstance(x, tuple):
# Keep track of which node a list is associated with.
- inferred[id(x)] = '%s (tuple)' % n
+ inferred[id(x)] = f'{n} (tuple)'

for k in get_class_descriptors(type(obj)):
x = getattr(obj, k, None)
if isinstance(x, list):
- inferred[id(x)] = '%s (list)' % n
+ inferred[id(x)] = f'{n} (list)'
if isinstance(x, tuple):
- inferred[id(x)] = '%s (tuple)' % n
+ inferred[id(x)] = f'{n} (tuple)'

freqs: Dict[str, int] = {}
memuse: Dict[str, int] = {}
… (36 more changed files not shown)
