diff --git a/src/python/pants/backend/authentication/netrc_util.py b/src/python/pants/backend/authentication/netrc_util.py index 25cca34183a..0e9d2be1eef 100644 --- a/src/python/pants/backend/authentication/netrc_util.py +++ b/src/python/pants/backend/authentication/netrc_util.py @@ -48,4 +48,4 @@ def _ensure_loaded(self): if len(self._login) == 0: raise self.NetrcError('Found no usable authentication blocks in ~/.netrc') except NetrcParseError as e: - raise self.NetrcError('Problem parsing ~/.netrc: %s' % e) + raise self.NetrcError('Problem parsing ~/.netrc: {}'.format(e)) diff --git a/src/python/pants/backend/codegen/targets/java_thrift_library.py b/src/python/pants/backend/codegen/targets/java_thrift_library.py index 549136a3e5f..678416fb80e 100644 --- a/src/python/pants/backend/codegen/targets/java_thrift_library.py +++ b/src/python/pants/backend/codegen/targets/java_thrift_library.py @@ -46,8 +46,8 @@ def __init__(self, def check_value_for_arg(arg, value, values): if value and value not in values: - raise TargetDefinitionException(self, "%s may only be set to %s ('%s' not valid)" % - (arg, ', or '.join(map(repr, values)), value)) + raise TargetDefinitionException(self, "{} may only be set to {} ('{}' not valid)" + .format(arg, ', or '.join(map(repr, values)), value)) return value # TODO(pl): These should all live in payload fields diff --git a/src/python/pants/backend/codegen/tasks/antlr_gen.py b/src/python/pants/backend/codegen/tasks/antlr_gen.py index 6a358ad70b8..309328d6a4b 100644 --- a/src/python/pants/backend/codegen/tasks/antlr_gen.py +++ b/src/python/pants/backend/codegen/tasks/antlr_gen.py @@ -51,7 +51,7 @@ def genlangs(self): def genlang(self, lang, targets): if lang != 'java': - raise TaskError('Unrecognized antlr gen lang: %s' % lang) + raise TaskError('Unrecognized antlr gen lang: {}'.format(lang)) # TODO: Instead of running the compiler for each target, collect the targets # by type and invoke it twice, once for antlr3 and once for antlr4. @@ -87,7 +87,7 @@ def genlang(self, lang, targets): result = self.runjava(classpath=antlr_classpath, main=java_main, args=args, workunit_name='antlr') if result != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (java_main, result)) + raise TaskError('java {} ... exited non-zero ({})'.format(java_main, result)) # This checks to make sure that all of the sources have an identical package source structure, and # if they do, uses that as the package. 
If they are different, then the user will need to set the @@ -111,7 +111,7 @@ def collect_sources(target): def createtarget(self, lang, gentarget, dependees): if lang != 'java': - raise TaskError('Unrecognized antlr gen lang: %s' % lang) + raise TaskError('Unrecognized antlr gen lang: {}'.format(lang)) return self._create_java_target(gentarget, dependees) def _create_java_target(self, target, dependees): diff --git a/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py b/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py index 742acb6d8cf..493028738ff 100644 --- a/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py +++ b/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py @@ -32,7 +32,7 @@ def _copytree(from_base, to_base): def abort(error): - raise TaskError('Failed to copy from %s to %s: %s' % (from_base, to_base, error)) + raise TaskError('Failed to copy from {} to {}: {}'.format(from_base, to_base, error)) # TODO(John Sirois): Consider adding a unit test and lifting this to common/dirutils or similar def safe_link(src, dst): @@ -142,7 +142,7 @@ def genlang(self, lang, targets): elif lang == 'python': gen = self.gen_python.gen else: - raise TaskError('Unrecognized thrift gen lang: %s' % lang) + raise TaskError('Unrecognized thrift gen lang: {}'.format(lang)) args = [ self.thrift_binary, @@ -159,7 +159,7 @@ def genlang(self, lang, targets): sessions = [] for source in sources: - self.context.log.info('Generating thrift for %s\n' % source) + self.context.log.info('Generating thrift for {}\n'.format(source)) # Create a unique session dir for this thrift root. Sources may be full paths but we only # need the path relative to the build root to ensure uniqueness. # TODO(John Sirois): file paths should be normalized early on and uniformly, fix the need to @@ -172,7 +172,7 @@ def genlang(self, lang, targets): cmd = args[:] cmd.extend(('-o', outdir)) cmd.append(relsource) - self.context.log.debug('Executing: %s' % ' '.join(cmd)) + self.context.log.debug('Executing: {}'.format(' '.join(cmd))) sessions.append(self.ThriftSession(outdir, cmd, subprocess.Popen(cmd))) result = 0 @@ -182,11 +182,11 @@ def genlang(self, lang, targets): else: result = session.process.wait() if result != 0: - self.context.log.error('Failed: %s' % ' '.join(session.cmd)) + self.context.log.error('Failed: {}'.format(' '.join(session.cmd))) else: _copytree(session.outdir, self.combined_dir) if result != 0: - raise TaskError('%s ... exited non-zero (%i)' % (self.thrift_binary, result)) + raise TaskError('{} ... 
exited non-zero ({})'.format(self.thrift_binary, result)) def createtarget(self, lang, gentarget, dependees): if lang == 'java': @@ -194,7 +194,7 @@ def createtarget(self, lang, gentarget, dependees): elif lang == 'python': return self._create_python_target(gentarget, dependees) else: - raise TaskError('Unrecognized thrift gen lang: %s' % lang) + raise TaskError('Unrecognized thrift gen lang: {}'.format(lang)) def _create_java_target(self, target, dependees): def create_target(files, deps): @@ -280,7 +280,7 @@ def calculate_gen(source): def calculate_python_genfiles(namespace, types): basepath = namespace.replace('.', '/') def path(name): - return os.path.join(basepath, '%s.py' % name) + return os.path.join(basepath, '{}.py'.format(name)) yield path('__init__') if 'const' in types: yield path('constants') @@ -288,13 +288,13 @@ def path(name): yield path('ttypes') for service in types['service']: yield path(service) - yield os.path.join(basepath, '%s-remote' % service) + yield os.path.join(basepath, '{}-remote'.format(service)) def calculate_java_genfiles(namespace, types): basepath = namespace.replace('.', '/') def path(name): - return os.path.join(basepath, '%s.java' % name) + return os.path.join(basepath, '{}.java'.format(name)) if 'const' in types: yield path('Constants') for typename in ['enum', 'exception', 'service', 'struct', 'union']: diff --git a/src/python/pants/backend/codegen/tasks/code_gen.py b/src/python/pants/backend/codegen/tasks/code_gen.py index 5fc21905200..62bacf907d7 100644 --- a/src/python/pants/backend/codegen/tasks/code_gen.py +++ b/src/python/pants/backend/codegen/tasks/code_gen.py @@ -115,10 +115,10 @@ def find_gentargets(predicate): forced = True gentargets_bylang[lang] = gentargets if self.is_forced(lang) else find_gentargets(predicate) if not forced and gentargets_by_dependee: - self.context.log.warn('Left with unexpected unconsumed gen targets:\n\t%s' % '\n\t'.join( - '%s -> %s' % (dependee, gentargets) - for dependee, gentargets in gentargets_by_dependee.items() - )) + self.context.log.warn('Left with unexpected unconsumed gen targets:\n\t{}'.format('\n\t'.join( + '{} -> {}'.format(dependee, gentargets) + for dependee, gentargets in gentargets_by_dependee.items() + ))) if gentargets: self.prepare_gen(gentargets) diff --git a/src/python/pants/backend/codegen/tasks/jaxb_gen.py b/src/python/pants/backend/codegen/tasks/jaxb_gen.py index 6e6eb0ab93e..587e07324f3 100644 --- a/src/python/pants/backend/codegen/tasks/jaxb_gen.py +++ b/src/python/pants/backend/codegen/tasks/jaxb_gen.py @@ -52,7 +52,7 @@ def prepare_gen(self, target): def genlang(self, lang, targets): if lang != 'java': - raise TaskError('Unrecognized jaxb language: %s' % lang) + raise TaskError('Unrecognized jaxb language: {}'.format(lang)) output_dir = os.path.join(self.workdir, 'gen-java') safe_mkdir(output_dir) cache = [] @@ -137,7 +137,7 @@ def _correct_package(self, package): package = re.sub(r'^\.+', '', package) package = re.sub(r'\.+$', '', package) if re.search(r'\.{2,}', package) is not None: - raise ValueError('Package name cannot have consecutive periods! (%s)' % package) + raise ValueError('Package name cannot have consecutive periods! 
({})'.format(package)) return package @classmethod @@ -170,4 +170,4 @@ def _sources_to_be_generated(self, package, path): names.append('ObjectFactory') outdir = package.replace('.', '/') - return [os.path.join(outdir, '%s.java' % name) for name in names] + return [os.path.join(outdir, '{}.java'.format(name)) for name in names] diff --git a/src/python/pants/backend/core/targets/doc.py b/src/python/pants/backend/core/targets/doc.py index 2e1d8dcfae1..38db8358507 100644 --- a/src/python/pants/backend/core/targets/doc.py +++ b/src/python/pants/backend/core/targets/doc.py @@ -105,7 +105,7 @@ def __init__(self, super(Page, self).__init__(address=address, payload=payload, **kwargs) if provides and not isinstance(provides[0], WikiArtifact): - raise ValueError('Page must provide a wiki_artifact. Found instead: %s' % provides) + raise ValueError('Page must provide a wiki_artifact. Found instead: {}'.format(provides)) @property def source(self): diff --git a/src/python/pants/backend/core/tasks/builddictionary.py b/src/python/pants/backend/core/tasks/builddictionary.py index f3a8776e50f..cddb915d237 100644 --- a/src/python/pants/backend/core/tasks/builddictionary.py +++ b/src/python/pants/backend/core/tasks/builddictionary.py @@ -82,7 +82,7 @@ def _gen_build_dictionary(self): # generate rst template = resource_string(__name__, os.path.join(self._templates_dir, 'page.mustache')) filename = os.path.join(self._outdir, 'build_dictionary.rst') - self.context.log.info('Generating %s' % filename) + self.context.log.info('Generating {}'.format(filename)) with safe_open(filename, 'wb') as outfile: generator = Generator(template, tocs=tocs, @@ -91,7 +91,7 @@ def _gen_build_dictionary(self): # generate html template = resource_string(__name__, os.path.join(self._templates_dir, 'bdict_html.mustache')) filename = os.path.join(self._outdir, 'build_dictionary.html') - self.context.log.info('Generating %s' % filename) + self.context.log.info('Generating {}'.format(filename)) with safe_open(filename, 'wb') as outfile: generator = Generator(template, tocs=tocs, @@ -113,7 +113,7 @@ def _gen_options_reference(self): template = resource_string(__name__, os.path.join(self._templates_dir, 'options_reference.mustache')) filename = os.path.join(self._outdir, 'options_reference.rst') - self.context.log.info('Generating %s' % filename) + self.context.log.info('Generating {}'.format(filename)) with safe_open(filename, 'wb') as outfile: generator = Generator(template, goals=filtered_goals, glopts=glopts) generator.write(outfile) @@ -122,7 +122,7 @@ def _gen_options_reference(self): template = resource_string(__name__, os.path.join(self._templates_dir, 'oref_html.mustache')) filename = os.path.join(self._outdir, 'options_reference.html') - self.context.log.info('Generating %s' % filename) + self.context.log.info('Generating {}'.format(filename)) with safe_open(filename, 'wb') as outfile: generator = Generator(template, goals=filtered_goals, glopts=glopts) generator.write(outfile) diff --git a/src/python/pants/backend/core/tasks/clean.py b/src/python/pants/backend/core/tasks/clean.py index ab1d4407c50..7e886fb7e1b 100644 --- a/src/python/pants/backend/core/tasks/clean.py +++ b/src/python/pants/backend/core/tasks/clean.py @@ -17,7 +17,7 @@ def _cautious_rmtree(root): real_buildroot = os.path.realpath(os.path.abspath(get_buildroot())) real_root = os.path.realpath(os.path.abspath(root)) if not real_root.startswith(real_buildroot): - raise TaskError('DANGER: Attempting to delete %s, which is not under the build root!') + raise 
TaskError('DANGER: Attempting to delete {}, which is not under the build root!'.format(real_root)) safe_rmtree(real_root) diff --git a/src/python/pants/backend/core/tasks/confluence_publish.py b/src/python/pants/backend/core/tasks/confluence_publish.py index 416771e0a53..d204d800658 100644 --- a/src/python/pants/backend/core/tasks/confluence_publish.py +++ b/src/python/pants/backend/core/tasks/confluence_publish.py @@ -70,10 +70,10 @@ def execute(self): for page, wiki_artifact in pages: html_info = genmap.get((wiki_artifact, page)) if len(html_info) > 1: - raise TaskError('Unexpected resources for %s: %s' % (page, html_info)) + raise TaskError('Unexpected resources for {}: {}'.format(page, html_info)) basedir, htmls = html_info.items()[0] if len(htmls) != 1: - raise TaskError('Unexpected resources for %s: %s' % (page, htmls)) + raise TaskError('Unexpected resources for {}: {}'.format(page, htmls)) with safe_open(os.path.join(basedir, htmls[0])) as contents: url = self.publish_page( page.address, @@ -85,7 +85,7 @@ def execute(self): ) if url: urls.append(url) - self.context.log.info('Published %s to %s' % (page, url)) + self.context.log.info('Published {} to {}'.format(page, url)) if self.open and urls: binary_util.ui_open(*urls) @@ -93,10 +93,10 @@ def execute(self): def publish_page(self, address, space, title, content, parent=None): body = textwrap.dedent(''' - + - %s - ''').strip() % (address, content) + {} + ''').strip().format(address, content) pageopts = dict( versionComment = 'updated by pants!' @@ -105,7 +105,7 @@ def publish_page(self, address, space, title, content, parent=None): existing = wiki.getpage(space, title) if existing: if not self.force and existing['content'].strip() == body.strip(): - self.context.log.warn("Skipping publish of '%s' - no changes" % title) + self.context.log.warn("Skipping publish of '{}' - no changes".format(title)) return pageopts['id'] = existing['id'] @@ -115,12 +115,12 @@ def publish_page(self, address, space, title, content, parent=None): page = wiki.create_html_page(space, title, body, parent, **pageopts) return page['url'] except ConfluenceError as e: - raise TaskError('Failed to update confluence: %s' % e) + raise TaskError('Failed to update confluence: {}'.format(e)) def login(self): if not self._wiki: try: self._wiki = Confluence.login(self.url, self.user, self.api()) except ConfluenceError as e: - raise TaskError('Failed to login to confluence: %s' % e) + raise TaskError('Failed to login to confluence: {}'.format(e)) return self._wiki diff --git a/src/python/pants/backend/core/tasks/dependees.py b/src/python/pants/backend/core/tasks/dependees.py index fd9fe4ad3d1..0c134e0898d 100644 --- a/src/python/pants/backend/core/tasks/dependees.py +++ b/src/python/pants/backend/core/tasks/dependees.py @@ -49,7 +49,7 @@ def console_output(self, _): for dependees_type in self._dependees_type: target_aliases = self.context.build_file_parser.registered_aliases().targets if dependees_type not in target_aliases: - raise TaskError('Invalid type name: %s' % dependees_type) + raise TaskError('Invalid type name: {}'.format(dependees_type)) target_type = target_aliases[dependees_type] # Try to find the SourceRoot for the given input type try: @@ -59,9 +59,9 @@ def console_output(self, _): pass if not base_paths: - raise TaskError('No SourceRoot set for any target type in %s.' 
% self._dependees_type + + raise TaskError('No SourceRoot set for any target type in {}.'.format(self._dependees_type) + '\nPlease define a source root in BUILD file as:' + - '\n\tsource_root(\'\', %s)' % ', '.join(self._dependees_type)) + '\n\tsource_root(\'\', {})'.format(', '.join(self._dependees_type))) for base_path in base_paths: buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), os.path.join(get_buildroot(), base_path), diff --git a/src/python/pants/backend/core/tasks/filemap.py b/src/python/pants/backend/core/tasks/filemap.py index 09eb084cd95..be90ea0a071 100644 --- a/src/python/pants/backend/core/tasks/filemap.py +++ b/src/python/pants/backend/core/tasks/filemap.py @@ -19,7 +19,7 @@ def console_output(self, _): if target not in visited: visited.add(target) for rel_source in target.sources_relative_to_buildroot(): - yield '%s %s' % (rel_source, target.address.spec) + yield '{} {}'.format(rel_source, target.address.spec) def _find_targets(self): if len(self.context.target_roots) > 0: diff --git a/src/python/pants/backend/core/tasks/filter.py b/src/python/pants/backend/core/tasks/filter.py index b62b75dd6e7..f7a9f8e4546 100644 --- a/src/python/pants/backend/core/tasks/filter.py +++ b/src/python/pants/backend/core/tasks/filter.py @@ -90,16 +90,16 @@ def filter_for_type(name): try: # Try to do a fully qualified import 1st for filtering on custom types. from_list, module, type_name = name.rsplit('.', 2) - module = __import__('%s.%s' % (from_list, module), fromlist=[from_list]) + module = __import__('{}.{}'.format(from_list, module), fromlist=[from_list]) target_type = getattr(module, type_name) except (ImportError, ValueError): # Fall back on pants provided target types. registered_aliases = self.context.build_file_parser.registered_aliases() if name not in registered_aliases.targets: - raise TaskError('Invalid type name: %s' % name) + raise TaskError('Invalid type name: {}'.format(name)) target_type = registered_aliases.targets[name] if not issubclass(target_type, Target): - raise TaskError('Not a Target type: %s' % name) + raise TaskError('Not a Target type: {}'.format(name)) return lambda target: isinstance(target, target_type) self._filters.extend(_create_filters(self.get_options().type, filter_for_type)) diff --git a/src/python/pants/backend/core/tasks/group_task.py b/src/python/pants/backend/core/tasks/group_task.py index 1dc7e4ac02e..44121fa592e 100644 --- a/src/python/pants/backend/core/tasks/group_task.py +++ b/src/python/pants/backend/core/tasks/group_task.py @@ -248,9 +248,9 @@ def group_name(self): cls._GROUPS[name] = group_task if group_task.product_types() != product_type: - raise ValueError('The group %r was already registered with product type: %r - refusing to ' - 'overwrite with new product type: %r' % (name, group_task.product_types(), - product_type)) + raise ValueError('The group {!r} was already registered with product type: {!r} - refusing to ' + 'overwrite with new product type: {!r}'.format(name, group_task.product_types(), + product_type)) return group_task @@ -271,7 +271,7 @@ def add_member(cls, group_member): """ if not issubclass(group_member, GroupMember): raise ValueError('Only GroupMember subclasses can join a GroupTask, ' - 'given %s of type %s' % (group_member, type(group_member))) + 'given {} of type {}'.format(group_member, type(group_member))) group_member.options_scope = Goal.scope(cls.parent_options_scope, group_member.name()) cls._member_types().append(group_member) @@ -306,9 +306,9 @@ def execute(self): 
ordered_chunks.append((group_member, chunk)) chunks_by_member[group_member].append(chunk) - self.context.log.debug('::: created chunks(%d)' % len(ordered_chunks)) + self.context.log.debug('::: created chunks({})'.format(len(ordered_chunks))) for i, (group_member, goal_chunk) in enumerate(ordered_chunks): - self.context.log.debug(' chunk(%d) [flavor=%s]:\n\t%s' % ( + self.context.log.debug(' chunk({}) [flavor={}]:\n\t{}'.format( i, group_member.name(), '\n\t'.join(sorted(map(str, goal_chunk))))) # prep diff --git a/src/python/pants/backend/core/tasks/list_goals.py b/src/python/pants/backend/core/tasks/list_goals.py index 05a2926f231..2810b3d451e 100644 --- a/src/python/pants/backend/core/tasks/list_goals.py +++ b/src/python/pants/backend/core/tasks/list_goals.py @@ -31,19 +31,19 @@ def report(): elif self.get_options().all: undocumented.append(goal.name) for name, description in documented_rows: - yield ' %s: %s' % (name.rjust(max_width), description) + yield ' {}: {}'.format(name.rjust(max_width), description) if undocumented: yield '' yield 'Undocumented goals:' - yield ' %s' % ' '.join(undocumented) + yield ' {}'.format(' '.join(undocumented)) def graph(): # TODO(John Sirois): re-work and re-enable: https://github.com/pantsbuild/pants/issues/918 # def get_cluster_name(goal): - # return 'cluster_%s' % goal.name.replace('-', '_') + # return 'cluster_{}'.format(goal.name.replace('-', '_')) # # def get_node_name(goal, task_name): - # name = '%s_%s' % (goal.name, task_name) + # name = '{}_{}'.format(goal.name, task_name) # return name.replace('-', '_') # # yield '\n'.join([ @@ -53,26 +53,26 @@ def graph(): # ]) # for goal in Goal.all(): # yield '\n'.join([ - # ' subgraph %s {' % get_cluster_name(goal), + # ' subgraph {} {{'.format(get_cluster_name(goal)), # ' node [style=filled];', # ' color = blue;', - # ' label = "%s";' % goal.name, + # ' label = "{}";'.format(goal.name), # ]) # for name in goal.ordered_task_names(): - # yield ' %s [label="%s"];' % (get_node_name(goal, name), name) + # yield ' {} [label="{}"];'.format(get_node_name(goal, name), name) # yield ' }' # # edges = set() # for goal in Goal.all(): # tail_task_name = goal.ordered_task_names()[-1] # for dep in goal.dependencies: - # edge = 'ltail=%s lhead=%s' % (get_cluster_name(goal), get_cluster_name(dep)) + # edge = 'ltail={} lhead={}'.format(get_cluster_name(goal), get_cluster_name(dep)) # if edge not in edges: # # We display edges between clusters (representing goals), but dot still requires # # us to specify them between nodes (representing tasks) and then add ltail, lhead # # annotations. We connect the last task in the dependee to the first task in # # the dependency, as this leads to the neatest-looking graph. 
- # yield ' %s -> %s [%s];' % (get_node_name(goal, tail_task_name), + # yield ' {} -> {} [{}];'.format(get_node_name(goal, tail_task_name), # get_node_name(dep, dep.ordered_task_names()[0]), edge) # edges.add(edge) # yield '}' diff --git a/src/python/pants/backend/core/tasks/markdown_to_html.py b/src/python/pants/backend/core/tasks/markdown_to_html.py index 52a9ca0f583..f3a2ba881b2 100644 --- a/src/python/pants/backend/core/tasks/markdown_to_html.py +++ b/src/python/pants/backend/core/tasks/markdown_to_html.py @@ -229,7 +229,7 @@ def execute(self): css_path = os.path.join(outdir, 'css', 'codehighlight.css') css = emit_codehighlight_css(css_path, self.code_style) if css: - self.context.log.info('Emitted %s' % css) + self.context.log.info('Emitted {}'.format(css)) def is_page(target): return isinstance(target, Page) @@ -267,7 +267,7 @@ def process_page(key, outdir, url_builder, config, genmap, fragment=False): config, css=css ) - self.context.log.info('Processed %s to %s' % (page.source, html_path)) + self.context.log.info('Processed {} to {}'.format(page.source, html_path)) relpath = os.path.relpath(html_path, outdir) genmap.add(key, outdir, [relpath]) return html_path @@ -308,7 +308,7 @@ def parse_url(spec): page = self.context.build_graph.get_target(address) anchor = match.group(2) or '' if not page: - raise TaskError('Invalid markdown link to pants target: "%s". ' % match.group(1) + + raise TaskError('Invalid markdown link to pants target: "{}". '.format(match.group(1)) + 'Is your page missing a dependency on this target?') alias, url = url_builder(page, config=get_config(page)) return alias, url + anchor diff --git a/src/python/pants/backend/core/tasks/paths.py b/src/python/pants/backend/core/tasks/paths.py index 70d68dd979d..84f05ca66f9 100644 --- a/src/python/pants/backend/core/tasks/paths.py +++ b/src/python/pants/backend/core/tasks/paths.py @@ -20,14 +20,14 @@ def __init__(self, *args, **kwargs): @classmethod def _find_paths(cls, from_target, to_target, log): - log.debug('Looking for all paths from %s to %s' % (from_target.address.reference(), - to_target.address.reference())) + log.debug('Looking for all paths from {} to {}'.format(from_target.address.reference(), + to_target.address.reference())) paths = cls._find_paths_rec(from_target, to_target) - print('Found %d paths' % len(paths)) + print('Found {} paths'.format(len(paths))) print('') for path in paths: - log.debug('\t[%s]' % ', '.join([target.address.reference() for target in path])) + log.debug('\t[{}]'.format(', '.join([target.address.reference() for target in path]))) all_paths = defaultdict(lambda: defaultdict(list)) @classmethod @@ -51,14 +51,14 @@ def _find_paths_rec(cls, from_target, to_target): @classmethod def _find_path(cls, from_target, to_target, log): - log.debug('Looking for path from %s to %s' % (from_target.address.reference(), - to_target.address.reference())) + log.debug('Looking for path from {} to {}'.format(from_target.address.reference(), + to_target.address.reference())) queue = [([from_target], 0)] while True: if not queue: - print('no path found from %s to %s!' 
% (from_target.address.reference(), - to_target.address.reference())) + print('no path found from {} to {}!'.format(from_target.address.reference(), + to_target.address.reference())) break path, indent = queue.pop(0) @@ -67,12 +67,12 @@ def _find_path(cls, from_target, to_target, log): continue cls.examined_targets.add(next_target) - log.debug('%sexamining %s' % (' ' * indent, next_target)) + log.debug('{} examining {}'.format(' ' * indent, next_target)) if next_target == to_target: print('') for target in path: - print('%s' % target.address.reference()) + print('{}'.format(target.address.reference())) break for dep in next_target.dependencies: @@ -82,7 +82,7 @@ def _find_path(cls, from_target, to_target, log): class Path(PathFinder): def execute(self): if len(self.target_roots) != 2: - raise TaskError('Specify two targets please (found %d)' % len(self.target_roots)) + raise TaskError('Specify two targets please (found {})'.format(len(self.target_roots))) self._find_path(self.target_roots[0], self.target_roots[1], self.log) @@ -90,6 +90,6 @@ def execute(self): class Paths(PathFinder): def execute(self): if len(self.target_roots) != 2: - raise TaskError('Specify two targets please (found %d)' % len(self.target_roots)) + raise TaskError('Specify two targets please (found {})'.format(len(self.target_roots))) self._find_paths(self.target_roots[0], self.target_roots[1], self.log) diff --git a/src/python/pants/backend/core/tasks/reflect.py b/src/python/pants/backend/core/tasks/reflect.py index b2521071c87..8ad3b0e581a 100644 --- a/src/python/pants/backend/core/tasks/reflect.py +++ b/src/python/pants/backend/core/tasks/reflect.py @@ -366,8 +366,8 @@ def entry_for_one(nom, sym): if inspect.ismethod(sym) or inspect.isfunction(sym): return entry_for_one_func(nom, sym) return msg_entry(nom, - "TODO! no doc gen for %s %s" % (str(type(sym)), str(sym)), - "TODO! no doc gen for %s %s" % (str(type(sym)), str(sym))) + "TODO! no doc gen for {} {}".format(str(type(sym)), str(sym)), + "TODO! 
no doc gen for {} {}".format(str(type(sym)), str(sym))) PREDEFS = { # some hardwired entries diff --git a/src/python/pants/backend/core/tasks/roots.py b/src/python/pants/backend/core/tasks/roots.py index a4eda6ca33c..b98cf5dcb30 100644 --- a/src/python/pants/backend/core/tasks/roots.py +++ b/src/python/pants/backend/core/tasks/roots.py @@ -15,4 +15,4 @@ class ListRoots(ConsoleTask): def console_output(self, targets): for src_root, targets in SourceRoot.all_roots().items(): all_targets = ','.join(sorted([tgt.__name__ for tgt in targets])) - yield '%s: %s' % (src_root, all_targets or '*') + yield '{}: {}'.format(src_root, all_targets or '*') diff --git a/src/python/pants/backend/core/tasks/scm_publish.py b/src/python/pants/backend/core/tasks/scm_publish.py index f5cf6a010e4..047eddc10ec 100644 --- a/src/python/pants/backend/core/tasks/scm_publish.py +++ b/src/python/pants/backend/core/tasks/scm_publish.py @@ -73,8 +73,8 @@ def to_i(component): try: return int(component) except (TypeError, ValueError): - raise ValueError('Invalid revision component %s in %s - ' - 'must be an integer' % (component, version)) + raise ValueError('Invalid revision component {} in {} - ' + 'must be an integer'.format(component, version)) return Semver(to_i(major), to_i(minor), to_i(patch)) def __init__(self, major, minor, patch, snapshot=False): @@ -91,11 +91,9 @@ def make_snapshot(self): return Semver(self.major, self.minor, self.patch, snapshot=True) def version(self): - return '%s.%s.%s' % ( - self.major, - self.minor, - ('%s-SNAPSHOT' % self.patch) if self.snapshot else self.patch - ) + return '{}.{}.{}'.format(self.major, + self.minor, + ('{}-SNAPSHOT'.format(self.patch)) if self.snapshot else self.patch) def __eq__(self, other): return self.__cmp__(other) == 0 @@ -116,7 +114,7 @@ def __cmp__(self, other): return diff def __repr__(self): - return 'Semver(%s)' % self.version() + return 'Semver({})'.format(self.version()) class ScmPublishMixin(object): diff --git a/src/python/pants/backend/core/tasks/task.py b/src/python/pants/backend/core/tasks/task.py index 755f7654420..cd191df66a9 100644 --- a/src/python/pants/backend/core/tasks/task.py +++ b/src/python/pants/backend/core/tasks/task.py @@ -344,7 +344,7 @@ def invalidated(self, msg_elements = ['Invalidated ', items_to_report_element([t.address.reference() for t in targets], 'target')] if num_invalid_partitions > 1: - msg_elements.append(' in %d target partitions' % num_invalid_partitions) + msg_elements.append(' in {} target partitions'.format(num_invalid_partitions)) msg_elements.append('.') self.context.log.info(*msg_elements) @@ -460,8 +460,8 @@ def require_single_root_target(self): if len(target_roots) == 0: raise TaskError('No target specified.') elif len(target_roots) > 1: - raise TaskError('Multiple targets specified: %s' % - ', '.join([repr(t) for t in target_roots])) + raise TaskError('Multiple targets specified: {}' + .format(', '.join([repr(t) for t in target_roots]))) return target_roots[0] def require_homogeneous_targets(self, accept_predicate, reject_predicate): @@ -489,8 +489,8 @@ def require_homogeneous_targets(self, accept_predicate, reject_predicate): # both accepted and rejected targets # TODO: once https://github.com/pantsbuild/pants/issues/425 lands, we should add # language-specific flags that would resolve the ambiguity here - raise TaskError('Mutually incompatible targets specified: %s vs %s (and %d others)' % - (accepted[0], rejected[0], len(accepted) + len(rejected) - 2)) + raise TaskError('Mutually incompatible targets specified: {} 
vs {} (and {} others)' + .format(accepted[0], rejected[0], len(accepted) + len(rejected) - 2)) class Task(TaskBase): diff --git a/src/python/pants/backend/core/wrapped_globs.py b/src/python/pants/backend/core/wrapped_globs.py index 50918027836..fa917aed772 100644 --- a/src/python/pants/backend/core/wrapped_globs.py +++ b/src/python/pants/backend/core/wrapped_globs.py @@ -31,7 +31,7 @@ def __call__(self, *args, **kwargs): for glob in args: if(self._is_glob_dir_outside_root(glob, root)): - raise ValueError('Invalid glob %s, points outside BUILD file root dir %s' % (glob, root)) + raise ValueError('Invalid glob {}, points outside BUILD file root dir {}'.format(glob, root)) result = self.wrapped_fn(root=root, *args, **kwargs) diff --git a/src/python/pants/backend/jvm/artifact.py b/src/python/pants/backend/jvm/artifact.py index 9a9f39927f4..33584849da7 100644 --- a/src/python/pants/backend/jvm/artifact.py +++ b/src/python/pants/backend/jvm/artifact.py @@ -27,15 +27,15 @@ def __init__(self, org, name, repo, description=None): :param string description: Description of this artifact. """ if not isinstance(org, string_types): - raise ValueError("org must be %s but was %s" % (string_types, org)) + raise ValueError("org must be {} but was {}".format(string_types, org)) if not isinstance(name, string_types): - raise ValueError("name must be %s but was %s" % (string_types, name)) + raise ValueError("name must be {} but was {}".format(string_types, name)) if not isinstance(repo, Repository): raise ValueError("repo must be an instance of Repository") if description is not None and not isinstance(description, string_types): - raise ValueError("description must be None or %s but was %s" - % (string_types, description)) + raise ValueError("description must be None or {} but was {}" + .format(string_types, description)) self.org = org self.name = name @@ -58,4 +58,4 @@ def __ne__(self, other): return not self.__eq__(other) def __repr__(self): - return "%s-%s -> %s" % (self.org, self.name, self.repo) + return "{}-{} -> {}".format(self.org, self.name, self.repo) diff --git a/src/python/pants/backend/jvm/repository.py b/src/python/pants/backend/jvm/repository.py index b66a2d99690..9628056a356 100644 --- a/src/python/pants/backend/jvm/repository.py +++ b/src/python/pants/backend/jvm/repository.py @@ -39,4 +39,4 @@ def __ne__(self, other): return not self.__eq__(other) def __repr__(self): - return "%s -> %s (%s)" % (self.name, self.url, self.push_db_basedir) + return "{} -> {} ({})".format(self.name, self.url, self.push_db_basedir) diff --git a/src/python/pants/backend/jvm/targets/jar_dependency.py b/src/python/pants/backend/jvm/targets/jar_dependency.py index 36996bb275e..3ce5d8dbb11 100644 --- a/src/python/pants/backend/jvm/targets/jar_dependency.py +++ b/src/python/pants/backend/jvm/targets/jar_dependency.py @@ -54,8 +54,8 @@ def cache_key(self): return ''.join(str(getattr(self, key)) for key in self._HASH_KEYS) def __repr__(self): - return ('IvyArtifact(%r, type_=%r, ext=%r, conf=%r, url=%r, classifier=%r)' - % (self.name, self.type_, self.ext, self.conf, self.url, self.classifier)) + return ('IvyArtifact({!r}, type_={!r}, ext={!r}, conf={!r}, url={!r}, classifier={!r})' + .format(self.name, self.type_, self.ext, self.conf, self.url, self.classifier)) diff --git a/src/python/pants/backend/jvm/targets/jarable.py b/src/python/pants/backend/jvm/targets/jarable.py index e79afa58796..f60e17e9c39 100644 --- a/src/python/pants/backend/jvm/targets/jarable.py +++ b/src/python/pants/backend/jvm/targets/jarable.py @@ 
-37,7 +37,7 @@ def get_artifact_info(self): org = self.provides.org if exported else 'internal' module = self.provides.name if exported else self.identifier - id_ = "%s-%s" % (self.provides.org, self.provides.name) if exported else self.identifier + id_ = "{}-{}".format(self.provides.org, self.provides.name) if exported else self.identifier # TODO(John Sirois): This should return something less than a JarDependency encapsulating just # the org and name. Perhaps a JarFamily? diff --git a/src/python/pants/backend/jvm/targets/java_agent.py b/src/python/pants/backend/jvm/targets/java_agent.py index bc674caf44b..0164df1875e 100644 --- a/src/python/pants/backend/jvm/targets/java_agent.py +++ b/src/python/pants/backend/jvm/targets/java_agent.py @@ -52,12 +52,12 @@ def __init__(self, "defined.") if premain and not isinstance(premain, string_types): raise TargetDefinitionException(self, 'The premain must be a fully qualified class name, ' - 'given %s of type %s' % (premain, type(premain))) + 'given {} of type {}'.format(premain, type(premain))) if agent_class and not isinstance(agent_class, string_types): raise TargetDefinitionException(self, 'The agent_class must be a fully qualified class name, given ' - '%s of type %s' % (agent_class, type(agent_class))) + '{} of type {}'.format(agent_class, type(agent_class))) self._premain = premain self._agent_class = agent_class diff --git a/src/python/pants/backend/jvm/targets/jvm_app.py b/src/python/pants/backend/jvm/targets/jvm_app.py index 379a41d960e..9b16f8f9661 100644 --- a/src/python/pants/backend/jvm/targets/jvm_app.py +++ b/src/python/pants/backend/jvm/targets/jvm_app.py @@ -29,7 +29,7 @@ def __call__(self, file): return os.path.relpath(file, self.base) def __repr__(self): - return 'IdentityMapper(%s)' % self.base + return 'IdentityMapper({})'.format(self.base) def __hash__(self): return hash(self.base) @@ -116,7 +116,7 @@ def __init__(self, parse_context, rel_path=None, mapper=None, relative_to=None, if relative_to: base = os.path.join(get_buildroot(), self._rel_path, relative_to) if not os.path.isdir(os.path.join(get_buildroot(), base)): - raise ValueError('Could not find a directory to bundle relative to at %s' % base) + raise ValueError('Could not find a directory to bundle relative to at {}'.format(base)) self.mapper = RelativeToMapper(base) else: self.mapper = mapper or RelativeToMapper(os.path.join(get_buildroot(), self._rel_path)) @@ -134,13 +134,13 @@ def _add(self, filesets): if not os.path.isabs(abspath): abspath = os.path.join(get_buildroot(), self._rel_path, path) if not os.path.exists(abspath): - raise ValueError('Given path: %s with absolute path: %s which does not exist' - % (path, abspath)) + raise ValueError('Given path: {} with absolute path: {} which does not exist' + .format(path, abspath)) self.filemap[abspath] = self.mapper(abspath) return self def __repr__(self): - return 'Bundle(%s, %s)' % (self.mapper, self.filemap) + return 'Bundle({}, {})'.format(self.mapper, self.filemap) class JvmApp(Target): diff --git a/src/python/pants/backend/jvm/targets/scala_library.py b/src/python/pants/backend/jvm/targets/scala_library.py index 71d7a3deb68..102a4c09ddb 100644 --- a/src/python/pants/backend/jvm/targets/scala_library.py +++ b/src/python/pants/backend/jvm/targets/scala_library.py @@ -68,5 +68,5 @@ def java_sources(self): address = SyntheticAddress.parse(spec, relative_to=self.address.spec_path) target = self._build_graph.get_target(address) if target is None: - raise TargetDefinitionException(self, 'No such java target: %s' % 
spec) + raise TargetDefinitionException(self, 'No such java target: {}'.format(spec)) yield target diff --git a/src/python/pants/backend/jvm/tasks/benchmark_run.py b/src/python/pants/backend/jvm/tasks/benchmark_run.py index d30918bcad4..0cada9e98bc 100644 --- a/src/python/pants/backend/jvm/tasks/benchmark_run.py +++ b/src/python/pants/backend/jvm/tasks/benchmark_run.py @@ -73,4 +73,4 @@ def execute(self): workunit_factory=self.context.new_workunit, workunit_name='caliper') if exit_code != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (caliper_main, exit_code)) + raise TaskError('java {} ... exited non-zero ({})'.format(caliper_main, exit_code)) diff --git a/src/python/pants/backend/jvm/tasks/bootstrap_jvm_tools.py b/src/python/pants/backend/jvm/tasks/bootstrap_jvm_tools.py index 1efa32a9708..89842a48c7c 100644 --- a/src/python/pants/backend/jvm/tasks/bootstrap_jvm_tools.py +++ b/src/python/pants/backend/jvm/tasks/bootstrap_jvm_tools.py @@ -70,7 +70,7 @@ def bootstrap_classpath(): with cache_lock: if 'classpath' not in cache: targets = list(self._resolve_tool_targets(tools, key, scope)) - workunit_name = 'bootstrap-%s' % str(key) + workunit_name = 'bootstrap-{!s}'.format(key) cache['classpath'] = self.ivy_resolve(targets, silent=True, workunit_name=workunit_name)[0] diff --git a/src/python/pants/backend/jvm/tasks/bundle_create.py b/src/python/pants/backend/jvm/tasks/bundle_create.py index acdfaa5646b..9102e2553d9 100644 --- a/src/python/pants/backend/jvm/tasks/bundle_create.py +++ b/src/python/pants/backend/jvm/tasks/bundle_create.py @@ -45,7 +45,7 @@ def is_app(target): return isinstance(target, (JvmApp, JvmBinary)) def __init__(self, target): - assert self.is_app(target), '%s is not a valid app target' % target + assert self.is_app(target), '{} is not a valid app target'.format(target) self.binary = target if isinstance(target, JvmBinary) else target.binary self.bundles = [] if isinstance(target, JvmBinary) else target.payload.bundles @@ -63,7 +63,7 @@ def execute(self): app.basename, prefix=app.basename if self._prefix else None ) - self.context.log.info('created %s' % os.path.relpath(archivepath, get_buildroot())) + self.context.log.info('created {}'.format(os.path.relpath(archivepath, get_buildroot()))) def bundle(self, app): """Create a self-contained application bundle. 
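# A minimal standalone sketch (not part of the patch) of the two str.format placeholder
# styles this change adopts; the names basename, dup_list and max_dups below are invented
# for illustration and are not Pants APIs.
basename = 'myapp'
dup_list = ['a.class', 'b.class', 'c.class']
max_dups = 2

# Positional placeholders replace '%s'-style interpolation one-for-one.
bundle_dir = '{}-bundle'.format(basename)          # 'myapp-bundle'

# Named placeholders (as in the '{remaining}' and '{dest}' messages nearby) keep long
# log strings readable and let arguments be referenced by name.
warning = ' ... {remaining} more ...'.format(remaining=len(dup_list) - max_dups)

print(bundle_dir)   # myapp-bundle
print(warning)      #  ... 1 more ...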
@@ -80,7 +80,7 @@ def verbose_symlink(src, dst): raise e bundle_dir = os.path.join(self._outdir, '{}-bundle'.format(app.basename)) - self.context.log.info('creating %s' % os.path.relpath(bundle_dir, get_buildroot())) + self.context.log.info('creating {}'.format(os.path.relpath(bundle_dir, get_buildroot()))) safe_mkdir(bundle_dir, clean=True) diff --git a/src/python/pants/backend/jvm/tasks/check_published_deps.py b/src/python/pants/backend/jvm/tasks/check_published_deps.py index 220b27f933e..0ed98671317 100644 --- a/src/python/pants/backend/jvm/tasks/check_published_deps.py +++ b/src/python/pants/backend/jvm/tasks/check_published_deps.py @@ -55,6 +55,6 @@ def get_version_and_sha(target): artifact_target = self._artifacts_to_targets[artifact] semver, sha = get_version_and_sha(artifact_target) if semver.version() != dep.rev: - yield 'outdated %s#%s %s latest %s' % (dep.org, dep.name, dep.rev, semver.version()) + yield 'outdated {}#{} {} latest {}'.format(dep.org, dep.name, dep.rev, semver.version()) elif self._print_uptodate: - yield 'up-to-date %s#%s %s' % (dep.org, dep.name, semver.version()) + yield 'up-to-date {}#{} {}'.format(dep.org, dep.name, semver.version()) diff --git a/src/python/pants/backend/jvm/tasks/detect_duplicates.py b/src/python/pants/backend/jvm/tasks/detect_duplicates.py index cd3d6081b25..f2a4c1ad505 100644 --- a/src/python/pants/backend/jvm/tasks/detect_duplicates.py +++ b/src/python/pants/backend/jvm/tasks/detect_duplicates.py @@ -77,7 +77,7 @@ def _is_conflicts(self, artifacts_by_file_name, binary_target): if len(conflicts_by_artifacts) > 0: self._log_conflicts(conflicts_by_artifacts, binary_target) if self._fail_fast: - raise TaskError('Failing build for target %s.' % binary_target) + raise TaskError('Failing build for target {}.'.format(binary_target)) return True return False @@ -104,7 +104,7 @@ def _get_external_dependencies(self, binary_target): artifacts_by_file_name = defaultdict(set) for basedir, externaljar in self.list_external_jar_dependencies(binary_target): external_dep = os.path.join(basedir, externaljar) - self.context.log.debug(' scanning %s' % external_dep) + self.context.log.debug(' scanning {}'.format(external_dep)) with open_zip(external_dep) as dep_zip: for qualified_file_name in dep_zip.namelist(): # Zip entry names can come in any encoding and in practice we find some jars that have @@ -127,14 +127,14 @@ def _get_conflicts_by_artifacts(self, artifacts_by_file_name): return conflicts_by_artifacts def _log_conflicts(self, conflicts_by_artifacts, target): - self.context.log.warn('\n ===== For target %s:' % target) + self.context.log.warn('\n ===== For target {}:'.format(target)) for artifacts, duplicate_files in conflicts_by_artifacts.items(): if len(artifacts) < 2: continue self.context.log.warn( - 'Duplicate classes and/or resources detected in artifacts: %s' % str(artifacts)) + 'Duplicate classes and/or resources detected in artifacts: {!s}'.format(artifacts)) dup_list = list(duplicate_files) for duplicate_file in dup_list[:self._max_dups]: - self.context.log.warn(' %s' % duplicate_file) + self.context.log.warn(' {}'.format(duplicate_file)) if len(dup_list) > self._max_dups: self.context.log.warn(' ... {remaining} more ...' 
.format(remaining=(len(dup_list)-self._max_dups))) diff --git a/src/python/pants/backend/jvm/tasks/ivy_task_mixin.py b/src/python/pants/backend/jvm/tasks/ivy_task_mixin.py index 046f45b3d95..c5a5fe65de5 100644 --- a/src/python/pants/backend/jvm/tasks/ivy_task_mixin.py +++ b/src/python/pants/backend/jvm/tasks/ivy_task_mixin.py @@ -144,8 +144,8 @@ def ivy_resolve(self, confs=confs) if not os.path.exists(raw_target_classpath_file_tmp): - raise TaskError('Ivy failed to create classpath file at %s' - % raw_target_classpath_file_tmp) + raise TaskError('Ivy failed to create classpath file at {}' + .format(raw_target_classpath_file_tmp)) shutil.move(raw_target_classpath_file_tmp, raw_target_classpath_file) logger.debug('Moved ivy classfile file to {dest}'.format(dest=raw_target_classpath_file)) @@ -206,8 +206,8 @@ def mapjars(self, genmap, target, executor, jars=None): mapdir = self.mapjar_workdir(target) safe_mkdir(mapdir, clean=True) ivyargs = [ - '-retrieve', '%s/[organisation]/[artifact]/[conf]/' - '[organisation]-[artifact]-[revision](-[classifier]).[ext]' % mapdir, + '-retrieve', '{}/[organisation]/[artifact]/[conf]/' + '[organisation]-[artifact]-[revision](-[classifier]).[ext]'.format(mapdir), '-symlink', ] confs = maybe_list(target.payload.get_field_value('configurations') or []) @@ -275,7 +275,7 @@ def exec_ivy(self, result = execute_runner(runner, workunit_factory=self.context.new_workunit, workunit_name=workunit_name) if result != 0: - raise TaskError('Ivy returned %d' % result) + raise TaskError('Ivy returned {}'.format(result)) except runner.executor.Error as e: raise TaskError(e) diff --git a/src/python/pants/backend/jvm/tasks/jar_create.py b/src/python/pants/backend/jvm/tasks/jar_create.py index 936ed0eb8f2..4e73148c7af 100644 --- a/src/python/pants/backend/jvm/tasks/jar_create.py +++ b/src/python/pants/backend/jvm/tasks/jar_create.py @@ -78,9 +78,9 @@ def execute(self): def create_jar(self, target, path): existing = self._jars.setdefault(path, target) if target != existing: - raise TaskError('Duplicate name: target %s tried to write %s already mapped to target %s' % ( - target, path, existing - )) + raise TaskError( + 'Duplicate name: target {} tried to write {} already mapped to target {}' + .format(target, path, existing)) self._jars[path] = target with self.open_jar(path, overwrite=True, compressed=self.compressed) as jar: yield jar diff --git a/src/python/pants/backend/jvm/tasks/jar_publish.py b/src/python/pants/backend/jvm/tasks/jar_publish.py index 0d95b44e4b3..29293fa358e 100644 --- a/src/python/pants/backend/jvm/tasks/jar_publish.py +++ b/src/python/pants/backend/jvm/tasks/jar_publish.py @@ -82,7 +82,7 @@ def with_sha_and_fingerprint(self, sha, fingerprint): return PushDb.Entry(self.sem_ver, self.named_ver, self.named_is_latest, sha, fingerprint) def __repr__(self): - return '<%s, %s, %s, %s, %s, %s>' % ( + return '<{}, {}, {}, {}, {}, {}>'.format( self.__class__.__name__, self.sem_ver, self.named_ver, self.named_is_latest, self.sha, self.fingerprint) @@ -124,7 +124,7 @@ def _accessors_for_target(self, target): raise ValueError def key(prefix): - return '%s.%s%%%s' % (prefix, jar_dep.org, jar_dep.name) + return '{}.{}%{}'.format(prefix, jar_dep.org, jar_dep.name) def getter(prefix, default=None): return self._props.get(key(prefix), default) @@ -272,7 +272,7 @@ def internaldep(self, jar_dependency, dep=None, configurations=None, classifier= def coordinate(org, name, rev=None): - return '%s#%s;%s' % (org, name, rev) if rev else '%s#%s' % (org, name) + return 
'{}#{};{}'.format(org, name, rev) if rev else '{}#{}'.format(org, name) def jar_coordinate(jar, rev=None): @@ -444,7 +444,7 @@ def __init__(self, *args, **kwargs): credentials = next(iter(self.context.resolve(auth))) user = credentials.username(data['resolver']) password = credentials.password(data['resolver']) - self.context.log.debug('Found auth for repo=%s user=%s' % (repo, user)) + self.context.log.debug('Found auth for repo={} user={}'.format(repo, user)) self.repos[repo]['username'] = user self.repos[repo]['password'] = password self.commit = self.get_options().commit @@ -472,10 +472,10 @@ def parse_jarcoordinate(coordinate): if not target: siblings = Target.get_all_addresses(address.build_file) prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these' - raise TaskError('%s => %s?:\n %s' % (address, prompt, - '\n '.join(str(a) for a in siblings))) + raise TaskError('{} => {}?:\n {}'.format(address, prompt, + '\n '.join(str(a) for a in siblings))) if not target.is_exported: - raise TaskError('%s is not an exported target' % coordinate) + raise TaskError('{} is not an exported target'.format(coordinate)) return target.provides.org, target.provides.name except (BuildFile.BuildFileError, BuildFileParser.BuildFileParserError, AddressLookupError) as e: raise TaskError('{message}\n Problem with BUILD file at {coordinate}' @@ -493,10 +493,10 @@ def parse_override(override): # overrides imply semantic versioning rev = Semver.parse(rev) except ValueError as e: - raise TaskError('Invalid version %s: %s' % (rev, e)) + raise TaskError('Invalid version {}: {}'.format(rev, e)) return parse_jarcoordinate(coordinate), rev except ValueError: - raise TaskError('Invalid override: %s' % override) + raise TaskError('Invalid override: {}'.format(override)) self.overrides.update(parse_override(o) for o in self.get_options().override) @@ -514,7 +514,7 @@ def confirm_push(self, coord, version): isatty = False if not isatty: return True - push = raw_input('Publish %s with revision %s ? [y|N] ' % ( + push = raw_input('Publish {} with revision {} ? [y|N] '.format( coord, version )) print('\n') @@ -528,8 +528,8 @@ def _copy_artifact(self, tgt, jar, version, typename, suffix='', extension='jar' if product_mapping is None: if self.get_options().individual_plugins: return - raise ValueError("No product mapping in %s for %s. " - "You may need to run some other task first" % (typename, tgt)) + raise ValueError("No product mapping in {} for {}. " + "You may need to run some other task first".format(typename, tgt)) for basedir, jars in product_mapping.items(): for artifact in jars: path = self.artifact_path(jar, version, name=override_name, suffix=suffix, @@ -547,11 +547,11 @@ def _ivy_jvm_options(self, repo): user = repo.get('username') password = repo.get('password') if user and password: - jvm_options.append('-Dlogin=%s' % user) - jvm_options.append('-Dpassword=%s' % password) + jvm_options.append('-Dlogin={}'.format(user)) + jvm_options.append('-Dpassword={}'.format(password)) else: - raise TaskError('Unable to publish to %s. %s' % - (repo.get('resolver'), repo.get('help', ''))) + raise TaskError('Unable to publish to {}. 
{}' + .format(repo.get('resolver'), repo.get('help', ''))) return jvm_options def publish(self, ivyxml_path, jar, entry, repo, published): @@ -570,10 +570,10 @@ def publish(self, ivyxml_path, jar, entry, repo, published): args = [ '-settings', ivysettings, '-ivy', ivyxml_path, - '-deliverto', '%s/[organisation]/[module]/ivy-[revision].xml' % self.workdir, + '-deliverto', '{}/[organisation]/[module]/ivy-[revision].xml'.format(self.workdir), '-publish', resolver, - '-publishpattern', '%s/[organisation]/[module]/' - '[artifact]-[revision](-[classifier]).[ext]' % self.workdir, + '-publishpattern', '{}/[organisation]/[module]/' + '[artifact]-[revision](-[classifier]).[ext]'.format(self.workdir), '-revision', entry.version().version(), '-m2compatible', ] @@ -605,7 +605,7 @@ def execute(self): def get_db(tgt): # TODO(tdesai) Handle resource type in get_db. if tgt.provides is None: - raise TaskError('trying to publish target %r which does not provide an artifact' % tgt) + raise TaskError('trying to publish target {!r} which does not provide an artifact'.format(tgt)) dbfile = tgt.provides.repo.push_db(tgt) result = pushdbs.get(dbfile) if not result: @@ -693,7 +693,7 @@ def stage_artifacts(tgt, jar, version, changelog): raise TaskError("publish_extra for '{0}' must override one of name, classifier or " "extension with a non-default value.".format(extra_product)) - ivy_tmpl_key = classifier or '%s-%s'.format(override_name, extension) + ivy_tmpl_key = classifier or '{}-{}'.format(override_name, extension) # Build a list of targets to check. This list will consist of the current target, plus the # entire derived_from chain. @@ -792,7 +792,7 @@ def stage_individual_plugins(tgt, jar, version, changelog): sem_ver = sem_ver.make_snapshot() if sem_ver <= oldentry.sem_ver: - raise TaskError('Requested version %s must be greater than the current version %s' % ( + raise TaskError('Requested version {} must be greater than the current version {}'.format( sem_ver, oldentry.sem_ver )) newentry = oldentry.with_sem_ver(sem_ver) @@ -810,7 +810,7 @@ def stage_individual_plugins(tgt, jar, version, changelog): print('No changes for {0}'.format(pushdb_coordinate(jar, oldentry))) stage_artifacts(target, jar, oldentry.version().version(), changelog) elif skip: - print('Skipping %s to resume at %s' % ( + print('Skipping {} to resume at {}'.format( jar_coordinate(jar, (newentry.version() if self.force else oldentry.version()).version()), coordinate(self.restart_at[0], self.restart_at[1]) )) @@ -864,11 +864,11 @@ def stage_individual_plugins(tgt, jar, version, changelog): def artifact_path(self, jar, version, name=None, suffix='', extension='jar', artifact_ext=''): return os.path.join(self.workdir, jar.org, jar.name + artifact_ext, - '%s%s-%s%s.%s' % ((name or jar.name), - artifact_ext if name != 'ivy' else '', - version, - suffix, - extension)) + '{}{}-{}{}.{}'.format((name or jar.name), + artifact_ext if name != 'ivy' else '', + version, + suffix, + extension)) def check_targets(self, targets): invalid = defaultdict(lambda: defaultdict(set)) @@ -905,12 +905,12 @@ def first_address(pair): return str(first.address) for publish_target, invalid_targets in sorted(invalid.items(), key=first_address): - msg.append('\n Cannot publish %s due to:' % publish_target.address) + msg.append('\n Cannot publish {} due to:'.format(publish_target.address)) for invalid_target, reasons in sorted(invalid_targets.items(), key=first_address): for reason in sorted(reasons): - msg.append('\n %s - %s' % (invalid_target.address, reason)) + 
msg.append('\n {} - {}'.format(invalid_target.address, reason)) - raise TaskError('The following errors must be resolved to publish.%s' % ''.join(msg)) + raise TaskError('The following errors must be resolved to publish.{}'.format(''.join(msg))) def exported_targets(self): candidates = set() diff --git a/src/python/pants/backend/jvm/tasks/jvm_binary_task.py b/src/python/pants/backend/jvm/tasks/jvm_binary_task.py index 40256798ed5..ce2f3c0fc4d 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_binary_task.py +++ b/src/python/pants/backend/jvm/tasks/jvm_binary_task.py @@ -78,7 +78,7 @@ def monolithic_jar(self, binary, path, with_external_deps): with self.context.new_workunit(name='add-dependency-jars'): for basedir, external_jar in self.list_external_jar_dependencies(binary): external_jar_path = os.path.join(basedir, external_jar) - self.context.log.debug(' dumping %s' % external_jar_path) + self.context.log.debug(' dumping {}'.format(external_jar_path)) jar.writejar(external_jar_path) yield jar @@ -117,7 +117,7 @@ def _unexcluded_dependencies(self, jardepmap, binary): for jar in jars: excludes.add((basedir, jar)) if excludes: - self.context.log.debug('Calculated excludes:\n\t%s' % '\n\t'.join(str(e) for e in excludes)) + self.context.log.debug('Calculated excludes:\n\t{}'.format('\n\t'.join(str(e) for e in excludes))) externaljars = OrderedSet() @@ -129,7 +129,7 @@ def add_jars(target): if (basedir, externaljar) not in excludes: externaljars.add((basedir, externaljar)) else: - self.context.log.debug('Excluding %s from binary' % externaljar) + self.context.log.debug('Excluding {} from binary'.format(externaljar)) binary.walk(add_jars) return externaljars diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/anonymizer.py b/src/python/pants/backend/jvm/tasks/jvm_compile/anonymizer.py index 02b17381313..ce4bfcda78d 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/anonymizer.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/anonymizer.py @@ -63,9 +63,9 @@ def _random_base64_string(): # upper-case letters. _DELIMITER = r'\d|\s|/|-|_|\$|\.' _UPPER = r'[A-Z]' - _UPPER_CASE_RE = re.compile(r'^%s$' % _UPPER) - _DELIMITER_RE = re.compile(r'^%s$' % _DELIMITER) - _BREAK_ON_RE = re.compile(r'(%s|%s)' % (_DELIMITER, _UPPER)) # Capture what we broke on. + _UPPER_CASE_RE = re.compile(r'^{}$'.format(_UPPER)) + _DELIMITER_RE = re.compile(r'^{}$'.format(_DELIMITER)) + _BREAK_ON_RE = re.compile(r'({}|{})'.format(_DELIMITER, _UPPER)) # Capture what we broke on. # Valid replacement words must be all lower-case ASCII letters, with no apostrophes etc, and must be # at least 5 characters. @@ -101,7 +101,7 @@ def words_needed(self): def check_for_comprehensiveness(self): if self._words_needed: - raise Exception('Need %d more words in word_list for full anonymization.' 
% self._words_needed) + raise Exception('Need {} more words in word_list for full anonymization.'.format(self._words_needed)) def convert(self, s): parts = TokenTranslator._BREAK_ON_RE.split(s) @@ -154,9 +154,9 @@ def _convert_single_token(self, token): def _add_translation(self, frm, to): if frm in self._translations: - raise Exception('Word already has translation: %s -> %s' % (frm, self._translations[frm])) + raise Exception('Word already has translation: {} -> {}'.format(frm, self._translations[frm])) if to in self._reverse_translations: - raise Exception('Translation target already used: %s -> %s' % (self._reverse_translations[to], to)) + raise Exception('Translation target already used: {} -> {}'.format(self._reverse_translations[to], to)) self._translations[frm] = to self._reverse_translations[to] = frm diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/java/apt_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/java/apt_compile.py index 29914ccbe07..4a661dcbce7 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/java/apt_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/java/apt_compile.py @@ -51,4 +51,4 @@ def extra_products(self, target): def _write_processor_info(self, processor_info_file, processors): with safe_open(processor_info_file, 'w') as f: for processor in processors: - f.write('%s\n' % processor.strip()) + f.write('{}\n'.format(processor.strip())) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py index 3807f3707b9..54c7a00857a 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py @@ -145,5 +145,5 @@ def compile(self, args, classpath, sources, classes_output_dir, upstream_analysi workunit_name='jmake', workunit_labels=[WorkUnit.COMPILER]) if result: - default_message = 'Unexpected error - JMake returned %d' % result + default_message = 'Unexpected error - JMake returned {}'.format(result) raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message)) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis.py b/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis.py index 50edbd62315..723e3c9fd17 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis.py @@ -71,7 +71,7 @@ def rebase_path(path): return path outfile.write('pcd entries:\n') - outfile.write('%d items\n' % len(self.pcd_entries)) + outfile.write('{} items\n'.format(len(self.pcd_entries))) for pcd_entry in self.pcd_entries: rebased_src = rebase_path(pcd_entry[1]) if rebased_src: @@ -84,7 +84,7 @@ def rebase_path(path): # Note that last element already includes \n. 
outfile.write('dependencies:\n') - outfile.write('%d items\n' % len(self.src_to_deps)) + outfile.write('{} items\n'.format(len(self.src_to_deps))) for src, deps in self.src_to_deps.items(): rebased_src = rebase_path(src) if rebased_src: diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis_parser.py b/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis_parser.py index 98619e82d40..6136087b705 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis_parser.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/java/jmake_analysis_parser.py @@ -29,7 +29,7 @@ def parse(self, infile): line = infile.readline() tpl = line.split('\t') if len(tpl) != 5: - raise ParseError('Line must contain 5 tab-separated fields: %s' % line) + raise ParseError('Line must contain 5 tab-separated fields: {}'.format(line)) pcd_entries.append(tpl) # Note: we preserve the \n on the last entry. src_to_deps = self._parse_deps_at_position(infile) return JMakeAnalysis(pcd_entries, src_to_deps) @@ -87,4 +87,4 @@ def _parse_deps_at_position(self, infile): def _expect_header(self, line, header): expected = header + ':\n' if line != expected: - raise ParseError('Expected: %s. Found: %s' % (expected, line)) + raise ParseError('Expected: {}. Found: {}'.format(expected, line)) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py index fd68af65583..fc2e44f9f07 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py @@ -286,13 +286,13 @@ def _compile_vts(self, vts, sources, analysis_file, upstream_analysis, classpath compiled individually. """ if not sources: - self.context.log.warn('Skipping %s compile for targets with no sources:\n %s' - % (self.name(), vts.targets)) + self.context.log.warn('Skipping {} compile for targets with no sources:\n {}' + .format(self.name(), vts.targets)) else: # Do some reporting. 
self.context.log.info( 'Compiling ', - items_to_report_element(sources, '%s source' % self.name()), + items_to_report_element(sources, '{} source'.format(self.name())), ' in ', items_to_report_element([t.address.reference() for t in vts.targets], 'target'), ' (', diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_global_strategy.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_global_strategy.py index d8c660a1ad6..88b4c3b187e 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_global_strategy.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_global_strategy.py @@ -257,8 +257,8 @@ def extra_compile_classpath_iter(): if len(sources) != len(de_duped_sources): counts = [(src, len(list(srcs))) for src, srcs in itertools.groupby(sorted(sources))] self.context.log.warn( - 'De-duped the following sources:\n\t%s' % - '\n\t'.join(sorted('%d %s' % (cnt, src) for src, cnt in counts if cnt > 1))) + 'De-duped the following sources:\n\t{}' + .format('\n\t'.join(sorted('{} {}'.format(cnt, src) for src, cnt in counts if cnt > 1)))) analysis_file = os.path.join(partition_tmpdir, 'analysis') partitions.append((vts, de_duped_sources, analysis_file)) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_isolated_strategy.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_isolated_strategy.py index bcc38971142..ec533a55c71 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_isolated_strategy.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_isolated_strategy.py @@ -120,7 +120,7 @@ def extra_compile_classpath_iter(): invalid_vts_count = len(invalidation_check.invalid_vts_partitioned) for idx, vts in enumerate(invalidation_check.invalid_vts_partitioned): # Invalidated targets are a subset of relevant targets: get the context for this one. 
- assert len(vts.targets) == 1, ("Requested one target per partition, got %s" % vts) + assert len(vts.targets) == 1, ("Requested one target per partition, got {}".format(vts)) compile_context = compile_contexts[vts.targets[0]] # Generate a classpath specific to this compile and target, and include analysis diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_dependency_analyzer.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_dependency_analyzer.py index 8461066ff84..ed250f989e8 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_dependency_analyzer.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_dependency_analyzer.py @@ -159,14 +159,14 @@ def filter_whitelisted(missing_deps): if self._check_missing_deps and (missing_file_deps or missing_tgt_deps): for (tgt_pair, evidence) in missing_tgt_deps: - evidence_str = '\n'.join([' %s uses %s' % (shorten(e[0]), shorten(e[1])) + evidence_str = '\n'.join([' {} uses {}'.format(shorten(e[0]), shorten(e[1])) for e in evidence]) self._context.log.error( - 'Missing BUILD dependency %s -> %s because:\n%s' - % (tgt_pair[0].address.reference(), tgt_pair[1].address.reference(), evidence_str)) + 'Missing BUILD dependency {} -> {} because:\n{}' + .format(tgt_pair[0].address.reference(), tgt_pair[1].address.reference(), evidence_str)) for (src_tgt, dep) in missing_file_deps: - self._context.log.error('Missing BUILD dependency %s -> %s' - % (src_tgt.address.reference(), shorten(dep))) + self._context.log.error('Missing BUILD dependency {} -> {}' + .format(src_tgt.address.reference(), shorten(dep))) if self._check_missing_deps == 'fatal': raise TaskError('Missing deps.') @@ -176,10 +176,10 @@ def filter_whitelisted(missing_deps): if self._check_missing_direct_deps and missing_direct_tgt_deps: for (tgt_pair, evidence) in missing_direct_tgt_deps: - evidence_str = '\n'.join([' %s uses %s' % (shorten(e[0]), shorten(e[1])) + evidence_str = '\n'.join([' {} uses {}'.format(shorten(e[0]), shorten(e[1])) for e in evidence]) - self._context.log.warn('Missing direct BUILD dependency %s -> %s because:\n%s' % - (tgt_pair[0].address, tgt_pair[1].address, evidence_str)) + self._context.log.warn('Missing direct BUILD dependency {} -> {} because:\n{}' + .format(tgt_pair[0].address, tgt_pair[1].address, evidence_str)) if self._check_missing_direct_deps == 'fatal': raise TaskError('Missing direct deps.') @@ -266,7 +266,7 @@ def target_or_java_dep_in_targets(target, targets): missing_direct_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append( (src, actual_dep)) else: - raise TaskError('Requested dep info for unknown source file: %s' % src) + raise TaskError('Requested dep info for unknown source file: {}'.format(src)) return (list(missing_file_deps), missing_tgt_deps_map.items(), diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/resource_mapping.py b/src/python/pants/backend/jvm/tasks/jvm_compile/resource_mapping.py index 03d555178f9..53433f8cca3 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/resource_mapping.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/resource_mapping.py @@ -101,4 +101,4 @@ def get(self, key, default=None): return self.mappings.get(key, default) def __str__(self): - return "ResourceMapping(%s)" % str(self.mappings) + return "ResourceMapping({!s})".format(self.mappings) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis.py b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis.py index c3364bc07d4..530da5859bc 100644 --- 
a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis.py @@ -94,13 +94,13 @@ def rebase(txt): items = [] for k, vals in rep.items(): for v in vals: - item = rebase('%s -> %s%s' % (k, '' if inline_vals else '\n', v)) + item = rebase('{} -> {}{}'.format(k, '' if inline_vals else '\n', v)) if item: items.append(item) items.sort() outfile.write(header + ':\n') - outfile.write('%d items\n' % len(items)) + outfile.write('{} items\n'.format(len(items))) for item in items: outfile.write(item) outfile.write('\n') @@ -231,7 +231,7 @@ def merge_dependencies(internals, externals): compilation_vals = sorted(set([x[0] for a in analyses for x in a.compilations.compilations.itervalues()])) compilations_dict = defaultdict(list) for i, v in enumerate(compilation_vals): - compilations_dict['%03d' % i] = [v] + compilations_dict['{:03}'.format(int(i))] = [v] compilations = Compilations((compilations_dict, )) return ZincAnalysis(compile_setup, relations, stamps, apis, source_infos, compilations) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_diff.py b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_diff.py index 40772476df7..1535decfa72 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_diff.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_diff.py @@ -36,13 +36,13 @@ def is_different(self): def __unicode__(self): parts = [] if self._left_missing_keys: - parts.append('Keys missing from left but available in right: %s' % - ', '.join(self._left_missing_keys)) + parts.append('Keys missing from left but available in right: {}' + .format(', '.join(self._left_missing_keys))) if self._right_missing_keys: - parts.append('Keys available in left but missing from right: %s' % - ', '.join(self._right_missing_keys)) + parts.append('Keys available in left but missing from right: {}' + .format(', '.join(self._right_missing_keys))) for k, vs in self._diff_keys.items(): - parts.append('Different values for key %s: left has %s, right has %s' % (k, vs[0], vs[1])) + parts.append('Different values for key {}: left has {}, right has {}'.format(k, vs[0], vs[1])) return '\n'.join(parts) def __str__(self): @@ -57,7 +57,7 @@ def __init__(self, left_elem, right_elem, keys_only_headers=None): left_type = type(left_elem) right_type = type(right_elem) if left_type != right_type: - raise Exception('Cannot compare elements of types %s and %s' % (left_type, right_type)) + raise Exception('Cannot compare elements of types {} and {}'.format(left_type, right_type)) self._arg_diffs = OrderedDict() for header, left_dict, right_dict in zip(left_elem.headers, left_elem.args, right_elem.args): keys_only = header in (keys_only_headers or []) @@ -70,7 +70,7 @@ def __unicode__(self): parts = [] for header, arg_diff in self._arg_diffs.items(): if arg_diff.is_different(): - parts.append('Section "%s" differs:\n' % header) + parts.append('Section "{}" differs:\n'.format(header)) parts.append(str(arg_diff)) parts.append('\n\n') return ''.join(parts) # '' is a unicode, so the entire result will be. 
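For reference, the numeric conversion specs touched in the hunks above carry over directly between the two styles. A minimal standalone sketch (not taken from the patch; i and delta are made-up values, and // is used so the arithmetic behaves the same on Python 2 and 3):

i, delta = 7, 125

# '%03d' % i is equivalent to '{:03d}'.format(i); a bare '{:03}' also zero-pads an int.
assert '%03d' % i == '{:03d}'.format(i) == '{:03}'.format(i) == '007'

# '%02d:%02d' % (minutes, seconds) becomes '{:02d}:{:02d}'.format(minutes, seconds).
assert '%02d:%02d' % (delta // 60, delta % 60) == '{:02d}:{:02d}'.format(delta // 60, delta % 60) == '02:05'

# A bare '{}' behaves like '%s': both render via str().
assert '{} items'.format(3) == '%s items' % 3 == '3 items'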
diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_parser.py b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_parser.py index 3ecac546bf9..3ce34eb42c2 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_parser.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis_parser.py @@ -88,7 +88,7 @@ def _parse_section(self, lines_iter, expected_header=None): if expected_header: line = lines_iter.next() if expected_header + ':\n' != line: - raise ParseError('Expected: "%s:". Found: "%s"' % (expected_header, line)) + raise ParseError('Expected: "{}:". Found: "{}"'.format(expected_header, line)) n = self.parse_num_items(lines_iter.next()) relation = defaultdict(list) # Values are lists, to accommodate relations. for i in range(n): diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_utils.py b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_utils.py index 26a5e56e14f..8332dcc92e6 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_utils.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_utils.py @@ -69,7 +69,7 @@ def _compiler_classpath(self): def _zinc_jar_args(self): zinc_jars = self.identify_zinc_jars(self._zinc_classpath) # The zinc jar names are also the flag names. - return (list(chain.from_iterable([['-%s' % name, jarpath] + return (list(chain.from_iterable([['-{}'.format(name), jarpath] for (name, jarpath) in sorted(zinc_jars.items())])) + ['-scala-path', ':'.join(self._compiler_classpath)]) @@ -86,9 +86,9 @@ def _create_plugin_args(self): active_plugins = self.find_plugins() ret = [] for name, jar in active_plugins.items(): - ret.append('-S-Xplugin:%s' % jar) + ret.append('-S-Xplugin:{}'.format(jar)) for arg in plugin_args.get(name, []): - ret.append('-S-P:%s:%s' % (name, arg)) + ret.append('-S-P:{}:{}'.format(name, arg)) return ret def plugin_jars(self): @@ -150,7 +150,7 @@ def compile(self, extra_args, classpath, sources, output_dir, args += self._plugin_args() if upstream_analysis_files: args.extend( - ['-analysis-map', ','.join(['%s:%s' % kv for kv in upstream_analysis_files.items()])]) + ['-analysis-map', ','.join(['{}:{}'.format(*kv) for kv in upstream_analysis_files.items()])]) args += extra_args @@ -172,10 +172,10 @@ def write_plugin_info(resources_dir, target): with safe_open(plugin_info_file, 'w') as f: f.write(textwrap.dedent(''' - %s - %s + {} + {} - ''' % (target.plugin, target.classname)).strip()) + '''.format(target.plugin, target.classname)).strip()) return root, plugin_info_file # These are the names of the various jars zinc needs. 
They are, conveniently and @@ -198,7 +198,7 @@ def identify_zinc_jars(cls, zinc_classpath): jar_for_name = jar break if jar_for_name is None: - raise TaskError('Couldn\'t find jar named %s' % name) + raise TaskError('Couldn\'t find jar named {}'.format(name)) else: jars_by_name[name] = jar_for_name return jars_by_name @@ -217,11 +217,11 @@ def find_plugins(self): plugin_info = ElementTree.parse(plugin_info_file).getroot() if plugin_info.tag != 'plugin': raise TaskError( - 'File %s in %s is not a valid scalac plugin descriptor' % (_PLUGIN_INFO_FILE, jar)) + 'File {} in {} is not a valid scalac plugin descriptor'.format(_PLUGIN_INFO_FILE, jar)) name = plugin_info.find('name').text if name in plugin_names: if name in plugins: - raise TaskError('Plugin %s defined in %s and in %s' % (name, plugins[name], jar)) + raise TaskError('Plugin {} defined in {} and in {}'.format(name, plugins[name], jar)) # It's important to use relative paths, as the compiler flags get embedded in the zinc # analysis file, and we port those between systems via the artifact cache. plugins[name] = os.path.relpath(jar, buildroot) @@ -230,12 +230,12 @@ def find_plugins(self): unresolved_plugins = plugin_names - set(plugins.keys()) if unresolved_plugins: - raise TaskError('Could not find requested plugins: %s' % list(unresolved_plugins)) + raise TaskError('Could not find requested plugins: {}'.format(list(unresolved_plugins))) return plugins def log_zinc_file(self, analysis_file): - self.context.log.debug('Calling zinc on: %s (%s)' % - (analysis_file, - hash_file(analysis_file).upper() - if os.path.exists(analysis_file) - else 'nonexistent')) + self.context.log.debug('Calling zinc on: {} ({})' + .format(analysis_file, + hash_file(analysis_file).upper() + if os.path.exists(analysis_file) + else 'nonexistent')) diff --git a/src/python/pants/backend/jvm/tasks/jvm_run.py b/src/python/pants/backend/jvm/tasks/jvm_run.py index 1b043de1869..721b0e49530 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_run.py +++ b/src/python/pants/backend/jvm/tasks/jvm_run.py @@ -108,5 +108,5 @@ def execute(self): with safe_open(expand_path(self.only_write_cmd_line), 'w') as outfile: outfile.write(' '.join(executor.cmd)) elif result != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (binary.main, result), + raise TaskError('java {} ... 
exited non-zero ({})'.format(binary.main, result), exit_code=result) diff --git a/src/python/pants/backend/jvm/tasks/jvmdoc_gen.py b/src/python/pants/backend/jvm/tasks/jvmdoc_gen.py index 4bfd15ac162..571a412f4db 100644 --- a/src/python/pants/backend/jvm/tasks/jvmdoc_gen.py +++ b/src/python/pants/backend/jvm/tasks/jvmdoc_gen.py @@ -75,7 +75,7 @@ def __init__(self, *args, **kwargs): jvmdoc_tool_name = self.jvmdoc().tool_name - config_section = '%s-gen' % jvmdoc_tool_name + config_section = '{}-gen'.format(jvmdoc_tool_name) options = self.get_options() self._include_codegen = options.include_codegen @@ -100,7 +100,7 @@ def generate_doc(self, language_predicate, create_jvmdoc_command): catalog = self.context.products.isrequired(self.jvmdoc().product_type) if catalog and self.combined: raise TaskError( - 'Cannot provide %s target mappings for combined output' % self.jvmdoc().product_type) + 'Cannot provide {} target mappings for combined output'.format(self.jvmdoc().product_type)) def docable(tgt): return language_predicate(tgt) and (self._include_codegen or not tgt.is_codegen) @@ -140,7 +140,7 @@ def _generate_combined(self, classpath, targets, create_jvmdoc_command): safe_mkdir(gendir, clean=True) command = create_jvmdoc_command(classpath, gendir, *targets) if command: - self.context.log.debug("Running create_jvmdoc in %s with %s" % (gendir, " ".join(command))) + self.context.log.debug("Running create_jvmdoc in {} with {}".format(gendir, " ".join(command))) result, gendir = create_jvmdoc(command, gendir) self._handle_create_jvmdoc_result(targets, result, command) if self.open: @@ -170,8 +170,8 @@ def _generate_individual(self, classpath, targets, create_jvmdoc_command): self.context.log.debug("Begin multiprocessing section; output may be misordered or garbled") try: for gendir, (target, command) in jobs.items(): - self.context.log.debug("Running create_jvmdoc in %s with %s" % - (gendir, " ".join(command))) + self.context.log.debug("Running create_jvmdoc in {} with {}" + .format(gendir, " ".join(command))) futures.append(pool.apply_async(create_jvmdoc, args=(command, gendir))) for future in futures: @@ -188,7 +188,7 @@ def _generate_individual(self, classpath, targets, create_jvmdoc_command): def _handle_create_jvmdoc_result(self, targets, result, command): if result != 0: targetlist = ", ".join(map(str, targets)) - message = 'Failed to process %s for %s [%d]: %s' % ( + message = 'Failed to process {} for {} [{}]: {}'.format( self.jvmdoc().tool_name, targetlist, result, command) if self.ignore_failure: self.context.log.warn(message) diff --git a/src/python/pants/backend/jvm/tasks/specs_run.py b/src/python/pants/backend/jvm/tasks/specs_run.py index 4e749f50832..c37af462801 100644 --- a/src/python/pants/backend/jvm/tasks/specs_run.py +++ b/src/python/pants/backend/jvm/tasks/specs_run.py @@ -49,7 +49,7 @@ def execute(self): def run_tests(tests): args = ['--color'] if self.get_options().colors else [] - args.append('--specs=%s' % ','.join(tests)) + args.append('--specs={}'.format(','.join(tests))) specs_runner_main = 'com.twitter.common.testing.ExplicitSpecsRunnerMain' bootstrapped_cp = self.tool_classpath('specs') @@ -65,7 +65,7 @@ def run_tests(tests): workunit_labels=[WorkUnit.TEST] ) if result != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (specs_runner_main, result)) + raise TaskError('java {} ... 
exited non-zero ({})'.format(specs_runner_main, result)) if self.tests: run_tests(self.tests) diff --git a/src/python/pants/backend/project_info/tasks/depmap.py b/src/python/pants/backend/project_info/tasks/depmap.py index 0488844fb58..be70832c8d6 100644 --- a/src/python/pants/backend/project_info/tasks/depmap.py +++ b/src/python/pants/backend/project_info/tasks/depmap.py @@ -136,13 +136,13 @@ def _dep_id(self, dependency): params.update(org='internal', name=dependency.id) if params.get('rev'): - return "%(org)s%(sep)s%(name)s%(sep)s%(rev)s" % params, False + return "{org}{sep}{name}{sep}{rev}".format(**params), False else: - return "%(org)s%(sep)s%(name)s" % params, True + return "{org}{sep}{name}".format(**params), True def _output_dependency_tree(self, target): def output_dep(dep, indent): - return "%s%s" % (indent * " ", dep) + return "{}{}".format(indent * " ", dep) def check_path_to(jar_dep_id): """ @@ -154,7 +154,7 @@ def check_path_to(jar_dep_id): def output_deps(dep, indent=0, outputted=set()): dep_id, _ = self._dep_id(dep) if dep_id in outputted: - return [output_dep("*%s" % dep_id, indent)] if not self.is_minimal else [] + return [output_dep("*{}".format(dep_id), indent)] if not self.is_minimal else [] else: output = [] if not self.is_external_only: @@ -199,12 +199,12 @@ def output_candidate(internal): def output_dep(dep): dep_id, internal = self._dep_id(dep) if internal: - fmt = ' "%(id)s" [style=filled, fillcolor="%(color)d"];' + fmt = ' "{id}" [style=filled, fillcolor="{color}"];' else: - fmt = ' "%(id)s" [style=filled, fillcolor="%(color)d", shape=ellipse];' + fmt = ' "{id}" [style=filled, fillcolor="{color}", shape=ellipse];' if type(dep) not in color_by_type: color_by_type[type(dep)] = len(color_by_type.keys()) + 1 - return fmt % {'id': dep_id, 'color': color_by_type[type(dep)]} + return fmt.format(id=dep_id, color=color_by_type[type(dep)]) def output_deps(outputted, dep, parent=None): output = [] @@ -213,7 +213,7 @@ def output_deps(outputted, dep, parent=None): outputted.add(dep) output.append(output_dep(dep)) if parent: - output.append(' "%s" -> "%s";' % (self._dep_id(parent)[0], self._dep_id(dep)[0])) + output.append(' "{}" -> "{}";'.format(self._dep_id(parent)[0], self._dep_id(dep)[0])) # TODO: This is broken. 'dependency' doesn't exist here, and we don't have # internal_dependencies any more anyway. 
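The depmap.py hunks here swap dict-based %-interpolation for named format fields; the output is unchanged, but any brace that should appear literally in the template now has to be doubled, as the digraph header line later in this same file does. A small sketch with hypothetical values, not code from the patch:

params = {'org': 'internal', 'sep': '.', 'name': 'examples.hello'}  # placeholder values

# '%(org)s%(sep)s%(name)s' % params is equivalent to '{org}{sep}{name}'.format(**params).
assert '%(org)s%(sep)s%(name)s' % params == '{org}{sep}{name}'.format(**params) == 'internal.examples.hello'

# Named fields can also be supplied as keyword arguments instead of a dict.
assert ' "{id}" [fillcolor="{color}"];'.format(id='a', color=2) == ' "a" [fillcolor="2"];'

# Literal braces are escaped by doubling them.
assert 'digraph "{}" {{'.format('t') == 'digraph "t" {'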
@@ -233,11 +233,11 @@ def output_deps(outputted, dep, parent=None): left_id = target_id if self.is_external_only else dep_id if (left_id, jar_id) not in outputted: styled = internal and not self.is_internal_only - output += [' "%s" -> "%s"%s;' % (left_id, jar_id, - ' [style="dashed"]' if styled else '')] + output += [' "{}" -> "{}"{};'.format(left_id, jar_id, + ' [style="dashed"]' if styled else '')] outputted.add((left_id, jar_id)) return output - header = ['digraph "%s" {' % target.id] + header = ['digraph "{}" {{'.format(target.id)] graph_attr = [' node [shape=rectangle, colorscheme=set312;];', ' rankdir=LR;'] return header + graph_attr + output_deps(set(), target) + ['}'] diff --git a/src/python/pants/backend/project_info/tasks/eclipse_gen.py b/src/python/pants/backend/project_info/tasks/eclipse_gen.py index 8e93e208e21..52f35fbbecc 100644 --- a/src/python/pants/backend/project_info/tasks/eclipse_gen.py +++ b/src/python/pants/backend/project_info/tasks/eclipse_gen.py @@ -45,13 +45,13 @@ def __init__(self, *args, **kwargs): super(EclipseGen, self).__init__(*args, **kwargs) version = _VERSIONS[self.get_options().version] - self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'project-%s.mustache' % version) - self.classpath_template = os.path.join(_TEMPLATE_BASEDIR, 'classpath-%s.mustache' % version) - self.apt_template = os.path.join(_TEMPLATE_BASEDIR, 'factorypath-%s.mustache' % version) - self.pydev_template = os.path.join(_TEMPLATE_BASEDIR, 'pydevproject-%s.mustache' % version) - self.debug_template = os.path.join(_TEMPLATE_BASEDIR, 'debug-launcher-%s.mustache' % version) + self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'project-{}.mustache'.format(version)) + self.classpath_template = os.path.join(_TEMPLATE_BASEDIR, 'classpath-{}.mustache'.format(version)) + self.apt_template = os.path.join(_TEMPLATE_BASEDIR, 'factorypath-{}.mustache'.format(version)) + self.pydev_template = os.path.join(_TEMPLATE_BASEDIR, 'pydevproject-{}.mustache'.format(version)) + self.debug_template = os.path.join(_TEMPLATE_BASEDIR, 'debug-launcher-{}.mustache'.format(version)) self.coreprefs_template = os.path.join(_TEMPLATE_BASEDIR, - 'org.eclipse.jdt.core.prefs-%s.mustache' % version) + 'org.eclipse.jdt.core.prefs-{}.mustache'.format(version)) self.project_filename = os.path.join(self.cwd, '.project') self.classpath_filename = os.path.join(self.cwd, '.classpath') @@ -87,7 +87,7 @@ def create_source_template(base_id, includes=None, excludes=None): def create_sourcepath(base_id, sources): def normalize_path_pattern(path): - return '%s/' % path if not path.endswith('/') else path + return '{}/'.format(path) if not path.endswith('/') else path includes = [normalize_path_pattern(src_set.path) for src_set in sources if src_set.path] excludes = [] @@ -101,7 +101,7 @@ def normalize_path_pattern(path): for source_set in project.py_sources: pythonpaths.append(create_source_template(linked_folder_id(source_set))) for source_set in project.py_libs: - lib_path = source_set.path if source_set.path.endswith('.egg') else '%s/' % source_set.path + lib_path = source_set.path if source_set.path.endswith('.egg') else '{}/'.format(source_set.path) pythonpaths.append(create_source_template(linked_folder_id(source_set), includes=[lib_path])) @@ -109,7 +109,7 @@ def normalize_path_pattern(path): name=self.project_name, java=TemplateData( jdk=self.java_jdk, - language_level=('1.%d' % self.java_language_level) + language_level=('1.{}'.format(self.java_language_level)) ), python=project.has_python, scala=project.has_scala 
and not project.skip_scala, @@ -146,7 +146,7 @@ def apply_template(output_path, template_relpath, **template_data): apply_template(self.project_filename, self.project_template, project=configured_project) apply_template(self.classpath_filename, self.classpath_template, classpath=configured_classpath) apply_template(os.path.join(self.gen_project_workdir, - 'Debug on port %d.launch' % project.debug_port), + 'Debug on port {}.launch'.format(project.debug_port)), self.debug_template, project=configured_project) apply_template(self.coreprefs_filename, self.coreprefs_template, project=configured_project) @@ -168,4 +168,4 @@ def apply_template(output_path, template_relpath, **template_data): else: safe_delete(self.pydev_filename) - print('\nGenerated project at %s%s' % (self.gen_project_workdir, os.sep)) + print('\nGenerated project at {}{}'.format(self.gen_project_workdir, os.sep)) diff --git a/src/python/pants/backend/project_info/tasks/ensime_gen.py b/src/python/pants/backend/project_info/tasks/ensime_gen.py index 26fe470b871..a3ad6822aac 100644 --- a/src/python/pants/backend/project_info/tasks/ensime_gen.py +++ b/src/python/pants/backend/project_info/tasks/ensime_gen.py @@ -72,7 +72,7 @@ def create_source_template(base_id, includes=None, excludes=None): def create_sourcepath(base_id, sources): def normalize_path_pattern(path): - return '%s/' % path if not path.endswith('/') else path + return '{}/'.format(path) if not path.endswith('/') else path includes = [normalize_path_pattern(src_set.path) for src_set in sources if src_set.path] excludes = [] @@ -107,7 +107,7 @@ def add_jarlibs(classpath_entries): name=self.project_name, java=TemplateData( jdk=self.java_jdk, - language_level=('1.%d' % self.java_language_level) + language_level=('1.{}'.format(self.java_language_level)) ), scala=scala, source_bases=source_bases.values(), @@ -130,4 +130,4 @@ def apply_template(output_path, template_relpath, **template_data): Generator(pkgutil.get_data(__name__, template_relpath), **template_data).write(output) apply_template(self.project_filename, self.project_template, project=configured_project) - print('\nGenerated ensime project at %s%s' % (self.gen_project_workdir, os.sep)) + print('\nGenerated ensime project at {}{}'.format(self.gen_project_workdir, os.sep)) diff --git a/src/python/pants/backend/project_info/tasks/ide_gen.py b/src/python/pants/backend/project_info/tasks/ide_gen.py index aa0bb16e370..6f09ac9bde2 100644 --- a/src/python/pants/backend/project_info/tasks/ide_gen.py +++ b/src/python/pants/backend/project_info/tasks/ide_gen.py @@ -149,7 +149,7 @@ def __init__(self, *args, **kwargs): if self.get_options().java_jdk_name: self.java_jdk = self.get_options().java_jdk_name else: - self.java_jdk = '1.%d' % self.java_language_level + self.java_jdk = '1.{}'.format(self.java_language_level) # Always tack on the project name to the work dir so each project gets its own linked jars, # etc. 
See https://github.com/pantsbuild/pants/issues/564 @@ -238,9 +238,9 @@ def prune(target): self.jar_dependencies = jars - self.context.log.debug('pruned to cp:\n\t%s' % '\n\t'.join( - str(t) for t in self.context.targets()) - ) + self.context.log.debug('pruned to cp:\n\t{}'.format( + '\n\t'.join(str(t) for t in self.context.targets()) + )) def map_internal_jars(self, targets): internal_jar_dir = os.path.join(self.gen_project_workdir, 'internal-libs') @@ -256,7 +256,7 @@ def map_internal_jars(self, targets): if mappings: for base, jars in mappings.items(): if len(jars) != 1: - raise IdeGen.Error('Unexpected mapping, multiple jars for %s: %s' % (target, jars)) + raise IdeGen.Error('Unexpected mapping, multiple jars for {}: {}'.format(target, jars)) jar = jars[0] cp_jar = os.path.join(internal_jar_dir, jar) @@ -268,7 +268,7 @@ def map_internal_jars(self, targets): for base, jars in mappings.items(): if len(jars) != 1: raise IdeGen.Error( - 'Unexpected mapping, multiple source jars for %s: %s' % (target, jars) + 'Unexpected mapping, multiple source jars for {}: {}'.format(target, jars) ) jar = jars[0] cp_source_jar = os.path.join(internal_source_jar_dir, jar) diff --git a/src/python/pants/backend/project_info/tasks/idea_gen.py b/src/python/pants/backend/project_info/tasks/idea_gen.py index ea60b68a2bd..a6e397fbf94 100644 --- a/src/python/pants/backend/project_info/tasks/idea_gen.py +++ b/src/python/pants/backend/project_info/tasks/idea_gen.py @@ -101,11 +101,11 @@ def __init__(self, *args, **kwargs): self.java_maximum_heap_size = self.get_options().java_maximum_heap_size_mb idea_version = _VERSIONS[self.get_options().version] - self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'project-%s.mustache' % idea_version) - self.module_template = os.path.join(_TEMPLATE_BASEDIR, 'module-%s.mustache' % idea_version) + self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'project-{}.mustache'.format(idea_version)) + self.module_template = os.path.join(_TEMPLATE_BASEDIR, 'module-{}.mustache'.format(idea_version)) - self.project_filename = os.path.join(self.cwd, '%s.ipr' % self.project_name) - self.module_filename = os.path.join(self.gen_project_workdir, '%s.iml' % self.project_name) + self.project_filename = os.path.join(self.cwd, '{}.ipr'.format(self.project_name)) + self.module_filename = os.path.join(self.gen_project_workdir, '{}.iml'.format(self.project_name)) @staticmethod def _maven_targets_excludes(repo_root): @@ -214,7 +214,7 @@ def create_content_root(source_set): encoding=self.java_encoding, maximum_heap_size=self.java_maximum_heap_size, jdk=self.java_jdk, - language_level = 'JDK_1_%d' % self.java_language_level + language_level ='JDK_1_{}'.format(self.java_language_level) ), resource_extensions=list(project.resource_extensions), scala=scala, diff --git a/src/python/pants/backend/python/code_generator.py b/src/python/pants/backend/python/code_generator.py index 20f98b7acce..af2b0c78d9c 100644 --- a/src/python/pants/backend/python/code_generator.py +++ b/src/python/pants/backend/python/code_generator.py @@ -41,10 +41,10 @@ def path_to_module(path): return path.replace(os.path.sep, '.') def package_name(self): - return '%s%s' % (self.target.id, self.suffix) + return '{}{}'.format(self.target.id, self.suffix) def requirement_string(self): - return '%s==0.0.0' % self.package_name() + return '{}==0.0.0'.format(self.package_name()) @property def package_dir(self): @@ -64,22 +64,22 @@ def dump_setup_py(self): boilerplate = textwrap.dedent(""" from setuptools import setup - setup(name = 
"%(package_name)s", + setup(name = "{package_name}", version = "0.0.0", - description = "autogenerated code for %(target_name)s", - install_requires = %(install_requires)r, - package_dir = { "": %(package_dir)r }, - packages = %(packages)s, - namespace_packages = %(namespace_packages)s) + description = "autogenerated code for {target_name}", + install_requires = {install_requires!r}, + package_dir = {{ "": {package_dir!r} }}, + packages = {packages}, + namespace_packages = {namespace_packages}) """) - boilerplate = boilerplate % { - 'package_name': self.package_name().encode('utf-8'), - 'package_dir': self.package_dir.encode('utf-8'), - 'target_name': self.target.name.encode('utf-8'), - 'install_requires': [x.encode('utf-8') for x in self.install_requires], - 'packages': repr([x.encode('utf-8') for x in self.created_packages]), - 'namespace_packages': repr([x.encode('utf-8') for x in self.created_namespace_packages]) - } + boilerplate = boilerplate.format( + package_name=self.package_name().encode('utf-8'), + package_dir=self.package_dir.encode('utf-8'), + target_name=self.target.name.encode('utf-8'), + install_requires=[x.encode('utf-8') for x in self.install_requires], + packages=repr([x.encode('utf-8') for x in self.created_packages]), + namespace_packages=repr([x.encode('utf-8') for x in self.created_namespace_packages]) + ) self.chroot.write(boilerplate.encode('utf8'), os.path.join(self.codegen_root, 'setup.py')) self.chroot.write('include *.py'.encode('utf8'), os.path.join(self.codegen_root, 'MANIFEST.in')) diff --git a/src/python/pants/backend/python/interpreter_cache.py b/src/python/pants/backend/python/interpreter_cache.py index bc85c0fb111..4972bdc7653 100644 --- a/src/python/pants/backend/python/interpreter_cache.py +++ b/src/python/pants/backend/python/interpreter_cache.py @@ -83,7 +83,7 @@ def _setup_cached(self, filters): path = os.path.join(self._cache_dir, interpreter_dir) pi = self._interpreter_from_path(path, filters) if pi: - self._logger('Detected interpreter %s: %s' % (pi.binary, str(pi.identity))) + self._logger('Detected interpreter {}: {}'.format(pi.binary, str(pi.identity))) self._interpreters.add(pi) def _setup_paths(self, paths, filters): @@ -165,7 +165,7 @@ def installer_provider(sdist): if egg: return interpreter.with_extra(egg.name, egg.raw_version, egg.path) else: - self._logger('Failed to resolve requirement %s for %s' % (requirement, interpreter)) + self._logger('Failed to resolve requirement {} for {}'.format(requirement, interpreter)) def _resolve_and_link(self, requirement, target_link, installer_provider): # Short-circuit if there is a local copy. 
@@ -180,13 +180,13 @@ def _resolve_and_link(self, requirement, target_link, installer_provider): links = [link for link in iterator.iter(requirement) if isinstance(link, SourcePackage)] for link in links: - self._logger(' fetching %s' % link.url) + self._logger(' fetching {}'.format(link.url)) sdist = context.fetch(link) - self._logger(' installing %s' % sdist) + self._logger(' installing {}'.format(sdist)) installer = installer_provider(sdist) dist_location = installer.bdist() target_location = os.path.join(os.path.dirname(target_link), os.path.basename(dist_location)) shutil.move(dist_location, target_location) _safe_link(target_location, target_link) - self._logger(' installed %s' % target_location) + self._logger(' installed {}'.format(target_location)) return EggPackage(target_location) diff --git a/src/python/pants/backend/python/python_artifact.py b/src/python/pants/backend/python/python_artifact.py index 0f6d4a82a09..30ec94761a8 100644 --- a/src/python/pants/backend/python/python_artifact.py +++ b/src/python/pants/backend/python/python_artifact.py @@ -34,12 +34,12 @@ def __init__(self, **kwargs): def has(name): value = self._kw.get(name) if value is None: - raise self.MissingArgument('PythonArtifact requires %s to be specified!' % name) + raise self.MissingArgument('PythonArtifact requires {} to be specified!'.format(name)) return value def misses(name): if name in self._kw: - raise self.UnsupportedArgument('PythonArtifact prohibits %s from being specified' % name) + raise self.UnsupportedArgument('PythonArtifact prohibits {} from being specified'.format(name)) self._version = has('version') self._name = has('name') @@ -56,7 +56,7 @@ def version(self): @property def key(self): - return '%s==%s' % (self._name, self._version) + return '{}=={}'.format(self._name, self._version) @property def setup_py_keywords(self): diff --git a/src/python/pants/backend/python/python_chroot.py b/src/python/pants/backend/python/python_chroot.py index 767c33208fd..64ae820d51f 100644 --- a/src/python/pants/backend/python/python_chroot.py +++ b/src/python/pants/backend/python/python_chroot.py @@ -52,7 +52,7 @@ class PythonChroot(object): class InvalidDependencyException(Exception): def __init__(self, target): - Exception.__init__(self, "Not a valid Python dependency! Found: %s" % target) + Exception.__init__(self, "Not a valid Python dependency! 
Found: {}".format(target)) def __init__(self, context, @@ -88,7 +88,7 @@ def __del__(self): if os.getenv('PANTS_LEAVE_CHROOT') is None: self.delete() else: - self.debug('Left chroot at %s' % self.path()) + self.debug('Left chroot at {}'.format(self.path())) @property def builder(self): @@ -96,7 +96,7 @@ def builder(self): def debug(self, msg, indent=0): if os.getenv('PANTS_VERBOSE') is not None: - print('%s%s' % (' ' * indent, msg)) + print('{}{}'.format(' ' * indent, msg)) def path(self): return os.path.realpath(self._builder.path()) @@ -106,7 +106,7 @@ def copy_to_chroot(base, path, add_function): src = os.path.join(get_buildroot(), base, path) add_function(src, path) - self.debug(' Dumping library: %s' % library) + self.debug(' Dumping library: {}'.format(library)) for relpath in library.sources_relative_to_source_root(): try: copy_to_chroot(library.target_base, relpath, self._builder.add_source) @@ -129,11 +129,11 @@ def copy_to_chroot(base, path, add_function): raise def _dump_requirement(self, req): - self.debug(' Dumping requirement: %s' % req) + self.debug(' Dumping requirement: {}'.format(req)) self._builder.add_requirement(req) def _dump_distribution(self, dist): - self.debug(' Dumping distribution: .../%s' % os.path.basename(dist.location)) + self.debug(' Dumping distribution: .../{}'.format(os.path.basename(dist.location))) self._builder.add_distribution(dist) def _generate_requirement(self, library, builder_cls): @@ -171,7 +171,7 @@ def add_dep(trg): return children def dump(self): - self.debug('Building chroot for %s:' % self._targets) + self.debug('Building chroot for {}:'.format(self._targets)) targets = self.resolve(self._targets) for lib in targets['libraries'] | targets['binaries']: @@ -199,7 +199,7 @@ def dump(self): for req in reqs_from_libraries | generated_reqs | self._extra_requirements: if not req.should_build(self._interpreter.python, Platform.current()): - self.debug('Skipping %s based upon version filter' % req) + self.debug('Skipping {} based upon version filter'.format(req)) continue reqs_to_build.add(req) self._dump_requirement(req.requirement) diff --git a/src/python/pants/backend/python/python_egg.py b/src/python/pants/backend/python/python_egg.py index a8624c5b271..9bbc92f37b6 100644 --- a/src/python/pants/backend/python/python_egg.py +++ b/src/python/pants/backend/python/python_egg.py @@ -40,6 +40,6 @@ def PythonEgg(glob, name=None): requirements.add(dist.as_requirement()) if len(requirements) > 1: - raise ValueError('Got multiple egg versions! => %s' % requirements) + raise ValueError('Got multiple egg versions! 
=> {}'.format(requirements)) return PythonRequirement(str(requirements.pop()), name=name) diff --git a/src/python/pants/backend/python/python_requirement.py b/src/python/pants/backend/python/python_requirement.py index 1373dbf72b3..b663397a247 100644 --- a/src/python/pants/backend/python/python_requirement.py +++ b/src/python/pants/backend/python/python_requirement.py @@ -84,4 +84,4 @@ def cache_key(self): return str(self._requirement) def __repr__(self): - return 'PythonRequirement(%s)' % self._requirement + return 'PythonRequirement({})'.format(self._requirement) diff --git a/src/python/pants/backend/python/resolver.py b/src/python/pants/backend/python/resolver.py index b9939268574..5457b221d1e 100644 --- a/src/python/pants/backend/python/resolver.py +++ b/src/python/pants/backend/python/resolver.py @@ -48,7 +48,7 @@ def resolve_multi(python_setup, distributions = dict() interpreter = interpreter or PythonInterpreter.get() if not isinstance(interpreter, PythonInterpreter): - raise TypeError('Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter)) + raise TypeError('Expected interpreter to be a PythonInterpreter, got {}'.format(type(interpreter))) cache = os.path.join(python_setup.scratch_dir, 'eggs') platforms = get_platforms(platforms or python_setup.platforms) diff --git a/src/python/pants/backend/python/targets/python_binary.py b/src/python/pants/backend/python/targets/python_binary.py index 17ed2528427..34c9fb4dba9 100644 --- a/src/python/pants/backend/python/targets/python_binary.py +++ b/src/python/pants/backend/python/targets/python_binary.py @@ -91,7 +91,7 @@ def __init__(self, source_entry_point = self._translate_to_entry_point(entry_source) if entry_point_module != source_entry_point: raise TargetDefinitionException(self, - 'Specified both source and entry_point but they do not agree: %s vs %s' % ( + 'Specified both source and entry_point but they do not agree: {} vs {}'.format( source_entry_point, entry_point_module)) @property diff --git a/src/python/pants/backend/python/targets/python_target.py b/src/python/pants/backend/python/targets/python_target.py index db19f753039..f9d25f5cb7a 100644 --- a/src/python/pants/backend/python/targets/python_target.py +++ b/src/python/pants/backend/python/targets/python_target.py @@ -72,8 +72,8 @@ def __init__(self, if provides and not isinstance(provides, PythonArtifact): raise TargetDefinitionException(self, - "Target must provide a valid pants setup_py object. Received a '%s' object instead." % - provides.__class__.__name__) + "Target must provide a valid pants setup_py object. 
Received a '{}' object instead.".format( + provides.__class__.__name__)) self._provides = provides @@ -118,7 +118,7 @@ def resources(self): def get_target(spec): tgt = self._build_graph.get_target_from_spec(spec) if tgt is None: - raise TargetDefinitionException(self, 'No such resource target: %s' % spec) + raise TargetDefinitionException(self, 'No such resource target: {}'.format(spec)) return tgt resource_targets.extend(map(get_target, self._resource_target_specs)) diff --git a/src/python/pants/backend/python/tasks/pytest_run.py b/src/python/pants/backend/python/tasks/pytest_run.py index 88026e24eaa..1a3758380aa 100644 --- a/src/python/pants/backend/python/tasks/pytest_run.py +++ b/src/python/pants/backend/python/tasks/pytest_run.py @@ -134,7 +134,7 @@ def _maybe_emit_junit_xml(self, targets): xml_base = os.path.realpath(xml_base) xml_path = os.path.join(xml_base, Target.maybe_readable_identify(targets) + '.xml') safe_mkdir(os.path.dirname(xml_path)) - args.append('--junitxml=%s' % xml_path) + args.append('--junitxml={}'.format(xml_path)) yield args DEFAULT_COVERAGE_CONFIG = dedent(b""" diff --git a/src/python/pants/backend/python/tasks/python_binary_create.py b/src/python/pants/backend/python/tasks/python_binary_create.py index b567da8b0e5..ea9c80b99fc 100644 --- a/src/python/pants/backend/python/tasks/python_binary_create.py +++ b/src/python/pants/backend/python/tasks/python_binary_create.py @@ -31,7 +31,7 @@ def execute(self): name = binary.name if name in names: raise TaskError('Cannot build two binaries with the same name in a single invocation. ' - '%s and %s both have the name %s.' % (binary, names[name], name)) + '{} and {} both have the name {}.'.format(binary, names[name], name)) names[name] = binary for binary in binaries: @@ -49,5 +49,5 @@ def create_binary(self, binary): pexinfo.build_properties = build_properties with self.temporary_chroot(interpreter=interpreter, pex_info=pexinfo, targets=[binary], platforms=binary.platforms) as chroot: - pex_path = os.path.join(self._distdir, '%s.pex' % binary.name) + pex_path = os.path.join(self._distdir, '{}.pex'.format(binary.name)) chroot.builder.build(pex_path) diff --git a/src/python/pants/backend/python/tasks/python_task.py b/src/python/pants/backend/python/tasks/python_task.py index a2f22ad3f62..cf750812649 100644 --- a/src/python/pants/backend/python/tasks/python_task.py +++ b/src/python/pants/backend/python/tasks/python_task.py @@ -66,9 +66,9 @@ def select_interpreter_for_targets(self, targets): unique_compatibilities = set(tuple(t.compatibility) for t in targets_with_compatibilities) unique_compatibilities_strs = [','.join(x) for x in unique_compatibilities if x] targets_with_compatibilities_strs = [str(t) for t in targets_with_compatibilities] - raise TaskError('Unable to detect a suitable interpreter for compatibilities: %s ' - '(Conflicting targets: %s)' % (' && '.join(unique_compatibilities_strs), - ', '.join(targets_with_compatibilities_strs))) + raise TaskError('Unable to detect a suitable interpreter for compatibilities: {} ' + '(Conflicting targets: {})'.format(' && '.join(unique_compatibilities_strs), + ', '.join(targets_with_compatibilities_strs))) # Return the lowest compatible interpreter. 
return self.interpreter_cache.select_interpreter(allowed_interpreters)[0] @@ -80,7 +80,7 @@ def select_interpreter(self, filters): if len(interpreters) != 1: raise TaskError('Unable to detect a suitable interpreter.') interpreter = interpreters[0] - self.context.log.debug('Selected %s' % interpreter) + self.context.log.debug('Selected {}'.format(interpreter)) return interpreter @contextmanager diff --git a/src/python/pants/backend/python/tasks/setup_py.py b/src/python/pants/backend/python/tasks/setup_py.py index 125ec946cee..b5c20828008 100644 --- a/src/python/pants/backend/python/tasks/setup_py.py +++ b/src/python/pants/backend/python/tasks/setup_py.py @@ -36,12 +36,12 @@ SETUP_BOILERPLATE = """ # DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS -# Target: %(setup_target)s +# Target: {setup_target} from setuptools import setup setup(** -%(setup_dict)s +{setup_dict} ) """ @@ -393,9 +393,9 @@ def iter_files(): if module not in packages: # TODO(wickman) Consider changing this to a full-on error as it # could indicate bad BUILD hygiene. - # raise cls.UndefinedSource('%s is source but does not belong to a package!' % filename) + # raise cls.UndefinedSource('{} is source but does not belong to a package!'.format(filename)) if log: - log.warn('%s is source but does not belong to a package.' % real_filename) + log.warn('{} is source but does not belong to a package.'.format(real_filename)) else: continue submodule = cls.nearest_subpackage(module, packages) @@ -434,7 +434,7 @@ def iter_generated_sources(self, target): break else: raise TypeError( - 'iter_generated_sources could not find suitable code generator for %s' % type(target)) + 'iter_generated_sources could not find suitable code generator for {}'.format(type(target))) builder = builder_cls(target, self._root, self.context.options) builder.generate() @@ -510,7 +510,7 @@ def write_setup(self, root_target, reduced_dependencies, chroot): if 'console_scripts' not in setup_keywords['entry_points']: setup_keywords['entry_points']['console_scripts'] = [] setup_keywords['entry_points']['console_scripts'].append( - '%s = %s' % (binary_name, entry_point)) + '{} = {}'.format(binary_name, entry_point)) # From http://stackoverflow.com/a/13105359 def convert(input): @@ -543,10 +543,10 @@ def convert(input): # >>> # # For more information, see http://bugs.python.org/issue13943 - chroot.write(SETUP_BOILERPLATE % { - 'setup_dict': pprint.pformat(convert(setup_keywords), indent=4), - 'setup_target': repr(root_target) - }, 'setup.py') + chroot.write(SETUP_BOILERPLATE.format( + setup_dict=pprint.pformat(convert(setup_keywords), indent=4), + setup_target=repr(root_target) + ), 'setup.py') # make sure that setup.py is included chroot.write('include *.py'.encode('utf8'), 'MANIFEST.in') @@ -557,7 +557,7 @@ def create_setup_py(self, target, dist_dir): reduced_deps = dependency_calculator.reduced_dependencies(target) self.write_contents(target, reduced_deps, chroot) self.write_setup(target, reduced_deps, chroot) - target_base = '%s-%s' % (target.provides.name, target.provides.version) + target_base = '{}-{}'.format(target.provides.name, target.provides.version) setup_dir = os.path.join(dist_dir, target_base) safe_rmtree(setup_dir) shutil.move(chroot.path(), setup_dir) diff --git a/src/python/pants/backend/python/thrift_builder.py b/src/python/pants/backend/python/thrift_builder.py index 4b291bb8980..4019868d479 100644 --- a/src/python/pants/backend/python/thrift_builder.py +++ b/src/python/pants/backend/python/thrift_builder.py @@ -73,7 +73,7 @@ def 
collect_sources(target): for src in copied_sources: if not self._run_thrift(src): - raise PythonThriftBuilder.CodeGenerationException("Could not generate .py from %s!" % src) + raise PythonThriftBuilder.CodeGenerationException("Could not generate .py from {}!".format(src)) def _run_thrift(self, source): args = [ @@ -124,4 +124,4 @@ def generate(self): pass if not self.created_packages: - raise self.CodeGenerationException('No Thrift structures declared in %s!' % self.target) + raise self.CodeGenerationException('No Thrift structures declared in {}!'.format(self.target)) diff --git a/src/python/pants/base/build_environment.py b/src/python/pants/base/build_environment.py index 516802d38bc..a6996fbc4d4 100644 --- a/src/python/pants/base/build_environment.py +++ b/src/python/pants/base/build_environment.py @@ -69,10 +69,10 @@ def get_scm(): if worktree and os.path.isdir(worktree): git = Git(worktree=worktree) try: - logger.info('Detected git repository at %s on branch %s' % (worktree, git.branch_name)) + logger.info('Detected git repository at {} on branch {}'.format(worktree, git.branch_name)) set_scm(git) except git.LocalException as e: - logger.info('Failed to load git repository at %s: %s' % (worktree, e)) + logger.info('Failed to load git repository at {}: {}'.format(worktree, e)) return _SCM @@ -80,6 +80,6 @@ def set_scm(scm): """Sets the pants Scm.""" if scm is not None: if not isinstance(scm, Scm): - raise ValueError('The scm must be an instance of Scm, given %s' % scm) + raise ValueError('The scm must be an instance of Scm, given {}'.format(scm)) global _SCM _SCM = scm diff --git a/src/python/pants/base/build_graph.py b/src/python/pants/base/build_graph.py index 711cdea7bc1..3df356cd453 100644 --- a/src/python/pants/base/build_graph.py +++ b/src/python/pants/base/build_graph.py @@ -385,7 +385,7 @@ def _target_addressable_to_target(self, address, addressable): class CycleException(Exception): """Thrown when a circular dependency is detected.""" def __init__(self, cycle): - Exception.__init__(self, 'Cycle detected:\n\t%s' % ( + Exception.__init__(self, 'Cycle detected:\n\t{}'.format( ' ->\n\t'.join(target.address.spec for target in cycle) )) diff --git a/src/python/pants/base/build_root.py b/src/python/pants/base/build_root.py index 7b71478c022..136a2ac8feb 100644 --- a/src/python/pants/base/build_root.py +++ b/src/python/pants/base/build_root.py @@ -47,7 +47,7 @@ def path(self, root_dir): """Manually establishes the build root for the current workspace.""" path = os.path.realpath(root_dir) if not os.path.exists(path): - raise ValueError('Build root does not exist: %s' % root_dir) + raise ValueError('Build root does not exist: {}'.format(root_dir)) self._root_dir = path def reset(self): @@ -55,7 +55,7 @@ def reset(self): self._root_dir = None def __str__(self): - return 'BuildRoot(%s)' % self._root_dir + return 'BuildRoot({})'.format(self._root_dir) @contextmanager def temporary(self, path): diff --git a/src/python/pants/base/cache_manager.py b/src/python/pants/base/cache_manager.py index 245f6ed1f6f..a3fc211c67c 100644 --- a/src/python/pants/base/cache_manager.py +++ b/src/python/pants/base/cache_manager.py @@ -38,8 +38,8 @@ def from_versioned_targets(versioned_targets): # feels hacky; see if there's a cleaner way for callers to handle awareness of the CacheManager. 
for versioned_target in versioned_targets: if versioned_target._cache_manager != cache_manager: - raise ValueError("Attempting to combine versioned targets %s and %s with different" - " CacheManager instances: %s and %s" % (first_target, versioned_target, + raise ValueError("Attempting to combine versioned targets {} and {} with different" + " CacheManager instances: {} and {}".format(first_target, versioned_target, cache_manager, versioned_target._cache_manager)) return VersionedTargetSet(cache_manager, versioned_targets) @@ -61,8 +61,8 @@ def force_invalidate(self): self._cache_manager.force_invalidate(self) def __repr__(self): - return 'VTS(%s, %s)' % (','.join(target.id for target in self.targets), - 'valid' if self.valid else 'invalid') + return 'VTS({}, {})'.format(','.join(target.id for target in self.targets), + 'valid' if self.valid else 'invalid') class VersionedTarget(VersionedTargetSet): @@ -71,7 +71,7 @@ class VersionedTarget(VersionedTargetSet): """ def __init__(self, cache_manager, target, cache_key): if not isinstance(target, Target): - raise ValueError("The target %s must be an instance of Target but is not." % target.id) + raise ValueError("The target {} must be an instance of Target but is not.".format(target.id)) self.target = target self.cache_key = cache_key @@ -265,7 +265,7 @@ def _key_for(self, target): # This is a catch-all for problems we haven't caught up with and given a better diagnostic. # TODO(Eric Ayers): If you see this exception, add a fix to catch the problem earlier. exc_info = sys.exc_info() - new_exception = self.CacheValidationError("Problem validating target %s in %s: %s" % - (target.id, target.address.spec_path, e)) + new_exception = self.CacheValidationError("Problem validating target {} in {}: {}" + .format(target.id, target.address.spec_path, e)) raise self.CacheValidationError, new_exception, exc_info[2] diff --git a/src/python/pants/base/config.py b/src/python/pants/base/config.py index e3c841534ba..22886eb561a 100644 --- a/src/python/pants/base/config.py +++ b/src/python/pants/base/config.py @@ -190,7 +190,7 @@ def get_required(self, section, option, type=str): val = self.get(section, option, type=type) # Empty str catches blank options. If blank entries are ok, use get(..., default='') instead. if val is None or val == '': - raise Config.ConfigError('Required option %s.%s is not defined.' 
% (section, option)) + raise Config.ConfigError('Required option {}.{} is not defined.'.format(section, option)) return val @staticmethod diff --git a/src/python/pants/base/exceptions.py b/src/python/pants/base/exceptions.py index a31091ea34c..e32c8f62551 100644 --- a/src/python/pants/base/exceptions.py +++ b/src/python/pants/base/exceptions.py @@ -27,7 +27,7 @@ def __init__(self, target, msg): :param target: the target in question :param string msg: a description of the target misconfiguration """ - super(Exception, self).__init__('Invalid target %s: %s' % (target, msg)) + super(Exception, self).__init__('Invalid target {}: {}'.format(target, msg)) class BuildConfigurationError(Exception): diff --git a/src/python/pants/base/generator.py b/src/python/pants/base/generator.py index 0db1d302a5f..e242579207e 100644 --- a/src/python/pants/base/generator.py +++ b/src/python/pants/base/generator.py @@ -29,7 +29,7 @@ def extend(self, **kwargs): return TemplateData(**props) def __setattr__(self, key, value): - raise AttributeError("Mutation not allowed - use %s.extend(%s = %s)" % (self, key, value)) + raise AttributeError("Mutation not allowed - use {}.extend({} = {})".format(self, key, value)) def __getattr__(self, key): if key in self: @@ -38,7 +38,7 @@ def __getattr__(self, key): return object.__getattribute__(self, key) def __str__(self): - return 'TemplateData(%s)' % pprint.pformat(self) + return 'TemplateData({})'.format(pprint.pformat(self)) class Generator(object): diff --git a/src/python/pants/base/mustache.py b/src/python/pants/base/mustache.py index 36ca943af41..917de64ff07 100644 --- a/src/python/pants/base/mustache.py +++ b/src/python/pants/base/mustache.py @@ -64,7 +64,7 @@ def render_name(self, template_name, args): if template == None: raise self.MustacheError( - "could not find template %s in package %s" % (path, self._package_name)) + "could not find template {} in package {}".format(path, self._package_name)) return self.render(template, args) diff --git a/src/python/pants/base/revision.py b/src/python/pants/base/revision.py index 06d795b7770..6653805cd4e 100644 --- a/src/python/pants/base/revision.py +++ b/src/python/pants/base/revision.py @@ -59,7 +59,7 @@ def parse_components(value): components.extend(parse_components(build)) return cls(*components) except ValueError: - raise cls.BadRevision("Failed to parse '%s' as a semantic version number" % rev) + raise cls.BadRevision("Failed to parse '{}' as a semantic version number".format(rev)) @classmethod def lenient(cls, rev): @@ -86,4 +86,4 @@ def __cmp__(self, other): return 0 def __repr__(self): - return '%s(%s)' % (self.__class__.__name__, ', '.join(map(repr, self._components))) + return '{}({})'.format(self.__class__.__name__, ', '.join(map(repr, self._components))) diff --git a/src/python/pants/base/run_info.py b/src/python/pants/base/run_info.py index 06b23d7fccd..243e3d70bbf 100644 --- a/src/python/pants/base/run_info.py +++ b/src/python/pants/base/run_info.py @@ -58,7 +58,7 @@ def add_infos(self, *keyvals): val = str(val).strip() if ':' in key: raise Exception, 'info key must not contain a colon' - outfile.write('%s: %s\n' % (key, val)) + outfile.write('{}: {}\n'.format(key, val)) self._info[key] = val return infos diff --git a/src/python/pants/base/source_root.py b/src/python/pants/base/source_root.py index 681313bfe62..12bb68fa91e 100644 --- a/src/python/pants/base/source_root.py +++ b/src/python/pants/base/source_root.py @@ -295,7 +295,7 @@ def _relative_to_buildroot(cls, path): else: abspath = 
os.path.normpath(os.path.join(buildroot, path)) if not abspath.startswith(buildroot): - raise ValueError('Source root %s is not under the build root %s' % (abspath, buildroot)) + raise ValueError('Source root {} is not under the build root {}'.format(abspath, buildroot)) return os.path.relpath(abspath, buildroot) @classmethod diff --git a/src/python/pants/base/target.py b/src/python/pants/base/target.py index a359bb920c1..b72dd65c5bd 100644 --- a/src/python/pants/base/target.py +++ b/src/python/pants/base/target.py @@ -407,9 +407,9 @@ def walk(self, work, predicate=None): as its single argument and returns True if the target should passed to ``work``. """ if not callable(work): - raise ValueError('work must be callable but was %s' % work) + raise ValueError('work must be callable but was {}'.format(work)) if predicate and not callable(predicate): - raise ValueError('predicate must be callable but was %s' % predicate) + raise ValueError('predicate must be callable but was {}'.format(predicate)) self._build_graph.walk_transitive_dependency_graph([self.address], work, predicate) def closure(self): @@ -442,7 +442,7 @@ def __ne__(self, other): def __repr__(self): addr = self.address if hasattr(self, 'address') else 'address not yet set' - return "%s(%s)" % (type(self).__name__, addr) + return "{}({})".format(type(self).__name__, addr) def create_sources_field(self, sources, sources_rel_path, address=None, build_graph=None): """Factory method to create a SourcesField appropriate for the type of the sources object. diff --git a/src/python/pants/base/validation.py b/src/python/pants/base/validation.py index 55765ae4216..c210983dc79 100644 --- a/src/python/pants/base/validation.py +++ b/src/python/pants/base/validation.py @@ -26,14 +26,15 @@ def assert_list(obj, expected_type=string_types, can_be_none=True, default=(), if can_be_none: val = list(default) else: - raise raise_type('Expected an object of acceptable type %s, received None and can_be_none is False' % allowable) + raise raise_type('Expected an object of acceptable type {}, received None and can_be_none is False' + .format(allowable)) if [typ for typ in allowable if isinstance(val, typ)]: lst = list(val) for e in lst: if not isinstance(e, expected_type): - raise raise_type('Expected a list containing values of type %s, instead got a value %s of %s' % - (expected_type, e, e.__class__)) + raise raise_type('Expected a list containing values of type {}, instead got a value {} of {}' + .format(expected_type, e, e.__class__)) return lst else: - raise raise_type('Expected an object of acceptable type %s, received %s instead' % (allowable, val)) + raise raise_type('Expected an object of acceptable type {}, received {} instead'.format(allowable, val)) diff --git a/src/python/pants/base/worker_pool.py b/src/python/pants/base/worker_pool.py index b3229fa5539..292a3102a60 100644 --- a/src/python/pants/base/worker_pool.py +++ b/src/python/pants/base/worker_pool.py @@ -94,7 +94,7 @@ def done(): def error(e): done() - self._run_tracker.log(Report.ERROR, '%s' % e) + self._run_tracker.log(Report.ERROR, '{!s}'.format(e)) # We filter out Nones defensively. There shouldn't be any, but if a bug causes one, # Pants might hang indefinitely without this filtering. @@ -112,7 +112,7 @@ def submit_next(): submit_next() except Exception as e: # Handles errors in the submission code. 
done() - self._run_tracker.log(Report.ERROR, '%s' % e) + self._run_tracker.log(Report.ERROR, '{!s}'.format(e)) raise def submit_work_and_wait(self, work, workunit_parent=None): diff --git a/src/python/pants/base/workunit.py b/src/python/pants/base/workunit.py index c321e54bbc3..8e5ba286052 100644 --- a/src/python/pants/base/workunit.py +++ b/src/python/pants/base/workunit.py @@ -125,7 +125,7 @@ def set_outcome(self, outcome): those of its subunits. The right thing happens: The outcome of a work unit is the worst outcome of any of its subunits and any outcome set on it directly.""" if outcome not in range(0, 5): - raise Exception('Invalid outcome: %s' % outcome) + raise Exception('Invalid outcome: {}'.format(outcome)) if outcome < self._outcome: self._outcome = outcome @@ -137,7 +137,7 @@ def output(self, name): """Returns the output buffer for the specified output name (e.g., 'stdout'), creating it if necessary.""" m = WorkUnit._valid_name_re.match(name) if not m or m.group(0) != name: - raise Exception('Invalid output name: %s' % name) + raise Exception('Invalid output name: {}'.format(name)) if name not in self._outputs: workunit_name = re.sub(r'\W', '_', self.name) path = os.path.join(self.run_info_dir, @@ -169,7 +169,7 @@ def start_time_string(self): def start_delta_string(self): """A convenient string representation of how long after the run started we started.""" delta = int(self.start_time) - int(self.root().start_time) - return '%02d:%02d' % (delta / 60, delta % 60) + return '{:02}:{:02}'.format(int(delta / 60), delta % 60) def root(self): ret = self diff --git a/src/python/pants/bin/goal_runner.py b/src/python/pants/bin/goal_runner.py index 1700227450b..265d48b5e14 100644 --- a/src/python/pants/bin/goal_runner.py +++ b/src/python/pants/bin/goal_runner.py @@ -84,7 +84,7 @@ def setup(self): self.run_tracker.start(report) url = self.run_tracker.run_info.get_info('report_url') if url: - self.run_tracker.log(Report.INFO, 'See a report at: %s' % url) + self.run_tracker.log(Report.INFO, 'See a report at: {}'.format(url)) else: self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)') @@ -223,7 +223,7 @@ def is_quiet_task(): unknown.append(goal) if unknown: - context.log.error('Unknown goal(s): %s\n' % ' '.join(goal.name for goal in unknown)) + context.log.error('Unknown goal(s): {}\n'.format(' '.join(goal.name for goal in unknown))) return 1 engine = RoundEngine() diff --git a/src/python/pants/bin/pants_exe.py b/src/python/pants/bin/pants_exe.py index 34abfdd9dd2..e1b0f04727b 100644 --- a/src/python/pants/bin/pants_exe.py +++ b/src/python/pants/bin/pants_exe.py @@ -42,7 +42,7 @@ def unhandled_exception_hook(self, exception_class, exception, tb): if self._is_print_backtrace: msg = '\nException caught:\n' + ''.join(self._format_tb(tb)) if str(exception): - msg += '\nException message: %s\n' % str(exception) + msg += '\nException message: {!s}\n'.format(exception) else: msg += '\nNo specific exception message.\n' # TODO(Jin Feng) Always output the unhandled exception details into a log file. 
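(Illustrative note, not part of the patch; values below are made up.) The str.format conversions in the hunks above are drop-in replacements for the old %-interpolation: '{}' matches '%s', '{!s}'/'{!r}' force str()/repr() just as '%s'/'%r' did, and '{:02}' keeps the '%02d' zero-padding. A quick interpreter sanity check:
>>> 'See a report at: {}'.format('http://localhost:7777')
'See a report at: http://localhost:7777'
>>> '\nException message: {!s}\n'.format(ValueError('boom'))
'\nException message: boom\n'
>>> '{:02}:{:02}'.format(3, 7)   # same zero-padding as '%02d:%02d'
'03:07'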
@@ -66,7 +66,7 @@ def _run(exiter): root_dir = get_buildroot() if not os.path.exists(root_dir): - exiter.exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' % root_dir) + exiter.exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: {}'.format(root_dir)) goal_runner = GoalRunner(root_dir) goal_runner.setup() diff --git a/src/python/pants/binary_util.py b/src/python/pants/binary_util.py index 0e5ff587aad..6e0561e9e3f 100644 --- a/src/python/pants/binary_util.py +++ b/src/python/pants/binary_util.py @@ -198,7 +198,7 @@ def safe_args(args, def create_argfile(fp): fp.write(delimiter.join(args)) fp.close() - return [quoter(fp.name) if quoter else '@%s' % fp.name] + return [quoter(fp.name) if quoter else '@{}'.format(fp.name)] if argfile: try: @@ -221,8 +221,8 @@ def _mac_open(files): def _linux_open(files): cmd = "xdg-open" if not _cmd_exists(cmd): - raise TaskError("The program '%s' isn't in your PATH. Please install and re-run this " - "goal." % cmd) + raise TaskError("The program '{}' isn't in your PATH. Please install and re-run this " + "goal.".format(cmd)) for f in list(files): subprocess.call([cmd, f]) diff --git a/src/python/pants/cache/cache_setup.py b/src/python/pants/cache/cache_setup.py index a28644e32db..8b00be50ed6 100644 --- a/src/python/pants/cache/cache_setup.py +++ b/src/python/pants/cache/cache_setup.py @@ -34,8 +34,8 @@ def select_best_url(spec, pinger, log): return urls[0] # No need to ping if we only have one option anyway. netlocs = map(lambda url: urlparse.urlparse(url)[1], urls) pingtimes = pinger.pings(netlocs) # List of pairs (host, time in ms). - log.debug('Artifact cache server ping times: %s' % - ', '.join(['%s: %3f secs' % p for p in pingtimes])) + log.debug('Artifact cache server ping times: {}' + .format(', '.join(['{}: {:3f} secs'.format(*p) for p in pingtimes]))) argmin = min(range(len(pingtimes)), key=lambda i: pingtimes[i][1]) best_url = urls[argmin] if pingtimes[argmin][1] == Pinger.UNREACHABLE: diff --git a/src/python/pants/engine/round_engine.py b/src/python/pants/engine/round_engine.py index 3f9d39be164..7715828e838 100644 --- a/src/python/pants/engine/round_engine.py +++ b/src/python/pants/engine/round_engine.py @@ -38,7 +38,7 @@ def attempt(self, explain): for name, task_type in reversed(self._tasktypes_by_name.items()): with self._context.new_workunit(name=name, labels=[WorkUnit.TASK]): if explain: - self._context.log.debug('Skipping execution of %s in explain mode' % name) + self._context.log.debug('Skipping execution of {} in explain mode'.format(name)) else: task_workdir = os.path.join(goal_workdir, name) task = task_type(self._context, task_workdir) @@ -47,7 +47,7 @@ def attempt(self, explain): if explain: reversed_tasktypes_by_name = reversed(self._tasktypes_by_name.items()) goal_to_task = ', '.join( - '%s->%s' % (name, task_type.__name__) for name, task_type in reversed_tasktypes_by_name) + '{}->{}'.format(name, task_type.__name__) for name, task_type in reversed_tasktypes_by_name) print('{goal} [{goal_to_task}]'.format(goal=self._goal.name, goal_to_task=goal_to_task)) @@ -199,7 +199,7 @@ def attempt(self, context, goals): explain = context.options.for_global_scope().explain if explain: - print('Goal Execution Order:\n\n%s\n' % execution_goals) + print('Goal Execution Order:\n\n{}\n'.format(execution_goals)) print('Goal [TaskRegistrar->Task] Order:\n') serialized_goals_executors = [ge for ge in goal_executors if ge.goal.serialize] diff --git a/src/python/pants/fs/archive.py b/src/python/pants/fs/archive.py index
da1b6ff56c0..98312acce03 100644 --- a/src/python/pants/fs/archive.py +++ b/src/python/pants/fs/archive.py @@ -48,7 +48,7 @@ def __init__(self, mode, extension): def create(self, basedir, outdir, name, prefix=None): basedir = ensure_text(basedir) - tarpath = os.path.join(outdir, '%s.%s' % (ensure_text(name), self.extension)) + tarpath = os.path.join(outdir, '{}.{}'.format(ensure_text(name), self.extension)) with open_tar(tarpath, self.mode, dereference=True, errorlevel=1) as tar: tar.add(basedir, arcname=prefix or '.') return tarpath @@ -70,7 +70,7 @@ def extract(cls, path, outdir, filter_func=None): for name in archive_file.namelist(): # While we're at it, we also perform this safety test. if name.startswith(b'/') or name.startswith(b'..'): - raise ValueError('Zip file contains unsafe path: %s' % name) + raise ValueError('Zip file contains unsafe path: {}'.format(name)) # Ignore directories. extract() will create parent dirs as needed. # OS X's python 2.6.1 has a bug in zipfile that makes it unzip directories as regular files. # This method should work on for python 2.6-3.x. @@ -84,7 +84,7 @@ def __init__(self, compression): self.compression = compression def create(self, basedir, outdir, name, prefix=None): - zippath = os.path.join(outdir, '%s.zip' % name) + zippath = os.path.join(outdir, '{}.zip'.format(name)) with open_zip(zippath, 'w', compression=ZIP_DEFLATED) as zip: for root, _, files in safe_walk(basedir): root = ensure_text(root) @@ -119,5 +119,5 @@ def archiver(typename): """ archiver = _ARCHIVER_BY_TYPE.get(typename) if not archiver: - raise ValueError('No archiver registered for %r' % typename) + raise ValueError('No archiver registered for {!r}'.format(typename)) return archiver diff --git a/src/python/pants/fs/fs.py b/src/python/pants/fs/fs.py index 8e90dcfb33a..ee973b3aa1d 100644 --- a/src/python/pants/fs/fs.py +++ b/src/python/pants/fs/fs.py @@ -33,7 +33,7 @@ def safe_filename(name, extension=None, digest=None, max_length=_MAX_FILENAME_LE max_length: the maximum desired file name length """ if os.path.basename(name) != name: - raise ValueError('Name must be a filename, handed a path: %s' % name) + raise ValueError('Name must be a filename, handed a path: {}'.format(name)) ext = extension or '' filename = name + ext @@ -44,8 +44,8 @@ def safe_filename(name, extension=None, digest=None, max_length=_MAX_FILENAME_LE digest.update(name) safe_name = digest.hexdigest() + ext if len(safe_name) > max_length: - raise ValueError('Digest %s failed to produce a filename <= %d ' - 'characters for %s - got %s' % (digest, max_length, filename, safe_name)) + raise ValueError('Digest {} failed to produce a filename <= {} ' + 'characters for {} - got {}'.format(digest, max_length, filename, safe_name)) return safe_name diff --git a/src/python/pants/goal/aggregated_timings.py b/src/python/pants/goal/aggregated_timings.py index 92772cbb229..9629f76f7a8 100644 --- a/src/python/pants/goal/aggregated_timings.py +++ b/src/python/pants/goal/aggregated_timings.py @@ -35,7 +35,7 @@ def add_timing(self, label, secs, is_tool=False): if self._path and os.path.exists(os.path.dirname(self._path)): with open(self._path, 'w') as f: for x in self.get_all(): - f.write('%(label)s: %(timing)s\n' % x) + f.write('{label}: {timing}\n'.format(**x)) def get_all(self): """Returns all the timings, sorted in decreasing order. 
diff --git a/src/python/pants/goal/artifact_cache_stats.py b/src/python/pants/goal/artifact_cache_stats.py index 9d978e56596..22066784a07 100644 --- a/src/python/pants/goal/artifact_cache_stats.py +++ b/src/python/pants/goal/artifact_cache_stats.py @@ -49,6 +49,6 @@ def _add_stat(self, hit_or_miss, cache_name, tgt): self.stats_per_cache[cache_name][hit_or_miss].append(tgt.address.reference()) if self._dir and os.path.exists(self._dir): # Check existence in case of a clean-all. suffix = 'misses' if hit_or_miss else 'hits' - with open(os.path.join(self._dir, '%s.%s' % (cache_name, suffix)), 'a') as f: + with open(os.path.join(self._dir, '{}.{}'.format(cache_name, suffix)), 'a') as f: f.write(tgt.address.reference()) f.write('\n') diff --git a/src/python/pants/goal/context.py b/src/python/pants/goal/context.py index 3a712805b18..f8dee39788c 100644 --- a/src/python/pants/goal/context.py +++ b/src/python/pants/goal/context.py @@ -155,7 +155,7 @@ def spec_excludes(self): def __str__(self): ident = Target.identify(self.targets()) - return 'Context(id:%s, targets:%s)' % (ident, self.targets()) + return 'Context(id:{}, targets:{})'.format(ident, self.targets()) def submit_foreground_work_and_wait(self, work, workunit_parent=None): """Returns the pool to which tasks can submit foreground (blocking) work.""" diff --git a/src/python/pants/goal/initialize_reporting.py b/src/python/pants/goal/initialize_reporting.py index 700edab8632..a157d32f7b1 100644 --- a/src/python/pants/goal/initialize_reporting.py +++ b/src/python/pants/goal/initialize_reporting.py @@ -68,7 +68,7 @@ def initial_reporting(config, run_tracker): run_tracker.run_info.add_info('default_report', html_reporter.report_path()) (_, port) = ReportingServerManager.get_current_server_pid_and_port() if port: - run_tracker.run_info.add_info('report_url', 'http://localhost:%d/run/%s' % (port, run_id)) + run_tracker.run_info.add_info('report_url', 'http://localhost:{}/run/{}'.format(port, run_id)) return report @@ -104,7 +104,7 @@ def update_reporting(options, is_quiet_task, run_tracker): # Also write plaintext logs to a file. This is completely separate from the html reports. 
safe_mkdir(options.logdir) run_id = run_tracker.run_info.get_info('id') - outfile = open(os.path.join(options.logdir, '%s.log' % run_id), 'w') + outfile = open(os.path.join(options.logdir, '{}.log'.format(run_id)), 'w') settings = PlainTextReporter.Settings(log_level=log_level, outfile=outfile, color=False, indent=True, timing=True, cache_stats=True) logfile_reporter = PlainTextReporter(run_tracker, settings) diff --git a/src/python/pants/goal/products.py b/src/python/pants/goal/products.py index bd3e9b33e15..e1d86655f34 100644 --- a/src/python/pants/goal/products.py +++ b/src/python/pants/goal/products.py @@ -58,7 +58,7 @@ def __init__(self, root): def add_abs_paths(self, abs_paths): for abs_path in abs_paths: if not abs_path.startswith(self._root): - raise Exception('%s is not under %s' % (abs_path, self._root)) + raise Exception('{} is not under {}'.format(abs_path, self._root)) self._rel_paths.add(os.path.relpath(abs_path, self._root)) def add_rel_paths(self, rel_paths): @@ -203,10 +203,10 @@ def keys_for(self, basedir, product): return keys def __repr__(self): - return 'ProductMapping(%s) {\n %s\n}' % (self.typename, '\n '.join( - '%s => %s\n %s' % (str(target), basedir, outputs) - for target, outputs_by_basedir in self.by_target.items() - for basedir, outputs in outputs_by_basedir.items())) + return 'ProductMapping({}) {{\n {}\n}}'.format(self.typename, '\n '.join( + '{} => {}\n {}'.format(str(target), basedir, outputs) + for target, outputs_by_basedir in self.by_target.items() + for basedir, outputs in outputs_by_basedir.items())) def __bool__(self): return not self.empty() diff --git a/src/python/pants/goal/run_tracker.py b/src/python/pants/goal/run_tracker.py index 155c3750f9b..900e887ddaa 100644 --- a/src/python/pants/goal/run_tracker.py +++ b/src/python/pants/goal/run_tracker.py @@ -81,8 +81,8 @@ def __init__(self, # run_id is safe for use in paths. millis = (self.run_timestamp * 1000) % 1000 - run_id = 'pants_run_%s_%d' % \ - (time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(self.run_timestamp)), millis) + run_id = 'pants_run_{}_{}'.format( + time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(self.run_timestamp)), int(millis)) self.run_info_dir = os.path.join(info_dir, run_id) self.run_info = RunInfo(os.path.join(self.run_info_dir, 'info')) @@ -237,7 +237,7 @@ def upload_stats(self): """Send timing results to URL specified in pants.ini""" def error(msg): # Report aleady closed, so just print error.
- print("WARNING: Failed to upload stats to %s due to %s" % (self.stats_url, msg), file=sys.stderr) + print("WARNING: Failed to upload stats to {} due to {}".format(self.stats_url, msg), file=sys.stderr) if self.stats_url: params = { @@ -257,9 +257,9 @@ def error(msg): http_conn.request('POST', url.path, urllib.urlencode(params), headers) resp = http_conn.getresponse() if resp.status != 200: - error("HTTP error code: %d" % resp.status) + error("HTTP error code: {}".format(resp.status)) except Exception as e: - error("Error: %s" % e) + error("Error: {}".format(e)) _log_levels = [Report.ERROR, Report.ERROR, Report.WARN, Report.INFO, Report.INFO] diff --git a/src/python/pants/ivy/bootstrapper.py b/src/python/pants/ivy/bootstrapper.py index 9afd7ab86fa..2efef4e0b29 100644 --- a/src/python/pants/ivy/bootstrapper.py +++ b/src/python/pants/ivy/bootstrapper.py @@ -54,7 +54,7 @@ class Error(Exception): _DEFAULT_VERSION = '2.3.0' _DEFAULT_URL = ('https://repo1.maven.org/maven2/' 'org/apache/ivy/ivy/' - '%(version)s/ivy-%(version)s.jar' % {'version': _DEFAULT_VERSION}) + '{version}/ivy-{version}.jar'.format(version=_DEFAULT_VERSION)) _INSTANCE = None @@ -196,7 +196,7 @@ def _bootstrap_ivy_classpath(self, workunit_factory, retry=True): digest.update(fp.read()) else: digest.update(self._version_or_ivyxml) - classpath = os.path.join(ivy_bootstrap_dir, '%s.classpath' % digest.hexdigest()) + classpath = os.path.join(ivy_bootstrap_dir, '{}.classpath'.format(digest.hexdigest())) if not os.path.exists(classpath): ivy = self._bootstrap_ivy(os.path.join(ivy_bootstrap_dir, 'bootstrap.jar')) @@ -210,7 +210,7 @@ def _bootstrap_ivy_classpath(self, workunit_factory, retry=True): ivy.execute(args=args, workunit_factory=workunit_factory, workunit_name='ivy-bootstrap') except ivy.Error as e: safe_delete(classpath) - raise self.Error('Failed to bootstrap an ivy classpath! %s' % e) + raise self.Error('Failed to bootstrap an ivy classpath! {}'.format(e)) with open(classpath) as fp: cp = fp.read().strip().split(os.pathsep) @@ -218,7 +218,7 @@ def _bootstrap_ivy_classpath(self, workunit_factory, retry=True): safe_delete(classpath) if retry: return self._bootstrap_ivy_classpath(workunit_factory, retry=False) - raise self.Error('Ivy bootstrapping failed - invalid classpath: %s' % ':'.join(cp)) + raise self.Error('Ivy bootstrapping failed - invalid classpath: {}'.format(':'.join(cp))) return cp def _bootstrap_ivy(self, bootstrap_jar_path): @@ -227,19 +227,19 @@ def _bootstrap_ivy(self, bootstrap_jar_path): fetcher = Fetcher() checksummer = fetcher.ChecksumListener(digest=hashlib.sha1()) try: - logger.info('\nDownloading %s' % self._bootstrap_jar_url) + logger.info('\nDownloading {}'.format(self._bootstrap_jar_url)) # TODO: Capture the stdout of the fetcher, instead of letting it output # to the console directly. fetcher.download(self._bootstrap_jar_url, listener=fetcher.ProgressListener().wrap(checksummer), path_or_fd=bootstrap_jar, timeout_secs=self._timeout_secs) - logger.info('sha1: %s' % checksummer.checksum) + logger.info('sha1: {}'.format(checksummer.checksum)) bootstrap_jar.close() touch(bootstrap_jar_path) shutil.move(bootstrap_jar.name, bootstrap_jar_path) except fetcher.Error as e: - raise self.Error('Problem fetching the ivy bootstrap jar! %s' % e) + raise self.Error('Problem fetching the ivy bootstrap jar! 
{}'.format(e)) return Ivy(bootstrap_jar_path, ivy_settings=self._ivy_settings, diff --git a/src/python/pants/ivy/ivy.py b/src/python/pants/ivy/ivy.py index d1e1e4cf571..1daccf43945 100644 --- a/src/python/pants/ivy/ivy.py +++ b/src/python/pants/ivy/ivy.py @@ -30,13 +30,13 @@ def __init__(self, classpath, ivy_settings=None, ivy_cache_dir=None, extra_jvm_o self._classpath = maybe_list(classpath) self._ivy_settings = ivy_settings if self._ivy_settings and not isinstance(self._ivy_settings, string_types): - raise ValueError('ivy_settings must be a string, given %s of type %s' - % (self._ivy_settings, type(self._ivy_settings))) + raise ValueError('ivy_settings must be a string, given {} of type {}'.format( + self._ivy_settings, type(self._ivy_settings))) self._ivy_cache_dir = ivy_cache_dir if self._ivy_cache_dir and not isinstance(self._ivy_cache_dir, string_types): - raise ValueError('ivy_cache_dir must be a string, given %s of type %s' - % (self._ivy_cache_dir, type(self._ivy_cache_dir))) + raise ValueError('ivy_cache_dir must be a string, given {} of type {}'.format( + self._ivy_cache_dir, type(self._ivy_cache_dir))) self._extra_jvm_options = extra_jvm_options or [] @@ -64,10 +64,10 @@ def execute(self, jvm_options=None, args=None, executor=None, try: result = util.execute_runner(runner, workunit_factory, workunit_name, workunit_labels) if result != 0: - raise self.Error('Ivy command failed with exit code %d%s' - % (result, ': ' + ' '.join(args) if args else '')) + raise self.Error('Ivy command failed with exit code {}{}'.format( + result, ': ' + ' '.join(args) if args else '')) except executor.Error as e: - raise self.Error('Problem executing ivy: %s' % e) + raise self.Error('Problem executing ivy: {}'.format(e)) def runner(self, jvm_options=None, args=None, executor=None): """Creates an ivy commandline client runner for the given args.""" @@ -75,15 +75,15 @@ def runner(self, jvm_options=None, args=None, executor=None): jvm_options = jvm_options or [] executor = executor or SubprocessExecutor() if not isinstance(executor, Executor): - raise ValueError('The executor argument must be an Executor instance, given %s of type %s' - % (executor, type(executor))) + raise ValueError('The executor argument must be an Executor instance, given {} of type {}'.format( + executor, type(executor))) if self._ivy_cache_dir and '-cache' not in args: # TODO(John Sirois): Currently this is a magic property to support hand-crafted in # ivysettings.xml. Ideally we'd support either simple -caches or these hand-crafted cases # instead of just hand-crafted. Clean this up by taking over ivysettings.xml and generating # it from BUILD constructs. 
- jvm_options += ['-Divy.cache.dir=%s' % self._ivy_cache_dir] + jvm_options += ['-Divy.cache.dir={}'.format(self._ivy_cache_dir)] if self._ivy_settings and '-settings' not in args: args = ['-settings', self._ivy_settings] + args diff --git a/src/python/pants/java/distribution/distribution.py b/src/python/pants/java/distribution/distribution.py index 021b1a5174e..db2002fc539 100644 --- a/src/python/pants/java/distribution/distribution.py +++ b/src/python/pants/java/distribution/distribution.py @@ -82,14 +82,14 @@ def search_path(): dist = cls(bin_path=path, minimum_version=minimum_version, maximum_version=maximum_version, jdk=jdk) dist.validate() - logger.debug('Located %s for constraints: minimum_version %s, ' - 'maximum_version %s, jdk %s' % (dist, minimum_version, maximum_version, jdk)) + logger.debug('Located {} for constraints: minimum_version {}, ' + 'maximum_version {}, jdk {}'.format(dist, minimum_version, maximum_version, jdk)) return dist except (ValueError, cls.Error): pass - raise cls.Error('Failed to locate a %s distribution with minimum_version %s, maximum_version %s' - % ('JDK' if jdk else 'JRE', minimum_version, maximum_version)) + raise cls.Error('Failed to locate a {} distribution with minimum_version {}, maximum_version {}'.format( + 'JDK' if jdk else 'JRE', minimum_version, maximum_version)) @staticmethod def _parse_java_version(name, version): @@ -101,7 +101,7 @@ def _parse_java_version(name, version): if isinstance(version, string_types): version = Revision.semver(version.replace('_', '-')) if version and not isinstance(version, Revision): - raise ValueError('%s must be a string or a Revision object, given: %s' % (name, version)) + raise ValueError('{} must be a string or a Revision object, given: {}'.format(name, version)) return version @staticmethod @@ -118,7 +118,7 @@ def __init__(self, bin_path='/usr/bin', minimum_version=None, maximum_version=No """ if not os.path.isdir(bin_path): - raise ValueError('The specified distribution path is invalid: %s' % bin_path) + raise ValueError('The specified distribution path is invalid: {}'.format(bin_path)) self._bin_path = bin_path self._minimum_version = self._parse_java_version("minimum_version", minimum_version) @@ -177,7 +177,7 @@ def binary(self, name): If this distribution has no valid command of the given name raises Distribution.Error. 
""" if not isinstance(name, string_types): - raise ValueError('name must be a binary name, given %s of type %s' % (name, type(name))) + raise ValueError('name must be a binary name, given {} of type {}'.format(name, type(name))) self.validate() return self._validated_executable(name) @@ -194,13 +194,13 @@ def validate(self): if self._minimum_version: version = self._get_version(java) if version < self._minimum_version: - raise self.Error('The java distribution at %s is too old; expecting at least %s and' - ' got %s' % (java, self._minimum_version, version)) + raise self.Error('The java distribution at {} is too old; expecting at least {} and' + ' got {}'.format(java, self._minimum_version, version)) if self._maximum_version: version = self._get_version(java) if version > self._maximum_version: - raise self.Error('The java distribution at %s is too new; expecting no older than' - ' %s and got %s' % (java, self._maximum_version, version)) + raise self.Error('The java distribution at {} is too new; expecting no older than' + ' {} and got {}'.format(java, self._maximum_version, version)) try: self._validated_executable('javac') # Calling purely for the check and cache side effects @@ -224,8 +224,8 @@ def _get_system_properties(self, java): process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() if process.returncode != 0: - raise self.Error('Failed to determine java system properties for %s with %s - exit code' - ' %d: %s' % (java, ' '.join(cmd), process.returncode, stderr)) + raise self.Error('Failed to determine java system properties for {} with {} - exit code' + ' {}: {}'.format(java, ' '.join(cmd), process.returncode, stderr)) props = {} for line in stdout.split(os.linesep): @@ -238,8 +238,8 @@ def _get_system_properties(self, java): def _validate_executable(self, name): exe = os.path.join(self._bin_path, name) if not self._is_executable(exe): - raise self.Error('Failed to locate the %s executable, %s does not appear to be a' - ' valid %s distribution' % (name, self, 'JDK' if self._jdk else 'JRE')) + raise self.Error('Failed to locate the {} executable, {} does not appear to be a' + ' valid {} distribution'.format(name, self, 'JDK' if self._jdk else 'JRE')) return exe def _validated_executable(self, name): @@ -256,5 +256,5 @@ def _valid_executable(self, name): self._validated_binaries[name] = exe def __repr__(self): - return ('Distribution(%r, minimum_version=%r, maximum_version=%r jdk=%r)' - % (self._bin_path, self._minimum_version, self._maximum_version, self._jdk)) + return ('Distribution({!r}, minimum_version={!r}, maximum_version={!r} jdk={!r})'.format( + self._bin_path, self._minimum_version, self._maximum_version, self._jdk)) diff --git a/src/python/pants/java/executor.py b/src/python/pants/java/executor.py index b05760129a0..56ea1735186 100644 --- a/src/python/pants/java/executor.py +++ b/src/python/pants/java/executor.py @@ -31,7 +31,7 @@ class Executor(AbstractClass): def _scrub_args(classpath, main, jvm_options, args, cwd): classpath = maybe_list(classpath) if not isinstance(main, string_types) or not main: - raise ValueError('A non-empty main classname is required, given: %s' % main) + raise ValueError('A non-empty main classname is required, given: {}'.format(main)) jvm_options = maybe_list(jvm_options or ()) args = maybe_list(args or ()) return classpath, main, jvm_options, args, cwd @@ -70,7 +70,7 @@ def __init__(self, distribution=None): """ if distribution: if not isinstance(distribution, Distribution): - 
raise ValueError('A valid distribution is required, given: %s' % distribution) + raise ValueError('A valid distribution is required, given: {}'.format(distribution)) distribution.validate() else: distribution = Distribution.cached() diff --git a/src/python/pants/java/jar/manifest.py b/src/python/pants/java/jar/manifest.py index 00dfb9c1f01..77eb5639a7a 100644 --- a/src/python/pants/java/jar/manifest.py +++ b/src/python/pants/java/jar/manifest.py @@ -25,7 +25,7 @@ def _wrap(text): chunk = fp.read(69) if not chunk: return - yield ' %s' % chunk + yield ' {}'.format(chunk) PATH = 'META-INF/MANIFEST.MF' @@ -39,10 +39,10 @@ def __init__(self, contents=''): def addentry(self, header, value): if len(header) > 68: - raise ValueError('Header name must be 68 characters or less, given %s' % header) + raise ValueError('Header name must be 68 characters or less, given {}'.format(header)) if self._contents: self._contents += '\n' - self._contents += '\n'.join(self._wrap('%s: %s' % (header, value))) + self._contents += '\n'.join(self._wrap('{}: {}'.format(header, value))) def contents(self): padded = self._contents + '\n' diff --git a/src/python/pants/java/nailgun_client.py b/src/python/pants/java/nailgun_client.py index 3b629a8a735..f29bd4722e9 100644 --- a/src/python/pants/java/nailgun_client.py +++ b/src/python/pants/java/nailgun_client.py @@ -79,7 +79,7 @@ def execute(self, workdir, main_class, *args, **environment): for arg in args: self._send_chunk('A', arg) for k, v in environment.items(): - self._send_chunk('E', '%s=%s' % (k, v)) + self._send_chunk('E', '{}={}'.format(k, v)) self._send_chunk('D', workdir) self._send_chunk('C', main_class) @@ -106,7 +106,7 @@ def _read_response(self): self._err.flush() return int(payload) else: - raise self.ProtocolError('Received unexpected chunk %s -> %s' % (command, payload)) + raise self.ProtocolError('Received unexpected chunk {} -> {}'.format(command, payload)) def _read_chunk(self, buff): while len(buff) < self.HEADER_LENGTH: @@ -185,19 +185,19 @@ def __call__(self, main_class, cwd=None, *args, **environment): sock = self.try_connect() if not sock: raise self.NailgunError('Problem connecting to nailgun server' - ' %s:%d' % (self._host, self._port)) + ' {}:{}'.format(self._host, self._port)) session = NailgunSession(sock, self._ins, self._out, self._err) try: return session.execute(cwd, main_class, *args, **environment) except socket.error as e: - raise self.NailgunError('Problem contacting nailgun server %s:%d:' - ' %s' % (self._host, self._port, e)) + raise self.NailgunError('Problem contacting nailgun server {}:{}:' + ' {}'.format(self._host, self._port, e)) except session.ProtocolError as e: - raise self.NailgunError('Problem executing the nailgun protocol with nailgun server %s:%s:' - ' %s' % (self._host, self._port, e)) + raise self.NailgunError('Problem executing the nailgun protocol with nailgun server {}:{}:' + ' {}'.format(self._host, self._port, e)) finally: sock.close() def __repr__(self): - return 'NailgunClient(host=%r, port=%r, workdir=%r)' % (self._host, self._port, self._workdir) + return 'NailgunClient(host={!r}, port={!r}, workdir={!r})'.format(self._host, self._port, self._workdir) diff --git a/src/python/pants/java/nailgun_executor.py b/src/python/pants/java/nailgun_executor.py index 0ef70e31e04..d3c3bf82b0b 100644 --- a/src/python/pants/java/nailgun_executor.py +++ b/src/python/pants/java/nailgun_executor.py @@ -44,7 +44,7 @@ def parse(cls, endpoint): """Parses an endpoint from a string of the form exe:fingerprint:pid:port""" components 
= endpoint.split(':') if len(components) != 4: - raise ValueError('Invalid endpoint spec %s' % endpoint) + raise ValueError('Invalid endpoint spec {}'.format(endpoint)) exe, fingerprint, pid, port = components return cls(exe, fingerprint, int(pid), int(port)) diff --git a/src/python/pants/java/util.py b/src/python/pants/java/util.py index 4f09ba134de..9a089ca4e76 100644 --- a/src/python/pants/java/util.py +++ b/src/python/pants/java/util.py @@ -34,8 +34,8 @@ def execute_java(classpath, main, jvm_options=None, args=None, executor=None, """ executor = executor or SubprocessExecutor() if not isinstance(executor, Executor): - raise ValueError('The executor argument must be a java Executor instance, give %s of type %s' - % (executor, type(executor))) + raise ValueError('The executor argument must be a java Executor instance, given {} of type {}' + .format(executor, type(executor))) runner = executor.runner(classpath, main, args=args, jvm_options=jvm_options, cwd=cwd) workunit_name = workunit_name or main @@ -63,7 +63,7 @@ def execute_runner(runner, workunit_factory=None, workunit_name=None, workunit_l """ if not isinstance(runner, Executor.Runner): raise ValueError('The runner argument must be a java Executor.Runner instance, ' - 'given %s of type %s' % (runner, type(runner))) + 'given {} of type {}'.format(runner, type(runner))) if workunit_factory is None: return runner.run() diff --git a/src/python/pants/net/http/fetcher.py b/src/python/pants/net/http/fetcher.py index 08c9e531149..167a5877462 100644 --- a/src/python/pants/net/http/fetcher.py +++ b/src/python/pants/net/http/fetcher.py @@ -39,7 +39,7 @@ class PermanentError(Error): def __init__(self, value=None, response_code=None): super(Fetcher.PermanentError, self).__init__(value) if response_code and not isinstance(response_code, six.integer_types): - raise ValueError('response_code must be an integer, got %s' % response_code) + raise ValueError('response_code must be an integer, got {}'.format(response_code)) self._response_code = response_code @property @@ -104,7 +104,7 @@ def __init__(self, fh): :param fh: a file handle open for writing """ if not fh or not hasattr(fh, 'write'): - raise ValueError('fh must be an open file handle, given %s' % fh) + raise ValueError('fh must be an open file handle, given {}'.format(fh)) self._fh = fh def recv_chunk(self, data): @@ -153,7 +153,7 @@ def __init__(self, width=None, chunk_size_bytes=None): """ self._width = width or 50 if not isinstance(self._width, six.integer_types): - raise ValueError('The width must be an integer, given %s' % self._width) + raise ValueError('The width must be an integer, given {}'.format(self._width)) self._chunk_size_bytes = chunk_size_bytes or 10 * 1024 self._start = time.time() @@ -176,18 +176,18 @@ def recv_chunk(self, data): self.chunks = chunk_count if self.size: sys.stdout.write('\r') - sys.stdout.write('%3d%% ' % ((self.read * 1.0 / self.size) * 100)) + sys.stdout.write('{:3d}% '.format(int((self.read * 1.0 / self.size) * 100))) sys.stdout.write('.'
* self.chunks) if self.size: size_width = len(str(self.download_size)) downloaded = int(self.read / 1024) - sys.stdout.write('%s %s KB' % (' ' * (self._width - self.chunks), + sys.stdout.write('{} {} KB'.format(' ' * (self._width - self.chunks), str(downloaded).rjust(size_width))) sys.stdout.flush() def finished(self): if self.chunks > 0: - sys.stdout.write(' %.3fs\n' % (time.time() - self._start)) + sys.stdout.write(' {:.3f}s\n'.format(time.time() - self._start)) sys.stdout.flush() def __init__(self, requests_api=None): @@ -211,14 +211,14 @@ def fetch(self, url, listener, chunk_size_bytes=None, timeout_secs=None): timeout_secs = timeout_secs or 1.0 if not isinstance(listener, self.Listener): - raise ValueError('listener must be a Listener instance, given %s' % listener) + raise ValueError('listener must be a Listener instance, given {}'.format(listener)) try: with closing(self._requests.get(url, stream=True, timeout=timeout_secs)) as resp: if resp.status_code != requests.codes.ok: listener.status(resp.status_code) - raise self.PermanentError('GET request to %s failed with status code %d' - % (url, resp.status_code), + raise self.PermanentError('GET request to {} failed with status code {}' - .format(url, resp.status_code), response_code=resp.status_code) size = resp.headers.get('content-length') @@ -229,12 +229,12 @@ def fetch(self, url, listener, chunk_size_bytes=None, timeout_secs=None): listener.recv_chunk(data) read_bytes += len(data) if size and read_bytes != int(size): - raise self.Error('Expected %s bytes, read %d' % (size, read_bytes)) + raise self.Error('Expected {} bytes, read {}'.format(size, read_bytes)) listener.finished() except requests.RequestException as e: exception_factory = (self.TransientError if isinstance(e, self._TRANSIENT_EXCEPTION_TYPES) else self.PermanentError) - raise exception_factory('Problem GETing data from %s: %s' % (url, e)) + raise exception_factory('Problem GETing data from {}: {}'.format(url, e)) def download(self, url, listener=None, path_or_fd=None, chunk_size_bytes=None, timeout_secs=None): """Downloads data from the given URL. diff --git a/src/python/pants/option/option_value_container.py b/src/python/pants/option/option_value_container.py index b0a612e8fd9..ffaf4ba0b56 100644 --- a/src/python/pants/option/option_value_container.py +++ b/src/python/pants/option/option_value_container.py @@ -101,7 +101,7 @@ def __getattr__(self, key): # In case we get called in copy/deepcopy, which don't invoke the ctor.
raise AttributeError if key not in self._forwardings: - raise AttributeError('No such forwarded attribute: %s' % key) + raise AttributeError('No such forwarded attribute: {}'.format(key)) val = getattr(self, self._forwardings[key]) if isinstance(val, RankedValue): return val.value diff --git a/src/python/pants/option/options.py b/src/python/pants/option/options.py index 15cc4c0a59b..6dcdcded493 100644 --- a/src/python/pants/option/options.py +++ b/src/python/pants/option/options.py @@ -222,7 +222,7 @@ def _maybe_help(scope): if goals: for goal in goals: if not goal.ordered_task_names(): - print('\nUnknown goal: %s' % goal.name) + print('\nUnknown goal: {}'.format(goal.name)) else: print('\n{0}: {1}\n'.format(goal.name, goal.description)) for scope in goal.known_scopes(): diff --git a/src/python/pants/option/parser.py b/src/python/pants/option/parser.py index 4382e93b5f2..c7bfb83a120 100644 --- a/src/python/pants/option/parser.py +++ b/src/python/pants/option/parser.py @@ -413,4 +413,4 @@ def _freeze(self): self._frozen = True def __str__(self): - return 'Parser(%s)' % self._scope + return 'Parser({})'.format(self._scope) diff --git a/src/python/pants/option/ranked_value.py b/src/python/pants/option/ranked_value.py index a7c528cd82f..0a56894c620 100644 --- a/src/python/pants/option/ranked_value.py +++ b/src/python/pants/option/ranked_value.py @@ -94,4 +94,4 @@ def __eq__(self): return self._rank == self._rank and self._value == self._value def __repr__(self): - return '(%s, %s)' % (self._RANK_NAMES.get(self._rank, 'UNKNOWN'), self._value) + return '({}, {})'.format(self._RANK_NAMES.get(self._rank, 'UNKNOWN'), self._value) diff --git a/src/python/pants/reporting/html_reporter.py b/src/python/pants/reporting/html_reporter.py index e91d4664e7f..03fcd5cc82d 100644 --- a/src/python/pants/reporting/html_reporter.py +++ b/src/python/pants/reporting/html_reporter.py @@ -116,13 +116,13 @@ def end_workunit(self, workunit): """Implementation of Reporter callback.""" # Create the template arguments. duration = workunit.duration() - timing = '%.3f' % duration + timing = '{:.3f}'.format(duration) unaccounted_time = None # Background work may be idle a lot, no point in reporting that as unaccounted. if self.is_under_main_root(workunit): unaccounted_time_secs = workunit.unaccounted_time() if unaccounted_time_secs >= 1 and unaccounted_time_secs > 0.05 * duration: - unaccounted_time = '%.3f' % unaccounted_time_secs + unaccounted_time = '{:.3f}'.format(unaccounted_time_secs) args = { 'workunit': workunit.to_dict(), 'status': HtmlReporter._outcome_css_classes[workunit.outcome()], 'timing': timing, @@ -139,7 +139,7 @@ def end_workunit(self, workunit): def render_timings(timings): timings_dict = timings.get_all() for item in timings_dict: - item['timing_string'] = '%.3f' % item['timing'] + item['timing_string'] = '{:.3f}'.format(item['timing']) args = { 'timings': timings_dict } @@ -176,7 +176,7 @@ def fix_detail_id(e, _id): def handle_output(self, workunit, label, s): """Implementation of Reporter callback.""" if os.path.exists(self._html_dir): # Make sure we're not immediately after a clean-all.
- path = os.path.join(self._html_dir, '%s.%s' % (workunit.id, label)) + path = os.path.join(self._html_dir, '{}.{}'.format(workunit.id, label)) output_files = self._output_files[workunit.id] if path not in output_files: f = open(path, 'w') @@ -196,8 +196,8 @@ def handle_output(self, workunit, label, s): } def do_handle_log(self, workunit, level, *msg_elements): """Implementation of Reporter callback.""" - content = '%s' % \ - (HtmlReporter._log_level_css_map[level], self._render_message(*msg_elements)) + content = '{}'.format( + HtmlReporter._log_level_css_map[level], self._render_message(*msg_elements)) # Generate some javascript that appends the content to the workunit's div. args = { @@ -283,7 +283,7 @@ def _handle_ansi_color_codes(self, s): ret.append('') span_depth -= 1 else: - ret.append('' % code) + ret.append(''.format(code)) span_depth += 1 while span_depth > 0: ret.append('') diff --git a/src/python/pants/reporting/linkify.py b/src/python/pants/reporting/linkify.py index 89cf761fa3a..29f0b15d4e3 100644 --- a/src/python/pants/reporting/linkify.py +++ b/src/python/pants/reporting/linkify.py @@ -16,8 +16,8 @@ _OPTIONAL_PORT = r'(:\d+)?' _REL_PATH_COMPONENT = r'(\w|[-.])+' # One or more alphanumeric, underscore, dash or dot. _ABS_PATH_COMPONENT = r'/' + _REL_PATH_COMPONENT -_ABS_PATH_COMPONENTS = r'(%s)+' % _ABS_PATH_COMPONENT -_OPTIONAL_TARGET_SUFFIX = r'(:%s)?' % _REL_PATH_COMPONENT # For /foo/bar:target. +_ABS_PATH_COMPONENTS = r'({})+'.format(_ABS_PATH_COMPONENT) +_OPTIONAL_TARGET_SUFFIX = r'(:{})?'.format(_REL_PATH_COMPONENT) # For /foo/bar:target. # Note that we require at least two path components. # We require the last characgter to be alphanumeric or underscore, because some tools print an @@ -48,10 +48,10 @@ def to_url(m): path = build_file.relpath if os.path.exists(os.path.join(buildroot, path)): # The reporting server serves file content at /browse/. - return '/browse/%s' % path + return '/browse/{}'.format(path) else: return None def maybe_add_link(url, text): - return '%s' % (url, text) if url else text + return '{}'.format(url, text) if url else text return _PATH_RE.sub(lambda m: maybe_add_link(to_url(m), m.group(0)), s) diff --git a/src/python/pants/reporting/plaintext_reporter.py b/src/python/pants/reporting/plaintext_reporter.py index caeb7203f55..351642c9b21 100644 --- a/src/python/pants/reporting/plaintext_reporter.py +++ b/src/python/pants/reporting/plaintext_reporter.py @@ -79,8 +79,8 @@ def start_workunit(self, workunit): all([not x.has_label(WorkUnit.MULTITOOL) and not x.has_label(WorkUnit.BOOTSTRAP) for x in workunit.parent.ancestors()]): # Bootstrapping can be chatty, so don't show anything for its sub-workunits. - self.emit(b'\n%s %s %s[%s]' % - (workunit.start_time_string(), + self.emit(b'\n{} {} {}[{}]'.format( + workunit.start_time_string(), workunit.start_delta_string(), self._indent(workunit), workunit.name if self.settings.indent else workunit.path())) @@ -99,7 +99,7 @@ def end_workunit(self, workunit): if workunit.outcome() != WorkUnit.SUCCESS and not self._show_output(workunit): # Emit the suppressed workunit output, if any, to aid in debugging the problem. 
for name, outbuf in workunit.outputs().items(): - self.emit(self._prefix(workunit, b'\n==== %s ====\n' % name)) + self.emit(self._prefix(workunit, b'\n==== {} ====\n'.format(name))) self.emit(self._prefix(workunit, outbuf.read_from(0))) self.flush() @@ -149,12 +149,12 @@ def _show_output_unindented(self, workunit): return workunit.has_label(WorkUnit.REPL) or workunit.has_label(WorkUnit.RUN) def _format_aggregated_timings(self, aggregated_timings): - return b'\n'.join([b'%(timing).3f %(label)s' % x for x in aggregated_timings.get_all()]) + return b'\n'.join([b'{timing:.3f} {label}'.format(**x) for x in aggregated_timings.get_all()]) def _format_artifact_cache_stats(self, artifact_cache_stats): stats = artifact_cache_stats.get_all() return b'No artifact cache reads.' if not stats else \ b'\n'.join([b'{cache_name} - Hits: {num_hits} Misses: {num_misses}'.format(**x) for x in stats]) def _indent(self, workunit): diff --git a/src/python/pants/reporting/reporting_server.py b/src/python/pants/reporting/reporting_server.py index 1f478aafd67..ffe3df4bf67 100644 --- a/src/python/pants/reporting/reporting_server.py +++ b/src/python/pants/reporting/reporting_server.py @@ -71,10 +71,10 @@ def do_GET(self): self._handle_runs('', {}) return - self._send_content('Invalid GET request %s' % self.path, 'text/html') + self._send_content('Invalid GET request {}'.format(self.path), 'text/html') except (IOError, ValueError): pass # Printing these errors gets annoying, and there's nothing to do about them anyway. - #sys.stderr.write('Invalid GET request %s' % self.path) + #sys.stderr.write('Invalid GET request {}'.format(self.path)) def _handle_runs(self, relpath, params): """Show a listing of all pants runs since the last clean-all.""" @@ -130,7 +130,7 @@ def _handle_content(self, relpath, params): with open(abspath, 'r') as infile: content = infile.read() else: - content = 'No file found at %s' % abspath + content = 'No file found at {}'.format(abspath) content_type = mimetypes.guess_type(abspath)[0] or 'text/plain' if not content_type.startswith('text/') and not content_type == 'application/xml': # Binary file. Display it as hex, split into lines. @@ -281,7 +281,7 @@ def _client_allowed(self): client_ip = self._client_address[0] if not client_ip in self._settings.allowed_clients and \ not 'ALL' in self._settings.allowed_clients: - self._send_content('Access from host %s forbidden.'
% client_ip, 'text/html') + self._send_content('Access from host {} forbidden.'.format(client_ip), 'text/html') return False return True diff --git a/src/python/pants/reporting/reporting_utils.py b/src/python/pants/reporting/reporting_utils.py index 5f0dc82ded3..19b1cc1d177 100644 --- a/src/python/pants/reporting/reporting_utils.py +++ b/src/python/pants/reporting/reporting_utils.py @@ -28,7 +28,7 @@ def pluralize(x): items = [str(x) for x in items] n = len(items) - text = '%d %s' % (n, item_type if n == 1 else pluralize(item_type)) + text = '{} {}'.format(n, item_type if n == 1 else pluralize(item_type)) if n == 0: return text else: diff --git a/src/python/pants/thrift_util.py b/src/python/pants/thrift_util.py index d9d25d86c4c..f2550a33632 100644 --- a/src/python/pants/thrift_util.py +++ b/src/python/pants/thrift_util.py @@ -36,12 +36,12 @@ def find_includes(basedirs, source, log=None): include = os.path.join(basedir, capture) if os.path.exists(include): if log: - log.debug('%s has include %s' % (source, include)) + log.debug('{} has include {}'.format(source, include)) includes.add(include) added = True if not added: - raise ValueError("%s included in %s not found in bases %s" - % (include, source, all_basedirs)) + raise ValueError("{} included in {} not found in bases {}" + .format(include, source, all_basedirs)) return includes
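A note on the format mini-language used in the hunks above: a precision of '.3' on its own selects three significant digits, so the equivalent of the old '%.3f' is '{:.3f}', and '%3d' maps to '{:3d}' (with an explicit int() where the argument may be a float, since the 'd' type rejects floats that '%d' silently truncated). Illustrative interpreter check, not part of the patch:
>>> '%.3f' % 12.34567, '{:.3f}'.format(12.34567), '{:.3}'.format(12.34567)
('12.346', '12.346', '12.3')
>>> '%3d%% ' % 42.9, '{:3d}% '.format(int(42.9))
(' 42% ', ' 42% ')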