Skip to content

Commit

Permalink
gyp: pull Python 3 changes from node/node-gyp
Browse files Browse the repository at this point in the history
PR-URL: #28573
Reviewed-By: Sam Roberts <vieuxtech@gmail.com>
Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de>
cclauss authored and targos committed Jul 20, 2019

Verified

This commit was signed with the committer’s verified signature. The key has expired.
addaleax Anna Henningsen
1 parent 44de431 commit b1db810
Showing 16 changed files with 218 additions and 180 deletions.
16 changes: 14 additions & 2 deletions tools/gyp/PRESUBMIT.py
Original file line number Diff line number Diff line change
@@ -76,8 +76,7 @@
def _LicenseHeader(input_api):
# Accept any year number from 2009 to the current year.
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))

allowed_years = (str(s) for s in reversed(range(2009, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'

# The (c) is deprecated, but tolerate it until it's removed from all files.
@@ -124,3 +123,16 @@ def CheckChangeOnCommit(input_api, output_api):
finally:
sys.path = old_sys_path
return report


# Trybots that presubmit runs are routed to by default.
TRYBOTS = [
  'linux_try',
  'mac_try',
  'win_try',
]


def GetPreferredTryMasters(_, change):
  """Map each trybot to the default test suite for the client.gyp master.

  Returns a dict of master name -> {bot name: set of test names}, the shape
  PRESUBMIT machinery expects.
  """
  bots = {}
  for bot in TRYBOTS:
    bots[bot] = set(['defaulttests'])
  return {'client.gyp': bots}
17 changes: 6 additions & 11 deletions tools/gyp/pylib/gyp/MSVSNew.py
Original file line number Diff line number Diff line change
@@ -4,22 +4,17 @@

"""New implementation of Visual Studio project generation."""

import hashlib
import os
import random

import gyp.common

# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
# preserving 2.4 compatibility.
try:
import hashlib
_new_md5 = hashlib.md5
except ImportError:
import md5
_new_md5 = md5.new

cmp
except NameError:
def cmp(x, y):
return (x > y) - (x < y)

# Initialize random number generator
random.seed()
@@ -50,7 +45,7 @@ def MakeGuid(name, seed='msvs_new'):
not change when the project for a target is rebuilt.
"""
# Calculate a MD5 signature for the seed and name.
d = _new_md5(str(seed) + str(name)).hexdigest().upper()
d = hashlib.md5(str(seed) + str(name)).hexdigest().upper()
# Convert most of the signature to GUID form (discard the rest)
guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
+ '-' + d[20:32] + '}')
4 changes: 2 additions & 2 deletions tools/gyp/pylib/gyp/MSVSUserFile.py
Original file line number Diff line number Diff line change
@@ -91,7 +91,7 @@ def AddDebugSettings(self, config_name, command, environment = {},

if environment and isinstance(environment, dict):
env_list = ['%s="%s"' % (key, val)
for (key,val) in environment.iteritems()]
for (key,val) in environment.items()]
environment = ' '.join(env_list)
else:
environment = ''
@@ -135,7 +135,7 @@ def AddDebugSettings(self, config_name, command, environment = {},
def WriteIfChanged(self):
"""Writes the user file."""
configs = ['Configurations']
for config, spec in sorted(self.configurations.iteritems()):
for config, spec in sorted(self.configurations.items()):
configs.append(spec)

content = ['VisualStudioUserFile',
89 changes: 49 additions & 40 deletions tools/gyp/pylib/gyp/__init__.py
Original file line number Diff line number Diff line change
@@ -4,16 +4,25 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import copy
import gyp.input
import optparse
import argparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError

# Widest "text" type for isinstance() checks: basestring exists only on
# Python 2 (covering both str and unicode), so probing for it selects the
# right type on either major version.
try:
  # Python 2
  string_types = basestring
except NameError:
  # Python 3
  string_types = str

# Default debug modes for GYP
debug = {}

@@ -34,8 +43,8 @@ def DebugOutput(mode, message, *args):
pass
if args:
message %= args
print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
ctx[1], ctx[2], message)
print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
ctx[1], ctx[2], message))

def FindBuildFiles():
extension = '.gyp'
@@ -207,7 +216,7 @@ def Noop(value):
# We always want to ignore the environment when regenerating, to avoid
# duplicate or changed flags in the environment at the time of regeneration.
flags = ['--ignore-environment']
for name, metadata in options._regeneration_metadata.iteritems():
for name, metadata in options._regeneration_metadata.items():
opt = metadata['opt']
value = getattr(options, name)
value_predicate = metadata['type'] == 'path' and FixPath or Noop
@@ -226,24 +235,24 @@ def Noop(value):
(action == 'store_false' and not value)):
flags.append(opt)
elif options.use_environment and env_name:
print >>sys.stderr, ('Warning: environment regeneration unimplemented '
print('Warning: environment regeneration unimplemented '
'for %s flag %r env_name %r' % (action, opt,
env_name))
env_name), file=sys.stderr)
else:
print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
'flag %r' % (action, opt))
print('Warning: regeneration unimplemented for action %r '
'flag %r' % (action, opt), file=sys.stderr)

return flags

class RegeneratableOptionParser(optparse.OptionParser):
def __init__(self):
class RegeneratableOptionParser(argparse.ArgumentParser):
def __init__(self, usage):
self.__regeneratable_options = {}
optparse.OptionParser.__init__(self)
argparse.ArgumentParser.__init__(self, usage=usage)

def add_option(self, *args, **kw):
def add_argument(self, *args, **kw):
"""Add an option to the parser.
This accepts the same arguments as OptionParser.add_option, plus the
This accepts the same arguments as ArgumentParser.add_argument, plus the
following:
regenerate: can be set to False to prevent this option from being included
in regeneration.
@@ -260,7 +269,7 @@ def add_option(self, *args, **kw):
# it as a string.
type = kw.get('type')
if type == 'path':
kw['type'] = 'string'
kw['type'] = str

self.__regeneratable_options[dest] = {
'action': kw.get('action'),
@@ -269,50 +278,50 @@ def add_option(self, *args, **kw):
'opt': args[0],
}

optparse.OptionParser.add_option(self, *args, **kw)
argparse.ArgumentParser.add_argument(self, *args, **kw)

def parse_args(self, *args):
values, args = optparse.OptionParser.parse_args(self, *args)
values, args = argparse.ArgumentParser.parse_known_args(self, *args)
values._regeneration_metadata = self.__regeneratable_options
return values, args

def gyp_main(args):
my_name = os.path.basename(sys.argv[0])
usage = 'usage: %(prog)s [options ...] [build_file ...]'


parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]'
parser.set_usage(usage.replace('%s', '%prog'))
parser.add_option('--build', dest='configs', action='append',
parser = RegeneratableOptionParser(usage=usage.replace('%s', '%(prog)s'))
parser.add_argument('--build', dest='configs', action='append',
help='configuration for build after project generation')
parser.add_option('--check', dest='check', action='store_true',
parser.add_argument('--check', dest='check', action='store_true',
help='check format of gyp files')
parser.add_option('--config-dir', dest='config_dir', action='store',
parser.add_argument('--config-dir', dest='config_dir', action='store',
env_name='GYP_CONFIG_DIR', default=None,
help='The location for configuration files like '
'include.gypi.')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
parser.add_argument('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables", '
'"includes" and "general" or "all" for all of them.')
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
parser.add_argument('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
parser.add_argument('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-f', '--format', dest='formats', action='append',
parser.add_argument('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
parser.add_option('-G', dest='generator_flags', action='append', default=[],
parser.add_argument('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
parser.add_option('--generator-output', dest='generator_output',
parser.add_argument('--generator-output', dest='generator_output',
action='store', default=None, metavar='DIR', type='path',
env_name='GYP_GENERATOR_OUTPUT',
help='puts generated build files under DIR')
parser.add_option('--ignore-environment', dest='use_environment',
parser.add_argument('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
parser.add_option('-I', '--include', dest='includes', action='append',
parser.add_argument('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
# --no-circular-check disables the check for circular relationships between
@@ -322,7 +331,7 @@ def gyp_main(args):
# option allows the strict behavior to be used on Macs and the lenient
# behavior to be used elsewhere.
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
parser.add_option('--no-circular-check', dest='circular_check',
parser.add_argument('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files")
# --no-duplicate-basename-check disables the check for duplicate basenames
@@ -331,18 +340,18 @@ def gyp_main(args):
# when duplicate basenames are passed into Make generator on Mac.
# TODO(yukawa): Remove this option when these legacy generators are
# deprecated.
parser.add_option('--no-duplicate-basename-check',
parser.add_argument('--no-duplicate-basename-check',
dest='duplicate_basename_check', action='store_false',
default=True, regenerate=False,
help="don't check for duplicate basenames")
parser.add_option('--no-parallel', action='store_true', default=False,
parser.add_argument('--no-parallel', action='store_true', default=False,
help='Disable multiprocessing')
parser.add_option('-S', '--suffix', dest='suffix', default='',
parser.add_argument('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
parser.add_argument('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree')
parser.add_option('-R', '--root-target', dest='root_targets',
parser.add_argument('-R', '--root-target', dest='root_targets',
action='append', metavar='TARGET',
help='include only TARGET and its deep dependencies')

@@ -410,7 +419,7 @@ def gyp_main(args):
for option, value in sorted(options.__dict__.items()):
if option[0] == '_':
continue
if isinstance(value, basestring):
if isinstance(value, string_types):
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
else:
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
@@ -432,7 +441,7 @@ def gyp_main(args):
build_file_dir = os.path.abspath(os.path.dirname(build_file))
build_file_dir_components = build_file_dir.split(os.path.sep)
components_len = len(build_file_dir_components)
for index in xrange(components_len - 1, -1, -1):
for index in range(components_len - 1, -1, -1):
if build_file_dir_components[index] == 'src':
options.depth = os.path.sep.join(build_file_dir_components)
break
@@ -475,7 +484,7 @@ def gyp_main(args):
if home_dot_gyp != None:
default_include = os.path.join(home_dot_gyp, 'include.gypi')
if os.path.exists(default_include):
print 'Using overrides found in ' + default_include
print('Using overrides found in ' + default_include)
includes.append(default_include)

# Command-line --include files come after the default include.
@@ -536,7 +545,7 @@ def gyp_main(args):
def main(args):
try:
return gyp_main(args)
except GypError, e:
except GypError as e:
sys.stderr.write("gyp: %s\n" % e)
return 1

2 changes: 1 addition & 1 deletion tools/gyp/pylib/gyp/flock_tool.py
Original file line number Diff line number Diff line change
@@ -39,7 +39,7 @@ def ExecFlock(self, lockfile, *cmd_list):
# where fcntl.flock(fd, LOCK_EX) always fails
# with EBADF, that's why we use this F_SETLK
# hack instead.
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
if sys.platform.startswith('aix'):
# Python on AIX is compiled with LARGEFILE support, which changes the
# struct size.
88 changes: 45 additions & 43 deletions tools/gyp/pylib/gyp/generator/analyzer.py
Original file line number Diff line number Diff line change
@@ -62,6 +62,8 @@
then the "all" target includes "b1" and "b2".
"""

from __future__ import print_function

import gyp.common
import gyp.ninja_syntax as ninja_syntax
import json
@@ -155,7 +157,7 @@ def _AddSources(sources, base_path, base_path_components, result):
continue
result.append(base_path + source)
if debug:
print 'AddSource', org_source, result[len(result) - 1]
print('AddSource', org_source, result[len(result) - 1])


def _ExtractSourcesFromAction(action, base_path, base_path_components,
@@ -185,7 +187,7 @@ def _ExtractSources(target, target_dict, toplevel_dir):
base_path += '/'

if debug:
print 'ExtractSources', target, base_path
print('ExtractSources', target, base_path)

results = []
if 'sources' in target_dict:
@@ -278,7 +280,7 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
the root of the source tree."""
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
if debug:
print 'gyp file modified', build_file
print('gyp file modified', build_file)
return True

# First element of included_files is the file itself.
@@ -291,8 +293,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
_ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
if _ToLocalPath(toplevel_dir, rel_include_file) in files:
if debug:
print 'included gyp file modified, gyp_file=', build_file, \
'included file=', rel_include_file
print('included gyp file modified, gyp_file=', build_file,
'included file=', rel_include_file)
return True
return False

@@ -373,15 +375,15 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
# If a build file (or any of its included files) is modified we assume all
# targets in the file are modified.
if build_file_in_files[build_file]:
print 'matching target from modified build file', target_name
print('matching target from modified build file', target_name)
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
else:
sources = _ExtractSources(target_name, target_dicts[target_name],
toplevel_dir)
for source in sources:
if _ToGypPath(os.path.normpath(source)) in files:
print 'target', target_name, 'matches', source
print('target', target_name, 'matches', source)
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
break
@@ -433,7 +435,7 @@ def _DoesTargetDependOnMatchingTargets(target):
for dep in target.deps:
if _DoesTargetDependOnMatchingTargets(dep):
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
print '\t', target.name, 'matches by dep', dep.name
print('\t', target.name, 'matches by dep', dep.name)
return True
target.match_status = MATCH_STATUS_DOESNT_MATCH
return False
@@ -445,7 +447,7 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets):
supplied as input to analyzer.
possible_targets: targets to search from."""
found = []
print 'Targets that matched by dependency:'
print('Targets that matched by dependency:')
for target in possible_targets:
if _DoesTargetDependOnMatchingTargets(target):
found.append(target)
@@ -484,12 +486,12 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
(add_if_no_ancestor or target.requires_build)) or
(target.is_static_library and add_if_no_ancestor and
not target.is_or_has_linked_ancestor)):
print '\t\tadding to compile targets', target.name, 'executable', \
target.is_executable, 'added_to_compile_targets', \
target.added_to_compile_targets, 'add_if_no_ancestor', \
add_if_no_ancestor, 'requires_build', target.requires_build, \
'is_static_library', target.is_static_library, \
'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
print('\t\tadding to compile targets', target.name, 'executable',
target.is_executable, 'added_to_compile_targets',
target.added_to_compile_targets, 'add_if_no_ancestor',
add_if_no_ancestor, 'requires_build', target.requires_build,
'is_static_library', target.is_static_library,
'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor)
result.add(target)
target.added_to_compile_targets = True

@@ -500,54 +502,54 @@ def _GetCompileTargets(matching_targets, supplied_targets):
supplied_targets: set of targets supplied to analyzer to search from."""
result = set()
for target in matching_targets:
print 'finding compile targets for match', target.name
print('finding compile targets for match', target.name)
_AddCompileTargets(target, supplied_targets, True, result)
return result


def _WriteOutput(params, **values):
"""Writes the output, either to stdout or a file is specified."""
if 'error' in values:
print 'Error:', values['error']
print('Error:', values['error'])
if 'status' in values:
print values['status']
print(values['status'])
if 'targets' in values:
values['targets'].sort()
print 'Supplied targets that depend on changed files:'
print('Supplied targets that depend on changed files:')
for target in values['targets']:
print '\t', target
print('\t', target)
if 'invalid_targets' in values:
values['invalid_targets'].sort()
print 'The following targets were not found:'
print('The following targets were not found:')
for target in values['invalid_targets']:
print '\t', target
print('\t', target)
if 'build_targets' in values:
values['build_targets'].sort()
print 'Targets that require a build:'
print('Targets that require a build:')
for target in values['build_targets']:
print '\t', target
print('\t', target)
if 'compile_targets' in values:
values['compile_targets'].sort()
print 'Targets that need to be built:'
print('Targets that need to be built:')
for target in values['compile_targets']:
print '\t', target
print('\t', target)
if 'test_targets' in values:
values['test_targets'].sort()
print 'Test targets:'
print('Test targets:')
for target in values['test_targets']:
print '\t', target
print('\t', target)

output_path = params.get('generator_flags', {}).get(
'analyzer_output_path', None)
if not output_path:
print json.dumps(values)
print(json.dumps(values))
return
try:
f = open(output_path, 'w')
f.write(json.dumps(values) + '\n')
f.close()
except IOError as e:
print 'Error writing to output file', output_path, str(e)
print('Error writing to output file', output_path, str(e))


def _WasGypIncludeFileModified(params, files):
@@ -556,7 +558,7 @@ def _WasGypIncludeFileModified(params, files):
if params['options'].includes:
for include in params['options'].includes:
if _ToGypPath(os.path.normpath(include)) in files:
print 'Include file modified, assuming all changed', include
print('Include file modified, assuming all changed', include)
return True
return False

@@ -638,13 +640,13 @@ def find_matching_test_target_names(self):
set(self._root_targets))]
else:
test_targets = [x for x in test_targets_no_all]
print 'supplied test_targets'
print('supplied test_targets')
for target_name in self._test_target_names:
print '\t', target_name
print 'found test_targets'
print('\t', target_name)
print('found test_targets')
for target in test_targets:
print '\t', target.name
print 'searching for matching test targets'
print('\t', target.name)
print('searching for matching test targets')
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
matching_test_targets_contains_all = (test_target_names_contains_all and
set(matching_test_targets) &
@@ -654,14 +656,14 @@ def find_matching_test_target_names(self):
# 'all' is subsequently added to the matching names below.
matching_test_targets = [x for x in (set(matching_test_targets) &
set(test_targets_no_all))]
print 'matched test_targets'
print('matched test_targets')
for target in matching_test_targets:
print '\t', target.name
print('\t', target.name)
matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
for target in matching_test_targets]
if matching_test_targets_contains_all:
matching_target_names.append('all')
print '\tall'
print('\tall')
return matching_target_names

def find_matching_compile_target_names(self):
@@ -677,10 +679,10 @@ def find_matching_compile_target_names(self):
if 'all' in self._supplied_target_names():
supplied_targets = [x for x in (set(supplied_targets) |
set(self._root_targets))]
print 'Supplied test_targets & compile_targets'
print('Supplied test_targets & compile_targets')
for target in supplied_targets:
print '\t', target.name
print 'Finding compile targets'
print('\t', target.name)
print('Finding compile targets')
compile_targets = _GetCompileTargets(self._changed_targets,
supplied_targets)
return [gyp.common.ParseQualifiedTarget(target.name)[1]
@@ -699,7 +701,7 @@ def GenerateOutput(target_list, target_dicts, data, params):

toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
if debug:
print 'toplevel_dir', toplevel_dir
print('toplevel_dir', toplevel_dir)

if _WasGypIncludeFileModified(params, config.files):
result_dict = { 'status': all_changed_string,
3 changes: 2 additions & 1 deletion tools/gyp/pylib/gyp/generator/dump_dependency_json.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from __future__ import print_function
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -96,4 +97,4 @@ def GenerateOutput(target_list, target_dicts, data, params):
f = open(filename, 'w')
json.dump(edges, f)
f.close()
print 'Wrote json to %s.' % filename
print('Wrote json to %s.' % filename)
2 changes: 1 addition & 1 deletion tools/gyp/pylib/gyp/generator/eclipse.py
Original file line number Diff line number Diff line change
@@ -141,7 +141,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
compiler_includes_list.append(include_dir)

# Find standard gyp include dirs.
if config.has_key('include_dirs'):
if 'include_dirs' in config:
include_dirs = config['include_dirs']
for shared_intermediate_dir in shared_intermediate_dirs:
for include_dir in include_dirs:
2 changes: 1 addition & 1 deletion tools/gyp/pylib/gyp/generator/gypd.py
Original file line number Diff line number Diff line change
@@ -88,7 +88,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if not output_file in output_files:
output_files[output_file] = input_file

for output_file, input_file in output_files.iteritems():
for output_file, input_file in output_files.items():
output = open(output_file, 'w')
pprint.pprint(data[input_file], output)
output.close()
22 changes: 11 additions & 11 deletions tools/gyp/pylib/gyp/input_test.py
Original file line number Diff line number Diff line change
@@ -22,38 +22,38 @@ def _create_dependency(self, dependent, dependency):
dependency.dependents.append(dependent)

def test_no_cycle_empty_graph(self):
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
for label, node in self.nodes.items():
self.assertEqual([], node.FindCycles())

def test_no_cycle_line(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])

for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
for label, node in self.nodes.items():
self.assertEqual([], node.FindCycles())

def test_no_cycle_dag(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['a'], self.nodes['c'])
self._create_dependency(self.nodes['b'], self.nodes['c'])

for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
for label, node in self.nodes.items():
self.assertEqual([], node.FindCycles())

def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a'])

self.assertEquals([[self.nodes['a'], self.nodes['a']]],
self.assertEqual([[self.nodes['a'], self.nodes['a']]],
self.nodes['a'].FindCycles())

def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])

self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.nodes['a'].FindCycles())
self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.assertEqual([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.nodes['b'].FindCycles())

def test_two_cycles(self):
@@ -68,7 +68,7 @@ def test_two_cycles(self):
[self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
self.assertTrue(
[self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
self.assertEquals(2, len(cycles))
self.assertEqual(2, len(cycles))

def test_big_cycle(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
@@ -77,7 +77,7 @@ def test_big_cycle(self):
self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a'])

self.assertEquals([[self.nodes['a'],
self.assertEqual([[self.nodes['a'],
self.nodes['b'],
self.nodes['c'],
self.nodes['d'],
4 changes: 2 additions & 2 deletions tools/gyp/pylib/gyp/ordered_dict.py
Original file line number Diff line number Diff line change
@@ -161,8 +161,8 @@ def itervalues(self):
for k in self:
yield self[k]

def iteritems(self):
'od.iteritems -> an iterator over the (key, value) items in od'
def items(self):
  # NOTE(review): despite the name, this yields lazily (a generator), keeping
  # the contract of the iteritems() method it replaced rather than returning
  # a list/view like dict.items() — callers must not index the result.
  'od.items -> an iterator over the (key, value) items in od'
  for k in self:
    yield (k, self[k])

10 changes: 7 additions & 3 deletions tools/gyp/pylib/gyp/simple_copy.py
Original file line number Diff line number Diff line change
@@ -28,8 +28,12 @@ def deepcopy(x):
def _deepcopy_atomic(x):
  """Return x unchanged: immutable values need no copy."""
  return x

# Register every type whose instances are immutable and can therefore be
# shared instead of copied. long and unicode exist only on Python 2; probing
# for them picks the right tuple on either major version. (The span here
# contained both the pre- and post-commit registration loops; only the
# version-guarded one is kept.)
try:
  types = bool, float, int, str, type, type(None), long, unicode
except NameError:  # Python 3
  types = bool, float, int, str, type, type(None)

for x in types:
  d[x] = _deepcopy_atomic

def _deepcopy_list(x):
@@ -38,7 +42,7 @@ def _deepcopy_list(x):

def _deepcopy_dict(x):
  """Return a new dict whose keys and values are deep copies of x's.

  The span contained both the stale Python-2 `.iteritems()` loop header and
  the Python-3 `.items()` one; only the portable `.items()` form is kept.
  """
  y = {}
  for key, value in x.items():
    y[deepcopy(key)] = deepcopy(value)
  return y
d[dict] = _deepcopy_dict
30 changes: 16 additions & 14 deletions tools/gyp/tools/graphviz.py
Original file line number Diff line number Diff line change
@@ -8,6 +8,8 @@
generate input suitable for graphviz to render a dependency graph of
targets."""

from __future__ import print_function

import collections
import json
import sys
@@ -50,9 +52,9 @@ def WriteGraph(edges):
build_file, target_name, toolset = ParseTarget(src)
files[build_file].append(src)

print 'digraph D {'
print ' fontsize=8' # Used by subgraphs.
print ' node [fontsize=8]'
print('digraph D {')
print(' fontsize=8') # Used by subgraphs.
print(' node [fontsize=8]')

# Output nodes by file. We must first write out each node within
# its file grouping before writing out any edges that may refer
@@ -63,31 +65,31 @@ def WriteGraph(edges):
# the display by making it a box without an internal node.
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name)
print(' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name))
else:
# Group multiple nodes together in a subgraph.
print ' subgraph "cluster_%s" {' % filename
print ' label = "%s"' % filename
print(' subgraph "cluster_%s" {' % filename)
print(' label = "%s"' % filename)
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [label="%s"]' % (target, target_name)
print ' }'
print(' "%s" [label="%s"]' % (target, target_name))
print(' }')

# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
print ' "%s" -> "%s"' % (src, dst)
print(' "%s" -> "%s"' % (src, dst))

print '}'
print('}')


def main():
if len(sys.argv) < 2:
print >>sys.stderr, __doc__
print >>sys.stderr
print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
print(__doc__, file=sys.stderr)
print(file=sys.stderr)
print('usage: %s target1 target2...' % (sys.argv[0]), file=sys.stderr)
return 1

edges = LoadEdges('dump.json', sys.argv[1:])
31 changes: 17 additions & 14 deletions tools/gyp/tools/pretty_gyp.py
Original file line number Diff line number Diff line change
@@ -6,6 +6,8 @@

"""Pretty-prints the contents of a GYP file."""

from __future__ import print_function

import sys
import re

@@ -118,23 +120,24 @@ def prettyprint_input(lines):
basic_offset = 2
last_line = ""
for line in lines:
line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
if len(line) > 0:
brace_diff = 0
if not COMMENT_RE.match(line):
if COMMENT_RE.match(line):
print(line)
else:
line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
if len(line) > 0:
(brace_diff, after) = count_braces(line)
if brace_diff != 0:
if after:
print " " * (basic_offset * indent) + line
indent += brace_diff
if brace_diff != 0:
if after:
print(" " * (basic_offset * indent) + line)
indent += brace_diff
else:
indent += brace_diff
print(" " * (basic_offset * indent) + line)
else:
indent += brace_diff
print " " * (basic_offset * indent) + line
print(" " * (basic_offset * indent) + line)
else:
print " " * (basic_offset * indent) + line
else:
print ""
last_line = line
print("")
last_line = line


def main():
50 changes: 26 additions & 24 deletions tools/gyp/tools/pretty_sln.py
Original file line number Diff line number Diff line change
@@ -12,21 +12,23 @@
Then it outputs a possible build order.
"""

__author__ = 'nsylvain (Nicolas Sylvain)'
from __future__ import print_function

import os
import re
import sys
import pretty_vcproj

__author__ = 'nsylvain (Nicolas Sylvain)'

def BuildProject(project, built, projects, deps):
  """Print `project` after all of its dependencies, depth-first.

  Emits each project name (once) in a valid build order and appends it to
  `built`. `projects` is unused here but kept for the caller's signature.
  This is not infinite-recursion proof: a dependency cycle will recurse
  forever. The span contained both the Python-2 `print project` statement
  and the Python-3 `print(project)` call; only the latter is kept.
  """
  for dep in deps[project]:
    if dep not in built:
      BuildProject(dep, built, projects, deps)
  print(project)
  built.append(project)

def ParseSolution(solution_file):
@@ -100,44 +102,44 @@ def ParseSolution(solution_file):
return (projects, dependencies)

def PrintDependencies(projects, deps):
print "---------------------------------------"
print "Dependencies for all projects"
print "---------------------------------------"
print "-- --"
print("---------------------------------------")
print("Dependencies for all projects")
print("---------------------------------------")
print("-- --")

for (project, dep_list) in sorted(deps.items()):
print "Project : %s" % project
print "Path : %s" % projects[project][0]
print("Project : %s" % project)
print("Path : %s" % projects[project][0])
if dep_list:
for dep in dep_list:
print " - %s" % dep
print ""
print(" - %s" % dep)
print("")

print "-- --"
print("-- --")

def PrintBuildOrder(projects, deps):
print "---------------------------------------"
print "Build order "
print "---------------------------------------"
print "-- --"
print("---------------------------------------")
print("Build order ")
print("---------------------------------------")
print("-- --")

built = []
for (project, _) in sorted(deps.items()):
if project not in built:
BuildProject(project, built, projects, deps)

print "-- --"
print("-- --")

def PrintVCProj(projects):

for project in projects:
print "-------------------------------------"
print "-------------------------------------"
print project
print project
print project
print "-------------------------------------"
print "-------------------------------------"
print("-------------------------------------")
print("-------------------------------------")
print(project)
print(project)
print(project)
print("-------------------------------------")
print("-------------------------------------")

project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
projects[project][2]))
@@ -153,7 +155,7 @@ def PrintVCProj(projects):
def main():
# check if we have exactly 1 parameter.
if len(sys.argv) < 2:
print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0])
return 1

(projects, deps) = ParseSolution(sys.argv[1])
28 changes: 18 additions & 10 deletions tools/gyp/tools/pretty_vcproj.py
Original file line number Diff line number Diff line change
@@ -12,14 +12,22 @@
It outputs the resulting xml to stdout.
"""

__author__ = 'nsylvain (Nicolas Sylvain)'
from __future__ import print_function

import os
import sys

from xml.dom.minidom import parse
from xml.dom.minidom import Node

__author__ = 'nsylvain (Nicolas Sylvain)'

try:
  cmp
except NameError:
  # Python 3 removed the builtin cmp(); recreate the Python 2 three-way
  # compare for the sort helpers below.
  def cmp(x, y):
    """Return a negative, zero, or positive int as x is <, ==, or > y."""
    return int(x > y) - int(x < y)

REPLACEMENTS = dict()
ARGUMENTS = None

@@ -61,7 +69,7 @@ def get_string(node):
def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE:
if node.data.strip():
print '%s%s' % (' '*indent, node.data.strip())
print('%s%s' % (' '*indent, node.data.strip()))
return

if node.childNodes:
@@ -73,23 +81,23 @@ def PrettyPrintNode(node, indent=0):

# Print the main tag
if attr_count == 0:
print '%s<%s>' % (' '*indent, node.nodeName)
print('%s<%s>' % (' '*indent, node.nodeName))
else:
print '%s<%s' % (' '*indent, node.nodeName)
print('%s<%s' % (' '*indent, node.nodeName))

all_attributes = []
for (name, value) in node.attributes.items():
all_attributes.append((name, value))
all_attributes.sort(CmpTuple())
for (name, value) in all_attributes:
print '%s %s="%s"' % (' '*indent, name, value)
print '%s>' % (' '*indent)
print('%s %s="%s"' % (' '*indent, name, value))
print('%s>' % (' '*indent))
if node.nodeValue:
print '%s %s' % (' '*indent, node.nodeValue)
print('%s %s' % (' '*indent, node.nodeValue))

for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent+2)
print '%s</%s>' % (' '*indent, node.nodeName)
print('%s</%s>' % (' '*indent, node.nodeName))


def FlattenFilter(node):
@@ -283,8 +291,8 @@ def main(argv):

# check if we have exactly 1 parameter.
if len(argv) < 2:
print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
'[key2=value2]' % argv[0])
print('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
'[key2=value2]' % argv[0])
return 1

# Parse the keys

0 comments on commit b1db810

Please sign in to comment.