diff --git a/build/README.chromium b/build/README.chromium
deleted file mode 100644
index 012df35c7a..0000000000
--- a/build/README.chromium
+++ /dev/null
@@ -1,15 +0,0 @@
-List of property sheets to be included by projects:
-  common.vsprops
-    Not used anymore. No-op. Kept for compatibility with current projects.
-
-  debug.vsprops
-    Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
-
-  external_code.vsprops
-    Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
-
-  output_dll_copy.rules
-    Run to enable automatic copying of DLLs when they appear as input files in a vcproj project.
-
-  release.vsprops
-    Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependent on the CHROME_BUILD_TYPE environment variable.
diff --git a/build/android_sdk_extras.json b/build/android_sdk_extras.json
deleted file mode 100644
index 0d4f101c7a..0000000000
--- a/build/android_sdk_extras.json
+++ /dev/null
@@ -1,2 +0,0 @@
-[
-]
diff --git a/build/apply_locales.py b/build/apply_locales.py
deleted file mode 100755
index a942598b28..0000000000
--- a/build/apply_locales.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (c) 2009 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# TODO: remove this script when GYP has for loops
-
-import sys
-import optparse
-
-def main(argv):
-
-  parser = optparse.OptionParser()
-  usage = 'usage: %s [options ...] format_string locale_list'
-  parser.set_usage(usage.replace('%s', '%prog'))
-  parser.add_option('-d', dest='dash_to_underscore', action="store_true",
-                    default=False,
-                    help='map "en-US" to "en" and "-" to "_" in locales')
-
-  (options, arglist) = parser.parse_args(argv)
-
-  if len(arglist) < 3:
-    print('ERROR: need string and list of locales')
-    return 1
-
-  str_template = arglist[1]
-  locales = arglist[2:]
-
-  results = []
-  for locale in locales:
-    # For Cocoa to find the locale at runtime, it needs to use '_' instead
-    # of '-' (http://crbug.com/20441). Also, 'en-US' should be represented
-    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
-    if options.dash_to_underscore:
-      if locale == 'en-US':
-        locale = 'en'
-      locale = locale.replace('-', '_')
-    results.append(str_template.replace('ZZLOCALE', locale))
-
-  # Quote each element so filename spaces don't mess up GYP's attempt to parse
-  # it into a list.
-  print(' '.join(["'%s'" % x for x in results]))
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff --git a/build/branding_value.sh b/build/branding_value.sh
deleted file mode 100755
index 9fcb550caa..0000000000
--- a/build/branding_value.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/sh
-
-# Copyright (c) 2008 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a wrapper for fetching values from the BRANDING files. Pass the
-# value of GYP's branding variable followed by the key you want and the right
-# file is checked.
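The -d mapping in apply_locales.py above is easiest to see with an example. A minimal sketch of the substitution, where the ZZLOCALE format string and the locale list are made-up inputs rather than values from any real .gyp file:

    def map_locale(locale):
        # 'en-US' collapses to plain 'en'; Cocoa wants '_' rather than '-'
        # (see the crbug links in the script above).
        if locale == 'en-US':
            locale = 'en'
        return locale.replace('-', '_')

    template = 'locales/ZZLOCALE.pak'  # hypothetical template using ZZLOCALE
    print(' '.join("'%s'" % template.replace('ZZLOCALE', map_locale(l))
                   for l in ['en-US', 'pt-BR', 'fr']))
    # 'locales/en.pak' 'locales/pt_BR.pak' 'locales/fr.pak'

Each result is single-quoted, mirroring the script's output so GYP can parse paths containing spaces as one list element.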
-#
-# branding_value.sh Chromium COPYRIGHT
-# branding_value.sh Chromium PRODUCT_FULLNAME
-#
-
-set -e
-
-if [ $# -ne 2 ] ; then
-  echo "error: expect two arguments, branding and key" >&2
-  exit 1
-fi
-
-BUILD_BRANDING=$1
-THE_KEY=$2
-
-pushd $(dirname "${0}") > /dev/null
-BUILD_DIR=$(pwd)
-popd > /dev/null
-
-TOP="${BUILD_DIR}/.."
-
-case ${BUILD_BRANDING} in
-  Chromium)
-    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
-    ;;
-  Chrome)
-    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
-    ;;
-  *)
-    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
-    exit 1
-    ;;
-esac
-
-BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
-
-if [ -z "${BRANDING_VALUE}" ] ; then
-  echo "error: failed to find key '${THE_KEY}'" >&2
-  exit 1
-fi
-
-echo "${BRANDING_VALUE}"
diff --git a/build/build-ctags.sh b/build/build-ctags.sh
deleted file mode 100755
index 61e017e329..0000000000
--- a/build/build-ctags.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
-  cat < /dev/null || fail $1
-  mv -f .tmp_tags tags
-}
-
-# We always build the top level but leave all submodules as optional.
-build_dir --extra-excludes "" "top level"
-
-# Build any other directories that are listed on the command line.
-for dir in $@; do
-  build_dir "$1"
-  shift
-done
diff --git a/build/check_return_value.py b/build/check_return_value.py
deleted file mode 100755
index 0514262c1f..0000000000
--- a/build/check_return_value.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This program wraps an arbitrary command and prints "1" if the command ran
-successfully."""
-
-import os
-import subprocess
-import sys
-
-devnull = open(os.devnull, 'wb')
-if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
-  print(1)
-else:
-  print(0)
diff --git a/build/check_sdk_extras_version.py b/build/check_sdk_extras_version.py
deleted file mode 100755
index ed1123ec00..0000000000
--- a/build/check_sdk_extras_version.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-'''Checks the status of an Android SDK package.
-
-Verifies the given package has been installed from the Android SDK Manager and
-that its version is at least the minimum version required by the project
-configuration.
-'''
-
-import argparse
-import json
-import os
-import re
-import sys
-
-
-COLORAMA_ROOT = os.path.join(os.path.dirname(__file__),
-                             os.pardir, 'third_party', 'colorama', 'src')
-
-sys.path.append(COLORAMA_ROOT)
-import colorama
-
-
-UPDATE_SCRIPT_PATH = 'build/install-android-sdks.sh'
-
-SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
-                                    'android_sdk_extras.json')
-
-PACKAGE_VERSION_PATTERN = r'^Pkg\.Revision=(?P<version>\d+).*$'
-
-PKG_NOT_FOUND_MSG = ('Error while checking Android SDK extras versions. '
-                     'Could not find the "{package_id}" package in '
-                     '{checked_location}. Please run {script} to download it.')
-UPDATE_NEEDED_MSG = ('Error while checking Android SDK extras versions. '
-                     'Version {minimum_version} or greater is required for the '
-                     'package "{package_id}". Version {actual_version} found. '
-                     'Please run {script} to update it.')
-REQUIRED_VERSION_ERROR_MSG = ('Error while checking Android SDK extras '
-                              'versions. '
-                              'Could not retrieve the required version for '
-                              'package "{package_id}".')
-
-
-def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--package-id',
-                      help=('id of the package to check for. The list of '
-                            'available packages and their ids can be obtained '
-                            'by running '
-                            'third_party/android_tools/sdk/tools/android list '
-                            'sdk --extended'))
-  parser.add_argument('--package-location',
-                      help='path to the package\'s expected install location.',
-                      metavar='DIR')
-  parser.add_argument('--stamp',
-                      help=('if specified, a stamp file will be created at the '
-                            'provided location.'),
-                      metavar='FILE')
-
-  args = parser.parse_args()
-
-  if not ShouldSkipVersionCheck():
-    minimum_version = GetRequiredMinimumVersion(args.package_id)
-    CheckPackageVersion(args.package_id, args.package_location, minimum_version)
-
-  # Create the stamp file.
-  if args.stamp:
-    with open(args.stamp, 'a'):
-      os.utime(args.stamp, None)
-
-  sys.exit(0)
-
-def ExitError(msg):
-  sys.exit(colorama.Fore.MAGENTA + colorama.Style.BRIGHT + msg +
-           colorama.Style.RESET_ALL)
-
-
-def GetRequiredMinimumVersion(package_id):
-  with open(SDK_EXTRAS_JSON_FILE, 'r') as json_file:
-    packages = json.load(json_file)
-
-  for package in packages:
-    if package['package_id'] == package_id:
-      return int(package['version'].split('.')[0])
-
-  ExitError(REQUIRED_VERSION_ERROR_MSG.format(package_id=package_id))
-
-
-def CheckPackageVersion(pkg_id, location, minimum_version):
-  version_file_path = os.path.join(location, 'source.properties')
-  # Extracts the version of the package described by the property file. We only
-  # care about the major version number here.
-  version_pattern = re.compile(PACKAGE_VERSION_PATTERN, re.MULTILINE)
-
-  if not os.path.isfile(version_file_path):
-    ExitError(PKG_NOT_FOUND_MSG.format(
-        package_id=pkg_id,
-        checked_location=location,
-        script=UPDATE_SCRIPT_PATH))
-
-  with open(version_file_path, 'r') as f:
-    match = version_pattern.search(f.read())
-
-  if not match:
-    ExitError(PKG_NOT_FOUND_MSG.format(
-        package_id=pkg_id,
-        checked_location=location,
-        script=UPDATE_SCRIPT_PATH))
-
-  pkg_version = int(match.group('version'))
-  if pkg_version < minimum_version:
-    ExitError(UPDATE_NEEDED_MSG.format(
-        package_id=pkg_id,
-        minimum_version=minimum_version,
-        actual_version=pkg_version,
-        script=UPDATE_SCRIPT_PATH))
-
-  # Everything looks ok, print nothing.
-
-def ShouldSkipVersionCheck():
-  '''
-  Bots should not run the version check, since they download the sdk extras
-  in a different way.
- ''' - return bool(os.environ.get('CHROME_HEADLESS')) - -if __name__ == '__main__': - main() diff --git a/build/common.croc b/build/common.croc deleted file mode 100644 index fde7a8b298..0000000000 --- a/build/common.croc +++ /dev/null @@ -1,127 +0,0 @@ -# -*- python -*- -# Crocodile config file for Chromium - settings common to all platforms -# -# This should be speicified before the platform-specific config, for example: -# croc -c chrome_common.croc -c linux/chrome_linux.croc - -{ - # List of root directories, applied in order - 'roots' : [ - # Sub-paths we specifically care about and want to call out - { - 'root' : '_/src', - 'altname' : 'CHROMIUM', - }, - ], - - # List of rules, applied in order - # Note that any 'include':0 rules here will be overridden by the 'include':1 - # rules in the platform-specific configs. - 'rules' : [ - # Don't scan for executable lines in uninstrumented C++ header files - { - 'regexp' : '.*\\.(h|hpp)$', - 'add_if_missing' : 0, - }, - - # Groups - { - 'regexp' : '', - 'group' : 'source', - }, - { - 'regexp' : '.*_(test|unittest|uitest|browsertest)\\.', - 'group' : 'test', - }, - - # Languages - { - 'regexp' : '.*\\.(c|h)$', - 'language' : 'C', - }, - { - 'regexp' : '.*\\.(cc|cpp|hpp)$', - 'language' : 'C++', - }, - - # Files/paths to include. Specify these before the excludes, since rules - # are in order. - { - 'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/', - 'include' : 1, - }, - # Don't include subversion or mercurial SCM dirs - { - 'regexp' : '.*/(\\.svn|\\.hg)/', - 'include' : 0, - }, - # Don't include output dirs - { - 'regexp' : '.*/(Debug|Release|out|xcodebuild)/', - 'include' : 0, - }, - # Don't include third-party source - { - 'regexp' : '.*/third_party/', - 'include' : 0, - }, - # We don't run the V8 test suite, so we don't care about V8 coverage. 
- { - 'regexp' : '.*/v8/', - 'include' : 0, - }, - ], - - # Paths to add source from - 'add_files' : [ - 'CHROMIUM' - ], - - # Statistics to print - 'print_stats' : [ - { - 'stat' : 'files_executable', - 'format' : '*RESULT FilesKnown: files_executable= %d files', - }, - { - 'stat' : 'files_instrumented', - 'format' : '*RESULT FilesInstrumented: files_instrumented= %d files', - }, - { - 'stat' : '100.0 * files_instrumented / files_executable', - 'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent', - }, - { - 'stat' : 'lines_executable', - 'format' : '*RESULT LinesKnown: lines_known= %d lines', - }, - { - 'stat' : 'lines_instrumented', - 'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines', - }, - { - 'stat' : 'lines_covered', - 'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines', - 'group' : 'source', - }, - { - 'stat' : 'lines_covered', - 'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines', - 'group' : 'test', - }, - { - 'stat' : '100.0 * lines_covered / lines_executable', - 'format' : '*RESULT PercentCovered: percent_covered= %g percent', - }, - { - 'stat' : '100.0 * lines_covered / lines_executable', - 'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent', - 'group' : 'source', - }, - { - 'stat' : '100.0 * lines_covered / lines_executable', - 'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent', - 'group' : 'test', - }, - ], -} diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py deleted file mode 100755 index 46153ea793..0000000000 --- a/build/copy_test_data_ios.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Copies test data files or directories into a given output directory.""" - -import optparse -import os -import shutil -import sys - -class WrongNumberOfArgumentsException(Exception): - pass - -def EscapePath(path): - """Returns a path with spaces escaped.""" - return path.replace(" ", "\\ ") - -def ListFilesForPath(path): - """Returns a list of all the files under a given path.""" - output = [] - # Ignore revision control metadata directories. - if (os.path.basename(path).startswith('.git') or - os.path.basename(path).startswith('.svn')): - return output - - # Files get returned without modification. - if not os.path.isdir(path): - output.append(path) - return output - - # Directories get recursively expanded. - contents = os.listdir(path) - for item in contents: - full_path = os.path.join(path, item) - output.extend(ListFilesForPath(full_path)) - return output - -def CalcInputs(inputs): - """Computes the full list of input files for a set of command-line arguments. - """ - # |inputs| is a list of paths, which may be directories. - output = [] - for input in inputs: - output.extend(ListFilesForPath(input)) - return output - -def CopyFiles(relative_filenames, output_basedir): - """Copies files to the given output directory.""" - for file in relative_filenames: - relative_dirname = os.path.dirname(file) - output_dir = os.path.join(output_basedir, relative_dirname) - output_filename = os.path.join(output_basedir, file) - - # In cases where a directory has turned into a file or vice versa, delete it - # before copying it below. 
- if os.path.exists(output_dir) and not os.path.isdir(output_dir): - os.remove(output_dir) - if os.path.exists(output_filename) and os.path.isdir(output_filename): - shutil.rmtree(output_filename) - - if not os.path.exists(output_dir): - os.makedirs(output_dir) - shutil.copy(file, output_filename) - -def DoMain(argv): - parser = optparse.OptionParser() - usage = 'Usage: %prog -o [--inputs] [--outputs] ' - parser.set_usage(usage) - parser.add_option('-o', dest='output_dir') - parser.add_option('--inputs', action='store_true', dest='list_inputs') - parser.add_option('--outputs', action='store_true', dest='list_outputs') - options, arglist = parser.parse_args(argv) - - if len(arglist) == 0: - raise WrongNumberOfArgumentsException(' required.') - - files_to_copy = CalcInputs(arglist) - escaped_files = [EscapePath(x) for x in CalcInputs(arglist)] - if options.list_inputs: - return '\n'.join(escaped_files) - - if not options.output_dir: - raise WrongNumberOfArgumentsException('-o required.') - - if options.list_outputs: - outputs = [os.path.join(options.output_dir, x) for x in escaped_files] - return '\n'.join(outputs) - - CopyFiles(files_to_copy, options.output_dir) - return - -def main(argv): - try: - result = DoMain(argv[1:]) - except WrongNumberOfArgumentsException as e: - print(e, file=sys.stderr) - return 1 - if result: - print(result) - return 0 - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/build/cp.py b/build/cp.py deleted file mode 100755 index c7e9beccdf..0000000000 --- a/build/cp.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Copy a file. - -This module works much like the cp posix command - it takes 2 arguments: -(src, dst) and copies the file with path |src| to |dst|. -""" - -import os -import shutil -import sys - - -def Main(src, dst): - # Use copy instead of copyfile to ensure the executable bit is copied. - shutil.copy(src, os.path.normpath(dst)) - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv[1], sys.argv[2])) diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py deleted file mode 100755 index 728a7b0671..0000000000 --- a/build/detect_host_arch.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Outputs host CPU architecture in format recognized by gyp.""" - -import platform -import re -import sys - - -def HostArch(): - """Returns the host architecture with a predictable string.""" - host_arch = platform.machine() - - # Convert machine type to format recognized by gyp. - if re.match(r'i.86', host_arch) or host_arch == 'i86pc': - host_arch = 'ia32' - elif host_arch in ['x86_64', 'amd64']: - host_arch = 'x64' - elif host_arch.startswith('arm'): - host_arch = 'arm' - - # platform.machine is based on running kernel. It's possible to use 64-bit - # kernel with 32-bit userland, e.g. to give linker slightly more memory. - # Distinguish between different userland bitness by querying - # the python binary. 
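For instance, a quick probe of the two values the check below relies on; the output comment assumes a hypothetical 64-bit kernel running a 32-bit userland:

    import platform

    # platform.machine() reflects the running kernel; platform.architecture()
    # is derived from the Python binary itself, i.e. the userland.
    print(platform.machine())          # 'x86_64' even with a 32-bit userland
    print(platform.architecture()[0])  # '32bit' when Python itself is 32-bit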
- if host_arch == 'x64' and platform.architecture()[0] == '32bit': - host_arch = 'ia32' - - return host_arch - -def DoMain(_): - """Hook to be called from gyp without starting a separate python - interpreter.""" - return HostArch() - -if __name__ == '__main__': - print(DoMain([])) diff --git a/build/download_gold_plugin.py b/build/download_gold_plugin.py deleted file mode 100755 index 3d6ece8189..0000000000 --- a/build/download_gold_plugin.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2015 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Script to download LLVM gold plugin from google storage.""" - -import json -import os -import shutil -import subprocess -import sys -import zipfile - -SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) -CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir)) -sys.path.insert(0, os.path.join(CHROME_SRC, 'tools')) - -import find_depot_tools - -DEPOT_PATH = find_depot_tools.add_depot_tools_to_path() -GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py') - -LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build', - 'Release+Asserts') -CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts', - 'update.py') -CLANG_REVISION = os.popen(CLANG_UPDATE_PY + ' --print-revision').read().rstrip() - -CLANG_BUCKET = 'gs://chromium-browser-clang/Linux_x64' - -def main(): - targz_name = 'llvmgold-%s.tgz' % CLANG_REVISION - remote_path = '%s/%s' % (CLANG_BUCKET, targz_name) - - os.chdir(LLVM_BUILD_PATH) - - subprocess.check_call(['python', GSUTIL_PATH, - 'cp', remote_path, targz_name]) - subprocess.check_call(['tar', 'xzf', targz_name]) - os.remove(targz_name) - return 0 - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py deleted file mode 100755 index 2226bb9b6a..0000000000 --- a/build/download_nacl_toolchains.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Shim to run nacl toolchain download script only if there is a nacl dir.""" - -import os -import shutil -import sys - - -def Main(args): - # Exit early if disable_nacl=1. - if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''): - return 0 - script_dir = os.path.dirname(os.path.abspath(__file__)) - src_dir = os.path.dirname(script_dir) - nacl_dir = os.path.join(src_dir, 'native_client') - nacl_build_dir = os.path.join(nacl_dir, 'build') - package_version_dir = os.path.join(nacl_build_dir, 'package_version') - package_version = os.path.join(package_version_dir, 'package_version.py') - if not os.path.exists(package_version): - print("Can't find '%s'" % package_version) - print('Presumably you are intentionally building without NativeClient.') - print('Skipping NativeClient toolchain download.') - sys.exit(0) - sys.path.insert(0, package_version_dir) - import package_version - - # BUG: - # We remove this --optional-pnacl argument, and instead replace it with - # --no-pnacl for most cases. However, if the bot name is an sdk - # bot then we will go ahead and download it. This prevents increasing the - # gclient sync time for developers, or standard Chrome bots. 
- if '--optional-pnacl' in args: - args.remove('--optional-pnacl') - use_pnacl = False - buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '') - if 'pnacl' in buildbot_name and 'sdk' in buildbot_name: - use_pnacl = True - if use_pnacl: - print('\n*** DOWNLOADING PNACL TOOLCHAIN ***\n') - else: - args = ['--exclude', 'pnacl_newlib'] + args - - # Only download the ARM gcc toolchain if we are building for ARM - # TODO(olonho): we need to invent more reliable way to get build - # configuration info, to know if we're building for ARM. - if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''): - args = ['--exclude', 'nacl_arm_newlib'] + args - - package_version.main(args) - - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv[1:])) diff --git a/build/download_sdk_extras.py b/build/download_sdk_extras.py deleted file mode 100755 index 7e89dd2c8c..0000000000 --- a/build/download_sdk_extras.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Script to download sdk/extras packages on the bots from google storage. - -The script expects arguments that specify zips file in the google storage -bucket named: __.zip. The file will -be extracted in the android_tools/sdk/extras directory on the test bots. This -script will not do anything for developers. - -TODO(navabi): Move this script (crbug.com/459819). -""" - -import json -import os -import shutil -import subprocess -import sys -import zipfile - -SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) -CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir)) -sys.path.insert(0, os.path.join(SCRIPT_DIR, 'android')) -sys.path.insert(1, os.path.join(CHROME_SRC, 'tools')) - -from pylib import constants -import find_depot_tools - -DEPOT_PATH = find_depot_tools.add_depot_tools_to_path() -GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py') -SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras' -SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras') -SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__), - 'android_sdk_extras.json') - - -def clean_and_extract(dir_name, package_name, zip_file): - local_dir = '%s/%s/%s' % (SDK_EXTRAS_PATH, dir_name, package_name) - if os.path.exists(local_dir): - shutil.rmtree(local_dir) - local_zip = '%s/%s' % (SDK_EXTRAS_PATH, zip_file) - with zipfile.ZipFile(local_zip) as z: - z.extractall(path=SDK_EXTRAS_PATH) - - -def main(): - if not os.environ.get('CHROME_HEADLESS'): - # This is not a buildbot checkout. - return 0 - # Update the android_sdk_extras.json file to update downloaded packages. - with open(SDK_EXTRAS_JSON_FILE) as json_file: - packages = json.load(json_file) - for package in packages: - local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip']) - if not os.path.exists(local_zip): - package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip']) - try: - subprocess.check_call(['python', GSUTIL_PATH, '--force-version', '4.7', - 'cp', package_zip, local_zip]) - except subprocess.CalledProcessError: - print ('WARNING: Failed to download SDK packages. If this bot compiles ' - 'for Android, it may have errors.') - return 0 - # Always clean dir and extract zip to ensure correct contents. 
- clean_and_extract(package['dir_name'], package['package'], package['zip']) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/env_dump.py b/build/env_dump.py deleted file mode 100755 index e6c1bd3a03..0000000000 --- a/build/env_dump.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This script can either source a file and dump the enironment changes done by -# it, or just simply dump the current environment as JSON into a file. - -import json -import optparse -import os -import pipes -import subprocess -import sys - - -def main(): - parser = optparse.OptionParser() - parser.add_option('-f', '--output-json', - help='File to dump the environment as JSON into.') - parser.add_option( - '-d', '--dump-mode', action='store_true', - help='Dump the environment to sys.stdout and exit immediately.') - - parser.disable_interspersed_args() - options, args = parser.parse_args() - if options.dump_mode: - if args or options.output_json: - parser.error('Cannot specify args or --output-json with --dump-mode.') - json.dump(dict(os.environ), sys.stdout) - else: - if not options.output_json: - parser.error('Requires --output-json option.') - - envsetup_cmd = ' '.join(map(pipes.quote, args)) - full_cmd = [ - 'bash', '-c', - '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__)) - ] - try: - output = subprocess.check_output(full_cmd, universal_newlines=True) - except Exception as e: - sys.exit('Error running %s and dumping environment.' % envsetup_cmd) - - env_diff = {} - new_env = json.loads(output) - for k, val in list(new_env.items()): - if k == '_' or (k in os.environ and os.environ[k] == val): - continue - env_diff[k] = val - with open(options.output_json, 'w') as f: - json.dump(env_diff, f) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py deleted file mode 100755 index ad27b72715..0000000000 --- a/build/extract_from_cab.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Extracts a single file from a CAB archive.""" - -import os -import shutil -import subprocess -import sys -import tempfile - -def run_quiet(*args): - """Run 'expand' suppressing noisy output. Returns returncode from process.""" - popen = subprocess.Popen(args, stdout=subprocess.PIPE) - out, _ = popen.communicate() - if popen.returncode: - # expand emits errors to stdout, so if we fail, then print that out. - print(out) - return popen.returncode - -def main(): - if len(sys.argv) != 4: - print('Usage: extract_from_cab.py cab_path archived_file output_dir') - return 1 - - [cab_path, archived_file, output_dir] = sys.argv[1:] - - # Expand.exe does its work in a fixed-named temporary directory created within - # the given output directory. This is a problem for concurrent extractions, so - # create a unique temp dir within the desired output directory to work around - # this limitation. - temp_dir = tempfile.mkdtemp(dir=output_dir) - - try: - # Invoke the Windows expand utility to extract the file. 
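Stepping back to env_dump.py above: its two modes combine into a source-then-diff round trip that can be exercised directly. A sketch, where setup.sh and the env_dump.py path are hypothetical:

    import json, os, subprocess

    # Source a (hypothetical) setup.sh, then have env_dump.py print the
    # resulting environment as JSON on stdout, mirroring its -d mode.
    out = subprocess.check_output(
        ['bash', '-c', '. ./setup.sh > /dev/null; python3 env_dump.py -d'],
        universal_newlines=True)
    new_env = json.loads(out)
    # Keep only variables the sourced script added or changed, as the
    # script above does before writing --output-json.
    diff = {k: v for k, v in new_env.items()
            if k != '_' and os.environ.get(k) != v}
    print(sorted(diff))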
-    level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
-    if level == 0:
-      # Move the output file into place, preserving expand.exe's behavior of
-      # paving over any preexisting file.
-      output_file = os.path.join(output_dir, archived_file)
-      try:
-        os.remove(output_file)
-      except OSError:
-        pass
-      os.rename(os.path.join(temp_dir, archived_file), output_file)
-  finally:
-    shutil.rmtree(temp_dir, True)
-
-  if level != 0:
-    return level
-
-  # The expand utility preserves the modification date and time of the archived
-  # file. Touch the extracted file. This helps build systems that compare the
-  # modification times of input and output files to determine whether to do an
-  # action.
-  os.utime(os.path.join(output_dir, archived_file), None)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/find_isolated_tests.py b/build/find_isolated_tests.py
deleted file mode 100755
index 9d3fe37a6e..0000000000
--- a/build/find_isolated_tests.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Scans build output directory for .isolated files, calculates their SHA1
-hashes, stores final list in JSON document and then removes *.isolated files
-found (to ensure no stale *.isolated stay around on the next build).
-
-Used to figure out what tests were built in isolated mode to trigger these
-tests to run on swarming.
-
-For more info see:
-https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing
-"""
-
-import glob
-import hashlib
-import json
-import optparse
-import os
-import re
-import sys
-
-
-def hash_file(filepath):
-  """Calculates the hash of a file without reading it all in memory at once."""
-  digest = hashlib.sha1()
-  with open(filepath, 'rb') as f:
-    while True:
-      chunk = f.read(1024*1024)
-      if not chunk:
-        break
-      digest.update(chunk)
-  return digest.hexdigest()
-
-
-def main():
-  parser = optparse.OptionParser(
-      usage='%prog --build-dir <path> --output-json <path>',
-      description=sys.modules[__name__].__doc__)
-  parser.add_option(
-      '--build-dir',
-      help='Path to a directory to search for *.isolated files.')
-  parser.add_option(
-      '--output-json',
-      help='File to dump JSON results into.')
-
-  options, _ = parser.parse_args()
-  if not options.build_dir:
-    parser.error('--build-dir option is required')
-  if not options.output_json:
-    parser.error('--output-json option is required')
-
-  result = {}
-
-  # Get the file hash values and output the pair.
-  pattern = os.path.join(options.build_dir, '*.isolated')
-  for filepath in sorted(glob.glob(pattern)):
-    test_name = os.path.splitext(os.path.basename(filepath))[0]
-    if re.match(r'^.+?\.\d$', test_name):
-      # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
-      continue
-
-    # TODO(csharp): Remove deletion once the isolate tracked dependencies are
-    # inputs for the isolated files.
-    sha1_hash = hash_file(filepath)
-    os.remove(filepath)
-    result[test_name] = sha1_hash
-
-  with open(options.output_json, 'w') as f:
-    json.dump(result, f)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/gdb-add-index b/build/gdb-add-index
deleted file mode 100755
index 992ac16159..0000000000
--- a/build/gdb-add-index
+++ /dev/null
@@ -1,162 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -# -# Saves the gdb index for a given binary and its shared library dependencies. -# -# This will run gdb index in parallel on a number of binaries using SIGUSR1 -# as the communication mechanism to simulate a semaphore. Because of the -# nature of this technique, using "set -e" is very difficult. The SIGUSR1 -# terminates a "wait" with an error which we need to interpret. -# -# When modifying this code, most of the real logic is in the index_one_file -# function. The rest is cleanup + sempahore plumbing. - -# Cleanup temp directory and ensure all child jobs are dead-dead. -function on_exit { - trap "" EXIT USR1 # Avoid reentrancy. - - local jobs=$(jobs -p) - if [ -n "$jobs" ]; then - echo -n "Killing outstanding index jobs..." - kill -KILL $(jobs -p) - wait - echo "done" - fi - - if [ -f "$DIRECTORY" ]; then - echo -n "Removing temp directory $DIRECTORY..." - rm -rf $DIRECTORY - echo done - fi -} - -# Add index to one binary. -function index_one_file { - local file=$1 - local basename=$(basename "$file") - local should_index="${SHOULD_INDEX}" - - local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file") - if [[ $readelf_out =~ "gdb_index" ]]; then - if [ "${REMOVE_INDEX}" = 1 ]; then - ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file" - echo "Removed index from $basename." - else - echo "Skipped $basename -- already contains index." - should_index=0 - fi - fi - - if [ "${should_index}" = 1 ]; then - local start=$(date +"%s%N") - echo "Adding index to $basename..." - - ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $DIRECTORY" \ - -ex "quit" - local index_file="$DIRECTORY/$basename.gdb-index" - if [ -f "$index_file" ]; then - ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \ - --set-section-flags .gdb_index=readonly "$file" "$file" - local finish=$(date +"%s%N") - local elapsed=$(((finish - start)/1000000)) - echo " ...$basename indexed. [${elapsed}ms]" - else - echo " ...$basename unindexable." - fi - fi -} - -# Functions that when combined, concurrently index all files in FILES_TO_INDEX -# array. The global FILES_TO_INDEX is declared in the main body of the script. -function async_index { - # Start a background subshell to run the index command. - { - index_one_file $1 - kill -SIGUSR1 $$ # $$ resolves to the parent script. - exit 129 # See comment above wait loop at bottom. - } & -} - -CUR_FILE_NUM=0 -function index_next { - if (( CUR_FILE_NUM >= ${#FILES_TO_INDEX[@]} )); then - return - fi - - async_index "${FILES_TO_INDEX[CUR_FILE_NUM]}" - ((CUR_FILE_NUM += 1)) || true -} - - -######## -### Main body of the script. - -REMOVE_INDEX=0 -SHOULD_INDEX=1 -while getopts ":f:r" opt; do - case $opt in - f) - REMOVE_INDEX=1 - shift - ;; - r) - REMOVE_INDEX=1 - SHOULD_INDEX=0 - shift - ;; - *) - echo "Invalid option: -$OPTARG" >&2 - ;; - esac -done - -if [[ ! $# == 1 ]]; then - echo "Usage: $0 [-f] [-r] path-to-binary" - echo " -f forces replacement of an existing index." - echo " -r removes the index section." - exit 1 -fi - -FILENAME="$1" -if [[ ! -f "$FILENAME" ]]; then - echo "Path $FILENAME does not exist." - exit 1 -fi - -# Ensure we cleanup on on exit. -trap on_exit EXIT - -# We're good to go! Create temp directory for index files. -DIRECTORY=$(mktemp -d) -echo "Made temp directory $DIRECTORY." - -# Create array with the filename and all shared libraries that -# have the same dirname. 
The dirname is a signal that these -# shared libraries were part of the same build as the binary. -declare -a FILES_TO_INDEX=($FILENAME - $(ldd "$FILENAME" 2>/dev/null \ - | grep $(dirname "$FILENAME") \ - | sed "s/.*[ \t]\(.*\) (.*/\1/") -) - -# Start concurrent indexing. -trap index_next USR1 - -# 4 is an arbitrary default. When changing, remember we are likely IO bound -# so basing this off the number of cores is not sensible. -INDEX_TASKS=${INDEX_TASKS:-4} -for ((i=0;i<${INDEX_TASKS};i++)); do - index_next -done - -# Do a wait loop. Bash waits that terminate due a trap have an exit -# code > 128. We also ensure that our subshell's "normal" exit occurs with -# an exit code > 128. This allows us to do consider a > 128 exit code as -# an indication that the loop should continue. Unfortunately, it also means -# we cannot use set -e since technically the "wait" is failing. -wait -while (( $? > 128 )); do - wait -done diff --git a/build/get_sdk_extras_packages.py b/build/get_sdk_extras_packages.py deleted file mode 100755 index 4c9f87f451..0000000000 --- a/build/get_sdk_extras_packages.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import json -import os -import sys - -SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__), - 'android_sdk_extras.json') - -def main(): - with open(SDK_EXTRAS_JSON_FILE) as json_file: - packages = json.load(json_file) - - out = [] - for package in packages: - out.append(package['package_id']) - - print(','.join(out)) - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/get_syzygy_binaries.py b/build/get_syzygy_binaries.py deleted file mode 100755 index 20393421c6..0000000000 --- a/build/get_syzygy_binaries.py +++ /dev/null @@ -1,488 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""A utility script for downloading versioned Syzygy binaries.""" - -import hashlib -import errno -import json -import logging -import optparse -import os -import re -import shutil -import stat -import sys -import subprocess -import tempfile -import time -import zipfile - - -_LOGGER = logging.getLogger(os.path.basename(__file__)) - -# The relative path where official builds are archived in their GS bucket. -_SYZYGY_ARCHIVE_PATH = ('/builds/official/%(revision)s') - -# A JSON file containing the state of the download directory. If this file and -# directory state do not agree, then the binaries will be downloaded and -# installed again. -_STATE = '.state' - -# This matches an integer (an SVN revision number) or a SHA1 value (a GIT hash). -# The archive exclusively uses lowercase GIT hashes. -_REVISION_RE = re.compile('^(?:\d+|[a-f0-9]{40})$') - -# This matches an MD5 hash. -_MD5_RE = re.compile('^[a-f0-9]{32}$') - -# List of reources to be downloaded and installed. These are tuples with the -# following format: -# (basename, logging name, relative installation path, extraction filter) -_RESOURCES = [ - ('benchmark.zip', 'benchmark', '', None), - ('binaries.zip', 'binaries', 'exe', None), - ('symbols.zip', 'symbols', 'exe', - lambda x: x.filename.endswith('.dll.pdb'))] - - -def _LoadState(output_dir): - """Loads the contents of the state file for a given |output_dir|, returning - None if it doesn't exist. 
- """ - path = os.path.join(output_dir, _STATE) - if not os.path.exists(path): - _LOGGER.debug('No state file found.') - return None - with open(path, 'rb') as f: - _LOGGER.debug('Reading state file: %s', path) - try: - return json.load(f) - except ValueError: - _LOGGER.debug('Invalid state file.') - return None - - -def _SaveState(output_dir, state, dry_run=False): - """Saves the |state| dictionary to the given |output_dir| as a JSON file.""" - path = os.path.join(output_dir, _STATE) - _LOGGER.debug('Writing state file: %s', path) - if dry_run: - return - with open(path, 'wb') as f: - f.write(json.dumps(state, sort_keys=True, indent=2)) - - -def _Md5(path): - """Returns the MD5 hash of the file at |path|, which must exist.""" - return hashlib.md5(open(path, 'rb').read()).hexdigest() - - -def _StateIsValid(state): - """Returns true if the given state structure is valid.""" - if not isinstance(state, dict): - _LOGGER.debug('State must be a dict.') - return False - r = state.get('revision', None) - if not isinstance(r, str) or not _REVISION_RE.match(r): - _LOGGER.debug('State contains an invalid revision.') - return False - c = state.get('contents', None) - if not isinstance(c, dict): - _LOGGER.debug('State must contain a contents dict.') - return False - for (relpath, md5) in c.items(): - if not isinstance(relpath, str) or len(relpath) == 0: - _LOGGER.debug('State contents dict contains an invalid path.') - return False - if not isinstance(md5, str) or not _MD5_RE.match(md5): - _LOGGER.debug('State contents dict contains an invalid MD5 digest.') - return False - return True - - -def _BuildActualState(stored, revision, output_dir): - """Builds the actual state using the provided |stored| state as a template. - Only examines files listed in the stored state, causing the script to ignore - files that have been added to the directories locally. |stored| must be a - valid state dictionary. - """ - contents = {} - state = { 'revision': revision, 'contents': contents } - for relpath, md5 in stored['contents'].items(): - abspath = os.path.abspath(os.path.join(output_dir, relpath)) - if os.path.isfile(abspath): - m = _Md5(abspath) - contents[relpath] = m - - return state - - -def _StatesAreConsistent(stored, actual): - """Validates whether two state dictionaries are consistent. Both must be valid - state dictionaries. Additional entries in |actual| are ignored. - """ - if stored['revision'] != actual['revision']: - _LOGGER.debug('Mismatched revision number.') - return False - cont_stored = stored['contents'] - cont_actual = actual['contents'] - for relpath, md5 in cont_stored.items(): - if relpath not in cont_actual: - _LOGGER.debug('Missing content: %s', relpath) - return False - if md5 != cont_actual[relpath]: - _LOGGER.debug('Modified content: %s', relpath) - return False - return True - - -def _GetCurrentState(revision, output_dir): - """Loads the current state and checks to see if it is consistent. Returns - a tuple (state, bool). The returned state will always be valid, even if an - invalid state is present on disk. - """ - stored = _LoadState(output_dir) - if not _StateIsValid(stored): - _LOGGER.debug('State is invalid.') - # Return a valid but empty state. - return ({'revision': '0', 'contents': {}}, False) - actual = _BuildActualState(stored, revision, output_dir) - # If the script has been modified consider the state invalid. 
- path = os.path.join(output_dir, _STATE) - if os.path.getmtime(__file__) > os.path.getmtime(path): - return (stored, False) - # Otherwise, explicitly validate the state. - if not _StatesAreConsistent(stored, actual): - return (stored, False) - return (stored, True) - - -def _DirIsEmpty(path): - """Returns true if the given directory is empty, false otherwise.""" - for root, dirs, files in os.walk(path): - return not dirs and not files - - -def _RmTreeHandleReadOnly(func, path, exc): - """An error handling function for use with shutil.rmtree. This will - detect failures to remove read-only files, and will change their properties - prior to removing them. This is necessary on Windows as os.remove will return - an access error for read-only files, and git repos contain read-only - pack/index files. - """ - excvalue = exc[1] - if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: - _LOGGER.debug('Removing read-only path: %s', path) - os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) - func(path) - else: - raise - - -def _RmTree(path): - """A wrapper of shutil.rmtree that handles read-only files.""" - shutil.rmtree(path, ignore_errors=False, onerror=_RmTreeHandleReadOnly) - - -def _CleanState(output_dir, state, dry_run=False): - """Cleans up files/directories in |output_dir| that are referenced by - the given |state|. Raises an error if there are local changes. Returns a - dictionary of files that were deleted. - """ - _LOGGER.debug('Deleting files from previous installation.') - deleted = {} - - # Generate a list of files to delete, relative to |output_dir|. - contents = state['contents'] - files = sorted(contents.keys()) - - # Try to delete the files. Keep track of directories to delete as well. - dirs = {} - for relpath in files: - fullpath = os.path.join(output_dir, relpath) - fulldir = os.path.dirname(fullpath) - dirs[fulldir] = True - if os.path.exists(fullpath): - # If somehow the file has become a directory complain about it. - if os.path.isdir(fullpath): - raise Exception('Directory exists where file expected: %s' % fullpath) - - # Double check that the file doesn't have local changes. If it does - # then refuse to delete it. - if relpath in contents: - stored_md5 = contents[relpath] - actual_md5 = _Md5(fullpath) - if actual_md5 != stored_md5: - raise Exception('File has local changes: %s' % fullpath) - - # The file is unchanged so it can safely be deleted. - _LOGGER.debug('Deleting file "%s".', fullpath) - deleted[relpath] = True - if not dry_run: - os.unlink(fullpath) - - # Sort directories from longest name to shortest. This lets us remove empty - # directories from the most nested paths first. - dirs = sorted(list(dirs.keys()), key=lambda x: len(x), reverse=True) - for p in dirs: - if os.path.exists(p) and _DirIsEmpty(p): - _LOGGER.debug('Deleting empty directory "%s".', p) - if not dry_run: - _RmTree(p) - - return deleted - - -def _FindGsUtil(): - """Looks for depot_tools and returns the absolute path to gsutil.py.""" - for path in os.environ['PATH'].split(os.pathsep): - path = os.path.abspath(path) - git_cl = os.path.join(path, 'git_cl.py') - gs_util = os.path.join(path, 'gsutil.py') - if os.path.exists(git_cl) and os.path.exists(gs_util): - return gs_util - return None - - -def _GsUtil(*cmd): - """Runs the given command in gsutil with exponential backoff and retries.""" - gs_util = _FindGsUtil() - cmd = [sys.executable, gs_util] + list(cmd) - - retries = 3 - timeout = 4 # Seconds. 
- while True: - _LOGGER.debug('Running %s', cmd) - prog = subprocess.Popen(cmd, shell=False) - prog.communicate() - - # Stop retrying on success. - if prog.returncode == 0: - return - - # Raise a permanent failure if retries have been exhausted. - if retries == 0: - raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode)) - - _LOGGER.debug('Sleeping %d seconds and trying again.', timeout) - time.sleep(timeout) - retries -= 1 - timeout *= 2 - - -def _Download(resource): - """Downloads the given GS resource to a temporary file, returning its path.""" - tmp = tempfile.mkstemp(suffix='syzygy_archive') - os.close(tmp[0]) - url = 'gs://syzygy-archive' + resource - _GsUtil('cp', url, tmp[1]) - return tmp[1] - - -def _InstallBinaries(options, deleted={}): - """Installs Syzygy binaries. This assumes that the output directory has - already been cleaned, as it will refuse to overwrite existing files.""" - contents = {} - state = { 'revision': options.revision, 'contents': contents } - archive_path = _SYZYGY_ARCHIVE_PATH % { 'revision': options.revision } - if options.resources: - resources = [(resource, resource, '', None) - for resource in options.resources] - else: - resources = _RESOURCES - for (base, name, subdir, filt) in resources: - # Create the output directory if it doesn't exist. - fulldir = os.path.join(options.output_dir, subdir) - if os.path.isfile(fulldir): - raise Exception('File exists where a directory needs to be created: %s' % - fulldir) - if not os.path.exists(fulldir): - _LOGGER.debug('Creating directory: %s', fulldir) - if not options.dry_run: - os.makedirs(fulldir) - - # Download and read the archive. - resource = archive_path + '/' + base - _LOGGER.debug('Retrieving %s archive at "%s".', name, resource) - path = _Download(resource) - - _LOGGER.debug('Unzipping %s archive.', name) - with open(path, 'rb') as data: - archive = zipfile.ZipFile(data) - for entry in archive.infolist(): - if not filt or filt(entry): - fullpath = os.path.normpath(os.path.join(fulldir, entry.filename)) - relpath = os.path.relpath(fullpath, options.output_dir) - if os.path.exists(fullpath): - # If in a dry-run take into account the fact that the file *would* - # have been deleted. - if options.dry_run and relpath in deleted: - pass - else: - raise Exception('Path already exists: %s' % fullpath) - - # Extract the file and update the state dictionary. - _LOGGER.debug('Extracting "%s".', fullpath) - if not options.dry_run: - archive.extract(entry.filename, fulldir) - md5 = _Md5(fullpath) - contents[relpath] = md5 - if sys.platform == 'cygwin': - os.chmod(fullpath, os.stat(fullpath).st_mode | stat.S_IXUSR) - - _LOGGER.debug('Removing temporary file "%s".', path) - os.remove(path) - - return state - - -def _ParseCommandLine(): - """Parses the command-line and returns an options structure.""" - option_parser = optparse.OptionParser() - option_parser.add_option('--dry-run', action='store_true', default=False, - help='If true then will simply list actions that would be performed.') - option_parser.add_option('--force', action='store_true', default=False, - help='Force an installation even if the binaries are up to date.') - option_parser.add_option('--no-cleanup', action='store_true', default=False, - help='Allow installation on non-Windows platforms, and skip the forced ' - 'cleanup step.') - option_parser.add_option('--output-dir', type='string', - help='The path where the binaries will be replaced. 
Existing binaries ' - 'will only be overwritten if not up to date.') - option_parser.add_option('--overwrite', action='store_true', default=False, - help='If specified then the installation will happily delete and rewrite ' - 'the entire output directory, blasting any local changes.') - option_parser.add_option('--revision', type='string', - help='The SVN revision or GIT hash associated with the required version.') - option_parser.add_option('--revision-file', type='string', - help='A text file containing an SVN revision or GIT hash.') - option_parser.add_option('--resource', type='string', action='append', - dest='resources', help='A resource to be downloaded.') - option_parser.add_option('--verbose', dest='log_level', action='store_const', - default=logging.INFO, const=logging.DEBUG, - help='Enables verbose logging.') - option_parser.add_option('--quiet', dest='log_level', action='store_const', - default=logging.INFO, const=logging.ERROR, - help='Disables all output except for errors.') - options, args = option_parser.parse_args() - if args: - option_parser.error('Unexpected arguments: %s' % args) - if not options.output_dir: - option_parser.error('Must specify --output-dir.') - if not options.revision and not options.revision_file: - option_parser.error('Must specify one of --revision or --revision-file.') - if options.revision and options.revision_file: - option_parser.error('Must not specify both --revision and --revision-file.') - - # Configure logging. - logging.basicConfig(level=options.log_level) - - # If a revision file has been specified then read it. - if options.revision_file: - options.revision = open(options.revision_file, 'rb').read().strip() - _LOGGER.debug('Parsed revision "%s" from file "%s".', - options.revision, options.revision_file) - - # Ensure that the specified SVN revision or GIT hash is valid. - if not _REVISION_RE.match(options.revision): - option_parser.error('Must specify a valid SVN or GIT revision.') - - # This just makes output prettier to read. - options.output_dir = os.path.normpath(options.output_dir) - - return options - - -def _RemoveOrphanedFiles(options): - """This is run on non-Windows systems to remove orphaned files that may have - been downloaded by a previous version of this script. - """ - # Reconfigure logging to output info messages. This will allow inspection of - # cleanup status on non-Windows buildbots. - _LOGGER.setLevel(logging.INFO) - - output_dir = os.path.abspath(options.output_dir) - - # We only want to clean up the folder in 'src/third_party/syzygy', and we - # expect to be called with that as an output directory. This is an attempt to - # not start deleting random things if the script is run from an alternate - # location, or not called from the gclient hooks. 
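One detail of _InstallBinaries above worth isolating: the optional extraction filter carried in _RESOURCES, which keeps only matching archive members. The same pattern in isolation, where the archive name and target directory are hypothetical:

    import zipfile

    # Same shape as the symbols.zip entry in _RESOURCES: extract only the
    # .dll.pdb members into the exe/ subdirectory.
    with zipfile.ZipFile('symbols.zip') as archive:
        for entry in archive.infolist():
            if entry.filename.endswith('.dll.pdb'):
                archive.extract(entry.filename, 'exe')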
- expected_syzygy_dir = os.path.abspath(os.path.join( - os.path.dirname(__file__), '..', 'third_party', 'syzygy')) - expected_output_dir = os.path.join(expected_syzygy_dir, 'binaries') - if expected_output_dir != output_dir: - _LOGGER.info('Unexpected output directory, skipping cleanup.') - return - - if not os.path.isdir(expected_syzygy_dir): - _LOGGER.info('Output directory does not exist, skipping cleanup.') - return - - def OnError(function, path, excinfo): - """Logs error encountered by shutil.rmtree.""" - _LOGGER.error('Error when running %s(%s)', function, path, exc_info=excinfo) - - _LOGGER.info('Removing orphaned files from %s', expected_syzygy_dir) - if not options.dry_run: - shutil.rmtree(expected_syzygy_dir, True, OnError) - - -def main(): - options = _ParseCommandLine() - - if options.dry_run: - _LOGGER.debug('Performing a dry-run.') - - # We only care about Windows platforms, as the Syzygy binaries aren't used - # elsewhere. However, there was a short period of time where this script - # wasn't gated on OS types, and those OSes downloaded and installed binaries. - # This will cleanup orphaned files on those operating systems. - if sys.platform not in ('win32', 'cygwin'): - if options.no_cleanup: - _LOGGER.debug('Skipping usual cleanup for non-Windows platforms.') - else: - return _RemoveOrphanedFiles(options) - - # Load the current installation state, and validate it against the - # requested installation. - state, is_consistent = _GetCurrentState(options.revision, options.output_dir) - - # Decide whether or not an install is necessary. - if options.force: - _LOGGER.debug('Forcing reinstall of binaries.') - elif is_consistent: - # Avoid doing any work if the contents of the directory are consistent. - _LOGGER.debug('State unchanged, no reinstall necessary.') - return - - # Under normal logging this is the only only message that will be reported. - _LOGGER.info('Installing revision %s Syzygy binaries.', - options.revision[0:12]) - - # Clean up the old state to begin with. - deleted = [] - if options.overwrite: - if os.path.exists(options.output_dir): - # If overwrite was specified then take a heavy-handed approach. - _LOGGER.debug('Deleting entire installation directory.') - if not options.dry_run: - _RmTree(options.output_dir) - else: - # Otherwise only delete things that the previous installation put in place, - # and take care to preserve any local changes. - deleted = _CleanState(options.output_dir, state, options.dry_run) - - # Install the new binaries. In a dry-run this will actually download the - # archives, but it won't write anything to disk. - state = _InstallBinaries(options, deleted) - - # Build and save the state for the directory. - _SaveState(options.output_dir, state, options.dry_run) - - -if __name__ == '__main__': - main() diff --git a/build/gyp_chromium b/build/gyp_chromium deleted file mode 100755 index 383b6450cd..0000000000 --- a/build/gyp_chromium +++ /dev/null @@ -1,333 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This script is wrapper for Chromium that adds some support for how GYP -# is invoked by Chromium beyond what can be done in the gclient hooks. 
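A last note on get_syzygy_binaries above: _GsUtil wraps every gsutil invocation in a retry loop that doubles its sleep after each failure. The pattern reduced to its core, with a placeholder command line:

    import subprocess, time

    def call_with_backoff(cmd, retries=3, timeout=4):
        # Sleeps 4s, 8s, 16s between attempts, then gives up, as in _GsUtil.
        while True:
            if subprocess.call(cmd) == 0:
                return
            if retries == 0:
                raise RuntimeError('Command %r kept failing.' % (cmd,))
            time.sleep(timeout)
            retries -= 1
            timeout *= 2

    # call_with_backoff(['python', 'gsutil.py', 'cp', url, dest])  # placeholder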
- -import argparse -import glob -import gyp_environment -import os -import re -import shlex -import subprocess -import string -import sys -import vs_toolchain - -script_dir = os.path.dirname(os.path.realpath(__file__)) -chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir)) - -sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib')) -import gyp - -# Assume this file is in a one-level-deep subdirectory of the source root. -SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - -# Add paths so that pymod_do_main(...) can import files. -sys.path.insert(1, os.path.join(chrome_src, 'android_webview', 'tools')) -sys.path.insert(1, os.path.join(chrome_src, 'build', 'android', 'gyp')) -sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build')) -sys.path.insert(1, os.path.join(chrome_src, 'chromecast', 'tools', 'build')) -sys.path.insert(1, os.path.join(chrome_src, 'ios', 'chrome', 'tools', 'build')) -sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build')) -sys.path.insert(1, os.path.join(chrome_src, 'native_client_sdk', 'src', - 'build_tools')) -sys.path.insert(1, os.path.join(chrome_src, 'remoting', 'tools', 'build')) -sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'liblouis')) -sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'WebKit', - 'Source', 'build', 'scripts')) -sys.path.insert(1, os.path.join(chrome_src, 'tools')) -sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers')) -sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit')) - -# On Windows, Psyco shortens warm runs of build/gyp_chromium by about -# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70 -# seconds. Conversely, memory usage of build/gyp_chromium with Psyco -# maxes out at about 158 MB vs. 132 MB without it. -# -# Psyco uses native libraries, so we need to load a different -# installation depending on which OS we are running under. It has not -# been tested whether using Psyco on our Mac and Linux builds is worth -# it (the GYP running time is a lot shorter, so the JIT startup cost -# may not be worth it). -if sys.platform == 'win32': - try: - sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32')) - import psyco - except: - psyco = None -else: - psyco = None - - -def GetSupplementalFiles(): - """Returns a list of the supplemental files that are included in all GYP - sources.""" - return glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi')) - - -def ProcessGypDefinesItems(items): - """Converts a list of strings to a list of key-value pairs.""" - result = [] - for item in items: - tokens = item.split('=', 1) - # Some GYP variables have hyphens, which we don't support. - if len(tokens) == 2: - result += [(tokens[0], tokens[1])] - else: - # No value supplied, treat it as a boolean and set it. Note that we - # use the string '1' here so we have a consistent definition whether - # you do 'foo=1' or 'foo'. - result += [(tokens[0], '1')] - return result - - -def GetGypVars(supplemental_files): - """Returns a dictionary of all GYP vars.""" - # Find the .gyp directory in the user's home directory. 
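ProcessGypDefinesItems above turns GYP_DEFINES-style text into key/value pairs, defaulting a bare name to the string '1'. For example, on sample input only:

    import shlex

    items = shlex.split('component=shared_library clang')  # sample GYP_DEFINES
    pairs = [tuple(i.split('=', 1)) if '=' in i else (i, '1') for i in items]
    print(pairs)  # [('component', 'shared_library'), ('clang', '1')]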
- home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None) - if home_dot_gyp: - home_dot_gyp = os.path.expanduser(home_dot_gyp) - if not home_dot_gyp: - home_vars = ['HOME'] - if sys.platform in ('cygwin', 'win32'): - home_vars.append('USERPROFILE') - for home_var in home_vars: - home = os.getenv(home_var) - if home != None: - home_dot_gyp = os.path.join(home, '.gyp') - if not os.path.exists(home_dot_gyp): - home_dot_gyp = None - else: - break - - if home_dot_gyp: - include_gypi = os.path.join(home_dot_gyp, "include.gypi") - if os.path.exists(include_gypi): - supplemental_files += [include_gypi] - - # GYP defines from the supplemental.gypi files. - supp_items = [] - for supplement in supplemental_files: - with open(supplement, 'r') as f: - try: - file_data = eval(f.read(), {'__builtins__': None}, None) - except SyntaxError as e: - e.filename = os.path.abspath(supplement) - raise - variables = file_data.get('variables', []) - for v in variables: - supp_items += [(v, str(variables[v]))] - - # GYP defines from the environment. - env_items = ProcessGypDefinesItems( - shlex.split(os.environ.get('GYP_DEFINES', ''))) - - # GYP defines from the command line. - parser = argparse.ArgumentParser() - parser.add_argument('-D', dest='defines', action='append', default=[]) - cmdline_input_items = parser.parse_known_args()[0].defines - cmdline_items = ProcessGypDefinesItems(cmdline_input_items) - - vars_dict = dict(supp_items + env_items + cmdline_items) - return vars_dict - - -def GetOutputDirectory(): - """Returns the output directory that GYP will use.""" - - # Handle command line generator flags. - parser = argparse.ArgumentParser() - parser.add_argument('-G', dest='genflags', default=[], action='append') - genflags = parser.parse_known_args()[0].genflags - - # Handle generator flags from the environment. - genflags += shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')) - - needle = 'output_dir=' - for item in genflags: - if item.startswith(needle): - return item[len(needle):] - - return 'out' - - -def additional_include_files(supplemental_files, args=[]): - """ - Returns a list of additional (.gypi) files to include, without duplicating - ones that are already specified on the command line. The list of supplemental - include files is passed in as an argument. - """ - # Determine the include files specified on the command line. - # This doesn't cover all the different option formats you can use, - # but it's mainly intended to avoid duplicating flags on the automatic - # makefile regeneration which only uses this format. - specified_includes = set() - for arg in args: - if arg.startswith('-I') and len(arg) > 2: - specified_includes.add(os.path.realpath(arg[2:])) - - result = [] - def AddInclude(path): - if os.path.realpath(path) not in specified_includes: - result.append(path) - - if os.environ.get('GYP_INCLUDE_FIRST') != None: - AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_FIRST'))) - - # Always include common.gypi. - AddInclude(os.path.join(script_dir, 'common.gypi')) - - # Optionally add supplemental .gypi files if present. - for supplement in supplemental_files: - AddInclude(supplement) - - if os.environ.get('GYP_INCLUDE_LAST') != None: - AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_LAST'))) - - return result - - -if __name__ == '__main__': - # Disabling garbage collection saves about 1 second out of 16 on a Linux - # z620 workstation. 
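GetOutputDirectory above resolves the build output directory with a fixed precedence: -G command-line flags are scanned before GYP_GENERATOR_FLAGS from the environment, with 'out' as the fallback. A condensed sketch of that precedence (resolve_output_dir is a made-up name):

    def resolve_output_dir(cmdline_genflags, env_genflags):
        # Command-line -G flags come first in the scan, so they win.
        for item in cmdline_genflags + env_genflags:
            if item.startswith('output_dir='):
                return item[len('output_dir='):]
        return 'out'

    assert resolve_output_dir(['output_dir=cmdfoo'],
                              ['output_dir=envfoo']) == 'cmdfoo'
    assert resolve_output_dir([], ['output_dir=envfoo']) == 'envfoo'
    assert resolve_output_dir([], []) == 'out'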
Since this is a short-lived process it's not a problem to - # leak a few cyclyc references in order to spare the CPU cycles for - # scanning the heap. - import gc - gc.disable() - - args = sys.argv[1:] - - use_analyzer = len(args) and args[0] == '--analyzer' - if use_analyzer: - args.pop(0) - os.environ['GYP_GENERATORS'] = 'analyzer' - args.append('-Gconfig_path=' + args.pop(0)) - args.append('-Ganalyzer_output_path=' + args.pop(0)) - - if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)): - print('Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.') - sys.exit(0) - - # Use the Psyco JIT if available. - if psyco: - psyco.profile() - print("Enabled Psyco JIT.") - - # Fall back on hermetic python if we happen to get run under cygwin. - # TODO(bradnelson): take this out once this issue is fixed: - # http://code.google.com/p/gyp/issues/detail?id=177 - if sys.platform == 'cygwin': - import find_depot_tools - depot_tools_path = find_depot_tools.add_depot_tools_to_path() - python_dir = sorted(glob.glob(os.path.join(depot_tools_path, - 'python2*_bin')))[-1] - env = os.environ.copy() - env['PATH'] = python_dir + os.pathsep + env.get('PATH', '') - cmd = [os.path.join(python_dir, 'python.exe')] + sys.argv - sys.exit(subprocess.call(cmd, env=env)) - - # This could give false positives since it doesn't actually do real option - # parsing. Oh well. - gyp_file_specified = any(arg.endswith('.gyp') for arg in args) - - gyp_environment.SetEnvironment() - - # If we didn't get a file, check an env var, and then fall back to - # assuming 'all.gyp' from the same directory as the script. - if not gyp_file_specified: - gyp_file = os.environ.get('CHROMIUM_GYP_FILE') - if gyp_file: - # Note that CHROMIUM_GYP_FILE values can't have backslashes as - # path separators even on Windows due to the use of shlex.split(). - args.extend(shlex.split(gyp_file)) - else: - args.append(os.path.join(script_dir, 'all.gyp')) - - supplemental_includes = GetSupplementalFiles() - gyp_vars_dict = GetGypVars(supplemental_includes) - # There shouldn't be a circular dependency relationship between .gyp files, - # but in Chromium's .gyp files, on non-Mac platforms, circular relationships - # currently exist. The check for circular dependencies is currently - # bypassed on other platforms, but is left enabled on iOS, where a violation - # of the rule causes Xcode to misbehave badly. - # TODO(mark): Find and kill remaining circular dependencies, and remove this - # option. http://crbug.com/35878. - # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the - # list. - if gyp_vars_dict.get('OS') != 'ios': - args.append('--no-circular-check') - - # libtool on Mac warns about duplicate basenames in static libraries, so - # they're disallowed in general by gyp. We are lax on this point, so disable - # this check other than on Mac. GN does not use static libraries as heavily, - # so over time this restriction will mostly go away anyway, even on Mac. - # https://code.google.com/p/gyp/issues/detail?id=384 - if sys.platform != 'darwin': - args.append('--no-duplicate-basename-check') - - # We explicitly don't support the make gyp generator (crbug.com/348686). Be - # nice and fail here, rather than choking in gyp. - if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')): - print('Error: make gyp generator not supported (check GYP_GENERATORS).') - sys.exit(1) - - # We explicitly don't support the native msvs gyp generator. Be nice and - # fail here, rather than generating broken projects. 
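The generator check above relies on a delimiter-anchored regex so that, for example, 'cmake' does not trip the 'make' rejection. A small demonstration of that pattern shape (generator_requested is illustrative):

    import re

    def generator_requested(name, gyp_generators):
        # Match only when the name is bounded by start/end, commas, or spaces.
        return bool(re.search(r'(^|,|\s)%s($|,|\s)' % re.escape(name),
                              gyp_generators))

    assert generator_requested('make', 'make')
    assert generator_requested('make', 'ninja,make')
    assert not generator_requested('make', 'cmake')       # no substring hit
    assert not generator_requested('make', 'msvs-ninja')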
-  if re.search(r'(^|,|\s)msvs($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
-    print('Error: msvs gyp generator not supported (check GYP_GENERATORS).')
-    print('Did you mean to use the `msvs-ninja` generator?')
-    sys.exit(1)
-
-  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
-  # to enforce syntax checking.
-  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
-  if syntax_check and int(syntax_check):
-    args.append('--check')
-
-  # TODO(dmikurube): Remove these checks and messages after a while.
-  if ('linux_use_tcmalloc' in gyp_vars_dict or
-      'android_use_tcmalloc' in gyp_vars_dict):
-    print('*****************************************************************')
-    print('"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!')
-    print('-----------------------------------------------------------------')
-    print('You specify "linux_use_tcmalloc" or "android_use_tcmalloc" in')
-    print('your GYP_DEFINES. Please switch them into "use_allocator" now.')
-    print('See http://crbug.com/345554 for the details.')
-    print('*****************************************************************')
-
-  # Automatically turn on crosscompile support for platforms that need it.
-  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
-  # this mode.)
-  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
-          gyp_vars_dict.get('OS') in ['android', 'ios'],
-          'GYP_CROSSCOMPILE' not in os.environ)):
-    os.environ['GYP_CROSSCOMPILE'] = '1'
-  if gyp_vars_dict.get('OS') == 'android':
-    args.append('--check')
-
-  args.extend(
-      ['-I' + i for i in additional_include_files(supplemental_includes, args)])
-
-  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])
-
-  if not use_analyzer:
-    print('Updating projects from gyp files...')
-    sys.stdout.flush()
-
-  # Off we go...
-  gyp_rc = gyp.main(args)
-
-  if not use_analyzer:
-    vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
-    if vs2013_runtime_dll_dirs:
-      x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
-      vs_toolchain.CopyVsRuntimeDlls(
-          os.path.join(chrome_src, GetOutputDirectory()),
-          (x86_runtime, x64_runtime))
-
-  sys.exit(gyp_rc)
diff --git a/build/gyp_chromium.py b/build/gyp_chromium.py
deleted file mode 100644
index f9e8ac8ed8..0000000000
--- a/build/gyp_chromium.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file is (possibly, depending on python version) imported by
-# gyp_chromium when GYP_PARALLEL=1 and it creates sub-processes
-# through the multiprocessing library.
-
-# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
-# imports that don't end in .py (and aren't directories with an
-# __init__.py). This wrapper makes "import gyp_chromium" work with
-# those old versions and makes it possible to execute gyp_chromium.py
-# directly on Windows where the extension is useful.
-
-import os
-
-path = os.path.abspath(os.path.split(__file__)[0])
-exec(open(os.path.join(path, 'gyp_chromium')).read())
diff --git a/build/gyp_chromium_test.py b/build/gyp_chromium_test.py
deleted file mode 100755
index 3ef63da5eb..0000000000
--- a/build/gyp_chromium_test.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
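gyp_chromium.py above keeps "import gyp_chromium" working even though the real script has no .py extension; under Python 3 the same trick needs exec() plus compile() in place of the removed execfile() builtin. A sketch of the technique in isolation (run_sibling_script is a hypothetical name):

    import os

    def run_sibling_script(name):
        # Resolve the extension-less script next to this file and execute it
        # in the caller's global namespace, mirroring the wrapper above.
        path = os.path.join(os.path.abspath(os.path.dirname(__file__)), name)
        with open(path) as f:
            code = compile(f.read(), path, 'exec')
        exec(code, globals())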
- -import os -import sys -import unittest - -SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__)) -SRC_DIR = os.path.dirname(SCRIPT_DIR) - -sys.path.append(os.path.join(SRC_DIR, 'third_party', 'pymock')) - -import mock - -# TODO(sbc): Make gyp_chromium more testable by putting the code in -# a .py file. -gyp_chromium = __import__('gyp_chromium') - - -class TestGetOutputDirectory(unittest.TestCase): - @mock.patch('os.environ', {}) - @mock.patch('sys.argv', [__file__]) - def testDefaultValue(self): - self.assertEqual(gyp_chromium.GetOutputDirectory(), 'out') - - @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'}) - @mock.patch('sys.argv', [__file__]) - def testEnvironment(self): - self.assertEqual(gyp_chromium.GetOutputDirectory(), 'envfoo') - - @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'}) - @mock.patch('sys.argv', [__file__, '-Goutput_dir=cmdfoo']) - def testGFlagOverridesEnv(self): - self.assertEqual(gyp_chromium.GetOutputDirectory(), 'cmdfoo') - - @mock.patch('os.environ', {}) - @mock.patch('sys.argv', [__file__, '-G', 'output_dir=foo']) - def testGFlagWithSpace(self): - self.assertEqual(gyp_chromium.GetOutputDirectory(), 'foo') - - -class TestGetGypVars(unittest.TestCase): - @mock.patch('os.environ', {}) - def testDefault(self): - self.assertEqual(gyp_chromium.GetGypVars([]), {}) - - @mock.patch('os.environ', {}) - @mock.patch('sys.argv', [__file__, '-D', 'foo=bar']) - def testDFlags(self): - self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar'}) - - @mock.patch('os.environ', {}) - @mock.patch('sys.argv', [__file__, '-D', 'foo']) - def testDFlagsNoValue(self): - self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': '1'}) - - @mock.patch('os.environ', {}) - @mock.patch('sys.argv', [__file__, '-D', 'foo=bar', '-Dbaz']) - def testDFlagMulti(self): - self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar', 'baz': '1'}) - - -if __name__ == '__main__': - unittest.main() diff --git a/build/gyp_environment.py b/build/gyp_environment.py deleted file mode 100644 index fb50645d56..0000000000 --- a/build/gyp_environment.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Sets up various automatic gyp environment variables. These are used by -gyp_chromium and landmines.py which run at different stages of runhooks. To -make sure settings are consistent between them, all setup should happen here. -""" - -import gyp_helper -import os -import sys -import vs_toolchain - -def SetEnvironment(): - """Sets defaults for GYP_* variables.""" - gyp_helper.apply_chromium_gyp_env() - - # Default to ninja on linux and windows, but only if no generator has - # explicitly been set. - # Also default to ninja on mac, but only when not building chrome/ios. - # . -f / --format has precedence over the env var, no need to check for it - # . set the env var only if it hasn't been set yet - # . 
chromium.gyp_env has been applied to os.environ at this point already
-  if sys.platform.startswith(('linux', 'win', 'freebsd')) and \
-     not os.environ.get('GYP_GENERATORS'):
-    os.environ['GYP_GENERATORS'] = 'ninja'
-  elif sys.platform == 'darwin' and not os.environ.get('GYP_GENERATORS') and \
-       'OS=ios' not in os.environ.get('GYP_DEFINES', ''):
-    os.environ['GYP_GENERATORS'] = 'ninja'
-
-  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
diff --git a/build/gyp_helper.py b/build/gyp_helper.py
deleted file mode 100644
index c840f2d6dc..0000000000
--- a/build/gyp_helper.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file helps gyp_chromium and landmines correctly set up the gyp
-# environment from chromium.gyp_env on disk.
-
-import os
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-CHROME_SRC = os.path.dirname(SCRIPT_DIR)
-
-
-def apply_gyp_environment_from_file(file_path):
-  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
-  if not os.path.exists(file_path):
-    return
-  with open(file_path, 'r') as f:
-    file_contents = f.read()
-  try:
-    file_data = eval(file_contents, {'__builtins__': None}, None)
-  except SyntaxError as e:
-    e.filename = os.path.abspath(file_path)
-    raise
-  supported_vars = (
-      'CC',
-      'CC_wrapper',
-      'CC.host_wrapper',
-      'CHROMIUM_GYP_FILE',
-      'CHROMIUM_GYP_SYNTAX_CHECK',
-      'CXX',
-      'CXX_wrapper',
-      'CXX.host_wrapper',
-      'GYP_DEFINES',
-      'GYP_GENERATOR_FLAGS',
-      'GYP_CROSSCOMPILE',
-      'GYP_GENERATOR_OUTPUT',
-      'GYP_GENERATORS',
-      'GYP_INCLUDE_FIRST',
-      'GYP_INCLUDE_LAST',
-      'GYP_MSVS_VERSION',
-  )
-  for var in supported_vars:
-    file_val = file_data.get(var)
-    if file_val:
-      if var in os.environ:
-        behavior = 'replaces'
-        if var == 'GYP_DEFINES':
-          result = file_val + ' ' + os.environ[var]
-          behavior = 'merges with, and individual components override,'
-        else:
-          result = os.environ[var]
-        print('INFO: Environment value for "%s" %s value in %s' % (
-            var, behavior, os.path.abspath(file_path)))
-        string_padding = max(len(var), len(file_path), len('result'))
-        print('  %s: %s' % (var.rjust(string_padding), os.environ[var]))
-        print('  %s: %s' % (file_path.rjust(string_padding), file_val))
-        os.environ[var] = result
-      else:
-        os.environ[var] = file_val
-
-
-def apply_chromium_gyp_env():
-  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
-    # Update the environment based on chromium.gyp_env.
-    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
-    apply_gyp_environment_from_file(path)
diff --git a/build/gypi_to_gn.py b/build/gypi_to_gn.py
deleted file mode 100644
index a107f94fca..0000000000
--- a/build/gypi_to_gn.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Converts a given gypi file to a python scope and writes the result to stdout.
-
-It is assumed that the file contains a toplevel dictionary, and this script
-will return that dictionary as a GN "scope" (see example below). This script
-does not know anything about GYP and it will not expand variables or execute
-conditions.
-
-It will strip conditions blocks.
-
-A variables block at the top level will be flattened so that the variables
-appear in the root dictionary. This way they can be returned to the GN code.
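The GYP_DEFINES branch in gyp_helper.py above concatenates the file's value before the environment's value, so components already in the environment win when GYP processes the later definitions. A sketch of that merge rule (merge_gyp_defines is a made-up helper):

    def merge_gyp_defines(file_val, env_val):
        # File value first, environment value second: later components
        # override earlier ones when GYP parses the combined string.
        return file_val + ' ' + env_val if env_val else file_val

    # 'werror=' from the environment overrides the file's 'werror=-Wall'.
    print(merge_gyp_defines('OS=linux werror=-Wall', 'werror='))
    # -> 'OS=linux werror=-Wall werror='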
-
-Say your_file.gypi looked like this:
-  {
-    'sources': [ 'a.cc', 'b.cc' ],
-    'defines': [ 'ENABLE_DOOM_MELON' ],
-  }
-
-You would call it like this:
-  gypi_values = exec_script("//build/gypi_to_gn.py",
-                            [ rebase_path("your_file.gypi") ],
-                            "scope",
-                            [ "your_file.gypi" ])
-
-Notes:
- - The rebase_path call converts the gypi file from being relative to the
-   current build file to being system absolute for calling the script, which
-   will have a different current directory than this file.
-
- - The "scope" parameter tells GN to interpret the result as a series of GN
-   variable assignments.
-
- - The last file argument to exec_script tells GN that the given file is a
-   dependency of the build so Ninja can automatically re-run GN if the file
-   changes.
-
-Read the values into a target like this:
-  component("mycomponent") {
-    sources = gypi_values.sources
-    defines = gypi_values.defines
-  }
-
-Sometimes your .gypi file will include paths relative to a different
-directory than the current .gn file. In this case, you can rebase them to
-be relative to the current directory.
-  sources = rebase_path(gypi_values.sources, ".",
-                        "//path/gypi/input/values/are/relative/to")
-
-This script will tolerate a 'variables' entry in the toplevel dictionary or
-not. If the toplevel dictionary just contains one item called 'variables',
-it will be collapsed away and the result will be the contents of that
-dictionary. Some .gypi files are written with or without this, depending on
-how they expect to be embedded into a .gyp file.
-
-This script also has the ability to replace certain substrings in the input.
-Generally this is used to emulate GYP variable expansion. If you passed the
-argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
-the input will be replaced with "bar":
-
-  gypi_values = exec_script("//build/gypi_to_gn.py",
-                            [ rebase_path("your_file.gypi"),
-                              "--replace=<(foo)=bar"],
-                            "scope",
-                            [ "your_file.gypi" ])
-
-"""
-
-import gn_helpers
-from optparse import OptionParser
-import sys
-
-def LoadPythonDictionary(path):
-  file_string = open(path).read()
-  try:
-    file_data = eval(file_string, {'__builtins__': None}, None)
-  except SyntaxError as e:
-    e.filename = path
-    raise
-  except Exception as e:
-    raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
-
-  assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
-
-  # Flatten any variables to the top level.
-  if 'variables' in file_data:
-    file_data.update(file_data['variables'])
-    del file_data['variables']
-
-  # Strip any conditions.
-  if 'conditions' in file_data:
-    del file_data['conditions']
-  if 'target_conditions' in file_data:
-    del file_data['target_conditions']
-
-  # Strip targets in the toplevel, since some files define these and we can't
-  # slurp them in.
-  if 'targets' in file_data:
-    del file_data['targets']
-
-  return file_data
-
-
-def ReplaceSubstrings(values, search_for, replace_with):
-  """Recursively replaces substrings in a value.
-
-  Replaces all substrings of "search_for" with "replace_with" for all
-  strings occurring in "values". This is done by recursively iterating into
-  lists as well as the keys and values of dictionaries."""
-  if isinstance(values, str):
-    return values.replace(search_for, replace_with)
-
-  if isinstance(values, list):
-    return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
-
-  if isinstance(values, dict):
-    # For dictionaries, do the search for both the keys and values.
-    result = {}
-    for key, value in values.items():
-      new_key = ReplaceSubstrings(key, search_for, replace_with)
-      new_value = ReplaceSubstrings(value, search_for, replace_with)
-      result[new_key] = new_value
-    return result
-
-  # Assume everything else is unchanged.
-  return values
-
-def main():
-  parser = OptionParser()
-  parser.add_option("-r", "--replace", action="append",
-      help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
-  (options, args) = parser.parse_args()
-
-  if len(args) != 1:
-    raise Exception("Need one argument which is the .gypi file to read.")
-
-  data = LoadPythonDictionary(args[0])
-  if options.replace:
-    # Do replacements for all specified patterns.
-    for replace in options.replace:
-      split = replace.split('=')
-      # Allow "foo=" to replace with nothing.
-      if len(split) == 1:
-        split.append('')
-      assert len(split) == 2, "Replacement must be of the form 'key=value'."
-      data = ReplaceSubstrings(data, split[0], split[1])
-
-  # Sometimes .gypi files use the GYP syntax with percents at the end of the
-  # variable name (to indicate not to overwrite a previously-defined value):
-  #   'foo%': 'bar',
-  # Convert these to regular variables. Iterate over a copy of the keys,
-  # since the loop mutates the dictionary.
-  for key in list(data):
-    if len(key) > 1 and key[len(key) - 1] == '%':
-      data[key[:-1]] = data[key]
-      del data[key]
-
-  print(gn_helpers.ToGNString(data))
-
-if __name__ == '__main__':
-  try:
-    main()
-  except Exception as e:
-    print(str(e))
-    sys.exit(1)
diff --git a/build/install-android-sdks.sh b/build/install-android-sdks.sh
deleted file mode 100755
index 1119b7d7fd..0000000000
--- a/build/install-android-sdks.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash -e
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Script to install SDKs needed to build chromium on android.
-# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
-
-echo 'checking for sdk packages install'
-# Use absolute path to call 'android' so script can be run from any directory.
-cwd=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-# Get the SDK extras packages to install from the DEPS file 'sdkextras' hook.
-packages="$(python3 ${cwd}/get_sdk_extras_packages.py)"
-if [[ -n "${packages}" ]]; then
-  ${cwd}/../third_party/android_tools/sdk/tools/android update sdk --no-ui \
-      --filter ${packages}
-fi
-
-echo "install-android-sdks.sh complete."
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
deleted file mode 100755
index 69f2536393..0000000000
--- a/build/install-build-deps-android.sh
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/bin/bash -e
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Script to install everything needed to build chromium on android, including
-# items requiring sudo privileges.
-# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
-
-args="$@"
-if test "$1" = "--skip-sdk-packages"; then
-  skip_inst_sdk_packages=1
-  args="${@:2}"
-else
-  skip_inst_sdk_packages=0
-fi
-
-if ! uname -m | egrep -q "i686|x86_64"; then
-  echo "Only x86 architectures are currently supported" >&2
-  exit 1
-fi
-
-# First install the default Linux build deps.
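Two of gypi_to_gn.py's transforms above are easy to see in miniature, in particular collapsing GYP's trailing-percent "soft default" variables while iterating over a copy of the keys. A sketch (collapse_percent_keys is an illustrative name, not a function from the script):

    def collapse_percent_keys(data):
        # Iterate over a snapshot of the keys; the loop mutates the dict.
        for key in list(data):
            if len(key) > 1 and key.endswith('%'):
                data[key[:-1]] = data.pop(key)
        return data

    print(collapse_percent_keys({'use_goma%': 0, 'sources': ['<(foo)/a.cc']}))
    # -> {'sources': ['<(foo)/a.cc'], 'use_goma': 0}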
-"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \ - --no-syms --lib32 --no-arm --no-prompt "${args}" - -lsb_release=$(lsb_release --codename --short) - -# The temporary directory used to store output of update-java-alternatives -TEMPDIR=$(mktemp -d) -cleanup() { - local status=${?} - trap - EXIT - rm -rf "${TEMPDIR}" - exit ${status} -} -trap cleanup EXIT - -# Fix deps -sudo apt-get -f install - -# Install deps -# This step differs depending on what Ubuntu release we are running -# on since the package names are different, and Sun's Java must -# be installed manually on late-model versions. - -# common -sudo apt-get -y install lighttpd python-pexpect xvfb x11-utils - -# Some binaries in the Android SDK require 32-bit libraries on the host. -# See https://developer.android.com/sdk/installing/index.html?pkg=tools -if [[ $lsb_release == "precise" ]]; then - sudo apt-get -y install ia32-libs -else - sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386 -fi - -if [[ $lsb_release == "xenial" || $lsb_release == "bionic" || \ - $lsb_release == "rodete" ]]; then - sudo apt-get -y install openjdk-8-jre openjdk-8-jdk - sudo update-java-alternatives -s java-1.8.0-openjdk-amd64 - sudo apt-get -y install ant -else - sudo apt-get -y install ant - - # Install openjdk and openjre 7 stuff - sudo apt-get -y install openjdk-7-jre openjdk-7-jdk - - # Switch version of Java to openjdk 7. - # Some Java plugins (e.g. for firefox, mozilla) are not required to build, and - # thus are treated only as warnings. Any errors in updating java alternatives - # which are not '*-javaplugin.so' will cause errors and stop the script from - # completing successfully. - if ! sudo update-java-alternatives -s java-1.7.0-openjdk-amd64 \ - >& "${TEMPDIR}"/update-java-alternatives.out - then - # Check that there are the expected javaplugin.so errors for the update - if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \ - /dev/null - then - # Print as warnings all the javaplugin.so errors - echo 'WARNING: java has no alternatives for the following plugins:' - grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out - fi - # Check if there are any errors that are not javaplugin.so - if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \ - >& /dev/null - then - # If there are non-javaplugin.so errors, treat as errors and exit - echo 'ERRORS: Failed to update alternatives for java:' - grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out - exit 1 - fi - fi -fi - -# Install SDK packages for android -if test "$skip_inst_sdk_packages" != 1; then - "$(dirname "${BASH_SOURCE[0]}")/install-android-sdks.sh" -fi - -echo "install-build-deps-android.sh complete." diff --git a/build/install-build-deps.py b/build/install-build-deps.py deleted file mode 100755 index a3024a425d..0000000000 --- a/build/install-build-deps.py +++ /dev/null @@ -1,431 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2015 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import argparse -import operator -import os -import platform -import re -import subprocess -import sys - - -SUPPORTED_UBUNTU_VERSIONS = ( - {'number': '12.04', 'codename': 'precise'}, - {'number': '14.04', 'codename': 'trusty'}, - {'number': '14.10', 'codename': 'utopic'}, - {'number': '15.04', 'codename': 'vivid'}, -) - - -# Packages needed for chromeos only. 
-_packages_chromeos_dev = ( - 'libbluetooth-dev', - 'libxkbcommon-dev', - 'realpath', -) - - -# Packages needed for development. -_packages_dev = ( - 'apache2.2-bin', - 'bison', - 'cdbs', - 'curl', - 'devscripts', - 'dpkg-dev', - 'elfutils', - 'fakeroot', - 'flex', - 'fonts-thai-tlwg', - 'g++', - 'git-core', - 'git-svn', - 'gperf', - 'language-pack-da', - 'language-pack-fr', - 'language-pack-he', - 'language-pack-zh-hant', - 'libapache2-mod-php5', - 'libasound2-dev', - 'libav-tools', - 'libbrlapi-dev', - 'libbz2-dev', - 'libcairo2-dev', - 'libcap-dev', - 'libcups2-dev', - 'libcurl4-gnutls-dev', - 'libdrm-dev', - 'libelf-dev', - 'libexif-dev', - 'libgconf2-dev', - 'libglib2.0-dev', - 'libglu1-mesa-dev', - 'libgnome-keyring-dev', - 'libgtk2.0-dev', - 'libkrb5-dev', - 'libnspr4-dev', - 'libnss3-dev', - 'libpam0g-dev', - 'libpci-dev', - 'libpulse-dev', - 'libsctp-dev', - 'libspeechd-dev', - 'libsqlite3-dev', - 'libssl-dev', - 'libudev-dev', - 'libwww-perl', - 'libxslt1-dev', - 'libxss-dev', - 'libxt-dev', - 'libxtst-dev', - 'openbox', - 'patch', - 'perl', - 'php5-cgi', - 'pkg-config', - 'python', - 'python-cherrypy3', - 'python-crypto', - 'python-dev', - 'python-numpy', - 'python-opencv', - 'python-openssl', - 'python-psutil', - 'python-yaml', - 'rpm', - 'ruby', - 'subversion', - 'ttf-dejavu-core', - 'ttf-indic-fonts', - 'ttf-kochi-gothic', - 'ttf-kochi-mincho', - 'wdiff', - 'xfonts-mathml', - 'zip', -) - - -# Run-time libraries required by chromeos only. -_packages_chromeos_lib = ( - 'libbz2-1.0', - 'libpulse0', -) - - -# Full list of required run-time libraries. -_packages_lib = ( - 'libasound2', - 'libatk1.0-0', - 'libc6', - 'libcairo2', - 'libcap2', - 'libcups2', - 'libexif12', - 'libexpat1', - 'libfontconfig1', - 'libfreetype6', - 'libglib2.0-0', - 'libgnome-keyring0', - 'libgtk2.0-0', - 'libpam0g', - 'libpango1.0-0', - 'libpci3', - 'libpcre3', - 'libpixman-1-0', - 'libpng12-0', - 'libspeechd2', - 'libsqlite3-0', - 'libstdc++6', - 'libx11-6', - 'libxau6', - 'libxcb1', - 'libxcomposite1', - 'libxcursor1', - 'libxdamage1', - 'libxdmcp6', - 'libxext6', - 'libxfixes3', - 'libxi6', - 'libxinerama1', - 'libxrandr2', - 'libxrender1', - 'libxtst6', - 'zlib1g', -) - - -# Debugging symbols for all of the run-time libraries. -_packages_dbg = ( - 'libatk1.0-dbg', - 'libc6-dbg', - 'libcairo2-dbg', - 'libfontconfig1-dbg', - 'libglib2.0-0-dbg', - 'libgtk2.0-0-dbg', - 'libpango1.0-0-dbg', - 'libpcre3-dbg', - 'libpixman-1-0-dbg', - 'libsqlite3-0-dbg', - 'libx11-6-dbg', - 'libxau6-dbg', - 'libxcb1-dbg', - 'libxcomposite1-dbg', - 'libxcursor1-dbg', - 'libxdamage1-dbg', - 'libxdmcp6-dbg', - 'libxext6-dbg', - 'libxfixes3-dbg', - 'libxi6-dbg', - 'libxinerama1-dbg', - 'libxrandr2-dbg', - 'libxrender1-dbg', - 'libxtst6-dbg', - 'zlib1g-dbg', -) - - -# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf. -_packages_lib32 = ( - 'linux-libc-dev:i386', -) - - -# arm cross toolchain packages needed to build chrome on armhf. -_packages_arm = ( - 'g++-arm-linux-gnueabihf', - 'libc6-dev-armhf-cross', - 'linux-libc-dev-armhf-cross', -) - - -# Packages to build NaCl, its toolchains, and its ports. 
-_packages_naclports = ( - 'ant', - 'autoconf', - 'bison', - 'cmake', - 'gawk', - 'intltool', - 'xsltproc', - 'xutils-dev', -) -_packages_nacl = ( - 'g++-mingw-w64-i686', - 'lib32ncurses5-dev', - 'lib32z1-dev', - 'libasound2:i386', - 'libcap2:i386', - 'libelf-dev:i386', - 'libexif12:i386', - 'libfontconfig1:i386', - 'libgconf-2-4:i386', - 'libglib2.0-0:i386', - 'libgpm2:i386', - 'libgtk2.0-0:i386', - 'libncurses5:i386', - 'libnss3:i386', - 'libpango1.0-0:i386', - 'libssl1.0.0:i386', - 'libtinfo-dev', - 'libtinfo-dev:i386', - 'libtool', - 'libxcomposite1:i386', - 'libxcursor1:i386', - 'libxdamage1:i386', - 'libxi6:i386', - 'libxrandr2:i386', - 'libxss1:i386', - 'libxtst6:i386', - 'texinfo', - 'xvfb', -) - - -def is_userland_64_bit(): - return platform.architecture()[0] == '64bit' - - -def package_exists(pkg): - return pkg in subprocess.check_output(['apt-cache', 'pkgnames']).splitlines() - - -def lsb_release_short_codename(): - return subprocess.check_output( - ['lsb_release', '--codename', '--short']).strip() - - -def write_error(message): - sys.stderr.write('ERROR: %s\n' % message) - sys.stderr.flush() - - -def nonfatal_get_output(*popenargs, **kwargs): - process = subprocess.Popen( - stdout=subprocess.PIPE, stderr=subprocess.PIPE, *popenargs, **kwargs) - stdout, stderr = process.communicate() - retcode = process.poll() - return retcode, stdout, stderr - - -def compute_dynamic_package_lists(): - global _packages_arm - global _packages_dbg - global _packages_dev - global _packages_lib - global _packages_lib32 - global _packages_nacl - - if is_userland_64_bit(): - # 64-bit systems need a minimum set of 32-bit compat packages - # for the pre-built NaCl binaries. - _packages_dev += ( - 'lib32gcc1', - 'lib32stdc++6', - 'libc6-i386', - ) - - # When cross building for arm/Android on 64-bit systems the host binaries - # that are part of v8 need to be compiled with -m32 which means - # that basic multilib support is needed. - # gcc-multilib conflicts with the arm cross compiler (at least in trusty) - # but g++-X.Y-multilib gives us the 32-bit support that we need. Find out - # the appropriate value of X and Y by seeing what version the current - # distribution's g++-multilib package depends on. - output = subprocess.check_output(['apt-cache', 'depends', 'g++-multilib']) - multilib_package = re.search(r'g\+\+-[0-9.]+-multilib', output).group() - _packages_lib32 += (multilib_package,) - - lsb_codename = lsb_release_short_codename() - - # Find the proper version of libstdc++6-4.x-dbg. - if lsb_codename == 'precise': - _packages_dbg += ('libstdc++6-4.6-dbg',) - elif lsb_codename == 'trusty': - _packages_dbg += ('libstdc++6-4.8-dbg',) - else: - _packages_dbg += ('libstdc++6-4.9-dbg',) - - # Work around for dependency issue Ubuntu/Trusty: http://crbug.com/435056 . - if lsb_codename == 'trusty': - _packages_arm += ( - 'g++-4.8-multilib-arm-linux-gnueabihf', - 'gcc-4.8-multilib-arm-linux-gnueabihf', - ) - - # Find the proper version of libgbm-dev. We can't just install libgbm-dev as - # it depends on mesa, and only one version of mesa can exists on the system. - # Hence we must match the same version or this entire script will fail. 
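The helpers above shell out through subprocess.check_output, which returns bytes under Python 3, so membership tests against str and re.search with a str pattern can misbehave. A minimal text-mode variant, as a sketch of one way to keep those comparisons valid:

    import subprocess

    def check_output_text(cmd):
        # universal_newlines=True makes check_output return str, not bytes,
        # so 'pkg in output.splitlines()' and re.search(str_pattern, output)
        # behave as the surrounding code expects.
        return subprocess.check_output(cmd, universal_newlines=True)

    def package_exists(pkg):
        return pkg in check_output_text(['apt-cache', 'pkgnames']).splitlines()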
-  mesa_variant = ''
-  for variant in ('-lts-trusty', '-lts-utopic'):
-    rc, stdout, stderr = nonfatal_get_output(
-        ['dpkg-query', '-Wf${Status}', 'libgl1-mesa-glx' + variant])
-    # nonfatal_get_output returns bytes; probe its stdout for the
-    # "install ok installed" status.
-    if rc == 0 and b'ok installed' in stdout:
-      mesa_variant = variant
-  _packages_dev += (
-      'libgbm-dev' + mesa_variant,
-      'libgl1-mesa-dev' + mesa_variant,
-      'libgles2-mesa-dev' + mesa_variant,
-      'mesa-common-dev' + mesa_variant,
-  )
-
-  if package_exists('ttf-mscorefonts-installer'):
-    _packages_dev += ('ttf-mscorefonts-installer',)
-  else:
-    _packages_dev += ('msttcorefonts',)
-
-  if package_exists('libnspr4-dbg'):
-    _packages_dbg += ('libnspr4-dbg', 'libnss3-dbg')
-    _packages_lib += ('libnspr4', 'libnss3')
-  else:
-    _packages_dbg += ('libnspr4-0d-dbg', 'libnss3-1d-dbg')
-    _packages_lib += ('libnspr4-0d', 'libnss3-1d')
-
-  if package_exists('libjpeg-dev'):
-    _packages_dev += ('libjpeg-dev',)
-  else:
-    _packages_dev += ('libjpeg62-dev',)
-
-  if package_exists('libudev1'):
-    _packages_dev += ('libudev1',)
-    _packages_nacl += ('libudev1:i386',)
-  else:
-    _packages_dev += ('libudev0',)
-    _packages_nacl += ('libudev0:i386',)
-
-  if package_exists('libbrlapi0.6'):
-    _packages_dev += ('libbrlapi0.6',)
-  else:
-    _packages_dev += ('libbrlapi0.5',)
-
-  # Some packages are only needed if the distribution actually supports
-  # installing them.
-  if package_exists('appmenu-gtk'):
-    _packages_lib += ('appmenu-gtk',)
-
-  _packages_dev += _packages_chromeos_dev
-  _packages_lib += _packages_chromeos_lib
-  _packages_nacl += _packages_naclports
-
-
-def quick_check(packages):
-  rc, stdout, stderr = nonfatal_get_output([
-      'dpkg-query', '-W', '-f', '${PackageSpec}:${Status}\n'] + list(packages))
-  if rc == 0 and not stderr:
-    return 0
-  print(stderr)
-  return 1
-
-
-def main(argv):
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--quick-check', action='store_true',
-                      help='quickly try to determine if dependencies are '
-                           'installed (this avoids interactive prompts and '
-                           'sudo commands so might not be 100% accurate)')
-  parser.add_argument('--unsupported', action='store_true',
-                      help='attempt installation even on unsupported systems')
-  args = parser.parse_args(argv)
-
-  lsb_codename = lsb_release_short_codename()
-  if not args.unsupported and not args.quick_check:
-    if lsb_codename not in list(map(
-        operator.itemgetter('codename'), SUPPORTED_UBUNTU_VERSIONS)):
-      supported_ubuntus = ['%(number)s (%(codename)s)' % v
-                           for v in SUPPORTED_UBUNTU_VERSIONS]
-      write_error('Only Ubuntu %s are currently supported.'
% - ', '.join(supported_ubuntus)) - return 1 - - if platform.machine() not in ('i686', 'x86_64'): - write_error('Only x86 architectures are currently supported.') - return 1 - - if os.geteuid() != 0 and not args.quick_check: - print('Running as non-root user.') - print('You might have to enter your password one or more times') - print('for \'sudo\'.') - print() - - compute_dynamic_package_lists() - - packages = (_packages_dev + _packages_lib + _packages_dbg + _packages_lib32 + - _packages_arm + _packages_nacl) - def packages_key(pkg): - s = pkg.rsplit(':', 1) - if len(s) == 1: - return (s, '') - return s - packages = sorted(set(packages), key=packages_key) - - if args.quick_check: - return quick_check(packages) - - return 0 - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh deleted file mode 100755 index 60fcea6aaa..0000000000 --- a/build/install-build-deps.sh +++ /dev/null @@ -1,395 +0,0 @@ -#!/bin/bash -e - -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Script to install everything needed to build chromium (well, ideally, anyway) -# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions -# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit - -usage() { - echo "Usage: $0 [--options]" - echo "Options:" - echo "--[no-]syms: enable or disable installation of debugging symbols" - echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot" - echo "--[no-]arm: enable or disable installation of arm cross toolchain" - echo "--no-prompt: silently select standard options/defaults" - echo "--quick-check: quickly try to determine if dependencies are installed" - echo " (this avoids interactive prompts and sudo commands," - echo " so might not be 100% accurate)" - echo "--unsupported: attempt installation even on unsupported systems" - echo "Script will prompt interactively if options not given." - exit 1 -} - -# Checks whether a particular package is available in the repos. -# USAGE: $ package_exists -package_exists() { - apt-cache pkgnames | grep -x "$1" > /dev/null 2>&1 -} - -# These default to on because (some) bots need them and it keeps things -# simple for the bot setup if all bots just run the script in its default -# mode. Developers who don't want stuff they don't need installed on their -# own workstations can pass --no-arm when running the script. -do_inst_arm=1 - -while test "$1" != "" -do - case "$1" in - --syms) do_inst_syms=1;; - --no-syms) do_inst_syms=0;; - --lib32) do_inst_lib32=1;; - --arm) do_inst_arm=1;; - --no-arm) do_inst_arm=0;; - --no-prompt) do_default=1 - do_quietly="-qq --assume-yes" - ;; - --quick-check) do_quick_check=1;; - --unsupported) do_unsupported=1;; - *) usage;; - esac - shift -done - -if test "$do_inst_arm" = "1"; then - do_inst_lib32=1 -fi - -# Check for lsb_release command in $PATH -if ! which lsb_release > /dev/null; then - echo "ERROR: lsb_release not found in \$PATH" >&2 - exit 1; -fi - -distro=$(lsb_release --id --short) -codename=$(lsb_release --codename --short) -ubuntu_codenames="(precise|trusty|utopic|vivid|xenial|bionic)" -debian_codenames="(stretch|rodete)" -if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then - if [[ ! $codename =~ $ubuntu_codenames && ! 
$codename =~ $debian_codenames ]]; then - echo "ERROR: Only Ubuntu 12.04 (precise), 14.04 (trusty), " \ - "14.10 (utopic), 15.04 (vivid), 16.04 (xenial), 18.04 (bionic), " \ - "and Debian (rodete and stretch) are currently supported" >&2 - exit 1 - fi - - if ! uname -m | egrep -q "i686|x86_64"; then - echo "Only x86 architectures are currently supported" >&2 - exit - fi -fi - -if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then - echo "Running as non-root user." - echo "You might have to enter your password one or more times for 'sudo'." - echo -fi - -# Packages needed for development -dev_list="bison cdbs curl dpkg-dev elfutils devscripts fakeroot - flex g++ git-core git-svn gperf - libasound2-dev libbrlapi-dev - libbz2-dev libcairo2-dev libcap-dev libcups2-dev libcurl4-gnutls-dev - libdrm-dev libelf-dev libexif-dev libgconf2-dev libglib2.0-dev - libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev libkrb5-dev - libnspr4-dev libnss3-dev libpam0g-dev libpci-dev libpulse-dev - libsctp-dev libspeechd-dev libsqlite3-dev libssl-dev libudev-dev - libwww-perl libxslt1-dev libxss-dev libxt-dev libxtst-dev - patch perl pkg-config python python-cherrypy3 python-crypto - python-dev python-numpy python-opencv python-openssl python-psutil - python-yaml rpm ruby subversion wdiff zip" - -# Full list of required run-time libraries -lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcap2 libcups2 libexpat1 - libexif12 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0 - libgtk2.0-0 libpam0g libpango1.0-0 libpci3 libpcre3 libpixman-1-0 - libspeechd2 libstdc++6 libsqlite3-0 libx11-6 libxau6 libxcb1 - libxcomposite1 libxcursor1 libxdamage1 libxdmcp6 libxext6 libxfixes3 - libxi6 libxinerama1 libxrandr2 libxrender1 libxtst6 zlib1g" - -# Debugging symbols for all of the run-time libraries -dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libfontconfig1-dbg - libglib2.0-0-dbg libgtk2.0-0-dbg libpango1.0-0-dbg libpcre3-dbg - libpixman-1-0-dbg libsqlite3-0-dbg libx11-6-dbg libxau6-dbg - libxcb1-dbg libxcomposite1-dbg libxcursor1-dbg libxdamage1-dbg - libxdmcp6-dbg libxext6-dbg libxfixes3-dbg libxi6-dbg libxinerama1-dbg - libxrandr2-dbg libxrender1-dbg libxtst6-dbg zlib1g-dbg" - -# Find the proper version of libstdc++6-4.x-dbg. -if [ "x$codename" = "xprecise" ]; then - dbg_list="${dbg_list} libstdc++6-4.6-dbg" -elif [ "x$codename" = "xtrusty" ]; then - dbg_list="${dbg_list} libstdc++6-4.8-dbg" -else - dbg_list="${dbg_list} libstdc++6-4.9-dbg" -fi - -# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf -lib32_list="linux-libc-dev:i386" - -# arm cross toolchain packages needed to build chrome on armhf -arm_list="libc6-dev-armhf-cross - linux-libc-dev-armhf-cross" - -# Work around for dependency issue Debian/Stretch -if [[ "x$codename" = "xstretch" || "x$codename" = "xxenial" || \ - "x$codename" = "xbionic" ]]; then - arm_list+=" g++-5-arm-linux-gnueabihf" -else - arm_list+=" g++-arm-linux-gnueabihf" -fi - -# Work around for dependency issue Ubuntu/Trusty: http://crbug.com/435056 -if [ "x$codename" = "xtrusty" ]; then - arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf - gcc-4.8-multilib-arm-linux-gnueabihf" -fi - -# Find the proper version of libgbm-dev. We can't just install libgbm-dev as -# it depends on mesa, and only one version of mesa can exist on the system. -# Hence we must match the same version or this entire script will fail. 
-mesa_variant="" -for variant in "-lts-trusty" "-lts-utopic"; do - if $(dpkg-query -Wf'${Status}' libgl1-mesa-glx${variant} 2>/dev/null | \ - grep -q " ok installed"); then - mesa_variant="${variant}" - fi -done -dev_list="${dev_list} libgbm-dev${mesa_variant} - libgles2-mesa-dev${mesa_variant} libgl1-mesa-dev${mesa_variant} - mesa-common-dev${mesa_variant}" - -# Some package names have changed over time -if package_exists libnspr4-dbg; then - dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg" - lib_list="${lib_list} libnspr4 libnss3" -elif package_exists libnspr4-dbgsym; then - dbg_list="${dbg_list} libnspr4-dbgsym libnss3-dbgsym" - lib_list="${lib_list} libnspr4 libnss3" -else - dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg" - lib_list="${lib_list} libnspr4-0d libnss3-1d" -fi -if package_exists libjpeg-dev; then - dev_list="${dev_list} libjpeg-dev" -else - dev_list="${dev_list} libjpeg62-dev" -fi -if package_exists libudev1; then - dev_list="${dev_list} libudev1" -else - dev_list="${dev_list} libudev0" -fi -if package_exists libbrlapi0.6; then - dev_list="${dev_list} libbrlapi0.6" -else - dev_list="${dev_list} libbrlapi0.5" -fi - -if package_exists libpng16-16; then - lib_list="$lib_list libpng16-16" -else - lib_list="$lib_list libpng12-0" -fi - -# Some packages are only needed if the distribution actually supports -# installing them. -if package_exists appmenu-gtk; then - lib_list="$lib_list appmenu-gtk" -fi -if package_exists libav-tools; then - dev_list="${dev_list} libav-tools" -fi - -# When cross building for arm/Android on 64-bit systems the host binaries -# that are part of v8 need to be compiled with -m32 which means -# that basic multilib support is needed. -if [[ "$(uname -m)" == "x86_64" ]]; then - # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but - # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the - # appropriate value of X and Y by seeing what version the current - # distribution's g++-multilib package depends on. - multilib_package=$(apt-cache depends g++-multilib --important | \ - grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b') - lib32_list="$lib32_list $multilib_package" -fi - -# Waits for the user to press 'Y' or 'N'. Either uppercase of lowercase is -# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has -# been provided to yes_no(), the function also accepts RETURN as a user input. -# The parameter specifies the exit code that should be returned in that case. -# The function will echo the user's selection followed by a newline character. -# Users can abort the function by pressing CTRL-C. This will call "exit 1". -yes_no() { - if [ 0 -ne "${do_default-0}" ] ; then - [ $1 -eq 0 ] && echo "Y" || echo "N" - return $1 - fi - local c - while :; do - c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT - stty -echo iuclc -icanon 2>/dev/null - dd count=1 bs=1 2>/dev/null | od -An -tx1)" - case "$c" in - " 0a") if [ -n "$1" ]; then - [ $1 -eq 0 ] && echo "Y" || echo "N" - return $1 - fi - ;; - " 79") echo "Y" - return 0 - ;; - " 6e") echo "N" - return 1 - ;; - "") echo "Aborted" >&2 - exit 1 - ;; - *) # The user pressed an unrecognized key. As we are not echoing - # any incorrect user input, alert the user by ringing the bell. - (tput bel) 2>/dev/null - ;; - esac - done -} - -if test "$do_inst_syms" = "" && test 0 -eq ${do_quick_check-0} -then - echo "This script installs all tools and libraries needed to build Chromium." 
- echo "" - echo "For most of the libraries, it can also install debugging symbols, which" - echo "will allow you to debug code in the system libraries. Most developers" - echo "won't need these symbols." - echo -n "Do you want me to install them for you (y/N) " - if yes_no 1; then - do_inst_syms=1 - fi -fi -if test "$do_inst_syms" = "1"; then - echo "Including debugging symbols." -else - echo "Skipping debugging symbols." - dbg_list= -fi - -if test "$do_inst_lib32" = "1" ; then - echo "Including 32-bit libraries for ARM/Android." -else - echo "Skipping 32-bit libraries for ARM/Android." - lib32_list= -fi - -if test "$do_inst_arm" = "1" ; then - echo "Including ARM cross toolchain." -else - echo "Skipping ARM cross toolchain." - arm_list= -fi - -# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid -# confusing dpkg-query (crbug.com/446172). -packages="$( - echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}"\ - | tr " " "\n" | sort -u | sort -r -s -t: -k2 | tr "\n" " " -)" - -if [ 1 -eq "${do_quick_check-0}" ] ; then - failed_check="$(dpkg-query -W -f '${PackageSpec}:${Status}\n' \ - ${packages} 2>&1 | grep -v "ok installed" || :)" - if [ -n "${failed_check}" ]; then - echo - nomatch="$(echo "${failed_check}" | \ - sed -e "s/^No packages found matching \(.*\).$/\1/;t;d")" - missing="$(echo "${failed_check}" | \ - sed -e "/^No packages found matching/d;s/^\(.*\):.*$/\1/")" - if [ "$nomatch" ]; then - # Distinguish between packages that actually aren't available to the - # system (i.e. not in any repo) and packages that just aren't known to - # dpkg (i.e. managed by apt). - unknown="" - for p in ${nomatch}; do - if apt-cache show ${p} > /dev/null 2>&1; then - missing="${p}\n${missing}" - else - unknown="${p}\n${unknown}" - fi - done - if [ -n "${unknown}" ]; then - echo "WARNING: The following packages are unknown to your system" - echo "(maybe missing a repo or need to 'sudo apt-get update'):" - echo -e "${unknown}" | sed -e "s/^/ /" - fi - fi - if [ -n "${missing}" ]; then - echo "WARNING: The following packages are not installed:" - echo -e "${missing}" | sed -e "s/^/ /" - fi - exit 1 - fi - exit 0 -fi - -if test "$do_inst_lib32" = "1"; then - if [[ ! $codename =~ (precise) ]]; then - sudo dpkg --add-architecture i386 - fi -fi -sudo apt-get update - -# We initially run "apt-get" with the --reinstall option and parse its output. -# This way, we can find all the packages that need to be newly installed -# without accidentally promoting any packages from "auto" to "manual". -# We then re-run "apt-get" with just the list of missing packages. -echo "Finding missing packages..." -# Intentionally leaving $packages unquoted so it's more readable. -echo "Packages required: " $packages -echo -new_list_cmd="sudo apt-get install --reinstall $(echo $packages)" -if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then - # We probably never hit this following line. - echo "No missing packages, and the packages are up-to-date." -elif [ $? -eq 1 ]; then - # We expect apt-get to have exit status of 1. - # This indicates that we cancelled the install with "yes n|". - new_list=$(echo "$new_list" | - sed -e '1,/The following NEW packages will be installed:/d;s/^ //;t;d') - new_list=$(echo "$new_list" | sed 's/ *$//') - if [ -z "$new_list" ] ; then - echo "No missing packages, and the packages are up-to-date." - else - echo "Installing missing packages: $new_list." 
- sudo apt-get install ${do_quietly-} ${new_list} - fi - echo -else - # An apt-get exit status of 100 indicates that a real error has occurred. - - # I am intentionally leaving out the '"'s around new_list_cmd, - # as this makes it easier to cut and paste the output - echo "The following command failed: " ${new_list_cmd} - echo - echo "It produces the following output:" - yes n | $new_list_cmd || true - echo - echo "You will have to install the above packages yourself." - echo - exit 100 -fi - -# $1 - target name -# $2 - link name -create_library_symlink() { - target=$1 - linkname=$2 - if [ -L $linkname ]; then - if [ "$(basename $(readlink $linkname))" != "$(basename $target)" ]; then - sudo rm $linkname - fi - fi - if [ ! -r $linkname ]; then - echo "Creating link: $linkname" - sudo ln -fs $target $linkname - fi -} diff --git a/build/install-chroot.sh b/build/install-chroot.sh deleted file mode 100755 index 99451ed7ea..0000000000 --- a/build/install-chroot.sh +++ /dev/null @@ -1,888 +0,0 @@ -#!/bin/bash -e - -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This script installs Debian-derived distributions in a chroot environment. -# It can for example be used to have an accurate 32bit build and test -# environment when otherwise working on a 64bit machine. -# N. B. it is unlikely that this script will ever work on anything other than a -# Debian-derived system. - -# Older Debian based systems had both "admin" and "adm" groups, with "admin" -# apparently being used in more places. Newer distributions have standardized -# on just the "adm" group. Check /etc/group for the preferred name of the -# administrator group. -admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm) - -usage() { - echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]" - echo "-b dir additional directories that should be bind mounted," - echo ' or "NONE".' - echo " Default: if local filesystems present, ask user for help" - echo "-g group,... groups that can use the chroot unauthenticated" - echo " Default: '${admin}' and current user's group ('$(id -gn)')" - echo "-l List all installed chroot environments" - echo "-m mirror an alternate repository mirror for package downloads" - echo "-s configure default deb-srcs" - echo "-c always copy 64bit helper binaries to 32bit chroot" - echo "-h this help message" -} - -process_opts() { - local OPTNAME OPTIND OPTERR OPTARG - while getopts ":b:g:lm:sch" OPTNAME; do - case "$OPTNAME" in - b) - if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then - bind_mounts="${OPTARG}" - else - if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \ - ! -d "${OPTARG}" ]; then - echo "Invalid -b option(s)" - usage - exit 1 - fi - bind_mounts="${bind_mounts} -${OPTARG} ${OPTARG} none rw,bind 0 0" - fi - ;; - g) - [ -n "${OPTARG}" ] && - chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}" - ;; - l) - list_all_chroots - exit - ;; - m) - if [ -n "${mirror}" ]; then - echo "You can only specify exactly one mirror location" - usage - exit 1 - fi - mirror="$OPTARG" - ;; - s) - add_srcs="y" - ;; - c) - copy_64="y" - ;; - h) - usage - exit 0 - ;; - \:) - echo "'-$OPTARG' needs an argument." 
- usage - exit 1 - ;; - *) - echo "invalid command-line option: $OPTARG" - usage - exit 1 - ;; - esac - done - - if [ $# -ge ${OPTIND} ]; then - eval echo "Unexpected command line argument: \${${OPTIND}}" - usage - exit 1 - fi -} - -list_all_chroots() { - for i in /var/lib/chroot/*; do - i="${i##*/}" - [ "${i}" = "*" ] && continue - [ -x "/usr/local/bin/${i%bit}" ] || continue - grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue - [ -r "/etc/schroot/script-${i}" -a \ - -r "/etc/schroot/mount-${i}" ] || continue - echo "${i%bit}" - done -} - -getkey() { - ( - trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP - stty -echo iuclc -icanon 2>/dev/null - dd count=1 bs=1 2>/dev/null - ) -} - -chr() { - printf "\\$(printf '%03o' "$1")" -} - -ord() { - printf '%d' $(printf '%c' "$1" | od -tu1 -An) -} - -is_network_drive() { - stat -c %T -f "$1/" 2>/dev/null | - egrep -qs '^nfs|cifs|smbfs' -} - -# Check that we are running as a regular user -[ "$(id -nu)" = root ] && { - echo "Run this script as a regular user and provide your \"sudo\"" \ - "password if requested" >&2 - exit 1 -} - -process_opts "$@" - -echo "This script will help you through the process of installing a" -echo "Debian or Ubuntu distribution in a chroot environment. You will" -echo "have to provide your \"sudo\" password when requested." -echo - -# Error handler -trap 'exit 1' INT TERM QUIT HUP -trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT - -# Install any missing applications that this script relies on. If these packages -# are already installed, don't force another "apt-get install". That would -# prevent them from being auto-removed, if they ever become eligible for that. -# And as this script only needs the packages once, there is no good reason to -# introduce a hard dependency on things such as dchroot and debootstrap. -dep= -for i in dchroot debootstrap libwww-perl; do - [ -d /usr/share/doc/"$i" ] || dep="$dep $i" -done -[ -n "$dep" ] && sudo apt-get -y install $dep -sudo apt-get -y install schroot - -# Create directory for chroot -sudo mkdir -p /var/lib/chroot - -# Find chroot environments that can be installed with debootstrap -targets="$(cd /usr/share/debootstrap/scripts - ls | grep '^[a-z]*$')" - -# Ask user to pick one of the available targets -echo "The following targets are available to be installed in a chroot:" -j=1; for i in $targets; do - printf '%4d: %s\n' "$j" "$i" - j=$(($j+1)) -done -while :; do - printf "Which target would you like to install: " - read n - [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break -done -j=1; for i in $targets; do - [ "$j" -eq "$n" ] && { distname="$i"; break; } - j=$(($j+1)) -done -echo - -# On x86-64, ask whether the user wants to install x86-32 or x86-64 -archflag= -arch= -if [ "$(uname -m)" = x86_64 ]; then - while :; do - echo "You are running a 64bit kernel. This allows you to install either a" - printf "32bit or a 64bit chroot environment. %s" \ - "Which one do you want (32, 64) " - read arch - [ "${arch}" == 32 -o "${arch}" == 64 ] && break - done - [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64" - arch="${arch}bit" - echo -fi -target="${distname}${arch}" - -# Don't accidentally overwrite an existing installation -[ -d /var/lib/chroot/"${target}" ] && { - while :; do - echo "This chroot already exists on your machine." - if schroot -l --all-sessions 2>&1 | - sed 's/^session://' | - grep -qs "^${target%bit}-"; then - echo "And it appears to be in active use. 
Terminate all programs that" - echo "are currently using the chroot environment and then re-run this" - echo "script." - echo "If you still get an error message, you might have stale mounts" - echo "that you forgot to delete. You can always clean up mounts by" - echo "executing \"${target%bit} -c\"." - exit 1 - fi - echo "I can abort installation, I can overwrite the existing chroot," - echo "or I can delete the old one and then exit. What would you like to" - printf "do (a/o/d)? " - read choice - case "${choice}" in - a|A) exit 1;; - o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;; - d|D) sudo rm -rf "/var/lib/chroot/${target}" \ - "/usr/local/bin/${target%bit}" \ - "/etc/schroot/mount-${target}" \ - "/etc/schroot/script-${target}" \ - "/etc/schroot/${target}" - sudo sed -ni '/^[[]'"${target%bit}"']$/,${ - :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \ - "/etc/schroot/schroot.conf" - trap '' INT TERM QUIT HUP - trap '' EXIT - echo "Deleted!" - exit 0;; - esac - done - echo -} -sudo mkdir -p /var/lib/chroot/"${target}" - -# Offer to include additional standard repositories for Ubuntu-based chroots. -alt_repos= -grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && { - while :; do - echo "Would you like to add ${distname}-updates and ${distname}-security " - printf "to the chroot's sources.list (y/n)? " - read alt_repos - case "${alt_repos}" in - y|Y) - alt_repos="y" - break - ;; - n|N) - break - ;; - esac - done - echo -} - -# Check for non-standard file system mount points and ask the user whether -# they should be imported into the chroot environment -# We limit to the first 26 mount points that much some basic heuristics, -# because a) that allows us to enumerate choices with a single character, -# and b) if we find more than 26 mount points, then these are probably -# false-positives and something is very unusual about the system's -# configuration. No need to spam the user with even more information that -# is likely completely irrelevant. -if [ -z "${bind_mounts}" ]; then - mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" && - $2 !~ "^/media" && $2 !~ "^/run" && - ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" || - $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" || - $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" || - $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") { - print $2 - }' /proc/mounts | - head -n26)" - if [ -n "${mounts}" ]; then - echo "You appear to have non-standard mount points that you" - echo "might want to import into the chroot environment:" - echo - sel= - while :; do - # Print a menu, listing all non-default mounts of local or network - # file systems. 
-      j=1; for m in ${mounts}; do
-        c="$(printf $(printf '\\%03o' $((64+$j))))"
-        echo "$sel" | grep -qs $c &&
-          state="mounted in chroot" || state="$(tput el)"
-        printf "  $c) %-40s${state}\n" "$m"
-        j=$(($j+1))
-      done
-      # Allow user to interactively (de-)select any of the entries
-      echo
-      printf "Select mount points that you want to be included or press %s" \
-             "SPACE to continue"
-      c="$(getkey | tr a-z A-Z)"
-      [ "$c" == " " ] && { echo; echo; break; }
-      if [ -z "$c" ] ||
-         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
-        # Invalid input, ring the console bell
-        tput bel
-      else
-        # Toggle the selection for the given entry
-        if echo "$sel" | grep -qs $c; then
-          sel="$(printf "$sel" | sed "s/$c//")"
-        else
-          sel="$sel$c"
-        fi
-      fi
-      # Reposition cursor to the top of the list of entries
-      tput cuu $(($j + 1))
-      echo
-    done
-  fi
-  j=1; for m in ${mounts}; do
-    c="$(chr $(($j + 64)))"
-    if echo "$sel" | grep -qs $c; then
-      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
-"
-    fi
-    j=$(($j+1))
-  done
-fi
-
-# Remove stale entry from /etc/schroot/schroot.conf. Entries start
-# with the target name in square brackets, followed by an arbitrary
-# number of lines. The entry stops when either the end of file has
-# been reached, or when the beginning of a new target is encountered.
-# This means, we cannot easily match for a range of lines in
-# "sed". Instead, we actually have to iterate over each line and check
-# whether it is the beginning of a new entry.
-sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
-    /etc/schroot/schroot.conf
-
-# Download base system. This takes some time
-if [ -z "${mirror}" ]; then
-  grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
-    mirror="http://archive.ubuntu.com/ubuntu" ||
-    mirror="http://ftp.us.debian.org/debian"
-fi
-
-sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
-    "${distname}" "/var/lib/chroot/${target}" "$mirror"
-
-# Add new entry to /etc/schroot/schroot.conf
-grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
-  brand="Ubuntu" || brand="Debian"
-if [ -z "${chroot_groups}" ]; then
-  chroot_groups="${admin},$(id -gn)"
-fi
-
-if [ -d '/etc/schroot/default' ]; then
-  new_version=1
-  fstab="/etc/schroot/${target}/fstab"
-else
-  new_version=0
-  fstab="/etc/schroot/mount-${target}"
-fi
-
-if [ "$new_version" = "1" ]; then
-  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
-
-  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
-[${target%bit}]
-description=${brand} ${distname} ${arch}
-type=directory
-directory=/var/lib/chroot/${target}
-users=root
-groups=${chroot_groups}
-root-groups=${chroot_groups}
-personality=linux$([ "${arch}" != 64bit ] && echo "32")
-profile=${target}
-
-EOF
-  [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
-    printf "${bind_mounts}" |
-      sudo sh -c "cat >>${fstab}"
-else
-  # Older versions of schroot wanted a "priority=" line, whereas recent
-  # versions deprecate "priority=" and warn if they see it. We don't have
-  # a good feature test, but scanning for the string "priority=" in the
-  # existing "schroot.conf" file is a good indication of what to do.
-  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
-             echo 'priority=3' || :)
-  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
-[${target%bit}]
-description=${brand} ${distname} ${arch}
-type=directory
-directory=/var/lib/chroot/${target}
-users=root
-groups=${chroot_groups}
-root-groups=${chroot_groups}
-personality=linux$([ "${arch}" != 64bit ] && echo "32")
-script-config=script-${target}
-${priority}
-
-EOF
-
-  # Set up a list of mount points that is specific to this
-  # chroot environment.
-  sed 's,^FSTAB=.*,FSTAB="'"${fstab}"'",' /etc/schroot/script-defaults |
-    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
-  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
-    /etc/schroot/mount-defaults |
-  sudo sh -c "cat > ${fstab}"
-fi
-
-# Add the extra mount points that the user told us about
-[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
-  printf "${bind_mounts}" |
-    sudo sh -c 'cat >>'"${fstab}"
-
-# If this system has a "/media" mountpoint, import it into the chroot
-# environment. Most modern distributions use this mount point to
-# automatically mount devices such as CDROMs, USB sticks, etc...
-if [ -d /media ] &&
-   ! 
grep -qs '^/media' "${fstab}"; then
-  echo '/media /media none rw,rbind 0 0' |
-    sudo sh -c 'cat >>'"${fstab}"
-fi
-
-# Share /dev/shm, /run and /run/shm.
-grep -qs '^/dev/shm' "${fstab}" ||
-  echo '/dev/shm /dev/shm none rw,bind 0 0' |
-  sudo sh -c 'cat >>'"${fstab}"
-if [ ! -d "/var/lib/chroot/${target}/run" ] &&
-   ! grep -qs '^/run' "${fstab}"; then
-  echo '/run /run none rw,bind 0 0' |
-    sudo sh -c 'cat >>'"${fstab}"
-fi
-if ! grep -qs '^/run/shm' "${fstab}"; then
-  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
-    echo '/dev/shm /run/shm none rw,bind 0 0'; } |
-    sudo sh -c 'cat >>'"${fstab}"
-fi
-
-# Set up a special directory that changes contents depending on the target
-# that is executing.
-d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
-s="${d}/.${target}"
-echo "${s} ${d} none rw,bind 0 0" |
-  sudo sh -c 'cat >>'"${fstab}"
-mkdir -p "${s}"
-
-# Install a helper script to launch commands in the chroot
-sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
-#!/bin/bash
-
-chroot="${0##*/}"
-
-wrap() {
-  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
-  # insert the same number of spaces as the number of characters in the
-  # parameter(s) passed to this function.
-  # If the "fold" program cannot be found, or if the actual width of the
-  # terminal cannot be determined, this function doesn't attempt to do any
-  # wrapping.
-  local f="$(type -P fold)"
-  [ -z "${f}" ] && { cat; return; }
-  local c="$(stty -a </dev/tty 2>/dev/null |
-             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
-  [ -z "${c}" ] && { cat; return; }
-  local i="$(echo "$*"|sed 's/./ /g')"
-  local j="$(printf %s "${i}"|wc -c)"
-  if [ "${c}" -gt "${j}" ]; then
-    dd bs=1 count="${j}" 2>/dev/null
-    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
-  else
-    "${f}" -sw "${c}"
-  fi
-}
-
-help() {
-  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
-  echo "  help:      print this message"                                   | wrap "  "
-  echo "  list:      list all known chroot environments"                   | wrap "  "
-  echo "  clean:     remove all old chroot sessions for \"${chroot}\""     | wrap "  "
-  echo "  clean-all: remove all old chroot sessions for all environments"  | wrap "  "
-  exit 0
-}
-
-clean() {
-  local s t rc
-  rc=0
-  for s in $(schroot -l --all-sessions); do
-    if [ -n "$1" ]; then
-      t="${s#session:}"
-      [ "${t#${chroot}-}" == "${t}" ] && continue
-    fi
-    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
-       fgrep -qs "/var/lib/schroot/mount/${t}"; then
-      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
-      rc=1
-      continue
-    fi
-    sudo schroot -c "${s}" -e || rc=1
-  done
-  exit ${rc}
-}
-
-list() {
-  for e in $(schroot -l); do
-    e="${e#chroot:}"
-    [ -x "/usr/local/bin/${e}" ] || continue
-    if schroot -l --all-sessions 2>/dev/null |
-       sed 's/^session://' |
-       grep -qs "^${e}-"; then
-      echo "${e} is currently active"
-    else
-      echo "${e}"
-    fi
-  done
-  exit 0
-}
-
-while [ "$#" -ne 0 ]; do
-  case "$1" in
-    --)             shift; break;;
-    -h|--help)      shift; help;;
-    -l|--list)      shift; list;;
-    -c|--clean)     shift; clean "${chroot}";;
-    -C|--clean-all) shift; clean;;
-    *)              break;;
-  esac
-done
-
-# Start a new chroot session and keep track of the session id. We inject this
-# id into all processes that run inside the chroot. Unless they go out of their
-# way to clear their environment, we can then later identify our child and
-# grand-child processes by scanning their environment. 
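[Aside, not from the deleted file: the scan described in the comment above
works because /proc/<pid>/environ exposes a process's environment as
NUL-separated KEY=value pairs. A minimal standalone sketch of the same lookup,
assuming "$pid" names a process you are allowed to read:

    # Print the CHROOT_SESSION_ID recorded in a process's environment.
    # /proc/$pid/environ is NUL-separated, so convert NULs to newlines first.
    tr '\0' '\n' </proc/"$pid"/environ | sed -n 's/^CHROOT_SESSION_ID=//p'

The script below does the equivalent with xargs -0 and sed so that it also
works in minimal environments.]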
-session="$(schroot -c "${chroot}" -b)"
-export CHROOT_SESSION_ID="${session}"
-
-# Set GOMA_TMP_DIR for better handling of goma inside chroot.
-export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
-mkdir -p "$GOMA_TMP_DIR"
-
-if [ $# -eq 0 ]; then
-  # Run an interactive shell session
-  schroot -c "${session}" -r -p
-else
-  # Run a command inside of the chroot environment
-  p="$1"; shift
-  schroot -c "${session}" -r -p "$p" -- "$@"
-fi
-rc=$?
-
-# Compute the inode of the root directory inside of the chroot environment.
-i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
-    awk '{ print $1 }') 2>/dev/null
-other_pids=
-while [ -n "$i" ]; do
-  # Identify processes by the inode number of their root directory. Then
-  # remove all processes that we know belong to other sessions. We use
-  # "sort | uniq -u" to do what amounts to a "set subtraction operation".
-  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
-           sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
-                   t
-                   d';
-           echo "${other_pids}";
-           echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
-  # Kill all processes that are still left running in the session. This is
-  # typically an assortment of daemon processes that were started
-  # automatically. They result in us being unable to tear down the session
-  # cleanly.
-  [ -z "${pids}" ] && break
-  for j in $pids; do
-    # Unfortunately, the way that schroot sets up sessions has the
-    # side-effect of being unable to tell one session apart from another.
-    # This can result in us attempting to kill processes in other sessions.
-    # We make a best-effort to avoid doing so.
-    k="$( (xargs -0 -n1 </proc/$j/environ 2>/dev/null || :) |
-         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
-    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
-      other_pids="${other_pids}
-${j}"
-      continue
-    fi
-    kill -9 $j
-  done
-done
-# End the chroot session. This should clean up all temporary files. But if we
-# earlier failed to terminate all (daemon) processes inside of the session,
-# deleting the session could fail. When that happens, the user has to manually
-# clean up the stale files by invoking us with "--clean" after having killed
-# all running processes.
-schroot -c "${session}" -e
-# Since no goma processes are running, we can remove the goma directory.
-rm -rf "$GOMA_TMP_DIR"
-exit $rc
-EOF
-sudo chown root:root /usr/local/bin/"${target%bit}"
-sudo chmod 755 /usr/local/bin/"${target%bit}"
-
-# Add the standard Ubuntu update repositories if requested. 
-[ "${alt_repos}" = "y" -a \ - -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && -sudo sed -i '/^deb .* [^ -]\+ main$/p - s/^\(deb .* [^ -]\+\) main/\1-security main/ - p - t1 - d - :1;s/-security main/-updates main/ - t - d' "/var/lib/chroot/${target}/etc/apt/sources.list" - -# Add a few more repositories to the chroot -[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && -sudo sed -i 's/ main$/ main restricted universe multiverse/' \ - "/var/lib/chroot/${target}/etc/apt/sources.list" - -# Add the Ubuntu "partner" repository, if available -if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && - HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \ - >&/dev/null; then - sudo sh -c ' - echo "deb http://archive.canonical.com/ubuntu" \ - "'"${distname}"' partner" \ - >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"' -fi - -# Add source repositories, if the user requested we do so -[ "${add_srcs}" = "y" -a \ - -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && -sudo sed -i '/^deb[^-]/p - s/^deb\([^-]\)/deb-src\1/' \ - "/var/lib/chroot/${target}/etc/apt/sources.list" - -# Set apt proxy if host has set http_proxy -if [ -n "${http_proxy}" ]; then - sudo sh -c ' - echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \ - >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"' -fi - -# Update packages -sudo "/usr/local/bin/${target%bit}" /bin/sh -c ' - apt-get update; apt-get -y dist-upgrade' || : - -# Install a couple of missing packages -for i in debian-keyring ubuntu-keyring locales sudo; do - [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] || - sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || : -done - -# Configure locales -sudo "/usr/local/bin/${target%bit}" /bin/sh -c ' - l='"${LANG:-en_US}"'; l="${l%%.*}" - [ -r /etc/locale.gen ] && - sed -i "s/^# \($l\)/\1/" /etc/locale.gen - locale-gen $LANG en_US en_US.UTF-8' || : - -# Enable multi-arch support, if available -sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null && - [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && { - sudo sed -i 's/ / [arch=amd64,i386] /' \ - "/var/lib/chroot/${target}/etc/apt/sources.list" - [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] && - sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \ - $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null || - echo foreign-architecture \ - $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) | - sudo sh -c \ - "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'" -} - -# Configure "sudo" package -sudo "/usr/local/bin/${target%bit}" /bin/sh -c ' - egrep -qs '"'^$(id -nu) '"' /etc/sudoers || - echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers' - -# Install a few more commonly used packages -sudo "/usr/local/bin/${target%bit}" apt-get -y install \ - autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool \ - lsof strace - -# If running a 32bit environment on a 64bit machine, install a few binaries -# as 64bit. This is only done automatically if the chroot distro is the same as -# the host, otherwise there might be incompatibilities in build settings or -# runtime dependencies. The user can force it with the '-c' flag. 
-host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
-  cut -d "=" -f 2)
-if [ "${copy_64}" = "y" -o \
-     "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
-   file /bin/bash 2>/dev/null | grep -q x86-64; then
-  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
-    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
-  sudo "/usr/local/bin/${target%bit}" apt-get -y install \
-    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
-  dep=
-  for i in binutils gdb; do
-    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
-  done
-  [ -n "$dep" ] && sudo apt-get -y install $dep
-  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
-  for i in libbfd libpython; do
-    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
-           grep -s "$i" | awk '{ print $3 }')"
-    if [ -n "$lib" -a -r "$lib" ]; then
-      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
-    fi
-  done
-  for lib in libssl libcrypt; do
-    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
-      sudo cp $path/$lib* \
-        "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
-    done
-  done
-  for i in gdb ld; do
-    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
-    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
-#!/bin/sh
-exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
-  /usr/local/lib/amd64/$i "\$@"
-EOF
-    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
-  done
-fi
-
-# If the install-build-deps.sh script can be found, offer to run it now.
-script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
-if [ -x "${script}" ]; then
-  while :; do
-    echo
-    echo "If you plan on building Chrome inside of the chroot environment,"
-    echo "you now have to install the build dependencies. Do you want me to"
-    printf "run the script that installs them for you (y/n)? "
-    read install_deps
-    case "${install_deps}" in
-      y|Y)
-        echo
-        # We prefer running the script in-place, but this might not be
-        # possible if it lives on a network filesystem that denies
-        # access to root.
-        tmp_script=
-        if ! sudo /usr/local/bin/"${target%bit}" \
-            sh -c "[ -x '${script}' ]" >&/dev/null; then
-          tmp_script="/tmp/${script##*/}"
-          cp "${script}" "${tmp_script}"
-        fi
-        # Some distributions automatically start an instance of the system-
-        # wide dbus daemon, cron daemon or of the logging daemon, when
-        # installing the Chrome build dependencies. This prevents the chroot
-        # session from being closed. So, we always try to shut down any running
-        # instance of dbus and rsyslog.
-        sudo /usr/local/bin/"${target%bit}" sh -c "${script};
-          rc=$?;
-          /etc/init.d/cron stop >/dev/null 2>&1 || :;
-          /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
-          /etc/init.d/dbus stop >/dev/null 2>&1 || :;
-          exit $rc"
-        rc=$?
-        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
-        [ $rc -ne 0 ] && exit $rc
-        break
-        ;;
-      n|N)
-        break
-        ;;
-    esac
-  done
-  echo
-fi
-
-# Check whether ~/chroot is on a (slow) network file system and offer to
-# relocate it. Also offer relocation, if the user appears to have multiple
-# spindles (as indicated by "${bind_mounts}" being non-empty).
-# We only offer this option, if it doesn't look as if a chroot environment
-# is currently active. Otherwise, relocation is unlikely to work and it
-# can be difficult for the user to recover from the failed attempt to relocate
-# the ~/chroot directory.
-# We don't aim to solve this problem for every configuration,
-# but try to help with the common cases. For more advanced configuration
-# options, the user can always manually adjust things.
-mkdir -p "${HOME}/chroot/"
-if [ ! -h "${HOME}/chroot" ] &&
-   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
-   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
-     is_network_drive "${HOME}/chroot"; } &&
-   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
-  echo "${HOME}/chroot is currently located on the same device as your"
-  echo "home directory."
-  echo "This might not be what you want. Do you want me to move it somewhere"
-  echo "else?"
-  # If the computer has multiple spindles, many users configure all or part of
-  # the secondary hard disk to be writable by the primary user of this machine.
-  # Make some reasonable effort to detect this type of configuration and
-  # then offer a good location for where to put the ~/chroot directory. 
- suggest= - for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do - if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] && - ! is_network_drive "$i"; then - suggest="$i" - else - for j in "$i/"*; do - if [ -d "$j" -a -w "$j" -a \ - \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] && - ! is_network_drive "$j"; then - suggest="$j" - else - for k in "$j/"*; do - if [ -d "$k" -a -w "$k" -a \ - \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] && - ! is_network_drive "$k"; then - suggest="$k" - break - fi - done - fi - [ -n "${suggest}" ] && break - done - fi - [ -n "${suggest}" ] && break - done - def_suggest="${HOME}" - if [ -n "${suggest}" ]; then - # For home directories that reside on network drives, make our suggestion - # the default option. For home directories that reside on a local drive, - # require that the user manually enters the new location. - if is_network_drive "${HOME}"; then - def_suggest="${suggest}" - else - echo "A good location would probably be in \"${suggest}\"" - fi - fi - while :; do - printf "Physical location [${def_suggest}]: " - read dir - [ -z "${dir}" ] && dir="${def_suggest}" - [ "${dir%%/}" == "${HOME%%/}" ] && break - if ! [ -d "${dir}" -a -w "${dir}" ] || - [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then - echo "Cannot write to ${dir}/chroot. Please try again" - else - mv "${HOME}/chroot" "${dir}/chroot" - ln -s "${dir}/chroot" "${HOME}/chroot" - for i in $(list_all_chroots); do - sudo "$i" mkdir -p "${dir}/chroot" - done - sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-* - break - fi - done -fi - -# Clean up package files -sudo schroot -c "${target%bit}" -p -- apt-get clean -sudo apt-get clean - -trap '' INT TERM QUIT HUP -trap '' EXIT - -# Let the user know what we did -cat < %r' % (func.__name__, val[0])) - return val[0] - return inner - return memoizer - - -@memoize() -def IsWindows(): - return sys.platform in ['win32', 'cygwin'] - - -@memoize() -def IsLinux(): - return sys.platform.startswith(('linux', 'freebsd', 'openbsd')) - - -@memoize() -def IsMac(): - return sys.platform == 'darwin' - - -@memoize() -def gyp_defines(): - """Parses and returns GYP_DEFINES env var as a dictionary.""" - return dict(arg.split('=', 1) - for arg in shlex.split(os.environ.get('GYP_DEFINES', ''))) - -@memoize() -def gyp_generator_flags(): - """Parses and returns GYP_GENERATOR_FLAGS env var as a dictionary.""" - return dict(arg.split('=', 1) - for arg in shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', ''))) - -@memoize() -def gyp_msvs_version(): - return os.environ.get('GYP_MSVS_VERSION', '') - -@memoize() -def distributor(): - """ - Returns a string which is the distributed build engine in use (if any). - Possible values: 'goma', 'ib', '' - """ - if 'goma' in gyp_defines(): - return 'goma' - elif IsWindows(): - if 'CHROME_HEADLESS' in os.environ: - return 'ib' # use (win and !goma and headless) as approximation of ib - - -@memoize() -def platform(): - """ - Returns a string representing the platform this build is targetted for. - Possible values: 'win', 'mac', 'linux', 'ios', 'android' - """ - if 'OS' in gyp_defines(): - if 'android' in gyp_defines()['OS']: - return 'android' - else: - return gyp_defines()['OS'] - elif IsWindows(): - return 'win' - elif IsLinux(): - return 'linux' - else: - return 'mac' - - -@memoize() -def builder(): - """ - Returns a string representing the build engine (not compiler) to use. 
- Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons' - """ - if 'GYP_GENERATORS' in os.environ: - # for simplicity, only support the first explicit generator - generator = os.environ['GYP_GENERATORS'].split(',')[0] - if generator.endswith('-android'): - return generator.split('-')[0] - elif generator.endswith('-ninja'): - return 'ninja' - else: - return generator - else: - if platform() == 'android': - # Good enough for now? Do any android bots use make? - return 'ninja' - elif platform() == 'ios': - return 'xcode' - elif IsWindows(): - return 'ninja' - elif IsLinux(): - return 'ninja' - elif IsMac(): - return 'ninja' - else: - assert False, 'Don\'t know what builder we\'re using!' diff --git a/build/output_dll_copy.rules b/build/output_dll_copy.rules deleted file mode 100644 index c6e905131d..0000000000 --- a/build/output_dll_copy.rules +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - - - - diff --git a/build/protoc_java.py b/build/protoc_java.py deleted file mode 100755 index 59e9201881..0000000000 --- a/build/protoc_java.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Generate java source files from protobuf files. - -This is a helper file for the genproto_java action in protoc_java.gypi. - -It performs the following steps: -1. Deletes all old sources (ensures deleted classes are not part of new jars). -2. Creates source directory. -3. Generates Java files using protoc (output into either --java-out-dir or - --srcjar). -4. Creates a new stamp file. -""" - -import os -import optparse -import shutil -import subprocess -import sys - -sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp")) -from util import build_utils - -def main(argv): - parser = optparse.OptionParser() - build_utils.AddDepfileOption(parser) - parser.add_option("--protoc", help="Path to protoc binary.") - parser.add_option("--proto-path", help="Path to proto directory.") - parser.add_option("--java-out-dir", - help="Path to output directory for java files.") - parser.add_option("--srcjar", help="Path to output srcjar.") - parser.add_option("--stamp", help="File to touch on success.") - options, args = parser.parse_args(argv) - - build_utils.CheckOptions(options, parser, ['protoc', 'proto_path']) - if not options.java_out_dir and not options.srcjar: - print('One of --java-out-dir or --srcjar must be specified.') - return 1 - - with build_utils.TempDir() as temp_dir: - # Specify arguments to the generator. - generator_args = ['optional_field_style=reftypes', - 'store_unknown_fields=true'] - out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir - # Generate Java files using protoc. 
- build_utils.CheckOutput( - [options.protoc, '--proto_path', options.proto_path, out_arg] - + args) - - if options.java_out_dir: - build_utils.DeleteDirectory(options.java_out_dir) - shutil.copytree(temp_dir, options.java_out_dir) - else: - build_utils.ZipDir(options.srcjar, temp_dir) - - if options.depfile: - build_utils.WriteDepfile( - options.depfile, - args + [options.protoc] + build_utils.GetPythonDependencies()) - - if options.stamp: - build_utils.Touch(options.stamp) - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/build/rmdir_and_stamp.py b/build/rmdir_and_stamp.py deleted file mode 100755 index 1611b29e8b..0000000000 --- a/build/rmdir_and_stamp.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Wipes out a directory recursively and then touches a stamp file. - -This odd pairing of operations is used to support build scripts which -slurp up entire directories (e.g. build/android/javac.py when handling -generated sources) as inputs. - -The general pattern of use is: - - - Add a target which generates |gen_sources| into |out_path| from |inputs|. - - Include |stamp_file| as an input for that target or any of its rules which - generate files in |out_path|. - - Add an action which depends on |inputs| and which outputs |stamp_file|; - the action should run this script and pass |out_path| and |stamp_file| as - its arguments. - -The net result is that you will force |out_path| to be wiped and all -|gen_sources| to be regenerated any time any file in |inputs| changes. - -See //third_party/mojo/mojom_bindings_generator.gypi for an example use case. - -""" - -import errno -import os -import shutil -import sys - - -def Main(dst_dir, stamp_file): - try: - shutil.rmtree(os.path.normpath(dst_dir)) - except OSError as e: - # Ignore only "not found" errors. - if e.errno != errno.ENOENT: - raise e - with open(stamp_file, 'a'): - os.utime(stamp_file, None) - -if __name__ == '__main__': - sys.exit(Main(sys.argv[1], sys.argv[2])) diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed deleted file mode 100644 index b4111c7b82..0000000000 --- a/build/sanitize-mac-build-log.sed +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Use this sed script to reduce a Mac build log into something readable. - -# Drop uninformative lines. -/^distcc/d -/^Check dependencies/d -/^ setenv /d -/^ cd /d -/^make: Nothing to be done/d -/^$/d - -# Xcode prints a short "compiling foobar.o" line followed by the lengthy -# full command line. These deletions drop the command line. -\|^ /Developer/usr/bin/|d -\|^ /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d -\|^ /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d - -# Drop any goma command lines as well. -\|^ .*/gomacc |d - -# And, if you've overridden something from your own bin directory, remove those -# full command lines, too. -\|^ /Users/[^/]*/bin/|d - -# There's already a nice note for bindings, don't need the command line. -\|^python scripts/rule_binding\.py|d - -# Shorten the "compiling foobar.o" line. 
-s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1| -s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1| diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh deleted file mode 100755 index df5a7af29e..0000000000 --- a/build/sanitize-mac-build-log.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -# Copyright (c) 2010 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed deleted file mode 100644 index c18e664c83..0000000000 --- a/build/sanitize-win-build-log.sed +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Use this sed script to reduce a Windows build log into something -# machine-parsable. - -# Drop uninformative lines. -/The operation completed successfully\./d - -# Drop parallelization indicators on lines. -s/^[0-9]+>// - -# Shorten bindings generation lines -s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/ idl_compiler \1/ diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh deleted file mode 100755 index df5a7af29e..0000000000 --- a/build/sanitize-win-build-log.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -# Copyright (c) 2010 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/build/symlink.py b/build/symlink.py deleted file mode 100755 index 44f9d8a507..0000000000 --- a/build/symlink.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -"""Make a symlink and optionally touch a file (to handle dependencies).""" -import errno -import optparse -import os.path -import shutil -import sys -def Main(argv): - parser = optparse.OptionParser() - parser.add_option('-f', '--force', action='store_true') - parser.add_option('--touch') - options, args = parser.parse_args(argv[1:]) - if len(args) < 2: - parser.error('at least two arguments required.') - target = args[-1] - sources = args[:-1] - for s in sources: - t = os.path.join(target, os.path.basename(s)) - if len(sources) == 1 and not os.path.isdir(target): - t = target - try: - os.symlink(s, t) - except OSError as e: - if e.errno == errno.EEXIST and options.force: - if os.path.isdir(t): - shutil.rmtree(t, ignore_errors=True) - else: - os.remove(t) - os.symlink(s, t) - else: - raise - if options.touch: - with open(options.touch, 'w') as f: - pass -if __name__ == '__main__': - sys.exit(Main(sys.argv)) diff --git a/build/temp_gyp/README.chromium b/build/temp_gyp/README.chromium deleted file mode 100644 index 8045d61591..0000000000 --- a/build/temp_gyp/README.chromium +++ /dev/null @@ -1,3 +0,0 @@ -This directory will be removed once the files in it are committed upstream and -Chromium imports an upstream revision with these files. Contact mark for -details. 
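[Aside, not from the deleted files: symlink.py above takes one or more sources
followed by a target, replaces existing links when -f/--force is given, and
can touch a stamp file so GYP-style builds have an output to track. A
hypothetical invocation, with paths invented purely for illustration:

    # Link two headers into out/include and record a stamp for the build.
    python3 build/symlink.py -f --touch out/symlink.stamp \
        src/a.h src/b.h out/include

With more than one source, the last argument must be a directory; each link is
created inside it under the source's basename.]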
diff --git a/build/tree_truth.sh b/build/tree_truth.sh deleted file mode 100755 index 617092dc8a..0000000000 --- a/build/tree_truth.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash -# Copyright 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -# -# Script for printing recent commits in a buildbot run. - -# Return the sha1 of the given tag. If not present, return "". -# $1: path to repo -# $2: tag name -tt_sha1_for_tag() { - oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null) - if [ $? -eq 0 ] ; then - echo $oneline - fi -} - -# Return the sha1 of HEAD, or "" -# $1: path to repo -tt_sha1_for_head() { - ( cd $1 && git log HEAD -n1 --format='%H' | cat ) -} - -# For the given repo, set tag to HEAD. -# $1: path to repo -# $2: tag name -tt_tag_head() { - ( cd $1 && git tag -f $2 ) -} - -# For the given repo, delete the tag. -# $1: path to repo -# $2: tag name -tt_delete_tag() { - ( cd $1 && git tag -d $2 ) -} - -# For the given repo, set tag to "three commits ago" (for testing). -# $1: path to repo -# $2: tag name -tt_tag_three_ago() { - local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}') - ( cd $1 && git tag -f $2 $sh ) -} - -# List the commits between the given tag and HEAD. -# If the tag does not exist, only list the last few. -# If the tag is at HEAD, list nothing. -# Output format has distinct build steps for repos with changes. -# $1: path to repo -# $2: tag name -# $3: simple/short repo name to use for display -tt_list_commits() { - local tag_sha1=$(tt_sha1_for_tag $1 $2) - local head_sha1=$(tt_sha1_for_head $1) - local display_name=$(echo $3 | sed 's#/#_#g') - if [ "${tag_sha1}" = "${head_sha1}" ] ; then - return - fi - if [ "${tag_sha1}" = "" ] ; then - echo "@@@BUILD_STEP Recent commits in repo $display_name@@@" - echo "NOTE: git tag was not found so we have no baseline." - echo "Here are some recent commits, but they may not be new for this build." - ( cd $1 && git log -n 10 --stat | cat) - else - echo "@@@BUILD_STEP New commits in repo $display_name@@@" - ( cd $1 && git log -n 500 $2..HEAD --stat | cat) - fi -} - -# Clean out the tree truth tags in all repos. For testing. -tt_clean_all() { - for project in $@; do - tt_delete_tag $CHROME_SRC/../$project tree_truth - done -} - -# Print tree truth for all clank repos. -tt_print_all() { - for project in $@; do - local full_path=$CHROME_SRC/../$project - tt_list_commits $full_path tree_truth $project - tt_tag_head $full_path tree_truth - done -} - -# Print a summary of the last 10 commits for each repo. -tt_brief_summary() { - echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@" - for project in $@; do - echo $project: - local full_path=$CHROME_SRC/../$project - (cd $full_path && git log -n 10 --format=" %H %s %an, %ad" | cat) - echo "=================================================================" - done -} - -CHROME_SRC=$1 -shift -PROJECT_LIST=$@ -tt_brief_summary $PROJECT_LIST -tt_print_all $PROJECT_LIST diff --git a/build/update-linux-sandbox.sh b/build/update-linux-sandbox.sh deleted file mode 100755 index 735733a0d0..0000000000 --- a/build/update-linux-sandbox.sh +++ /dev/null @@ -1,75 +0,0 @@ -#!/bin/sh - -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -BUILDTYPE="${BUILDTYPE:-Debug}" -CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}" -CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}" -CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox" -CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox" -CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH") - -TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null) -if [ $? -ne 0 ]; then - echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}" - exit 1 -fi - -# Make sure the path is not on NFS. -if [ "${TARGET_DIR_TYPE}" = "6969" ]; then - echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!" - exit 1 -fi - -installsandbox() { - echo "(using sudo so you may be asked for your password)" - sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \ - "${CHROME_SANDBOX_INST_PATH}" && - sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" && - sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}" - return $? -} - -if [ ! -d "${CHROME_OUT_DIR}" ]; then - echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" " - echo "If you are building in Release mode" - exit 1 -fi - -if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then - echo -n "Could not find ${CHROME_SANDBOX_BUILD_PATH}, " - echo "please make sure you build the chrome_sandbox target" - exit 1 -fi - -if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then - echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, " - echo "installing it now." - installsandbox -fi - -if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then - echo "Failed to install ${CHROME_SANDBOX_INST_PATH}" - exit 1 -fi - -CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api) -INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api) - -if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then - echo "Your installed setuid sandbox is too old, installing it now." - if ! installsandbox; then - echo "Failed to install ${CHROME_SANDBOX_INST_PATH}" - exit 1 - fi -else - echo "Your setuid sandbox is up to date" - if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then - echo -n "Make sure you have \"export " - echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" " - echo "somewhere in your .bashrc" - echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}" - fi -fi diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt deleted file mode 100644 index ea82f4e4ca..0000000000 --- a/build/whitespace_file.txt +++ /dev/null @@ -1,156 +0,0 @@ -Copyright 2014 The Chromium Authors. All rights reserved. -Use of this useless file is governed by a BSD-style license that can be -found in the LICENSE file. - - -This file is used for making non-code changes to trigger buildbot cycles. Make -any modification below this line. - -====================================================================== - -Let's make a story. Add zero+ sentences for every commit: - -CHÄPTER 1: -It was a dark and blinky night; the rain fell in torrents -- except at -occasional intervals, when it was checked by a violent gust of wind which -swept up the streets (for it is in London that our scene lies), rattling along -the housetops, and fiercely agitating the scanty flame of the lamps that -struggled against the elements. A hooded figure emerged. - -It was a Domo-Kun. - -"What took you so long?", inquired his wife. - -Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the -waffles you brought him?" 
"You know him, he's not one to forego a waffle, -no matter how burnt," he snickered. - -The pause was filled with the sound of compile errors. - -CHAPTER 2: -The jelly was as dark as night, and just as runny. -The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles -with his fork, watching the runny jelly spread and pool across his plate, -like the blood of a dying fawn. "It reminds me of that time --" he started, as -his wife cut in quickly: "-- please. I can't bear to hear it.". A flury of -images coming from the past flowed through his mind. - -"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly -overhead, barely disturbing the thick cigarette smoke. No doubt was left about -when the fan was last cleaned. - -There was a poignant pause. - -CHAPTER 3: -Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he -began feeling sick. He thought out loud to himself, "No, he wouldn't have done -that to me." He considered that perhaps he shouldn't have pushed so hard. -Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable -horror that had occurred just the week before. - -Next time, there won't be any sushi. Why sushi with waffles anyway? It's like -adorning breakfast cereal with halibut -- shameful. - -CHAPTER 4: -The taste of stale sushi in his mouth the next morning was unbearable. He -wondered where the sushi came from as he attempted to wash the taste away with -a bottle of 3000¥ sake. He tries to recall the cook's face. Purple? Probably. - -CHAPTER 5: -Many tears later, Mr. Usagi would laugh at the memory of the earnest, -well-intentioned Domo-Kun. Another day in the life. That is when he realized that -life goes on. - -TRUISMS (1978-1983) -JENNY HOLZER -A LITTLE KNOWLEDGE CAN GO A LONG WAY -A LOT OF PROFESSIONALS ARE CRACKPOTS -A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER -A NAME MEANS A LOT JUST BY ITSELF -A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD -A RELAXED MAN IS NOT NECESSARILY A BETTER MAN -NO ONE SHOULD EVER USE SVN -AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS -IT IS MANS FATE TO OUTSMART HIMSELF -BEING SURE OF YOURSELF MEANS YOU'RE A FOOL -AM NOT -ARE TOO -IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY -AND THEN, TIME LEAPT BACKWARDS -AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT -I'm really tempted to change something above the line. -Reeccciiiipppppeeeeeesssssss!!!!!!!!! -PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION. -WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY - -I can feel the heat closing in, feel them out there making their moves... -What could possibly go wrong? We've already ate our cake. - -Stand Still. Pause Clocks. We can make the World Stop. -WUBWUBWUBWUBWUB - -I want a 1917 build and you will give me what I want. - -This sentence is false. - -Beauty is in the eyes of a Beholder. - -I'm the best at space. - -The first time Yossarian saw the chaplain, he fell madly in love with him. -* -* -* -Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for -the time it did me. There is a wisdom that is woe; but there is a woe that is -madness. And there is a Catskill eagle in some souls that can alike dive down -into the blackest gorges, and soar out of them again and become invisible in -the sunny spaces. 
And even if he for ever flies within the gorge, that gorge -is in the mountains; so that even in his lowest swoop the mountain eagle is -still higher than other birds upon the plain, even though they soar. -* -* -* - -I'm here to commit lines and drop rhymes -* -This is a line to test and try uploading a cl. - -And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on -the Android bots, and it was good. Except on one bot, where it was bad. And -lo, the change was reverted, and GCC went back to 4.6, where code is slower -and less optimized. And verily did it break the build, because artifacts had -been created with 4.8, and alignment was no longer the same, and a great -sadness descended upon the Android GN buildbot, and it did refuseth to build -any more. But the sheriffs thought to themselves: Placebo! Let us clobber the -bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many -seasons. And so they modified the whitespace file with these immortal lines, -and visited it upon the bots, that great destruction might be wrought upon -their outdated binaries. In clobberus, veritas. - -As the git approaches, light begins to shine through the SCM thrice again... -However, the git, is, after all, quite stupid. - -Suddenly Domo-Kun found itself in a room filled with dazzling mirrors. - -A herd of wild gits appears! Time for CQ :D -And one more for sizes.py... - -Sigh. - -It was love at first sight. The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him. - -Cool whitespace change for git-cl land - -Oh god the bots are red! I'm blind! Mmmm, cronuts. - -If you stand on your head, you will get footprints in your hair. - -sigh -sigher -pick up cls - -In the BUILD we trust. -^_^ - -In the masters we don't. diff --git a/build/win_is_xtree_patched.py b/build/win_is_xtree_patched.py deleted file mode 100755 index a50d0ed05b..0000000000 --- a/build/win_is_xtree_patched.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Determines if the VS xtree header has been patched to disable C4702.""" - -import os - - -def IsPatched(): - # TODO(scottmg): For now, just return if we're using the packaged toolchain - # script (because we know it's patched). Another case could be added here to - # query the active VS installation and actually check the contents of xtree. - # http://crbug.com/346399. - return int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1)) == 1 - - -def DoMain(_): - """Hook to be called from gyp without starting a separate python - interpreter.""" - return "1" if IsPatched() else "0" - - -if __name__ == '__main__': - print(DoMain([]))