From 4dc98ccb36c27f588012f54a7856ff6da08515fc Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Fri, 30 Jun 2017 23:54:19 +0200 Subject: [PATCH 01/14] refactoring for multiple build-environments per platform - moved check_node_builtins to build_utils - added --no-shutdown option for vagrant builds - fixed typo in JNI lib output name on win32 (missing lib prefix) - refactored all existing platform configs to align with new build-env APIs - added graceful handling / shutdown of build-envs if user cancels the build early - added powershell scripts for build dependencies - corrected usage of "smb" mounted folders for macos:vagrant target - added win32:vagrant target --- BUILDING.md | 5 +- CMakeLists.txt | 2 +- build.py | 198 +++++++++------------- build_all.py | 48 ------ build_system/build_utils.py | 87 ++++++++++ build_system/config_android.py | 23 ++- build_system/config_linux.py | 20 ++- build_system/config_macos.py | 22 ++- build_system/config_win32.py | 35 ++-- build_system/constants.py | 4 + build_system/cross_build.py | 27 ++- build_system/docker_build.py | 25 ++- build_system/shared_build_steps.py | 23 +-- build_system/shell_build.py | 3 +- build_system/vagrant_build.py | 63 ++++++- docker/win32/install.cmake.ps1 | 26 +++ docker/win32/install.jdk.ps1 | 18 ++ docker/win32/install.maven.ps1 | 19 +++ docker/win32/install.python.ps1 | 34 ++++ docker/win32/install.vscpp.ps1 | 20 +++ vagrant/{macos => }/.gitignore | 0 vagrant/macos/Vagrantfile | 6 +- vagrant/win32/Vagrantfile | 50 ++++++ vagrant/win32/ie-box-automation-plugin.rb | 75 ++++++++ vagrant/win32/switch-to-winrm-plugin.rb | 146 ++++++++++++++++ vagrant/win32/tools/ConfigWinRM.lnk | Bin 0 -> 1367 bytes vagrant/win32/tools/readme.txt | 11 ++ vagrant/win32/tools/setup.winrm.ps1 | 16 ++ 28 files changed, 775 insertions(+), 231 deletions(-) delete mode 100644 build_all.py create mode 100644 docker/win32/install.cmake.ps1 create mode 100644 docker/win32/install.jdk.ps1 create mode 100644 docker/win32/install.maven.ps1 create mode 100644 docker/win32/install.python.ps1 create mode 100644 docker/win32/install.vscpp.ps1 rename vagrant/{macos => }/.gitignore (100%) create mode 100644 vagrant/win32/Vagrantfile create mode 100644 vagrant/win32/ie-box-automation-plugin.rb create mode 100644 vagrant/win32/switch-to-winrm-plugin.rb create mode 100644 vagrant/win32/tools/ConfigWinRM.lnk create mode 100644 vagrant/win32/tools/readme.txt create mode 100644 vagrant/win32/tools/setup.winrm.ps1 diff --git a/BUILDING.md b/BUILDING.md index 38b7739f7..f09313d2c 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -63,11 +63,14 @@ __Inputs__: - `./src/main/` - J2V8 Java test source code - `./src/test/` +- J2V8 build settings + - `./build_settings.py` __Artifacts:__ -- J2V8 platform-specific packages +- Maven platform-specific packages - `./build.out/j2v8_{platform}_{abi}-{j2v8_version}.jar` - e.g. 
`./build.out/j2v8_linux_x86_64-4.8.0-SNAPSHOT.jar` +- Gradle Android packages - `./build/outputs/aar/j2v8-release.aar` --- ## JUnit diff --git a/CMakeLists.txt b/CMakeLists.txt index ffe9c6074..9256387fb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -180,4 +180,4 @@ if(CMAKE_CL_64 OR CMAKE_SIZEOF_VOID_P EQUAL 8) endif() # set library output filename -set_target_properties(j2v8 PROPERTIES OUTPUT_NAME "${PROJECT_NAME}_${J2V8_LIB_PLATFORM_NAME}_${J2V8_LIB_ARCH_NAME}") +set_target_properties(j2v8 PROPERTIES OUTPUT_NAME "${J2V8_LIB_PREFIX}${PROJECT_NAME}_${J2V8_LIB_PLATFORM_NAME}_${J2V8_LIB_ARCH_NAME}") diff --git a/build.py b/build.py index 5e8c24baf..4c83f3171 100644 --- a/build.py +++ b/build.py @@ -1,5 +1,4 @@ import argparse -import collections import os import re import sys @@ -24,10 +23,13 @@ ] composite_steps = [ + # composites c.build_all, c.build_full, c.build_native, + # aliases c.build_java, + c.build_bundle, c.build_test, ] @@ -38,6 +40,19 @@ c.target_win32: win32_config, } +# TODO: shift responsibility to add targets to platform config or no ? +extra_targets = [ + c.target_macos_vagrant, + c.target_win32_docker, + c.target_win32_vagrant, +] + +avail_architectures = [ + c.arch_x86, + c.arch_x64, + c.arch_arm, +] + avail_build_steps = build_step_sequence + composite_steps #----------------------------------------------------------------------- @@ -50,28 +65,13 @@ help="The build target platform name (must be a valid platform string identifier).", dest="target", required=True, - choices=[ - c.target_android, - c.target_linux, - c.target_macos, - c.target_win32, - ]) + choices=sorted(avail_targets.keys() + extra_targets)) parser.add_argument("--arch", "-a", help="The build target architecture identifier (the available architectures are also dependent on the selected platform for a build).", dest="arch", required=True, - choices=[ - c.arch_x86, - c.arch_x64, - c.arch_arm, - ]) - -parser.add_argument("--cross-compile", "-x", - help="Run the actual build in a virtualized sandbox environment, fully decoupled from the build host machine.", - dest="cross_compile", - action="store_const", - const=True) + choices=avail_architectures) parser.add_argument("--node-enabled", "-ne", help="Include the Node.js runtime and builtin node-modules for use in J2V8.", @@ -81,14 +81,19 @@ # NOTE: this option is only used internally to distinguish the running of the build script within # the build-instigator and the actual build-executor (this is relevant when cross-compiling) -parser.add_argument("--build-agent", "-bd", +parser.add_argument("--cross-agent", help=argparse.SUPPRESS, - dest="build_agent", + dest="cross_agent", + type=str) + +parser.add_argument("--no-shutdown", "-nos", + help="When using a cross-compile environment, do not shutdown any of the components when the build is finished or canceled.", + dest="no_shutdown", action="store_const", const=True) parser.add_argument("buildsteps", - help="A single build-step or a list of all the recognized build-steps that should be executed\n" + + help="Pass a single build-step or a list of all the recognized build-steps that should be executed\n" + "(the order of the steps given to the CLI does not matter, the correct order will be restored internally).\n\n" + "the fundamental build steps (in order):\n" + "---------------------------------------\n" + @@ -110,6 +115,7 @@ def parse_build_step_option(step): c.build_full: add_all, c.build_native: add_native, c.build_java: add_managed, + c.build_bundle: add_managed, c.build_test: add_test, # basic steps 
c.build_node_js: lambda: parsed_steps.add(c.build_node_js), @@ -137,70 +143,8 @@ def add_test(): def raise_unhandled_option(): sys.exit("INTERNAL-ERROR: Tried to handle unrecognized build-step") -args = parser.parse_args() - -#----------------------------------------------------------------------- -# Sanity check for the builtin node-module links in J2V8 C++ JNI code -#----------------------------------------------------------------------- -def check_node_builtins(): - j2v8_jni_cpp_path = "jni/com_eclipsesource_v8_V8Impl.cpp" - j2v8_builtins = [] - - with open(j2v8_jni_cpp_path, "r") as j2v8_jni_cpp: - j2v8_code = j2v8_jni_cpp.read() - - tag = "// @node-builtins-force-link" - start = j2v8_code.find(tag) - - end1 = j2v8_code.find("}", start) - end2 = j2v8_code.find("#endif", start) - - if (end1 < 0 and end2 < 0): - return - - end = min(int(e) for e in [end1, end2]) - - if (end < 0): - return - - j2v8_linked_builtins = j2v8_code[start + len(tag):end] - - j2v8_builtins = [m for m in re.finditer(r"^\s*_register_(?P.+)\(\);\s*$", j2v8_linked_builtins, re.M)] - - comment_tokens = ["//", "/*", "*/"] - - j2v8_builtins = [x.group("name") for x in j2v8_builtins if not any(c in x.group(0) for c in comment_tokens)] - - node_src = "node/src/" - node_builtins = [] - for cc_file in os.listdir(node_src): - if (not cc_file.endswith(".cc")): - continue - - with open(node_src + cc_file, "r") as node_cpp: - node_code = node_cpp.read() - - m = re.search(r"NODE_MODULE_CONTEXT_AWARE_BUILTIN\((.*),\s*node::.*\)", node_code) - - if (m is not None): - node_builtins.append(m.group(1)) - - # are all Node.js builtins mentioned? - builtins_ok = collections.Counter(j2v8_builtins) == collections.Counter(node_builtins) - - if (not builtins_ok): - j2v8_extra = [item for item in j2v8_builtins if item not in node_builtins] - j2v8_missing = [item for item in node_builtins if item not in j2v8_builtins] - - error = "ERROR: J2V8 linking builtins code does not match Node.js builtin modules, check " + j2v8_jni_cpp_path - - if (len(j2v8_extra) > 0): - error += "\n\t" + "J2V8 defines unrecognized node-modules: " + str(j2v8_extra) - - if (len(j2v8_missing) > 0): - error += "\n\t" + "J2V8 definition is missing node-modules: " + str(j2v8_missing) - - sys.exit(error) +if __name__ == "__main__": + args = parser.parse_args() #----------------------------------------------------------------------- # Build execution core function @@ -210,18 +154,37 @@ def execute_build(params): if (params.target is None): sys.exit("ERROR: No target platform specified") - if (not params.target in avail_targets): - sys.exit("ERROR: Unrecognized target platform: " + params.target) + def parse_target(target_str): + sep_idx = target_str.find(":") + return (target_str, None) if sep_idx < 0 else target_str[0:sep_idx], target_str[sep_idx+1:] + # if the "target" string {x:y} passed to the CLI exactly identifies a build-target, we just take it and continue. 
+ # This means that if you want to introduce a customized build for a platform named {platform:custom-name}, + # it will be picked up before any further deconstruction of a "target:sub-target" string is done build_target = avail_targets.get(params.target) + target = None + cross_id = None + + # if the passed "target" string is not already a valid build-target, we need to look for sub-targets + if (build_target is None): + target, cross_id = parse_target(params.target) + # otherwise we just go on with it + else: + target = params.target + + if (not target in avail_targets): + sys.exit("ERROR: Unrecognized target platform: " + target) + + build_target = avail_targets.get(target) + if (params.arch is None): sys.exit("ERROR: No target architecture specified") build_architectures = build_target.architectures if (not params.arch in build_architectures): - sys.exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + params.target) + sys.exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target) if (params.buildsteps is None): sys.exit("ERROR: No build-step specified, valid values are: " + ", ".join(avail_build_steps)) @@ -229,9 +192,6 @@ def execute_build(params): if (not params.buildsteps is None and not isinstance(params.buildsteps, list)): params.buildsteps = [params.buildsteps] - # apply default values for unspecified params - params.build_agent = params.build_agent if (hasattr(params, "build_agent")) else None - global parsed_steps parsed_steps.clear() @@ -242,63 +202,69 @@ def execute_build(params): parsed_steps = [step for step in build_step_sequence if step in parsed_steps] platform_steps = build_target.steps + cross_configs = build_target.cross_configs build_cwd = utils.get_cwd() - if (platform_steps.get("cross") is None): - sys.exit("ERROR: cross-compilation is not available/supported for platform: " + params.target) + cross_cfg = None - # if we are the build-instigator (not a cross-compile build-agent) we run some initial checks & setups for the build - if (hasattr(params, "build_agent") and not params.build_agent): + if (cross_id): + if (cross_configs.get(cross_id) is None): + sys.exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_id + "'") + else: + cross_cfg = cross_configs.get(cross_id) + + # if we are the build-instigator (not a cross-compile build-agent) we directly run some initial checks & setups for the build + if (not params.cross_agent): print "Checking Node.js builtins integration consistency..." - check_node_builtins() + utils.check_node_builtins() print "Caching Node.js artifacts..." - curr_node_tag = params.target + "." + params.arch + curr_node_tag = target + "." 
+ params.arch utils.store_nodejs_output(curr_node_tag, build_cwd) - def execute_build_step(compiler, build_step): + def execute_build_step(compiler_inst, build_step): """Executes an immutable copy of the given build-step configuration""" # from this point on, make the build-input immutable to ensure consistency across the whole build process # any actions during the build-step should only be made based on the initial set of variables & conditions # NOTE: this restriction makes it much more easy to reason about the build-process as a whole build_step = immutable.freeze(build_step) - compiler.build(build_step) + compiler_inst.build(build_step) # a cross-compile was requested, we just launch the build-environment and then delegate the requested build-process to the cross-compile environment - if (params.cross_compile): - x_compiler = build_target.cross_compiler() - x_step = platform_steps.get("cross") + if (cross_cfg): + cross_compiler = build_target.cross_compiler(cross_id) # prepare any additional/dynamic parameters for the build and put them into the build-step config - x_step.arch = params.arch - x_step.custom_cmd = "python ./build.py --build-agent -t $PLATFORM -a $ARCH " + ("-ne" if params.node_enabled else "") + " " + " ".join(parsed_steps) - x_step.compiler = x_compiler - x_step.target = build_target + cross_cfg.arch = params.arch + cross_cfg.custom_cmd = "python ./build.py --cross-agent " + cross_id + " -t $PLATFORM -a $ARCH " + ("-ne" if params.node_enabled else "") + " " + " ".join(parsed_steps) + cross_cfg.compiler = cross_compiler + cross_cfg.target = build_target + cross_cfg.no_shutdown = params.no_shutdown - execute_build_step(x_compiler, x_step) + execute_build_step(cross_compiler, cross_cfg) # run the requested build-steps with the given parameters to produce the build-artifacts else: target_compiler = ShellBuildSystem() target_steps = dict(platform_steps) - if (target_steps.has_key("cross")): - x_step = target_steps.get("cross") - del target_steps["cross"] + # this is a build-agent for a cross-compile + if (params.cross_agent): + # the cross-compile step dictates which directory will be used to run the actual build + cross_cfg = cross_configs.get(params.cross_agent) + + if (cross_cfg is None): + sys.exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent) - # this is a build-agent for a cross-compile - if (params.build_agent): - # the cross-compile step dictates which directory will be used to run the actual build - build_cwd = x_step.build_cwd + build_cwd = cross_cfg.build_cwd # execute all requested build steps for step in parsed_steps: target_step = target_steps[step] # prepare any additional/dynamic parameters for the build and put them into the build-step config - target_step.cross_compile = params.cross_compile - target_step.build_agent = params.build_agent if (hasattr(params, "build_agent")) else None + target_step.cross_agent = params.cross_agent target_step.arch = params.arch target_step.build_cwd = build_cwd target_step.compiler = target_compiler diff --git a/build_all.py b/build_all.py deleted file mode 100644 index fd39f8fc6..000000000 --- a/build_all.py +++ /dev/null @@ -1,48 +0,0 @@ -import build as b -import build_system.constants as c - -class Object: - def __init__(self, **attributes): - self.__dict__.update(attributes) - -# Android test -# b.execute_build({"target": c.target_android, "arch": c.arch_arm, "buildsteps": c.build_all, "node_enabled": True, "cross_compile": True}) -# b.execute_build(Object(**{"target": 
c.target_android, "arch": c.arch_x86, "buildsteps": c.build_all, "node_enabled": True, "cross_compile": True})) - -# MacOS test -# b.execute_build(c.target_macos, c.arch_x64, c.build_all, node_enabled = True, cross_compile = True) - -# Win32 test -# b.execute_build(c.target_win32, c.arch_x64, c.build_all, node_enabled = True, cross_compile = False) - -b.execute_build(Object(**{"target": c.target_win32, "arch": c.arch_x64, "buildsteps": "j2v8java", "node_enabled": True, "cross_compile": False})) -# b.execute_build(Object(**{"target": c.target_linux, "arch": c.arch_x64, "buildsteps": c.build_j2v8_java, "node_enabled": True, "cross_compile": True})) - -# b.execute_build(Object(**{"target": c.target_android, "arch": c.arch_x86, "buildsteps": "j2v8java", "node_enabled": True, "cross_compile": False})) - - - - -#b.execute_build(c.target_macos, c.arch_x86, c.build_all, node_enabled = True, cross_compile = True) - -# b.execute_build(c.target_linux, c.arch_x64, c.build_all, True, True) - -# build Node.js only -# def build_njs(target, arch): -# b.execute_build(target, arch, [c.build_node_js], node_enabled = True, cross_compile = True) - -# build_njs(c.target_android, c.arch_arm) -# build_njs(c.target_android, c.arch_x86) - -# build_njs(c.target_linux, c.arch_x86) -# build_njs(c.target_linux, c.arch_x64) - -# # needs reboot here to turn Hyper-V off if Host-OS is Windows - -# build_njs(c.target_macos, c.arch_x86) -# build_njs(c.target_macos, c.arch_x64) - -# # needs reboot here to switch to Windows-Containers - -# build_njs(c.target_win32, c.arch_x86) -# build_njs(c.target_win32, c.arch_x64) diff --git a/build_system/build_utils.py b/build_system/build_utils.py index 34408e59a..394f94799 100644 --- a/build_system/build_utils.py +++ b/build_system/build_utils.py @@ -1,3 +1,4 @@ +import collections import os import re import shutil @@ -5,11 +6,28 @@ import sys from itertools import ifilter +import constants as c + import constants def get_cwd(): return os.getcwd().replace("\\", "/") +def host_cmd_sep(): + return "&& " if os.name == "nt" else "; " + +def is_android(platform): + return c.target_android in platform + +def is_linux(platform): + return c.target_linux in platform + +def is_macos(platform): + return c.target_macos in platform + +def is_win32(platform): + return c.target_win32 in platform + def get_node_branch_version(): out = execute_to_str("git branch", "node") @@ -31,12 +49,18 @@ def get_node_branch_version(): return branch def execute(cmd, cwd = None): + # flush any buffered console output, because popen could block the terminal + sys.stdout.flush() + p = subprocess.Popen(cmd, universal_newlines=True, shell=True, cwd=cwd) return_code = p.wait() if return_code: raise subprocess.CalledProcessError(return_code, cmd) def execute_to_str(cmd, cwd = None): + # flush any buffered console output, because popen could block the terminal + sys.stdout.flush() + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True, shell=True, cwd=cwd) out, err = p.communicate() @@ -123,3 +147,66 @@ def apply_file_template(src, dest, inject_vars_fn): with open(dest, "w") as f: f.write(template_text) + +#----------------------------------------------------------------------- +# Sanity check for the builtin node-module links in J2V8 C++ JNI code +#----------------------------------------------------------------------- +def check_node_builtins(): + j2v8_jni_cpp_path = "jni/com_eclipsesource_v8_V8Impl.cpp" + j2v8_builtins = [] + + with open(j2v8_jni_cpp_path, "r") as j2v8_jni_cpp: + j2v8_code = 
j2v8_jni_cpp.read() + + tag = "// @node-builtins-force-link" + start = j2v8_code.find(tag) + + end1 = j2v8_code.find("}", start) + end2 = j2v8_code.find("#endif", start) + + if (end1 < 0 and end2 < 0): + return + + end = min(int(e) for e in [end1, end2]) + + if (end < 0): + return + + j2v8_linked_builtins = j2v8_code[start + len(tag):end] + + j2v8_builtins = [m for m in re.finditer(r"^\s*_register_(?P.+)\(\);\s*$", j2v8_linked_builtins, re.M)] + + comment_tokens = ["//", "/*", "*/"] + + j2v8_builtins = [x.group("name") for x in j2v8_builtins if not any(c in x.group(0) for c in comment_tokens)] + + node_src = "node/src/" + node_builtins = [] + for cc_file in os.listdir(node_src): + if (not cc_file.endswith(".cc")): + continue + + with open(node_src + cc_file, "r") as node_cpp: + node_code = node_cpp.read() + + m = re.search(r"NODE_MODULE_CONTEXT_AWARE_BUILTIN\((.*),\s*node::.*\)", node_code) + + if (m is not None): + node_builtins.append(m.group(1)) + + # are all Node.js builtins mentioned? + builtins_ok = collections.Counter(j2v8_builtins) == collections.Counter(node_builtins) + + if (not builtins_ok): + j2v8_extra = [item for item in j2v8_builtins if item not in node_builtins] + j2v8_missing = [item for item in node_builtins if item not in j2v8_builtins] + + error = "ERROR: J2V8 linking builtins code does not match Node.js builtin modules, check " + j2v8_jni_cpp_path + + if (len(j2v8_extra) > 0): + error += "\n\t" + "J2V8 defines unrecognized node-modules: " + str(j2v8_extra) + + if (len(j2v8_missing) > 0): + error += "\n\t" + "J2V8 definition is missing node-modules: " + str(j2v8_missing) + + sys.exit(error) \ No newline at end of file diff --git a/build_system/config_android.py b/build_system/config_android.py index 950b33263..2c0562170 100644 --- a/build_system/config_android.py +++ b/build_system/config_android.py @@ -4,14 +4,20 @@ import shared_build_steps as u import build_utils as b -android_config = PlatformConfig(c.target_android, [c.arch_x86, c.arch_arm], DockerBuildSystem) +android_config = PlatformConfig(c.target_android, [c.arch_x86, c.arch_arm]) -android_config.cross_config(BuildStep( - name="cross-compile-host", - platform=c.target_android, - host_cwd="$CWD/docker", - build_cwd="/j2v8", -)) +android_config.set_cross_configs({ + "docker": BuildStep( + name="cross-compile-host", + platform=c.target_android, + host_cwd="$CWD/docker", + build_cwd="/j2v8", + ) +}) + +android_config.set_cross_compilers({ + "docker": DockerBuildSystem +}) android_config.set_file_abis({ c.arch_arm: "armeabi-v7a", @@ -60,7 +66,6 @@ def build_j2v8_jni(config): android_config.build_step(c.build_j2v8_jni, build_j2v8_jni) #----------------------------------------------------------------------- def build_j2v8_java(config): - # TODO: pass in file arch ABI return \ u.clearNativeLibs(config) + \ u.copyNativeLibs(config) + \ @@ -78,7 +83,7 @@ def build_j2v8_junit(config): u.gradle("connectedCheck --info") # we are running a build directly on the host shell - if (not config.build_agent): + if (not config.cross_agent): # just run the tests on the host directly return test_cmds diff --git a/build_system/config_linux.py b/build_system/config_linux.py index 74b012005..6d9aaecfb 100644 --- a/build_system/config_linux.py +++ b/build_system/config_linux.py @@ -3,14 +3,20 @@ from docker_build import DockerBuildSystem import shared_build_steps as u -linux_config = PlatformConfig(c.target_linux, [c.arch_x86, c.arch_x64], DockerBuildSystem) +linux_config = PlatformConfig(c.target_linux, [c.arch_x86, c.arch_x64]) 
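# NOTE (sketch, not part of the patch): the "platform:host" targets accepted by
# build.py resolve against registrations like the ones above/below -- the part
# after the colon selects an entry from cross_configs / cross_compilers.
# A standalone illustration of that split, with the tuple grouping of build.py's
# parse_target written out explicitly:
#
#   def parse_target(target_str):
#       sep_idx = target_str.find(":")
#       if sep_idx < 0:
#           return (target_str, None)
#       return (target_str[0:sep_idx], target_str[sep_idx + 1:])
#
#   assert parse_target("linux") == ("linux", None)              # plain platform target
#   assert parse_target("macos:vagrant") == ("macos", "vagrant") # platform + cross-compile host
#   assert parse_target("win32:docker") == ("win32", "docker")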
-linux_config.cross_config(BuildStep( - name="cross-compile-host", - platform=c.target_linux, - host_cwd="$CWD/docker", - build_cwd="/j2v8", -)) +linux_config.set_cross_configs({ + "docker": BuildStep( + name="cross-compile-host", + platform=c.target_linux, + host_cwd="$CWD/docker", + build_cwd="/j2v8", + ) +}) + +linux_config.set_cross_compilers({ + "docker": DockerBuildSystem +}) linux_config.set_file_abis({ c.arch_x64: "x86_64", diff --git a/build_system/config_macos.py b/build_system/config_macos.py index 412a38b19..e6decc607 100644 --- a/build_system/config_macos.py +++ b/build_system/config_macos.py @@ -1,16 +1,24 @@ +import os import constants as c from cross_build import BuildStep, PlatformConfig from vagrant_build import VagrantBuildSystem import shared_build_steps as u -macos_config = PlatformConfig(c.target_macos, [c.arch_x86, c.arch_x64], VagrantBuildSystem) +macos_config = PlatformConfig(c.target_macos, [c.arch_x86, c.arch_x64]) -macos_config.cross_config(BuildStep( - name="cross-compile-host", - platform=c.target_macos, - host_cwd="$CWD/vagrant/$PLATFORM", - build_cwd="/Users/vagrant/j2v8", -)) +macos_config.set_cross_configs({ + "vagrant": BuildStep( + name="cross-compile-host", + platform=c.target_macos, + host_cwd="$CWD/vagrant/$PLATFORM", + build_cwd="/Users/vagrant/j2v8", + pre_build_cmd = u.setEnvVar("VAGRANT_FILE_SHARE_TYPE", "smb" if os.name == "nt" else "virtualbox")[0], + ) +}) + +macos_config.set_cross_compilers({ + "vagrant": VagrantBuildSystem +}) macos_config.set_file_abis({ c.arch_x64: "x86_64", diff --git a/build_system/config_win32.py b/build_system/config_win32.py index 09024e5f5..78dde6b71 100644 --- a/build_system/config_win32.py +++ b/build_system/config_win32.py @@ -1,16 +1,31 @@ +import os import constants as c from cross_build import BuildStep, PlatformConfig from docker_build import DockerBuildSystem +from vagrant_build import VagrantBuildSystem import shared_build_steps as u -win32_config = PlatformConfig(c.target_win32, [c.arch_x86, c.arch_x64], DockerBuildSystem) +win32_config = PlatformConfig(c.target_win32, [c.arch_x86, c.arch_x64]) -win32_config.cross_config(BuildStep( - name="cross-compile-host", - platform=c.target_win32, - host_cwd="$CWD/docker", - build_cwd="C:/j2v8", -)) +win32_config.set_cross_configs({ + "docker": BuildStep( + name="docker-compile-host", + platform=c.target_win32, + host_cwd="$CWD/docker", + build_cwd="C:/j2v8", + ), + "vagrant": BuildStep( + name="vagrant-compile-host", + platform=c.target_win32, + host_cwd="$CWD/vagrant/$PLATFORM", + build_cwd="C:/j2v8", + ) +}) + +win32_config.set_cross_compilers({ + "docker": DockerBuildSystem, + "vagrant": VagrantBuildSystem, +}) win32_config.set_file_abis({ c.arch_x64: "x86_64", @@ -28,8 +43,8 @@ def build_node_js(config): #----------------------------------------------------------------------- def build_j2v8_cmake(config): cmake_gen_suffix = " Win64" if config.arch == c.arch_x64 else "" - cmake_x_compile_flag = "-DJ2V8_CROSS_COMPILE=1" if config.build_agent else "" - cmake_pdb_fix_flag = "-DJ2V8_WIN32_PDB_DOCKER_FIX=1" if config.build_agent else "" + cmake_x_compile_flag = "-DJ2V8_CROSS_COMPILE=1" if config.cross_agent else "" + cmake_pdb_fix_flag = "-DJ2V8_WIN32_PDB_DOCKER_FIX=1" if config.cross_agent == "docker" else "" return \ u.shell("mkdir", "cmake.out/$PLATFORM.$ARCH") + \ ["cd cmake.out\\$PLATFORM.$ARCH"] + \ @@ -40,7 +55,7 @@ def build_j2v8_cmake(config): #----------------------------------------------------------------------- def build_j2v8_jni(config): # show docker 
container memory usage / limit - show_mem = ["powershell C:/temp/mem.ps1"] if config.build_agent else [] + show_mem = ["powershell C:/temp/mem.ps1"] if config.cross_agent == "docker" else [] return \ show_mem + \ diff --git a/build_system/constants.py b/build_system/constants.py index 66e949c82..464560466 100644 --- a/build_system/constants.py +++ b/build_system/constants.py @@ -2,7 +2,10 @@ target_android = 'android' target_linux = 'linux' target_macos = 'macos' +target_macos_vagrant = 'macos:vagrant' target_win32 = 'win32' +target_win32_docker = 'win32:docker' +target_win32_vagrant = 'win32:vagrant' # target architectures arch_x86 = 'x86' @@ -21,4 +24,5 @@ build_full = 'full' build_native = 'native' build_java = 'java' +build_bundle = 'bundle' build_test = 'test' diff --git a/build_system/cross_build.py b/build_system/cross_build.py index bd95bf7d6..6d43ea3bd 100644 --- a/build_system/cross_build.py +++ b/build_system/cross_build.py @@ -6,12 +6,13 @@ import build_system.build_utils as utils class PlatformConfig(): - def __init__(self, name, architectures, cross_compiler): + def __init__(self, name, architectures): self.name = name self.architectures = architectures - self.cross_compiler = cross_compiler - self.steps = {} self.file_abis = {} + self.steps = {} + self.cross_compilers = {} + self.cross_configs = {} def build_step(self, target, build_fn): self.steps[target] = BuildStep( @@ -20,6 +21,20 @@ def build_step(self, target, build_fn): build=build_fn, ) + def set_cross_compilers(self, compilers_decl): + self.cross_compilers = compilers_decl + + def cross_compiler(self, cross_host_name): + compiler = self.cross_compilers.get(cross_host_name) + + if (not compiler): + sys.exit("ERROR: internal error while looking for cross-compiler: " + cross_host_name) + + return compiler() + + def set_cross_configs(self, cross_configs_decl): + self.cross_configs = cross_configs_decl + def set_file_abis(self, abis_decl): self.file_abis = abis_decl @@ -27,17 +42,15 @@ def file_abi(self, arch): file_abi = self.file_abis.get(arch) return file_abi if not file_abi is None else arch - def cross_config(self, cross_config): - self.steps['cross'] = cross_config - class BuildStep: - def __init__(self, name, platform, build = [], build_cwd = None, host_cwd = None): + def __init__(self, name, platform, build = [], build_cwd = None, host_cwd = None, pre_build_cmd = None): self.name = name self.platform = platform self.build = build self.build_cwd = build_cwd self.host_cwd = host_cwd self.custom_cmd = None + self.pre_build_cmd = pre_build_cmd class BuildSystem: __metaclass__ = ABCMeta diff --git a/build_system/docker_build.py b/build_system/docker_build.py index fb88043ee..9fb4a02b8 100644 --- a/build_system/docker_build.py +++ b/build_system/docker_build.py @@ -1,4 +1,5 @@ +import atexit import re import subprocess import sys @@ -34,7 +35,7 @@ def health_check(self, config): docker_version = version_match.group(1) - docker_req_platform = "windows" if config.platform == c.target_win32 else "linux" + docker_req_platform = "windows" if utils.is_win32(config.platform) else "linux" # check if the docker engine is running the expected container platform (linux or windows) if (docker_req_platform not in docker_version): @@ -51,9 +52,11 @@ def pre_build(self, config): def exec_build(self, config): print ("DOCKER building " + config.platform + "@" + config.arch + " => " + config.name) - mount_point = "C:/j2v8" if config.platform == c.target_win32 else "/j2v8" - shell_invoke = "cmd /C" if config.platform == c.target_win32 
else "/bin/bash -c" - cmd_separator = "&&" if config.platform == c.target_win32 else ";" + is_win32 = utils.is_win32(config.platform) + + mount_point = "C:/j2v8" if is_win32 else "/j2v8" + shell_invoke = "cmd /C" if is_win32 else "/bin/bash -c" + cmd_separator = "&&" if is_win32 else ";" build_cmd = config.custom_cmd or (cmd_separator + " ").join(config.build(config)) @@ -61,16 +64,24 @@ def exec_build(self, config): # NOTE: the --memory 3g setting is imporant for windows docker builds, # since the windows docker engine defaults to a 1gb limit which is not enough to run the Node.js build with MSBuild - if (config.platform == c.target_win32): + if (utils.is_win32(config.platform)): memory_option = "--memory 3g" - platform_cmd = "docker run " + memory_option + " --privileged -P -v $CWD:" + mount_point + \ + docker_run_str = "docker run " + memory_option + " --privileged -P -v $CWD:" + mount_point + \ " --name j2v8.$PLATFORM.$ARCH j2v8-$PLATFORM " + shell_invoke + " \"cd $BUILD_CWD" + cmd_separator + " " + build_cmd + "\"" - docker_run_str = self.inject_env(platform_cmd, config) + docker_run_str = self.inject_env(docker_run_str, config) print docker_run_str + docker_stop_str = self.inject_env("docker stop j2v8.$PLATFORM.$ARCH", config) + + def cli_exit_event(): + print "Waiting for docker process to exit..." + self.exec_host_cmd(docker_stop_str, config) + + atexit.register(cli_exit_event) + self.exec_host_cmd(docker_run_str, config) def post_build(self, config): diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py index 260541db5..2c3caf409 100644 --- a/build_system/shared_build_steps.py +++ b/build_system/shared_build_steps.py @@ -4,21 +4,22 @@ import constants as c import build_settings as s +import build_utils as utils build_cmd = "mvn verify -DskipTests -e" clean_build_cmd = "mvn clean verify -DskipTests -e" run_tests_cmd = "mvn test -e" def gradleCmd(): - return "gradlew" if os.name == 'nt' else "gradle" + return "gradlew" if os.name == "nt" else "gradle" def gradle(cmd): return [ gradleCmd() + " " + cmd, ] -def setEnvVar(config, name, value): - if (os.name == 'nt'): +def setEnvVar(name, value): + if (os.name == "nt"): return ["set \"" + name + "=" + value + "\""] else: return ["export " + name + "=" + value] @@ -26,7 +27,7 @@ def setEnvVar(config, name, value): def clearNativeLibs(config): lib_pattern = "src/main/resources/libj2v8_*" - if (config.platform == c.target_android): + if (utils.is_android(config.platform)): lib_pattern = "src/main/jniLibs/*/libj2v8.so" libs = glob.glob(lib_pattern) @@ -40,11 +41,11 @@ def copyNativeLibs(config): platform_cmake_out = "cmake.out/" + config.platform + "." 
+ config.arch + "/" lib_ext = ".so" - if (config.platform == c.target_win32): + if (utils.is_win32(config.platform)): platform_cmake_out += "Debug/" if hasattr(config, 'debug') and config.debug else "Release/" lib_ext = ".dll" - elif (config.platform == c.target_macos): + elif (utils.is_macos(config.platform)): lib_ext = ".dylib" lib_pattern = platform_cmake_out + "*j2v8_*" + file_abi + lib_ext @@ -58,7 +59,7 @@ def copyNativeLibs(config): copy_cmds = [] lib_target_path = None - if (config.platform == c.target_android): + if (utils.is_android(config.platform)): lib_target_path = "src/main/jniLibs/" + file_abi # directory path copy_cmds += shell("mkdir", lib_target_path) lib_target_path += "/libj2v8.so" # final lib file path @@ -75,13 +76,13 @@ def setBuildEnv(config): file_abi = config.target.file_abi(config.arch) return \ - setEnvVar(config, "J2V8_PLATFORM_NAME", config.platform) + \ - setEnvVar(config, "J2V8_ARCH_NAME", file_abi) + \ - setEnvVar(config, "J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) + setEnvVar("J2V8_PLATFORM_NAME", config.platform) + \ + setEnvVar("J2V8_ARCH_NAME", file_abi) + \ + setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) def setVersionEnv(config): return \ - setEnvVar(config, "J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) + setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) def copyOutput(config): file_abi = config.target.file_abi(config.arch) diff --git a/build_system/shell_build.py b/build_system/shell_build.py index e6a0d81cf..7f1e6c74a 100644 --- a/build_system/shell_build.py +++ b/build_system/shell_build.py @@ -2,6 +2,7 @@ import sys from cross_build import BuildSystem import constants as c +import build_utils as utils class ShellBuildSystem(BuildSystem): def clean(self, config): @@ -9,7 +10,7 @@ def clean(self, config): def health_check(self, config): try: - shell_check_cmd = "ver" if config.platform == c.target_win32 else "bash --version" + shell_check_cmd = "ver" if utils.is_win32(config.platform) else "bash --version" self.exec_cmd(shell_check_cmd, config) except subprocess.CalledProcessError: sys.exit("ERROR: Failed Shell build-system health check!") diff --git a/build_system/vagrant_build.py b/build_system/vagrant_build.py index 5a3f99db8..e0754c88d 100644 --- a/build_system/vagrant_build.py +++ b/build_system/vagrant_build.py @@ -1,5 +1,7 @@ +import atexit import subprocess import sys +import build_utils as utils from cross_build import BuildSystem class VagrantBuildSystem(BuildSystem): @@ -13,15 +15,68 @@ def health_check(self, config): sys.exit("ERROR: Failed Vagrant build-system health check, make sure Vagrant is available and running!") def pre_build(self, config): - self.exec_host_cmd("vagrant up", config) + vagrant_start_cmd = "vagrant up" + + if (config.pre_build_cmd): + vagrant_start_cmd = config.pre_build_cmd + utils.host_cmd_sep() + vagrant_start_cmd + + self.exec_host_cmd(vagrant_start_cmd, config) def exec_build(self, config): print ("VAGRANT building " + config.platform + "@" + config.arch + " => " + config.name) - build_cmd = config.custom_cmd or "; ".join(config.build(config)) - platform_cmd = "vagrant ssh -c '" + self.inject_env("cd $BUILD_CWD; " + build_cmd, config) + "'" + # shell = "powershell -c \"cmd /C " if utils.is_win32(config.platform) else "ssh -c " + # cmd_sep = "&& " if utils.is_win32(config.platform) else "; " + vagrant_run_cmd = None + + if (utils.is_win32(config.platform)): + # cmd_sep = "\n" + cmd_sep = "; " + # cmd_sep = "&& " + build_cmd = config.custom_cmd or cmd_sep.join(config.build(config)) + # V1 + build_cmd = 
self.inject_env("cd $BUILD_CWD" + cmd_sep + build_cmd, config) + + # host_cmd_file = self.inject_env("$HOST_CWD/cmd_temp.bat", config) + # agent_cmd_file = self.inject_env("$BUILD_CWD/vagrant/win32/cmd_temp.bat", config) + + # with open(host_cmd_file, 'w') as f: + # f.write(build_cmd) + + # vagrant_run_cmd = "vagrant powershell -c \"cmd /C " + agent_cmd_file + "\"" + # vagrant_run_cmd = "vagrant powershell -c \"Start-Process cmd.exe -RedirectStandardOutput -NoNewWindow -Wait -ArgumentList @('/C', '" + agent_cmd_file + "')\"" + + # NOTE: working, just the exit code seems off + # vagrant_run_cmd = "vagrant powershell -c \"cmd /C " + agent_cmd_file + " | Out-Host\"" + # vagrant_run_cmd = "vagrant powershell -c \"cmd /C " + agent_cmd_file + "\"" - self.exec_host_cmd(platform_cmd, config) + # V1 + vagrant_run_cmd = "vagrant powershell -c \"Invoke-Command { " + build_cmd + " } -ErrorAction Stop\"" + # vagrant_run_cmd = "vagrant powershell -c \"Set-Location -Path $BUILD_CWD" + cmd_sep + "Invoke-Command -ScriptBlock {" + build_cmd + "} -ErrorAction Stop | Select-Object value\"" + # vagrant_run_cmd = self.inject_env(vagrant_run_cmd, config) + + # vagrant_run_cmd = "vagrant powershell -c \"Invoke-Command { " + agent_cmd_file + " } -NoNewScope -ErrorAction Stop\"" + print "run: " + vagrant_run_cmd + else: + cmd_sep = "; " + build_cmd = config.custom_cmd or cmd_sep.join(config.build(config)) + build_cmd = self.inject_env("cd $BUILD_CWD" + cmd_sep + build_cmd, config) + vagrant_run_cmd = "vagrant ssh -c '" + build_cmd + "'" + + def cli_exit_event(): + if (config.no_shutdown): + return + + print "Waiting for vagrant virtual-machine to exit..." + self.exec_host_cmd("vagrant halt", config) + + atexit.register(cli_exit_event) + + self.exec_host_cmd(vagrant_run_cmd, config) def post_build(self, config): + if (config.no_shutdown): + return + self.exec_host_cmd("vagrant halt", config) + return diff --git a/docker/win32/install.cmake.ps1 b/docker/win32/install.cmake.ps1 new file mode 100644 index 000000000..890deb82e --- /dev/null +++ b/docker/win32/install.cmake.ps1 @@ -0,0 +1,26 @@ + +# CMake version +$env:CMAKE_VERSION = '3.9.0-rc2'; + +[Environment]::SetEnvironmentVariable('CMAKE_VERSION', $env:CMAKE_VERSION, [EnvironmentVariableTarget]::Process); + +# download CMake archive +$url = ('https://cmake.org/files/v3.9/cmake-{0}-win64-x64.zip' -f $env:CMAKE_VERSION); +Write-Host ('Downloading {0} ...' 
-f $url); +(New-Object System.Net.WebClient).DownloadFile($url, 'C:\cmake.zip'); + +# extract CMake archive +Write-Host 'Installing CMake ...'; +C:/j2v8/docker/win32/unzip.ps1 "C:/cmake.zip" "C:/" + +# add CMake to path +$env:PATH = 'C:\cmake-'+$env:CMAKE_VERSION+'-win64-x64\bin;'+$env:PATH; +[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine); + +Write-Host 'Verifying install ...'; +Write-Host 'cmake -version'; cmake -version; + +Write-Host 'Removing ...'; +Remove-Item C:\cmake.zip -Force; + +Write-Host 'Complete.'; diff --git a/docker/win32/install.jdk.ps1 b/docker/win32/install.jdk.ps1 new file mode 100644 index 000000000..f5c94d9f9 --- /dev/null +++ b/docker/win32/install.jdk.ps1 @@ -0,0 +1,18 @@ +# download JDK +Write-Host 'Downloading ...'; +C:/j2v8/docker/win32/wget.ps1 ` + http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-windows-x64.exe ` + C:\jdk.exe ` + "oraclelicense=accept-securebackup-cookie" + +Write-Host 'Installing JDK ...'; +Start-Process C:/jdk.exe -Wait ` + -ArgumentList @('/s', 'ADDLOCAL="ToolsFeature,SourceFeature"'); + +$env:JAVA_HOME = 'C:\Program Files\Java\jdk1.8.0_131'; +[Environment]::SetEnvironmentVariable('JAVA_HOME', $env:JAVA_HOME, [EnvironmentVariableTarget]::Machine); + +Write-Host 'Removing ...'; +Remove-Item C:\jdk.exe -Force; + +Write-Host 'Complete.'; diff --git a/docker/win32/install.maven.ps1 b/docker/win32/install.maven.ps1 new file mode 100644 index 000000000..289b7143b --- /dev/null +++ b/docker/win32/install.maven.ps1 @@ -0,0 +1,19 @@ + +Write-Host 'Downloading ...'; +C:/j2v8/docker/win32/wget.ps1 ` + http://www-eu.apache.org/dist/maven/maven-3/3.5.0/binaries/apache-maven-3.5.0-bin.zip ` + C:\maven.zip + +Write-Host 'Installing Maven ...'; +C:/j2v8/docker/win32/unzip.ps1 "C:/maven.zip" "C:/" + +$env:PATH = 'C:\apache-maven-3.5.0\bin;'+$env:PATH; +[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine); + +Write-Host 'Verifying install ...'; +Write-Host 'mvn -version'; mvn -version; + +Write-Host 'Removing ...'; +Remove-Item C:\maven.zip -Force; + +Write-Host 'Complete.'; diff --git a/docker/win32/install.python.ps1 b/docker/win32/install.python.ps1 new file mode 100644 index 000000000..5f2144a36 --- /dev/null +++ b/docker/win32/install.python.ps1 @@ -0,0 +1,34 @@ + +$env:PYTHON_VERSION = '2.7.13'; +$env:PYTHON_RELEASE = '2.7.13'; + +[Environment]::SetEnvironmentVariable('PYTHON_VERSION', $env:PYTHON_VERSION, [EnvironmentVariableTarget]::Process); +[Environment]::SetEnvironmentVariable('PYTHON_RELEASE', $env:PYTHON_RELEASE, [EnvironmentVariableTarget]::Process); + +$url = ('https://www.python.org/ftp/python/{0}/python-{1}.amd64.msi' -f $env:PYTHON_RELEASE, $env:PYTHON_VERSION); +Write-Host ('Downloading {0} ...' 
-f $url); +(New-Object System.Net.WebClient).DownloadFile($url, 'C:\python.msi'); + +Write-Host 'Installing Python ...'; +# https://www.python.org/download/releases/2.4/msi/ +Start-Process msiexec -Wait ` + -ArgumentList @( + '/i', + 'C:\python.msi', + '/quiet', + '/qn', + 'TARGETDIR=C:\Python', + 'ALLUSERS=1', + 'ADDLOCAL=DefaultFeature,Extensions,TclTk,Tools,PrependPath' + ); + +# the installer updated PATH, so we should refresh our local value +$env:PATH = [Environment]::GetEnvironmentVariable('PATH', [EnvironmentVariableTarget]::Machine); + +Write-Host 'Verifying install ...'; +Write-Host 'python --version'; python --version; + +Write-Host 'Removing ...'; +Remove-Item C:\python.msi -Force; + +Write-Host 'Complete.'; diff --git a/docker/win32/install.vscpp.ps1 b/docker/win32/install.vscpp.ps1 new file mode 100644 index 000000000..7bc58cb18 --- /dev/null +++ b/docker/win32/install.vscpp.ps1 @@ -0,0 +1,20 @@ +# source: https://github.com/friism/dockerfiles/blob/master/vs-build-tools/17/Dockerfile +# install MSBuild & C++ build tools +Invoke-WebRequest "http://go.microsoft.com/fwlink/?LinkId=691126" ` + -OutFile C:\visualcppbuildtools_full.exe -UseBasicParsing; + +Write-Host 'Installing VS C++ ...'; +Start-Process -FilePath 'C:\visualcppbuildtools_full.exe' -ArgumentList '/quiet', '/NoRestart' -Wait; + +# MSbuild path +# NOTE: can add "\amd64" after "...\Bin" for x64 version of the compiler +$env:PATH = 'C:\Program Files (x86)\MSBuild\14.0\Bin;'+$env:PATH; +[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine); + +Write-Host 'Verifying install ...'; +Write-Host 'msbuild /version'; msbuild /version; + +Write-Host 'Removing ...'; +Remove-Item C:\visualcppbuildtools_full.exe -Force; + +Write-Host 'Complete.'; diff --git a/vagrant/macos/.gitignore b/vagrant/.gitignore similarity index 100% rename from vagrant/macos/.gitignore rename to vagrant/.gitignore diff --git a/vagrant/macos/Vagrantfile b/vagrant/macos/Vagrantfile index 20be04f39..a6ec02f7a 100644 --- a/vagrant/macos/Vagrantfile +++ b/vagrant/macos/Vagrantfile @@ -39,6 +39,8 @@ echo "export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_131.jdk/Conten echo "export PATH=/opt/apache-maven-3.5.0/bin:$PATH" >> /Users/vagrant/.bash_profile SCRIPT +fs_type = ENV['VAGRANT_FILE_SHARE_TYPE'] || "nfs" + Vagrant.configure(2) do |config| config.vm.box = "http://files.dryga.com/boxes/osx-sierra-0.3.1.box" @@ -51,8 +53,8 @@ Vagrant.configure(2) do |config| v.cpus = 4 end - config.vm.synced_folder ".", "/vagrant", type: "smb" - config.vm.synced_folder "../../", "/Users/vagrant/j2v8", type: "smb", smb_username: ENV['VAGRANT_SMB_USER'], smb_password: ENV['VAGRANT_SMB_PASSWORD'] + config.vm.synced_folder ".", "/vagrant", type: fs_type + config.vm.synced_folder "../../", "/Users/vagrant/j2v8", type: fs_type, smb_username: ENV['VAGRANT_SMB_USER'], smb_password: ENV['VAGRANT_SMB_PASSWORD'] config.vm.provision "shell", inline: $fix_xcode_paths config.vm.provision "shell", inline: $provision_java diff --git a/vagrant/win32/Vagrantfile b/vagrant/win32/Vagrantfile new file mode 100644 index 000000000..a3cd131d5 --- /dev/null +++ b/vagrant/win32/Vagrantfile @@ -0,0 +1,50 @@ + +require_relative 'ie-box-automation-plugin' +require_relative 'switch-to-winrm-plugin' + +fs_type = ENV['VAGRANT_FILE_SHARE_TYPE'] || "virtualbox" + +Vagrant.configure(2) do |config| + + config.vm.box = "Microsoft/EdgeOnWindows10" + config.vm.hostname = "j2v8-win32-x64" + + # source: https://github.com/danielmenezesbr/modernie-winrm + 
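  # the very first "vagrant up" talks to the box over plain SSH; once it has been
  # provisioned (see provisioned? in ie-box-automation-plugin.rb), all subsequent
  # runs use the WinRM communicator configured below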
config.vm.boot_timeout = 5000 + + config.vm.guest = :windows + + config.vm.communicator = :winrm if provisioned? + config.winrm.username = "IEUser" if provisioned? + config.winrm.password = "Passw0rd!" if provisioned? + config.winrm.timeout = 50000 if provisioned? + config.winrm.retry_delay = 30 if provisioned? + config.winrm.retry_limit = 1000 if provisioned? + + config.ssh.username = "IEUser" + config.ssh.password = "Passw0rd!" + config.ssh.insert_key = false + + config.vm.box_check_update = false + + config.vm.provider "virtualbox" do |v| + v.name = "j2v8.win32.x64" + v.memory = 8192 + v.cpus = 4 + end + + config.vm.synced_folder ".", "/vagrant", disabled: true if not provisioned? + config.vm.synced_folder "../../", "C:/j2v8", type: fs_type, smb_username: ENV['VAGRANT_SMB_USER'], smb_password: ENV['VAGRANT_SMB_PASSWORD'] if provisioned? + + config.vm.provision "file", source: "./tools", destination: "c:/users/IEUser" + config.vm.provision "winrm", type: "ie_box_automation" + + config.vm.provision :switch_to_winrm + + config.vm.provision "install-python", type:"shell", inline: "C:/J2V8/docker/win32/install.python.ps1" + config.vm.provision "install-vsc++", type:"shell", inline: "C:/J2V8/docker/win32/install.vscpp.ps1" + config.vm.provision "install-cmake", type:"shell", inline: "C:/J2V8/docker/win32/install.cmake.ps1" + config.vm.provision "install-jdk", type:"shell", inline: "C:/J2V8/docker/win32/install.jdk.ps1" + config.vm.provision "install-maven", type:"shell", inline: "C:/J2V8/docker/win32/install.maven.ps1" + +end diff --git a/vagrant/win32/ie-box-automation-plugin.rb b/vagrant/win32/ie-box-automation-plugin.rb new file mode 100644 index 000000000..927d1d2bb --- /dev/null +++ b/vagrant/win32/ie-box-automation-plugin.rb @@ -0,0 +1,75 @@ +# -*- mode: ruby -*- +# vi: set ft=ruby : + +## +# If you copy this file, dont't delete this comment. +# This Vagrantfile was created by Daniel Menezes: +# https://github.com/danielmenezesbr/modernie-winrm +# E-mail: danielmenezes at gmail dot com +## + +require 'rubygems' +require 'net/ssh' + +# TODO +# ==== +# Uses config.ssh in Net::SSH.start +# test in win8/10 +# add activate (view desktop information) +# use logger for debug + + +# Function to check whether VM was already provisioned +def provisioned?(vm_name='default', provider='virtualbox') + File.exist?(".vagrant/machines/#{vm_name}/#{provider}/action_provision") +end + +module LocalCommand + + class Config < Vagrant.plugin("2", :config) + #attr_accessor :command + end + + class MyPlugin < Vagrant.plugin("2") + name "ie_box_automation" + + config(:ie_box_automation, :provisioner) do + Config + end + + provisioner(:ie_box_automation) do + Provisioner + end + end + + class Provisioner < Vagrant.plugin("2", :provisioner) + def provision + #result = system "#{config.command}" + begin + puts "Establishing SSH connection..." + ssh = Net::SSH.start("localhost", "IEUser", :password => "Passw0rd!", :port => 2222) + + puts "Disabling firewall..." + res = ssh.exec!("NetSh Advfirewall set allprofiles state off") + #for debug + #puts res + + puts "Changing network location..." + res = ssh.exec!("./tools/NLMtool_staticlib.exe -setcategory private") + #for debug + #puts res + + puts "Turn off User Account Control..." + res = ssh.exec!("cmd /c \"reg add HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\Policies\\System /v EnableLUA /d 0 /t REG_DWORD /f /reg:64\"") + + puts "Creating link to config WinRM on Startup..." 
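        # stage the ConfigWinRM.lnk shortcut in the Startup folder, so that WinRM is
        # configured automatically on the next boot of the box (see tools/setup.winrm.ps1)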
+ res = ssh.exec!("mv ./tools/ConfigWinRM.lnk \"/cygdrive/c/Users/IEUser/AppData/Roaming/Microsoft/Windows/Start Menu/Programs/Startup\"") + #for debug + #puts res + ssh.close + rescue Exception => e + puts "uncaught #{e} exception while handling connection: #{e.message}" + end + end + end +end diff --git a/vagrant/win32/switch-to-winrm-plugin.rb b/vagrant/win32/switch-to-winrm-plugin.rb new file mode 100644 index 000000000..288d378d1 --- /dev/null +++ b/vagrant/win32/switch-to-winrm-plugin.rb @@ -0,0 +1,146 @@ + +require 'vagrant' + +# Define the plugin. +class SwitchToWinRMPlugin < Vagrant.plugin('2') + name 'Switch to WinRM Plugin' + + # This plugin provides a provisioner called windows_reboot. + provisioner 'switch_to_winrm' do + + # Create a provisioner. + class SwitchToWinRMProvisioner < Vagrant.plugin('2', :provisioner) + # Initialization, define internal state. Nothing needed. + def initialize(machine, config) + super(machine, config) + end + + # Configuration changes to be done. Nothing needed here either. + def configure(root_config) + super(root_config) + end + + # Run the provisioning. + def provision + _provisioned = @machine.config.vm.communicator == :winrm + + env = @machine.instance_variable_get(:@env) + + if not _provisioned + puts "action_halt..." + env.action_runner.run(VagrantPlugins::ProviderVirtualBox::Action::action_halt(), { + machine: @machine, + ui: @machine.ui, + }) + # end + + puts "switching comm..." + @machine.config.vm.communicator = :winrm + @machine.config.winrm.username = "IEUser" + @machine.config.winrm.password = "Passw0rd!" + @machine.config.winrm.timeout = 50000 + @machine.config.winrm.retry_delay = 30 + + @machine.config.vm.synced_folder ".", "/vagrant", disabled: true + @machine.config.vm.synced_folder "../../", "C:/j2v8", type: "virtualbox", smb_username: ENV['VAGRANT_SMB_USER'], smb_password: ENV['VAGRANT_SMB_PASSWORD'] + + requested = @machine.config.vm.communicator + requested ||= :ssh + klass = Vagrant.plugin("2").manager.communicators[requested] + raise Errors::CommunicatorNotFound, comm: requested.to_s if !klass + + # puts "inspect: ", Vagrant.plugin("2").manager.inspect # Object + # puts "instance_variables: ", Vagrant.plugin("2").manager.instance_variables # Object + # puts "methods:", Vagrant.plugin("2").manager.methods # Object + + comm = klass.new(@machine) + @machine.instance_variable_set(:@communicator, comm) + puts "patched communicator" + + @machine.config.finalize! + + # if not _provisioned + puts "action_boot..." 
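        # the VM was halted above; boot it again through the VirtualBox provider action
        # so the remaining provisioners run over the freshly activated WinRM communicator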
+ env.action_runner.run(VagrantPlugins::ProviderVirtualBox::Action::action_boot(), { + machine: @machine, + ui: @machine.ui, + }) + end + + # puts "A", @machine.class # Object + # puts "B", @machine.instance_variables + # puts "C", @machine.inspect + + # app = Vagrant::Action::Builder.new.tap do |b| + # b.use VagrantPlugins::ProviderVirtualBox::Action::Network + # end + + # puts("running network upadte") + # env.action_runner.run(app, { + # machine: @machine, + # ui: @machine.ui, + # }) + + # comm.wait_for_ready(5000) + # @machine.action('wait_for_communicator') + # Vagrant::Action::Builder.new.tap do |b| + # b.use WaitForCommunicator, [:starting, :running] + # end + # Vagrant::Action::Builtin::WaitForCommunicator.new() + + # app = Vagrant::Action::Builder.new.tap do |b| + # b.use Vagrant::Action::Builtin::WaitForCommunicator, [:starting, :running] + # end + + # Vagrant::Action.run(app) + # runner = Vagrant::Action::Runner.new + # runner.run(app, env) + + # env.action_runner.run(app, { + # machine: @machine, + # ui: @machine.ui, + # }) + + + + # begin + # sleep 5 + # end until @machine.communicate.ready? + + # puts "communicator ready" + + # comm.initialize(@machine) + + # @machine.communicator = klass.new(self) + # @machine.communicator.initialize(@machine) + + # @machine.ui.info("trying action_boot...") + # @machine.action('action_boot') + + # @machine.config.winrm.retry_limit = 1000 + # command = 'shutdown -t 0 -r -f' + # @machine.ui.info("Issuing command: #{command}") + # @machine.communicate.execute(command) do + # if type == :stderr + # @machine.ui.error(data); + # end + # end + + # begin + # sleep 5 + # end until @machine.communicate.ready? + + # # Now the machine is up again, perform the necessary tasks. + # @machine.ui.info("Launching remount_synced_folders action...") + # @machine.action('remount_synced_folders') + end + + # Nothing needs to be done on cleanup. + def cleanup + super + end + end + SwitchToWinRMProvisioner + + end +end diff --git a/vagrant/win32/tools/ConfigWinRM.lnk b/vagrant/win32/tools/ConfigWinRM.lnk new file mode 100644 index 0000000000000000000000000000000000000000..6aec696b43db28ea236c3ae51966a82533f53a33 GIT binary patch literal 1367 zcma)5-%C?b9RKJv93sqGQWiM^eJD0JHFfJJO+|-pt63AfMxwV|-0hBSIeo~th$4iD z!q9T`WCTG7`B3x&@}&oho_xrNUW{J)2Yl#r&aPKoklk~?KhF1@^Z9ueL1i;Z9_Dc7}m3CoPrf4`28joC?60jF7B~i7rfLwnO80-Er=q91csp_ zi!{wu=r>UpXKaX!r!#82u>m-}1$S{qAX*RBlb7V-O#V^ZhX}@rmw}2n8o}e(1I<8A z1@t+_+ZAjJMJITLUK3fL6Kewb#BqUKZer$@W1qYQ5r<{uDpK?~LT?&}@D((CWde1z zHjH{)Zos$6kFX0rC#9c?0zY4;cVUzWF7O!a#N-WFv{b+{=Ri`=s;Tsds%b8DOa*)#7A6GixULgNb=0&4|-x#4biYMgk6y?vnPCEQWMaHj(;Cl_X)md1qF! 
zQNn1AI;x@x?ZPI%VkkaPauiP9NtdaQ(eB_pUxp!E<2)wsEXt)(A7Vp0h$tFbmyMm4*+|ebEYEguu5sp& zzB9x~GBa6P6Fe8Ep5%%LqHKJMxmwBuU;)42rjg_UOYcDwIJ3mky!3r zAEo%x89kvoNA$SrOb&70F#GLd_hf~Ceo6h=HCQ&dVzFu;239L4qwX6|A67lvZv8$n zti-=X>4}6RKW_JRyzXxGzgpXMuWl;76sKJ_aSavmULs7AhjfJ0AT C:/Users/IEUser/winrm_ok From 4e6c54134395eda3e8ea64387e9b5751e7e5e00d Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Thu, 20 Jul 2017 01:51:21 +0200 Subject: [PATCH 02/14] add "j2v8optimize" buildstep (performs execstack & strip on linux) - register atexit events before cross-builds are started - switched linux & android docker images to Ubuntu Xenial LTS (see #311) - cleaned up and documented switch-to-winrm-plugin that is used in win32:vagrant builds --- build.py | 30 ++++++--- build_system/config_linux.py | 9 +++ build_system/constants.py | 4 +- build_system/docker_build.py | 21 +++--- build_system/rm.py | 2 +- build_system/vagrant_build.py | 20 +++--- docker/android/Dockerfile | 6 +- docker/linux/Dockerfile | 6 +- docker/shared/install.debian.packages.sh | 4 +- vagrant/win32/switch-to-winrm-plugin.rb | 84 +++--------------------- 10 files changed, 77 insertions(+), 109 deletions(-) diff --git a/build.py b/build.py index 4c83f3171..f2fd09def 100644 --- a/build.py +++ b/build.py @@ -18,6 +18,7 @@ c.build_node_js, c.build_j2v8_cmake, c.build_j2v8_jni, + c.build_j2v8_optimize, c.build_j2v8_java, c.build_j2v8_junit, ] @@ -40,13 +41,6 @@ c.target_win32: win32_config, } -# TODO: shift responsibility to add targets to platform config or no ? -extra_targets = [ - c.target_macos_vagrant, - c.target_win32_docker, - c.target_win32_vagrant, -] - avail_architectures = [ c.arch_x86, c.arch_x64, @@ -55,6 +49,20 @@ avail_build_steps = build_step_sequence + composite_steps +# this goes through all known target platforms, and returns the sub-targets +# that are available for cross-compilation +def get_cross_targets(): + cross_targets = [] + + for tgt in avail_targets.values(): + if (not tgt.cross_compilers): + continue + + for xcomp in tgt.cross_compilers: + cross_targets.append(tgt.name + ":" + xcomp) + + return cross_targets + #----------------------------------------------------------------------- # Command-Line setup #----------------------------------------------------------------------- @@ -65,7 +73,7 @@ help="The build target platform name (must be a valid platform string identifier).", dest="target", required=True, - choices=sorted(avail_targets.keys() + extra_targets)) + choices=sorted(avail_targets.keys() + get_cross_targets())) parser.add_argument("--arch", "-a", help="The build target architecture identifier (the available architectures are also dependent on the selected platform for a build).", @@ -121,6 +129,7 @@ def parse_build_step_option(step): c.build_node_js: lambda: parsed_steps.add(c.build_node_js), c.build_j2v8_cmake: lambda: parsed_steps.add(c.build_j2v8_cmake), c.build_j2v8_jni: lambda: parsed_steps.add(c.build_j2v8_jni), + c.build_j2v8_optimize: lambda: parsed_steps.add(c.build_j2v8_optimize), c.build_j2v8_java: lambda: parsed_steps.add(c.build_j2v8_java), c.build_j2v8_junit: lambda: parsed_steps.add(c.build_j2v8_junit), }.get(step, raise_unhandled_option) @@ -133,6 +142,7 @@ def add_native(): parsed_steps.add(c.build_node_js) parsed_steps.add(c.build_j2v8_cmake) parsed_steps.add(c.build_j2v8_jni) + parsed_steps.add(c.build_j2v8_optimize) def add_managed(): parsed_steps.add(c.build_j2v8_java) @@ -261,6 +271,10 @@ def execute_build_step(compiler_inst, build_step): # execute all requested build steps for step in 
parsed_steps: + if (not step in target_steps): + sys.exit("Hint: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")") + continue + target_step = target_steps[step] # prepare any additional/dynamic parameters for the build and put them into the build-step config diff --git a/build_system/config_linux.py b/build_system/config_linux.py index 6d9aaecfb..7d4cd7407 100644 --- a/build_system/config_linux.py +++ b/build_system/config_linux.py @@ -56,6 +56,15 @@ def build_j2v8_jni(config): linux_config.build_step(c.build_j2v8_jni, build_j2v8_jni) #----------------------------------------------------------------------- +def build_j2v8_optimize(config): + file_abi = config.target.file_abi(config.arch) + return [ + "execstack -c cmake.out/$PLATFORM.$ARCH/libj2v8_linux_" + file_abi + ".so", + "strip --strip-unneeded -R .note -R .comment cmake.out/$PLATFORM.$ARCH/libj2v8_linux_" + file_abi + ".so", + ] + +linux_config.build_step(c.build_j2v8_optimize, build_j2v8_optimize) +#----------------------------------------------------------------------- def build_j2v8_java(config): return \ u.clearNativeLibs(config) + \ diff --git a/build_system/constants.py b/build_system/constants.py index 464560466..115c756bf 100644 --- a/build_system/constants.py +++ b/build_system/constants.py @@ -2,10 +2,7 @@ target_android = 'android' target_linux = 'linux' target_macos = 'macos' -target_macos_vagrant = 'macos:vagrant' target_win32 = 'win32' -target_win32_docker = 'win32:docker' -target_win32_vagrant = 'win32:vagrant' # target architectures arch_x86 = 'x86' @@ -16,6 +13,7 @@ build_node_js = 'nodejs' build_j2v8_cmake = 'j2v8cmake' build_j2v8_jni = 'j2v8jni' +build_j2v8_optimize = 'j2v8optimize' build_j2v8_java = 'j2v8java' build_j2v8_junit = 'j2v8junit' diff --git a/build_system/docker_build.py b/build_system/docker_build.py index 9fb4a02b8..944c1d659 100644 --- a/build_system/docker_build.py +++ b/build_system/docker_build.py @@ -47,10 +47,21 @@ def health_check(self, config): def pre_build(self, config): print ("preparing " + config.platform + "@" + config.arch + " => " + config.name) + docker_stop_str = self.inject_env("docker stop j2v8.$PLATFORM.$ARCH", config) + + def cli_exit_event(): + if (config.no_shutdown): + return + + print "Waiting for docker process to exit..." + self.exec_host_cmd(docker_stop_str, config) + + atexit.register(cli_exit_event) + self.exec_host_cmd("docker build -f $PLATFORM/Dockerfile -t \"j2v8-$PLATFORM\" .", config) def exec_build(self, config): - print ("DOCKER building " + config.platform + "@" + config.arch + " => " + config.name) + print ("DOCKER running " + config.platform + "@" + config.arch + " => " + config.name) is_win32 = utils.is_win32(config.platform) @@ -74,14 +85,6 @@ def exec_build(self, config): print docker_run_str - docker_stop_str = self.inject_env("docker stop j2v8.$PLATFORM.$ARCH", config) - - def cli_exit_event(): - print "Waiting for docker process to exit..." 
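
The change above (and the matching vagrant_build.py change further down) moves the shutdown registration into pre_build, so an early cancel still stops the container or VM unless --no-shutdown was passed. A minimal standalone sketch of that pattern; register_env_shutdown and its arguments are illustrative names, not the project's exact API:

import atexit
import subprocess

def register_env_shutdown(stop_cmd, no_shutdown):
    # call this from pre_build(), before the long-running build command starts,
    # so the cleanup is already in place if the user cancels the build early
    def cli_exit_event():
        if no_shutdown:
            return
        print("Waiting for the build environment to shut down...")
        subprocess.call(stop_cmd, shell=True)

    atexit.register(cli_exit_event)

# e.g. register_env_shutdown("docker stop j2v8.linux.x64", no_shutdown=False)
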
- self.exec_host_cmd(docker_stop_str, config) - - atexit.register(cli_exit_event) - self.exec_host_cmd(docker_run_str, config) def post_build(self, config): diff --git a/build_system/rm.py b/build_system/rm.py index aa383e2a9..ca8ddf65c 100644 --- a/build_system/rm.py +++ b/build_system/rm.py @@ -2,7 +2,7 @@ import sys import shutil -# this is a cross-platform polyfill for "cp" +# this is a cross-platform polyfill for "rm" items = sys.argv[1:] diff --git a/build_system/vagrant_build.py b/build_system/vagrant_build.py index e0754c88d..577dade2f 100644 --- a/build_system/vagrant_build.py +++ b/build_system/vagrant_build.py @@ -20,10 +20,19 @@ def pre_build(self, config): if (config.pre_build_cmd): vagrant_start_cmd = config.pre_build_cmd + utils.host_cmd_sep() + vagrant_start_cmd + def cli_exit_event(): + if (config.no_shutdown): + return + + print "Waiting for vagrant virtual-machine to exit..." + self.exec_host_cmd("vagrant halt", config) + + atexit.register(cli_exit_event) + self.exec_host_cmd(vagrant_start_cmd, config) def exec_build(self, config): - print ("VAGRANT building " + config.platform + "@" + config.arch + " => " + config.name) + print ("VAGRANT running " + config.platform + "@" + config.arch + " => " + config.name) # shell = "powershell -c \"cmd /C " if utils.is_win32(config.platform) else "ssh -c " # cmd_sep = "&& " if utils.is_win32(config.platform) else "; " @@ -63,15 +72,6 @@ def exec_build(self, config): build_cmd = self.inject_env("cd $BUILD_CWD" + cmd_sep + build_cmd, config) vagrant_run_cmd = "vagrant ssh -c '" + build_cmd + "'" - def cli_exit_event(): - if (config.no_shutdown): - return - - print "Waiting for vagrant virtual-machine to exit..." - self.exec_host_cmd("vagrant halt", config) - - atexit.register(cli_exit_event) - self.exec_host_cmd(vagrant_run_cmd, config) def post_build(self, config): diff --git a/docker/android/Dockerfile b/docker/android/Dockerfile index 30437d90f..428a0ae49 100644 --- a/docker/android/Dockerfile +++ b/docker/android/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:zesty +FROM ubuntu:xenial RUN mkdir -p /temp/docker/shared/ WORKDIR /temp/docker/shared/ @@ -31,6 +31,10 @@ COPY ./shared/install.jdk.sh /temp/docker/shared RUN ./install.jdk.sh ENV JAVA_HOME "/opt/jdk/jdk1.8.0_131" +COPY ./shared/install.cmake.sh /temp/docker/shared +RUN ./install.cmake.sh +ENV PATH "$PATH:/opt/cmake/bin" + COPY ./shared/install.gradle.sh /temp/docker/shared RUN ./install.gradle.sh ENV GRADLE_HOME "/opt/gradle-3.5" diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile index 1e49d955c..2ba9a37db 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:zesty +FROM ubuntu:xenial RUN mkdir -p /temp/docker/shared/ WORKDIR /temp/docker/shared/ @@ -19,6 +19,10 @@ COPY ./shared/install.maven.sh /temp/docker/shared RUN ./install.maven.sh ENV PATH "$PATH:/opt/apache-maven-3.5.0/bin" +COPY ./shared/install.cmake.sh /temp/docker/shared +RUN ./install.cmake.sh +ENV PATH "$PATH:/opt/cmake/bin" + # download the most critical maven dependencies for the build beforehand COPY ./shared/pom.xml /temp WORKDIR /temp diff --git a/docker/shared/install.debian.packages.sh b/docker/shared/install.debian.packages.sh index f8301df5c..344cdb261 100755 --- a/docker/shared/install.debian.packages.sh +++ b/docker/shared/install.debian.packages.sh @@ -10,6 +10,6 @@ apt-get -qq update && \ file \ python \ make \ - cmake \ wget \ - supervisor + supervisor \ + execstack diff --git a/vagrant/win32/switch-to-winrm-plugin.rb 
b/vagrant/win32/switch-to-winrm-plugin.rb index 288d378d1..de8f63acd 100644 --- a/vagrant/win32/switch-to-winrm-plugin.rb +++ b/vagrant/win32/switch-to-winrm-plugin.rb @@ -5,7 +5,7 @@ class SwitchToWinRMPlugin < Vagrant.plugin('2') name 'Switch to WinRM Plugin' - # This plugin provides a provisioner called windows_reboot. + # This plugin provides a provisioner called switch_to_winrm. provisioner 'switch_to_winrm' do # Create a provisioner. @@ -27,13 +27,15 @@ def provision env = @machine.instance_variable_get(:@env) if not _provisioned + # stop the VM before we switch the communicator puts "action_halt..." env.action_runner.run(VagrantPlugins::ProviderVirtualBox::Action::action_halt(), { machine: @machine, ui: @machine.ui, }) - # end + # TODO: this is just a copy-paste of the settings from the actual Vagrantfile config + # there should be some practical way to remove this code duplication! puts "switching comm..." @machine.config.vm.communicator = :winrm @machine.config.winrm.username = "IEUser" @@ -44,95 +46,29 @@ def provision @machine.config.vm.synced_folder ".", "/vagrant", disabled: true @machine.config.vm.synced_folder "../../", "C:/j2v8", type: "virtualbox", smb_username: ENV['VAGRANT_SMB_USER'], smb_password: ENV['VAGRANT_SMB_PASSWORD'] + # NOTE: this is copied from https://github.com/mitchellh/vagrant/blob/d1a589c59f75dd2910e47976a742dc6bc99035b0/lib/vagrant/machine.rb#L246 + # it reinstantiates the communicator defined by the vagrant configuration ... requested = @machine.config.vm.communicator requested ||= :ssh klass = Vagrant.plugin("2").manager.communicators[requested] raise Errors::CommunicatorNotFound, comm: requested.to_s if !klass - # puts "inspect: ", Vagrant.plugin("2").manager.inspect # Object - # puts "instance_variables: ", Vagrant.plugin("2").manager.instance_variables # Object - # puts "methods:", Vagrant.plugin("2").manager.methods # Object - comm = klass.new(@machine) + + # ... and then monkey-patches the new instance into the machine @machine.instance_variable_set(:@communicator, comm) puts "patched communicator" + # this applies the changed communicator and also reconfigures the related network settings @machine.config.finalize! - # if not _provisioned + # start the VM now, after we successfully switched the communicator puts "action_boot..." env.action_runner.run(VagrantPlugins::ProviderVirtualBox::Action::action_boot(), { machine: @machine, ui: @machine.ui, }) end - - # puts "A", @machine.class # Object - # puts "B", @machine.instance_variables - # puts "C", @machine.inspect - - # app = Vagrant::Action::Builder.new.tap do |b| - # b.use VagrantPlugins::ProviderVirtualBox::Action::Network - # end - - # puts("running network upadte") - # env.action_runner.run(app, { - # machine: @machine, - # ui: @machine.ui, - # }) - - # comm.wait_for_ready(5000) - # @machine.action('wait_for_communicator') - # Vagrant::Action::Builder.new.tap do |b| - # b.use WaitForCommunicator, [:starting, :running] - # end - # Vagrant::Action::Builtin::WaitForCommunicator.new() - - # app = Vagrant::Action::Builder.new.tap do |b| - # b.use Vagrant::Action::Builtin::WaitForCommunicator, [:starting, :running] - # end - - # Vagrant::Action.run(app) - # runner = Vagrant::Action::Runner.new - # runner.run(app, env) - - # env.action_runner.run(app, { - # machine: @machine, - # ui: @machine.ui, - # }) - - - - # begin - # sleep 5 - # end until @machine.communicate.ready? 
- - # puts "communicator ready" - - # comm.initialize(@machine) - - # @machine.communicator = klass.new(self) - # @machine.communicator.initialize(@machine) - - # @machine.ui.info("trying action_boot...") - # @machine.action('action_boot') - - # @machine.config.winrm.retry_limit = 1000 - # command = 'shutdown -t 0 -r -f' - # @machine.ui.info("Issuing command: #{command}") - # @machine.communicate.execute(command) do - # if type == :stderr - # @machine.ui.error(data); - # end - # end - - # begin - # sleep 5 - # end until @machine.communicate.ready? - - # # Now the machine is up again, perform the necessary tasks. - # @machine.ui.info("Launching remount_synced_folders action...") - # @machine.action('remount_synced_folders') end # Nothing needs to be done on cleanup. From 07fea24566b6d3f8f2b4704f602d2d6b580b4fd8 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Sun, 30 Jul 2017 08:03:58 +0200 Subject: [PATCH 03/14] additional build-system features & some j2v8 fixes Features ------------------------ [Build-System] - replaced default Android instrumentation test runner with Spoon test runner (allows to optionally run individual tests on emulator) - added interactive CLI mode (see build.py, build_interactive.py) - added "--interactive, -i" CLI parameter to start interactive build CLI - added support for specifying "anti-build-steps" via the CLI (for example: "all ~nodejs ~test" runs all steps, except for "nodejs" and "test") - added "--vendor, -v" CLI parameter that allows to build specialized linux vendor binaries, e.g. for alpine-linux - added alpine-linux binary support - added "inject_env" API that is directly callable on a build-step config - added "--sys-image, -img" CLI parameter that can be used to override the used OS image for Docker / Vagrant builds - refactored the original "--cross-compile, -x" parameter into two separate parameters "--docker, -dkr" and "--vagrant, -vgr" (this makes it possible to support both variants for all target platforms) - added "--keep-native-libs, -knl" CLI parameter (can be useful for building bundles that should include multiple platform binaries) - centralized some repeated constant literals & code from build-steps across multiple platforms (into build_utils.py, cmake_utils.py and shared_build_steps.py) - added automatically set "file_abi" property on build-step configs - added more variables for use with "inject_env" API - added "adb logcat" process to Android Docker build (writes realtime output from Android emulator to ./docker/android/logcat.txt) - moved all Node.js utility code to nodejs.py CLI script - added experimental code to package Node.js pre-built binary packages (see nodejs.py) [J2V8] - updated LibraryLoader code to include Linux-Vendor specifier when looking for native libs (vendor-specific libs are preferred over vendor-agnostic ones) - added PlatformDetector as a central place to get normalized string identifiers for OS,Arch,Vendor Fixes & Changes ------------------------ [Build-System] - reorganized some code of the build-system into more separate files (see build.py, build_constants.py, build_executor.py, cli.py) - added missing "bool" type to immutable.py - throw error in CMake scripts if Java-Home could not be found - default Docker base-image for Linux & Android builds is now "debian:jessie" - installation of JDK in install.jdk.sh now checks if a JDK is already installed (needed for alpine-linux build) - refactored win32 Dockerfile (now uses the same PowerShell scripts as the Vagrant build instead of inline commands) [J2V8] - 
separated LibraryLoader and Platform-Detection code (see PlatformDetector.java) - show native lib-name in IllegalStateException message when native lib could not be loaded - AllTests.java is now called A_RunAheadTests and only includes the V8RuntimeNotLoadedTest (this is the only test that should be run before any other tests) - extended and refactored LibraryLoaderTest / PlatformDetectorTest implementations - added timeout for V8LockerTest - added workaround for V8RuntimeNotLoadedTest (is currently not implemented to run on Android) --- .gitignore | 10 +- BUILDING.md | 10 +- CMakeLists.txt | 135 +++++--- build.gradle | 17 + build.py | 304 +---------------- build_system/build_configs.py | 123 +++++++ build_system/build_constants.py | 43 +++ build_system/build_executor.py | 228 +++++++++++++ build_system/build_interactive.py | 39 +++ .../build_settings.py | 0 .../{cross_build.py => build_structures.py} | 25 +- build_system/build_utils.py | 35 +- build_system/cli.py | 112 +++++++ build_system/cmake_utils.py | 29 ++ build_system/config_android.py | 32 +- build_system/config_linux.py | 42 ++- build_system/config_macos.py | 27 +- build_system/config_win32.py | 41 ++- build_system/constants.py | 16 +- build_system/docker_build.py | 44 ++- build_system/immutable.py | 2 +- build_system/shared_build_steps.py | 61 ++-- build_system/shell_build.py | 2 +- build_system/vagrant_build.py | 36 +- cmake/FindJava.cmake | 6 +- docker/android/.gitignore | 1 + docker/android/Dockerfile | 5 +- docker/android/supervisord.template.conf | 22 ++ docker/linux/Dockerfile | 16 +- docker/shared/install.alpine.packages.sh | 16 + docker/shared/install.jdk.sh | 6 + docker/shared/install.maven.sh | 1 + docker/win32/Dockerfile | 110 ++----- docker/win32/install.python.ps1 | 1 + nodejs.py | 189 +++++++++++ pom.xml | 2 +- prepare_build.py | 33 -- .../com/eclipsesource/v8/LibraryLoader.java | 152 ++++----- .../java/com/eclipsesource/v8/Platform.java | 12 + .../eclipsesource/v8/PlatformDetector.java | 310 ++++++++++++++++++ src/main/java/com/eclipsesource/v8/V8.java | 8 +- .../com/eclipsesource/v8/A_RunAheadTests.java | 28 ++ .../java/com/eclipsesource/v8/AllTests.java | 44 --- .../eclipsesource/v8/LibraryLoaderTest.java | 169 ++++++---- .../v8/PlatformDetectorTest.java | 160 +++++++++ .../com/eclipsesource/v8/V8LockerTest.java | 6 + .../v8/V8RuntimeNotLoadedTest.java | 33 +- store_node_patch.py | 16 - vagrant/macos/Vagrantfile | 3 +- vagrant/win32/Vagrantfile | 12 +- 50 files changed, 1943 insertions(+), 831 deletions(-) create mode 100644 build_system/build_configs.py create mode 100644 build_system/build_constants.py create mode 100644 build_system/build_executor.py create mode 100644 build_system/build_interactive.py rename build_settings.py => build_system/build_settings.py (100%) rename build_system/{cross_build.py => build_structures.py} (78%) create mode 100644 build_system/cli.py create mode 100644 build_system/cmake_utils.py create mode 100644 docker/shared/install.alpine.packages.sh create mode 100644 nodejs.py delete mode 100644 prepare_build.py create mode 100644 src/main/java/com/eclipsesource/v8/Platform.java create mode 100644 src/main/java/com/eclipsesource/v8/PlatformDetector.java create mode 100644 src/test/java/com/eclipsesource/v8/A_RunAheadTests.java delete mode 100644 src/test/java/com/eclipsesource/v8/AllTests.java create mode 100644 src/test/java/com/eclipsesource/v8/PlatformDetectorTest.java delete mode 100644 store_node_patch.py diff --git a/.gitignore b/.gitignore index 4c9528d41..437d72873 100644 
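
The "anti-build-step" syntax described in the commit message above ("all ~nodejs ~test" runs every step except nodejs and test) can be summarized with a small, self-contained sketch; the real resolution is handler-based and lives in build_system/build_executor.py further down, and the step/alias tables here are abbreviated:

STEP_ORDER = ["nodejs", "j2v8cmake", "j2v8jni", "j2v8optimize", "j2v8java", "j2v8junit"]
ALIASES = {"all": STEP_ORDER, "test": ["j2v8junit"], "java": ["j2v8java"]}

def resolve_steps(cli_steps):
    selected = set()
    for token in cli_steps:
        negate = token.startswith("~")
        names = ALIASES.get(token.lstrip("~"), [token.lstrip("~")])
        for name in names:
            if negate:
                selected.discard(name)
            else:
                selected.add(name)
    # restore the canonical execution order, regardless of the order given on the CLI
    return [s for s in STEP_ORDER if s in selected]

# resolve_steps(["all", "~nodejs", "~test"]) -> ["j2v8cmake", "j2v8jni", "j2v8optimize", "j2v8java"]
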
--- a/.gitignore +++ b/.gitignore @@ -21,11 +21,17 @@ hs_err*.log *.iws .idea -# python binaries +# Python binaries *.pyc -# Build input/output. +# build input/output node build.out cmake.out node.out + +# test input/output +test-mockup-os-release + +# generated dependency packages +j2v8-dependencies-* diff --git a/BUILDING.md b/BUILDING.md index f09313d2c..5c3bbefef 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -47,8 +47,8 @@ __Inputs__: __Artifacts:__ - J2V8 native shared libraries - - `./cmake.out/{platform}.{architecture}/libj2v8_{platform}_{abi}.{ext}` - - e.g. `./cmake.out/linux.x64/libj2v8_linux_x86_64.so` + - `./cmake.out/{platform}.{architecture}/libj2v8-[vendor-]{platform}-{abi}.{ext}` + - e.g. `./cmake.out/linux.x64/libj2v8-alpine-linux-x86_64.so` - The built shared libraries will also be automatically copied to the required Java / Android project directories to be included in the .jar/.aar packages that will be built later. - `./src/main/resources/` (Java) - `./src/main/jniLibs/{abi}/libj2v8.so` (Android) @@ -83,8 +83,8 @@ __Inputs__: - `./src/test/` __Artifacts:__ -- Maven Surefire test reports +- Maven Surefire test reports (Desktop platforms) - `./target/surefire-reports/` -- Gradle connected-test reports - - `./build/outputs/androidTest-results/connected/` +- Gradle Spoon test reports (Android only) + - `./build/spoon/debug/` --- diff --git a/CMakeLists.txt b/CMakeLists.txt index 9256387fb..52cb2b17c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -25,72 +25,120 @@ include(BuildUtils) include(NodeJsUtils) include(Policies) +#----------------------------------------------------------------------- +# DEPENDENCY SETTINGS / CMAKE OPTIONS +#----------------------------------------------------------------------- + +# look for dependencies +find_package(Java) + +# j2v8 dependency options +set(J2V8_JDK_DIR ${Java_ROOT} CACHE STRING "Path to the Java JDK dependency") +set(J2V8_NODEJS_DIR "${CMAKE_SOURCE_DIR}/node" CACHE STRING "Path to the Node.js dependency") + +# get the required Node.js link libraries +get_njs_libs(${J2V8_NODEJS_DIR} "Debug") +get_njs_libs(${J2V8_NODEJS_DIR} "Release") + +# j2v8 build options +set(J2V8_TARGET_ARCH "" CACHE STRING "The target architecture for the build.") +option(J2V8_NODE_ENABLED "Build the J2V8 native bridge with Node.js support enabled" ON) +option(J2V8_BUILD_ONLY_DEBUG_RELEASE "Generate only Debug and Release configurations (exclude RelWithDebInfo and MinSizeRel)" ON) + +if(CMAKE_SYSTEM_NAME STREQUAL "Windows" AND MSVC) + option(J2V8_LINK_WITH_STATIC_MSVCRT "Link against the static version of the Microsoft Visual C++ Common Runtime (will link against the dynamic DLL version if this option is disabled)" ON) +endif() + #----------------------------------------------------------------------- # BUILD PLATFORM SETUP & VARIABLES #----------------------------------------------------------------------- +# HINT: CMake Multiarchitecture Compilation +# see: https://stackoverflow.com/a/5359572/425532 + +if("${J2V8_TARGET_ARCH}" STREQUAL "") + message (FATAL_ERROR "J2V8_TARGET_ARCH not specified") +endif() + +if(J2V8_TARGET_ARCH STREQUAL "x86_64") + set(J2V8_BUILD_X64 TRUE) +endif() + if(CMAKE_SYSTEM_NAME STREQUAL "Android") #{ set(JAVA_PLATFORM_NAME "android") - # output library filename - set(J2V8_LIB_PLATFORM_NAME "android") + # output library filename parts set(J2V8_LIB_PREFIX "") set(J2V8_LIB_ARCH_NAME ${CMAKE_ANDROID_ARCH_ABI}) + set(J2V8_LIB_VENDOR_NAME "") + set(J2V8_LIB_PLATFORM_NAME "android") #} elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") #{ 
set(JAVA_PLATFORM_NAME "linux") - # output library filename - set(J2V8_LIB_PLATFORM_NAME "linux") + # output library filename parts set(J2V8_LIB_PREFIX "") - set(J2V8_LIB_ARCH_NAME "x86") + set(J2V8_LIB_ARCH_NAME ${J2V8_TARGET_ARCH}) + set(J2V8_LIB_VENDOR_NAME "") + set(J2V8_LIB_PLATFORM_NAME "linux") + + if(J2V8_VENDOR) + set(J2V8_LIB_VENDOR_NAME "-${J2V8_VENDOR}") + endif() + + # configure library architecture + if(J2V8_BUILD_X64) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m64 ") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -m64 ") + else() + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32 ") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -m32 ") + endif() #} elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin") #{ set(JAVA_PLATFORM_NAME "darwin") - # output library filename - set(J2V8_LIB_PLATFORM_NAME "macosx") + # output library filename parts set(J2V8_LIB_PREFIX "") - set(J2V8_LIB_ARCH_NAME "x86") + set(J2V8_LIB_ARCH_NAME ${J2V8_TARGET_ARCH}) + set(J2V8_LIB_VENDOR_NAME "") + set(J2V8_LIB_PLATFORM_NAME "macosx") + + # configure library architecture + if(J2V8_BUILD_X64) + set(CMAKE_OSX_ARCHITECTURES "x86_64") + else() + set(CMAKE_OSX_ARCHITECTURES "i386") + + # fix for 32-bit linking error "ld: illegal text reloc" + # see: https://stackoverflow.com/a/9322458/425532 + set(CMAKE_SHARED_LINKER_FLAGS "-read_only_relocs suppress") + endif() #} elseif(CMAKE_SYSTEM_NAME STREQUAL "Windows") #{ set(JAVA_PLATFORM_NAME "win32") - # output library filename - set(J2V8_LIB_PLATFORM_NAME "win32") + # output library filename parts set(J2V8_LIB_PREFIX "lib") - set(J2V8_LIB_ARCH_NAME "x86") + set(J2V8_LIB_ARCH_NAME ${J2V8_TARGET_ARCH}) + set(J2V8_LIB_VENDOR_NAME "") + set(J2V8_LIB_PLATFORM_NAME "windows") #} endif() -#----------------------------------------------------------------------- -# DEPENDENCY SETTINGS / CMAKE OPTIONS -#----------------------------------------------------------------------- - -# look for dependencies -find_package(Java) - -# j2v8 dependency options -set(J2V8_JDK_DIR ${Java_ROOT} CACHE STRING "Path to the Java JDK dependency") -set(J2V8_NODEJS_DIR "${CMAKE_SOURCE_DIR}/node" CACHE STRING "Path to the Node.js dependency") - -# get the required Node.js link libraries -get_njs_libs(${J2V8_NODEJS_DIR} "Debug") -get_njs_libs(${J2V8_NODEJS_DIR} "Release") - -# j2v8 build options -option(J2V8_NODE_COMPATIBLE "Build the J2V8 native bridge with Node.js support enabled" ON) -option(J2V8_BUILD_ONLY_DEBUG_RELEASE "Generate only Debug and Release configurations (exclude RelWithDebInfo and MinSizeRel)" ON) - -if(CMAKE_SYSTEM_NAME STREQUAL "Windows" AND MSVC) -#{ - option(J2V8_LINK_WITH_STATIC_MSVCRT "Link against the static version of the Microsoft Visual C++ Common Runtime (will link against the dynamic DLL version if this option is disabled)" ON) -#} -endif() +message("--------------------------------------------------") +message("J2V8_LIB_ARCH_NAME = ${J2V8_LIB_ARCH_NAME}") +message("J2V8_LIB_VENDOR_NAME = ${J2V8_LIB_VENDOR_NAME}") +message("J2V8_LIB_PLATFORM_NAME = ${J2V8_LIB_PLATFORM_NAME}") +message("J2V8_TARGET_ARCH = ${J2V8_TARGET_ARCH}") +message("J2V8_BUILD_X64 = ${J2V8_BUILD_X64}") +message("--------------------------------------------------") +message("J2V8_NODE_ENABLED = ${J2V8_NODE_ENABLED}") +message("--------------------------------------------------") #----------------------------------------------------------------------- # INCLUDE DIRECTORIES & SOURCE FILES @@ -138,9 +186,7 @@ endif() # remove the MinSizeRel and RelWithDebInfo configurations if(J2V8_BUILD_ONLY_DEBUG_RELEASE) -#{ 
set(CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "limited configs" FORCE) -#} endif() # link against the static MS C++ runtime libraries @@ -152,10 +198,12 @@ endif() add_library(j2v8 SHARED ${src_files}) # enable Node.js if requested by the build options above -if(J2V8_NODE_COMPATIBLE) -#{ +if(J2V8_NODE_ENABLED) set_property(TARGET j2v8 PROPERTY COMPILE_DEFINITIONS ${COMPILE_DEFINITIONS} NODE_COMPATIBLE=1) -#} +endif() + +if(CMAKE_SYSTEM_NAME STREQUAL "Windows" AND MSVC) + set_property(TARGET j2v8 APPEND_STRING PROPERTY LINK_FLAGS_RELEASE "/LTCG") endif() # build output directory @@ -174,10 +222,5 @@ target_link_libraries(j2v8 # OUTPUT SETTINGS & POST-BUILD #----------------------------------------------------------------------- -# apply lib suffix if building a 64 bit target -if(CMAKE_CL_64 OR CMAKE_SIZEOF_VOID_P EQUAL 8) - set(J2V8_LIB_ARCH_NAME "${J2V8_LIB_ARCH_NAME}_64") -endif() - # set library output filename -set_target_properties(j2v8 PROPERTIES OUTPUT_NAME "${J2V8_LIB_PREFIX}${PROJECT_NAME}_${J2V8_LIB_PLATFORM_NAME}_${J2V8_LIB_ARCH_NAME}") +set_target_properties(j2v8 PROPERTIES OUTPUT_NAME "${J2V8_LIB_PREFIX}${PROJECT_NAME}${J2V8_LIB_VENDOR_NAME}-${J2V8_LIB_PLATFORM_NAME}-${J2V8_LIB_ARCH_NAME}") diff --git a/build.gradle b/build.gradle index adaceb30c..fba5986f1 100644 --- a/build.gradle +++ b/build.gradle @@ -17,10 +17,12 @@ buildscript { } dependencies { classpath 'com.android.tools.build:gradle:2.2.2' + classpath 'com.stanfy.spoon:spoon-gradle-plugin:1.2.2' } } apply plugin: 'com.android.library' +apply plugin: 'spoon' repositories { jcenter() @@ -62,6 +64,21 @@ android { } } +// see: https://github.com/square/spoon +spoon { + // for debug output + debug = true + + // To run only specified test classes + if (project.hasProperty('testClass')) { + className = project.testClass + } + + // To run a single method in TestCase + if (project.hasProperty('testMethod')) { + methodName = project.testMethod + } +} signing { required { has("release") && gradle.taskGraph.hasTask("uploadArchives") } diff --git a/build.py b/build.py index f2fd09def..2323b9b15 100644 --- a/build.py +++ b/build.py @@ -1,291 +1,19 @@ -import argparse -import os -import re -import sys - -import build_system.constants as c -import build_system.build_utils as utils -from build_system.shell_build import ShellBuildSystem - -from build_system.config_android import android_config -from build_system.config_linux import linux_config -from build_system.config_macos import macos_config -from build_system.config_win32 import win32_config - -import build_system.immutable as immutable - -build_step_sequence = [ - c.build_node_js, - c.build_j2v8_cmake, - c.build_j2v8_jni, - c.build_j2v8_optimize, - c.build_j2v8_java, - c.build_j2v8_junit, -] - -composite_steps = [ - # composites - c.build_all, - c.build_full, - c.build_native, - # aliases - c.build_java, - c.build_bundle, - c.build_test, -] - -avail_targets = { - c.target_android: android_config, - c.target_linux: linux_config, - c.target_macos: macos_config, - c.target_win32: win32_config, -} - -avail_architectures = [ - c.arch_x86, - c.arch_x64, - c.arch_arm, -] - -avail_build_steps = build_step_sequence + composite_steps - -# this goes through all known target platforms, and returns the sub-targets -# that are available for cross-compilation -def get_cross_targets(): - cross_targets = [] - - for tgt in avail_targets.values(): - if (not tgt.cross_compilers): - continue - - for xcomp in tgt.cross_compilers: - cross_targets.append(tgt.name + ":" + xcomp) - - return 
cross_targets - -#----------------------------------------------------------------------- -# Command-Line setup -#----------------------------------------------------------------------- - -parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) - -parser.add_argument("--target", "-t", - help="The build target platform name (must be a valid platform string identifier).", - dest="target", - required=True, - choices=sorted(avail_targets.keys() + get_cross_targets())) +""" +This script should be invoked directly via the CLI to start a J2V8 build +""" -parser.add_argument("--arch", "-a", - help="The build target architecture identifier (the available architectures are also dependent on the selected platform for a build).", - dest="arch", - required=True, - choices=avail_architectures) - -parser.add_argument("--node-enabled", "-ne", - help="Include the Node.js runtime and builtin node-modules for use in J2V8.", - dest="node_enabled", - action="store_const", - const=True) - -# NOTE: this option is only used internally to distinguish the running of the build script within -# the build-instigator and the actual build-executor (this is relevant when cross-compiling) -parser.add_argument("--cross-agent", - help=argparse.SUPPRESS, - dest="cross_agent", - type=str) - -parser.add_argument("--no-shutdown", "-nos", - help="When using a cross-compile environment, do not shutdown any of the components when the build is finished or canceled.", - dest="no_shutdown", - action="store_const", - const=True) - -parser.add_argument("buildsteps", - help="Pass a single build-step or a list of all the recognized build-steps that should be executed\n" + - "(the order of the steps given to the CLI does not matter, the correct order will be restored internally).\n\n" + - "the fundamental build steps (in order):\n" + - "---------------------------------------\n" + - "\n".join(build_step_sequence) + "\n\n" + - "aliases / combinations of multiple of the above steps:\n" + - "------------------------------------------------------\n" + - "\n".join(composite_steps), - metavar="build-steps", - nargs="*", - default="all", - choices=avail_build_steps) - -parsed_steps = set() - -def parse_build_step_option(step): - return { - # composite steps - c.build_all: add_all, - c.build_full: add_all, - c.build_native: add_native, - c.build_java: add_managed, - c.build_bundle: add_managed, - c.build_test: add_test, - # basic steps - c.build_node_js: lambda: parsed_steps.add(c.build_node_js), - c.build_j2v8_cmake: lambda: parsed_steps.add(c.build_j2v8_cmake), - c.build_j2v8_jni: lambda: parsed_steps.add(c.build_j2v8_jni), - c.build_j2v8_optimize: lambda: parsed_steps.add(c.build_j2v8_optimize), - c.build_j2v8_java: lambda: parsed_steps.add(c.build_j2v8_java), - c.build_j2v8_junit: lambda: parsed_steps.add(c.build_j2v8_junit), - }.get(step, raise_unhandled_option) - -def add_all(): - add_native() - add_managed() - -def add_native(): - parsed_steps.add(c.build_node_js) - parsed_steps.add(c.build_j2v8_cmake) - parsed_steps.add(c.build_j2v8_jni) - parsed_steps.add(c.build_j2v8_optimize) - -def add_managed(): - parsed_steps.add(c.build_j2v8_java) - -def add_test(): - parsed_steps.add(c.build_j2v8_junit) - -def raise_unhandled_option(): - sys.exit("INTERNAL-ERROR: Tried to handle unrecognized build-step") +import sys -if __name__ == "__main__": +import build_system.cli as cli +import build_system.build_interactive as interactive +import build_system.build_executor as bex + +# interactive shell entrypoint +if (len(sys.argv) 
>= 2 and sys.argv[1] in ["--interactive", "-i"]): + print "entering interactive mode...\n" + interactive.run_interactive_cli() +# passive command-line entrypoint +else: + parser = cli.get_parser() args = parser.parse_args() - -#----------------------------------------------------------------------- -# Build execution core function -#----------------------------------------------------------------------- -def execute_build(params): - - if (params.target is None): - sys.exit("ERROR: No target platform specified") - - def parse_target(target_str): - sep_idx = target_str.find(":") - return (target_str, None) if sep_idx < 0 else target_str[0:sep_idx], target_str[sep_idx+1:] - - # if the "target" string {x:y} passed to the CLI exactly identifies a build-target, we just take it and continue. - # This means that if you want to introduce a customized build for a platform named {platform:custom-name}, - # it will be picked up before any further deconstruction of a "target:sub-target" string is done - build_target = avail_targets.get(params.target) - - target = None - cross_id = None - - # if the passed "target" string is not already a valid build-target, we need to look for sub-targets - if (build_target is None): - target, cross_id = parse_target(params.target) - # otherwise we just go on with it - else: - target = params.target - - if (not target in avail_targets): - sys.exit("ERROR: Unrecognized target platform: " + target) - - build_target = avail_targets.get(target) - - if (params.arch is None): - sys.exit("ERROR: No target architecture specified") - - build_architectures = build_target.architectures - - if (not params.arch in build_architectures): - sys.exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target) - - if (params.buildsteps is None): - sys.exit("ERROR: No build-step specified, valid values are: " + ", ".join(avail_build_steps)) - - if (not params.buildsteps is None and not isinstance(params.buildsteps, list)): - params.buildsteps = [params.buildsteps] - - global parsed_steps - parsed_steps.clear() - - for step in params.buildsteps: - parse_build_step_option(step)() - - # force build-steps into defined order (see: http://stackoverflow.com/a/23529016) - parsed_steps = [step for step in build_step_sequence if step in parsed_steps] - - platform_steps = build_target.steps - cross_configs = build_target.cross_configs - - build_cwd = utils.get_cwd() - - cross_cfg = None - - if (cross_id): - if (cross_configs.get(cross_id) is None): - sys.exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_id + "'") - else: - cross_cfg = cross_configs.get(cross_id) - - # if we are the build-instigator (not a cross-compile build-agent) we directly run some initial checks & setups for the build - if (not params.cross_agent): - print "Checking Node.js builtins integration consistency..." - utils.check_node_builtins() - - print "Caching Node.js artifacts..." - curr_node_tag = target + "." 
+ params.arch - utils.store_nodejs_output(curr_node_tag, build_cwd) - - def execute_build_step(compiler_inst, build_step): - """Executes an immutable copy of the given build-step configuration""" - # from this point on, make the build-input immutable to ensure consistency across the whole build process - # any actions during the build-step should only be made based on the initial set of variables & conditions - # NOTE: this restriction makes it much more easy to reason about the build-process as a whole - build_step = immutable.freeze(build_step) - compiler_inst.build(build_step) - - # a cross-compile was requested, we just launch the build-environment and then delegate the requested build-process to the cross-compile environment - if (cross_cfg): - cross_compiler = build_target.cross_compiler(cross_id) - - # prepare any additional/dynamic parameters for the build and put them into the build-step config - cross_cfg.arch = params.arch - cross_cfg.custom_cmd = "python ./build.py --cross-agent " + cross_id + " -t $PLATFORM -a $ARCH " + ("-ne" if params.node_enabled else "") + " " + " ".join(parsed_steps) - cross_cfg.compiler = cross_compiler - cross_cfg.target = build_target - cross_cfg.no_shutdown = params.no_shutdown - - execute_build_step(cross_compiler, cross_cfg) - - # run the requested build-steps with the given parameters to produce the build-artifacts - else: - target_compiler = ShellBuildSystem() - target_steps = dict(platform_steps) - - # this is a build-agent for a cross-compile - if (params.cross_agent): - # the cross-compile step dictates which directory will be used to run the actual build - cross_cfg = cross_configs.get(params.cross_agent) - - if (cross_cfg is None): - sys.exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent) - - build_cwd = cross_cfg.build_cwd - - # execute all requested build steps - for step in parsed_steps: - if (not step in target_steps): - sys.exit("Hint: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")") - continue - - target_step = target_steps[step] - - # prepare any additional/dynamic parameters for the build and put them into the build-step config - target_step.cross_agent = params.cross_agent - target_step.arch = params.arch - target_step.build_cwd = build_cwd - target_step.compiler = target_compiler - target_step.target = build_target - - execute_build_step(target_compiler, target_step) - -# check if this script was invoked via CLI directly to start a build -if __name__ == "__main__": - execute_build(args) + bex.execute_build(args) diff --git a/build_system/build_configs.py b/build_system/build_configs.py new file mode 100644 index 000000000..09fec5924 --- /dev/null +++ b/build_system/build_configs.py @@ -0,0 +1,123 @@ + +import constants as c + +configs = [ + # ANDROID builds + { + "name": "Docker >> android-x86 >> NODE_ENABLED", + "params": { + "target": c.target_android, + "arch": c.arch_x86, + "docker": True, + "node_enabled": True, + }, + }, + { + "name": "Docker >> android-arm >> NODE_ENABLED", + "params": { + "target": c.target_android, + "arch": c.arch_arm, + "docker": True, + "node_enabled": True, + }, + }, + # LINUX builds + { + "name": "Docker >> alpine-linux-x64 >> NODE_ENABLED", + "params": { + "target": c.target_linux, + "vendor": "alpine", + "arch": c.arch_x64, + "docker": True, + "sys_image": "openjdk:8u131-alpine", + "node_enabled": True, + }, + }, + # TODO: build not supported, because default gcc/g++ on alpine does not support x32 
compilation + # (see: https://stackoverflow.com/a/40574830/425532) + # { + # "name": "Docker >> alpine-linux-x86 >> NODE_ENABLED", + # "params": { + # "target": c.target_linux, + # "vendor": "alpine", + # "arch": c.arch_x86, + # "docker": True, + # "sys_image": "openjdk:8u131-alpine", + # "node_enabled": True, + # }, + # }, + { + "name": "Docker >> linux-x64 >> NODE_ENABLED", + "params": { + "target": c.target_linux, + "arch": c.arch_x64, + "docker": True, + "node_enabled": True, + }, + }, + { + "name": "Docker >> linux-x86 >> NODE_ENABLED", + "params": { + "target": c.target_linux, + "arch": c.arch_x86, + "docker": True, + "node_enabled": True, + }, + }, + # MACOSX builds + { + "name": "Vagrant >> macosx-x64 >> NODE_ENABLED", + "params": { + "target": c.target_macos, + "arch": c.arch_x64, + "vagrant": True, + "node_enabled": True, + }, + }, + { + "name": "Vagrant >> macosx-x86 >> NODE_ENABLED", + "params": { + "target": c.target_macos, + "arch": c.arch_x86, + "vagrant": True, + "node_enabled": True, + }, + }, + # WINDOWS builds + { + "name": "Native >> windows-x64 >> NODE_ENABLED", + "params": { + "target": c.target_win32, + "arch": c.arch_x64, + "node_enabled": True, + }, + }, + # TODO: this build is currently broken due to a Node.js build-system issue + # { + # # see: https://github.com/nodejs/node/issues/13569 + # "name": "Native >> windows-x86 >> NODE_ENABLED", + # "params": { + # "target": c.target_win32, + # "arch": c.arch_x86, + # "node_enabled": True, + # }, + # }, + { + "name": "Docker >> windows-x64 >> NODE_ENABLED", + "params": { + "target": c.target_win32, + "arch": c.arch_x64, + "docker": True, + "node_enabled": True, + }, + }, + { + "name": "Vagrant >> windows-x64 >> NODE_ENABLED", + "params": { + "target": c.target_win32, + "arch": c.arch_x64, + "vagrant": True, + "node_enabled": True, + }, + }, +] diff --git a/build_system/build_constants.py b/build_system/build_constants.py new file mode 100644 index 000000000..0dd72fa6a --- /dev/null +++ b/build_system/build_constants.py @@ -0,0 +1,43 @@ + +import constants as c + +from config_android import android_config +from config_linux import linux_config +from config_macos import macos_config +from config_win32 import win32_config + +build_step_sequence = [ + c.build_node_js, + c.build_j2v8_cmake, + c.build_j2v8_jni, + c.build_j2v8_optimize, + c.build_j2v8_java, + c.build_j2v8_junit, +] + +composite_steps = [ + # composites + c.build_all, + c.build_native, + c.build_j2v8, + # aliases + c.build_java, + c.build_test, +] + +platform_targets = { + c.target_android: android_config, + c.target_linux: linux_config, + c.target_macos: macos_config, + c.target_win32: win32_config, +} + +avail_targets = platform_targets.keys() + +avail_architectures = [ + c.arch_x86, + c.arch_x64, + c.arch_arm, +] + +avail_build_steps = build_step_sequence + composite_steps diff --git a/build_system/build_executor.py b/build_system/build_executor.py new file mode 100644 index 000000000..b850cc65e --- /dev/null +++ b/build_system/build_executor.py @@ -0,0 +1,228 @@ + +import sys + +import cli +import build_constants as bc +import constants as c +import build_utils as utils +from shell_build import ShellBuildSystem + +import immutable + +parsed_steps = set() + +step_handlers = {} + +def atomic_step(step, alias = None): + if (alias is None): + alias = step + + # handle anti-step + step_handlers[alias] = lambda: parsed_steps.add(step) + + # handle anti-step + step_handlers["~" + alias] = lambda: parsed_steps.discard(step) + + # register anti-step in CLI + 
bc.avail_build_steps.append("~" + alias) + +def multi_step(alias, include, exclude = []): + # handle step + step_handlers[alias] = lambda: \ + [step_handlers.get(s)() for s in include] + \ + [step_handlers.get("~" + s)() for s in exclude] + + # handle anti-step + step_handlers["~" + alias] = lambda: \ + [step_handlers.get("~" + s)() for s in include] + \ + [step_handlers.get(s)() for s in exclude] + + # register anti-step in CLI + bc.avail_build_steps.append("~" + alias) + +def init_buildsteps(): + # special alias to include all build steps into one + multi_step(c.build_all, bc.build_step_sequence) + + # atomic steps + for step in list(bc.build_step_sequence): + atomic_step(step) + + # atomic aliases + atomic_step(c.build_j2v8_java, c.build_java) + atomic_step(c.build_j2v8_junit, c.build_test) + + # composite alias: build only the native parts (including nodejs) + multi_step(c.build_native, [ + c.build_node_js, + c.build_j2v8_cmake, + c.build_j2v8_jni, + c.build_j2v8_optimize, + ]) + + # composite alias: build everything except nodejs + multi_step(c.build_j2v8, [c.build_all], [c.build_node_js]) + +def handle_build_step_option(step): + return step_handlers.get(step, raise_unhandled_option(step)) + +def raise_unhandled_option(step): + return lambda: sys.exit("INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step + "\"") + +# initialize the advanced parsing mechanisms for the build CLI +init_buildsteps() + +#----------------------------------------------------------------------- +# Build execution core function +#----------------------------------------------------------------------- +def execute_build(params): + + # if (type(params) is dict): + if (isinstance(params, dict)): + params = cli.BuildParams(params) + + if (params.target is None): + sys.exit("ERROR: No target platform specified") + + if (params.docker and params.vagrant): + sys.exit("ERROR: Choose either Docker or Vagrant for the build, can not use both") + + # this defines the target platform / operating system the build should be run for + build_target = bc.platform_targets.get(params.target) + + target = params.target + cross_id = "docker" if params.docker else "vagrant" if params.vagrant else None + + if (not target in bc.platform_targets): + sys.exit("ERROR: Unrecognized target platform: " + target) + + build_target = bc.platform_targets.get(target) + + if (params.arch is None): + sys.exit("ERROR: No target architecture specified") + + build_architectures = build_target.architectures + + if (not params.arch in build_architectures): + sys.exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target) + + if (params.buildsteps is None): + sys.exit("ERROR: No build-step specified, valid values are: " + ", ".join(bc.avail_build_steps)) + + if (not params.buildsteps is None and not isinstance(params.buildsteps, list)): + params.buildsteps = [params.buildsteps] + + global parsed_steps + parsed_steps.clear() + + for step in params.buildsteps: + handle_build_step_option(step)() + + # force build-steps into defined order (see: http://stackoverflow.com/a/23529016) + parsed_steps = [step for step in bc.build_step_sequence if step in parsed_steps] + + if (len(parsed_steps) == 0): + sys.exit("WARNING: No build-steps to be done ... 
exiting") + + platform_steps = build_target.steps + cross_configs = build_target.cross_configs + + build_cwd = utils.get_cwd() + + cross_cfg = None + + if (cross_id): + if (cross_configs.get(cross_id) is None): + sys.exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_id + "'") + else: + cross_cfg = cross_configs.get(cross_id) + + # if we are the build-instigator (not a cross-compile build-agent) we directly run some initial checks & setups for the build + if (not params.cross_agent): + print "Checking Node.js builtins integration consistency..." + utils.check_node_builtins() + + print "Caching Node.js artifacts..." + curr_node_tag = (params.vendor + "-" if params.vendor else "") + target + "." + params.arch + utils.store_nodejs_output(curr_node_tag, build_cwd) + + def execute_build_step(compiler_inst, build_step): + """Executes an immutable copy of the given build-step configuration""" + # from this point on, make the build-input immutable to ensure consistency across the whole build process + # any actions during the build-step should only be made based on the initial set of variables & conditions + # NOTE: this restriction makes it much more easy to reason about the build-process as a whole + build_step = immutable.freeze(build_step) + compiler_inst.build(build_step) + + # a cross-compile was requested, we just launch the build-environment and then delegate the requested build-process to the cross-compile environment + if (cross_cfg): + cross_compiler = build_target.cross_compiler(cross_id) + + # prepare additional parameters/utils for the build and put them into the build-step config + + cross_cfg.custom_cmd = "python ./build.py " + \ + "--cross-agent " + cross_id + \ + " -t $PLATFORM -a $ARCH " + \ + (" -ne" if params.node_enabled else "") + \ + (" -v " + params.vendor if params.vendor else "") + \ + (" -knl " if params.keep_native_libs else "") + \ + " " + " ".join(parsed_steps) + + # meta-vars & util functions + cross_cfg.compiler = cross_compiler + cross_cfg.inject_env = lambda s: cross_compiler.inject_env(s, cross_cfg) + cross_cfg.target = build_target + + # build params + cross_cfg.arch = params.arch + cross_cfg.file_abi = build_target.file_abi(params.arch) + cross_cfg.no_shutdown = params.no_shutdown + cross_cfg.sys_image = params.sys_image + cross_cfg.vendor = params.vendor + cross_cfg.docker = params.docker + cross_cfg.vagrant = params.vagrant + + execute_build_step(cross_compiler, cross_cfg) + + # run the requested build-steps with the given parameters to produce the build-artifacts + else: + target_compiler = ShellBuildSystem() + target_steps = dict(platform_steps) + + # this is a build-agent for a cross-compile + if (params.cross_agent): + # the cross-compile step dictates which directory will be used to run the actual build + cross_cfg = cross_configs.get(params.cross_agent) + + if (cross_cfg is None): + sys.exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent) + + build_cwd = cross_cfg.build_cwd + + # execute all requested build steps + for step in parsed_steps: + if (not step in target_steps): + print("INFO: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")") + continue + + target_step = target_steps[step] + + # prepare additional parameters/utils for the build and put them into the build-step config + + # meta-vars & util functions + target_step.cross_agent = params.cross_agent + target_step.compiler = target_compiler + 
target_step.inject_env = lambda s: target_compiler.inject_env(s, target_steps[step]) + target_step.target = build_target + + # build params + target_step.arch = params.arch + target_step.file_abi = build_target.file_abi(params.arch) + target_step.node_enabled = params.node_enabled + target_step.build_cwd = build_cwd + target_step.vendor = params.vendor + target_step.docker = params.docker + target_step.vagrant = params.vagrant + target_step.keep_native_libs = params.keep_native_libs + + execute_build_step(target_compiler, target_step) diff --git a/build_system/build_interactive.py b/build_system/build_interactive.py new file mode 100644 index 000000000..a4c578211 --- /dev/null +++ b/build_system/build_interactive.py @@ -0,0 +1,39 @@ +import sys + +import build_configs as bcfg +import build_executor as bex + +def run_interactive_cli(): + idx = 0 + for cfg in bcfg.configs: + print ("[" + str(idx) + "] " + cfg.get("name")) + idx += 1 + print # newline + + # NOTE: argv[1] usually should be -i, therefore we need to consider this arg in all checks + base_arg_count = 2 + + sel_index = \ + int(sys.argv[base_arg_count]) \ + if len(sys.argv) > base_arg_count \ + else input("Select a predefined build-configuration to run: ") + + if not isinstance(sel_index, int) or sel_index < 0 or sel_index > len(bcfg.configs): + sys.exit("ERROR: Must enter a valid test index in the range [0 ... " + str(len(bcfg.configs)) + "]") + + sel_cfg = bcfg.configs[sel_index] + + print ("Building: " + sel_cfg.get("name")) + print # newline + + build_params = sel_cfg.get("params") + + build_steps = \ + sys.argv[base_arg_count + 1:] \ + if len(sys.argv) > base_arg_count + 1 \ + else raw_input("Override build-steps ? (leave empty to run pre-configured steps): ").split() + + if (len(build_steps) > 0): + build_params["buildsteps"] = build_steps + + bex.execute_build(build_params) diff --git a/build_settings.py b/build_system/build_settings.py similarity index 100% rename from build_settings.py rename to build_system/build_settings.py diff --git a/build_system/cross_build.py b/build_system/build_structures.py similarity index 78% rename from build_system/cross_build.py rename to build_system/build_structures.py index 6d43ea3bd..0c70211d2 100644 --- a/build_system/cross_build.py +++ b/build_system/build_structures.py @@ -3,7 +3,8 @@ import os import sys from shutil import copy2 -import build_system.build_utils as utils +import build_settings as s +import build_utils as utils class PlatformConfig(): def __init__(self, name, architectures): @@ -42,15 +43,14 @@ def file_abi(self, arch): file_abi = self.file_abis.get(arch) return file_abi if not file_abi is None else arch -class BuildStep: - def __init__(self, name, platform, build = [], build_cwd = None, host_cwd = None, pre_build_cmd = None): +class BuildStep(object): + def __init__(self, name, platform, build = [], build_cwd = None, host_cwd = None): self.name = name self.platform = platform self.build = build self.build_cwd = build_cwd self.host_cwd = host_cwd self.custom_cmd = None - self.pre_build_cmd = pre_build_cmd class BuildSystem: __metaclass__ = ABCMeta @@ -93,13 +93,30 @@ def exec_cmd(self, cmd, config): def inject_env(self, cmd, config): build_cwd = utils.get_cwd() + vendor = config.vendor return (cmd + # global config variables + .replace("$NODE_VERSION", s.NODE_VERSION) + .replace("$J2V8_VERSION", s.J2V8_VERSION) + .replace("$J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) + + # build specific variables .replace("$BUILD_CWD", config.build_cwd or build_cwd) .replace("$HOST_CWD", 
config.host_cwd or "") .replace("$CWD", build_cwd) .replace("$PLATFORM", config.platform) .replace("$ARCH", config.arch) + .replace("$FILE_ABI", config.file_abi) + .replace("$LIB_EXT", utils.platform_libext(config)) + + # Vendor can be an optional part, + # therefore some additional tricks in the string replacement are needed here + .replace(".$VENDOR", "." + vendor if vendor else "") + .replace("-$VENDOR", "-" + vendor if vendor else "") + .replace("$VENDOR.", vendor + "." if vendor else "") + .replace("$VENDOR-", vendor + "-" if vendor else "") + .replace("$VENDOR", config.vendor or "") ) @abstractmethod diff --git a/build_system/build_utils.py b/build_system/build_utils.py index 394f94799..af2566cc4 100644 --- a/build_system/build_utils.py +++ b/build_system/build_utils.py @@ -1,4 +1,5 @@ import collections +import glob import os import re import shutil @@ -28,25 +29,16 @@ def is_macos(platform): def is_win32(platform): return c.target_win32 in platform -def get_node_branch_version(): - out = execute_to_str("git branch", "node") +def platform_libext(config): + lib_ext = "so" - git_branch_lines = out.splitlines() + if (is_win32(config.platform)): + lib_ext = "dll" - branch_str = next(ifilter(lambda x: x.startswith("*"), git_branch_lines), None) + elif (is_macos(config.platform)): + lib_ext = "dylib" - print "Git active branch: " + branch_str - - branch_match = re.search(r"\* \(HEAD detached at v(.*)\)", branch_str) - - if (branch_match is None): - branch_match = re.search(r"\* \((.*)\)", branch_str) - - if (branch_match is None): - sys.exit("ERROR: Unrecognized branch name format while running 'git branch': " + branch_str) - - branch = branch_match.group(1) - return branch + return lib_ext def execute(cmd, cwd = None): # flush any buffered console output, because popen could block the terminal @@ -152,6 +144,16 @@ def apply_file_template(src, dest, inject_vars_fn): # Sanity check for the builtin node-module links in J2V8 C++ JNI code #----------------------------------------------------------------------- def check_node_builtins(): + node_src = "node/src/" + + # node.js directory is not available + if (not os.path.exists(node_src)): + return + + # building from a pre-built dependency package (does not include c++ source files) + if (len(glob.glob(node_src + ".cc")) == 0): + return + j2v8_jni_cpp_path = "jni/com_eclipsesource_v8_V8Impl.cpp" j2v8_builtins = [] @@ -180,7 +182,6 @@ def check_node_builtins(): j2v8_builtins = [x.group("name") for x in j2v8_builtins if not any(c in x.group(0) for c in comment_tokens)] - node_src = "node/src/" node_builtins = [] for cc_file in os.listdir(node_src): if (not cc_file.endswith(".cc")): diff --git a/build_system/cli.py b/build_system/cli.py new file mode 100644 index 000000000..d4b874269 --- /dev/null +++ b/build_system/cli.py @@ -0,0 +1,112 @@ +import argparse + +import constants as c +import build_constants as bc + +class BuildParams(object): + def __init__(self, d): + self.target = d.get("target") + self.arch = d.get("arch") + self.vendor = d.get("vendor") + self.keep_native_libs = d.get("keep_native_libs") + self.node_enabled = d.get("node_enabled") + self.docker = d.get("docker") + self.vagrant = d.get("vagrant") + self.sys_image = d.get("sys_image") + self.no_shutdown = d.get("no_shutdown") + self.buildsteps = d.get("buildsteps") or c.build_all + + self.cross_agent = None + +def init_args(parser): + # Essential build settings + parser.add_argument("--target", "-t", + help="The build target platform name (must be a valid platform string 
identifier).", + dest="target", + required=True, + choices=sorted(bc.avail_targets)) + + parser.add_argument("--arch", "-a", + help="The build target architecture identifier (the available architectures are also dependent on the selected platform for a build).", + dest="arch", + required=True, + choices=bc.avail_architectures) + + # Optional build settings + parser.add_argument("--vendor", "-v", + help="The operating system vendor (most relevant when building for a specific Linux distribution).", + dest="vendor") + + parser.add_argument("--keep-native-libs", "-knl", + help="Do not delete the native libraries from the Java directories between builds.", + dest="keep_native_libs", + default=False, + action="store_const", + const=True) + + # J2V8 Feature switches + parser.add_argument("--node-enabled", "-ne", + help="Include the Node.js runtime and builtin node-modules for use in J2V8.", + dest="node_enabled", + default=False, + action="store_const", + const=True) + + # Docker / Vagrant cross-compile settings + parser.add_argument("--docker", "-dkr", + help="Run a cross-compile build in a Docker container (all required build-tools are then fully contained & virtualized).", + dest="docker", + default=False, + action="store_const", + const=True) + + parser.add_argument("--vagrant", "-vgr", + help="Run a cross-compile build in a Vagrant virtual machine (all required build-tools are then fully contained & virtualized).", + dest="vagrant", + default=False, + action="store_const", + const=True) + + parser.add_argument("--sys-image", "-img", + help="The operating system image to use as a basis for the virtualized build systems (used in Docker & Vagrant builds).", + dest="sys_image") + + parser.add_argument("--no-shutdown", "-nos", + help="When using a cross-compile environment, do not shutdown any of the components when the build is finished or canceled.", + dest="no_shutdown", + action="store_const", + const=True) + + # Meta-Args + # NOTE: this option is only used internally to distinguish the running of the build script within + # the build-instigator and the actual build-executor (this is relevant when cross-compiling) + parser.add_argument("--cross-agent", + help=argparse.SUPPRESS, + dest="cross_agent", + type=str) + + parser.add_argument("--interactive", "-i", + help="Run the interactive version of the J2V8 build CLI.", + dest="interactive", + default=False, + action="store_const", + const=True) + + parser.add_argument("buildsteps", + help="Pass a single build-step or a list of all the recognized build-steps that should be executed\n" + + "(the order of the steps given to the CLI does not matter, the correct order will be restored internally).\n\n" + + "the fundamental build steps (in order):\n" + + "---------------------------------------\n" + + "\n".join(bc.build_step_sequence) + "\n\n" + + "aliases / combinations of multiple of the above steps:\n" + + "------------------------------------------------------\n" + + "\n".join(bc.composite_steps), + metavar="build-steps", + nargs="*", + default="all", + choices=bc.avail_build_steps) + +def get_parser(): + parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) + init_args(parser) + return parser diff --git a/build_system/cmake_utils.py b/build_system/cmake_utils.py new file mode 100644 index 000000000..cfb3b60b7 --- /dev/null +++ b/build_system/cmake_utils.py @@ -0,0 +1,29 @@ + +# see: https://cmake.org/cmake/help/v2.8.8/cmake.html#opt:-Dvar:typevalue +def setVar(var, value, type = "STRING"): + return " 
-D%(var)s:%(type)s=%(value)s " % locals() + +def setTargetArch(config): + return setVar("J2V8_TARGET_ARCH", config.file_abi) + +def setNodeEnabled(config): + return setVar("J2V8_NODE_ENABLED", "TRUE" if config.node_enabled else "FALSE", "BOOL") + +def setVendor(config): + return setVar("J2V8_VENDOR", config.vendor) if config.vendor else "" + +def setCrossCompile(config): + return setVar("J2V8_CROSS_COMPILE", "TRUE", "BOOL") if config.cross_agent else "" + +def setToolchain(toolchain_file_path): + return setVar("CMAKE_TOOLCHAIN_FILE", toolchain_file_path) + +def setWin32PdbDockerFix(config): + return setVar("J2V8_WIN32_PDB_DOCKER_FIX", "TRUE", "BOOL") if config.cross_agent == "docker" else "" + +def setAllVars(config): + return \ + setCrossCompile(config) + \ + setTargetArch(config) + \ + setVendor(config) + \ + setNodeEnabled(config) diff --git a/build_system/config_android.py b/build_system/config_android.py index 2c0562170..0f33fdf75 100644 --- a/build_system/config_android.py +++ b/build_system/config_android.py @@ -1,17 +1,17 @@ import constants as c -from cross_build import BuildStep, PlatformConfig -from docker_build import DockerBuildSystem +from build_structures import PlatformConfig +from docker_build import DockerBuildSystem, DockerBuildStep import shared_build_steps as u import build_utils as b +import cmake_utils as cmu android_config = PlatformConfig(c.target_android, [c.arch_x86, c.arch_arm]) android_config.set_cross_configs({ - "docker": BuildStep( - name="cross-compile-host", + "docker": DockerBuildStep( platform=c.target_android, host_cwd="$CWD/docker", - build_cwd="/j2v8", + build_cwd="/j2v8" ) }) @@ -44,22 +44,27 @@ def build_node_js(config): android_config.build_step(c.build_node_js, build_node_js) #----------------------------------------------------------------------- def build_j2v8_cmake(config): + cmake_vars = cmu.setAllVars(config) + cmake_toolchain = cmu.setToolchain("$BUILD_CWD/docker/android/android.$ARCH.toolchain.cmake") + return [ - "mkdir -p cmake.out/$PLATFORM.$ARCH", - "cd cmake.out/$PLATFORM.$ARCH", + "mkdir -p " + u.cmake_out_dir, + "cd " + u.cmake_out_dir, "rm -rf CMakeCache.txt CMakeFiles/", """cmake \ -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_TOOLCHAIN_FILE=$BUILD_CWD/docker/android/android.$ARCH.toolchain.cmake \ + %(cmake_vars)s \ + %(cmake_toolchain)s \ ../../ \ - """, + """ + % locals() ] android_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- def build_j2v8_jni(config): return [ - "cd cmake.out/$PLATFORM.$ARCH", + "cd " + u.cmake_out_dir, "make -j4", ] @@ -80,7 +85,9 @@ def build_j2v8_junit(config): test_cmds = \ u.setVersionEnv(config) + \ - u.gradle("connectedCheck --info") + u.gradle("spoon") + # u.gradle("spoon -PtestClass=com.eclipsesource.v8.LibraryLoaderTest,com.eclipsesource.v8.PlatformDetectorTest") + # u.gradle("connectedCheck --info") # we are running a build directly on the host shell if (not config.cross_agent): @@ -95,14 +102,13 @@ def build_j2v8_junit(config): lambda x: x.replace("$TEST_CMDS", " && ".join(test_cmds)) ) - image_arch = config.target.file_abi(config.arch) emu_arch = "-arm" if config.arch == c.arch_arm else "64-x86" b.apply_file_template( "./docker/android/start-emulator.template.sh", "./docker/android/start-emulator.sh", lambda x: x - .replace("$IMG_ARCH", image_arch) + .replace("$IMG_ARCH", config.file_abi) .replace("$EMU_ARCH", emu_arch) ) diff --git a/build_system/config_linux.py b/build_system/config_linux.py index 7d4cd7407..3292fe42d 
100644 --- a/build_system/config_linux.py +++ b/build_system/config_linux.py @@ -1,16 +1,16 @@ import constants as c -from cross_build import BuildStep, PlatformConfig -from docker_build import DockerBuildSystem +from build_structures import PlatformConfig +from docker_build import DockerBuildSystem, DockerBuildStep import shared_build_steps as u +import cmake_utils as cmu linux_config = PlatformConfig(c.target_linux, [c.arch_x86, c.arch_x64]) linux_config.set_cross_configs({ - "docker": BuildStep( - name="cross-compile-host", + "docker": DockerBuildStep( platform=c.target_linux, host_cwd="$CWD/docker", - build_cwd="/j2v8", + build_cwd="/j2v8" ) }) @@ -20,7 +20,7 @@ linux_config.set_file_abis({ c.arch_x64: "x86_64", - c.arch_x86: "x86" + c.arch_x86: "x86_32", }) #----------------------------------------------------------------------- @@ -33,34 +33,47 @@ def build_node_js(config): --dest-cpu=$ARCH \ --without-snapshot \ --enable-static""", - # "make clean", # NOTE: make this an on/off option + # "make clean", # TODO: make this an on/off option "CFLAGS=-fPIC CXXFLAGS=-fPIC make -j4", ] linux_config.build_step(c.build_node_js, build_node_js) #----------------------------------------------------------------------- def build_j2v8_cmake(config): + cmake_vars = cmu.setAllVars(config) + + # NOTE: uses Python string interpolation (see: https://stackoverflow.com/a/4450610) return \ - u.shell("mkdir", "cmake.out/$PLATFORM.$ARCH") + \ - ["cd cmake.out/$PLATFORM.$ARCH"] + \ + u.shell("mkdir", u.cmake_out_dir) + \ + ["cd " + u.cmake_out_dir] + \ u.shell("rm", "CMakeCache.txt CMakeFiles/") + \ - ["cmake ../../"] + u.setJavaHome(config) + \ + ["""cmake \ + -DCMAKE_BUILD_TYPE=Release \ + %(cmake_vars)s \ + ../../ \ + """ + % locals()] linux_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- def build_j2v8_jni(config): return [ - "cd cmake.out/$PLATFORM.$ARCH", + "cd " + u.cmake_out_dir, "make -j4", ] linux_config.build_step(c.build_j2v8_jni, build_j2v8_jni) #----------------------------------------------------------------------- def build_j2v8_optimize(config): - file_abi = config.target.file_abi(config.arch) + # NOTE: execstack / strip are not part of the alpine tools, therefore we just skip this step + if config.vendor == c.vendor_alpine: + return ["echo Skipped..."] + + lib_path = u.outputLibPath(config) return [ - "execstack -c cmake.out/$PLATFORM.$ARCH/libj2v8_linux_" + file_abi + ".so", - "strip --strip-unneeded -R .note -R .comment cmake.out/$PLATFORM.$ARCH/libj2v8_linux_" + file_abi + ".so", + "execstack -c " + lib_path, + "strip --strip-unneeded -R .note -R .comment " + lib_path, ] linux_config.build_step(c.build_j2v8_optimize, build_j2v8_optimize) @@ -70,6 +83,7 @@ def build_j2v8_java(config): u.clearNativeLibs(config) + \ u.copyNativeLibs(config) + \ u.setBuildEnv(config) + \ + u.setJavaHome(config) + \ [u.build_cmd] + \ u.copyOutput(config) diff --git a/build_system/config_macos.py b/build_system/config_macos.py index e6decc607..8226fb28e 100644 --- a/build_system/config_macos.py +++ b/build_system/config_macos.py @@ -1,14 +1,14 @@ import os import constants as c -from cross_build import BuildStep, PlatformConfig -from vagrant_build import VagrantBuildSystem +from build_structures import PlatformConfig +from vagrant_build import VagrantBuildSystem, VagrantBuildStep import shared_build_steps as u +import cmake_utils as cmu macos_config = PlatformConfig(c.target_macos, [c.arch_x86, c.arch_x64]) 
macos_config.set_cross_configs({ - "vagrant": BuildStep( - name="cross-compile-host", + "vagrant": VagrantBuildStep( platform=c.target_macos, host_cwd="$CWD/vagrant/$PLATFORM", build_cwd="/Users/vagrant/j2v8", @@ -22,7 +22,7 @@ macos_config.set_file_abis({ c.arch_x64: "x86_64", - c.arch_x86: "x86" + c.arch_x86: "x86_32", }) #----------------------------------------------------------------------- @@ -35,24 +35,31 @@ def build_node_js(config): --dest-cpu=$ARCH \ --without-snapshot \ --enable-static""", - # "make clean", # NOTE: make this an on/off option "make -j4", ] macos_config.build_step(c.build_node_js, build_node_js) #----------------------------------------------------------------------- def build_j2v8_cmake(config): + cmake_vars = cmu.setAllVars(config) + + # NOTE: uses Python string interpolation (see: https://stackoverflow.com/a/4450610) return \ - u.shell("mkdir", "./cmake.out/$PLATFORM.$ARCH") + \ - ["cd ./cmake.out/$PLATFORM.$ARCH"] + \ + u.shell("mkdir", u.cmake_out_dir) + \ + ["cd " + u.cmake_out_dir] + \ u.shell("rm", "CMakeCache.txt CMakeFiles/") + \ - ["cmake ../../"] + ["""cmake \ + -DCMAKE_BUILD_TYPE=Release \ + %(cmake_vars)s \ + ../../ \ + """ + % locals()] macos_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- def build_j2v8_jni(config): return [ - "cd ./cmake.out/$PLATFORM.$ARCH", + "cd " + u.cmake_out_dir, "make -j4", ] diff --git a/build_system/config_win32.py b/build_system/config_win32.py index 78dde6b71..5b331cafb 100644 --- a/build_system/config_win32.py +++ b/build_system/config_win32.py @@ -1,24 +1,23 @@ import os import constants as c -from cross_build import BuildStep, PlatformConfig -from docker_build import DockerBuildSystem -from vagrant_build import VagrantBuildSystem +from build_structures import PlatformConfig +from docker_build import DockerBuildSystem, DockerBuildStep +from vagrant_build import VagrantBuildSystem, VagrantBuildStep import shared_build_steps as u +import cmake_utils as cmu win32_config = PlatformConfig(c.target_win32, [c.arch_x86, c.arch_x64]) win32_config.set_cross_configs({ - "docker": BuildStep( - name="docker-compile-host", + "docker": DockerBuildStep( platform=c.target_win32, host_cwd="$CWD/docker", - build_cwd="C:/j2v8", + build_cwd="C:/j2v8" ), - "vagrant": BuildStep( - name="vagrant-compile-host", + "vagrant": VagrantBuildStep( platform=c.target_win32, host_cwd="$CWD/vagrant/$PLATFORM", - build_cwd="C:/j2v8", + build_cwd="C:/j2v8" ) }) @@ -29,7 +28,7 @@ win32_config.set_file_abis({ c.arch_x64: "x86_64", - c.arch_x86: "x86" + c.arch_x86: "x86_32", }) #----------------------------------------------------------------------- @@ -42,25 +41,33 @@ def build_node_js(config): win32_config.build_step(c.build_node_js, build_node_js) #----------------------------------------------------------------------- def build_j2v8_cmake(config): + cmake_vars = cmu.setAllVars(config) cmake_gen_suffix = " Win64" if config.arch == c.arch_x64 else "" - cmake_x_compile_flag = "-DJ2V8_CROSS_COMPILE=1" if config.cross_agent else "" - cmake_pdb_fix_flag = "-DJ2V8_WIN32_PDB_DOCKER_FIX=1" if config.cross_agent == "docker" else "" + cmake_pdb_fix_flag = cmu.setWin32PdbDockerFix(config) + + # NOTE: uses Python string interpolation (see: https://stackoverflow.com/a/4450610) return \ - u.shell("mkdir", "cmake.out/$PLATFORM.$ARCH") + \ - ["cd cmake.out\\$PLATFORM.$ARCH"] + \ + u.shell("mkdir", u.cmake_out_dir) + \ + ["cd " + u.cmake_out_dir] + \ u.shell("rm", "CMakeCache.txt 
CMakeFiles/") + \ - ["cmake ..\\..\\ " + cmake_x_compile_flag + " " + cmake_pdb_fix_flag + " -G\"Visual Studio 14 2015" + cmake_gen_suffix + "\""] + ["""cmake \ + ../../ \ + %(cmake_vars)s \ + %(cmake_pdb_fix_flag)s \ + -G"Visual Studio 14 2015%(cmake_gen_suffix)s" + """ + % locals()] win32_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- def build_j2v8_jni(config): # show docker container memory usage / limit - show_mem = ["powershell C:/temp/mem.ps1"] if config.cross_agent == "docker" else [] + show_mem = ["powershell C:/j2v8/docker/win32/mem.ps1"] if config.cross_agent == "docker" else [] return \ show_mem + \ [ - "cd cmake.out\$PLATFORM.$ARCH", + "cd " + u.cmake_out_dir, "msbuild j2v8.sln /property:Configuration=Release", ] + \ show_mem diff --git a/build_system/constants.py b/build_system/constants.py index 115c756bf..eb96102e2 100644 --- a/build_system/constants.py +++ b/build_system/constants.py @@ -4,12 +4,15 @@ target_macos = 'macos' target_win32 = 'win32' +vendor_alpine = 'alpine' +vendor_debian = 'debian' + # target architectures arch_x86 = 'x86' arch_x64 = 'x64' arch_arm = 'arm' -# core build-steps +# atomic build-steps build_node_js = 'nodejs' build_j2v8_cmake = 'j2v8cmake' build_j2v8_jni = 'j2v8jni' @@ -17,10 +20,11 @@ build_j2v8_java = 'j2v8java' build_j2v8_junit = 'j2v8junit' -# build-step aliases / aggregate steps -build_all = 'all' -build_full = 'full' -build_native = 'native' +# aliases build_java = 'java' -build_bundle = 'bundle' build_test = 'test' + +# composites +build_all = 'all' +build_native = 'native' +build_j2v8 = 'j2v8' diff --git a/build_system/docker_build.py b/build_system/docker_build.py index 944c1d659..accd8b187 100644 --- a/build_system/docker_build.py +++ b/build_system/docker_build.py @@ -4,14 +4,19 @@ import subprocess import sys -from cross_build import BuildSystem +from build_structures import BuildSystem, BuildStep import constants as c import build_utils as utils +class DockerBuildStep(BuildStep): + def __init__(self, platform, build_cwd = None, host_cwd = None): + super(DockerBuildStep, self).__init__("docker-build-host", platform, None, build_cwd, host_cwd) + class DockerBuildSystem(BuildSystem): def clean(self, config): try: - self.exec_host_cmd("docker rm -f -v j2v8.$PLATFORM.$ARCH", config) + container_name = self.get_container_name(config) + self.exec_host_cmd("docker rm -f -v " + container_name, config) except subprocess.CalledProcessError: return @@ -44,10 +49,17 @@ def health_check(self, config): except subprocess.CalledProcessError: sys.exit("ERROR: Failed Docker build-system health check, make sure Docker is available and running!") + def get_image_name(self, config): + return "j2v8-$VENDOR-$PLATFORM" + + def get_container_name(self, config): + return "j2v8.$VENDOR.$PLATFORM.$ARCH" + def pre_build(self, config): print ("preparing " + config.platform + "@" + config.arch + " => " + config.name) - docker_stop_str = self.inject_env("docker stop j2v8.$PLATFORM.$ARCH", config) + container_name = self.get_container_name(config) + docker_stop_str = self.inject_env("docker stop " + container_name, config) def cli_exit_event(): if (config.no_shutdown): @@ -58,7 +70,18 @@ def cli_exit_event(): atexit.register(cli_exit_event) - self.exec_host_cmd("docker build -f $PLATFORM/Dockerfile -t \"j2v8-$PLATFORM\" .", config) + args_str = "" + + if (config.sys_image): + args_str += " --build-arg sys_image=" + config.sys_image + + if (config.vendor): + args_str += " --build-arg 
vendor=" + config.vendor + + image_name = self.get_image_name(config) + + print ("Building docker image: " + config.inject_env(image_name)) + self.exec_host_cmd("docker build " + args_str + " -f $PLATFORM/Dockerfile -t \"" + image_name + "\" .", config) def exec_build(self, config): print ("DOCKER running " + config.platform + "@" + config.arch + " => " + config.name) @@ -71,15 +94,20 @@ def exec_build(self, config): build_cmd = config.custom_cmd or (cmd_separator + " ").join(config.build(config)) - memory_option = "" + extra_options = "" # NOTE: the --memory 3g setting is imporant for windows docker builds, # since the windows docker engine defaults to a 1gb limit which is not enough to run the Node.js build with MSBuild if (utils.is_win32(config.platform)): - memory_option = "--memory 3g" + extra_options = "--memory 3g" + else: + extra_options = "--privileged" + + image_name = self.get_image_name(config) + container_name = self.get_container_name(config) - docker_run_str = "docker run " + memory_option + " --privileged -P -v $CWD:" + mount_point + \ - " --name j2v8.$PLATFORM.$ARCH j2v8-$PLATFORM " + shell_invoke + " \"cd $BUILD_CWD" + cmd_separator + " " + build_cmd + "\"" + docker_run_str = "docker run " + extra_options + " -P -v $CWD:" + mount_point + \ + " --name " + container_name + " " + image_name + " " + shell_invoke + " \"cd $BUILD_CWD" + cmd_separator + " " + build_cmd + "\"" docker_run_str = self.inject_env(docker_run_str, config) diff --git a/build_system/immutable.py b/build_system/immutable.py index def4493a9..d480c0879 100644 --- a/build_system/immutable.py +++ b/build_system/immutable.py @@ -1,6 +1,6 @@ # for original source see: http://code.activestate.com/recipes/576527-freeze-make-any-object-immutable/ -immutable_types = set((int, str)) +immutable_types = set((int, str, bool)) class Frozen(object): def __init__(self, value): diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py index 2c3caf409..1cefa255c 100644 --- a/build_system/shared_build_steps.py +++ b/build_system/shared_build_steps.py @@ -6,49 +6,64 @@ import build_settings as s import build_utils as utils -build_cmd = "mvn verify -DskipTests -e" -clean_build_cmd = "mvn clean verify -DskipTests -e" -run_tests_cmd = "mvn test -e" +# TODO: add CLI option to override / pass-in custom maven/gradle args +build_cmd = "mvn clean verify -DskipTests -e" +run_tests_cmd = "mvn test -e"# -Dtest=V8RuntimeNotLoadedTest" + +# the ./ should work fine on all platforms +# IMPORTANT: on MacOSX the ./ prefix is a strict requirement by some CLI commands !!! 
+cmake_out_dir = "./cmake.out/$VENDOR-$PLATFORM.$ARCH/" def gradleCmd(): return "gradlew" if os.name == "nt" else "gradle" def gradle(cmd): return [ - gradleCmd() + " " + cmd, + gradleCmd() + " --daemon " + cmd, ] +def outputLibName(config): + return config.inject_env("libj2v8-$VENDOR-$PLATFORM-$FILE_ABI.$LIB_EXT") + +def outputLibPath(config): + return cmake_out_dir + "/" + outputLibName(config) + def setEnvVar(name, value): if (os.name == "nt"): return ["set \"" + name + "=" + value + "\""] else: return ["export " + name + "=" + value] +def setJavaHome(config): + # NOTE: when running docker alpine-linux builds, we don't want to overwrite JAVA_HOME + if (config.vendor == c.vendor_alpine and config.cross_agent == "docker"): + return [] + + return setEnvVar("JAVA_HOME", "/opt/jdk/jdk1.8.0_131") + def clearNativeLibs(config): - lib_pattern = "src/main/resources/libj2v8_*" + # the CLI can override this step + if (config.keep_native_libs): + print("Native libraries not cleared...") + return [] - if (utils.is_android(config.platform)): - lib_pattern = "src/main/jniLibs/*/libj2v8.so" + def clearLibs(lib_pattern): + libs = glob.glob(lib_pattern) + return [shell("rm", lib)[0] for lib in libs] - libs = glob.glob(lib_pattern) - rm_libs = [shell("rm", lib)[0] for lib in libs] + rm_libs = \ + clearLibs("src/main/resources/libj2v8*") + \ + clearLibs("src/main/jniLibs/*/libj2v8.so") return rm_libs def copyNativeLibs(config): - file_abi = config.target.file_abi(config.arch) - - platform_cmake_out = "cmake.out/" + config.platform + "." + config.arch + "/" - lib_ext = ".so" + platform_cmake_out = config.inject_env(cmake_out_dir) if (utils.is_win32(config.platform)): platform_cmake_out += "Debug/" if hasattr(config, 'debug') and config.debug else "Release/" - lib_ext = ".dll" - - elif (utils.is_macos(config.platform)): - lib_ext = ".dylib" - lib_pattern = platform_cmake_out + "*j2v8_*" + file_abi + lib_ext + lib_pattern = config.inject_env(platform_cmake_out + "*j2v8-*$FILE_ABI.$LIB_EXT") platform_lib_path = glob.glob(lib_pattern) if (len(platform_lib_path) == 0): @@ -60,7 +75,7 @@ def copyNativeLibs(config): lib_target_path = None if (utils.is_android(config.platform)): - lib_target_path = "src/main/jniLibs/" + file_abi # directory path + lib_target_path = config.inject_env("src/main/jniLibs/$FILE_ABI") # directory path copy_cmds += shell("mkdir", lib_target_path) lib_target_path += "/libj2v8.so" # final lib file path else: @@ -73,11 +88,9 @@ def copyNativeLibs(config): return copy_cmds def setBuildEnv(config): - file_abi = config.target.file_abi(config.arch) - return \ setEnvVar("J2V8_PLATFORM_NAME", config.platform) + \ - setEnvVar("J2V8_ARCH_NAME", file_abi) + \ + setEnvVar("J2V8_ARCH_NAME", config.file_abi) + \ setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) def setVersionEnv(config): @@ -85,11 +98,9 @@ def setVersionEnv(config): setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) def copyOutput(config): - file_abi = config.target.file_abi(config.arch) - return \ shell("mkdir", "build.out") + \ - shell("cp", "target/j2v8_" + config.platform + "_" + file_abi + "-" + s.J2V8_FULL_VERSION + ".jar build.out/") + shell("cp", "target/j2v8_$PLATFORM_$FILE_ABI-$J2V8_FULL_VERSION.jar build.out/") def shell(cmd, args): return [ diff --git a/build_system/shell_build.py b/build_system/shell_build.py index 7f1e6c74a..eadeeefd7 100644 --- a/build_system/shell_build.py +++ b/build_system/shell_build.py @@ -1,6 +1,6 @@ import subprocess import sys -from cross_build import BuildSystem +from build_structures import 
BuildSystem import constants as c import build_utils as utils diff --git a/build_system/vagrant_build.py b/build_system/vagrant_build.py index 577dade2f..25e9624f9 100644 --- a/build_system/vagrant_build.py +++ b/build_system/vagrant_build.py @@ -2,7 +2,13 @@ import subprocess import sys import build_utils as utils -from cross_build import BuildSystem +from build_structures import BuildSystem, BuildStep +import shared_build_steps as u + +class VagrantBuildStep(BuildStep): + def __init__(self, platform, build_cwd = None, host_cwd = None, pre_build_cmd = None): + super(VagrantBuildStep, self).__init__("vagrant-build-host", platform, None, build_cwd, host_cwd) + self.pre_build_cmd = pre_build_cmd class VagrantBuildSystem(BuildSystem): def clean(self, config): @@ -17,6 +23,9 @@ def health_check(self, config): def pre_build(self, config): vagrant_start_cmd = "vagrant up" + if (config.sys_image): + vagrant_start_cmd = u.setEnvVar("VAGRANT_SYS_IMAGE", config.sys_image)[0] + utils.host_cmd_sep() + vagrant_start_cmd + if (config.pre_build_cmd): vagrant_start_cmd = config.pre_build_cmd + utils.host_cmd_sep() + vagrant_start_cmd @@ -34,38 +43,13 @@ def cli_exit_event(): def exec_build(self, config): print ("VAGRANT running " + config.platform + "@" + config.arch + " => " + config.name) - # shell = "powershell -c \"cmd /C " if utils.is_win32(config.platform) else "ssh -c " - # cmd_sep = "&& " if utils.is_win32(config.platform) else "; " vagrant_run_cmd = None if (utils.is_win32(config.platform)): - # cmd_sep = "\n" cmd_sep = "; " - # cmd_sep = "&& " build_cmd = config.custom_cmd or cmd_sep.join(config.build(config)) - # V1 build_cmd = self.inject_env("cd $BUILD_CWD" + cmd_sep + build_cmd, config) - - # host_cmd_file = self.inject_env("$HOST_CWD/cmd_temp.bat", config) - # agent_cmd_file = self.inject_env("$BUILD_CWD/vagrant/win32/cmd_temp.bat", config) - - # with open(host_cmd_file, 'w') as f: - # f.write(build_cmd) - - # vagrant_run_cmd = "vagrant powershell -c \"cmd /C " + agent_cmd_file + "\"" - # vagrant_run_cmd = "vagrant powershell -c \"Start-Process cmd.exe -RedirectStandardOutput -NoNewWindow -Wait -ArgumentList @('/C', '" + agent_cmd_file + "')\"" - - # NOTE: working, just the exit code seems off - # vagrant_run_cmd = "vagrant powershell -c \"cmd /C " + agent_cmd_file + " | Out-Host\"" - # vagrant_run_cmd = "vagrant powershell -c \"cmd /C " + agent_cmd_file + "\"" - - # V1 vagrant_run_cmd = "vagrant powershell -c \"Invoke-Command { " + build_cmd + " } -ErrorAction Stop\"" - # vagrant_run_cmd = "vagrant powershell -c \"Set-Location -Path $BUILD_CWD" + cmd_sep + "Invoke-Command -ScriptBlock {" + build_cmd + "} -ErrorAction Stop | Select-Object value\"" - # vagrant_run_cmd = self.inject_env(vagrant_run_cmd, config) - - # vagrant_run_cmd = "vagrant powershell -c \"Invoke-Command { " + agent_cmd_file + " } -NoNewScope -ErrorAction Stop\"" - print "run: " + vagrant_run_cmd else: cmd_sep = "; " build_cmd = config.custom_cmd or cmd_sep.join(config.build(config)) diff --git a/cmake/FindJava.cmake b/cmake/FindJava.cmake index 86693b6dc..4159b7228 100644 --- a/cmake/FindJava.cmake +++ b/cmake/FindJava.cmake @@ -93,4 +93,8 @@ else() set(Java_ROOT "$ENV{JAVA_HOME}") endif() -message ("Java-Root: ${Java_ROOT}") +if ("${Java_ROOT}" STREQUAL "") + message(FATAL_ERROR "Unable to locate Java JDK") +endif() + +message ("Using Java-Root: ${Java_ROOT}") diff --git a/docker/android/.gitignore b/docker/android/.gitignore index 83f1e64a6..55aaa5cd6 100644 --- a/docker/android/.gitignore +++ 
b/docker/android/.gitignore @@ -1,3 +1,4 @@ start-emulator.sh supervisord.conf AndroidManifest.xml +logcat.txt diff --git a/docker/android/Dockerfile b/docker/android/Dockerfile index 428a0ae49..7b9f7aeb4 100644 --- a/docker/android/Dockerfile +++ b/docker/android/Dockerfile @@ -1,4 +1,7 @@ -FROM ubuntu:xenial +# sys_image can be overridden from the CLI +ARG sys_image=debian:jessie + +FROM $sys_image RUN mkdir -p /temp/docker/shared/ WORKDIR /temp/docker/shared/ diff --git a/docker/android/supervisord.template.conf b/docker/android/supervisord.template.conf index 0faa20f1b..7fafa057e 100644 --- a/docker/android/supervisord.template.conf +++ b/docker/android/supervisord.template.conf @@ -45,3 +45,25 @@ stdout_logfile=/dev/stdout stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 + +[program:logcat] +command=/bin/bash -c "/j2v8/docker/android/wait-for-emulator.sh; adb logcat > /j2v8/docker/android/logcat.txt" +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart = false +startretries = 1 +numprocs=1 +stopasgroup=true +killasgroup=true + +[eventlistener:logcat_exit] +command=/j2v8/docker/android/kill_supervisor.py +process_name=tests +events=PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile index 2ba9a37db..929835402 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -1,4 +1,10 @@ -FROM ubuntu:xenial +# sys_image can be overridden from the CLI +ARG sys_image=debian:jessie + +FROM $sys_image + +# vendor can be overridden from the CLI +ARG vendor=debian RUN mkdir -p /temp/docker/shared/ WORKDIR /temp/docker/shared/ @@ -8,12 +14,12 @@ WORKDIR /temp/docker/shared/ # since it will not requrie rebuilding all docker image layers # but just the ones that were affected -COPY ./shared/install.debian.packages.sh /temp/docker/shared -RUN ./install.debian.packages.sh +COPY ./shared/install.${vendor}.packages.sh /temp/docker/shared +RUN ./install.${vendor}.packages.sh COPY ./shared/install.jdk.sh /temp/docker/shared RUN ./install.jdk.sh -ENV JAVA_HOME "/opt/jdk/jdk1.8.0_131" +# ENV JAVA_HOME "/opt/jdk/jdk1.8.0_131" COPY ./shared/install.maven.sh /temp/docker/shared RUN ./install.maven.sh @@ -28,4 +34,4 @@ COPY ./shared/pom.xml /temp WORKDIR /temp RUN export J2V8_PLATFORM_NAME=temp && \ export J2V8_ARCH_NAME=temp && \ - mvn verify -DskipTests || true + mvn clean verify -DskipTests || true diff --git a/docker/shared/install.alpine.packages.sh b/docker/shared/install.alpine.packages.sh new file mode 100644 index 000000000..bae3cd2c8 --- /dev/null +++ b/docker/shared/install.alpine.packages.sh @@ -0,0 +1,16 @@ + +echo "Preparing Alpine packages..." +apk add --update --no-cache \ + git \ + unzip \ + gcc \ + g++ \ + curl \ + file \ + python \ + make \ + cmake \ + wget \ + supervisor \ + bash \ + linux-headers diff --git a/docker/shared/install.jdk.sh b/docker/shared/install.jdk.sh index ad7b55f33..b7d7a0f75 100755 --- a/docker/shared/install.jdk.sh +++ b/docker/shared/install.jdk.sh @@ -1,4 +1,10 @@ +if java_loc="$(type -p javac)" || [ -z "$java_loc" ]; then + echo "JDK already installed, skipping installation..." 
+ echo "Existing JDK location: "$java_loc + exit 0 +fi + # sources: # - https://www.mkyong.com/java/how-to-install-oracle-jdk-8-on-debian/ diff --git a/docker/shared/install.maven.sh b/docker/shared/install.maven.sh index 27dc8eff3..d3758da2d 100755 --- a/docker/shared/install.maven.sh +++ b/docker/shared/install.maven.sh @@ -1,5 +1,6 @@ echo "Preparing Maven..." curl http://www-eu.apache.org/dist/maven/maven-3/3.5.0/binaries/apache-maven-3.5.0-bin.tar.gz -O +mkdir -p /opt tar xzvf apache-maven-3.5.0-bin.tar.gz -C /opt/ chmod -R 777 /opt/apache-maven-3.5.0 diff --git a/docker/win32/Dockerfile b/docker/win32/Dockerfile index 73758324e..f96440a44 100644 --- a/docker/win32/Dockerfile +++ b/docker/win32/Dockerfile @@ -1,101 +1,47 @@ -FROM microsoft/windowsservercore:latest +# sys_image can be overridden from the CLI +ARG sys_image=microsoft/windowsservercore:latest + +FROM $sys_image # SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"] SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop';"] -# copy all utility scripts -COPY ./win32/mem.ps1 C:/temp/ -COPY ./win32/unzip.ps1 C:/temp/ -COPY ./win32/wget.ps1 C:/temp/ - -# Python -# source: https://github.com/docker-library/python/blob/master/2.7/windows/windowsservercore/Dockerfile -ENV PYTHON_VERSION 2.7.13 -ENV PYTHON_RELEASE 2.7.13 - -RUN $url = ('https://www.python.org/ftp/python/{0}/python-{1}.amd64.msi' -f $env:PYTHON_RELEASE, $env:PYTHON_VERSION); \ - Write-Host ('Downloading {0} ...' -f $url); \ - (New-Object System.Net.WebClient).DownloadFile($url, 'python.msi'); \ - \ - Write-Host 'Installing Python ...'; \ -# https://www.python.org/download/releases/2.4/msi/ - Start-Process msiexec -Wait \ - -ArgumentList @( \ - '/i', \ - 'python.msi', \ - '/quiet', \ - '/qn', \ - 'TARGETDIR=C:\Python', \ - 'ALLUSERS=1', \ - 'ADDLOCAL=DefaultFeature,Extensions,TclTk,Tools,PrependPath' \ - ); \ - \ -# the installer updated PATH, so we should refresh our local value - $env:PATH = [Environment]::GetEnvironmentVariable('PATH', [EnvironmentVariableTarget]::Machine); \ - \ - Write-Host 'Verifying install ...'; \ - Write-Host ' python --version'; python --version; \ - \ - Write-Host 'Removing ...'; \ - Remove-Item python.msi -Force; \ - \ - Write-Host 'Complete.'; - -# CMake version -ENV CMAKE_VERSION 3.9.0-rc2 - -# download CMake archive -RUN $url = ('https://cmake.org/files/v3.9/cmake-{0}-win64-x64.zip' -f $env:CMAKE_VERSION); \ - Write-Host ('Downloading {0} ...' -f $url); \ - (New-Object System.Net.WebClient).DownloadFile($url, 'cmake.zip'); \ - Write-Host 'Installing CMake ...'; +# copy all utility scripts before the actual install scripts +COPY ./win32/mem.ps1 C:/j2v8/docker/win32/ +COPY ./win32/unzip.ps1 C:/j2v8/docker/win32/ +COPY ./win32/wget.ps1 C:/j2v8/docker/win32/ -# extract CMake archive -RUN C:/temp/unzip.ps1 "cmake.zip" "." 
+# Python +COPY ./win32/install.python.ps1 C:/j2v8/docker/win32/ +RUN C:/j2v8/docker/win32/install.python.ps1 -# add CMake to path -RUN $env:PATH = (Get-Location | select -ExpandProperty Path)+'\cmake-'+$env:CMAKE_VERSION+'-win64-x64\bin;'+$env:PATH; \ -[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine); +# VS C++ +COPY ./win32/install.vscpp.ps1 C:/j2v8/docker/win32/ +RUN C:/j2v8/docker/win32/install.vscpp.ps1 -# source: https://github.com/friism/dockerfiles/blob/master/vs-build-tools/17/Dockerfile -# install MSBuild & C++ build tools -RUN Invoke-WebRequest "http://go.microsoft.com/fwlink/?LinkId=691126" \ - -OutFile visualcppbuildtools_full.exe -UseBasicParsing ; +# CMake +COPY ./win32/install.cmake.ps1 C:/j2v8/docker/win32/ +RUN C:/j2v8/docker/win32/install.cmake.ps1 -RUN Start-Process -FilePath 'visualcppbuildtools_full.exe' -ArgumentList '/quiet', '/NoRestart' -Wait ; \ - Remove-Item .\visualcppbuildtools_full.exe +# JDK +COPY ./win32/install.jdk.ps1 C:/j2v8/docker/win32/ +RUN C:/j2v8/docker/win32/install.jdk.ps1 -# MSbuild path -# NOTE: can add "\amd64" after "...\Bin" for x64 version of the compiler -RUN $env:PATH = 'C:\Program Files (x86)\MSBuild\14.0\Bin;'+$env:PATH; \ -[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine); - -# download JDK -RUN C:/temp/wget.ps1 \ - http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-windows-x64.exe \ - C:\jdk.exe \ - "oraclelicense=accept-securebackup-cookie" - -RUN Start-Process C:/jdk.exe -Wait \ - -ArgumentList @('/s', 'ADDLOCAL="ToolsFeature,SourceFeature"'); -ENV JAVA_HOME "C:\Program Files\Java\jdk1.8.0_131" +# Maven +COPY ./win32/install.maven.ps1 C:/j2v8/docker/win32/ +RUN C:/j2v8/docker/win32/install.maven.ps1 # NOTE: only needed if using the amd64 version of MSBuild # ENV VCTargetsPath "C:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\v140" -RUN C:/temp/wget.ps1 \ - http://www-eu.apache.org/dist/maven/maven-3/3.5.0/binaries/apache-maven-3.5.0-bin.zip \ - C:\maven.zip - -RUN C:/temp/unzip.ps1 "maven.zip" "." 
- -RUN $env:PATH = 'C:\apache-maven-3.5.0\bin;'+$env:PATH; \ -[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine); +# NOTE: need to remove temporary j2v8 dir, since at the same directory the docker volume will be mounted +RUN Remove-Item -Recurse -Force C:/j2v8 # download the most critical maven dependencies for the build beforehand -# TODO: for some reason this does not work with the win32 docker server image +# TODO: for some reason this does not cache the maven downloads in the win32 docker server image +RUN mkdir C:/temp COPY ./shared/pom.xml C:/temp WORKDIR /temp RUN set J2V8_PLATFORM_NAME=temp; \ set J2V8_ARCH_NAME=temp; \ - Invoke-Command { mvn verify -DskipTests } -ErrorAction SilentlyContinue + Invoke-Command { mvn clean verify -DskipTests } -ErrorAction SilentlyContinue diff --git a/docker/win32/install.python.ps1 b/docker/win32/install.python.ps1 index 5f2144a36..2ca0d84b1 100644 --- a/docker/win32/install.python.ps1 +++ b/docker/win32/install.python.ps1 @@ -1,3 +1,4 @@ +# source: https://github.com/docker-library/python/blob/master/2.7/windows/windowsservercore/Dockerfile $env:PYTHON_VERSION = '2.7.13'; $env:PYTHON_RELEASE = '2.7.13'; diff --git a/nodejs.py b/nodejs.py new file mode 100644 index 000000000..4d6b6d1c8 --- /dev/null +++ b/nodejs.py @@ -0,0 +1,189 @@ +import argparse +import collections +import glob +import os +import sys +import tarfile +import zipfile + +import build_system.constants as c +import build_system.build_utils as utils +import build_system.build_settings as settings + +Command = collections.namedtuple("Command", "aliases function") +DepsDirectory = collections.namedtuple("DepsDirectory", "path include") + +# Command-Line setup +parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) + +#----------------------------------------------------------------------- +def flush_cache(silent = False): + if not silent: + print "[flush-cache]" + + utils.store_nodejs_output(None, ".") + + if not silent: + print "Done" + +cmd_flush_cache = Command( + aliases=["flush-cache", "fc"], + function=flush_cache, +) +#----------------------------------------------------------------------- +def git_init(): + print "[git-init]" + + # TODO: add CLI overide options + # - Node version + # - J2V8 version + + utils.store_nodejs_output(None, ".") + + if (not os.path.exists("node")): + print "Cloning Node.js version: " + settings.NODE_VERSION + # NOTE: autocrlf=false is very important for linux based cross-compiles of Node.js to work on a windows docker host + utils.execute("git clone https://github.com/nodejs/node --config core.autocrlf=false --depth 1 --branch v" + settings.NODE_VERSION) + else: + print "Node.js is already cloned & checked out" + apply_diff(True) + + print "Done" + +cmd_git_init = Command( + aliases=["git-init", "gi"], + function=git_init +) +#----------------------------------------------------------------------- +def package(): + print "[package]" + + platforms = sys.argv[2:] + full = len(platforms) == 0 + + print platforms + return + + # make sure all node.js binaries are stored in the cache before packaging + flush_cache(True) + + # C++ header files + included_paths = [ + DepsDirectory(path="./node/deps/", include=[".h"]), + DepsDirectory(path="./node/src/", include=[".h"]), + ] + + # Android + if (full or c.target_android in platforms): + included_paths += [ + DepsDirectory(path="./node.out/android.arm/", include=["j2v8.node.out", ".o", ".a"]), + DepsDirectory(path="./node.out/android.x86/", 
include=["j2v8.node.out", ".o", ".a"]), + ] + + # Linux + if (full or c.target_linux in platforms): + included_paths += [ + DepsDirectory(path="./node.out/linux.x64/", include=["j2v8.node.out", ".o", ".a"]), + DepsDirectory(path="./node.out/linux.x86/", include=["j2v8.node.out", ".o", ".a"]), + ] + + # MacOSX + if (full or c.target_macos in platforms): + included_paths += [ + DepsDirectory(path="./node.out/macos.x64/", include=["j2v8.node.out", ".a"]), + DepsDirectory(path="./node.out/macos.x86/", include=["j2v8.node.out", ".a"]), + ] + + # Windows + if (full or c.target_win32 in platforms): + included_paths += [ + DepsDirectory(path="./node.out/win32.x64/", include=["j2v8.node.out", ".lib"]), + DepsDirectory(path="./node.out/win32.x86/", include=["j2v8.node.out", ".lib"]), + ] + + with tarfile.open("j2v8-nodejs-deps-" + settings.J2V8_VERSION + ".tar.bz2", "w:bz2") as zipf: + # with zipfile.ZipFile("j2v8-nodejs-deps-" + settings.J2V8_VERSION + ".zip", "w", zipfile.ZIP_DEFLATED) as zipf: + for curr_p in included_paths: + print "zipping " + curr_p.path + dir_path = os.path.normpath(curr_p.path) + + for root, dirs, files in os.walk(dir_path): + for f in files: + file_path = os.path.join(root, f) + + copy_file = False + + for pattern in curr_p.include: + if (file_path.endswith(pattern)): + copy_file = True + break + + if (copy_file): + if (os.stat(file_path).st_size > 1024 * 1024): + print file_path + + # zipf.write(file_path) + zipf.add(file_path) + + print "Done" + +cmd_package = Command( + aliases=["package", "pkg"], + function=package +) +#----------------------------------------------------------------------- +def store_diff(): + print "[store-diff]" + + patch_file = os.path.join("..", "node.patches", settings.NODE_VERSION + ".diff") + print "Storing local changes to patch-file: " + patch_file + + utils.execute("git diff > " + patch_file, "node") + print "Done" + +cmd_store_diff = Command( + aliases=["store-diff", "sd"], + function=store_diff +) +#----------------------------------------------------------------------- +def apply_diff(silent = False): + if not silent: + print "[apply-diff]" + + patch_file = os.path.join("node.patches", settings.NODE_VERSION + ".diff") + + if (os.path.exists(patch_file)): + print "Applying Node.js patch: " + patch_file + utils.execute("git apply " + os.path.join("..", patch_file), "node") + else: + print "No special Node.js patch present for this version" + + if not silent: + print "Done" + +cmd_apply_diff = Command( + aliases=["apply-diff", "ad"], + function=apply_diff +) +#----------------------------------------------------------------------- + +all_cmds = [ + cmd_flush_cache, + cmd_git_init, + cmd_package, + cmd_store_diff, + cmd_apply_diff, +] + +parser.add_argument("cmd", + metavar="command", + nargs=1, + type=str, + choices=[cmd for commands in all_cmds for cmd in commands.aliases]) + +args = parser.parse_args() + +for cmd_tuple in all_cmds: + if (args.cmd[0] in cmd_tuple.aliases): + cmd_tuple.function() + break diff --git a/pom.xml b/pom.xml index 73fb0fb86..24604b0be 100644 --- a/pom.xml +++ b/pom.xml @@ -40,7 +40,7 @@ junit junit - 4.11 + 4.12 test diff --git a/prepare_build.py b/prepare_build.py deleted file mode 100644 index cad7937f2..000000000 --- a/prepare_build.py +++ /dev/null @@ -1,33 +0,0 @@ - -import os -import sys - -import build_system.build_utils as utils -import build_settings as settings - -# TODO: add CLI overide options -# - Node version -# - J2V8 version - -utils.store_nodejs_output(None, ".") - -if (not os.path.exists("node")): 
- print "Cloning Node.js version: " + settings.NODE_VERSION - # NOTE: autocrlf=false is very important for linux based cross-compiles of Node.js to work on a windows docker host - utils.execute("git clone https://github.com/nodejs/node --config core.autocrlf=false --depth 1 --branch v" + settings.NODE_VERSION) -else: - print "Node.js is already cloned & checked out" - branch = utils.get_node_branch_version() - - if (branch != settings.NODE_VERSION): - sys.exit("ERROR: The checked out Node.js version (" + branch + ") does not match the version specified in build_settings.py (" + settings.NODE_VERSION + ")") - -branch_patch_file = os.path.join("node.patches", settings.NODE_VERSION + ".diff") - -if (os.path.exists(branch_patch_file)): - print "Applying Node.js patch: " + branch_patch_file - utils.execute("git apply " + os.path.join("..", branch_patch_file), "node") -else: - print "No special Node.js patch present for this version" - -print "Done" diff --git a/src/main/java/com/eclipsesource/v8/LibraryLoader.java b/src/main/java/com/eclipsesource/v8/LibraryLoader.java index 37d12df0e..4fbf7405c 100644 --- a/src/main/java/com/eclipsesource/v8/LibraryLoader.java +++ b/src/main/java/com/eclipsesource/v8/LibraryLoader.java @@ -7,6 +7,7 @@ * * Contributors: * EclipseSource - initial API and implementation + * Wolfgang Steiner - code separation PlatformDetector/LibraryLoader ******************************************************************************/ package com.eclipsesource.v8; @@ -27,53 +28,86 @@ class LibraryLoader { SEPARATOR = System.getProperty("file.separator"); //$NON-NLS-1$ } - private static String computeLibraryShortName() { - String base = "j2v8"; - String osSuffix = getOS(); - String archSuffix = getArchSuffix(); - return base + "_" + osSuffix + "_" + archSuffix; - } + /** + * Returns the base-name for the native J2V8 library file. + * @param withLinuxVendor include/exclude the {vendor} part from the returned filename + *

+ * NOTE: Vendors are only included for linux systems
+ * @return The filename string has the following structure:
+ * {arch}-[vendor]-{operating_system}
+ */ + public static String computeLibraryShortName(boolean withLinuxVendor) { + String prefix = "j2v8"; + String vendor = withLinuxVendor && PlatformDetector.OS.isLinux() ? PlatformDetector.Vendor.getName() : null; + String os = PlatformDetector.OS.getName(); + String arch = PlatformDetector.Arch.getName(); + + final String separator = "-"; - private static String computeLibraryFullName() { - return "lib" + computeLibraryShortName() + "." + getOSFileExtension(); + return + prefix + + (vendor != null ? separator + vendor : "") + + separator + os + + separator + arch; } - static void loadLibrary(final String tempDirectory) { - if ( isAndroid() ) { - System.loadLibrary("j2v8"); - return; - } - StringBuffer message = new StringBuffer(); - String libShortName = computeLibraryShortName(); - String libFullName = computeLibraryFullName(); - String ideLocation = System.getProperty("user.dir") + SEPARATOR + "jni" + SEPARATOR + computeLibraryFullName(); + public static String computeLibraryFullName(boolean withLinuxVendor) { + return "lib" + computeLibraryShortName(withLinuxVendor) + "." + PlatformDetector.OS.getLibFileExtension(); + } - String path = null; + static boolean tryLoad(boolean withLinuxVendor, StringBuffer message) { + String libShortName = computeLibraryShortName(withLinuxVendor); + String libFullName = computeLibraryFullName(withLinuxVendor); + String ideLocation = System.getProperty("user.dir") + SEPARATOR + "jni" + SEPARATOR + libFullName; /* Try loading library from java library path */ if (load(libFullName, message)) { - return; + return true; } if (load(libShortName, message)) { - return; + return true; } /* Try loading library from the IDE location */ if (new File(ideLocation).exists()) { if (load(ideLocation, message)) { - return; + return true; } } + return false; + } + + static void loadLibrary(final String tempDirectory) { + if (PlatformDetector.OS.isAndroid()) { + System.loadLibrary("j2v8"); + return; + } + + StringBuffer message = new StringBuffer(); + + // try loading a vendor-specific library first + if (tryLoad(true, message)) + return; + + // if there is no vendor-specific library, just try to load the default OS library + if (tryLoad(false, message)) + return; + + String path = null; + if (tempDirectory != null) { path = tempDirectory; } else { path = System.getProperty("java.io.tmpdir"); //$NON-NLS-1$ } - if (extract(path + SEPARATOR + libFullName, libFullName, message)) { + // try extracting a vendor-specific library first + if (extract(path, true, message)) + return; + + // if there is no vendor-specific library, just try to extract the default OS library + if (extract(path, false, message)) return; - } /* Failed to find the library */ throw new UnsatisfiedLinkError("Could not load J2V8 library. 
Reasons: " + message.toString()); //$NON-NLS-1$ @@ -98,6 +132,11 @@ static boolean load(final String libName, final StringBuffer message) { return false; } + static boolean extract(String libPath, boolean withLinuxVendor, StringBuffer message) { + String libFullName = computeLibraryFullName(withLinuxVendor); + return extract(libPath + SEPARATOR + libFullName, libFullName, message); + } + static boolean extract(final String fileName, final String mappedName, final StringBuffer message) { FileOutputStream os = null; InputStream is = null; @@ -144,7 +183,7 @@ static boolean extract(final String fileName, final String mappedName, final Str } static void chmod(final String permision, final String path) { - if (isWindows()) { + if (PlatformDetector.OS.isWindows()) { return; } try { @@ -152,69 +191,4 @@ static void chmod(final String permision, final String path) { } catch (Throwable e) { } } - - static String getOsName() { - return System.getProperty("os.name") + System.getProperty("java.specification.vendor"); - } - - static boolean isWindows() { - return getOsName().startsWith("Windows"); - } - - static boolean isMac() { - return getOsName().startsWith("Mac"); - } - - static boolean isLinux() { - return getOsName().startsWith("Linux"); - } - - static boolean isNativeClient() { - return getOsName().startsWith("nacl"); - } - - static boolean isAndroid() { - return getOsName().contains("Android"); - } - - static String getArchSuffix() { - String arch = System.getProperty("os.arch"); - if (arch.equals("i686")) { - return "x86"; - } else if (arch.equals("amd64")) { - return "x86_64"; - } else if (arch.equals("nacl")) { - return "armv7l"; - } else if (arch.equals("aarch64")) { - return "armv7l"; - } - return arch; - } - - static String getOSFileExtension() { - if (isWindows()) { - return "dll"; - } else if (isMac()) { - return "dylib"; - } else if (isLinux()) { - return "so"; - } else if (isNativeClient()) { - return "so"; - } - throw new UnsatisfiedLinkError("Unsupported platform: " + getOsName()); - } - - static String getOS() { - if (isWindows()) { - return "win32"; - } else if (isMac()) { - return "macosx"; - } else if (isLinux() && !isAndroid()) { - return "linux"; - } else if (isAndroid()) { - return "android"; - } - throw new UnsatisfiedLinkError("Unsupported platform: " + getOsName()); - } - } diff --git a/src/main/java/com/eclipsesource/v8/Platform.java b/src/main/java/com/eclipsesource/v8/Platform.java new file mode 100644 index 000000000..8168935bc --- /dev/null +++ b/src/main/java/com/eclipsesource/v8/Platform.java @@ -0,0 +1,12 @@ +package com.eclipsesource.v8; + +public class Platform { + public static final String ANDROID = "android"; + public static final String LINUX = "linux"; + public static final String MACOSX = "macosx"; + public static final String WINDOWS = "windows"; + + public static final String NATIVE_CLIENT = "nacl"; + + public static final String UNKNOWN = "unknown"; +} diff --git a/src/main/java/com/eclipsesource/v8/PlatformDetector.java b/src/main/java/com/eclipsesource/v8/PlatformDetector.java new file mode 100644 index 000000000..b1c5de324 --- /dev/null +++ b/src/main/java/com/eclipsesource/v8/PlatformDetector.java @@ -0,0 +1,310 @@ +/******************************************************************************* + * Copyright (c) 2017 EclipseSource and others. + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * Trustin Lee - original OS/Arch/Vendor detection code (see: https://github.com/trustin/os-maven-plugin) + * Wolfgang Steiner - initial API and implementation + * + * Copyright 2014 Trustin Heuiseung Lee. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package com.eclipsesource.v8; + +import java.io.BufferedReader; +import java.io.Closeable; +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStreamReader; +import java.io.IOException; +import java.util.HashMap; +import java.util.Locale; + +import com.eclipsesource.v8.Platform; + +public class PlatformDetector { + public static class Arch { + public static String getName() { + final String archProperty = System.getProperty("os.arch"); + final String archName = normalizeArch(archProperty); + + if (archName.equals(Platform.UNKNOWN)) + throw new UnsatisfiedLinkError("Unsupported arch: " + archProperty); + + return archName; + } + } + + public static class OS { + public static String getName() { + final String osProperty = System.getProperty("os.name"); + final String osName = normalizeOs(osProperty); + + final String vendorProperty = System.getProperty("java.specification.vendor"); + final String vendorName = normalize(vendorProperty); + + // special handling for android + if (vendorName.contains("android") || osName.contains("android")) { + return Platform.ANDROID; + } + + if (osName.equals(Platform.UNKNOWN)) + throw new UnsatisfiedLinkError("Unsupported platform/vendor: " + osProperty + " / " + vendorProperty); + + return osName; + } + + public static boolean isWindows() { + return getName().equals(Platform.WINDOWS); + } + + public static boolean isMac() { + return getName().equals(Platform.MACOSX); + } + + public static boolean isLinux() { + return getName().equals(Platform.LINUX); + } + + public static boolean isNativeClient() { + return getName().equals(Platform.NATIVE_CLIENT); + } + + public static boolean isAndroid() { + return getName().equals(Platform.ANDROID); + } + + public static String getLibFileExtension() { + if (isWindows()) + return "dll"; + + if (isMac()) + return "dylib"; + + if (isLinux() + || isAndroid() + || isNativeClient()) + return "so"; + + throw new UnsatisfiedLinkError("Unsupported platform library-extension for: " + getName()); + } + } + + public static class Vendor { + private static final String[] LINUX_OS_RELEASE_FILES = {"/etc/os-release", "/usr/lib/os-release"}; + private static final String REDHAT_RELEASE_FILE = "/etc/redhat-release"; + private static final String LINUX_ID_PREFIX = "ID="; + + public static String getName() { + if (OS.isWindows()) + return "microsoft"; + if (OS.isMac()) + return "apple"; + if (OS.isLinux()) + return 
getLinuxOsReleaseId(); + if (OS.isAndroid()) + return "google"; + + throw new UnsatisfiedLinkError("Unsupported vendor: " + getName()); + } + + private static String getLinuxOsReleaseId() { + // First, look for the os-release file. + for (String osReleaseFileName : LINUX_OS_RELEASE_FILES) { + File file = new File(osReleaseFileName); + if (file.exists()) { + return parseLinuxOsReleaseFile(file); + } + } + + // Older versions of redhat don't have /etc/os-release. In this case, try + // parsing this file. + File file = new File(REDHAT_RELEASE_FILE); + if (file.exists()) { + return parseLinuxRedhatReleaseFile(file); + } + + throw new UnsatisfiedLinkError("Unsupported linux vendor: " + getName()); + } + + private static String parseLinuxOsReleaseFile(File file) { + BufferedReader reader = null; + try { + reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "utf-8")); + + String id = null; + String line; + while((line = reader.readLine()) != null) { + // Parse the ID line. + if (line.startsWith(LINUX_ID_PREFIX)) { + // Set the ID for this version. + id = normalizeOsReleaseValue(line.substring(LINUX_ID_PREFIX.length())); + break; + } + } + + return id; + } catch (IOException ignored) { + // Just absorb. Don't treat failure to read /etc/os-release as an error. + } finally { + closeQuietly(reader); + } + return null; + } + + private static String parseLinuxRedhatReleaseFile(File file) { + BufferedReader reader = null; + try { + reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "utf-8")); + + // There is only a single line in this file. + String line = reader.readLine(); + if (line != null) { + line = line.toLowerCase(Locale.US); + + String id; + if (line.contains("centos")) { + id = "centos"; + } else if (line.contains("fedora")) { + id = "fedora"; + } else if (line.contains("red hat enterprise linux")) { + id = "rhel"; + } else { + // Other variants are not currently supported. + return null; + } + + return id; + } + } catch (IOException ignored) { + // Just absorb. Don't treat failure to read /etc/os-release as an error. + } finally { + closeQuietly(reader); + } + return null; + } + + private static void closeQuietly(Closeable obj) { + try { + if (obj != null) { + obj.close(); + } + } catch (IOException ignored) { + // Ignore. + } + } + } + + private static String normalizeOsReleaseValue(String value) { + // Remove any quotes from the string. 
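+ // (illustrative example) an /etc/os-release entry of ID="ubuntu" arrives here as "ubuntu"
+ // including the quotes and is normalized to plain ubuntu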
+ return value.trim().replace("\"", ""); + } + + private static String normalizeOs(String value) { + value = normalize(value); + if (value.startsWith("aix")) { + return "aix"; + } + if (value.startsWith("hpux")) { + return "hpux"; + } + if (value.startsWith("os400")) { + // Avoid the names such as os4000 + if (value.length() <= 5 || !Character.isDigit(value.charAt(5))) { + return "os400"; + } + } + if (value.startsWith("android")) { + return Platform.ANDROID; + } + if (value.startsWith("linux")) { + return Platform.LINUX; + } + if (value.startsWith("nacl")) { + return Platform.NATIVE_CLIENT; + } + if (value.startsWith("macosx") || value.startsWith("osx")) { + return Platform.MACOSX; + } + if (value.startsWith("freebsd")) { + return "freebsd"; + } + if (value.startsWith("openbsd")) { + return "openbsd"; + } + if (value.startsWith("netbsd")) { + return "netbsd"; + } + if (value.startsWith("solaris") || value.startsWith("sunos")) { + return "sunos"; + } + if (value.startsWith("windows")) { + return Platform.WINDOWS; + } + + return Platform.UNKNOWN; + } + + private static String normalizeArch(String value) { + value = normalize(value); + if (value.matches("^(x8664|amd64|ia32e|em64t|x64)$")) { + return "x86_64"; + } + if (value.matches("^(x8632|x86|i[3-6]86|ia32|x32)$")) { + return "x86_32"; + } + if (value.matches("^(ia64|itanium64)$")) { + return "itanium_64"; + } + if (value.matches("^(sparc|sparc32)$")) { + return "sparc_32"; + } + if (value.matches("^(sparcv9|sparc64)$")) { + return "sparc_64"; + } + if (value.matches("^(arm|arm32)$")) { + return "arm_32"; + } + if ("aarch64".equals(value)) { + return "aarch_64"; + } + if (value.matches("^(ppc|ppc32)$")) { + return "ppc_32"; + } + if ("ppc64".equals(value)) { + return "ppc_64"; + } + if ("ppc64le".equals(value)) { + return "ppcle_64"; + } + if ("s390".equals(value)) { + return "s390_32"; + } + if ("s390x".equals(value)) { + return "s390_64"; + } + + return Platform.UNKNOWN; + } + + private static String normalize(String value) { + if (value == null) { + return ""; + } + return value.toLowerCase(Locale.US).replaceAll("[^a-z0-9]+", ""); + } +} \ No newline at end of file diff --git a/src/main/java/com/eclipsesource/v8/V8.java b/src/main/java/com/eclipsesource/v8/V8.java index c0881e1a3..65d4500cf 100644 --- a/src/main/java/com/eclipsesource/v8/V8.java +++ b/src/main/java/com/eclipsesource/v8/V8.java @@ -249,12 +249,14 @@ private void notifyReferenceDisposed(final V8Value object) { private static void checkNativeLibraryLoaded() { if (!nativeLibraryLoaded) { + String message = "J2V8 native library not loaded (" + LibraryLoader.computeLibraryShortName(true) + ")"; + if (nativeLoadError != null) { - throw new IllegalStateException("J2V8 native library not loaded", nativeLoadError); + throw new IllegalStateException(message, nativeLoadError); } else if (nativeLoadException != null) { - throw new IllegalStateException("J2V8 native library not loaded", nativeLoadException); + throw new IllegalStateException(message, nativeLoadException); } else { - throw new IllegalStateException("J2V8 native library not loaded"); + throw new IllegalStateException(message); } } } diff --git a/src/test/java/com/eclipsesource/v8/A_RunAheadTests.java b/src/test/java/com/eclipsesource/v8/A_RunAheadTests.java new file mode 100644 index 000000000..d6fab17d6 --- /dev/null +++ b/src/test/java/com/eclipsesource/v8/A_RunAheadTests.java @@ -0,0 +1,28 @@ +/******************************************************************************* + * Copyright (c) 2014 EclipseSource and 
others. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * EclipseSource - initial API and implementation + ******************************************************************************/ +package com.eclipsesource.v8; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; + +@RunWith(Suite.class) +// V8RuntimeNotLoadedTest must be run first. This is because we need to test when the natives are not loaded +// and once the V8 class is loaded we cannot unload it. +@SuiteClasses({ V8RuntimeNotLoadedTest.class }) +/** + * IMPORTANT: This class is intentionally prefixed with "A_" because this leads the JUnit test runner + * to run it before all other classes (this behavior is undocumented and could break at any point though) + * @ + */ +public class A_RunAheadTests { + +} diff --git a/src/test/java/com/eclipsesource/v8/AllTests.java b/src/test/java/com/eclipsesource/v8/AllTests.java deleted file mode 100644 index 7b720f269..000000000 --- a/src/test/java/com/eclipsesource/v8/AllTests.java +++ /dev/null @@ -1,44 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2014 EclipseSource and others. - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * EclipseSource - initial API and implementation - ******************************************************************************/ -package com.eclipsesource.v8; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; - -import com.eclipsesource.v8.debug.BreakEventTest; -import com.eclipsesource.v8.debug.DebugHandlerTest; -import com.eclipsesource.v8.debug.ExecutionStateTest; -import com.eclipsesource.v8.debug.FrameTest; -import com.eclipsesource.v8.debug.MirrorTest; -import com.eclipsesource.v8.debug.ScopeTest; -import com.eclipsesource.v8.debug.ScriptBreakPointTest; -import com.eclipsesource.v8.utils.ArrayBufferTest; -import com.eclipsesource.v8.utils.ConcurrentV8Test; -import com.eclipsesource.v8.utils.MemoryManagerTest; -import com.eclipsesource.v8.utils.TypedArrayTest; -import com.eclipsesource.v8.utils.V8ExecutorTest; -import com.eclipsesource.v8.utils.V8MapTest; -import com.eclipsesource.v8.utils.V8ObjectUtilsTest; -import com.eclipsesource.v8.utils.V8PropertyMapTest; - -@RunWith(Suite.class) -// V8RuntimeNotLoadedTest must be run first. This is because we need to test when the natives are not loaded -// and once the V8 class is loaded we cannot unload it. 
-@SuiteClasses({ V8RuntimeNotLoadedTest.class, LibraryLoaderTest.class, V8ObjectTest.class, V8Test.class, V8ArrayTest.class, V8JSFunctionCallTest.class, - V8CallbackTest.class, V8ScriptCompilationExceptionTest.class, V8ScriptExecutionExceptionTest.class, V8ObjectUtilsTest.class, V8TypedArraysTest.class, - V8ArrayBufferTest.class, NullScriptExecuteTest.class, V8MultiThreadTest.class, V8LockerTest.class, V8ExecutorTest.class, V8MapTest.class, - TypedArrayTest.class, ArrayBufferTest.class, ConcurrentV8Test.class, - V8PropertyMapTest.class, DebugHandlerTest.class, ExecutionStateTest.class, FrameTest.class, ScopeTest.class, ScriptBreakPointTest.class, - MirrorTest.class, BreakEventTest.class, MemoryManagerTest.class, NodeJSTest.class }) -public class AllTests { - -} diff --git a/src/test/java/com/eclipsesource/v8/LibraryLoaderTest.java b/src/test/java/com/eclipsesource/v8/LibraryLoaderTest.java index 6b6daed06..5ae5aa801 100644 --- a/src/test/java/com/eclipsesource/v8/LibraryLoaderTest.java +++ b/src/test/java/com/eclipsesource/v8/LibraryLoaderTest.java @@ -7,10 +7,21 @@ * * Contributors: * EclipseSource - initial API and implementation + * Wolfgang Steiner - code separation PlatformDetector/LibraryLoader ******************************************************************************/ package com.eclipsesource.v8; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.File; +import java.io.PrintWriter; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; + +import java.util.HashMap; import org.junit.After; import org.junit.Before; @@ -22,104 +33,136 @@ public class LibraryLoaderTest { private String vendor; private String arch; + private Field releaseFilesField; + private String[] releaseFiles; + + static void makeFinalStaticAccessible(Field field) { + field.setAccessible(true); + + try { + // on certain JVMs this is not present and will throw the exceptions below (e.g. 
the Android Dalvik VM) + Field modifiersField = Field.class.getDeclaredField("modifiers"); + modifiersField.setAccessible(true); + modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); + } + catch (NoSuchFieldException e) {} + catch (IllegalAccessException e) {} + } + @Before - public void setup() { + public void setup() throws Exception { osName = System.getProperty("os.name"); vendor = System.getProperty("java.specification.vendor"); arch = System.getProperty("os.arch"); + + Class vendorClass = PlatformDetector.Vendor.class; + releaseFilesField = vendorClass.getDeclaredField("LINUX_OS_RELEASE_FILES"); + makeFinalStaticAccessible(releaseFilesField); + + releaseFiles = (String[])releaseFilesField.get(null); } @After - public void tearDown() { + public void tearDown() throws Exception { System.setProperty("os.name", osName); System.setProperty("java.specification.vendor", vendor); System.setProperty("os.arch", arch); - } - - @Test - public void testGetOSMac() { - System.setProperty("os.name", "Mac OS X"); - - assertEquals("macosx", LibraryLoader.getOS()); - } - - @Test - public void testGetOSLinux() { - System.setProperty("os.name", "Linux"); - - assertEquals("linux", LibraryLoader.getOS()); - } - @Test - public void testGetOSWindows() { - System.setProperty("os.name", "Windows"); - - assertEquals("win32", LibraryLoader.getOS()); + releaseFilesField.set(null, releaseFiles); } @Test - public void testGetOSAndroid() { - System.setProperty("os.name", "Linux"); - System.setProperty("java.specification.vendor", "The Android Project"); + public void testAndroidLibNameStructure() throws Exception { + System.setProperty("os.name", "Android"); + System.setProperty("java.specification.vendor", "..."); + System.setProperty("os.arch", "x64"); - assertEquals("android", LibraryLoader.getOS()); - } + performTests(Platform.ANDROID, null, ".so"); - @Test - public void testGetOSFileExtensionNativeClient() { - System.setProperty("os.name", "naclthe android project"); - System.setProperty("java.specification.vendor", "The Android Project"); + System.setProperty("os.name", "..."); + System.setProperty("java.specification.vendor", "Android"); + System.setProperty("os.arch", "x64"); - assertEquals("so", LibraryLoader.getOSFileExtension()); + performTests(Platform.ANDROID, null, ".so"); } @Test - public void testGetArchxNaCl() { - System.setProperty("os.arch", "nacl"); + public void testLinuxLibNameStructure() throws Exception { - assertEquals("armv7l", LibraryLoader.getArchSuffix()); - } - - @Test - public void testGetArchaarch64() { - System.setProperty("os.arch", "aarch64"); + // skip this test on android + if (PlatformDetector.OS.isAndroid()) + return; - assertEquals("armv7l", LibraryLoader.getArchSuffix()); - } + System.setProperty("os.name", "Linux"); + System.setProperty("java.specification.vendor", "OSS"); + System.setProperty("os.arch", "x64"); - @Test - public void testGetArchx86() { - System.setProperty("os.arch", "x86"); + final String os_release_test_path = "./test-mockup-os-release"; + final String test_vendor = "linux_vendor"; - assertEquals("x86", LibraryLoader.getArchSuffix()); - } + // mock /etc/os-release file + releaseFilesField.set(null, new String[] { os_release_test_path }); - @Test - public void testGetArchx86_64() { - System.setProperty("os.arch", "x86_64"); + PrintWriter out = new PrintWriter(os_release_test_path); + out.println( + "NAME=The-Linux-Vendor\n" + + "VERSION=\"towel_42\"\n" + + "ID=" + test_vendor + "\n" + + "VERSION_ID=42\n" + ); + out.close(); - 
assertEquals("x86_64", LibraryLoader.getArchSuffix()); + performTests(Platform.LINUX, test_vendor, ".so"); } @Test - public void testGetArchx64FromAmd64() { - System.setProperty("os.arch", "amd64"); + public void testMacOSXLibNameStructure() throws Exception { + System.setProperty("os.name", "MacOSX"); + System.setProperty("java.specification.vendor", "Apple"); + System.setProperty("os.arch", "x64"); - assertEquals("x86_64", LibraryLoader.getArchSuffix()); + performTests(Platform.MACOSX, null, ".dylib"); } @Test - public void testGetArcharmv7l() { - System.setProperty("os.arch", "armv7l"); + public void testWindowsLibNameStructure() throws Exception { + System.setProperty("os.name", "Windows"); + System.setProperty("java.specification.vendor", "Microsoft"); + System.setProperty("os.arch", "x64"); - assertEquals("armv7l", LibraryLoader.getArchSuffix()); + performTests(Platform.WINDOWS, null, ".dll"); } - @Test - public void test686isX86() { - System.setProperty("os.arch", "i686"); - - assertEquals("x86", LibraryLoader.getArchSuffix()); + private void performTests(String expectedOsName, String expectedVendor, String expectedLibExtension) { + // API calls + String libName = LibraryLoader.computeLibraryShortName(true); + String[] parts = libName.split("-"); + + // test assertions + int i = 0; + int expectedParts = expectedVendor != null ? 4 : 3; + assertEquals(expectedParts, parts.length); + assertEquals("j2v8", parts[i++]); + if (expectedVendor != null) + assertEquals(expectedVendor, parts[i++]); + assertEquals(expectedOsName, parts[i++]); + assertEquals("x86_64", parts[i++]); + + // API calls + libName = LibraryLoader.computeLibraryShortName(false); + parts = libName.split("-"); + + // test assertions + assertEquals(3, parts.length); + assertEquals("j2v8", parts[0]); + assertEquals(expectedOsName, parts[1]); + assertEquals("x86_64", parts[2]); + + // API calls + libName = LibraryLoader.computeLibraryFullName(false); + + // test assertions + assertTrue(libName.startsWith("libj2v8")); + assertTrue(libName.endsWith(expectedLibExtension)); } - } diff --git a/src/test/java/com/eclipsesource/v8/PlatformDetectorTest.java b/src/test/java/com/eclipsesource/v8/PlatformDetectorTest.java new file mode 100644 index 000000000..5b7539fe3 --- /dev/null +++ b/src/test/java/com/eclipsesource/v8/PlatformDetectorTest.java @@ -0,0 +1,160 @@ +/******************************************************************************* + * Copyright (c) 2017 EclipseSource and others. + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * EclipseSource - initial API and implementation + * Wolfgang Steiner - code separation PlatformDetector/LibraryLoader + ******************************************************************************/ +package com.eclipsesource.v8; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.File; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class PlatformDetectorTest { + + private String osName; + private String vendor; + private String arch; + + @Before + public void setup() { + osName = System.getProperty("os.name"); + vendor = System.getProperty("java.specification.vendor"); + arch = System.getProperty("os.arch"); + } + + @After + public void tearDown() { + System.setProperty("os.name", osName); + System.setProperty("java.specification.vendor", vendor); + System.setProperty("os.arch", arch); + } + + @Test + public void testGetOSUnknown() { + System.setProperty("os.name", "???"); + System.setProperty("java.specification.vendor", "???"); + + try { + PlatformDetector.OS.getName(); + } catch (Error e) { + assertTrue("Expected UnsatisfiedLinkError", e instanceof UnsatisfiedLinkError); + assertTrue(e.getMessage().startsWith("Unsupported platform/vendor")); + return; + } + fail("Expected exception"); + } + + @Test + public void testGetOSMac() { + System.setProperty("os.name", "Mac OS X"); + System.setProperty("java.specification.vendor", "Apple"); + + assertEquals("macosx", PlatformDetector.OS.getName()); + } + + @Test + public void testGetOSLinux() { + System.setProperty("os.name", "Linux"); + System.setProperty("java.specification.vendor", "OSS"); + + assertEquals("linux", PlatformDetector.OS.getName()); + } + + @Test + public void testGetOSWindows() { + System.setProperty("os.name", "Windows"); + System.setProperty("java.specification.vendor", "Microsoft"); + + assertEquals("windows", PlatformDetector.OS.getName()); + } + + @Test + public void testGetOSAndroid() { + System.setProperty("os.name", "Linux"); + System.setProperty("java.specification.vendor", "The Android Project"); + + assertEquals("android", PlatformDetector.OS.getName()); + } + + @Test + public void testGetOSFileExtensionAndroid() { + System.setProperty("os.name", "naclthe android project"); + System.setProperty("java.specification.vendor", "The Android Project"); + + assertEquals("so", PlatformDetector.OS.getLibFileExtension()); + } + + @Test(expected = UnsatisfiedLinkError.class) + public void testGetArchxNaCl() { + System.setProperty("os.arch", "nacl"); + + PlatformDetector.Arch.getName(); + } + + @Test + public void testGetArchaarch64() { + System.setProperty("os.arch", "aarch64"); + + assertEquals("aarch_64", PlatformDetector.Arch.getName()); + } + + @Test + public void testGetArchx86() { + System.setProperty("os.arch", "x86"); + + assertEquals("x86_32", PlatformDetector.Arch.getName()); + } + + @Test + public void testGetArchx86_64() { + System.setProperty("os.arch", "x86_64"); + + assertEquals("x86_64", PlatformDetector.Arch.getName()); + } + + @Test + public void testGetArchx64FromAmd64() { + System.setProperty("os.arch", "amd64"); + + assertEquals("x86_64", PlatformDetector.Arch.getName()); + } + + @Test(expected = UnsatisfiedLinkError.class) + 
public void testGetArcharmv7l() { + System.setProperty("os.arch", "armv7l"); + + PlatformDetector.Arch.getName(); + } + + @Test + public void test686isX86() { + System.setProperty("os.arch", "i686"); + + assertEquals("x86_32", PlatformDetector.Arch.getName()); + } + + @Test + public void testVendor_Alpine() { + if (!isAlpineLinux()) { + return; + } + + assertEquals("alpine", PlatformDetector.Vendor.getName()); + } + + private boolean isAlpineLinux() { + return new File("/etc/alpine-release").exists(); + } +} diff --git a/src/test/java/com/eclipsesource/v8/V8LockerTest.java b/src/test/java/com/eclipsesource/v8/V8LockerTest.java index 198931e54..563d7af43 100644 --- a/src/test/java/com/eclipsesource/v8/V8LockerTest.java +++ b/src/test/java/com/eclipsesource/v8/V8LockerTest.java @@ -18,13 +18,18 @@ import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.Timeout; public class V8LockerTest { private boolean passed = false; private V8 v8 = null; + @Rule + public Timeout globalTimeout = Timeout.seconds(60); + @Before public void setup() { v8 = V8.createV8Runtime(); @@ -169,6 +174,7 @@ public void testCannotUseReleasedLocker() { fail("Expected exception"); } + // TODO: frozen/deadlock on android @Test public void testBinarySemaphore() throws InterruptedException { v8.getLocker().acquire(); // Lock has been acquired twice diff --git a/src/test/java/com/eclipsesource/v8/V8RuntimeNotLoadedTest.java b/src/test/java/com/eclipsesource/v8/V8RuntimeNotLoadedTest.java index cd40f5098..0dbbb8bff 100644 --- a/src/test/java/com/eclipsesource/v8/V8RuntimeNotLoadedTest.java +++ b/src/test/java/com/eclipsesource/v8/V8RuntimeNotLoadedTest.java @@ -11,6 +11,8 @@ package com.eclipsesource.v8; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertEquals; +import static org.junit.Assume.assumeFalse; import java.lang.reflect.Field; import java.net.URLClassLoader; @@ -32,6 +34,16 @@ public class V8RuntimeNotLoadedTest { private static final String JAVA_LIBRARY_PATH = "java.library.path"; private String existingLibraryPath; + /** + * NOTE: we need to skip these tests, because on Android the SystemClassLoader + * can not be cast to an URLClassLoader and some other issues (see TestClassLoader below) + */ + private static boolean skipTest() { + return PlatformDetector.OS.isAndroid(); + } + + private final static String skipMessage = "Skipped test (not implemented for Android)"; + @Before public void before() throws Exception { existingLibraryPath = System.getProperty(JAVA_LIBRARY_PATH); @@ -45,21 +57,34 @@ public void after() throws Exception { @Test public void testJ2V8NotEnabled() { + assumeFalse(skipMessage, skipTest()); // conditional skip + assertFalse(V8.isLoaded()); } - @Test(expected = IllegalStateException.class) + @Test(expected = UnsatisfiedLinkError.class) public void testJ2V8CannotCreateRuntime() { + assumeFalse(skipMessage, skipTest()); // conditional skip + String oldValue = System.getProperty("os.arch"); System.setProperty("os.arch", "unknown"); try { V8.createV8Runtime(); - } finally { + } + catch (UnsatisfiedLinkError ex) { + assertEquals("Unsupported arch: unknown", ex.getMessage()); + throw ex; + } + finally { System.setProperty("os.arch", oldValue); } } private static void setLibraryPath(final String path) throws Exception { + // we need to skip here too, because "sys_paths" also does not exist on Android + if (skipTest()) + return; + System.setProperty(JAVA_LIBRARY_PATH, path); // set sys_paths to null so that 
java.library.path will be reevalueted next time it is needed @@ -76,6 +101,9 @@ public SeparateClassloaderTestRunner(final Class clazz) throws Initialization private static Class getFromTestClassloader(final Class clazz) throws InitializationError { try { + if (skipTest()) + return clazz; + ClassLoader testClassLoader = new TestClassLoader(); return Class.forName(clazz.getName(), true, testClassLoader); } catch (ClassNotFoundException e) { @@ -85,6 +113,7 @@ private static Class getFromTestClassloader(final Class clazz) throws Init public static class TestClassLoader extends URLClassLoader { public TestClassLoader() { + // TODO: this crashes on Android (see: https://stackoverflow.com/q/31920245) super(((URLClassLoader) getSystemClassLoader()).getURLs()); } diff --git a/store_node_patch.py b/store_node_patch.py deleted file mode 100644 index 4e5f686e4..000000000 --- a/store_node_patch.py +++ /dev/null @@ -1,16 +0,0 @@ - -import os - -import build_system.build_utils as utils - -branch = utils.get_node_branch_version() - -print "Determined branch version name: " + branch - -branch_patch_file = os.path.join("..", "node.patches", branch + ".diff") - -print "Storing local changes to patch-file: " + branch_patch_file - -utils.execute("git diff > " + branch_patch_file, "node") - -print "Done" diff --git a/vagrant/macos/Vagrantfile b/vagrant/macos/Vagrantfile index a6ec02f7a..06ec90ff8 100644 --- a/vagrant/macos/Vagrantfile +++ b/vagrant/macos/Vagrantfile @@ -43,8 +43,7 @@ fs_type = ENV['VAGRANT_FILE_SHARE_TYPE'] || "nfs" Vagrant.configure(2) do |config| - config.vm.box = "http://files.dryga.com/boxes/osx-sierra-0.3.1.box" - # config.vm.box = "AndrewDryga/vagrant-box-osx" + config.vm.box = ENV['VAGRANT_SYS_IMAGE'] || "http://files.dryga.com/boxes/osx-sierra-0.3.1.box" config.vm.hostname = "j2v8.macos.x64" config.vm.provider "virtualbox" do |v| diff --git a/vagrant/win32/Vagrantfile b/vagrant/win32/Vagrantfile index a3cd131d5..eceab6ff4 100644 --- a/vagrant/win32/Vagrantfile +++ b/vagrant/win32/Vagrantfile @@ -6,7 +6,7 @@ fs_type = ENV['VAGRANT_FILE_SHARE_TYPE'] || "virtualbox" Vagrant.configure(2) do |config| - config.vm.box = "Microsoft/EdgeOnWindows10" + config.vm.box = ENV['VAGRANT_SYS_IMAGE'] || "Microsoft/EdgeOnWindows10" config.vm.hostname = "j2v8-win32-x64" # source: https://github.com/danielmenezesbr/modernie-winrm @@ -41,10 +41,10 @@ Vagrant.configure(2) do |config| config.vm.provision :switch_to_winrm - config.vm.provision "install-python", type:"shell", inline: "C:/J2V8/docker/win32/install.python.ps1" - config.vm.provision "install-vsc++", type:"shell", inline: "C:/J2V8/docker/win32/install.vscpp.ps1" - config.vm.provision "install-cmake", type:"shell", inline: "C:/J2V8/docker/win32/install.cmake.ps1" - config.vm.provision "install-jdk", type:"shell", inline: "C:/J2V8/docker/win32/install.jdk.ps1" - config.vm.provision "install-maven", type:"shell", inline: "C:/J2V8/docker/win32/install.maven.ps1" + config.vm.provision "install-python", type:"shell", inline: "C:/j2v8/docker/win32/install.python.ps1" + config.vm.provision "install-vsc++", type:"shell", inline: "C:/j2v8/docker/win32/install.vscpp.ps1" + config.vm.provision "install-cmake", type:"shell", inline: "C:/j2v8/docker/win32/install.cmake.ps1" + config.vm.provision "install-jdk", type:"shell", inline: "C:/j2v8/docker/win32/install.jdk.ps1" + config.vm.provision "install-maven", type:"shell", inline: "C:/j2v8/docker/win32/install.maven.ps1" end From 74221c9b5efed96ce804e6843371191a790d7de9 Mon Sep 17 00:00:00 2001 From: Wolfgang 
Steiner Date: Sun, 30 Jul 2017 23:26:33 +0200 Subject: [PATCH 04/14] set variables in pom.xml directly (no env-vars) - added build-util that directly sets the required build variables into the pom.xml file without using environment variables - reverted gradle sdk- & target-versions back from 19 to 10 - added alias-functions for the polyfill shell commands to shared_build_steps.py - started documenting the build-system modules & functions - fixed git EOL problems for shell/python scripts used by Docker builds --- CMakeLists.txt | 5 +- build.gradle | 6 +- build_system/build_settings.py | 18 +++- build_system/build_structures.py | 11 ++- build_system/build_utils.py | 6 +- build_system/config_linux.py | 8 +- build_system/config_macos.py | 8 +- build_system/config_win32.py | 8 +- build_system/{ => polyfills}/cp.py | 5 +- build_system/{ => polyfills}/mkdir.py | 5 +- build_system/{ => polyfills}/rm.py | 5 +- build_system/shared_build_steps.py | 127 +++++++++++++++++++++----- cmake/Policies.cmake | 2 + docker/android/.gitattributes | 2 + docker/shared/.gitattributes | 1 + pom.xml | 15 ++- 16 files changed, 164 insertions(+), 68 deletions(-) rename build_system/{ => polyfills}/cp.py (57%) rename build_system/{ => polyfills}/mkdir.py (57%) rename build_system/{ => polyfills}/rm.py (73%) create mode 100644 docker/android/.gitattributes create mode 100644 docker/shared/.gitattributes diff --git a/CMakeLists.txt b/CMakeLists.txt index 52cb2b17c..d1a09fd31 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,11 +15,8 @@ endif() cmake_minimum_required(VERSION 3.6) project(j2v8) -# adding cmake directory for includes -set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) - # set up the module path -set(CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake) +set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) include(BuildUtils) include(NodeJsUtils) diff --git a/build.gradle b/build.gradle index fba5986f1..a513b491e 100644 --- a/build.gradle +++ b/build.gradle @@ -41,12 +41,12 @@ dependencies { } android { - compileSdkVersion 19 + compileSdkVersion 10 buildToolsVersion '24.0.3' defaultConfig { - minSdkVersion 19 - targetSdkVersion 19 + minSdkVersion 10 + targetSdkVersion 10 testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" } diff --git a/build_system/build_settings.py b/build_system/build_settings.py index 399e13c14..b2587f50c 100644 --- a/build_system/build_settings.py +++ b/build_system/build_settings.py @@ -1,18 +1,26 @@ - +""" +The values specified here will be used as the single source of truth for +version strings and other global build variables that should be used in +the build scrips (build.py) or the Node.js utility scripts (nodejs.py) +""" #----------------------------------------------------------------------- # Node.js settings #----------------------------------------------------------------------- +# [user-setting] you can change this if you plan to use a different Node.js version for the J2V8 build NODE_VERSION_MAJOR, NODE_VERSION_MINOR, NODE_VERSION_PATCH = 7, 4, 0 +# The Node.js version in the format {major.minor.patch} to be used in other build & utility scripts +NODE_VERSION = '{}.{}.{}'.format(NODE_VERSION_MAJOR, NODE_VERSION_MINOR, NODE_VERSION_PATCH) + #----------------------------------------------------------------------- # J2V8 settings #----------------------------------------------------------------------- + J2V8_VERSION_MAJOR, J2V8_VERSION_MINOR, J2V8_VERSION_PATCH = 4, 8, 0 J2V8_VERSION_SUFFIX = "-SNAPSHOT" 
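
Taken together, the format expressions in this file yield the following concrete values for the settings above (worked example):

```
# worked example of the version strings derived from the settings above
NODE_VERSION      = '{}.{}.{}'.format(7, 4, 0)      # -> "7.4.0"
J2V8_VERSION      = '{}.{}.{}'.format(4, 8, 0)      # -> "4.8.0"
J2V8_FULL_VERSION = J2V8_VERSION + "-SNAPSHOT"      # -> "4.8.0-SNAPSHOT"
```
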
-#----------------------------------------------------------------------- -# Combined & computed values -#----------------------------------------------------------------------- -NODE_VERSION = '{}.{}.{}'.format(NODE_VERSION_MAJOR, NODE_VERSION_MINOR, NODE_VERSION_PATCH) +# The J2V8 version in the format {major.minor.patch} to be used in other build & utility scripts J2V8_VERSION = '{}.{}.{}'.format(J2V8_VERSION_MAJOR, J2V8_VERSION_MINOR, J2V8_VERSION_PATCH) + +# The J2V8 version including a version suffix string (e.g. 1.0.0-SUFFIX) J2V8_FULL_VERSION = J2V8_VERSION + J2V8_VERSION_SUFFIX diff --git a/build_system/build_structures.py b/build_system/build_structures.py index 0c70211d2..f626c2047 100644 --- a/build_system/build_structures.py +++ b/build_system/build_structures.py @@ -5,6 +5,7 @@ from shutil import copy2 import build_settings as s import build_utils as utils +import shared_build_steps as sbs class PlatformConfig(): def __init__(self, name, architectures): @@ -62,11 +63,13 @@ def build(self, config): # clean previous build outputs self.clean(config) - # copy the maven & gradle config file to the docker shared directory - # this allows to pre-fetch most of the maven dependencies before the actual build (e.g. into docker images) - copy2("pom.xml", "./docker/shared") + # copy the maven / gradle config files to the docker shared directory + # this allows Dockerfiles to pre-fetch most of the maven / gradle dependencies before the actual build + # and store downloaded maven / gradle dependencies inside the generated docker images (results in faster builds) copy2("build.gradle", "./docker/shared") copy2("src/main/AndroidManifest.xml", "./docker/android/AndroidManifest.xml") + # use the original pom.xml, but with dummy constant values, this avoids unnecessary rebuilding of docker images + sbs.apply_maven_null_settings(target_pom_path="./docker/shared/pom.xml") # execute all the build stages self.pre_build(config) @@ -116,7 +119,7 @@ def inject_env(self, cmd, config): .replace("-$VENDOR", "-" + vendor if vendor else "") .replace("$VENDOR.", vendor + "." if vendor else "") .replace("$VENDOR-", vendor + "-" if vendor else "") - .replace("$VENDOR", config.vendor or "") + .replace("$VENDOR", vendor or "") ) @abstractmethod diff --git a/build_system/build_utils.py b/build_system/build_utils.py index af2566cc4..8c7334952 100644 --- a/build_system/build_utils.py +++ b/build_system/build_utils.py @@ -9,8 +9,6 @@ import constants as c -import constants - def get_cwd(): return os.getcwd().replace("\\", "/") @@ -78,7 +76,7 @@ def store_nodejs_output(next_node_tag, build_cwd): if (os.path.isdir(out_dir)): if (os.path.exists(curr_tag_file)): - with open(curr_tag_file, 'r') as f: + with open(curr_tag_file, "r") as f: curr_node_tag = f.read() if (curr_node_tag != next_node_tag): @@ -124,7 +122,7 @@ def store_nodejs_output(next_node_tag, build_cwd): if not os.path.exists(out_dir): os.makedirs(out_dir) # ... 
and immediately also create a tag-file so we know what we built later on - with open(curr_tag_file, 'w') as f: + with open(curr_tag_file, "w") as f: f.write(next_node_tag) elif (not next_node_tag is None): diff --git a/build_system/config_linux.py b/build_system/config_linux.py index 3292fe42d..418bb54f0 100644 --- a/build_system/config_linux.py +++ b/build_system/config_linux.py @@ -44,9 +44,9 @@ def build_j2v8_cmake(config): # NOTE: uses Python string interpolation (see: https://stackoverflow.com/a/4450610) return \ - u.shell("mkdir", u.cmake_out_dir) + \ + u.mkdir(u.cmake_out_dir) + \ ["cd " + u.cmake_out_dir] + \ - u.shell("rm", "CMakeCache.txt CMakeFiles/") + \ + u.rm("CMakeCache.txt CMakeFiles/") + \ u.setJavaHome(config) + \ ["""cmake \ -DCMAKE_BUILD_TYPE=Release \ @@ -79,10 +79,11 @@ def build_j2v8_optimize(config): linux_config.build_step(c.build_j2v8_optimize, build_j2v8_optimize) #----------------------------------------------------------------------- def build_j2v8_java(config): + u.apply_maven_config_settings(config) + return \ u.clearNativeLibs(config) + \ u.copyNativeLibs(config) + \ - u.setBuildEnv(config) + \ u.setJavaHome(config) + \ [u.build_cmd] + \ u.copyOutput(config) @@ -91,7 +92,6 @@ def build_j2v8_java(config): #----------------------------------------------------------------------- def build_j2v8_junit(config): return \ - u.setBuildEnv(config) + \ [u.run_tests_cmd] linux_config.build_step(c.build_j2v8_junit, build_j2v8_junit) diff --git a/build_system/config_macos.py b/build_system/config_macos.py index 8226fb28e..d113c3942 100644 --- a/build_system/config_macos.py +++ b/build_system/config_macos.py @@ -45,9 +45,9 @@ def build_j2v8_cmake(config): # NOTE: uses Python string interpolation (see: https://stackoverflow.com/a/4450610) return \ - u.shell("mkdir", u.cmake_out_dir) + \ + u.mkdir(u.cmake_out_dir) + \ ["cd " + u.cmake_out_dir] + \ - u.shell("rm", "CMakeCache.txt CMakeFiles/") + \ + u.rm("CMakeCache.txt CMakeFiles/") + \ ["""cmake \ -DCMAKE_BUILD_TYPE=Release \ %(cmake_vars)s \ @@ -66,10 +66,11 @@ def build_j2v8_jni(config): macos_config.build_step(c.build_j2v8_jni, build_j2v8_jni) #----------------------------------------------------------------------- def build_j2v8_java(config): + u.apply_maven_config_settings(config) + return \ u.clearNativeLibs(config) + \ u.copyNativeLibs(config) + \ - u.setBuildEnv(config) + \ [u.build_cmd] + \ u.copyOutput(config) @@ -77,7 +78,6 @@ def build_j2v8_java(config): #----------------------------------------------------------------------- def build_j2v8_junit(config): return \ - u.setBuildEnv(config) + \ [u.run_tests_cmd] macos_config.build_step(c.build_j2v8_junit, build_j2v8_junit) diff --git a/build_system/config_win32.py b/build_system/config_win32.py index 5b331cafb..00708fae8 100644 --- a/build_system/config_win32.py +++ b/build_system/config_win32.py @@ -47,9 +47,9 @@ def build_j2v8_cmake(config): # NOTE: uses Python string interpolation (see: https://stackoverflow.com/a/4450610) return \ - u.shell("mkdir", u.cmake_out_dir) + \ + u.mkdir(u.cmake_out_dir) + \ ["cd " + u.cmake_out_dir] + \ - u.shell("rm", "CMakeCache.txt CMakeFiles/") + \ + u.rm("CMakeCache.txt CMakeFiles/") + \ ["""cmake \ ../../ \ %(cmake_vars)s \ @@ -75,10 +75,11 @@ def build_j2v8_jni(config): win32_config.build_step(c.build_j2v8_jni, build_j2v8_jni) #----------------------------------------------------------------------- def build_j2v8_java(config): + u.apply_maven_config_settings(config) + return \ u.clearNativeLibs(config) + \ 
u.copyNativeLibs(config) + \ - u.setBuildEnv(config) + \ [u.build_cmd] + \ u.copyOutput(config) @@ -86,7 +87,6 @@ def build_j2v8_java(config): #----------------------------------------------------------------------- def build_j2v8_junit(config): return \ - u.setBuildEnv(config) + \ [u.run_tests_cmd] win32_config.build_step(c.build_j2v8_junit, build_j2v8_junit) diff --git a/build_system/cp.py b/build_system/polyfills/cp.py similarity index 57% rename from build_system/cp.py rename to build_system/polyfills/cp.py index a83c24045..b36a562a8 100644 --- a/build_system/cp.py +++ b/build_system/polyfills/cp.py @@ -1,9 +1,10 @@ +""" +This is a basic cross-platform polyfill for the "cp" shell command +""" import os import sys from shutil import copy2 -# this is a cross-platform polyfill for "cp" - src = sys.argv[1] dst = sys.argv[2] diff --git a/build_system/mkdir.py b/build_system/polyfills/mkdir.py similarity index 57% rename from build_system/mkdir.py rename to build_system/polyfills/mkdir.py index 50277b1de..d48134593 100644 --- a/build_system/mkdir.py +++ b/build_system/polyfills/mkdir.py @@ -1,8 +1,9 @@ +""" +This is a basic cross-platform polyfill for the "mkdir -p" shell command +""" import os import sys -# this is a cross-platform polyfill for "mkdir -p" - directory = sys.argv[1] if not os.path.exists(directory): diff --git a/build_system/rm.py b/build_system/polyfills/rm.py similarity index 73% rename from build_system/rm.py rename to build_system/polyfills/rm.py index ca8ddf65c..14d0024af 100644 --- a/build_system/rm.py +++ b/build_system/polyfills/rm.py @@ -1,9 +1,10 @@ +""" +This is a basic cross-platform polyfill for the "rm" shell command +""" import os import sys import shutil -# this is a cross-platform polyfill for "rm" - items = sys.argv[1:] for item in items: diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py index 1cefa255c..53c5c1b80 100644 --- a/build_system/shared_build_steps.py +++ b/build_system/shared_build_steps.py @@ -1,6 +1,7 @@ import glob import os import sys +import xml.etree.ElementTree as ET import constants as c import build_settings as s @@ -28,6 +29,9 @@ def outputLibName(config): def outputLibPath(config): return cmake_out_dir + "/" + outputLibName(config) +def outputJarName(config): + return config.inject_env("j2v8_$VENDOR-$PLATFORM_$FILE_ABI-$J2V8_FULL_VERSION.jar") + def setEnvVar(name, value): if (os.name == "nt"): return ["set \"" + name + "=" + value + "\""] @@ -41,6 +45,35 @@ def setJavaHome(config): return setEnvVar("JAVA_HOME", "/opt/jdk/jdk1.8.0_131") +def setVersionEnv(config): + return \ + setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) + +def copyOutput(config): + jar_name = outputJarName(config) + + return \ + mkdir("build.out") + \ + cp("target/" + jar_name + " build.out/") + +def shell(cmd, args): + """ + Invokes the cross-platform polyfill for the shell command defined by the 'cmd' parameter + """ + return ["python $CWD/build_system/polyfills/" + cmd + ".py " + args] + +def cp(args): + """Invokes the cross-platform polyfill for the 'cp' shell command""" + return shell("cp", args) + +def mkdir(args): + """Invokes the cross-platform polyfill for the 'mkdir' shell command""" + return shell("mkdir", args) + +def rm(args): + """Invokes the cross-platform polyfill for the 'rm' shell command""" + return shell("rm", args) + def clearNativeLibs(config): # the CLI can override this step if (config.keep_native_libs): @@ -49,7 +82,7 @@ def clearNativeLibs(config): def clearLibs(lib_pattern): libs = 
glob.glob(lib_pattern) - return [shell("rm", lib)[0] for lib in libs] + return [rm(lib)[0] for lib in libs] rm_libs = \ clearLibs("src/main/resources/libj2v8*") + \ @@ -76,33 +109,83 @@ def copyNativeLibs(config): lib_target_path = None if (utils.is_android(config.platform)): lib_target_path = config.inject_env("src/main/jniLibs/$FILE_ABI") # directory path - copy_cmds += shell("mkdir", lib_target_path) + copy_cmds += mkdir(lib_target_path) lib_target_path += "/libj2v8.so" # final lib file path else: lib_target_path = "src/main/resources/" print "copying native lib from: " + platform_lib_path + " to: " + lib_target_path - copy_cmds += shell("cp", platform_lib_path + " " + lib_target_path) + copy_cmds += cp(platform_lib_path + " " + lib_target_path) return copy_cmds -def setBuildEnv(config): - return \ - setEnvVar("J2V8_PLATFORM_NAME", config.platform) + \ - setEnvVar("J2V8_ARCH_NAME", config.file_abi) + \ - setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) - -def setVersionEnv(config): - return \ - setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) - -def copyOutput(config): - return \ - shell("mkdir", "build.out") + \ - shell("cp", "target/j2v8_$PLATFORM_$FILE_ABI-$J2V8_FULL_VERSION.jar build.out/") - -def shell(cmd, args): - return [ - "python $CWD/build_system/" + cmd + ".py " + args, - ] +def apply_maven_null_settings(src_pom_path = "./pom.xml", target_pom_path = None): + maven_settings = { + "properties": { + "os": "undefined", + "arch": "undefined", + }, + "artifactId": "undefined", + "version": "undefined", + "name": "undefined", + } + + apply_maven_settings(maven_settings, src_pom_path, target_pom_path) + +def apply_maven_config_settings(config, src_pom_path = "./pom.xml", target_pom_path = None): + os = config.inject_env("$VENDOR-$PLATFORM") + arch = config.file_abi + version = s.J2V8_FULL_VERSION + name = config.inject_env("j2v8_$VENDOR-$PLATFORM_$FILE_ABI") + + maven_settings = { + "properties": { + "os": os, + "arch": arch, + }, + "artifactId": name, + "version": version, + "name": name, + } + + apply_maven_settings(maven_settings, src_pom_path, target_pom_path) + +def apply_maven_settings(settings, src_pom_path = "./pom.xml", target_pom_path = None): + #----------------------------------------------------------------------- + pom_ns = "http://maven.apache.org/POM/4.0.0" + ns = {"pom": pom_ns} + #----------------------------------------------------------------------- + def __recurse_maven_settings(settings, callback, curr_path = None): + if (curr_path is None): + curr_path = [] + + for key in settings: + value = settings.get(key) + + curr_path.append(key) + + if isinstance(value, dict): + __recurse_maven_settings(value, callback, curr_path) + else: + callback(curr_path, value) + + curr_path.pop() + #----------------------------------------------------------------------- + def __handle_setting(path, value): + xpath = "." + "/pom:".join([""] + path) + node = root.find(xpath, ns) + node.text = value + return + #----------------------------------------------------------------------- + + target_pom_path = target_pom_path or src_pom_path + + print "Updating Maven configuration (" + target_pom_path + ")..." 
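
The one non-obvious detail in `__handle_setting` is how a nested settings path turns into a namespaced XPath query. A small self-contained sketch of the same ElementTree round-trip, assuming a `pom.xml` in the working directory that defines `<properties><os>` in the default Maven namespace (as the project file in this repository does):

```
import xml.etree.ElementTree as ET

pom_ns = "http://maven.apache.org/POM/4.0.0"
ns = {"pom": pom_ns}

tree = ET.parse("pom.xml")
root = tree.getroot()

# a nested settings path such as ["properties", "os"] becomes a namespaced XPath
path = ["properties", "os"]
xpath = "." + "/pom:".join([""] + path)   # -> "./pom:properties/pom:os"

root.find(xpath, ns).text = "alpine-linux"
tree.write("pom.xml", default_namespace=pom_ns)
```
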
+ + tree = ET.parse(src_pom_path) + root = tree.getroot() + + __recurse_maven_settings(settings, __handle_setting) + + tree.write(target_pom_path, default_namespace=pom_ns) diff --git a/cmake/Policies.cmake b/cmake/Policies.cmake index 4cceb6f69..66d10b441 100644 --- a/cmake/Policies.cmake +++ b/cmake/Policies.cmake @@ -2,11 +2,13 @@ if (COMMAND cmake_policy) #{ # NEW = Libraries linked by full-path must have a valid library file name. + # see: https://cmake.org/cmake/help/v3.0/policy/CMP0008.html if (POLICY CMP0008) cmake_policy (SET CMP0008 NEW) endif (POLICY CMP0008) # NEW = Included scripts do automatic cmake_policy PUSH and POP. + # see: https://cmake.org/cmake/help/v3.0/policy/CMP0011.html if (POLICY CMP0011) cmake_policy (SET CMP0011 NEW) endif(POLICY CMP0011) diff --git a/docker/android/.gitattributes b/docker/android/.gitattributes new file mode 100644 index 000000000..fc631f17c --- /dev/null +++ b/docker/android/.gitattributes @@ -0,0 +1,2 @@ +*.sh eol=lf +*.py eol=lf diff --git a/docker/shared/.gitattributes b/docker/shared/.gitattributes new file mode 100644 index 000000000..50ca329f2 --- /dev/null +++ b/docker/shared/.gitattributes @@ -0,0 +1 @@ +*.sh eol=lf diff --git a/pom.xml b/pom.xml index 24604b0be..c409b634b 100644 --- a/pom.xml +++ b/pom.xml @@ -1,19 +1,18 @@ - + 4.0.0 UTF-8 - ${env.J2V8_PLATFORM_NAME} + alpine-linux gtk - ${env.J2V8_ARCH_NAME} + x86_64 com.eclipsesource.j2v8 - j2v8_${os}_${arch} - ${env.J2V8_FULL_VERSION} + j2v8_alpine-linux_x86_64 + 4.8.0-SNAPSHOT bundle - j2v8_${os}_${arch} + j2v8_alpine-linux_x86_64 J2V8 is a set of Java bindings for V8 https://github.com/eclipsesource/j2v8 @@ -159,4 +158,4 @@ - + \ No newline at end of file From 138a693c60bd65d7e21b5b29703b4c29cd6c798b Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Tue, 1 Aug 2017 22:08:45 +0200 Subject: [PATCH 05/14] show progress percentage on `nodejs.py pkg` --- .gitignore | 2 +- build.py | 1 - docker/linux/Dockerfile | 4 +- docker/win32/Dockerfile | 4 +- nodejs.py | 131 +++++++++++++++++++++++++++------------- 5 files changed, 91 insertions(+), 51 deletions(-) diff --git a/.gitignore b/.gitignore index 437d72873..83118ed52 100644 --- a/.gitignore +++ b/.gitignore @@ -34,4 +34,4 @@ node.out test-mockup-os-release # generated dependency packages -j2v8-dependencies-* +*.tar.* diff --git a/build.py b/build.py index 2323b9b15..08d53cf3a 100644 --- a/build.py +++ b/build.py @@ -1,7 +1,6 @@ """ This script should be invoked directly via the CLI to start a J2V8 build """ - import sys import build_system.cli as cli diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile index 929835402..f20eb1ba0 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -32,6 +32,4 @@ ENV PATH "$PATH:/opt/cmake/bin" # download the most critical maven dependencies for the build beforehand COPY ./shared/pom.xml /temp WORKDIR /temp -RUN export J2V8_PLATFORM_NAME=temp && \ - export J2V8_ARCH_NAME=temp && \ - mvn clean verify -DskipTests || true +RUN mvn clean verify -DskipTests || true diff --git a/docker/win32/Dockerfile b/docker/win32/Dockerfile index f96440a44..098b0626f 100644 --- a/docker/win32/Dockerfile +++ b/docker/win32/Dockerfile @@ -42,6 +42,4 @@ RUN Remove-Item -Recurse -Force C:/j2v8 RUN mkdir C:/temp COPY ./shared/pom.xml C:/temp WORKDIR /temp -RUN set J2V8_PLATFORM_NAME=temp; \ - set J2V8_ARCH_NAME=temp; \ - Invoke-Command { mvn clean verify -DskipTests } -ErrorAction SilentlyContinue +RUN Invoke-Command { mvn clean verify -DskipTests } -ErrorAction SilentlyContinue diff --git 
a/nodejs.py b/nodejs.py index 4d6b6d1c8..b3f5ca7ad 100644 --- a/nodejs.py +++ b/nodejs.py @@ -1,15 +1,44 @@ +""" +Utility script to manage the Node.js dependency +""" import argparse import collections +import fnmatch import glob +import io import os import sys import tarfile import zipfile import build_system.constants as c +import build_system.build_constants as bc import build_system.build_utils as utils import build_system.build_settings as settings +# helper classes to show zipping progress +# original idea: https://stackoverflow.com/a/3668977/425532 +class ReadProgressFileObject(io.FileIO): + current_read = 0 + def __init__(self, path, *args, **kwargs): + io.FileIO.__init__(self, path, *args, **kwargs) + + def read(self, size): + b = io.FileIO.read(self, size) + ReadProgressFileObject.current_read += len(b) + return b + +class WriteProgressFileObject(io.FileIO): + def __init__(self, path, size, *args, **kwargs): + self._total_size = size + io.FileIO.__init__(self, path, *args, **kwargs) + + def write(self, b): + progress = min(100.0, ReadProgressFileObject.current_read / (self._total_size * 0.01)) + sys.stdout.write("\r[%3.2f%%] " %(progress)) + sys.stdout.flush() + return io.FileIO.write(self, b) + Command = collections.namedtuple("Command", "aliases function") DepsDirectory = collections.namedtuple("DepsDirectory", "path include") @@ -61,51 +90,59 @@ def package(): platforms = sys.argv[2:] full = len(platforms) == 0 - print platforms - return - # make sure all node.js binaries are stored in the cache before packaging flush_cache(True) # C++ header files - included_paths = [ - DepsDirectory(path="./node/deps/", include=[".h"]), - DepsDirectory(path="./node/src/", include=[".h"]), - ] - - # Android - if (full or c.target_android in platforms): - included_paths += [ - DepsDirectory(path="./node.out/android.arm/", include=["j2v8.node.out", ".o", ".a"]), - DepsDirectory(path="./node.out/android.x86/", include=["j2v8.node.out", ".o", ".a"]), - ] - - # Linux - if (full or c.target_linux in platforms): - included_paths += [ - DepsDirectory(path="./node.out/linux.x64/", include=["j2v8.node.out", ".o", ".a"]), - DepsDirectory(path="./node.out/linux.x86/", include=["j2v8.node.out", ".o", ".a"]), - ] - - # MacOSX - if (full or c.target_macos in platforms): - included_paths += [ - DepsDirectory(path="./node.out/macos.x64/", include=["j2v8.node.out", ".a"]), - DepsDirectory(path="./node.out/macos.x86/", include=["j2v8.node.out", ".a"]), - ] - - # Windows - if (full or c.target_win32 in platforms): - included_paths += [ - DepsDirectory(path="./node.out/win32.x64/", include=["j2v8.node.out", ".lib"]), - DepsDirectory(path="./node.out/win32.x86/", include=["j2v8.node.out", ".lib"]), - ] - - with tarfile.open("j2v8-nodejs-deps-" + settings.J2V8_VERSION + ".tar.bz2", "w:bz2") as zipf: + # NOTE: see https://stackoverflow.com/a/4851555/425532 why this weird syntax is necessary here + dependencies = { + "list": [ + DepsDirectory(path="./node/deps/", include=[".h"]), + DepsDirectory(path="./node/src/", include=[".h"]), + ], + "size": 0, + } + + def __add_platform_deps(platform, include, vendor = None): + target = bc.platform_targets.get(platform) + vendor_str = (vendor + "-" if vendor else "") + selected = (vendor_str + platform) in platforms + + if (full or selected): + dependencies["list"] += [ + DepsDirectory( + path="./node.out/" + vendor_str + platform + "." 
+ arch + "/", + include=["j2v8.node.out"] + include + ) for arch in target.architectures + ] + + # speciffy the platforms & file patterns that should be included + __add_platform_deps(c.target_android, [".o", ".a"]) + __add_platform_deps(c.target_linux, [".o", ".a"]) + __add_platform_deps(c.target_linux, [".o", ".a"], vendor = c.vendor_alpine) + __add_platform_deps(c.target_macos, [".a"]) + __add_platform_deps(c.target_win32, [".lib"]) + + # could be a package for an individual platform, or a complete package + package_platform = platforms[0] + "-" if len(platforms) == 1 else "" + package_filename = "j2v8-nodejs-deps-" + package_platform + settings.J2V8_VERSION + ".tar.bz2" + + # determine the uncompressed total size of all included files + for dep in dependencies["list"]: + print "scan " + dep.path + for root, dirs, filenames in os.walk(dep.path): + for pattern in dep.include: + for file_name in fnmatch.filter(filenames, '*' + pattern): + file_path = os.path.join(root, file_name) + dependencies["size"] += os.path.getsize(file_path) + + # start zipping the package + with tarfile.open(fileobj=WriteProgressFileObject(package_filename, dependencies["size"], "w"), mode="w:bz2") as zipf: + # with tarfile.open(package_filename, "w:bz2") as zipf: # with zipfile.ZipFile("j2v8-nodejs-deps-" + settings.J2V8_VERSION + ".zip", "w", zipfile.ZIP_DEFLATED) as zipf: - for curr_p in included_paths: - print "zipping " + curr_p.path - dir_path = os.path.normpath(curr_p.path) + for dep in dependencies["list"]: + print "compress " + dep.path + dir_path = os.path.normpath(dep.path) for root, dirs, files in os.walk(dir_path): for f in files: @@ -113,19 +150,23 @@ def package(): copy_file = False - for pattern in curr_p.include: + for pattern in dep.include: if (file_path.endswith(pattern)): copy_file = True break if (copy_file): - if (os.stat(file_path).st_size > 1024 * 1024): + # only show files > 1 MB + if (os.path.getsize(file_path) > 1024 * 1024): print file_path # zipf.write(file_path) - zipf.add(file_path) + # zipf.add(file_path) + info = zipf.gettarinfo(file_path) + zipf.addfile(info, ReadProgressFileObject(file_path)) print "Done" + print "generated: " + package_filename cmd_package = Command( aliases=["package", "pkg"], @@ -181,6 +222,10 @@ def apply_diff(silent = False): type=str, choices=[cmd for commands in all_cmds for cmd in commands.aliases]) +parser.add_argument("rest", + nargs="*", + help=argparse.SUPPRESS) + args = parser.parse_args() for cmd_tuple in all_cmds: From 2520075955bd34b1c4423c2fae91a46bf733cdd3 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Wed, 2 Aug 2017 18:57:15 +0200 Subject: [PATCH 06/14] extended documentation & code polishing --- BUILDING.md | 72 ++++++++++-- build.py | 2 +- build_system/build_configs.py | 6 + build_system/build_constants.py | 63 +++++++--- build_system/build_executor.py | 179 +++++++++++++++++------------ build_system/build_interactive.py | 1 + build_system/build_settings.py | 4 +- build_system/build_structures.py | 42 ++++--- build_system/build_utils.py | 34 ++++-- build_system/cli.py | 16 ++- build_system/cmake_utils.py | 1 + build_system/constants.py | 2 + build_system/shared_build_steps.py | 18 +++ nodejs.py | 4 +- 14 files changed, 313 insertions(+), 131 deletions(-) diff --git a/BUILDING.md b/BUILDING.md index 5c3bbefef..cf563ee61 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -1,3 +1,42 @@ +# Build-System CLI + +## Non-interactive +``` +python build.py -h, --help + +usage: build.py [-h] --target {android,linux,macos,win32} --arch {x86,x64,arm} + 
[--vendor VENDOR] [--keep-native-libs] [--node-enabled] + [--docker] [--vagrant] [--sys-image SYS_IMAGE] [--no-shutdown] + [--interactive] + [build-steps [build-steps ...]] +``` +``` +python build.py -v alpine -t linux -a x64 -dkr -img openjdk:8u131-alpine -ne j2v8 +``` + +## Interactive +``` +python build.py --i, --interactive + +entering interactive mode... + +[0] Docker >> android-x86 >> NODE_ENABLED +[1] Docker >> android-arm >> NODE_ENABLED +[2] Docker >> alpine-linux-x64 >> NODE_ENABLED +[3] Docker >> linux-x64 >> NODE_ENABLED +[4] Docker >> linux-x86 >> NODE_ENABLED +[5] Vagrant >> macosx-x64 >> NODE_ENABLED +[6] Vagrant >> macosx-x86 >> NODE_ENABLED +[7] Native >> windows-x64 >> NODE_ENABLED +[8] Docker >> windows-x64 >> NODE_ENABLED +[9] Vagrant >> windows-x64 >> NODE_ENABLED + +Select a predefined build-configuration to run: 2 +Building: Docker >> alpine-linux-x64 >> NODE_ENABLED + +Override build-steps ? (leave empty to run pre-configured steps): j2v8 +``` + # Build-Steps The J2V8 build-system performs several build steps in a fixed order to produce the final J2V8 packages for usage on the designated target platforms. What follows is a short summary for what each of the executed build-steps does and what output artifacts are produced by each step. @@ -5,7 +44,8 @@ The J2V8 build-system performs several build steps in a fixed order to produce t --- ## Node.js -Builds the [Node.js](https://nodejs.org/en/) & [V8](https://developers.google.com/v8/) dependency artifacts that are later linked against by the J2V8 native bridge code. +Builds the [Node.js](https://nodejs.org/en/) & [V8](https://developers.google.com/v8/) dependency artifacts that are later linked into the J2V8 native bridge code. +(only works if the Node.js source was checked out into the J2V8 `./node` directory) __Inputs:__ - Node.js source code @@ -22,7 +62,7 @@ __Artifacts:__ --- ## CMake -Uses [CMake](https://cmake.org/) to generate the native Makefiles / IDE project files to later build the J2V8 C++ native bridge shared libraries (.so/.dylib/.dll) +Uses [CMake](https://cmake.org/) to generate the native Makefiles / IDE project files to later build the J2V8 C++ native bridge shared libraries. __Inputs__: - Node.js / V8 static link libraries @@ -37,7 +77,7 @@ __Artifacts:__ --- ## JNI -The previously generated Makefiles / IDE project files are used to compile and link the J2V8 C++ source code, which provides the JNI bridge to interop between the Java code and the C++ code of Node.js / V8. +Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8. __Inputs__: - CMake generated Makefiles / IDE Project-files @@ -49,16 +89,34 @@ __Artifacts:__ - J2V8 native shared libraries - `./cmake.out/{platform}.{architecture}/libj2v8-[vendor-]{platform}-{abi}.{ext}` - e.g. `./cmake.out/linux.x64/libj2v8-alpine-linux-x86_64.so` -- The built shared libraries will also be automatically copied to the required Java / Android project directories to be included in the .jar/.aar packages that will be built later. - - `./src/main/resources/` (Java) - - `./src/main/jniLibs/{abi}/libj2v8.so` (Android) +--- +## Optimize + +The native J2V8 libraries are optimized for performance and/or filesize by using the available tools of the target-platform / compiler-toolchain. + +__Inputs__: +- unoptimized J2V8 native shared libraries + - `./cmake.out/{platform}.{architecture}/libj2v8-[vendor-]{platform}-{abi}.{ext}` + - e.g. 
`./cmake.out/linux.x64/libj2v8-alpine-linux-x86_64.so` +- platform-specific optimization tools: + - Android: - + - Linux: `execstack`, `strip` + - MacOSX: - + - Windows: - + +__Artifacts:__ +- optimized J2V8 native shared libraries + - `./cmake.out/{platform}.{architecture}/libj2v8-[vendor-]{platform}-{abi}.{ext}` + - e.g. `./cmake.out/linux.x64/libj2v8-alpine-linux-x86_64.so` --- ## Java / Android Compiles the Java source code and packages it, including the previously built native libraries, into the final package artifacts. For the execution of this build-step [Maven](https://maven.apache.org/) (Java) or [Gradle](https://gradle.org/) (Android) are used for the respective target platforms. __Inputs__: -- J2V8 native shared libraries +- J2V8 native shared libraries (will be automatically copied to the required Java / Android project directories to be included in the .jar/.aar packages) + - `./src/main/resources/` (Java) + - `./src/main/jniLibs/{abi}/libj2v8.so` (Android) - J2V8 Java source code - `./src/main/` - J2V8 Java test source code diff --git a/build.py b/build.py index 08d53cf3a..52f732ecc 100644 --- a/build.py +++ b/build.py @@ -9,7 +9,7 @@ # interactive shell entrypoint if (len(sys.argv) >= 2 and sys.argv[1] in ["--interactive", "-i"]): - print "entering interactive mode...\n" + print "\nentering interactive mode...\n" interactive.run_interactive_cli() # passive command-line entrypoint else: diff --git a/build_system/build_configs.py b/build_system/build_configs.py index 09fec5924..dcf8b4e1b 100644 --- a/build_system/build_configs.py +++ b/build_system/build_configs.py @@ -1,4 +1,10 @@ +""" +This file contains the collection of build-configurations that are available +for selection when running the build.py script with the --interactive, -i parameter. +Parameters for the build can be specified by their variable-name ("dest" defined in the cli.py arguments). +An array of build-steps can also be specified here, if none are specified then "all" steps will be run. 
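
For illustration, a single entry in the `configs` list could take roughly the following shape; the exact keys are the argparse `dest` names from cli.py, so treat the structure shown here as an assumption rather than the literal format:

```
configs = [
    {
        # shown in the interactive menu, e.g. "Docker >> alpine-linux-x64 >> NODE_ENABLED"
        "name": "Docker >> alpine-linux-x64 >> NODE_ENABLED",
        "params": {
            "target": c.target_linux,             # --target
            "vendor": c.vendor_alpine,            # --vendor
            "arch": c.arch_x64,                   # --arch
            "docker": True,                       # --docker
            "sys_image": "openjdk:8u131-alpine",  # --sys-image
            "node_enabled": True,                 # --node-enabled
            # "buildsteps": [c.build_j2v8],       # optional; defaults to running all steps
        },
    },
]
```
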
+""" import constants as c configs = [ diff --git a/build_system/build_constants.py b/build_system/build_constants.py index 0dd72fa6a..d3875d989 100644 --- a/build_system/build_constants.py +++ b/build_system/build_constants.py @@ -1,3 +1,7 @@ +""" +Contains the essential lists/map structures that are referenced by the build process & CLI +""" +import collections import constants as c @@ -6,38 +10,59 @@ from config_macos import macos_config from config_win32 import win32_config -build_step_sequence = [ - c.build_node_js, - c.build_j2v8_cmake, - c.build_j2v8_jni, - c.build_j2v8_optimize, - c.build_j2v8_java, - c.build_j2v8_junit, +CLIStep = collections.namedtuple("CLIStep", "id help") + +#----------------------------------------------------------------------- +# Build-steps lists, maps and sequences +#----------------------------------------------------------------------- +atomic_build_steps = [ + CLIStep(c.build_node_js, " Builds the Node.js & V8 dependency artifacts that are later linked into the J2V8 native bridge code.\n" + + " (only works if the Node.js source was checked out into the J2V8 ./node directory)"), + CLIStep(c.build_j2v8_cmake, " Uses CMake to generate the native Makefiles / IDE project files to later build the J2V8 C++ native bridge shared libraries."), + CLIStep(c.build_j2v8_jni, " Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8."), + CLIStep(c.build_j2v8_optimize, " The native J2V8 libraries are optimized for performance and/or filesize by using the available tools of the target-platform / compiler-toolchain."), + CLIStep(c.build_j2v8_java, " Compiles the Java source code and packages it, including the previously built native libraries, into the final package artifacts.\n" + + " For the execution of this build-step Maven (Java) or Gradle (Android) are used for the respective target platforms."), + CLIStep(c.build_j2v8_junit, " Runs the Java (JUnit) unit tests."), ] -composite_steps = [ - # composites - c.build_all, - c.build_native, - c.build_j2v8, - # aliases - c.build_java, - c.build_test, +# build_steps_help = dict(atomic_build_steps) + +atomic_build_step_sequence = [s.id for s in atomic_build_steps] + +advanced_steps = [ + # atomic aliases + CLIStep(c.build_java, " Alias for " + c.build_j2v8_java), + CLIStep(c.build_test, " Alias for " + c.build_j2v8_junit), + + # multi-step aliases + CLIStep(c.build_all, " Run all build steps."), + CLIStep(c.build_native, " Build only the native parts. 
(includes nodejs)"), + CLIStep(c.build_j2v8, " Run all build steps to build J2V8 (this does not try to build Node.js)\n" + + " This is useful when building with a pre-compiled Node.js dependency package."), ] -platform_targets = { +# advanced_steps_help = dict(advanced_steps) + +advanced_steps_list = [s.id for s in advanced_steps] + + +avail_build_steps = atomic_build_step_sequence + advanced_steps_list + +#----------------------------------------------------------------------- +# Build execution core function +#----------------------------------------------------------------------- +platform_configs = { c.target_android: android_config, c.target_linux: linux_config, c.target_macos: macos_config, c.target_win32: win32_config, } -avail_targets = platform_targets.keys() +avail_targets = platform_configs.keys() avail_architectures = [ c.arch_x86, c.arch_x64, c.arch_arm, ] - -avail_build_steps = build_step_sequence + composite_steps diff --git a/build_system/build_executor.py b/build_system/build_executor.py index b850cc65e..67ad87b57 100644 --- a/build_system/build_executor.py +++ b/build_system/build_executor.py @@ -6,53 +6,70 @@ import constants as c import build_utils as utils from shell_build import ShellBuildSystem - import immutable +# collection of all parsed build-steps that will then be passed on to the core build function +# (this list must only contain atomic steps after all step evaluations are finished) parsed_steps = set() -step_handlers = {} +# a registry/dictionary of evaluation-functions that translate from their corresponding step/alias +# into the list of atomic build-steps (see parsed_steps above) +step_evaluators = {} + +#----------------------------------------------------------------------- +# Advanced build-step parsing (anti-steps, multi-steps) +#----------------------------------------------------------------------- def atomic_step(step, alias = None): + """ + Atomic build-steps are just directly forwarded to the build-executor. + This function will also automatically add an additional anti-step with a "~" prefix. + """ if (alias is None): alias = step - # handle anti-step - step_handlers[alias] = lambda: parsed_steps.add(step) + # add step handler (step => step) + step_evaluators[alias] = lambda: parsed_steps.add(step) - # handle anti-step - step_handlers["~" + alias] = lambda: parsed_steps.discard(step) + # add anti-step handler (step => ~step) + step_evaluators["~" + alias] = lambda: parsed_steps.discard(step) - # register anti-step in CLI + # register additional anti-step in CLI bc.avail_build_steps.append("~" + alias) def multi_step(alias, include, exclude = []): - # handle step - step_handlers[alias] = lambda: \ - [step_handlers.get(s)() for s in include] + \ - [step_handlers.get("~" + s)() for s in exclude] - - # handle anti-step - step_handlers["~" + alias] = lambda: \ - [step_handlers.get("~" + s)() for s in include] + \ - [step_handlers.get(s)() for s in exclude] - - # register anti-step in CLI + """ + Forwards a collection/sequence of build-steps to the build-executor when + the defined step alias name was detected. Also the inverted anti-steps sequence + will be evaluated if the "~" prefixed alias is recognized. + """ + # add aliased step-sequence (alias => step1, step2, ... , stepN) + step_evaluators[alias] = lambda: \ + [step_evaluators.get(s)() for s in include] + \ + [step_evaluators.get("~" + s)() for s in exclude] + + # add aliased anti-step-sequence (~alias => ~step1, ~step2, ... 
, ~stepN) + step_evaluators["~" + alias] = lambda: \ + [step_evaluators.get("~" + s)() for s in include] + \ + [step_evaluators.get(s)() for s in exclude] + + # register additional anti-step in CLI bc.avail_build_steps.append("~" + alias) def init_buildsteps(): - # special alias to include all build steps into one - multi_step(c.build_all, bc.build_step_sequence) + """Setup of all available build-step atomics & combinations""" + # special alias to group all build steps into a single one + multi_step(c.build_all, bc.atomic_build_step_sequence) # atomic steps - for step in list(bc.build_step_sequence): + for step in list(bc.atomic_build_step_sequence): atomic_step(step) # atomic aliases atomic_step(c.build_j2v8_java, c.build_java) atomic_step(c.build_j2v8_junit, c.build_test) - # composite alias: build only the native parts (including nodejs) + # multi-step alias: build only the native parts (includes nodejs) multi_step(c.build_native, [ c.build_node_js, c.build_j2v8_cmake, @@ -60,24 +77,41 @@ def init_buildsteps(): c.build_j2v8_optimize, ]) - # composite alias: build everything except nodejs - multi_step(c.build_j2v8, [c.build_all], [c.build_node_js]) + # multi-step alias: build everything that belongs to J2V8 (excludes Node.js) + # this is useful when building J2V8 with a pre-compiled Node.js dependency package + multi_step(c.build_j2v8, [c.build_all], [c.build_node_js, c.build_j2v8_junit]) -def handle_build_step_option(step): - return step_handlers.get(step, raise_unhandled_option(step)) +def evaluate_build_step_option(step): + """Find the registered evaluator function for the given step and execute it""" + step_eval_func = step_evaluators.get(step, raise_unhandled_option(step)) + step_eval_func() def raise_unhandled_option(step): return lambda: sys.exit("INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step + "\"") -# initialize the advanced parsing mechanisms for the build CLI +# initialize the advanced parsing evaluation handlers for the build.py CLI init_buildsteps() #----------------------------------------------------------------------- # Build execution core function #----------------------------------------------------------------------- def execute_build(params): - - # if (type(params) is dict): + """ + Receives an params-object with all the necessary build-settings to start + building the J2V8 artifacts. There are two paths internally that this function will take: + + A) Run the build in the same OS shell environment that the build.py command was started from. + This means you have to make sure all the necessary build utensils are installed on your system. + To find out what is needed to build on a particular platform you can have a look in the "docker" + and "vagrant" directories, they contain shell scripts that show how to install all the things + you need if you would want to set up a build environment manually on your machine. + + B) Use virtualization technologies to run a sandboxed build-environment that does not rely + on your machine having installed any of the required build-tools natively. This also allows + to cross-compile mostly all supported platforms independently of the host operating system that + you are running on your machine (only Docker and/or Vagrant are required to run this). 
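For reference, a minimal sketch of driving this function programmatically (path B, Docker). The dictionary keys are the parameter names accepted by BuildParams; the literal string values for the target, architecture and build-step are assumptions based on the constants and examples used elsewhere in this patch:

```python
# minimal sketch: start a Dockerized Linux x64 build via the Python API instead of the CLI
# (assumes it is run from the J2V8 repository root, like build.py itself)
import build_executor as bex

bex.execute_build({
    "target": "linux",          # -t linux
    "arch": "x64",              # -a x64
    "docker": True,             # -dkr -> run inside the virtualized build environment (path B)
    "node_enabled": True,       # -ne
    "buildsteps": ["j2v8"],     # the "j2v8" multi-step alias (excludes the Node.js build)
})
```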
+ """ + # convert from a dictionary form to the normalized params-object form if (isinstance(params, dict)): params = cli.BuildParams(params) @@ -87,23 +121,20 @@ def execute_build(params): if (params.docker and params.vagrant): sys.exit("ERROR: Choose either Docker or Vagrant for the build, can not use both") - # this defines the target platform / operating system the build should be run for - build_target = bc.platform_targets.get(params.target) - target = params.target - cross_id = "docker" if params.docker else "vagrant" if params.vagrant else None - if (not target in bc.platform_targets): + if (not target in bc.platform_configs): sys.exit("ERROR: Unrecognized target platform: " + target) - build_target = bc.platform_targets.get(target) + # this defines the PlatformConfig / operating system the build should be run for + target_platform = bc.platform_configs.get(target) if (params.arch is None): sys.exit("ERROR: No target architecture specified") - build_architectures = build_target.architectures + avail_architectures = target_platform.architectures - if (not params.arch in build_architectures): + if (not params.arch in avail_architectures): sys.exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target) if (params.buildsteps is None): @@ -115,27 +146,31 @@ def execute_build(params): global parsed_steps parsed_steps.clear() + # go through the raw list of build-steps (given by the CLI or an API call) + # and generate a list of only the atomic build-steps that were derived in the evaluation for step in params.buildsteps: - handle_build_step_option(step)() + evaluate_build_step_option(step) - # force build-steps into defined order (see: http://stackoverflow.com/a/23529016) - parsed_steps = [step for step in bc.build_step_sequence if step in parsed_steps] + # force build-steps into their pre-defined order (see: http://stackoverflow.com/a/23529016) + parsed_steps = [step for step in bc.atomic_build_step_sequence if step in parsed_steps] if (len(parsed_steps) == 0): sys.exit("WARNING: No build-steps to be done ... exiting") - platform_steps = build_target.steps - cross_configs = build_target.cross_configs - build_cwd = utils.get_cwd() cross_cfg = None + cross_configs = target_platform.cross_configs - if (cross_id): - if (cross_configs.get(cross_id) is None): - sys.exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_id + "'") + cross_sys = "docker" if params.docker else "vagrant" if params.vagrant else None + + # if a recognized cross-compile option was specified by the params + # try to find the configuration parameters to run the cross-compiler + if (cross_sys): + if (cross_configs.get(cross_sys) is None): + sys.exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_sys + "'") else: - cross_cfg = cross_configs.get(cross_id) + cross_cfg = cross_configs.get(cross_sys) # if we are the build-instigator (not a cross-compile build-agent) we directly run some initial checks & setups for the build if (not params.cross_agent): @@ -146,48 +181,49 @@ def execute_build(params): curr_node_tag = (params.vendor + "-" if params.vendor else "") + target + "." 
+ params.arch utils.store_nodejs_output(curr_node_tag, build_cwd) - def execute_build_step(compiler_inst, build_step): - """Executes an immutable copy of the given build-step configuration""" + def execute_build_step(build_system, build_step): + """Creates an immutable copy of a single BuildStep configuration and executes it in the build-system""" # from this point on, make the build-input immutable to ensure consistency across the whole build process # any actions during the build-step should only be made based on the initial set of variables & conditions - # NOTE: this restriction makes it much more easy to reason about the build-process as a whole + # NOTE: this restriction makes it much more easy to reason about the build-process as a whole (see "unidirectional data flow") build_step = immutable.freeze(build_step) - compiler_inst.build(build_step) + build_system.build(build_step) - # a cross-compile was requested, we just launch the build-environment and then delegate the requested build-process to the cross-compile environment + # a cross-compile was requested, we just launch the virtualization-environment and then delegate + # the originally requested build parameters to the cross-compile environment then running the build.py CLI if (cross_cfg): - cross_compiler = build_target.cross_compiler(cross_id) - - # prepare additional parameters/utils for the build and put them into the build-step config + cross_compiler = target_platform.cross_compiler(cross_sys) + # invoke the build.py CLI within the virtualized / self-contained build-system provider cross_cfg.custom_cmd = "python ./build.py " + \ - "--cross-agent " + cross_id + \ + "--cross-agent " + cross_sys + \ " -t $PLATFORM -a $ARCH " + \ (" -ne" if params.node_enabled else "") + \ (" -v " + params.vendor if params.vendor else "") + \ (" -knl " if params.keep_native_libs else "") + \ " " + " ".join(parsed_steps) - # meta-vars & util functions + # apply meta-vars & util functions cross_cfg.compiler = cross_compiler cross_cfg.inject_env = lambda s: cross_compiler.inject_env(s, cross_cfg) - cross_cfg.target = build_target + cross_cfg.target = target_platform - # build params + # apply essential build params cross_cfg.arch = params.arch - cross_cfg.file_abi = build_target.file_abi(params.arch) + cross_cfg.file_abi = target_platform.file_abi(params.arch) cross_cfg.no_shutdown = params.no_shutdown cross_cfg.sys_image = params.sys_image cross_cfg.vendor = params.vendor cross_cfg.docker = params.docker cross_cfg.vagrant = params.vagrant + # start the cross-compile execute_build_step(cross_compiler, cross_cfg) - # run the requested build-steps with the given parameters to produce the build-artifacts + # run the requested build-steps & parameters in the current shell environment else: target_compiler = ShellBuildSystem() - target_steps = dict(platform_steps) + build_steps = dict(target_platform.steps) # this is a build-agent for a cross-compile if (params.cross_agent): @@ -199,25 +235,23 @@ def execute_build_step(compiler_inst, build_step): build_cwd = cross_cfg.build_cwd - # execute all requested build steps + # execute all steps from a list that parsed / evaluated before (see the "build-step parsing" section above) for step in parsed_steps: - if (not step in target_steps): + if (not step in build_steps): print("INFO: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")") continue - target_step = target_steps[step] - - # prepare additional parameters/utils for the build and put them into 
the build-step config + target_step = build_steps[step] - # meta-vars & util functions + # apply meta-vars & util functions target_step.cross_agent = params.cross_agent target_step.compiler = target_compiler - target_step.inject_env = lambda s: target_compiler.inject_env(s, target_steps[step]) - target_step.target = build_target + target_step.inject_env = lambda s: target_compiler.inject_env(s, build_steps[step]) + target_step.target = target_platform - # build params + # apply essential build params target_step.arch = params.arch - target_step.file_abi = build_target.file_abi(params.arch) + target_step.file_abi = target_platform.file_abi(params.arch) target_step.node_enabled = params.node_enabled target_step.build_cwd = build_cwd target_step.vendor = params.vendor @@ -225,4 +259,5 @@ def execute_build_step(compiler_inst, build_step): target_step.vagrant = params.vagrant target_step.keep_native_libs = params.keep_native_libs + # run the current BuildStep execute_build_step(target_compiler, target_step) diff --git a/build_system/build_interactive.py b/build_system/build_interactive.py index a4c578211..30e1f1722 100644 --- a/build_system/build_interactive.py +++ b/build_system/build_interactive.py @@ -1,3 +1,4 @@ +"""Provides a simple interactive CLI to start a selected build from a given set of build-configurations""" import sys import build_configs as bcfg diff --git a/build_system/build_settings.py b/build_system/build_settings.py index b2587f50c..561da3878 100644 --- a/build_system/build_settings.py +++ b/build_system/build_settings.py @@ -1,7 +1,7 @@ """ The values specified here will be used as the single source of truth for -version strings and other global build variables that should be used in -the build scrips (build.py) or the Node.js utility scripts (nodejs.py) +version strings and other globally shared build variables that should be +used in the build-process (build.py) or the Node.js utility scripts (nodejs.py) """ #----------------------------------------------------------------------- # Node.js settings diff --git a/build_system/build_structures.py b/build_system/build_structures.py index f626c2047..40a71b5df 100644 --- a/build_system/build_structures.py +++ b/build_system/build_structures.py @@ -1,3 +1,5 @@ +"""Contains the fundamental data-structures that are used for the build-process""" + from abc import ABCMeta, abstractmethod import commands import os @@ -8,6 +10,7 @@ import shared_build_steps as sbs class PlatformConfig(): + """Configuration container for all values that are defined for a single target-platform""" def __init__(self, name, architectures): self.name = name self.architectures = architectures @@ -45,6 +48,7 @@ def file_abi(self, arch): return file_abi if not file_abi is None else arch class BuildStep(object): + """Configuration capsule for all values that are defined for a well-defined step in the build pipeline""" def __init__(self, name, platform, build = [], build_cwd = None, host_cwd = None): self.name = name self.platform = platform @@ -54,10 +58,11 @@ def __init__(self, name, platform, build = [], build_cwd = None, host_cwd = None self.custom_cmd = None class BuildSystem: + """The functional compositor and abstract base-class for any concrete build-system implementation""" __metaclass__ = ABCMeta def build(self, config): - # perform the health check for the build system first + # perform the health check for this build-system first self.health_check(config) # clean previous build outputs @@ -65,10 +70,12 @@ def build(self, config): # copy the maven / 
gradle config files to the docker shared directory # this allows Dockerfiles to pre-fetch most of the maven / gradle dependencies before the actual build - # and store downloaded maven / gradle dependencies inside the generated docker images (results in faster builds) + # and store downloaded maven / gradle dependencies inside the generated docker images + # (results in faster builds/less network traffic) copy2("build.gradle", "./docker/shared") copy2("src/main/AndroidManifest.xml", "./docker/android/AndroidManifest.xml") - # use the original pom.xml, but with dummy constant values, this avoids unnecessary rebuilding of docker images + # use the original pom.xml, but with some never changing dummy parameter values. + # this avoids unnecessary rebuilding of docker images (some pom.xml changes are mandatory during the J2V8 build) sbs.apply_maven_null_settings(target_pom_path="./docker/shared/pom.xml") # execute all the build stages @@ -77,24 +84,31 @@ def build(self, config): self.post_build(config) def exec_host_cmd(self, cmd, config): - cmd = self.inject_env(cmd, config) - dir = None - - if (config.host_cwd is not None): - dir = self.inject_env(config.host_cwd, config) - - utils.execute(cmd, dir) + """Execute a shell-command on the host system (injects $CWD as the location of the J2V8 source directory""" + self.__exec_cmd_core(cmd, config, config.host_cwd) def exec_cmd(self, cmd, config): + """ + Execute a shell-command in the current shell environment (could be native or inside a virtualized system) + On the native host-system, $CWD will be set to the location of the J2V8 source directory. + Running inside a virtualized system, $CWD will be set to the path configured in the cross-compiler settings. + """ + self.__exec_cmd_core(cmd, config, config.build_cwd) + + def __exec_cmd_core(self, cmd, config, cwd): cmd = self.inject_env(cmd, config) - dir = None - if (config.build_cwd is not None): - dir = self.inject_env(config.build_cwd, config) + if (cwd is not None): + # inject env-vars in the given working-directory path + cwd = self.inject_env(cwd, config) - utils.execute(cmd, dir) + utils.execute(cmd, cwd) def inject_env(self, cmd, config): + """ + Grab values for often used properties from the config object + and perform variable substitution on the given cmd string. 
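Only part of inject_env is visible in this hunk, but the idea is plain placeholder substitution on the command template. A simplified, self-contained sketch of that idea follows (not the actual implementation; the placeholder names are the ones used by command templates elsewhere in this patch, e.g. $CWD, $PLATFORM, $ARCH, $VENDOR):

```python
import os

# simplified stand-in for BuildSystem.inject_env (illustration only, not the real code)
def inject_env_sketch(cmd, platform, arch, vendor=None):
    return (cmd
            .replace("$CWD", os.getcwd())
            .replace("$PLATFORM", platform)
            .replace("$ARCH", arch)
            .replace("$VENDOR", vendor or ""))

# e.g. the cross-agent command assembled in build_executor.py:
inject_env_sketch("python ./build.py --cross-agent docker -t $PLATFORM -a $ARCH j2v8", "linux", "x64")
# -> "python ./build.py --cross-agent docker -t linux -a x64 j2v8"
```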
+ """ build_cwd = utils.get_cwd() vendor = config.vendor diff --git a/build_system/build_utils.py b/build_system/build_utils.py index 8c7334952..1698c81bd 100644 --- a/build_system/build_utils.py +++ b/build_system/build_utils.py @@ -39,6 +39,10 @@ def platform_libext(config): return lib_ext def execute(cmd, cwd = None): + """ + Low-Level CLI utility function to execute a shell command in a sub-process of the current python process + (redirects all output to stdout) + """ # flush any buffered console output, because popen could block the terminal sys.stdout.flush() @@ -48,6 +52,10 @@ def execute(cmd, cwd = None): raise subprocess.CalledProcessError(return_code, cmd) def execute_to_str(cmd, cwd = None): + """ + Low-Level CLI utility function to execute a shell command in a sub-process of the current python process + (returns all output as a string) + """ # flush any buffered console output, because popen could block the terminal sys.stdout.flush() @@ -60,6 +68,7 @@ def execute_to_str(cmd, cwd = None): return out def store_nodejs_output(next_node_tag, build_cwd): + """Cache built Node.js artifacts into a common directory structure, identified by vendor, platform and architecture.""" curr_node_tag = None curr_dir = lambda subdir: build_cwd + "/node/" + subdir @@ -88,11 +97,11 @@ def store_nodejs_output(next_node_tag, build_cwd): node = curr_dir(subdir) # we want to store into the cache, delete any existing directories that might - # already occupy the cache (there should not be one) + # already occupy the cache subdir (there should be none) if (os.path.isdir(curr_cache)): shutil.rmtree(curr_cache) - # move the previous build artifacts into the cache + # move the previously built artifacts into the cache if (os.path.isdir(node)): print "node --- " + subdir + " ---> cache[" + curr_node_tag + "]" shutil.move(node, curr_cache) @@ -103,11 +112,12 @@ def store_nodejs_output(next_node_tag, build_cwd): next_dir = cached_dir(next_node_tag, "out") if (os.path.isdir(next_dir)): - print ">>> Reused Node.js build files from previous build: " + next_node_tag + print ">>> Reused Node.js build files from build-cache: " + next_node_tag print "node <--- out --- cache[" + next_node_tag + "]" + # move main node.js "out" directory from the cache back into the node directory shutil.move(next_dir, out_dir) - # move extra dirs from cache into node + # also move any extra dirs from the cache back into node for subdir in extra_dirs: node = curr_dir(subdir) next_cache = cached_dir(next_node_tag, subdir) @@ -116,19 +126,22 @@ def store_nodejs_output(next_node_tag, build_cwd): print "node <--- " + subdir + " --- cache[" + next_node_tag + "]" shutil.move(next_cache, node) else: - print ">>> Prepared Node.js output for caching: " + next_node_tag + print ">>> Prepared Node.js build-cache: " + next_node_tag - # create fresh out-dir to receive build artifacts ... + # create fresh out-dir in the cache to receive build artifacts ... if not os.path.exists(out_dir): os.makedirs(out_dir) + # ... 
and immediately also create a tag-file so we know what we built later on with open(curr_tag_file, "w") as f: f.write(next_node_tag) elif (not next_node_tag is None): - print ">>> Used existing Node.js build files: " + next_node_tag + # this build is for the same vendor/platform/architecture as last time + print ">>> Node.js build-cache used: " + next_node_tag def apply_file_template(src, dest, inject_vars_fn): + """Read a text file from src, run the read text through a transformer function and write the modified text into dest""" template_text = None with open(src, "r") as f: template_text = f.read() @@ -142,6 +155,11 @@ def apply_file_template(src, dest, inject_vars_fn): # Sanity check for the builtin node-module links in J2V8 C++ JNI code #----------------------------------------------------------------------- def check_node_builtins(): + """ + The function compares the list of builtin Node.js modules with the setup + code in jni/com_eclipsesource_v8_V8Impl.cpp to make sure that every module + is correctly initialized and linked into the native J2V8 library. + """ node_src = "node/src/" # node.js directory is not available @@ -208,4 +226,4 @@ def check_node_builtins(): if (len(j2v8_missing) > 0): error += "\n\t" + "J2V8 definition is missing node-modules: " + str(j2v8_missing) - sys.exit(error) \ No newline at end of file + sys.exit(error) diff --git a/build_system/cli.py b/build_system/cli.py index d4b874269..e7effbd28 100644 --- a/build_system/cli.py +++ b/build_system/cli.py @@ -4,6 +4,7 @@ import build_constants as bc class BuildParams(object): + """Value container for all build-parameters""" def __init__(self, d): self.target = d.get("target") self.arch = d.get("arch") @@ -19,6 +20,8 @@ def __init__(self, d): self.cross_agent = None def init_args(parser): + """Initialize all supported build.py parameters and commands on the CLI parser""" + # Essential build settings parser.add_argument("--target", "-t", help="The build target platform name (must be a valid platform string identifier).", @@ -38,7 +41,7 @@ def init_args(parser): dest="vendor") parser.add_argument("--keep-native-libs", "-knl", - help="Do not delete the native libraries from the Java directories between builds.", + help="Do not delete the native J2V8 libraries from the Java directories between builds.", dest="keep_native_libs", default=False, action="store_const", @@ -54,14 +57,14 @@ def init_args(parser): # Docker / Vagrant cross-compile settings parser.add_argument("--docker", "-dkr", - help="Run a cross-compile build in a Docker container (all required build-tools are then fully contained & virtualized).", + help="Run a cross-compile environment in a Docker container (all required build-tools are then fully contained & virtualized).", dest="docker", default=False, action="store_const", const=True) parser.add_argument("--vagrant", "-vgr", - help="Run a cross-compile build in a Vagrant virtual machine (all required build-tools are then fully contained & virtualized).", + help="Run a cross-compile environment in a Vagrant virtual machine (all required build-tools are then fully contained & virtualized).", dest="vagrant", default=False, action="store_const", @@ -72,7 +75,7 @@ def init_args(parser): dest="sys_image") parser.add_argument("--no-shutdown", "-nos", - help="When using a cross-compile environment, do not shutdown any of the components when the build is finished or canceled.", + help="When using a cross-compile environment, do not shutdown the virtualized environment when the build is finished or canceled.", 
dest="no_shutdown", action="store_const", const=True) @@ -97,16 +100,17 @@ def init_args(parser): "(the order of the steps given to the CLI does not matter, the correct order will be restored internally).\n\n" + "the fundamental build steps (in order):\n" + "---------------------------------------\n" + - "\n".join(bc.build_step_sequence) + "\n\n" + + "\n".join([s.id + s.help for s in bc.atomic_build_steps]) + "\n\n" + "aliases / combinations of multiple of the above steps:\n" + "------------------------------------------------------\n" + - "\n".join(bc.composite_steps), + "\n".join([s.id + s.help for s in bc.advanced_steps]), metavar="build-steps", nargs="*", default="all", choices=bc.avail_build_steps) def get_parser(): + """Get a CLI parser instance that accepts all supported build.py parameters and commands""" parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) init_args(parser) return parser diff --git a/build_system/cmake_utils.py b/build_system/cmake_utils.py index cfb3b60b7..3392c98a1 100644 --- a/build_system/cmake_utils.py +++ b/build_system/cmake_utils.py @@ -1,3 +1,4 @@ +"""Commonly used CMake CLI commands and argument-formatters""" # see: https://cmake.org/cmake/help/v2.8.8/cmake.html#opt:-Dvar:typevalue def setVar(var, value, type = "STRING"): diff --git a/build_system/constants.py b/build_system/constants.py index eb96102e2..f32371307 100644 --- a/build_system/constants.py +++ b/build_system/constants.py @@ -1,3 +1,5 @@ +"""Commonly used string constants (platforms, architectures, vendors, build-steps)""" + # target platforms target_android = 'android' target_linux = 'linux' diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py index 53c5c1b80..2fcce619c 100644 --- a/build_system/shared_build_steps.py +++ b/build_system/shared_build_steps.py @@ -1,3 +1,7 @@ +""" +A collection of commands, variables and functions that are very likely to be +reused between target-platform configurations or build-steps on the same platform. +""" import glob import os import sys @@ -75,6 +79,10 @@ def rm(args): return shell("rm", args) def clearNativeLibs(config): + """ + Delete previously built native J2V8 libraries from any platforms + (can be disabled by the "keep_native_libs" config property) + """ # the CLI can override this step if (config.keep_native_libs): print("Native libraries not cleared...") @@ -91,6 +99,10 @@ def clearLibs(lib_pattern): return rm_libs def copyNativeLibs(config): + """ + Copy the compiled native J2V8 library (.dll/.dylib/.so) into the Java resources tree + for inclusion into the later built Java JAR. 
+ """ platform_cmake_out = config.inject_env(cmake_out_dir) if (utils.is_win32(config.platform)): @@ -121,6 +133,7 @@ def copyNativeLibs(config): return copy_cmds def apply_maven_null_settings(src_pom_path = "./pom.xml", target_pom_path = None): + """Copy the Maven pom.xml from src to target, while replacing the necessary XML element values with fixed dummy parameter values""" maven_settings = { "properties": { "os": "undefined", @@ -134,6 +147,7 @@ def apply_maven_null_settings(src_pom_path = "./pom.xml", target_pom_path = None apply_maven_settings(maven_settings, src_pom_path, target_pom_path) def apply_maven_config_settings(config, src_pom_path = "./pom.xml", target_pom_path = None): + """Copy the Maven pom.xml from src to target, while replacing the necessary XML element values based on the given build-step config""" os = config.inject_env("$VENDOR-$PLATFORM") arch = config.file_abi version = s.J2V8_FULL_VERSION @@ -152,6 +166,10 @@ def apply_maven_config_settings(config, src_pom_path = "./pom.xml", target_pom_p apply_maven_settings(maven_settings, src_pom_path, target_pom_path) def apply_maven_settings(settings, src_pom_path = "./pom.xml", target_pom_path = None): + """ + Copy the Maven pom.xml from src to target, while replacing the XML element values + based on the values from the hierarchical settings dictionary structure + """ #----------------------------------------------------------------------- pom_ns = "http://maven.apache.org/POM/4.0.0" ns = {"pom": pom_ns} diff --git a/nodejs.py b/nodejs.py index b3f5ca7ad..81374b00e 100644 --- a/nodejs.py +++ b/nodejs.py @@ -1,5 +1,5 @@ """ -Utility script to manage the Node.js dependency +Utility-belt script to manage the Node.js dependency """ import argparse import collections @@ -104,7 +104,7 @@ def package(): } def __add_platform_deps(platform, include, vendor = None): - target = bc.platform_targets.get(platform) + target = bc.platform_configs.get(platform) vendor_str = (vendor + "-" if vendor else "") selected = (vendor_str + platform) in platforms From 60920846f7ffcbb825e12838d88682c349db1a6c Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Wed, 2 Aug 2017 19:20:49 +0200 Subject: [PATCH 07/14] fix for #292 --- CMakeLists.txt | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d1a09fd31..308fead1a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -93,6 +93,10 @@ elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32 ") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -m32 ") endif() + + # -lrt ... 
see: https://github.com/eclipsesource/J2V8/issues/292 + set (j2v8_Debug_libs "-lrt") + set (j2v8_Release_libs"-lrt") #} elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin") #{ @@ -211,8 +215,8 @@ include_directories(${include_dirs}) # link the necessary libraries target_link_libraries(j2v8 - debug "${njs_Debug_libs}" - optimized "${njs_Release_libs}" + debug "${njs_Debug_libs}" "${j2v8_Debug_libs}" + optimized "${njs_Release_libs}" "${j2v8_Release_libs}" ) #----------------------------------------------------------------------- From d372158481ffb98920684aa4c19b1d0c40676694 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Mon, 14 Aug 2017 23:33:54 +0200 Subject: [PATCH 08/14] build-system unit-testing & other extras - fix install.alpine.packages.sh chmod +x - refactor nodejs.py sub-commands (git-CLI style) - add j2v8-cli convenience scripts with command aliases (for win32/linux/macos) - do not run NodeJsTest when native lib was not compiled with node features included - add check in CMake node js lib scripts if all njs lib files exist, exit with error if any is missing - add node 7.9.0 patch file (works for linux, but not windows currently) - j2v8jni build-step is now called j2v8cpp - j2v8jni is a new build-step using javah to regenerate J2V8 JNI header files - j2v8jni build-step is skipped if the required V8.class file does not yet exist - implement basic unit-testing for build-system - centralize maven build-steps logic and also just run maven pre-build actions once per build-process (e.g. copy native J2V8 libs to the Java directories) --- BUILDING.md | 27 ++- CMakeLists.txt | 4 +- build_system/.gitignore | 1 + build_system/build_constants.py | 3 +- build_system/build_executor.py | 65 +++--- build_system/build_interactive.py | 3 +- build_system/build_structures.py | 2 +- build_system/build_utils.py | 113 +++++++++- build_system/cli.py | 71 ++++-- build_system/config_android.py | 6 +- build_system/config_linux.py | 25 +-- build_system/config_macos.py | 24 +- build_system/config_win32.py | 24 +- build_system/constants.py | 1 + build_system/docker_build.py | 48 ++-- build_system/docker_configs.py | 6 + build_system/java_build_steps.py | 32 +++ build_system/run_tests.py | 18 ++ build_system/shared_build_steps.py | 85 +++++-- build_system/shell_build.py | 2 +- build_system/tests/__init__.py | 0 build_system/tests/runner/__init__.py | 0 .../tests/runner/output_redirector.py | 17 ++ build_system/tests/runner/test_asserts.py | 7 + build_system/tests/runner/test_result.py | 210 ++++++++++++++++++ build_system/tests/runner/test_runner.py | 113 ++++++++++ build_system/tests/runner/test_utils.py | 11 + build_system/tests/test_linux_docker.py | 41 ++++ build_system/tests/test_macos_vagrant.py | 40 ++++ build_system/tests/test_win32_docker.py | 39 ++++ build_system/tests/test_win32_native.py | 38 ++++ build_system/vagrant_build.py | 13 +- cmake/BuildUtils.cmake | 1 + cmake/FindJava.cmake | 4 +- cmake/NodeJsUtils.cmake | 93 +++++++- docker/android/Dockerfile | 41 ++-- docker/android/kill_supervisor.py | 0 docker/android/start-emulator.template.sh | 0 docker/android/supervisord.template.conf | 6 +- docker/android/wait-for-emulator.sh | 0 docker/shared/install.alpine.packages.sh | 0 docker/shared/install.jdk.sh | 2 + docker/win32/install.jdk.ps1 | 4 + gradle.properties | 3 + j2v8-cli.cmd | 5 + j2v8-cli.sh | 3 + jni/com_eclipsesource_v8_V8Impl.cpp | 9 + jni/com_eclipsesource_v8_V8Impl.h | 8 + node.patches/7.9.0.diff | 67 ++++++ node.patches/8.10.1.diff | 52 +++++ nodejs.py | 202 ++++++++++++----- pom.xml | 23 
+- src/main/java/com/eclipsesource/v8/V8.java | 17 +- .../java/com/eclipsesource/v8/NodeJSTest.java | 9 + 54 files changed, 1395 insertions(+), 243 deletions(-) create mode 100644 build_system/.gitignore create mode 100644 build_system/docker_configs.py create mode 100644 build_system/java_build_steps.py create mode 100644 build_system/run_tests.py create mode 100644 build_system/tests/__init__.py create mode 100644 build_system/tests/runner/__init__.py create mode 100644 build_system/tests/runner/output_redirector.py create mode 100644 build_system/tests/runner/test_asserts.py create mode 100644 build_system/tests/runner/test_result.py create mode 100644 build_system/tests/runner/test_runner.py create mode 100644 build_system/tests/runner/test_utils.py create mode 100644 build_system/tests/test_linux_docker.py create mode 100644 build_system/tests/test_macos_vagrant.py create mode 100644 build_system/tests/test_win32_docker.py create mode 100644 build_system/tests/test_win32_native.py mode change 100644 => 100755 docker/android/kill_supervisor.py mode change 100644 => 100755 docker/android/start-emulator.template.sh mode change 100644 => 100755 docker/android/wait-for-emulator.sh mode change 100644 => 100755 docker/shared/install.alpine.packages.sh create mode 100644 gradle.properties create mode 100644 j2v8-cli.cmd create mode 100755 j2v8-cli.sh create mode 100644 node.patches/7.9.0.diff create mode 100644 node.patches/8.10.1.diff diff --git a/BUILDING.md b/BUILDING.md index cf563ee61..e3502c0a8 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -41,6 +41,9 @@ Override build-steps ? (leave empty to run pre-configured steps): j2v8 The J2V8 build-system performs several build steps in a fixed order to produce the final J2V8 packages for usage on the designated target platforms. What follows is a short summary for what each of the executed build-steps does and what output artifacts are produced by each step. +``` +Node.js --> CMake --> JNI --> C++ --> Optimize --> Java/Android --> JUnit +``` --- ## Node.js @@ -56,9 +59,10 @@ __Inputs:__ __Artifacts:__ - Node.js & V8 static link libraries - `./node/out/` - - `./node/build/` - - `./node/Debug/` - - `./node/Release/` + - *win32 specific* + - `./node/build/` + - `./node/Debug/` + - `./node/Release/` --- ## CMake @@ -75,12 +79,25 @@ __Artifacts:__ - CMake generated Makefiles / IDE Project-files - `./cmake.out/{platform}.{architecture}/` --- -## JNI +## JNI Header Generation + +Generate the JNI glue header file from the native method definitions of the Java `V8` class. + +__Inputs__: +- Java V8.class file + - `./target/classes/com/eclipsesource/v8/V8.class` + +__Artifacts:__ +- J2V8 C++ JNI header file + - `./jni/com_eclipsesource_v8_V8Impl.h` +--- +## C++ -Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8. +Compile and link the J2V8 native shared libraries (.so/.dylib/.dll), which contain the C++ JNI bridge code to interop with the embedded Node.js / V8 parts. 
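Per platform, this step boils down to a short command list registered in the build-system configuration. For orientation, the Linux variant introduced later in this patch looks like this (`u.cmake_out_dir` comes from shared_build_steps.py):

```python
# from build_system/config_linux.py (see the config changes further below in this patch)
def build_j2v8_cpp(config):
    return [
        "cd " + u.cmake_out_dir,
        "make -j4",
    ]

linux_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp)
```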
__Inputs__: - CMake generated Makefiles / IDE Project-files +- Node.js / V8 static link libraries & C++ header files - J2V8 C++ JNI source code - `./jni/com_eclipsesource_v8_V8Impl.h` - `./jni/com_eclipsesource_v8_V8Impl.cpp` diff --git a/CMakeLists.txt b/CMakeLists.txt index 308fead1a..0f16ad7aa 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -34,8 +34,8 @@ set(J2V8_JDK_DIR ${Java_ROOT} CACHE STRING "Path to the Java JDK dependency") set(J2V8_NODEJS_DIR "${CMAKE_SOURCE_DIR}/node" CACHE STRING "Path to the Node.js dependency") # get the required Node.js link libraries -get_njs_libs(${J2V8_NODEJS_DIR} "Debug") -get_njs_libs(${J2V8_NODEJS_DIR} "Release") +get_njs_libs(${J2V8_NODEJS_DIR} "Debug" FALSE) +get_njs_libs(${J2V8_NODEJS_DIR} "Release" TRUE) # j2v8 build options set(J2V8_TARGET_ARCH "" CACHE STRING "The target architecture for the build.") diff --git a/build_system/.gitignore b/build_system/.gitignore new file mode 100644 index 000000000..77efafb9a --- /dev/null +++ b/build_system/.gitignore @@ -0,0 +1 @@ +test-reports diff --git a/build_system/build_constants.py b/build_system/build_constants.py index d3875d989..82d763919 100644 --- a/build_system/build_constants.py +++ b/build_system/build_constants.py @@ -19,7 +19,8 @@ CLIStep(c.build_node_js, " Builds the Node.js & V8 dependency artifacts that are later linked into the J2V8 native bridge code.\n" + " (only works if the Node.js source was checked out into the J2V8 ./node directory)"), CLIStep(c.build_j2v8_cmake, " Uses CMake to generate the native Makefiles / IDE project files to later build the J2V8 C++ native bridge shared libraries."), - CLIStep(c.build_j2v8_jni, " Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8."), + CLIStep(c.build_j2v8_jni, " Generate the J2V8 JNI C++ Header files."), + CLIStep(c.build_j2v8_cpp, " Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8."), CLIStep(c.build_j2v8_optimize, " The native J2V8 libraries are optimized for performance and/or filesize by using the available tools of the target-platform / compiler-toolchain."), CLIStep(c.build_j2v8_java, " Compiles the Java source code and packages it, including the previously built native libraries, into the final package artifacts.\n" + " For the execution of this build-step Maven (Java) or Gradle (Android) are used for the respective target platforms."), diff --git a/build_system/build_executor.py b/build_system/build_executor.py index 67ad87b57..8112c43e6 100644 --- a/build_system/build_executor.py +++ b/build_system/build_executor.py @@ -8,13 +8,14 @@ from shell_build import ShellBuildSystem import immutable -# collection of all parsed build-steps that will then be passed on to the core build function -# (this list must only contain atomic steps after all step evaluations are finished) -parsed_steps = set() +class BuildState: + # collection of all parsed build-steps that will then be passed on to the core build function + # (this list must only contain atomic steps after all step evaluations are finished) + parsed_steps = set() -# a registry/dictionary of evaluation-functions that translate from their corresponding step/alias -# into the list of atomic build-steps (see parsed_steps above) -step_evaluators = {} + # a registry/dictionary of evaluation-functions that translate from their corresponding step/alias + # into the list of atomic build-steps (see parsed_steps 
above) + step_evaluators = {} #----------------------------------------------------------------------- # Advanced build-step parsing (anti-steps, multi-steps) @@ -28,11 +29,14 @@ def atomic_step(step, alias = None): if (alias is None): alias = step + step_eval = BuildState.step_evaluators + parsed_steps = BuildState.parsed_steps + # add step handler (step => step) - step_evaluators[alias] = lambda: parsed_steps.add(step) + step_eval[alias] = lambda: parsed_steps.add(step) # add anti-step handler (step => ~step) - step_evaluators["~" + alias] = lambda: parsed_steps.discard(step) + step_eval["~" + alias] = lambda: parsed_steps.discard(step) # register additional anti-step in CLI bc.avail_build_steps.append("~" + alias) @@ -43,15 +47,18 @@ def multi_step(alias, include, exclude = []): the defined step alias name was detected. Also the inverted anti-steps sequence will be evaluated if the "~" prefixed alias is recognized. """ + + step_eval = BuildState.step_evaluators + # add aliased step-sequence (alias => step1, step2, ... , stepN) - step_evaluators[alias] = lambda: \ - [step_evaluators.get(s)() for s in include] + \ - [step_evaluators.get("~" + s)() for s in exclude] + step_eval[alias] = lambda: \ + [step_eval.get(s)() for s in include] + \ + [step_eval.get("~" + s)() for s in exclude] # add aliased anti-step-sequence (~alias => ~step1, ~step2, ... , ~stepN) - step_evaluators["~" + alias] = lambda: \ - [step_evaluators.get("~" + s)() for s in include] + \ - [step_evaluators.get(s)() for s in exclude] + step_eval["~" + alias] = lambda: \ + [step_eval.get("~" + s)() for s in include] + \ + [step_eval.get(s)() for s in exclude] # register additional anti-step in CLI bc.avail_build_steps.append("~" + alias) @@ -74,6 +81,7 @@ def init_buildsteps(): c.build_node_js, c.build_j2v8_cmake, c.build_j2v8_jni, + c.build_j2v8_cpp, c.build_j2v8_optimize, ]) @@ -83,11 +91,11 @@ def init_buildsteps(): def evaluate_build_step_option(step): """Find the registered evaluator function for the given step and execute it""" - step_eval_func = step_evaluators.get(step, raise_unhandled_option(step)) + step_eval_func = BuildState.step_evaluators.get(step, raise_unhandled_option(step)) step_eval_func() def raise_unhandled_option(step): - return lambda: sys.exit("INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step + "\"") + return lambda: utils.cli_exit("INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step + "\"") # initialize the advanced parsing evaluation handlers for the build.py CLI init_buildsteps() @@ -115,35 +123,38 @@ def execute_build(params): if (isinstance(params, dict)): params = cli.BuildParams(params) + # can be used to force output of all started sub-processes through the host-process stdout + utils.redirect_stdout_enabled = hasattr(params, "redirect_stdout") and params.redirect_stdout + if (params.target is None): - sys.exit("ERROR: No target platform specified") + utils.cli_exit("ERROR: No target platform specified") if (params.docker and params.vagrant): - sys.exit("ERROR: Choose either Docker or Vagrant for the build, can not use both") + utils.cli_exit("ERROR: Choose either Docker or Vagrant for the build, can not use both") target = params.target if (not target in bc.platform_configs): - sys.exit("ERROR: Unrecognized target platform: " + target) + utils.cli_exit("ERROR: Unrecognized target platform: " + target) # this defines the PlatformConfig / operating system the build should be run for target_platform = bc.platform_configs.get(target) if (params.arch is None): - 
sys.exit("ERROR: No target architecture specified") + utils.cli_exit("ERROR: No target architecture specified") avail_architectures = target_platform.architectures if (not params.arch in avail_architectures): - sys.exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target) + utils.cli_exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target) if (params.buildsteps is None): - sys.exit("ERROR: No build-step specified, valid values are: " + ", ".join(bc.avail_build_steps)) + utils.cli_exit("ERROR: No build-step specified, valid values are: " + ", ".join(bc.avail_build_steps)) if (not params.buildsteps is None and not isinstance(params.buildsteps, list)): params.buildsteps = [params.buildsteps] - global parsed_steps + parsed_steps = BuildState.parsed_steps parsed_steps.clear() # go through the raw list of build-steps (given by the CLI or an API call) @@ -155,7 +166,7 @@ def execute_build(params): parsed_steps = [step for step in bc.atomic_build_step_sequence if step in parsed_steps] if (len(parsed_steps) == 0): - sys.exit("WARNING: No build-steps to be done ... exiting") + utils.cli_exit("WARNING: No build-steps to be done ... exiting") build_cwd = utils.get_cwd() @@ -168,7 +179,7 @@ def execute_build(params): # try to find the configuration parameters to run the cross-compiler if (cross_sys): if (cross_configs.get(cross_sys) is None): - sys.exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_sys + "'") + utils.cli_exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_sys + "'") else: cross_cfg = cross_configs.get(cross_sys) @@ -231,14 +242,14 @@ def execute_build_step(build_system, build_step): cross_cfg = cross_configs.get(params.cross_agent) if (cross_cfg is None): - sys.exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent) + utils.cli_exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent) build_cwd = cross_cfg.build_cwd # execute all steps from a list that parsed / evaluated before (see the "build-step parsing" section above) for step in parsed_steps: if (not step in build_steps): - print("INFO: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")") + print("WARNING: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")") continue target_step = build_steps[step] diff --git a/build_system/build_interactive.py b/build_system/build_interactive.py index 30e1f1722..6f0300595 100644 --- a/build_system/build_interactive.py +++ b/build_system/build_interactive.py @@ -3,6 +3,7 @@ import build_configs as bcfg import build_executor as bex +import build_utils as utils def run_interactive_cli(): idx = 0 @@ -20,7 +21,7 @@ def run_interactive_cli(): else input("Select a predefined build-configuration to run: ") if not isinstance(sel_index, int) or sel_index < 0 or sel_index > len(bcfg.configs): - sys.exit("ERROR: Must enter a valid test index in the range [0 ... " + str(len(bcfg.configs)) + "]") + utils.cli_exit("ERROR: Must enter a valid test index in the range [0 ... 
" + str(len(bcfg.configs)) + "]") sel_cfg = bcfg.configs[sel_index] diff --git a/build_system/build_structures.py b/build_system/build_structures.py index 40a71b5df..32810acfa 100644 --- a/build_system/build_structures.py +++ b/build_system/build_structures.py @@ -33,7 +33,7 @@ def cross_compiler(self, cross_host_name): compiler = self.cross_compilers.get(cross_host_name) if (not compiler): - sys.exit("ERROR: internal error while looking for cross-compiler: " + cross_host_name) + utils.cli_exit("ERROR: internal error while looking for cross-compiler: " + cross_host_name) return compiler() diff --git a/build_system/build_utils.py b/build_system/build_utils.py index 1698c81bd..66083c409 100644 --- a/build_system/build_utils.py +++ b/build_system/build_utils.py @@ -15,6 +15,10 @@ def get_cwd(): def host_cmd_sep(): return "&& " if os.name == "nt" else "; " +def touch(filename, times=None): + with open(filename, 'a'): + os.utime(filename, times) + def is_android(platform): return c.target_android in platform @@ -38,18 +42,109 @@ def platform_libext(config): return lib_ext +def cli_exit(message): + """ + sys.exit() messages are not picked up correctly when unit-testing. + Use this function instead! + """ + sys.stderr.write(message + "\n") + sys.stderr.flush() + sys.exit(1) + +# based on code from: https://stackoverflow.com/a/16260159/425532 +def readlines(f, newlines): + buf = "" + while True: + #{ + def get_pos(): + #{ + pos = None + nl = None + for n in newlines: + if pos: + break + try: + pos = buf.index(n) + except Exception: + pass + + if pos: + nl = n + + return (pos, nl) + #} + + pos, nl = get_pos() + + while pos: + yield buf[:pos] + nl + buf = buf[pos + len(nl):] + pos, nl = get_pos() + + chunk = f.read(1) + + if chunk == ":": + # read another char to make sure we catch ": " delimiter + buf += chunk + chunk = f.read(1) + + if not chunk: + yield buf + break + buf += chunk + #} + +redirect_stdout_enabled = False + def execute(cmd, cwd = None): """ Low-Level CLI utility function to execute a shell command in a sub-process of the current python process - (redirects all output to stdout) + (redirects all output to the host-process stdout if redirect_stdout_enabled is True) """ - # flush any buffered console output, because popen could block the terminal - sys.stdout.flush() - - p = subprocess.Popen(cmd, universal_newlines=True, shell=True, cwd=cwd) - return_code = p.wait() - if return_code: - raise subprocess.CalledProcessError(return_code, cmd) + if not redirect_stdout_enabled: + # flush any buffered console output, because popen could block the terminal + sys.stdout.flush() + + p = subprocess.Popen(cmd, universal_newlines=True, shell=True, cwd=cwd) + return_code = p.wait() + + if return_code: + raise subprocess.CalledProcessError(return_code, cmd) + else: + # see: https://stackoverflow.com/a/22049757/425532 + # this way of running the process and handling the process output is important because + # when running unit-tests in python or running e.g. 
a docker process, if the + # output does not directly go through the stdout of the python process, + # then it will not be picked up by some of the available unit-test runners + + # flush any buffered console output, because popen could block the terminal + sys.stdout.flush() + + p = subprocess.Popen(cmd, + shell=True, + cwd=cwd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT#, + #stdin=sys.stdin + ) + # while True: + # line = p.stdout.readline() + # if line == '': + # break + # print(line.strip("\r\n")) # remove extra ws between lines + # sys.stdout.flush() + + # also look for ": " as a output separator, because Vagrant emits this + # right before some relevant user input is requested + # (this makes sure that we get all output for the input is required) + for line in readlines(p.stdout, [": ", ":", "\n"]): + sys.stdout.write(line) + sys.stdout.flush() + + return_code = p.wait() + + if return_code: + raise subprocess.CalledProcessError(return_code, cmd) def execute_to_str(cmd, cwd = None): """ @@ -226,4 +321,4 @@ def check_node_builtins(): if (len(j2v8_missing) > 0): error += "\n\t" + "J2V8 definition is missing node-modules: " + str(j2v8_missing) - sys.exit(error) + cli_exit(error) diff --git a/build_system/cli.py b/build_system/cli.py index e7effbd28..3708d4de3 100644 --- a/build_system/cli.py +++ b/build_system/cli.py @@ -5,24 +5,48 @@ class BuildParams(object): """Value container for all build-parameters""" - def __init__(self, d): - self.target = d.get("target") - self.arch = d.get("arch") - self.vendor = d.get("vendor") - self.keep_native_libs = d.get("keep_native_libs") - self.node_enabled = d.get("node_enabled") - self.docker = d.get("docker") - self.vagrant = d.get("vagrant") - self.sys_image = d.get("sys_image") - self.no_shutdown = d.get("no_shutdown") - self.buildsteps = d.get("buildsteps") or c.build_all - + def __init__(self, param_dict): + + known_params = { + "target": None, + "arch": None, + "vendor": None, + "keep_native_libs": None, + "node_enabled": None, + "docker": None, + "vagrant": None, + "sys_image": None, + "no_shutdown": None, + "redirect_stdout": None, + "buildsteps": c.build_all, + } + + unhandled = set(param_dict.keys()).difference(set(known_params.keys())) + + if any(unhandled): + raise Exception("Unhandled BuildParams: " + str(unhandled)) + + for param in known_params: + # try to read value from input + value = param_dict.get(param) + + if value != None: + # use input value + setattr(self, param, value) + else: + # use default value + default = known_params.get(param) + setattr(self, param, default) + + # this should never be passed in by the user, it is used just internally self.cross_agent = None def init_args(parser): """Initialize all supported build.py parameters and commands on the CLI parser""" + #----------------------------------------------------------------------- # Essential build settings + #----------------------------------------------------------------------- parser.add_argument("--target", "-t", help="The build target platform name (must be a valid platform string identifier).", dest="target", @@ -35,7 +59,9 @@ def init_args(parser): required=True, choices=bc.avail_architectures) + #----------------------------------------------------------------------- # Optional build settings + #----------------------------------------------------------------------- parser.add_argument("--vendor", "-v", help="The operating system vendor (most relevant when building for a specific Linux distribution).", dest="vendor") @@ -47,7 +73,9 @@ def 
init_args(parser): action="store_const", const=True) + #----------------------------------------------------------------------- # J2V8 Feature switches + #----------------------------------------------------------------------- parser.add_argument("--node-enabled", "-ne", help="Include the Node.js runtime and builtin node-modules for use in J2V8.", dest="node_enabled", @@ -55,7 +83,9 @@ def init_args(parser): action="store_const", const=True) + #----------------------------------------------------------------------- # Docker / Vagrant cross-compile settings + #----------------------------------------------------------------------- parser.add_argument("--docker", "-dkr", help="Run a cross-compile environment in a Docker container (all required build-tools are then fully contained & virtualized).", dest="docker", @@ -80,7 +110,9 @@ def init_args(parser): action="store_const", const=True) + #----------------------------------------------------------------------- # Meta-Args + #----------------------------------------------------------------------- # NOTE: this option is only used internally to distinguish the running of the build script within # the build-instigator and the actual build-executor (this is relevant when cross-compiling) parser.add_argument("--cross-agent", @@ -88,6 +120,16 @@ def init_args(parser): dest="cross_agent", type=str) + parser.add_argument("--redirect-stdout", "-rso", + help="Make sure that the stdout/stderr of sub-proccesses running shell commands is also going through the " + + "output interface of the python host process that is running the build.\n" + + "(this is required when running tests for the build-system, without this option the output of the subprocesses will "+ + "not show up in the test logs)", + dest="redirect_stdout", + default=False, + action="store_const", + const=True) + parser.add_argument("--interactive", "-i", help="Run the interactive version of the J2V8 build CLI.", dest="interactive", @@ -95,6 +137,9 @@ def init_args(parser): action="store_const", const=True) + #----------------------------------------------------------------------- + # Build-Steps + #----------------------------------------------------------------------- parser.add_argument("buildsteps", help="Pass a single build-step or a list of all the recognized build-steps that should be executed\n" + "(the order of the steps given to the CLI does not matter, the correct order will be restored internally).\n\n" + @@ -111,6 +156,6 @@ def init_args(parser): def get_parser(): """Get a CLI parser instance that accepts all supported build.py parameters and commands""" - parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) + parser = argparse.ArgumentParser(prog="build", formatter_class=argparse.RawTextHelpFormatter) init_args(parser) return parser diff --git a/build_system/config_android.py b/build_system/config_android.py index 0f33fdf75..54efc4900 100644 --- a/build_system/config_android.py +++ b/build_system/config_android.py @@ -62,13 +62,15 @@ def build_j2v8_cmake(config): android_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- -def build_j2v8_jni(config): +android_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni) +#----------------------------------------------------------------------- +def build_j2v8_cpp(config): return [ "cd " + u.cmake_out_dir, "make -j4", ] -android_config.build_step(c.build_j2v8_jni, build_j2v8_jni) +android_config.build_step(c.build_j2v8_cpp, 
build_j2v8_cpp) #----------------------------------------------------------------------- def build_j2v8_java(config): return \ diff --git a/build_system/config_linux.py b/build_system/config_linux.py index 418bb54f0..38c5e6594 100644 --- a/build_system/config_linux.py +++ b/build_system/config_linux.py @@ -1,6 +1,7 @@ import constants as c from build_structures import PlatformConfig from docker_build import DockerBuildSystem, DockerBuildStep +import java_build_steps as j import shared_build_steps as u import cmake_utils as cmu @@ -57,13 +58,15 @@ def build_j2v8_cmake(config): linux_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- -def build_j2v8_jni(config): +linux_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni) +#----------------------------------------------------------------------- +def build_j2v8_cpp(config): return [ "cd " + u.cmake_out_dir, "make -j4", ] -linux_config.build_step(c.build_j2v8_jni, build_j2v8_jni) +linux_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp) #----------------------------------------------------------------------- def build_j2v8_optimize(config): # NOTE: execstack / strip are not part of the alpine tools, therefore we just skip this step @@ -78,21 +81,7 @@ def build_j2v8_optimize(config): linux_config.build_step(c.build_j2v8_optimize, build_j2v8_optimize) #----------------------------------------------------------------------- -def build_j2v8_java(config): - u.apply_maven_config_settings(config) - - return \ - u.clearNativeLibs(config) + \ - u.copyNativeLibs(config) + \ - u.setJavaHome(config) + \ - [u.build_cmd] + \ - u.copyOutput(config) - -linux_config.build_step(c.build_j2v8_java, build_j2v8_java) +j.add_java_step(linux_config, c.build_j2v8_java, [u.java_build_cmd]) #----------------------------------------------------------------------- -def build_j2v8_junit(config): - return \ - [u.run_tests_cmd] - -linux_config.build_step(c.build_j2v8_junit, build_j2v8_junit) +j.add_java_step(linux_config, c.build_j2v8_junit, [u.java_tests_cmd]) #----------------------------------------------------------------------- diff --git a/build_system/config_macos.py b/build_system/config_macos.py index d113c3942..c85723835 100644 --- a/build_system/config_macos.py +++ b/build_system/config_macos.py @@ -2,6 +2,7 @@ import constants as c from build_structures import PlatformConfig from vagrant_build import VagrantBuildSystem, VagrantBuildStep +import java_build_steps as j import shared_build_steps as u import cmake_utils as cmu @@ -57,28 +58,17 @@ def build_j2v8_cmake(config): macos_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- -def build_j2v8_jni(config): +macos_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni) +#----------------------------------------------------------------------- +def build_j2v8_cpp(config): return [ "cd " + u.cmake_out_dir, "make -j4", ] -macos_config.build_step(c.build_j2v8_jni, build_j2v8_jni) +macos_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp) #----------------------------------------------------------------------- -def build_j2v8_java(config): - u.apply_maven_config_settings(config) - - return \ - u.clearNativeLibs(config) + \ - u.copyNativeLibs(config) + \ - [u.build_cmd] + \ - u.copyOutput(config) - -macos_config.build_step(c.build_j2v8_java, build_j2v8_java) +j.add_java_step(macos_config, c.build_j2v8_java, [u.java_build_cmd]) 
#----------------------------------------------------------------------- -def build_j2v8_junit(config): - return \ - [u.run_tests_cmd] - -macos_config.build_step(c.build_j2v8_junit, build_j2v8_junit) +j.add_java_step(macos_config, c.build_j2v8_junit, [u.java_tests_cmd]) #----------------------------------------------------------------------- diff --git a/build_system/config_win32.py b/build_system/config_win32.py index 00708fae8..42ec65605 100644 --- a/build_system/config_win32.py +++ b/build_system/config_win32.py @@ -3,6 +3,7 @@ from build_structures import PlatformConfig from docker_build import DockerBuildSystem, DockerBuildStep from vagrant_build import VagrantBuildSystem, VagrantBuildStep +import java_build_steps as j import shared_build_steps as u import cmake_utils as cmu @@ -60,7 +61,9 @@ def build_j2v8_cmake(config): win32_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake) #----------------------------------------------------------------------- -def build_j2v8_jni(config): +win32_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni) +#----------------------------------------------------------------------- +def build_j2v8_cpp(config): # show docker container memory usage / limit show_mem = ["powershell C:/j2v8/docker/win32/mem.ps1"] if config.cross_agent == "docker" else [] @@ -72,22 +75,9 @@ def build_j2v8_jni(config): ] + \ show_mem -win32_config.build_step(c.build_j2v8_jni, build_j2v8_jni) +win32_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp) #----------------------------------------------------------------------- -def build_j2v8_java(config): - u.apply_maven_config_settings(config) - - return \ - u.clearNativeLibs(config) + \ - u.copyNativeLibs(config) + \ - [u.build_cmd] + \ - u.copyOutput(config) - -win32_config.build_step(c.build_j2v8_java, build_j2v8_java) +j.add_java_step(win32_config, c.build_j2v8_java, [u.java_build_cmd]) #----------------------------------------------------------------------- -def build_j2v8_junit(config): - return \ - [u.run_tests_cmd] - -win32_config.build_step(c.build_j2v8_junit, build_j2v8_junit) +j.add_java_step(win32_config, c.build_j2v8_junit, [u.java_tests_cmd]) #----------------------------------------------------------------------- diff --git a/build_system/constants.py b/build_system/constants.py index f32371307..cd9c08593 100644 --- a/build_system/constants.py +++ b/build_system/constants.py @@ -18,6 +18,7 @@ build_node_js = 'nodejs' build_j2v8_cmake = 'j2v8cmake' build_j2v8_jni = 'j2v8jni' +build_j2v8_cpp = 'j2v8cpp' build_j2v8_optimize = 'j2v8optimize' build_j2v8_java = 'j2v8java' build_j2v8_junit = 'j2v8junit' diff --git a/build_system/docker_build.py b/build_system/docker_build.py index accd8b187..a80ac21e8 100644 --- a/build_system/docker_build.py +++ b/build_system/docker_build.py @@ -1,12 +1,13 @@ -import atexit import re +import signal import subprocess import sys from build_structures import BuildSystem, BuildStep import constants as c import build_utils as utils +import docker_configs as dkr_cfg class DockerBuildStep(BuildStep): def __init__(self, platform, build_cwd = None, host_cwd = None): @@ -21,22 +22,24 @@ def clean(self, config): return def health_check(self, config): + print "Verifying Docker build-system status..." try: # general docker availability check - self.exec_host_cmd("docker stats --no-stream", config) + self.exec_host_cmd("docker --version", config) + # check the currently active container technology (linux vs. 
windows containers) # NOTE: the additional newlines are important for the regex matching version_str = utils.execute_to_str("docker version") + "\n\n" server_match = re.search(r"Server:(.*)\n\n", version_str + "\n\n", re.DOTALL) if (server_match is None or server_match.group(1) is None): - sys.exit("ERROR: Unable to determine docker server version from version string: \n\n" + version_str) + utils.cli_exit("ERROR: Unable to determine docker server version from version string: \n\n" + version_str) version_match = re.search(r"^ OS/Arch:\s+(.*)$", server_match.group(1), re.MULTILINE) if (version_match is None): - sys.exit("ERROR: Unable to determine docker server platform from version string: \n\n" + version_str) + utils.cli_exit("ERROR: Unable to determine docker server platform from version string: \n\n" + version_str) docker_version = version_match.group(1) @@ -44,10 +47,10 @@ def health_check(self, config): # check if the docker engine is running the expected container platform (linux or windows) if (docker_req_platform not in docker_version): - sys.exit("ERROR: docker server must be using " + docker_req_platform + " containers, instead found server version using: " + docker_version) + utils.cli_exit("ERROR: docker server must be using " + docker_req_platform + " containers, instead found server version using: " + docker_version) except subprocess.CalledProcessError: - sys.exit("ERROR: Failed Docker build-system health check, make sure Docker is available and running!") + utils.cli_exit("ERROR: Failed Docker build-system health check, make sure Docker is available and running!") def get_image_name(self, config): return "j2v8-$VENDOR-$PLATFORM" @@ -61,22 +64,41 @@ def pre_build(self, config): container_name = self.get_container_name(config) docker_stop_str = self.inject_env("docker stop " + container_name, config) - def cli_exit_event(): - if (config.no_shutdown): + def cli_exit_event(signal, frame): + if config.no_shutdown: + print "INFO: Docker J2V8 container will continue running..." return print "Waiting for docker process to exit..." self.exec_host_cmd(docker_stop_str, config) - atexit.register(cli_exit_event) + signal.signal(signal.SIGINT, cli_exit_event) args_str = "" - if (config.sys_image): - args_str += " --build-arg sys_image=" + config.sys_image + def build_arg(name, value): + return (" --build-arg " + name + "=" + value) if value else "" - if (config.vendor): - args_str += " --build-arg vendor=" + config.vendor + def sys_image_arg(value): + return build_arg("sys_image", value) + + def vendor_arg(value): + return build_arg("vendor", value) + + # use custom sys-image if it was specified by the user + args_str += sys_image_arg(config.sys_image) + + # if we are building with docker + # and a specific vendor was specified for the build + # and no custom sys-image was specified ... + if (config.docker and config.vendor and not config.sys_image): + vendor_default_image = dkr_cfg.vendor_default_images.get(config.vendor) + + # ... 
then use the default image for that vendor if available + args_str += sys_image_arg(vendor_default_image) + + # pass a specified vendor string to the docker build + args_str += vendor_arg(config.vendor) image_name = self.get_image_name(config) diff --git a/build_system/docker_configs.py b/build_system/docker_configs.py new file mode 100644 index 000000000..d41e59481 --- /dev/null +++ b/build_system/docker_configs.py @@ -0,0 +1,6 @@ +import constants as c + +vendor_default_images = { + c.vendor_alpine: "openjdk:8u131-alpine", + c.vendor_debian: "debian:jessie", +} diff --git a/build_system/java_build_steps.py b/build_system/java_build_steps.py new file mode 100644 index 000000000..c740438cc --- /dev/null +++ b/build_system/java_build_steps.py @@ -0,0 +1,32 @@ +import shared_build_steps as u + +def add_java_step(platform_config, build_step, step_cmds): + # add the common preparation sequence for a maven build-step to the platform-config + if not hasattr(platform_config, "prepare_maven"): + platform_config.prepare_maven = lambda config: \ + u.clearNativeLibs(config) + \ + u.copyNativeLibs(config) + \ + u.setJavaHome(config) + #----------------------------------------------------------------------- + # add a build-step that involves running maven and requires some preparation + def java_build_step(cmds): + def build_func(config): + # update maven pom.xml settings + u.apply_maven_config_settings(config) + + # assemble the commands for this build-step + # includes the preparation commands for maven + steps = \ + platform_config.prepare_maven(config) + \ + cmds + \ + u.copyOutput(config) + + # the shell was already prepared for running maven, + # if another java step runs later on, this does not need to be done again + platform_config.prepare_maven = lambda cfg: ["echo Native lib already copied..."] + + return steps + return build_func + #----------------------------------------------------------------------- + platform_config.build_step(build_step, java_build_step(step_cmds)) + #----------------------------------------------------------------------- diff --git a/build_system/run_tests.py b/build_system/run_tests.py new file mode 100644 index 000000000..0ac3ebcb0 --- /dev/null +++ b/build_system/run_tests.py @@ -0,0 +1,18 @@ +from unittest import TestLoader, TestSuite +from tests.runner.test_runner import SurePhyreTestRunner + +import tests.test_linux_docker +import tests.test_macos_vagrant +import tests.test_win32_docker +import tests.test_win32_native + +loader = TestLoader() +suite = TestSuite(( + loader.loadTestsFromModule(tests.test_linux_docker), + # loader.loadTestsFromModule(tests.test_macos_vagrant), + # loader.loadTestsFromModule(tests.test_win32_docker), + # loader.loadTestsFromModule(tests.test_win32_native), +)) + +runner = SurePhyreTestRunner() +runner.run(suite) diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py index 2fcce619c..ac8f8b03e 100644 --- a/build_system/shared_build_steps.py +++ b/build_system/shared_build_steps.py @@ -1,5 +1,5 @@ """ -A collection of commands, variables and functions that are very likely to be +A collection of commands, constants and functions that are very likely to be reused between target-platform configurations or build-steps on the same platform. 
""" import glob @@ -7,24 +7,41 @@ import sys import xml.etree.ElementTree as ET +# see: https://stackoverflow.com/a/27333347/425532 +class XmlCommentParser(ET.XMLTreeBuilder): + + def __init__(self): + ET.XMLTreeBuilder.__init__(self) + # assumes ElementTree 1.2.X + self._parser.CommentHandler = self.handle_comment + + def handle_comment(self, data): + self._target.start(ET.Comment, {}) + self._target.data(data) + self._target.end(ET.Comment) + import constants as c import build_settings as s import build_utils as utils # TODO: add CLI option to override / pass-in custom maven/gradle args -build_cmd = "mvn clean verify -DskipTests -e" -run_tests_cmd = "mvn test -e"# -Dtest=V8RuntimeNotLoadedTest" +# NOTE: --batch-mode is needed to avoid unicode symbols messing up stdout while unit-testing the build-system +java_build_cmd = "mvn clean verify --batch-mode -DskipTests -e" +java_tests_cmd = "mvn test -e --batch-mode" # the ./ should work fine on all platforms # IMPORTANT: on MacOSX the ./ prefix is a strict requirement by some CLI commands !!! cmake_out_dir = "./cmake.out/$VENDOR-$PLATFORM.$ARCH/" +#----------------------------------------------------------------------- +# Common shell commands & utils +#----------------------------------------------------------------------- def gradleCmd(): return "gradlew" if os.name == "nt" else "gradle" def gradle(cmd): return [ - gradleCmd() + " --daemon " + cmd, + gradleCmd() + " " + cmd, ] def outputLibName(config): @@ -43,23 +60,23 @@ def setEnvVar(name, value): return ["export " + name + "=" + value] def setJavaHome(config): - # NOTE: when running docker alpine-linux builds, we don't want to overwrite JAVA_HOME - if (config.vendor == c.vendor_alpine and config.cross_agent == "docker"): - return [] - - return setEnvVar("JAVA_HOME", "/opt/jdk/jdk1.8.0_131") + # NOTE: Docker Linux builds need some special handling, because not all images have + # a pre-defined JAVA_HOME environment variable + if (config.platform == c.target_linux and config.cross_agent == "docker"): + # currently only the Alpine image brings its own java-installation & JAVA_HOME + # for other Linux images we install the JDK and setup JAVA_HOME manually + if (config.vendor != c.vendor_alpine): + print "Setting JAVA_HOME env-var for Docker Linux build" + return setEnvVar("JAVA_HOME", "/opt/jdk/jdk1.8.0_131") + + # for any other builds, we can just assume that JAVA_HOME is already set system-wide + print "Using system-var JAVA_HOME" + return [] def setVersionEnv(config): return \ setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION) -def copyOutput(config): - jar_name = outputJarName(config) - - return \ - mkdir("build.out") + \ - cp("target/" + jar_name + " build.out/") - def shell(cmd, args): """ Invokes the cross-platform polyfill for the shell command defined by the 'cmd' parameter @@ -77,6 +94,36 @@ def mkdir(args): def rm(args): """Invokes the cross-platform polyfill for the 'rm' shell command""" return shell("rm", args) +#----------------------------------------------------------------------- +# Uniform build-steps (cross-platform) +#----------------------------------------------------------------------- +def build_j2v8_jni(config): + java_class_id = "com.eclipsesource.v8.V8" + java_class_parts = java_class_id.split(".") + java_class_filepath = "./target/classes/" + "/".join(java_class_parts) + ".class" + + if (not os.path.exists(java_class_filepath)): + return [ + "echo WARNING: Could not find " + java_class_parts[-1] + ".class file at path: " + java_class_filepath, + "echo JNI Header 
generation will be skipped...", + ] + + return [ + "echo Generating JNI header files...", + "cd ./target/classes", + "javah " + java_class_id, + ] + cp("com_eclipsesource_v8_V8.h ../../jni/com_eclipsesource_v8_V8Impl.h") + [ + "echo Done", + ] +#----------------------------------------------------------------------- +# File generators, operations & utils +#----------------------------------------------------------------------- +def copyOutput(config): + jar_name = outputJarName(config) + + return \ + mkdir("build.out") + \ + cp("target/" + jar_name + " build.out/") def clearNativeLibs(config): """ @@ -112,7 +159,7 @@ def copyNativeLibs(config): platform_lib_path = glob.glob(lib_pattern) if (len(platform_lib_path) == 0): - sys.exit("ERROR: Could not find native library for inclusion in platform target package") + utils.cli_exit("ERROR: Could not find native library for inclusion in platform target package") platform_lib_path = platform_lib_path[0] @@ -126,7 +173,7 @@ def copyNativeLibs(config): else: lib_target_path = "src/main/resources/" - print "copying native lib from: " + platform_lib_path + " to: " + lib_target_path + print "Copying native lib from: " + platform_lib_path + " to: " + lib_target_path copy_cmds += cp(platform_lib_path + " " + lib_target_path) @@ -201,7 +248,7 @@ def __handle_setting(path, value): print "Updating Maven configuration (" + target_pom_path + ")..." - tree = ET.parse(src_pom_path) + tree = ET.parse(src_pom_path, XmlCommentParser()) root = tree.getroot() __recurse_maven_settings(settings, __handle_setting) diff --git a/build_system/shell_build.py b/build_system/shell_build.py index eadeeefd7..483174172 100644 --- a/build_system/shell_build.py +++ b/build_system/shell_build.py @@ -13,7 +13,7 @@ def health_check(self, config): shell_check_cmd = "ver" if utils.is_win32(config.platform) else "bash --version" self.exec_cmd(shell_check_cmd, config) except subprocess.CalledProcessError: - sys.exit("ERROR: Failed Shell build-system health check!") + utils.cli_exit("ERROR: Failed Shell build-system health check!") def pre_build(self, config): return diff --git a/build_system/tests/__init__.py b/build_system/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build_system/tests/runner/__init__.py b/build_system/tests/runner/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build_system/tests/runner/output_redirector.py b/build_system/tests/runner/output_redirector.py new file mode 100644 index 000000000..808698421 --- /dev/null +++ b/build_system/tests/runner/output_redirector.py @@ -0,0 +1,17 @@ + +class OutputRedirector(object): + """ Wrapper to redirect stdout, stderr or any other stream that it is given """ + def __init__(self, streams): + self.streams = streams + + def write(self, data): + for s in self.streams: + s.write(data) + + def writelines(self, lines): + for s in self.streams: + s.writelines(lines) + + def flush(self): + for s in self.streams: + s.flush() diff --git a/build_system/tests/runner/test_asserts.py b/build_system/tests/runner/test_asserts.py new file mode 100644 index 000000000..6b65fcfd5 --- /dev/null +++ b/build_system/tests/runner/test_asserts.py @@ -0,0 +1,7 @@ + +def expectOutput(regex): + """ After a test is completed successfully, also verify that the CLI output contains an expected regex pattern. 
""" + def expectOutput_wrapper(func): + func.__testRegex = regex + return func + return expectOutput_wrapper diff --git a/build_system/tests/runner/test_result.py b/build_system/tests/runner/test_result.py new file mode 100644 index 000000000..25ce43d61 --- /dev/null +++ b/build_system/tests/runner/test_result.py @@ -0,0 +1,210 @@ +import collections +import datetime +import re +import StringIO +import sys +import traceback +import unittest + +from output_redirector import OutputRedirector +import test_utils as utils + +TestResultBase = unittest.TestResult + +class TestOutcome: + Success, Failure, Error, Skip = range(4) + +TestRunData = collections.namedtuple("TestRunData", "outcome test errStr errObj output elapsed") +TestRunData.__new__.__defaults__ = (None,) * len(TestRunData._fields) + +class TestResult(TestResultBase): + def __init__(self, streams, test_cases): + TestResultBase.__init__(self) + self.__sys_stdout = None + self.__sys_stderr = None + + self.streams = streams + self.test_cases = test_cases + + self.class_start_time = None + self.class_stop_time = None + + self.test_start_time = None + self.test_stop_time = None + + # list of all generated TestRunData for this result + self.all_results = [] + + # lists of type-specific TestRunData for this result + self.success_results = [] + self.failure_results = [] + self.error_results = [] + self.skipped_results = [] + + #override + def startTest(self, test): + TestResultBase.startTest(self, test) + + # remember the original sys streams + self.__sys_stdout = sys.stdout + self.__sys_stderr = sys.stderr + + # just one buffer for both stdout and stderr + self.outputBuffer = StringIO.StringIO() + + sys.stdout = OutputRedirector(self.streams + [self.outputBuffer]) + sys.stderr = OutputRedirector(self.streams + [self.outputBuffer]) + + # now the real testing logic kicks in + test_class, test_method = utils.get_test_names(test) + + if (not self.class_start_time): + self.class_start_time = datetime.datetime.now() + + self.test_start_time = datetime.datetime.now() + + utils.write_separator() + utils.write_log("INFO", "Running %(test_class)s.%(test_method)s" % locals()) + + def finish_test(self, test): + if (self.testsRun != len(self.test_cases)): + return + + if (not self.class_stop_time): + self.class_stop_time = datetime.datetime.now() + + num_tests = len(self.all_results) + num_failures = len(self.failure_results) + num_errors = len(self.error_results) + num_skips = len(self.skipped_results) + + test_class, _ = utils.get_test_names(test) + + test_elapsed = self.class_stop_time - self.class_start_time + + log_level = "INFO" + failure_tag = "" + + if (num_failures or num_errors): + log_level = "ERROR" + failure_tag = "<<< FAILURE! " + elif (num_skips): + log_level = "WARNING" + + utils.write_separator() + print + utils.write_separator() + utils.write_log(log_level, "Tests run: %(num_tests)s, Failures: %(num_failures)s, Errors: %(num_errors)s, Skipped: %(num_skips)s, Time elapsed: %(test_elapsed)s s %(failure_tag)s- in %(test_class)s" % locals()) + utils.write_separator() + + def print_errors(test_class, err_list, kind): + for result in err_list: + test = result.test + elapsed = result.elapsed + test_method = test._testMethodName + utils.write_log("ERROR", "%(test_method)s(%(test_class)s) Time elapsed: %(elapsed)s s <<< %(kind)s!" 
% locals()) + err_frame = result.errObj[2].tb_next + traceback.print_tb(err_frame, 1) + print + + # write leading newline if detail error reports should be written + if any(self.error_results) or any(self.failure_results): + print + + print_errors(test_class, self.error_results, "ERROR") + print_errors(test_class, self.failure_results, "FAILURE") + + def complete_test_case(self, test, test_info = None): + """ + Disconnect output redirection and return buffer. + Safe to call multiple times. + """ + output = self.outputBuffer.getvalue() + + if (test_info): + self.test_stop_time = datetime.datetime.now() + # merge data produced during test with additional meta-data + test_result = TestRunData(*(test_info[:-2] + (output, self.test_stop_time - self.test_start_time))) + + self.all_results.append(test_result) + + if (test_result.outcome == TestOutcome.Success): + self.success_results.append(test_result) + + elif (test_result.outcome == TestOutcome.Error): + self.error_results.append(test_result) + + elif (test_result.outcome == TestOutcome.Failure): + self.failure_results.append(test_result) + + elif (test_result.outcome == TestOutcome.Skip): + self.skipped_results.append(test_result) + + if self.__sys_stdout: + self.finish_test(test) + + # turn off the shell output redirection + sys.stdout = self.__sys_stdout + sys.stderr = self.__sys_stderr + + self.__sys_stdout = None + self.__sys_stderr = None + + #override + def stopTest(self, test): + # Usually one of addSuccess, addError or addFailure would have been called. + # But there are some path in unittest that would bypass this. + # We must disconnect stdout in stopTest(), which is guaranteed to be called. + self.complete_test_case(test) + + def __assertTestOutput(self, test): + test_method = type(test).__dict__.get(test._testMethodName) + test_regex_field = "__testRegex" + + if (hasattr(test_method, test_regex_field)): + regex = test_method.__dict__.get(test_regex_field) + output = self.outputBuffer.getvalue() + + match_ok = re.search(regex, output) + + if (not match_ok): + try: + raise Exception("Unable to find expected pattern in test-output:\n\t\t" + regex) + except Exception: + ex_nfo = sys.exc_info() + self.addFailure(test, ex_nfo) + return False + + return True + + #override + def addSuccess(self, test): + + # after a test was successful, also run stdout/stderr asserts + # which can still result in a test-failure + if not self.__assertTestOutput(test): + return + + TestResultBase.addSuccess(self, test) + testData = TestRunData(TestOutcome.Success, test, '', None) + self.complete_test_case(test, testData) + + #override + def addError(self, test, err): + TestResultBase.addError(self, test, err) + _, _exc_str = self.errors[-1] + testData = TestRunData(TestOutcome.Error, test, _exc_str, err) + self.complete_test_case(test, testData) + + #override + def addFailure(self, test, err): + TestResultBase.addFailure(self, test, err) + _, _exc_str = self.failures[-1] + testData = TestRunData(TestOutcome.Failure, test, _exc_str, err) + self.complete_test_case(test, testData) + + #override + def addSkip(self, test, reason): + TestResultBase.addSkip(self, test, reason) + testData = TestRunData(TestOutcome.Skip, test, reason, None) + self.complete_test_case(test, testData) diff --git a/build_system/tests/runner/test_runner.py b/build_system/tests/runner/test_runner.py new file mode 100644 index 000000000..99db4991a --- /dev/null +++ b/build_system/tests/runner/test_runner.py @@ -0,0 +1,113 @@ +import datetime +import os +import sys +from unittest import 
TestSuite + +import __main__ + +from test_result import TestResult, TestOutcome +import test_utils as utils + +class SurePhyreTestRunner(object): + """ + """ + def __init__(self): + self.runner_start_time = None + self.runner_stop_time = None + + def run(self, test): + "Run the given test case or test suite." + + self.runner_start_time = datetime.datetime.now() + + test_class_dict = {} + + def find_test_methods(test_decl): + is_iterable = hasattr(test_decl, '__iter__') + + if (is_iterable): + for tests in test_decl: + find_test_methods(tests) + else: + cls_nm = type(test_decl).__name__ + + if not test_class_dict.get(cls_nm): + test_class_dict[cls_nm] = list() + + test_class_dict[cls_nm].append(test_decl) + + # convert the given TestCase/TestSuite into a dictionary of test-classes + find_test_methods(test) + + all_results = list() + success_results = list() + failure_results = list() + error_results = list() + skipped_results = list() + + utils.write_separator() + utils.write_log("INFO", "T E S T S") + + for k, class_tests in test_class_dict.iteritems(): + class_suite = TestSuite(class_tests) + reports_dir = os.path.join(os.path.dirname(__main__.__file__), "test-reports") + + if not os.path.exists(reports_dir): + os.makedirs(reports_dir) + + with file(os.path.join(reports_dir, k + '.txt'), 'wb') as fp: + # execute all tests in this test class + class_result = TestResult([sys.stdout, fp], class_tests) + class_suite(class_result) + + # get the test-results from this class and add them to the summary lists + all_results.extend(class_result.all_results) + success_results.extend(class_result.success_results) + failure_results.extend(class_result.failure_results) + error_results.extend(class_result.error_results) + skipped_results.extend(class_result.skipped_results) + + tests_success = not any(error_results) and not any(failure_results) + tests_result = "SUCCESS" if tests_success else "FAILURE" + self.runner_stop_time = datetime.datetime.now() + + # print final summary log after all tests are done running + print + utils.write_separator() + utils.write_log("INFO", "TESTS RUN %(tests_result)s" % locals()) + utils.write_separator() + utils.write_log("INFO") + utils.write_log("INFO", "Results:") + + if not tests_success: + utils.write_log("INFO") + + def print_summary_problems(err_list, kind): + if (any(err_list)): + utils.write_log("ERROR", kind + "s: ") + + for r in err_list: + test_class, test_method = utils.get_test_names(r.test) + err_message = r.errObj[1].message + err_frame = r.errObj[2].tb_next + err_lineno = err_frame.tb_lineno if err_frame else "" + utils.write_log("ERROR", " %(test_class)s.%(test_method)s:%(err_lineno)s %(err_message)s" % locals()) + + print_summary_problems(failure_results, "Failure") + print_summary_problems(error_results, "Error") + + num_success = len(success_results) + num_failures = len(failure_results) + num_errors = len(error_results) + num_skips = len(skipped_results) + + utils.write_log("INFO") + utils.write_log("ERROR", "Tests run: %(num_success)s, Failures: %(num_failures)s, Errors: %(num_errors)s, Skipped: %(num_skips)s" % locals()) + utils.write_log("INFO") + + total_elapsed = self.runner_stop_time - self.runner_start_time + + utils.write_separator() + utils.write_log("INFO", "Total time: %(total_elapsed)s s" % locals()) + utils.write_log("INFO", "Finished at: %s" % self.runner_stop_time) + utils.write_separator() diff --git a/build_system/tests/runner/test_utils.py b/build_system/tests/runner/test_utils.py new file mode 100644 index 000000000..27cbb9307 
--- /dev/null +++ b/build_system/tests/runner/test_utils.py @@ -0,0 +1,11 @@ + +def write_log(level, message = ""): + print "$ [%(level)s] %(message)s" % locals() + +def write_separator(): + print "$---------------------------------------------------------------------------------------------------" + +def get_test_names(test): + test_class = type(test).__name__ + test_method = test._testMethodName + return (test_class, test_method) diff --git a/build_system/tests/test_linux_docker.py b/build_system/tests/test_linux_docker.py new file mode 100644 index 000000000..81d9d1b61 --- /dev/null +++ b/build_system/tests/test_linux_docker.py @@ -0,0 +1,41 @@ +import unittest + +from runner.test_asserts import * + +import constants as c +import build_executor as bex + +class TestLinuxDocker(unittest.TestCase): + + def with_x64_defaults(self, params): + x64_defaults = { + "target": c.target_linux, + "arch": c.arch_x64, + "docker": True, + "redirect_stdout": True, # important for test-logging + } + params.update(x64_defaults) + return params + + @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_disabled(self): + + params = self.with_x64_defaults( + { + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) + + # TODO: could use functional parameter overload to return error message + details + # (e.g. match regex groups for numfails, numerrors, numskips, etc. and make advanced asserts) + @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_enabled(self): + + params = self.with_x64_defaults( + { + "node_enabled": True, + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) diff --git a/build_system/tests/test_macos_vagrant.py b/build_system/tests/test_macos_vagrant.py new file mode 100644 index 000000000..029483e6d --- /dev/null +++ b/build_system/tests/test_macos_vagrant.py @@ -0,0 +1,40 @@ +import unittest + +from runner.test_asserts import * + +import constants as c +import build_executor as bex + +class TestMacOSVagrant(unittest.TestCase): + + def with_x64_defaults(self, params): + x64_defaults = { + "target": c.target_macos, + "arch": c.arch_x64, + "vagrant": True, + "no_shutdown": True, + "redirect_stdout": True, # important for test-logging + } + params.update(x64_defaults) + return params + + @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_disabled(self): + + params = self.with_x64_defaults( + { + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) + + @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_enabled(self): + + params = self.with_x64_defaults( + { + "node_enabled": True, + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) diff --git a/build_system/tests/test_win32_docker.py b/build_system/tests/test_win32_docker.py new file mode 100644 index 000000000..b6f517bcd --- /dev/null +++ b/build_system/tests/test_win32_docker.py @@ -0,0 +1,39 @@ +import unittest + +from runner.test_asserts import * + +import constants as c +import build_executor as bex + +class TestWin32Docker(unittest.TestCase): + + def with_x64_defaults(self, params): + x64_defaults = { + "target": c.target_win32, + "arch": 
c.arch_x64, + "docker": True, + "redirect_stdout": True, # important for test-logging + } + params.update(x64_defaults) + return params + + @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_disabled(self): + + params = self.with_x64_defaults( + { + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) + + @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_enabled(self): + + params = self.with_x64_defaults( + { + "node_enabled": True, + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) diff --git a/build_system/tests/test_win32_native.py b/build_system/tests/test_win32_native.py new file mode 100644 index 000000000..de341df53 --- /dev/null +++ b/build_system/tests/test_win32_native.py @@ -0,0 +1,38 @@ +import unittest + +from runner.test_asserts import * + +import constants as c +import build_executor as bex + +class TestWin32Native(unittest.TestCase): + + def with_x64_defaults(self, params): + x64_defaults = { + "target": c.target_win32, + "arch": c.arch_x64, + "redirect_stdout": True, # important for test-logging + } + params.update(x64_defaults) + return params + + @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_disabled(self): + + params = self.with_x64_defaults( + { + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) + + @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_enabled(self): + + params = self.with_x64_defaults( + { + "node_enabled": True, + "buildsteps": ["j2v8", "test"], + }) + + bex.execute_build(params) diff --git a/build_system/vagrant_build.py b/build_system/vagrant_build.py index 25e9624f9..919be294b 100644 --- a/build_system/vagrant_build.py +++ b/build_system/vagrant_build.py @@ -1,4 +1,4 @@ -import atexit +import signal import subprocess import sys import build_utils as utils @@ -15,10 +15,11 @@ def clean(self, config): return def health_check(self, config): + print "Verifying Vagrant build-system status..." try: - self.exec_host_cmd("vagrant global-status", config) + self.exec_host_cmd("vagrant --version", config) except subprocess.CalledProcessError: - sys.exit("ERROR: Failed Vagrant build-system health check, make sure Vagrant is available and running!") + utils.cli_exit("ERROR: Failed Vagrant build-system health check, make sure Vagrant is available and running!") def pre_build(self, config): vagrant_start_cmd = "vagrant up" @@ -29,14 +30,15 @@ def pre_build(self, config): if (config.pre_build_cmd): vagrant_start_cmd = config.pre_build_cmd + utils.host_cmd_sep() + vagrant_start_cmd - def cli_exit_event(): + def cli_exit_event(signal, frame): if (config.no_shutdown): + print "INFO: Vagrant J2V8 machine will continue running..." return print "Waiting for vagrant virtual-machine to exit..." self.exec_host_cmd("vagrant halt", config) - atexit.register(cli_exit_event) + signal.signal(signal.SIGINT, cli_exit_event) self.exec_host_cmd(vagrant_start_cmd, config) @@ -60,6 +62,7 @@ def exec_build(self, config): def post_build(self, config): if (config.no_shutdown): + print "INFO: Vagrant J2V8 machine will continue running..." 
return self.exec_host_cmd("vagrant halt", config) diff --git a/cmake/BuildUtils.cmake b/cmake/BuildUtils.cmake index b65180262..1b7d4ee3b 100644 --- a/cmake/BuildUtils.cmake +++ b/cmake/BuildUtils.cmake @@ -1,5 +1,6 @@ macro (link_static_crt) + message("Linking against static MSVCRT") foreach(flag_var CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO) diff --git a/cmake/FindJava.cmake b/cmake/FindJava.cmake index 4159b7228..00dd9d56b 100644 --- a/cmake/FindJava.cmake +++ b/cmake/FindJava.cmake @@ -93,8 +93,8 @@ else() set(Java_ROOT "$ENV{JAVA_HOME}") endif() -if ("${Java_ROOT}" STREQUAL "") +if("${Java_ROOT}" STREQUAL "") message(FATAL_ERROR "Unable to locate Java JDK") endif() -message ("Using Java-Root: ${Java_ROOT}") +message("Using Java-Root: ${Java_ROOT}") diff --git a/cmake/NodeJsUtils.cmake b/cmake/NodeJsUtils.cmake index 2f0876a63..5f6c3b9d3 100644 --- a/cmake/NodeJsUtils.cmake +++ b/cmake/NodeJsUtils.cmake @@ -1,7 +1,27 @@ - -function (get_njs_libs nodejs_dir config_name) +#----------------------------------------------------------------------- +# Expects a list of absolute paths to the expected Node.js static libraries +# and exits CMake with a fatal error if one of the libs does not exist. +#----------------------------------------------------------------------- +function (assert_nodejs_libs_exist) + # ARGN: a list of absolute paths + set (njs_libs ${ARGN}) + + foreach(lib ${njs_libs}) + if (NOT EXISTS ${lib}) + message(FATAL_ERROR "ERROR: Unable to locate required Node.js library: ${lib}") + endif() + endforeach() + +endfunction(assert_nodejs_libs_exist) +#----------------------------------------------------------------------- +# Based on the operating system in use, look for the static Node.js libraries +# that must be included for linking the J2V8 JNI native bridge code. 
+#----------------------------------------------------------------------- +function (get_njs_libs nodejs_dir config_name fail_on_missing_libs) #{ + #----------------------------------------------------------------------- if (CMAKE_SYSTEM_NAME STREQUAL "Windows") + #----------------------------------------------------------------------- #{ # base directories for Node.js link libraries set (njs_build ${nodejs_dir}/build/${config_name}) @@ -10,7 +30,7 @@ function (get_njs_libs nodejs_dir config_name) set (njs_extra ${nodejs_dir}/${config_name}) set (njs_extra_lib ${nodejs_dir}/${config_name}/lib) - # project link libraries + # Node.js link libraries set (njs_libs # nodejs/build/$Config/lib ${njs_build_lib}/standalone_inspector.lib @@ -43,7 +63,15 @@ function (get_njs_libs nodejs_dir config_name) # nodejs/$Config ${njs_extra}/cctest.lib + ) + + # verify that all required Node.js libs actually exist + if (${fail_on_missing_libs}) + assert_nodejs_libs_exist(${njs_libs}) + endif() + # additional link libraries + set (njs_libs ${njs_libs} # additional windows libs, required by Node.js Dbghelp Shlwapi @@ -51,12 +79,14 @@ function (get_njs_libs nodejs_dir config_name) set (njs_${config_name}_libs ${njs_libs} PARENT_SCOPE) #} + #----------------------------------------------------------------------- elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin") + #----------------------------------------------------------------------- #{ # base directories for Node.js link libraries set (njs_out ${nodejs_dir}/out/${config_name}) - # project link libraries + # Node.js link libraries set (njs_libs # v8 libs ${njs_out}/libv8_base.a @@ -70,24 +100,35 @@ function (get_njs_libs nodejs_dir config_name) ${njs_out}/libgtest.a ${njs_out}/libhttp_parser.a ${njs_out}/libuv.a - -force_load ${njs_out}/libnode.a ${njs_out}/libopenssl.a ${njs_out}/libzlib.a ) + # verify that all required Node.js libs actually exist + if (${fail_on_missing_libs}) + assert_nodejs_libs_exist(${njs_libs} ${njs_out}/libnode.a) + endif() + + # additional link libraries + set (njs_libs ${njs_libs} + # Node.js libs that require special linker treatments + -force_load ${njs_out}/libnode.a + ) + set (njs_${config_name}_libs ${njs_libs} PARENT_SCOPE) #} + #----------------------------------------------------------------------- elseif(CMAKE_SYSTEM_NAME STREQUAL "Android") + #----------------------------------------------------------------------- #{ # base directories for Node.js link libraries set (njs_out_target ${nodejs_dir}/out/${config_name}/obj.target) set (njs_out_v8 ${nodejs_dir}/out/${config_name}/obj.target/deps/v8/src) set (njs_out_deps ${nodejs_dir}/out/${config_name}/obj.target/deps) - # project link libraries + # Node.js link libraries set (njs_libs # node libs - -Wl,--start-group ${njs_out_deps}/uv/libuv.a ${njs_out_deps}/openssl/libopenssl.a ${njs_out_deps}/http_parser/libhttp_parser.a @@ -101,25 +142,40 @@ function (get_njs_libs nodejs_dir config_name) ${njs_out_v8}/libv8_libplatform.a ${njs_out_v8}/libv8_libbase.a ${njs_out_v8}/libv8_libsampler.a + ) - -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive + # verify that all required Node.js libs actually exist + if (${fail_on_missing_libs}) + assert_nodejs_libs_exist(${njs_libs} ${njs_out_target}/libnode.a) + endif() + # finalize linker settings + set (njs_libs + # + -Wl,--start-group + # the carefree libs + ${njs_libs} + + # Node.js libs that require special linker treatments + -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive -Wl,--end-group + # ) set 
(njs_${config_name}_libs ${njs_libs} PARENT_SCOPE) #} + #----------------------------------------------------------------------- elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") + #----------------------------------------------------------------------- #{ # base directories for Node.js link libraries set (njs_out_target ${nodejs_dir}/out/${config_name}/obj.target) set (njs_out_v8 ${nodejs_dir}/out/${config_name}/obj.target/deps/v8/src) set (njs_out_deps ${nodejs_dir}/out/${config_name}/obj.target/deps) - # project link libraries + # Node.js link libraries set (njs_libs # node libs - -Wl,--start-group ${njs_out_deps}/uv/libuv.a ${njs_out_deps}/openssl/libopenssl.a ${njs_out_deps}/http_parser/libhttp_parser.a @@ -133,10 +189,24 @@ function (get_njs_libs nodejs_dir config_name) ${njs_out_v8}/libv8_libplatform.a ${njs_out_v8}/libv8_libbase.a ${njs_out_v8}/libv8_libsampler.a + ) + + # verify that all required Node.js libs actually exist + if (${fail_on_missing_libs}) + assert_nodejs_libs_exist(${njs_libs} ${njs_out_target}/libnode.a) + endif() - -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive + # finalize linker settings + set (njs_libs + # + -Wl,--start-group + # the carefree libs + ${njs_libs} + # Node.js libs that require special linker treatments + -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive -Wl,--end-group + # ) set (njs_${config_name}_libs ${njs_libs} PARENT_SCOPE) @@ -144,3 +214,4 @@ function (get_njs_libs nodejs_dir config_name) endif() #} endfunction (get_njs_libs) +#----------------------------------------------------------------------- diff --git a/docker/android/Dockerfile b/docker/android/Dockerfile index 7b9f7aeb4..415e93763 100644 --- a/docker/android/Dockerfile +++ b/docker/android/Dockerfile @@ -14,6 +14,9 @@ WORKDIR /temp/docker/shared/ COPY ./shared/install.debian.packages.sh /temp/docker/shared RUN ./install.debian.packages.sh +# install the heaviest dependencies first +# (this keeps the big things cached in docker, even if we need to modify one of the shell scripts +# of one of the lighter dependencies below) ENV NDK_VERSION "r13b" ENV NDK_NAME "android-ndk-$NDK_VERSION-linux-x86_64" RUN echo "Preparing Android NDK..." && \ @@ -30,19 +33,6 @@ RUN echo "Preparing Android GCC-Toolchain..." && \ ENV NDK "/build/android-ndk-$NDK_VERSION" ENV PATH "$PATH:/build/android-gcc-toolchain:$NDK" -COPY ./shared/install.jdk.sh /temp/docker/shared -RUN ./install.jdk.sh -ENV JAVA_HOME "/opt/jdk/jdk1.8.0_131" - -COPY ./shared/install.cmake.sh /temp/docker/shared -RUN ./install.cmake.sh -ENV PATH "$PATH:/opt/cmake/bin" - -COPY ./shared/install.gradle.sh /temp/docker/shared -RUN ./install.gradle.sh -ENV GRADLE_HOME "/opt/gradle-3.5" -ENV PATH "$PATH:$GRADLE_HOME/bin" - RUN echo "Preparing Android SDK..." 
&& \ wget -qO- http://dl.google.com/android/android-sdk_r23-linux.tgz | \ tar xvz -C /usr/local/ && \ @@ -54,18 +44,33 @@ ENV ANDROID_HOME "/usr/local/android-sdk" ENV PATH "$PATH:$ANDROID_HOME/tools" ENV PATH "$PATH:$ANDROID_HOME/platform-tools" +# Create fake keymap file +RUN mkdir /usr/local/android-sdk/tools/keymaps && \ + touch /usr/local/android-sdk/tools/keymaps/en-us + +# install the required license for sdk-build-tools +RUN mkdir -p $ANDROID_HOME/licenses && echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55\n" > $ANDROID_HOME/licenses/android-sdk-license + +# java must be installed at this point, because the following android CLI commands depend on it +COPY ./shared/install.jdk.sh /temp/docker/shared +RUN ./install.jdk.sh +ENV JAVA_HOME "/opt/jdk/jdk1.8.0_131" + # set up the android emulator & android images for running the unit tests # see: https://github.com/tracer0tong/android-emulator RUN ( sleep 4 && while [ 1 ]; do sleep 1; echo y; done ) | android update sdk --no-ui --force -a --filter \ build-tools-24.0.3,tools,platform-tools,android-19,extra-android-m2repository,sys-img-x86-android-19,sys-img-armeabi-v7a-android-19 && \ echo "y" | android update adb -# Create fake keymap file -RUN mkdir /usr/local/android-sdk/tools/keymaps && \ - touch /usr/local/android-sdk/tools/keymaps/en-us +# now install the rest of the tools that are more lightweight +COPY ./shared/install.cmake.sh /temp/docker/shared +RUN ./install.cmake.sh +ENV PATH "$PATH:/opt/cmake/bin" -# install the required license for sdk-build-tools -RUN mkdir -p $ANDROID_HOME/licenses && echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55\n" > $ANDROID_HOME/licenses/android-sdk-license +COPY ./shared/install.gradle.sh /temp/docker/shared +RUN ./install.gradle.sh +ENV GRADLE_HOME "/opt/gradle-3.5" +ENV PATH "$PATH:$GRADLE_HOME/bin" # download the most critical gradle dependencies for the build beforehand RUN mkdir -p /temp diff --git a/docker/android/kill_supervisor.py b/docker/android/kill_supervisor.py old mode 100644 new mode 100755 diff --git a/docker/android/start-emulator.template.sh b/docker/android/start-emulator.template.sh old mode 100644 new mode 100755 diff --git a/docker/android/supervisord.template.conf b/docker/android/supervisord.template.conf index 7fafa057e..f2a4e6a42 100644 --- a/docker/android/supervisord.template.conf +++ b/docker/android/supervisord.template.conf @@ -16,7 +16,7 @@ stopasgroup=true killasgroup=true [eventlistener:emulator_exit] -command=/j2v8/docker/android/kill_supervisor.py +command=python /j2v8/docker/android/kill_supervisor.py process_name=emulator events=PROCESS_STATE_EXITED,PROCESS_STATE_FATAL stdout_logfile=/dev/stdout @@ -38,7 +38,7 @@ stopasgroup=true killasgroup=true [eventlistener:tests_exit] -command=/j2v8/docker/android/kill_supervisor.py +command=python /j2v8/docker/android/kill_supervisor.py process_name=tests events=PROCESS_STATE_EXITED,PROCESS_STATE_FATAL stdout_logfile=/dev/stdout @@ -60,7 +60,7 @@ stopasgroup=true killasgroup=true [eventlistener:logcat_exit] -command=/j2v8/docker/android/kill_supervisor.py +command=python /j2v8/docker/android/kill_supervisor.py process_name=tests events=PROCESS_STATE_EXITED,PROCESS_STATE_FATAL stdout_logfile=/dev/stdout diff --git a/docker/android/wait-for-emulator.sh b/docker/android/wait-for-emulator.sh old mode 100644 new mode 100755 diff --git a/docker/shared/install.alpine.packages.sh b/docker/shared/install.alpine.packages.sh old mode 100644 new mode 100755 diff --git a/docker/shared/install.jdk.sh 
b/docker/shared/install.jdk.sh index b7d7a0f75..9942fa013 100755 --- a/docker/shared/install.jdk.sh +++ b/docker/shared/install.jdk.sh @@ -12,5 +12,7 @@ echo "Preparing JDK..." curl -L -C - -b "oraclelicense=accept-securebackup-cookie" -O http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.tar.gz mkdir -p /opt/jdk tar x -C /opt/jdk -f jdk-8u131-linux-x64.tar.gz + update-alternatives --install /usr/bin/java java /opt/jdk/jdk1.8.0_131/bin/java 100 update-alternatives --install /usr/bin/javac javac /opt/jdk/jdk1.8.0_131/bin/javac 100 +update-alternatives --install /usr/bin/javah javah /opt/jdk/jdk1.8.0_131/bin/javah 100 diff --git a/docker/win32/install.jdk.ps1 b/docker/win32/install.jdk.ps1 index f5c94d9f9..1d369da7e 100644 --- a/docker/win32/install.jdk.ps1 +++ b/docker/win32/install.jdk.ps1 @@ -12,6 +12,10 @@ Start-Process C:/jdk.exe -Wait ` $env:JAVA_HOME = 'C:\Program Files\Java\jdk1.8.0_131'; [Environment]::SetEnvironmentVariable('JAVA_HOME', $env:JAVA_HOME, [EnvironmentVariableTarget]::Machine); +# add Java tools to path +$env:PATH = $env:JAVA_HOME+'\bin;'+$env:PATH; +[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine); + Write-Host 'Removing ...'; Remove-Item C:\jdk.exe -Force; diff --git a/gradle.properties b/gradle.properties new file mode 100644 index 000000000..85eb5dfb5 --- /dev/null +++ b/gradle.properties @@ -0,0 +1,3 @@ +#increase jvm heap space available for gradle +#(allows to run dex in the same process as gradle) +org.gradle.jvmargs=-Xmx4608M diff --git a/j2v8-cli.cmd b/j2v8-cli.cmd new file mode 100644 index 000000000..0652878a4 --- /dev/null +++ b/j2v8-cli.cmd @@ -0,0 +1,5 @@ +@echo off + +doskey build=python build.py $* +doskey nodejs=python nodejs.py $* +doskey citests=python build_system\run_tests.py $* diff --git a/j2v8-cli.sh b/j2v8-cli.sh new file mode 100755 index 000000000..3c84bae79 --- /dev/null +++ b/j2v8-cli.sh @@ -0,0 +1,3 @@ +alias build="python build.py" +alias nodejs="python nodejs.py" +alias citests="python build_system/run_tests.py" diff --git a/jni/com_eclipsesource_v8_V8Impl.cpp b/jni/com_eclipsesource_v8_V8Impl.cpp index 4dfc7f5eb..f4a0e6c5b 100644 --- a/jni/com_eclipsesource_v8_V8Impl.cpp +++ b/jni/com_eclipsesource_v8_V8Impl.cpp @@ -439,6 +439,15 @@ JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isRunning #endif } +JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isNodeCompatible + (JNIEnv *, jclass) { + #ifdef NODE_COMPATIBLE + return true; + #else + return false; + #endif +} + JNIEXPORT jlong JNICALL Java_com_eclipsesource_v8_V8__1createIsolate (JNIEnv *env, jobject v8, jstring globalAlias) { V8Runtime* runtime = new V8Runtime(); diff --git a/jni/com_eclipsesource_v8_V8Impl.h b/jni/com_eclipsesource_v8_V8Impl.h index 2e9fe4676..6739431b8 100644 --- a/jni/com_eclipsesource_v8_V8Impl.h +++ b/jni/com_eclipsesource_v8_V8Impl.h @@ -819,6 +819,14 @@ JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1pumpMessageLoop JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isRunning (JNIEnv *, jclass, jlong); +/* + * Class: com_eclipsesource_v8_V8 + * Method: _isNodeCompatible + * Signature: ()Z + */ +JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isNodeCompatible + (JNIEnv *, jclass); + #ifdef __cplusplus } #endif diff --git a/node.patches/7.9.0.diff b/node.patches/7.9.0.diff new file mode 100644 index 000000000..82aeda68e --- /dev/null +++ b/node.patches/7.9.0.diff @@ -0,0 +1,67 @@ +diff --git a/common.gypi 
b/common.gypi +index 147cc70f..40e44baf 100644 +--- a/common.gypi ++++ b/common.gypi +@@ -190,7 +190,7 @@ + 'msvs_settings': { + 'VCCLCompilerTool': { + 'StringPooling': 'true', # pool string literals +- 'DebugInformationFormat': 3, # Generate a PDB ++ 'DebugInformationFormat': 0, # Generate a PDB + 'WarningLevel': 3, + 'BufferSecurityCheck': 'true', + 'ExceptionHandling': 0, # /EHsc +diff --git a/deps/cares/common.gypi b/deps/cares/common.gypi +index 609ad62a..d714cdd7 100644 +--- a/deps/cares/common.gypi ++++ b/deps/cares/common.gypi +@@ -80,7 +80,7 @@ + 'msvs_settings': { + 'VCCLCompilerTool': { + 'StringPooling': 'true', # pool string literals +- 'DebugInformationFormat': 3, # Generate a PDB ++ 'DebugInformationFormat': 0, # Generate a PDB + 'WarningLevel': 3, + 'BufferSecurityCheck': 'true', + 'ExceptionHandling': 1, # /EHsc +diff --git a/deps/uv/common.gypi b/deps/uv/common.gypi +index 470b7338..8dc3b3f9 100644 +--- a/deps/uv/common.gypi ++++ b/deps/uv/common.gypi +@@ -87,7 +87,7 @@ + 'msvs_settings': { + 'VCCLCompilerTool': { + 'StringPooling': 'true', # pool string literals +- 'DebugInformationFormat': 3, # Generate a PDB ++ 'DebugInformationFormat': 0, # Generate a PDB + 'WarningLevel': 3, + 'BufferSecurityCheck': 'true', + 'ExceptionHandling': 1, # /EHsc +diff --git a/src/node.h b/src/node.h +index 1255a4af..66911873 100644 +--- a/src/node.h ++++ b/src/node.h +@@ -417,7 +417,7 @@ extern "C" NODE_EXTERN void node_module_register(void* mod); + #ifdef NODE_SHARED_MODE + # define NODE_CTOR_PREFIX + #else +-# define NODE_CTOR_PREFIX static ++# define NODE_CTOR_PREFIX + #endif + + #if defined(_MSC_VER) +diff --git a/vcbuild.bat b/vcbuild.bat +index 01750a4a..f8392e4d 100644 +--- a/vcbuild.bat ++++ b/vcbuild.bat +@@ -176,8 +176,8 @@ goto run + if defined noprojgen goto msbuild + + @rem Generate the VS project. +-echo configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG% +-python configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG% ++echo configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG% --enable-static ++python configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG% --enable-static + if errorlevel 1 goto create-msvs-files-failed + if not exist node.sln goto create-msvs-files-failed + echo Project files generated. 
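Editor's note on the node.patches workflow (illustrative aside, not part of the patch itself): the version-keyed .diff files above and below are produced from local modifications of the Node.js checkout by "python nodejs.py store-diff" and re-applied after a fresh clone by "python nodejs.py apply-diff", with the filename derived from NODE_VERSION in build_settings.py. The sketch below shows that round-trip under the assumption that plain "git diff" / "git apply" are the underlying commands; the real scripts route through their own utils.execute helper, which may differ in detail.

# Illustrative sketch only -- assumes plain "git diff" / "git apply".
import os
import subprocess

NODE_VERSION = "7.9.0"  # normally read from build_system/build_settings.py
patch_file = os.path.join("node.patches", NODE_VERSION + ".diff")

def store_node_diff():
    # capture the local modifications of the ./node checkout into the patch file
    with open(patch_file, "w") as f:
        subprocess.check_call(["git", "diff"], cwd="node", stdout=f)

def apply_node_diff():
    # re-apply the stored patch onto a freshly cloned ./node checkout
    subprocess.check_call(["git", "apply", os.path.join("..", patch_file)], cwd="node")

Keying the patches by Node.js version lets each supported release carry its own build tweaks (disabling PDB generation, making NODE_CTOR_PREFIX non-static, configuring with --enable-static) without maintaining a forked Node.js branch.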
diff --git a/node.patches/8.10.1.diff b/node.patches/8.10.1.diff new file mode 100644 index 000000000..37f6af6fe --- /dev/null +++ b/node.patches/8.10.1.diff @@ -0,0 +1,52 @@ +diff --git a/common.gypi b/common.gypi +index ea08e803..fa94c9f4 100644 +--- a/common.gypi ++++ b/common.gypi +@@ -189,7 +189,7 @@ + 'msvs_settings': { + 'VCCLCompilerTool': { + 'StringPooling': 'true', # pool string literals +- 'DebugInformationFormat': 3, # Generate a PDB ++ 'DebugInformationFormat': 0, # Generate a PDB + 'WarningLevel': 3, + 'BufferSecurityCheck': 'true', + 'ExceptionHandling': 0, # /EHsc +diff --git a/deps/cares/common.gypi b/deps/cares/common.gypi +index 609ad62a..d714cdd7 100644 +--- a/deps/cares/common.gypi ++++ b/deps/cares/common.gypi +@@ -80,7 +80,7 @@ + 'msvs_settings': { + 'VCCLCompilerTool': { + 'StringPooling': 'true', # pool string literals +- 'DebugInformationFormat': 3, # Generate a PDB ++ 'DebugInformationFormat': 0, # Generate a PDB + 'WarningLevel': 3, + 'BufferSecurityCheck': 'true', + 'ExceptionHandling': 1, # /EHsc +diff --git a/deps/uv/common.gypi b/deps/uv/common.gypi +index ec482340..807de0aa 100644 +--- a/deps/uv/common.gypi ++++ b/deps/uv/common.gypi +@@ -93,7 +93,7 @@ + 'msvs_settings': { + 'VCCLCompilerTool': { + 'StringPooling': 'true', # pool string literals +- 'DebugInformationFormat': 3, # Generate a PDB ++ 'DebugInformationFormat': 0, # Generate a PDB + 'WarningLevel': 3, + 'BufferSecurityCheck': 'true', + 'ExceptionHandling': 1, # /EHsc +diff --git a/src/node.h b/src/node.h +index 596769a6..21dbc38d 100644 +--- a/src/node.h ++++ b/src/node.h +@@ -433,7 +433,7 @@ extern "C" NODE_EXTERN void node_module_register(void* mod); + #ifdef NODE_SHARED_MODE + # define NODE_CTOR_PREFIX + #else +-# define NODE_CTOR_PREFIX static ++# define NODE_CTOR_PREFIX + #endif + + #if defined(_MSC_VER) diff --git a/nodejs.py b/nodejs.py index 81374b00e..bd6111ea2 100644 --- a/nodejs.py +++ b/nodejs.py @@ -1,11 +1,12 @@ """ -Utility-belt script to manage the Node.js dependency +Utility-belt script to manage the Node.js/V8 dependency """ import argparse import collections import fnmatch import glob import io +from itertools import ifilter import os import sys import tarfile @@ -16,6 +17,8 @@ import build_system.build_utils as utils import build_system.build_settings as settings +CMD_LINEBREAK = "\n\n" + # helper classes to show zipping progress # original idea: https://stackoverflow.com/a/3668977/425532 class ReadProgressFileObject(io.FileIO): @@ -39,14 +42,11 @@ def write(self, b): sys.stdout.flush() return io.FileIO.write(self, b) -Command = collections.namedtuple("Command", "aliases function") +Command = collections.namedtuple("Command", "name function help") DepsDirectory = collections.namedtuple("DepsDirectory", "path include") -# Command-Line setup -parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) - #----------------------------------------------------------------------- -def flush_cache(silent = False): +def flush_cache(args = None, silent = False): if not silent: print "[flush-cache]" @@ -56,42 +56,66 @@ def flush_cache(silent = False): print "Done" cmd_flush_cache = Command( - aliases=["flush-cache", "fc"], + name="flush-cache", function=flush_cache, + help="Move any Node.js/V8 native build-artifacts (.o/.a/.lib) from the './node' directory into the 'node.out' cache subdirectory\n" + \ + " of the respective vendor/platform/architecture." 
) #----------------------------------------------------------------------- -def git_init(): - print "[git-init]" +def git_clone(args): + print "[git-clone]" # TODO: add CLI overide options # - Node version # - J2V8 version - utils.store_nodejs_output(None, ".") + flush_cache(silent=True) if (not os.path.exists("node")): print "Cloning Node.js version: " + settings.NODE_VERSION # NOTE: autocrlf=false is very important for linux based cross-compiles of Node.js to work on a windows docker host utils.execute("git clone https://github.com/nodejs/node --config core.autocrlf=false --depth 1 --branch v" + settings.NODE_VERSION) else: - print "Node.js is already cloned & checked out" - apply_diff(True) + print "Skipped git-clone: Node.js source-code is already cloned & checked out at the './node' directory." + + print "Done" + +cmd_git_clone = Command( + name="git-clone", + function=git_clone, + help=" Clone the C++ source-code from the official Node.js GitHub repository." + \ + "\n (the Node.js version branch from build_settings.py will be checked out automatically)" +) +#----------------------------------------------------------------------- +def git_checkout(args): + print "[git-checkout]" + + flush_cache(silent=True) + + if (os.path.exists("node")): + print "Checkout Node.js version: " + settings.NODE_VERSION + + # TODO: is there a way to fetch/checkout only a single remote tag + utils.execute("git fetch -v --progress --tags --depth 1 origin", "node") + utils.execute("git checkout --progress tags/v" + settings.NODE_VERSION + " -b v" + settings.NODE_VERSION, "node") + else: + print "ERROR: Node.js source-code was not yet cloned into the './node' directory, run 'python nodejs.py git-clone' first." print "Done" -cmd_git_init = Command( - aliases=["git-init", "gi"], - function=git_init +cmd_git_checkout = Command( + name="git-checkout", + function=git_checkout, + help="Checkout the correct git branch for the Node.js version specified in build_settings.py" ) #----------------------------------------------------------------------- -def package(): +def package(platforms = None): print "[package]" - platforms = sys.argv[2:] - full = len(platforms) == 0 + full = platforms == None or len(platforms) == 0 # make sure all node.js binaries are stored in the cache before packaging - flush_cache(True) + flush_cache(silent=True) # C++ header files # NOTE: see https://stackoverflow.com/a/4851555/425532 why this weird syntax is necessary here @@ -116,7 +140,7 @@ def __add_platform_deps(platform, include, vendor = None): ) for arch in target.architectures ] - # speciffy the platforms & file patterns that should be included + # specify the platforms & file patterns that should be included __add_platform_deps(c.target_android, [".o", ".a"]) __add_platform_deps(c.target_linux, [".o", ".a"]) __add_platform_deps(c.target_linux, [".o", ".a"], vendor = c.vendor_alpine) @@ -169,11 +193,58 @@ def __add_platform_deps(platform, include, vendor = None): print "generated: " + package_filename cmd_package = Command( - aliases=["package", "pkg"], - function=package + name="package", + function=package, + help="Create a .tar.bz2 dependency package with all the currently built Node.js/V8 binaries from the './node.out' cache directories." 
) #----------------------------------------------------------------------- -def store_diff(): +def touch(platforms = None): + full = platforms == None or len(platforms) == 0 + + # make sure all node.js binaries are stored in the cache before resetting file-times + flush_cache(silent=True) + + dependencies = { + "list": [], + } + + # TODO: extract shared code between this and "package" command + def __add_platform_deps(platform, include, vendor = None): + target = bc.platform_configs.get(platform) + vendor_str = (vendor + "-" if vendor else "") + selected = (vendor_str + platform) in platforms + + if (full or selected): + dependencies["list"] += [ + DepsDirectory( + path="./node.out/" + vendor_str + platform + "." + arch + "/", + include=["j2v8.node.out"] + include + ) for arch in target.architectures + ] + + # specify the platforms & file patterns that should be included + __add_platform_deps(c.target_android, [".o", ".a"]) + __add_platform_deps(c.target_linux, [".o", ".a"]) + __add_platform_deps(c.target_linux, [".o", ".a"], vendor = c.vendor_alpine) + __add_platform_deps(c.target_macos, [".a"]) + __add_platform_deps(c.target_win32, [".lib"]) + + # set modification-time of all found binary files + for dep in dependencies["list"]: + print "set current file-time " + dep.path + for root, dirs, filenames in os.walk(dep.path): + for pattern in dep.include: + for file_name in fnmatch.filter(filenames, '*' + pattern): + file_path = os.path.join(root, file_name) + utils.touch(file_path) + +cmd_touch = Command( + name="touch", + function=touch, + help="Set modification-time of all currently built Node.js/V8 binaries in the './node.out' cache directories." +) +#----------------------------------------------------------------------- +def store_diff(args): print "[store-diff]" patch_file = os.path.join("..", "node.patches", settings.NODE_VERSION + ".diff") @@ -183,11 +254,14 @@ def store_diff(): print "Done" cmd_store_diff = Command( - aliases=["store-diff", "sd"], - function=store_diff + name="store-diff", + function=store_diff, + help="Create a patch-file in the './node.patches' directory with the current local modifications\n" + + " to the Node.js/V8 source-code.\n" + + " (the Node.js version from build_settings.py will be included in the patch filename)." ) #----------------------------------------------------------------------- -def apply_diff(silent = False): +def apply_diff(args, silent = False): if not silent: print "[apply-diff]" @@ -203,32 +277,56 @@ def apply_diff(silent = False): print "Done" cmd_apply_diff = Command( - aliases=["apply-diff", "ad"], - function=apply_diff + name="apply-diff", + function=apply_diff, + help=" Apply a previously created patch-file to the currently checked out Node.js/V8 source-code." 
) #----------------------------------------------------------------------- -all_cmds = [ - cmd_flush_cache, - cmd_git_init, - cmd_package, - cmd_store_diff, - cmd_apply_diff, -] - -parser.add_argument("cmd", - metavar="command", - nargs=1, - type=str, - choices=[cmd for commands in all_cmds for cmd in commands.aliases]) - -parser.add_argument("rest", - nargs="*", - help=argparse.SUPPRESS) - -args = parser.parse_args() - -for cmd_tuple in all_cmds: - if (args.cmd[0] in cmd_tuple.aliases): - cmd_tuple.function() - break +#----------------------------------------------------------------------- +# Command-Line setup +#----------------------------------------------------------------------- +commands = { + "git": { + "__help": " Download and manage the Node.js/V8 source code for building J2V8 from source.", + "clone": cmd_git_clone, + "checkout": cmd_git_checkout, + }, + "bin": { + "__help": " Manage the binary build-artifacts that are produced by Node.js/V8 builds.", + "flush": cmd_flush_cache, + "package": cmd_package, + "touch": cmd_touch, + }, + "diff": { + "__help": "Create and apply Git patch-files for Node.js that are required for interoperability with J2V8.", + "create": cmd_store_diff, + "apply": cmd_apply_diff, + }, +} +#----------------------------------------------------------------------- +def parse_sub_command(args, choices, help_formatter, extra_args = None): + parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) + help_str = [c + " " + help_formatter(c) for c in choices] + parser.add_argument("command", help="\n\n".join(help_str) + "\n\n", choices=choices) + + if (extra_args): + extra_args(parser) + + args = parser.parse_args(args) + return args +#----------------------------------------------------------------------- + +# parse first level command +args = parse_sub_command(sys.argv[1:2], commands, lambda c: commands[c].get("__help")) +lvl1_cmd = commands.get(args.command) + +# parse second level command +sub_choices = filter(lambda x: x != "__help", lvl1_cmd) +args = parse_sub_command(sys.argv[2:], sub_choices, lambda c: lvl1_cmd[c].help, \ + lambda parser: parser.add_argument("args", nargs="*")) +lvl2_cmd = args.command + +# get the final command handler and delegate all further parameters to it +cmd_handler = lvl1_cmd.get(lvl2_cmd) +cmd_handler.function(sys.argv[3:]) diff --git a/pom.xml b/pom.xml index c409b634b..02a524200 100644 --- a/pom.xml +++ b/pom.xml @@ -1,18 +1,31 @@ 4.0.0 - UTF-8 - alpine-linux + UTF-8 + + linux gtk - x86_64 + x86_64 com.eclipsesource.j2v8 - j2v8_alpine-linux_x86_64 + j2v8_linux_x86_64 4.8.0-SNAPSHOT bundle - j2v8_alpine-linux_x86_64 + j2v8_linux_x86_64 J2V8 is a set of Java bindings for V8 https://github.com/eclipsesource/j2v8 diff --git a/src/main/java/com/eclipsesource/v8/V8.java b/src/main/java/com/eclipsesource/v8/V8.java index 65d4500cf..08d0d4980 100644 --- a/src/main/java/com/eclipsesource/v8/V8.java +++ b/src/main/java/com/eclipsesource/v8/V8.java @@ -249,7 +249,9 @@ private void notifyReferenceDisposed(final V8Value object) { private static void checkNativeLibraryLoaded() { if (!nativeLibraryLoaded) { - String message = "J2V8 native library not loaded (" + LibraryLoader.computeLibraryShortName(true) + ")"; + String vendorName = LibraryLoader.computeLibraryShortName(true); + String baseName = LibraryLoader.computeLibraryShortName(true); + String message = "J2V8 native library not loaded (" + baseName + "/" + vendorName + ")"; if (nativeLoadError != null) { throw new IllegalStateException(message, nativeLoadError); 
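The `pom.xml` defaults above switch the Maven artifact from the Alpine-vendored name (`j2v8_alpine-linux_x86_64`) back to the plain `j2v8_linux_x86_64`; vendored and non-vendored names follow the same composition rule. A small illustrative sketch (the helper is hypothetical — the real logic lives in `LibraryLoader.computeLibraryShortName` and the shared Maven build steps):

```python
# Illustrative only: how the vendor/platform/architecture parts combine into
# the J2V8 artifact and native-library short names (hypothetical helper).
def library_short_name(platform, arch, vendor=None):
    vendor_str = (vendor + "-" if vendor else "")  # same vendor-prefix rule as in nodejs.py above
    return "j2v8_" + vendor_str + platform + "_" + arch

print(library_short_name("linux", "x86_64"))            # j2v8_linux_x86_64 (new pom.xml default)
print(library_short_name("linux", "x86_64", "alpine"))  # j2v8_alpine-linux_x86_64 (old default)
```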
@@ -1565,6 +1567,19 @@ protected void releaseMethodDescriptor(final long v8RuntimePtr, final long metho private native static boolean _isRunning(final long v8RuntimePtr); + private native static boolean _isNodeCompatible(); + + public static boolean isNodeCompatible() { + if (!nativeLibraryLoaded) { + synchronized (lock) { + if (!nativeLibraryLoaded) { + load(null); + } + } + } + return _isNodeCompatible(); + } + void addObjRef(final V8Value reference) { objectReferences++; if (!referenceHandlers.isEmpty()) { diff --git a/src/test/java/com/eclipsesource/v8/NodeJSTest.java b/src/test/java/com/eclipsesource/v8/NodeJSTest.java index 3d2aa95c7..7d934a969 100644 --- a/src/test/java/com/eclipsesource/v8/NodeJSTest.java +++ b/src/test/java/com/eclipsesource/v8/NodeJSTest.java @@ -13,6 +13,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.IOException; @@ -20,12 +21,20 @@ import org.junit.After; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; public class NodeJSTest { private NodeJS nodeJS; + @BeforeClass + public static void beforeClass() { + // only run this test if the underlying native J2V8 library was compiled + // with the Node.js features included, otherwise just skip all the tests + assumeTrue(V8.isNodeCompatible()); + } + @Before public void setup() { nodeJS = NodeJS.createNodeJS(); From 7ca861825e3166d81c36cae6ade829204843b7a8 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Tue, 15 Aug 2017 21:34:59 +0200 Subject: [PATCH 09/14] more CLI documentation & build-step args - some more documentation for the build CLI (in BUILDING.md) - j2v8junit build-step is now called j2v8test - added CLI feature to pass custom parameters to build-tools used in build-steps - can now pass Maven/Gradle arguments to "--j2v8test" build-step - reworked NodeJSTest skipping to also work for Android tests (spoon) - fixed & improved shared java build-steps code --- BUILDING.md | 92 +++++++++++++++---- build_system/build_constants.py | 7 +- build_system/build_executor.py | 32 ++++++- build_system/cli.py | 54 +++++++---- build_system/config_android.py | 20 ++-- build_system/config_linux.py | 4 +- build_system/config_macos.py | 4 +- build_system/config_win32.py | 4 +- build_system/constants.py | 2 +- build_system/java_build_steps.py | 34 ++++++- build_system/run_tests.py | 17 ++++ build_system/shared_build_steps.py | 2 +- build_system/tests/runner/test_asserts.py | 13 ++- build_system/tests/runner/test_result.py | 51 ++++++++-- build_system/tests/runner/test_runner.py | 3 +- .../tests/test_alpine_linux_docker.py | 42 +++++++++ build_system/tests/test_android_docker.py | 45 +++++++++ build_system/tests/test_linux_docker.py | 4 +- build_system/tests/test_macos_vagrant.py | 4 +- build_system/tests/test_win32_docker.py | 4 +- build_system/tests/test_win32_native.py | 4 +- docker/linux/Dockerfile | 2 +- docker/win32/Dockerfile | 2 +- j2v8-cli.cmd | 2 + j2v8-cli.sh | 2 + .../java/com/eclipsesource/v8/NodeJSTest.java | 30 ++++-- 26 files changed, 392 insertions(+), 88 deletions(-) create mode 100644 build_system/tests/test_alpine_linux_docker.py create mode 100644 build_system/tests/test_android_docker.py diff --git a/BUILDING.md b/BUILDING.md index e3502c0a8..ecc204e10 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -1,21 +1,17 @@ -# Build-System CLI +# Getting started / building from source -## Non-interactive -``` -python 
build.py -h, --help +1. clone the source code from the [J2V8 GitHub repository](https://github.com/eclipsesource/J2V8) +2. run `j2v8-cli.cmd` (on Win32) or `source j2v8-cli.sh` on MacOS / Linux +3. `nodejs git clone` to clone the Node.js/V8 source code +4. `nodejs diff apply` to apply the required modifications to the Node.js source code +5. start the desired J2V8 build either via `build -i` or `build ...args` (see below for details) -usage: build.py [-h] --target {android,linux,macos,win32} --arch {x86,x64,arm} - [--vendor VENDOR] [--keep-native-libs] [--node-enabled] - [--docker] [--vagrant] [--sys-image SYS_IMAGE] [--no-shutdown] - [--interactive] - [build-steps [build-steps ...]] -``` -``` -python build.py -v alpine -t linux -a x64 -dkr -img openjdk:8u131-alpine -ne j2v8 -``` +# Build-System CLI ## Interactive -``` +```shell +build --i, --interactive +# or python build.py --i, --interactive entering interactive mode... @@ -37,15 +33,73 @@ Building: Docker >> alpine-linux-x64 >> NODE_ENABLED Override build-steps ? (leave empty to run pre-configured steps): j2v8 ``` +## Non-interactive +```shell +build -h, --help +# or +python build.py -h, --help + +usage: build [-h] --target {android,linux,macos,win32} --arch {x86,x64,arm} + [--vendor VENDOR] [--keep-native-libs] [--node-enabled] + [--docker] [--vagrant] [--sys-image SYS_IMAGE] [--no-shutdown] + [--redirect-stdout] [--interactive] + [build-steps [build-steps ...]] +``` + +### Basic Examples + +Build for Alpine-Linux x64 using Docker and Node.js features included:
+`build -v alpine -t linux -a x64 -dkr -ne` + +Build for MacOSX x64 using Vagrant excluding Node.js features:
+`build -t macos -a x64 -vgr` + +Build for Windows x64 directly on the host-system, Node.js features included:
+`build -t win32 -a x64 -ne` + +### Build-Step syntax + +If no build-steps are specified, then the CLI will run `all` available build-steps by default. +To see a list of available build-steps run `build --help` or see the ***Build-Steps*** section below. + +For ease of use, there are also some advanced build-step aliases that when specified will run a collection of some of the base-steps: + +- `all` ... is the default, and will run all known build-steps +- `native` ... will run only the build-steps that are relevant for building **native** artifacts + - `node_js`, `j2v8_cmake`, `j2v8_jni`, `j2v8_cpp`, `j2v8_optimize` +- `j2v8` ... runs all build-steps, except for `nodejs` and `j2v8test` +- `java` ... alias for the single `j2v8java` step +- `test` ... alias for the single `j2v8test` step + +#### Anti-Steps +provide a way to remove a particular step, or a step-alias from the set of build-steps that should be run. To use such an anti-step, just prefix any of the available build-steps with the "~" symbol. + +Build everything but do not optimize and do not run J2V8 unit tests:
+`build <...other-args> all ~j2v8optimize ~test` + +Build only the Java parts and also run tests:
+`build <...other-args> all ~native` + +#### Step-Arguments + +For some of the build-steps you can pass additional command-line parameters, which are forwarded as arguments to the CLI build-tool that the particular build-step invokes. + +Run the `j2v8test` step with additional args that will be passed to Maven:
+(e.g. run only the `LibraryLoaderTest`)
+`build -t linux -a x64 --j2v8test="-Dtest=LibraryLoaderTest"` + + + # Build-Steps The J2V8 build-system performs several build steps in a fixed order to produce the final J2V8 packages for usage on the designated target platforms. What follows is a short summary for what each of the executed build-steps does and what output artifacts are produced by each step. ``` -Node.js --> CMake --> JNI --> C++ --> Optimize --> Java/Android --> JUnit +Node.js --> CMake --> JNI --> C++ --> Optimize --> Java/Android Build --> Java/Android Test ``` --- ## Node.js +CLI name: `nodejs` Builds the [Node.js](https://nodejs.org/en/) & [V8](https://developers.google.com/v8/) dependency artifacts that are later linked into the J2V8 native bridge code. (only works if the Node.js source was checked out into the J2V8 `./node` directory) @@ -65,6 +119,7 @@ __Artifacts:__ - `./node/Release/` --- ## CMake +CLI name: `j2v8cmake` Uses [CMake](https://cmake.org/) to generate the native Makefiles / IDE project files to later build the J2V8 C++ native bridge shared libraries. @@ -80,6 +135,7 @@ __Artifacts:__ - `./cmake.out/{platform}.{architecture}/` --- ## JNI Header Generation +CLI name: `j2v8jni` Generate the JNI glue header file from the native method definitions of the Java `V8` class. @@ -92,6 +148,7 @@ __Artifacts:__ - `./jni/com_eclipsesource_v8_V8Impl.h` --- ## C++ +CLI name: `j2v8cpp` Compile and link the J2V8 native shared libraries (.so/.dylib/.dll), which contain the C++ JNI bridge code to interop with the embedded Node.js / V8 parts. @@ -108,6 +165,7 @@ __Artifacts:__ - e.g. `./cmake.out/linux.x64/libj2v8-alpine-linux-x86_64.so` --- ## Optimize +CLI name: `j2v8optimize` The native J2V8 libraries are optimized for performance and/or filesize by using the available tools of the target-platform / compiler-toolchain. @@ -127,6 +185,7 @@ __Artifacts:__ - e.g. `./cmake.out/linux.x64/libj2v8-alpine-linux-x86_64.so` --- ## Java / Android +CLI name: `j2v8java` / `java` Compiles the Java source code and packages it, including the previously built native libraries, into the final package artifacts. For the execution of this build-step [Maven](https://maven.apache.org/) (Java) or [Gradle](https://gradle.org/) (Android) are used for the respective target platforms. @@ -148,7 +207,8 @@ __Artifacts:__ - Gradle Android packages - `./build/outputs/aar/j2v8-release.aar` --- -## JUnit +## Java Tests +CLI name: `j2v8test` / `test` Runs the Java ([JUnit](http://junit.org/)) unit tests. 
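The step-arguments described above can also be supplied programmatically, which is how the build-system test-suite later in this patch drives its builds. A minimal sketch, assuming it runs with `./build_system` on the module path (as `run_tests.py` does):

```python
# Sketch: trigger a Dockerized Linux build and forward a step-argument to the
# Maven test step (mirrors build_system/tests/test_linux_docker.py below).
import constants as c
import build_executor as bex

params = {
    "target": c.target_linux,
    "arch": c.arch_x64,
    "docker": True,                          # cross-compile inside the Linux Docker container
    "node_enabled": True,                    # include the Node.js features
    "buildsteps": ["j2v8", "test"],          # step aliases, as documented above
    "j2v8test": "-Dtest=LibraryLoaderTest",  # forwarded to Maven by the j2v8test step
}

bex.execute_build(params)
```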
diff --git a/build_system/build_constants.py b/build_system/build_constants.py index 82d763919..869924a12 100644 --- a/build_system/build_constants.py +++ b/build_system/build_constants.py @@ -24,7 +24,7 @@ CLIStep(c.build_j2v8_optimize, " The native J2V8 libraries are optimized for performance and/or filesize by using the available tools of the target-platform / compiler-toolchain."), CLIStep(c.build_j2v8_java, " Compiles the Java source code and packages it, including the previously built native libraries, into the final package artifacts.\n" + " For the execution of this build-step Maven (Java) or Gradle (Android) are used for the respective target platforms."), - CLIStep(c.build_j2v8_junit, " Runs the Java (JUnit) unit tests."), + CLIStep(c.build_j2v8_test, " Runs the Java (JUnit/Gradle) unit tests."), ] # build_steps_help = dict(atomic_build_steps) @@ -34,7 +34,7 @@ advanced_steps = [ # atomic aliases CLIStep(c.build_java, " Alias for " + c.build_j2v8_java), - CLIStep(c.build_test, " Alias for " + c.build_j2v8_junit), + CLIStep(c.build_test, " Alias for " + c.build_j2v8_test), # multi-step aliases CLIStep(c.build_all, " Run all build steps."), @@ -43,11 +43,8 @@ " This is useful when building with a pre-compiled Node.js dependency package."), ] -# advanced_steps_help = dict(advanced_steps) - advanced_steps_list = [s.id for s in advanced_steps] - avail_build_steps = atomic_build_step_sequence + advanced_steps_list #----------------------------------------------------------------------- diff --git a/build_system/build_executor.py b/build_system/build_executor.py index 8112c43e6..7e25ddb28 100644 --- a/build_system/build_executor.py +++ b/build_system/build_executor.py @@ -74,7 +74,7 @@ def init_buildsteps(): # atomic aliases atomic_step(c.build_j2v8_java, c.build_java) - atomic_step(c.build_j2v8_junit, c.build_test) + atomic_step(c.build_j2v8_test, c.build_test) # multi-step alias: build only the native parts (includes nodejs) multi_step(c.build_native, [ @@ -87,7 +87,7 @@ def init_buildsteps(): # multi-step alias: build everything that belongs to J2V8 (excludes Node.js) # this is useful when building J2V8 with a pre-compiled Node.js dependency package - multi_step(c.build_j2v8, [c.build_all], [c.build_node_js, c.build_j2v8_junit]) + multi_step(c.build_j2v8, [c.build_all], [c.build_node_js, c.build_j2v8_test]) def evaluate_build_step_option(step): """Find the registered evaluator function for the given step and execute it""" @@ -157,7 +157,20 @@ def execute_build(params): parsed_steps = BuildState.parsed_steps parsed_steps.clear() - # go through the raw list of build-steps (given by the CLI or an API call) + # first look for the advanced form of build-step where it might be specified with some arguments to be passed + # to the underlying build-tool (e.g. 
--j2v8test="-Dtest=NodeJSTest") + for step in bc.atomic_build_step_sequence: + step_args = getattr(params, step, None) + + if step_args: + parsed_steps.add(step) + + # if there were no special build-step args or atomic build-step args passed + # then fall back to the default behavior and run all known steps + if not any(parsed_steps) and not any(params.buildsteps): + params.buildsteps = ["all"] + + # then go through the raw list of basic build-steps (given by the CLI or an API call) # and generate a list of only the atomic build-steps that were derived in the evaluation for step in params.buildsteps: evaluate_build_step_option(step) @@ -205,6 +218,16 @@ def execute_build_step(build_system, build_step): if (cross_cfg): cross_compiler = target_platform.cross_compiler(cross_sys) + parsed_step_args = "" + + # look for build-step arguments that were passed in by the user + # e.g. --j2v8test="-Dtest=..." and pass them down to the cross-agent also + for step in bc.atomic_build_step_sequence: + step_args = getattr(params, step, None) + + if step_args: + parsed_step_args += " --" + step + "='" + step_args + "'" + # invoke the build.py CLI within the virtualized / self-contained build-system provider cross_cfg.custom_cmd = "python ./build.py " + \ "--cross-agent " + cross_sys + \ @@ -212,7 +235,7 @@ def execute_build_step(build_system, build_step): (" -ne" if params.node_enabled else "") + \ (" -v " + params.vendor if params.vendor else "") + \ (" -knl " if params.keep_native_libs else "") + \ - " " + " ".join(parsed_steps) + " " + " ".join(parsed_steps) + parsed_step_args # apply meta-vars & util functions cross_cfg.compiler = cross_compiler @@ -269,6 +292,7 @@ def execute_build_step(build_system, build_step): target_step.docker = params.docker target_step.vagrant = params.vagrant target_step.keep_native_libs = params.keep_native_libs + target_step.args = getattr(params, step, None) # run the current BuildStep execute_build_step(target_compiler, target_step) diff --git a/build_system/cli.py b/build_system/cli.py index 3708d4de3..61ae6ecac 100644 --- a/build_system/cli.py +++ b/build_system/cli.py @@ -5,21 +5,34 @@ class BuildParams(object): """Value container for all build-parameters""" - def __init__(self, param_dict): - known_params = { - "target": None, - "arch": None, - "vendor": None, - "keep_native_libs": None, - "node_enabled": None, - "docker": None, - "vagrant": None, - "sys_image": None, - "no_shutdown": None, - "redirect_stdout": None, - "buildsteps": c.build_all, - } + # essential build CLI parameters + user_params = { + "target": None, + "arch": None, + "vendor": None, + "keep_native_libs": None, + "node_enabled": None, + "docker": None, + "vagrant": None, + "sys_image": None, + "no_shutdown": None, + "redirect_stdout": None, + "buildsteps": c.build_all, + } + + # additional --buildstep parameters (e.g. 
--j2v8test) + step_arg_params = dict((step, None) for step in bc.atomic_build_step_sequence) + + # collection of all known parameters + known_params = {} + known_params.update(user_params) + known_params.update(step_arg_params) + + def __init__(self, param_dict): + # only the known & accepted parameters will be copied + # from the input dictionary, to an object-property of the BuildParams object + known_params = BuildParams.known_params unhandled = set(param_dict.keys()).difference(set(known_params.keys())) @@ -151,8 +164,17 @@ def init_args(parser): "\n".join([s.id + s.help for s in bc.advanced_steps]), metavar="build-steps", nargs="*", - default="all", - choices=bc.avail_build_steps) + default=None, + # NOTE: an empty list is what is passed to "buildsteps" when the user does not specify any steps explicitly + choices=bc.avail_build_steps + [[]]) + + #----------------------------------------------------------------------- + # Build-Steps with Arguments + #----------------------------------------------------------------------- + for step_name in bc.atomic_build_step_sequence: + parser.add_argument("--" + step_name, + help=argparse.SUPPRESS, + dest=step_name) def get_parser(): """Get a CLI parser instance that accepts all supported build.py parameters and commands""" diff --git a/build_system/config_android.py b/build_system/config_android.py index 54efc4900..fe4394624 100644 --- a/build_system/config_android.py +++ b/build_system/config_android.py @@ -47,10 +47,10 @@ def build_j2v8_cmake(config): cmake_vars = cmu.setAllVars(config) cmake_toolchain = cmu.setToolchain("$BUILD_CWD/docker/android/android.$ARCH.toolchain.cmake") - return [ - "mkdir -p " + u.cmake_out_dir, - "cd " + u.cmake_out_dir, - "rm -rf CMakeCache.txt CMakeFiles/", + return \ + u.mkdir(u.cmake_out_dir) + \ + ["cd " + u.cmake_out_dir] + \ + u.rm("CMakeCache.txt CMakeFiles/") + [ """cmake \ -DCMAKE_BUILD_TYPE=Release \ %(cmake_vars)s \ @@ -81,15 +81,17 @@ def build_j2v8_java(config): android_config.build_step(c.build_j2v8_java, build_j2v8_java) #----------------------------------------------------------------------- -def build_j2v8_junit(config): +def build_j2v8_test(config): # if you are running this step without cross-compiling, it is assumed that a proper target Android device # or emulator is running that can execute the tests (platform + architecture must be compatible to the the build settings) + # add the extra step arguments to the command if we got some + step_args = getattr(config, "args", None) + step_args = " " + step_args if step_args else "" + test_cmds = \ u.setVersionEnv(config) + \ - u.gradle("spoon") - # u.gradle("spoon -PtestClass=com.eclipsesource.v8.LibraryLoaderTest,com.eclipsesource.v8.PlatformDetectorTest") - # u.gradle("connectedCheck --info") + u.gradle("spoon" + step_args) # we are running a build directly on the host shell if (not config.cross_agent): @@ -116,5 +118,5 @@ def build_j2v8_junit(config): return ["/usr/bin/supervisord -c /j2v8/docker/android/supervisord.conf"] -android_config.build_step(c.build_j2v8_junit, build_j2v8_junit) +android_config.build_step(c.build_j2v8_test, build_j2v8_test) #----------------------------------------------------------------------- diff --git a/build_system/config_linux.py b/build_system/config_linux.py index 38c5e6594..8bd565562 100644 --- a/build_system/config_linux.py +++ b/build_system/config_linux.py @@ -81,7 +81,7 @@ def build_j2v8_optimize(config): linux_config.build_step(c.build_j2v8_optimize, build_j2v8_optimize) 
#----------------------------------------------------------------------- -j.add_java_step(linux_config, c.build_j2v8_java, [u.java_build_cmd]) +j.add_java_build_step(linux_config) #----------------------------------------------------------------------- -j.add_java_step(linux_config, c.build_j2v8_junit, [u.java_tests_cmd]) +j.add_java_test_step(linux_config) #----------------------------------------------------------------------- diff --git a/build_system/config_macos.py b/build_system/config_macos.py index c85723835..fb33f86fd 100644 --- a/build_system/config_macos.py +++ b/build_system/config_macos.py @@ -68,7 +68,7 @@ def build_j2v8_cpp(config): macos_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp) #----------------------------------------------------------------------- -j.add_java_step(macos_config, c.build_j2v8_java, [u.java_build_cmd]) +j.add_java_build_step(macos_config) #----------------------------------------------------------------------- -j.add_java_step(macos_config, c.build_j2v8_junit, [u.java_tests_cmd]) +j.add_java_test_step(macos_config) #----------------------------------------------------------------------- diff --git a/build_system/config_win32.py b/build_system/config_win32.py index 42ec65605..f9f0531e2 100644 --- a/build_system/config_win32.py +++ b/build_system/config_win32.py @@ -77,7 +77,7 @@ def build_j2v8_cpp(config): win32_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp) #----------------------------------------------------------------------- -j.add_java_step(win32_config, c.build_j2v8_java, [u.java_build_cmd]) +j.add_java_build_step(win32_config) #----------------------------------------------------------------------- -j.add_java_step(win32_config, c.build_j2v8_junit, [u.java_tests_cmd]) +j.add_java_test_step(win32_config) #----------------------------------------------------------------------- diff --git a/build_system/constants.py b/build_system/constants.py index cd9c08593..2d3b9fb28 100644 --- a/build_system/constants.py +++ b/build_system/constants.py @@ -21,7 +21,7 @@ build_j2v8_cpp = 'j2v8cpp' build_j2v8_optimize = 'j2v8optimize' build_j2v8_java = 'j2v8java' -build_j2v8_junit = 'j2v8junit' +build_j2v8_test = 'j2v8test' # aliases build_java = 'java' diff --git a/build_system/java_build_steps.py b/build_system/java_build_steps.py index c740438cc..965e5c213 100644 --- a/build_system/java_build_steps.py +++ b/build_system/java_build_steps.py @@ -1,6 +1,15 @@ +import constants as c import shared_build_steps as u -def add_java_step(platform_config, build_step, step_cmds): +def add_java_build_step(platform_config): + # after the maven build is complete, copy the JAR artifact to the central output directory + __add_maven_step(platform_config, c.build_j2v8_java, u.java_build_cmd, [u.copyOutput]) + +def add_java_test_step(platform_config): + # running maven tests by themselves usually does not generate any output we need to copy + __add_maven_step(platform_config, c.build_j2v8_test, u.java_tests_cmd) + +def __add_maven_step(platform_config, build_step, step_cmd, post_step_cmds = []): # add the common preparation sequence for a maven build-step to the platform-config if not hasattr(platform_config, "prepare_maven"): platform_config.prepare_maven = lambda config: \ @@ -9,17 +18,32 @@ def add_java_step(platform_config, build_step, step_cmds): u.setJavaHome(config) #----------------------------------------------------------------------- # add a build-step that involves running maven and requires some preparation - def java_build_step(cmds): + def 
java_build_step(): def build_func(config): # update maven pom.xml settings u.apply_maven_config_settings(config) + # add the extra step arguments to the command if we got some + step_args = getattr(config, "args", None) + step_args = " " + step_args if step_args else "" + + post_cmds = [] + + # post-cmds can be strings or functions + for ps_cmd in post_step_cmds: + if callable(ps_cmd): + ps = ps_cmd(config) + post_cmds += ps + else: + post_cmds.append(ps_cmd) + # assemble the commands for this build-step # includes the preparation commands for maven + # and also any commands that should be run after the maven command is finished steps = \ platform_config.prepare_maven(config) + \ - cmds + \ - u.copyOutput(config) + [step_cmd + step_args] + \ + post_cmds # the shell was already prepared for running maven, # if another java step will run later on this does not to be done again @@ -28,5 +52,5 @@ def build_func(config): return steps return build_func #----------------------------------------------------------------------- - platform_config.build_step(build_step, java_build_step(step_cmds)) + platform_config.build_step(build_step, java_build_step()) #----------------------------------------------------------------------- diff --git a/build_system/run_tests.py b/build_system/run_tests.py index 0ac3ebcb0..e51ffac0c 100644 --- a/build_system/run_tests.py +++ b/build_system/run_tests.py @@ -1,13 +1,30 @@ +""" +This is the main entry-point for running the J2V8 build-system test suite. +Some of the tests require certain environmental conditions to be able to run, +e.g. on Windows, Vagrant (using the Virtual-Box provider) can not be run side-by-side +with HyperV (which is used by Docker-For-Windows virtualization) and therefore it +always requires a reconfiguration of OS-level virtualization features and a reboot +before one or the other collection of tests can be run. + +Therefore if you want to run the unit-tests below, you currently have to cherry-pick +the ones that can be run together on your particular host-platform environment & configuration. 
+""" from unittest import TestLoader, TestSuite from tests.runner.test_runner import SurePhyreTestRunner +import tests.test_android_docker +import tests.test_alpine_linux_docker import tests.test_linux_docker import tests.test_macos_vagrant import tests.test_win32_docker import tests.test_win32_native +# TODO: we could add some clever host-environment detection logic to even +# automate the decision which tests can or can not be run loader = TestLoader() suite = TestSuite(( + # loader.loadTestsFromModule(tests.test_android_docker), + loader.loadTestsFromModule(tests.test_alpine_linux_docker), loader.loadTestsFromModule(tests.test_linux_docker), # loader.loadTestsFromModule(tests.test_macos_vagrant), # loader.loadTestsFromModule(tests.test_win32_docker), diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py index ac8f8b03e..ec6d4d7a4 100644 --- a/build_system/shared_build_steps.py +++ b/build_system/shared_build_steps.py @@ -26,7 +26,7 @@ def handle_comment(self, data): # TODO: add CLI option to override / pass-in custom maven/gradle args # NOTE: --batch-mode is needed to avoid unicode symbols messing up stdout while unit-testing the build-system -java_build_cmd = "mvn clean verify --batch-mode -DskipTests -e" +java_build_cmd = "mvn clean verify -e --batch-mode -DskipTests" java_tests_cmd = "mvn test -e --batch-mode" # the ./ should work fine on all platforms diff --git a/build_system/tests/runner/test_asserts.py b/build_system/tests/runner/test_asserts.py index 6b65fcfd5..aeae97a85 100644 --- a/build_system/tests/runner/test_asserts.py +++ b/build_system/tests/runner/test_asserts.py @@ -2,6 +2,17 @@ def expectOutput(regex): """ After a test is completed successfully, also verify that the CLI output contains an expected regex pattern. """ def expectOutput_wrapper(func): - func.__testRegex = regex + + if not hasattr(func, "__testRegex"): + func.__testRegex = [] + + is_iterable = hasattr(regex, '__iter__') + + if is_iterable: + for rx in regex: + func.__testRegex.append(rx) + else: + func.__testRegex.append(regex) + return func return expectOutput_wrapper diff --git a/build_system/tests/runner/test_result.py b/build_system/tests/runner/test_result.py index 25ce43d61..3fa71623e 100644 --- a/build_system/tests/runner/test_result.py +++ b/build_system/tests/runner/test_result.py @@ -14,10 +14,31 @@ class TestOutcome: Success, Failure, Error, Skip = range(4) -TestRunData = collections.namedtuple("TestRunData", "outcome test errStr errObj output elapsed") -TestRunData.__new__.__defaults__ = (None,) * len(TestRunData._fields) +__TestRunData = collections.namedtuple("TestRunData", "outcome test errStr errObj output elapsed") +__TestRunData.__new__.__defaults__ = (None,) * len(__TestRunData._fields) + +class TestRunData(__TestRunData): + """ + Immutable tuple data-structure that contains the results of a single test-method that has been run. + + outcome -> one of the enumeration values of the "TestOutcome" class (Success, Failure, Error, Skip) + test -> information about which test-method this data is about + errStr -> an error string that was emitted if this test was not successful + errObj -> details about the output, exception and stackframe that were involved in a failing test + output -> a plain-text string of all the output (stdout/stdout) that was generated during this test + elapsed -> the duration that it took for the test-method to run + """ + pass class TestResult(TestResultBase): + """ + Collects and processes the results from an invoked set of tests. 
+ + The main purpose is to: + 1) Track times that individual tests needed to complete + 2) Collect the stdout/stderr that each of the tests produces + 3) Collect statistics and reporting-details for successful and failed test-runs + """ def __init__(self, streams, test_cases): TestResultBase.__init__(self) self.__sys_stdout = None @@ -67,6 +88,11 @@ def startTest(self, test): utils.write_log("INFO", "Running %(test_class)s.%(test_method)s" % locals()) def finish_test(self, test): + """ + This is run after each single test-method is finished, but the below logic + will only be executed once the very last test-method from the original + set of given unit-tests is completed. + """ if (self.testsRun != len(self.test_cases)): return @@ -119,12 +145,14 @@ def complete_test_case(self, test, test_info = None): Disconnect output redirection and return buffer. Safe to call multiple times. """ - output = self.outputBuffer.getvalue() - if (test_info): self.test_stop_time = datetime.datetime.now() + + test_output = self.outputBuffer.getvalue() + test_duration = self.test_stop_time - self.test_start_time + # merge data produced during test with additional meta-data - test_result = TestRunData(*(test_info[:-2] + (output, self.test_stop_time - self.test_start_time))) + test_result = TestRunData(*(test_info[:-2] + (test_output, test_duration))) self.all_results.append(test_result) @@ -165,11 +193,18 @@ def __assertTestOutput(self, test): regex = test_method.__dict__.get(test_regex_field) output = self.outputBuffer.getvalue() - match_ok = re.search(regex, output) + regex_mismatches = [] + + for rx in regex: + match_ok = re.search(rx, output) + + if (not match_ok): + regex_mismatches.append(rx) - if (not match_ok): + if (any(regex_mismatches)): + mismatches_str = "\n\t\t".join(regex_mismatches) try: - raise Exception("Unable to find expected pattern in test-output:\n\t\t" + regex) + raise Exception("Unable to find expected patterns in test-output:\n\t\t" + mismatches_str) except Exception: ex_nfo = sys.exc_info() self.addFailure(test, ex_nfo) diff --git a/build_system/tests/runner/test_runner.py b/build_system/tests/runner/test_runner.py index 99db4991a..e5627a1b1 100644 --- a/build_system/tests/runner/test_runner.py +++ b/build_system/tests/runner/test_runner.py @@ -9,8 +9,7 @@ import test_utils as utils class SurePhyreTestRunner(object): - """ - """ + """ Run the given TestSuite and collect statistics & timing information about the tests being run. 
""" def __init__(self): self.runner_start_time = None self.runner_stop_time = None diff --git a/build_system/tests/test_alpine_linux_docker.py b/build_system/tests/test_alpine_linux_docker.py new file mode 100644 index 000000000..0a09f6315 --- /dev/null +++ b/build_system/tests/test_alpine_linux_docker.py @@ -0,0 +1,42 @@ +import unittest + +from runner.test_asserts import * + +import constants as c +import build_executor as bex + +class TestAlpineLinuxDocker(unittest.TestCase): + + def with_x64_defaults(self, params): + x64_defaults = { + "vendor": c.vendor_alpine, + "target": c.target_linux, + "arch": c.arch_x64, + "docker": True, + "redirect_stdout": True, # important for test-logging + } + params.update(x64_defaults) + return params + + @expectOutput(r"\[WARNING\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 9, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_disabled(self): + + params = self.with_x64_defaults( + { + "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", + }) + + bex.execute_build(params) + + @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + def test_x64_node_enabled(self): + + params = self.with_x64_defaults( + { + "node_enabled": True, + "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", + }) + + bex.execute_build(params) diff --git a/build_system/tests/test_android_docker.py b/build_system/tests/test_android_docker.py new file mode 100644 index 000000000..fa23ef781 --- /dev/null +++ b/build_system/tests/test_android_docker.py @@ -0,0 +1,45 @@ +import unittest + +from runner.test_asserts import * + +import constants as c +import build_executor as bex + +class TestAndroidDocker(unittest.TestCase): + + def with_x86_defaults(self, params): + x86_defaults = { + "target": c.target_android, + "arch": c.arch_x86, + "docker": True, + "redirect_stdout": True, # important for test-logging + } + params.update(x86_defaults) + return params + + @expectOutput([ + r"assumption failure org\.junit\.AssumptionViolatedException: Skipped test \(Node\.js features not included in native library\)", + r"Total tests 9, assumption_failure 9", + r"\n:spoon\n\nBUILD SUCCESSFUL\n\nTotal time: ", + ]) + def test_x86_node_disabled(self): + + params = self.with_x86_defaults( + { + "buildsteps": ["j2v8", "test"], + "j2v8test": "-PtestClass=com.eclipsesource.v8.NodeJSTest", + }) + + bex.execute_build(params) + + @expectOutput(r"\n:spoon\n\nBUILD SUCCESSFUL\n\nTotal time: ") + def test_x86_node_enabled(self): + + params = self.with_x86_defaults( + { + "node_enabled": True, + "buildsteps": ["j2v8", "test"], + "j2v8test": "-PtestClass=com.eclipsesource.v8.NodeJSTest", + }) + + bex.execute_build(params) diff --git a/build_system/tests/test_linux_docker.py b/build_system/tests/test_linux_docker.py index 81d9d1b61..5bb614f53 100644 --- a/build_system/tests/test_linux_docker.py +++ b/build_system/tests/test_linux_docker.py @@ -17,12 +17,13 @@ def with_x64_defaults(self, params): params.update(x64_defaults) return params - @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + @expectOutput(r"\[WARNING\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 9, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") def test_x64_node_disabled(self): params = self.with_x64_defaults( { "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) 
bex.execute_build(params) @@ -36,6 +37,7 @@ def test_x64_node_enabled(self): { "node_enabled": True, "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) bex.execute_build(params) diff --git a/build_system/tests/test_macos_vagrant.py b/build_system/tests/test_macos_vagrant.py index 029483e6d..8b6b07ddc 100644 --- a/build_system/tests/test_macos_vagrant.py +++ b/build_system/tests/test_macos_vagrant.py @@ -18,12 +18,13 @@ def with_x64_defaults(self, params): params.update(x64_defaults) return params - @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + @expectOutput(r"\[WARNING\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 9, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") def test_x64_node_disabled(self): params = self.with_x64_defaults( { "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) bex.execute_build(params) @@ -35,6 +36,7 @@ def test_x64_node_enabled(self): { "node_enabled": True, "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) bex.execute_build(params) diff --git a/build_system/tests/test_win32_docker.py b/build_system/tests/test_win32_docker.py index b6f517bcd..1f5975fc7 100644 --- a/build_system/tests/test_win32_docker.py +++ b/build_system/tests/test_win32_docker.py @@ -17,12 +17,13 @@ def with_x64_defaults(self, params): params.update(x64_defaults) return params - @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + @expectOutput(r"\[WARNING\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 9, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") def test_x64_node_disabled(self): params = self.with_x64_defaults( { "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) bex.execute_build(params) @@ -34,6 +35,7 @@ def test_x64_node_enabled(self): { "node_enabled": True, "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) bex.execute_build(params) diff --git a/build_system/tests/test_win32_native.py b/build_system/tests/test_win32_native.py index de341df53..e4f256c8d 100644 --- a/build_system/tests/test_win32_native.py +++ b/build_system/tests/test_win32_native.py @@ -16,12 +16,13 @@ def with_x64_defaults(self, params): params.update(x64_defaults) return params - @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") + @expectOutput(r"\[WARNING\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 9, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest") def test_x64_node_disabled(self): params = self.with_x64_defaults( { "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) bex.execute_build(params) @@ -33,6 +34,7 @@ def test_x64_node_enabled(self): { "node_enabled": True, "buildsteps": ["j2v8", "test"], + "j2v8test": "-Dtest=NodeJSTest", }) bex.execute_build(params) diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile index f20eb1ba0..16100e7fc 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -32,4 +32,4 @@ ENV PATH "$PATH:/opt/cmake/bin" # download the most critical maven dependencies for the build beforehand COPY ./shared/pom.xml /temp WORKDIR /temp -RUN mvn clean verify -DskipTests || true +RUN mvn clean verify || true diff --git a/docker/win32/Dockerfile b/docker/win32/Dockerfile index 098b0626f..dde769116 100644 --- 
a/docker/win32/Dockerfile +++ b/docker/win32/Dockerfile @@ -42,4 +42,4 @@ RUN Remove-Item -Recurse -Force C:/j2v8 RUN mkdir C:/temp COPY ./shared/pom.xml C:/temp WORKDIR /temp -RUN Invoke-Command { mvn clean verify -DskipTests } -ErrorAction SilentlyContinue +RUN Invoke-Command { mvn clean verify } -ErrorAction SilentlyContinue diff --git a/j2v8-cli.cmd b/j2v8-cli.cmd index 0652878a4..8586b89ab 100644 --- a/j2v8-cli.cmd +++ b/j2v8-cli.cmd @@ -1,3 +1,5 @@ +:: This script adds aliases for some of the most often used commands for building J2V8 +:: to your current command-shell instance. (can be invoked as "j2v8-cli") @echo off doskey build=python build.py $* diff --git a/j2v8-cli.sh b/j2v8-cli.sh index 3c84bae79..fdbd4f12a 100755 --- a/j2v8-cli.sh +++ b/j2v8-cli.sh @@ -1,3 +1,5 @@ +# This script adds aliases for some of the most often used commands for building J2V8 +# to your current command-shell instance. (can be invoked as "source j2v8-cli.sh") alias build="python build.py" alias nodejs="python nodejs.py" alias citests="python build_system/run_tests.py" diff --git a/src/test/java/com/eclipsesource/v8/NodeJSTest.java b/src/test/java/com/eclipsesource/v8/NodeJSTest.java index 7d934a969..a6ccef1e1 100644 --- a/src/test/java/com/eclipsesource/v8/NodeJSTest.java +++ b/src/test/java/com/eclipsesource/v8/NodeJSTest.java @@ -13,7 +13,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; +import static org.junit.Assume.assumeFalse; import java.io.File; import java.io.IOException; @@ -28,30 +28,37 @@ public class NodeJSTest { private NodeJS nodeJS; - @BeforeClass - public static void beforeClass() { - // only run this test if the underlying native J2V8 library was compiled - // with the Node.js features included, otherwise just skip all the tests - assumeTrue(V8.isNodeCompatible()); - } - @Before public void setup() { + if (skipTest()) + return; + nodeJS = NodeJS.createNodeJS(); } @After public void tearDown() { + if (skipTest()) + return; + nodeJS.release(); } + private static boolean skipTest() { + return !V8.isNodeCompatible(); + } + + private final static String skipMessage = "Skipped test (Node.js features not included in native library)"; + @Test public void testCreateNodeJS() { + assumeFalse(skipMessage, skipTest()); // conditional skip assertNotNull(nodeJS); } @Test public void testSingleThreadAccess_Require() throws InterruptedException { + assumeFalse(skipMessage, skipTest()); // conditional skip final boolean[] result = new boolean[] { false }; Thread t = new Thread(new Runnable() { @Override @@ -73,6 +80,7 @@ public void run() { @Test public void testGetVersion() { + assumeFalse(skipMessage, skipTest()); // conditional skip String result = nodeJS.getNodeVersion(); assertEquals("7.4.0", result); @@ -80,6 +88,7 @@ public void testGetVersion() { @Test public void testSingleThreadAccess_HandleMessage() throws InterruptedException { + assumeFalse(skipMessage, skipTest()); // conditional skip final boolean[] result = new boolean[] { false }; Thread t = new Thread(new Runnable() { @Override @@ -99,6 +108,7 @@ public void run() { @Test public void testSingleThreadAccess_IsRunning() throws InterruptedException { + assumeFalse(skipMessage, skipTest()); // conditional skip final boolean[] result = new boolean[] { false }; Thread t = new Thread(new Runnable() { @Override @@ -118,6 +128,7 @@ public void run() { @Test public void testExecuteNodeScript_Startup() throws 
IOException { + assumeFalse(skipMessage, skipTest()); // conditional skip nodeJS.release(); File testScript = createTemporaryScriptFile("global.passed = true;", "testScript"); @@ -130,6 +141,7 @@ public void testExecuteNodeScript_Startup() throws IOException { @Test public void testExecNodeScript() throws IOException { + assumeFalse(skipMessage, skipTest()); // conditional skip nodeJS.release(); File testScript = createTemporaryScriptFile("global.passed = true;", "testScript"); @@ -143,6 +155,7 @@ public void testExecNodeScript() throws IOException { @Test public void testExecuteNodeScript_viaRequire() throws IOException { + assumeFalse(skipMessage, skipTest()); // conditional skip nodeJS.release(); File testScript = createTemporaryScriptFile("global.passed = true;", "testScript"); @@ -156,6 +169,7 @@ public void testExecuteNodeScript_viaRequire() throws IOException { @Test public void testExports() throws IOException { + assumeFalse(skipMessage, skipTest()); // conditional skip nodeJS.release(); File testScript = createTemporaryScriptFile("exports.foo=7", "testScript"); From e5e85bc3ce62769dffc7b8898ac3df77794a5395 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Tue, 15 Aug 2017 23:20:56 +0200 Subject: [PATCH 10/14] docs cleanup & link to BUILDING.md - small fix to Exception message for when the native lib can not be loaded (should show generic libname and vendor specific libname) --- BUILDING.md | 14 +++++++++-- README.md | 28 ++-------------------- src/main/java/com/eclipsesource/v8/V8.java | 2 +- 3 files changed, 15 insertions(+), 29 deletions(-) diff --git a/BUILDING.md b/BUILDING.md index ecc204e10..3189b6036 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -30,7 +30,7 @@ entering interactive mode... Select a predefined build-configuration to run: 2 Building: Docker >> alpine-linux-x64 >> NODE_ENABLED -Override build-steps ? (leave empty to run pre-configured steps): j2v8 +Override build-steps ? (leave empty to run pre-configured steps): nodejs j2v8 test ``` ## Non-interactive @@ -72,7 +72,7 @@ For ease of use, there are also some advanced build-step aliases that when speci - `test` ... alias for the single `j2v8test` step #### Anti-Steps -provide a way to remove a particular step, or a step-alias from the set of build-steps that should be run. To use such an anti-step, just prefix any of the available build-steps with the "~" symbol. +Anti-steps provide a way to exclude a particular step, or a step-alias from the set of build-steps that should be run. To use such an anti-step, just prefix any of the available build-steps with the "~" symbol. Build everything but do not optimize and do not run J2V8 unit tests:
`build <...other-args> all ~j2v8optimize ~test` @@ -223,3 +223,13 @@ __Artifacts:__ - Gradle Spoon test reports (Android only) - `./build/spoon/debug/` --- + +# Cross-Compiling + +For cross-compiling J2V8 uses [Docker](https://www.docker.com/) (android, linux, windows) and [Vagrant](https://www.vagrantup.com/) (macos, windows). +The full source-code (of both J2V8 and Node.js) on the build-host are just shared via mounted volumes with the Docker / Vagrant machines, so you can quickly make changes and perform builds fast. + +To invoke a cross-compile build, simply invoke the `build.py` script as usual but add the `--docker`, `-dkr` or `--vagrant`, `-vgr` flags. +This will automatically provision and run the necessary virtualization to run the requested build fully independent of your local environment. + +Note: using Docker / Vagrant for cross-compilation requires many gigabytes of hard-drive space as well as downloading the required images & tools. diff --git a/README.md b/README.md index 43cb19e21..9c8a8f39c 100644 --- a/README.md +++ b/README.md @@ -10,35 +10,11 @@ We developed J2V8 as a high performance engine for our multi-platform mobile too Building J2V8 ============= -Building J2V8 requires building both the native parts and the Java library (.jar/.aar file). To build the native parts we first build node.js as a library and then statically link J2V8 to that. The Java parts are built with maven/gradle. +Building J2V8 requires building both the native parts and the Java library (.jar/.aar file). To build the native parts we first build Node.js as a library and then statically link J2V8 to that. The Java parts are built with maven/gradle. J2V8 uses a cross-platform, cross-compiling build-system written in Python. -Follow these steps to build J2V8 from source: - -1) clone the Node.js source code - - `python prepare_build.py` - - This will download & prepare the latest compatible Node.js version for use in J2V8 - - The Node.js source code will be cloned into the local `node` sub-directory, which is the expected default location for the J2V8 build -2) build Node.js and the J2V8 library - - `python build.py --target linux --arch x64 --node-enabled --cross-compile` - - or shorthand - - `python build.py -t linux -a x64 -ne -x` - -For all available options, supported platforms and architectures you can consult the build-script help: - -`python build.py --help` - -Cross-Compiling ---------------- - -For cross-compiling J2V8 uses [Docker](https://www.docker.com/) (android, linux, windows) and [Vagrant](https://www.vagrantup.com/) (macos). -The full source-code (of both J2V8 and Node.js) on the build-host are just shared via mounted volumes with the Docker / Vagrant machines, so you can quickly make changes and perform builds fast. - -To invoke a cross-compile build, simply invoke the `build.py` script as usual but add the `--cross-compile`, `-x` flag. -This will automatically provision and run the necessary virtualization to run the requested build fully independent of your local environment. - -Note: using Docker / Vagrant for cross-compiliation requires many gigabytes of harddrive space as well as downloading the required images & tools. 
+For any further build instructions & details please read [BUILDING.md](BUILDING.md) Tutorials ========== diff --git a/src/main/java/com/eclipsesource/v8/V8.java b/src/main/java/com/eclipsesource/v8/V8.java index 08d0d4980..fea065bb1 100644 --- a/src/main/java/com/eclipsesource/v8/V8.java +++ b/src/main/java/com/eclipsesource/v8/V8.java @@ -250,7 +250,7 @@ private void notifyReferenceDisposed(final V8Value object) { private static void checkNativeLibraryLoaded() { if (!nativeLibraryLoaded) { String vendorName = LibraryLoader.computeLibraryShortName(true); - String baseName = LibraryLoader.computeLibraryShortName(true); + String baseName = LibraryLoader.computeLibraryShortName(false); String message = "J2V8 native library not loaded (" + baseName + "/" + vendorName + ")"; if (nativeLoadError != null) { From 01fe08856be3a70d357a6cb9b251c84f65f1ea74 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Wed, 16 Aug 2017 19:43:31 +0200 Subject: [PATCH 11/14] additional CLI examples & `build -i` step-parsing fix --- BUILDING.md | 6 ++++++ build_system/build_interactive.py | 27 ++++++++++++++++++++------- build_system/cli.py | 20 ++++++++++++++++++-- 3 files changed, 44 insertions(+), 9 deletions(-) diff --git a/BUILDING.md b/BUILDING.md index 3189b6036..0b9b41553 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -48,6 +48,12 @@ usage: build [-h] --target {android,linux,macos,win32} --arch {x86,x64,arm} ### Basic Examples +Build for Debian/Ubuntu Linux x64 on the host-system:
+`build -t linux -a x64` + +Build for Debian/Ubuntu Linux x64 using Docker:
+`build -t linux -a x64 -dkr` + Build for Alpine-Linux x64 using Docker and Node.js features included:
`build -v alpine -t linux -a x64 -dkr -ne` diff --git a/build_system/build_interactive.py b/build_system/build_interactive.py index 6f0300595..b2fe0a49e 100644 --- a/build_system/build_interactive.py +++ b/build_system/build_interactive.py @@ -1,9 +1,12 @@ """Provides a simple interactive CLI to start a selected build from a given set of build-configurations""" +import argparse import sys +import shlex import build_configs as bcfg import build_executor as bex import build_utils as utils +import cli as cli def run_interactive_cli(): idx = 0 @@ -23,19 +26,29 @@ def run_interactive_cli(): if not isinstance(sel_index, int) or sel_index < 0 or sel_index > len(bcfg.configs): utils.cli_exit("ERROR: Must enter a valid test index in the range [0 ... " + str(len(bcfg.configs)) + "]") - sel_cfg = bcfg.configs[sel_index] + selected_build_cfg = bcfg.configs[sel_index] - print ("Building: " + sel_cfg.get("name")) + print ("Building: " + selected_build_cfg.get("name")) print # newline - build_params = sel_cfg.get("params") + build_params = selected_build_cfg.get("params") - build_steps = \ + # use build-steps from sys.argv or alternatively ask the user + build_steps_argv = \ sys.argv[base_arg_count + 1:] \ if len(sys.argv) > base_arg_count + 1 \ - else raw_input("Override build-steps ? (leave empty to run pre-configured steps): ").split() + else shlex.split(raw_input("Override build-steps ? (leave empty to run pre-configured steps): ")) - if (len(build_steps) > 0): - build_params["buildsteps"] = build_steps + # create a parser that only expects the build-step args + parser = cli.get_blank_parser() + cli.init_build_steps(parser) + # parse the build-step syntax + build_step_params = parser.parse_args(build_steps_argv) + + # merge the potentially customized build-steps with the + # original pre-defined build-config para,s + build_params.update(vars(build_step_params)) + + # start the build bex.execute_build(build_params) diff --git a/build_system/cli.py b/build_system/cli.py index 61ae6ecac..13ddd730d 100644 --- a/build_system/cli.py +++ b/build_system/cli.py @@ -56,7 +56,14 @@ def __init__(self, param_dict): def init_args(parser): """Initialize all supported build.py parameters and commands on the CLI parser""" - + init_required_args(parser) + init_optional_args(parser) + init_feature_args(parser) + init_cross_compile_args(parser) + init_meta_args(parser) + init_build_steps(parser) + +def init_required_args(parser): #----------------------------------------------------------------------- # Essential build settings #----------------------------------------------------------------------- @@ -72,6 +79,7 @@ def init_args(parser): required=True, choices=bc.avail_architectures) +def init_optional_args(parser): #----------------------------------------------------------------------- # Optional build settings #----------------------------------------------------------------------- @@ -86,6 +94,7 @@ def init_args(parser): action="store_const", const=True) +def init_feature_args(parser): #----------------------------------------------------------------------- # J2V8 Feature switches #----------------------------------------------------------------------- @@ -96,6 +105,7 @@ def init_args(parser): action="store_const", const=True) +def init_cross_compile_args(parser): #----------------------------------------------------------------------- # Docker / Vagrant cross-compile settings #----------------------------------------------------------------------- @@ -123,6 +133,7 @@ def init_args(parser): action="store_const", 
const=True) +def init_meta_args(parser): #----------------------------------------------------------------------- # Meta-Args #----------------------------------------------------------------------- @@ -150,6 +161,7 @@ def init_args(parser): action="store_const", const=True) +def init_build_steps(parser): #----------------------------------------------------------------------- # Build-Steps #----------------------------------------------------------------------- @@ -178,6 +190,10 @@ def init_args(parser): def get_parser(): """Get a CLI parser instance that accepts all supported build.py parameters and commands""" - parser = argparse.ArgumentParser(prog="build", formatter_class=argparse.RawTextHelpFormatter) + parser = get_blank_parser() init_args(parser) return parser + +def get_blank_parser(): + parser = argparse.ArgumentParser(prog="build", formatter_class=argparse.RawTextHelpFormatter) + return parser From 25b7b5ccfcb7be5064da1c494712c7bfbaabd11a Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Thu, 17 Aug 2017 01:23:22 +0200 Subject: [PATCH 12/14] atexit fix & prettier `build -i` listing - we must use atexit to stop Vagrant VMs / Docker containers in case of an error --- BUILDING.md | 28 ++++++++++++----------- build_system/build_configs.py | 43 ++++++++++++++++++++++++----------- build_system/docker_build.py | 6 ++--- build_system/vagrant_build.py | 6 ++--- 4 files changed, 51 insertions(+), 32 deletions(-) diff --git a/BUILDING.md b/BUILDING.md index 0b9b41553..b917f7589 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -16,19 +16,21 @@ python build.py --i, --interactive entering interactive mode... -[0] Docker >> android-x86 >> NODE_ENABLED -[1] Docker >> android-arm >> NODE_ENABLED -[2] Docker >> alpine-linux-x64 >> NODE_ENABLED -[3] Docker >> linux-x64 >> NODE_ENABLED -[4] Docker >> linux-x86 >> NODE_ENABLED -[5] Vagrant >> macosx-x64 >> NODE_ENABLED -[6] Vagrant >> macosx-x86 >> NODE_ENABLED -[7] Native >> windows-x64 >> NODE_ENABLED -[8] Docker >> windows-x64 >> NODE_ENABLED -[9] Vagrant >> windows-x64 >> NODE_ENABLED - -Select a predefined build-configuration to run: 2 -Building: Docker >> alpine-linux-x64 >> NODE_ENABLED +[0] android-x86 @ Docker +[1] android-arm @ Docker +[2] alpine-linux-x64 @ Docker +[3] linux-x64 +[4] linux-x64 @ Docker +[5] linux-x86 @ Docker +[6] macosx-x64 +[7] macosx-x64 @ Vagrant +[8] macosx-x86 @ Vagrant +[9] windows-x64 +[10] windows-x64 @ Docker +[11] windows-x64 @ Vagrant + +Select a predefined build-configuration to run: 3 +Building: linux-x64 Override build-steps ? 
(leave empty to run pre-configured steps): nodejs j2v8 test ``` diff --git a/build_system/build_configs.py b/build_system/build_configs.py index dcf8b4e1b..7824cebee 100644 --- a/build_system/build_configs.py +++ b/build_system/build_configs.py @@ -10,7 +10,7 @@ configs = [ # ANDROID builds { - "name": "Docker >> android-x86 >> NODE_ENABLED", + "name": "android-x86 @ Docker", "params": { "target": c.target_android, "arch": c.arch_x86, @@ -19,7 +19,7 @@ }, }, { - "name": "Docker >> android-arm >> NODE_ENABLED", + "name": "android-arm @ Docker", "params": { "target": c.target_android, "arch": c.arch_arm, @@ -27,9 +27,9 @@ "node_enabled": True, }, }, - # LINUX builds + # ALPINE LINUX builds { - "name": "Docker >> alpine-linux-x64 >> NODE_ENABLED", + "name": "alpine-linux-x64 @ Docker", "params": { "target": c.target_linux, "vendor": "alpine", @@ -42,7 +42,7 @@ # TODO: build not supported, because default gcc/g++ on alpine does not support x32 compilation # (see: https://stackoverflow.com/a/40574830/425532) # { - # "name": "Docker >> alpine-linux-x86 >> NODE_ENABLED", + # "name": "alpine-linux-x86 @ Docker", # "params": { # "target": c.target_linux, # "vendor": "alpine", @@ -52,8 +52,17 @@ # "node_enabled": True, # }, # }, + # DEBIAN / UBUNTU LINUX builds { - "name": "Docker >> linux-x64 >> NODE_ENABLED", + "name": "linux-x64", + "params": { + "target": c.target_linux, + "arch": c.arch_x64, + "node_enabled": True, + }, + }, + { + "name": "linux-x64 @ Docker", "params": { "target": c.target_linux, "arch": c.arch_x64, @@ -62,7 +71,7 @@ }, }, { - "name": "Docker >> linux-x86 >> NODE_ENABLED", + "name": "linux-x86 @ Docker", "params": { "target": c.target_linux, "arch": c.arch_x86, @@ -72,7 +81,15 @@ }, # MACOSX builds { - "name": "Vagrant >> macosx-x64 >> NODE_ENABLED", + "name": "macosx-x64", + "params": { + "target": c.target_macos, + "arch": c.arch_x64, + "node_enabled": True, + }, + }, + { + "name": "macosx-x64 @ Vagrant", "params": { "target": c.target_macos, "arch": c.arch_x64, @@ -81,7 +98,7 @@ }, }, { - "name": "Vagrant >> macosx-x86 >> NODE_ENABLED", + "name": "macosx-x86 @ Vagrant", "params": { "target": c.target_macos, "arch": c.arch_x86, @@ -91,7 +108,7 @@ }, # WINDOWS builds { - "name": "Native >> windows-x64 >> NODE_ENABLED", + "name": "windows-x64", "params": { "target": c.target_win32, "arch": c.arch_x64, @@ -101,7 +118,7 @@ # TODO: this build is currently broken due to a Node.js build-system issue # { # # see: https://github.com/nodejs/node/issues/13569 - # "name": "Native >> windows-x86 >> NODE_ENABLED", + # "name": "windows-x86", # "params": { # "target": c.target_win32, # "arch": c.arch_x86, @@ -109,7 +126,7 @@ # }, # }, { - "name": "Docker >> windows-x64 >> NODE_ENABLED", + "name": "windows-x64 @ Docker", "params": { "target": c.target_win32, "arch": c.arch_x64, @@ -118,7 +135,7 @@ }, }, { - "name": "Vagrant >> windows-x64 >> NODE_ENABLED", + "name": "windows-x64 @ Vagrant", "params": { "target": c.target_win32, "arch": c.arch_x64, diff --git a/build_system/docker_build.py b/build_system/docker_build.py index a80ac21e8..e696a5649 100644 --- a/build_system/docker_build.py +++ b/build_system/docker_build.py @@ -1,6 +1,6 @@ +import atexit import re -import signal import subprocess import sys @@ -64,7 +64,7 @@ def pre_build(self, config): container_name = self.get_container_name(config) docker_stop_str = self.inject_env("docker stop " + container_name, config) - def cli_exit_event(signal, frame): + def cli_exit_event(): if config.no_shutdown: print "INFO: Docker J2V8 container 
will continue running..." return @@ -72,7 +72,7 @@ def cli_exit_event(signal, frame): print "Waiting for docker process to exit..." self.exec_host_cmd(docker_stop_str, config) - signal.signal(signal.SIGINT, cli_exit_event) + atexit.register(cli_exit_event) args_str = "" diff --git a/build_system/vagrant_build.py b/build_system/vagrant_build.py index 919be294b..35fb0aae1 100644 --- a/build_system/vagrant_build.py +++ b/build_system/vagrant_build.py @@ -1,4 +1,4 @@ -import signal +import atexit import subprocess import sys import build_utils as utils @@ -30,7 +30,7 @@ def pre_build(self, config): if (config.pre_build_cmd): vagrant_start_cmd = config.pre_build_cmd + utils.host_cmd_sep() + vagrant_start_cmd - def cli_exit_event(signal, frame): + def cli_exit_event(): if (config.no_shutdown): print "INFO: Vagrant J2V8 machine will continue running..." return @@ -38,7 +38,7 @@ def cli_exit_event(signal, frame): print "Waiting for vagrant virtual-machine to exit..." self.exec_host_cmd("vagrant halt", config) - signal.signal(signal.SIGINT, cli_exit_event) + atexit.register(cli_exit_event) self.exec_host_cmd(vagrant_start_cmd, config) From c44946ce94b5b4218f799269dec43b366d2762a1 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Sat, 19 Aug 2017 20:48:31 +0200 Subject: [PATCH 13/14] show V8 & Node.js version on build - improve merging of pre-defined and user build-params for `build -i` --- build_system/build_executor.py | 8 ++++++ build_system/build_interactive.py | 12 ++++++--- build_system/build_utils.py | 45 +++++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+), 4 deletions(-) diff --git a/build_system/build_executor.py b/build_system/build_executor.py index 7e25ddb28..8987ebf64 100644 --- a/build_system/build_executor.py +++ b/build_system/build_executor.py @@ -201,6 +201,14 @@ def execute_build(params): print "Checking Node.js builtins integration consistency..." utils.check_node_builtins() + major,minor,build,patch,is_candidate = utils.get_v8_version() + major,minor,patch,is_release = utils.get_nodejs_version() + + print "--------------------------------------------------" + print "V8: %(major)s.%(minor)s.%(build)s.%(patch)s (candidate: %(is_candidate)s)" % locals() + print "Node.js: %(major)s.%(minor)s.%(patch)s (release: %(is_release)s)" % locals() + print "--------------------------------------------------" + print "Caching Node.js artifacts..." curr_node_tag = (params.vendor + "-" if params.vendor else "") + target + "." 
+ params.arch utils.store_nodejs_output(curr_node_tag, build_cwd) diff --git a/build_system/build_interactive.py b/build_system/build_interactive.py index b2fe0a49e..eb2665535 100644 --- a/build_system/build_interactive.py +++ b/build_system/build_interactive.py @@ -44,11 +44,15 @@ def run_interactive_cli(): cli.init_build_steps(parser) # parse the build-step syntax - build_step_params = parser.parse_args(build_steps_argv) + user_params = parser.parse_args(build_steps_argv) - # merge the potentially customized build-steps with the - # original pre-defined build-config para,s - build_params.update(vars(build_step_params)) + # convert into dictionary form + user_params = vars(user_params) + + # merge the potentially customized build-steps into the + # original pre-defined build-config params + # see: https://stackoverflow.com/a/15277395/425532 + build_params.update((k,v) for k,v in user_params.iteritems() if v is not None) # start the build bex.execute_build(build_params) diff --git a/build_system/build_utils.py b/build_system/build_utils.py index 66083c409..5f69e507c 100644 --- a/build_system/build_utils.py +++ b/build_system/build_utils.py @@ -9,6 +9,9 @@ import constants as c +V8Version = collections.namedtuple("V8Version", "major minor build patch is_candidate") +NodeJSVersion = collections.namedtuple("NodeJSVersion", "major minor patch is_release") + def get_cwd(): return os.getcwd().replace("\\", "/") @@ -51,6 +54,48 @@ def cli_exit(message): sys.stderr.flush() sys.exit(1) +def get_v8_version(): + v8_version_text = None + + with file("./node/deps/v8/include/v8-version.h", "r") as v8_version_file: + v8_version_text = v8_version_file.read() + + major = re.search(r"#define V8_MAJOR_VERSION (\d+)", v8_version_text) + minor = re.search(r"#define V8_MINOR_VERSION (\d+)", v8_version_text) + build = re.search(r"#define V8_BUILD_NUMBER (\d+)", v8_version_text) + patch = re.search(r"#define V8_PATCH_LEVEL (\d+)", v8_version_text) + + is_candidate = re.search(r"#define V8_IS_CANDIDATE_VERSION (\d+)", v8_version_text) + + major = major.group(1) if major else None + minor = minor.group(1) if minor else None + build = build.group(1) if build else None + patch = patch.group(1) if patch else None + + is_candidate = is_candidate.group(1) == "1" if is_candidate else None + + return V8Version(major, minor, build, patch, is_candidate) + +def get_nodejs_version(): + njs_version_text = None + + with file("./node/src/node_version.h", "r") as njs_version_file: + njs_version_text = njs_version_file.read() + + major = re.search(r"#define NODE_MAJOR_VERSION (\d+)", njs_version_text) + minor = re.search(r"#define NODE_MINOR_VERSION (\d+)", njs_version_text) + patch = re.search(r"#define NODE_PATCH_VERSION (\d+)", njs_version_text) + + is_release = re.search(r"#define NODE_VERSION_IS_RELEASE (\d+)", njs_version_text) + + major = major.group(1) if major else None + minor = minor.group(1) if minor else None + patch = patch.group(1) if patch else None + + is_release = is_release.group(1) == "1" if is_release else None + + return NodeJSVersion(major, minor, patch, is_release) + # based on code from: https://stackoverflow.com/a/16260159/425532 def readlines(f, newlines): buf = "" From 95d4543f188d3fb44d2984cf357705bd045846d9 Mon Sep 17 00:00:00 2001 From: Wolfgang Steiner Date: Tue, 29 Aug 2017 01:20:28 +0200 Subject: [PATCH 14/14] some minor fixes --- build_system/build_executor.py | 8 ++++---- build_system/build_utils.py | 4 ++-- build_system/shared_build_steps.py | 2 +- 3 files changed, 7 insertions(+), 7 
deletions(-) diff --git a/build_system/build_executor.py b/build_system/build_executor.py index 8987ebf64..d6da3179b 100644 --- a/build_system/build_executor.py +++ b/build_system/build_executor.py @@ -201,12 +201,12 @@ def execute_build(params): print "Checking Node.js builtins integration consistency..." utils.check_node_builtins() - major,minor,build,patch,is_candidate = utils.get_v8_version() - major,minor,patch,is_release = utils.get_nodejs_version() + v8_major,v8_minor,v8_build,v8_patch,v8_is_candidate = utils.get_v8_version() + njs_major,njs_minor,njs_patch,njs_is_release = utils.get_nodejs_version() print "--------------------------------------------------" - print "V8: %(major)s.%(minor)s.%(build)s.%(patch)s (candidate: %(is_candidate)s)" % locals() - print "Node.js: %(major)s.%(minor)s.%(patch)s (release: %(is_release)s)" % locals() + print "V8: %(v8_major)s.%(v8_minor)s.%(v8_build)s.%(v8_patch)s (candidate: %(v8_is_candidate)s)" % locals() + print "Node.js: %(njs_major)s.%(njs_minor)s.%(njs_patch)s (release: %(njs_is_release)s)" % locals() print "--------------------------------------------------" print "Caching Node.js artifacts..." diff --git a/build_system/build_utils.py b/build_system/build_utils.py index 5f69e507c..65d88a1c2 100644 --- a/build_system/build_utils.py +++ b/build_system/build_utils.py @@ -300,14 +300,14 @@ def check_node_builtins(): code in jni/com_eclipsesource_v8_V8Impl.cpp to make sure that every module is correctly initialized and linked into the native J2V8 library. """ - node_src = "node/src/" + node_src = "./node/src/" # node.js directory is not available if (not os.path.exists(node_src)): return # building from a pre-built dependency package (does not include c++ source files) - if (len(glob.glob(node_src + ".cc")) == 0): + if (len(glob.glob(node_src + "*.cc")) == 0): return j2v8_jni_cpp_path = "jni/com_eclipsesource_v8_V8Impl.cpp" diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py index ec6d4d7a4..3a3376780 100644 --- a/build_system/shared_build_steps.py +++ b/build_system/shared_build_steps.py @@ -57,7 +57,7 @@ def setEnvVar(name, value): if (os.name == "nt"): return ["set \"" + name + "=" + value + "\""] else: - return ["export " + name + "=" + value] + return ["export " + name + "=\"" + value + "\""] def setJavaHome(config): # NOTE: Docker Linux builds need some special handling, because not all images have