From 97d747ec7fe26eeff38b5861f4cecef16d004b53 Mon Sep 17 00:00:00 2001 From: Alex Huszagh Date: Tue, 25 Oct 2022 13:37:57 -0500 Subject: [PATCH] Add support for different Android versions. We provide the following arguments to configure the Android version: - `ANDROID_NDK` - `ANDROID_SDK` (note that this is dependent on `ANDROID_VERSION) - `ANDROID_VERSION` - `ANDROID_SYSTEM_COMPLETE`: do a complete Android build - `ANDROID_SYSTEM_NONE`: do not build the Android system, disables runner support We now support NDK versions r10e-r25b, SDK versions 21-33, and Android versions 5.0, 5.1, 6.0, 7.0, 8.0, 8.1, 9.0, and 10.0. We also validate that the NDK, SDK, and Android versions are compatible. Next, we've improved the removal of unittests during the build process, to ensure fast builds while maintaining compatibility with various newer Android versions. To do this, we've implemented a Python library and a script. The Python library contains a complete parser (correctly parses all valid input) for Soong blueprint files, using an LALR grammar, and a rudimentary parser for Makefiles. The Soong parser removes any `gtest` dependencies, as well as any subdirectories or scope names containing `test`. For example: ```go cc_library { name: "lib", srcs: ["lib.cc",], } cc_test { name: "test", defaults: ["target"], srcs: ["test.cc"], } ``` Will become: ```go cc_library { name: "lib", srcs: ["lib.cc",], } ``` The Makefile parser first splits the file based on conditional directives (`ifeq`, `endif`, etc.) to ensure any subsequent processing doesn't lead to unbalanced directives. Next, we split the text within each directive based on comment sections used in the Android source tree. For example: ```Makefile test_tags := tests include $(call subdir,$(LOCAL_PATH)) c_flags := \ -g \ -Wall ``` We can therefore remove the `Benchmarks` and `Unit tests` sections without removing the `Other section`. 
The Python library is reasonably performant (it adds no noticeable overhead to the build process) and is compact (in total, < 60KB). Also, it is much more resilient than a series of `sed` scripts. Finally, extensive unittests have been added for the Python library, for both code linting (`flake8`) and unittests (via `tox`). Since we cannot assume the presence of Python on the host machine, the tests can be optionally enabled via the `--python` flag (or `PYTHON` environment variable, to hook into the git hooks), and custom overrides for the `flake8` and `tox` commands are provided (since the user may wish to specify a specific Python version, such as `python3.7 -m flake8`). --- .changes/1023.json | 4 + .gitignore | 9 + Cargo.lock | 1 + docker/.dockerignore | 11 + docker/Dockerfile.aarch64-linux-android | 34 +- docker/Dockerfile.arm-linux-androideabi | 34 +- docker/Dockerfile.armv7-linux-androideabi | 34 +- docker/Dockerfile.i686-linux-android | 41 +- .../Dockerfile.thumbv7neon-linux-androideabi | 23 +- docker/Dockerfile.x86_64-linux-android | 36 +- docker/android-ndk.sh | 72 +- docker/android-symlink.sh | 50 ++ docker/android-system.sh | 689 +++++++++++++++--- docker/android/README.md | 4 + docker/android/android/__init__.py | 15 + docker/android/android/make.py | 475 ++++++++++++ docker/android/android/soong.py | 646 ++++++++++++++++ docker/android/android/util.py | 7 + docker/android/pyproject.toml | 12 + docker/android/scripts/build-system.py | 187 +++++ docker/android/tests/Addition.bp | 62 ++ docker/android/tests/Android.bp | 80 ++ docker/android/tests/Android.mk | 101 +++ docker/android/tests/Comments.mk | 5 + docker/android/tests/Empty.bp | 1 + docker/android/tests/Empty.mk | 1 + docker/android/tests/Grouped.mk | 22 + docker/android/tests/Nested.mk | 58 ++ docker/android/tests/README.md | 13 + docker/android/tests/Single.mk | 22 + docker/android/tests/test_make.py | 313 ++++++++ docker/android/tests/test_metadata.py | 24 + docker/android/tests/test_soong.py | 
323 ++++++++ docker/android/tox.ini | 30 + docker/validate-android-args.sh | 213 ++++++ src/rustc.rs | 2 +- xtask/Cargo.toml | 1 + xtask/src/hooks.rs | 124 +++- xtask/src/main.rs | 2 +- 39 files changed, 3618 insertions(+), 163 deletions(-) create mode 100644 .changes/1023.json create mode 100644 docker/.dockerignore create mode 100755 docker/android-symlink.sh create mode 100644 docker/android/README.md create mode 100644 docker/android/android/__init__.py create mode 100644 docker/android/android/make.py create mode 100644 docker/android/android/soong.py create mode 100644 docker/android/android/util.py create mode 100644 docker/android/pyproject.toml create mode 100644 docker/android/scripts/build-system.py create mode 100644 docker/android/tests/Addition.bp create mode 100644 docker/android/tests/Android.bp create mode 100644 docker/android/tests/Android.mk create mode 100644 docker/android/tests/Comments.mk create mode 100644 docker/android/tests/Empty.bp create mode 100644 docker/android/tests/Empty.mk create mode 100644 docker/android/tests/Grouped.mk create mode 100644 docker/android/tests/Nested.mk create mode 100644 docker/android/tests/README.md create mode 100644 docker/android/tests/Single.mk create mode 100644 docker/android/tests/test_make.py create mode 100644 docker/android/tests/test_metadata.py create mode 100644 docker/android/tests/test_soong.py create mode 100644 docker/android/tox.ini create mode 100755 docker/validate-android-args.sh diff --git a/.changes/1023.json b/.changes/1023.json new file mode 100644 index 000000000..c4aeb55f4 --- /dev/null +++ b/.changes/1023.json @@ -0,0 +1,4 @@ +{ + "description": "support different Android NDK, API, and Android versions using Docker build args.", + "type": "added" +} diff --git a/.gitignore b/.gitignore index d89dbc825..25daee628 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,12 @@ **/*.log /cargo-timing*.html CHANGELOG.md.draft + +# python stuff +__pycache__/ +.pytest_cache/ +*.py[cod] 
+*$py.class +*.egg-info/ +*.egg +.tox diff --git a/Cargo.lock b/Cargo.lock index 24a4443ba..1ad41e9c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1007,6 +1007,7 @@ dependencies = [ "semver", "serde", "serde_json", + "shell-words", "toml", "walkdir", "which", diff --git a/docker/.dockerignore b/docker/.dockerignore new file mode 100644 index 000000000..cc0396c4b --- /dev/null +++ b/docker/.dockerignore @@ -0,0 +1,11 @@ +# don't copy any of the python artifacts to the docker context +__pycache__/ +.pytest_cache/ +*.py[cod] +*$py.class +**/*.egg-info/ +*.egg +.tox + +# also skip our test suite +android/tests/ diff --git a/docker/Dockerfile.aarch64-linux-android b/docker/Dockerfile.aarch64-linux-android index 412fe0da0..ae5575514 100644 --- a/docker/Dockerfile.aarch64-linux-android +++ b/docker/Dockerfile.aarch64-linux-android @@ -10,18 +10,31 @@ RUN /cmake.sh COPY xargo.sh / RUN /xargo.sh +COPY qemu.sh / +RUN /qemu.sh aarch64 + +ARG ANDROID_NDK=r21d +ARG ANDROID_SDK=28 +ARG ANDROID_VERSION=9.0.0_r1 +ARG ANDROID_SYSTEM_NONE=0 +ARG ANDROID_SYSTEM_COMPLETE=0 +ARG PYTHON_TMPDIR=/tmp/android + +COPY validate-android-args.sh / +RUN /validate-android-args.sh arm64 + COPY android-ndk.sh / -RUN /android-ndk.sh arm64 28 +RUN /android-ndk.sh arm64 ENV PATH=$PATH:/android-ndk/bin COPY android-system.sh / +RUN mkdir -p $PYTHON_TMPDIR +COPY android $PYTHON_TMPDIR RUN /android-system.sh arm64 -COPY qemu.sh / -RUN /qemu.sh aarch64 - ENV CROSS_SYSROOT=/android-ndk/sysroot -RUN cp $CROSS_SYSROOT/usr/lib/aarch64-linux-android/28/libz.so /system/lib/ +COPY android-symlink.sh / +RUN /android-symlink.sh aarch64 aarch64-linux-android COPY android-runner / @@ -29,8 +42,19 @@ COPY android-runner / # found in the build process of some crates, so we explicit set the DEP_Z_ROOT ENV CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=aarch64-linux-android-gcc \ CARGO_TARGET_AARCH64_LINUX_ANDROID_RUNNER="/android-runner aarch64" \ + AR_aarch64_linux_android=aarch64-linux-android-ar \ + 
AS_aarch64_linux_android=aarch64-linux-android-as \ CC_aarch64_linux_android=aarch64-linux-android-gcc \ CXX_aarch64_linux_android=aarch64-linux-android-g++ \ + LD_aarch64_linux_android=aarch64-linux-android-ld \ + NM_aarch64_linux_android=aarch64-linux-android-nm \ + OBJCOPY_aarch64_linux_android=aarch64-linux-android-objcopy \ + OBJDUMP_aarch64_linux_android=aarch64-linux-android-objdump \ + RANLIB_aarch64_linux_android=aarch64-linux-android-ranlib \ + READELF_aarch64_linux_android=aarch64-linux-android-readelf \ + SIZE_aarch64_linux_android=aarch64-linux-android-size \ + STRINGS_aarch64_linux_android=aarch64-linux-android-strings \ + STRIP_aarch64_linux_android=aarch64-linux-android-strip \ BINDGEN_EXTRA_CLANG_ARGS_aarch64_linux_android="--sysroot=$CROSS_SYSROOT" \ DEP_Z_INCLUDE="$CROSS_SYSROOT/usr/include"/ \ RUST_TEST_THREADS=1 \ diff --git a/docker/Dockerfile.arm-linux-androideabi b/docker/Dockerfile.arm-linux-androideabi index ac35c4716..ee2005197 100644 --- a/docker/Dockerfile.arm-linux-androideabi +++ b/docker/Dockerfile.arm-linux-androideabi @@ -10,18 +10,29 @@ RUN /cmake.sh COPY xargo.sh / RUN /xargo.sh +COPY qemu.sh / +RUN /qemu.sh arm + +ARG ANDROID_NDK=r21d +ARG ANDROID_SDK=28 +ARG ANDROID_VERSION=9.0.0_r1 +ARG ANDROID_SYSTEM_NONE=0 +ARG ANDROID_SYSTEM_COMPLETE=0 +ARG PYTHON_TMPDIR=/tmp/android + +COPY validate-android-args.sh / +RUN /validate-android-args.sh arm + COPY android-ndk.sh / -RUN /android-ndk.sh arm 28 +RUN /android-ndk.sh arm ENV PATH=$PATH:/android-ndk/bin -COPY android-system.sh / +COPY android-system.sh remove_android_tests.py / RUN /android-system.sh arm -COPY qemu.sh / -RUN /qemu.sh arm - ENV CROSS_SYSROOT=/android-ndk/sysroot -RUN cp $CROSS_SYSROOT/usr/lib/arm-linux-androideabi/28/libz.so /system/lib/ +COPY android-symlink.sh / +RUN /android-symlink.sh arm arm-linux-androideabi COPY android-runner / @@ -29,8 +40,19 @@ COPY android-runner / # found in the build process of some crates, so we explicit set the DEP_Z_ROOT ENV 
CARGO_TARGET_ARM_LINUX_ANDROIDEABI_LINKER=arm-linux-androideabi-gcc \ CARGO_TARGET_ARM_LINUX_ANDROIDEABI_RUNNER="/android-runner arm" \ + AR_arm_linux_androideabi=arm-linux-androideabi-ar \ + AS_arm_linux_androideabi=arm-linux-androideabi-as \ CC_arm_linux_androideabi=arm-linux-androideabi-gcc \ CXX_arm_linux_androideabi=arm-linux-androideabi-g++ \ + LD_arm_linux_androideabi=arm-linux-androideabi-ld \ + NM_arm_linux_androideabi=arm-linux-androideabi-nm \ + OBJCOPY_arm_linux_androideabi=arm-linux-androideabi-objcopy \ + OBJDUMP_arm_linux_androideabi=arm-linux-androideabi-objdump \ + RANLIB_arm_linux_androideabi=arm-linux-androideabi-ranlib \ + READELF_arm_linux_androideabi=arm-linux-androideabi-readelf \ + SIZE_arm_linux_androideabi=arm-linux-androideabi-size \ + STRINGS_arm_linux_androideabi=arm-linux-androideabi-strings \ + STRIP_arm_linux_androideabi=arm-linux-androideabi-strip \ BINDGEN_EXTRA_CLANG_ARGS_arm_linux_androideabi="--sysroot=$CROSS_SYSROOT" \ DEP_Z_INCLUDE="$CROSS_SYSROOT/usr/include/" \ RUST_TEST_THREADS=1 \ diff --git a/docker/Dockerfile.armv7-linux-androideabi b/docker/Dockerfile.armv7-linux-androideabi index 6d226c46b..7752a4f60 100644 --- a/docker/Dockerfile.armv7-linux-androideabi +++ b/docker/Dockerfile.armv7-linux-androideabi @@ -10,18 +10,29 @@ RUN /cmake.sh COPY xargo.sh / RUN /xargo.sh +COPY qemu.sh / +RUN /qemu.sh arm + +ARG ANDROID_NDK=r21d +ARG ANDROID_SDK=28 +ARG ANDROID_VERSION=9.0.0_r1 +ARG ANDROID_SYSTEM_NONE=0 +ARG ANDROID_SYSTEM_COMPLETE=0 +ARG PYTHON_TMPDIR=/tmp/android + +COPY validate-android-args.sh / +RUN /validate-android-args.sh arm + COPY android-ndk.sh / -RUN /android-ndk.sh arm 28 +RUN /android-ndk.sh arm ENV PATH=$PATH:/android-ndk/bin -COPY android-system.sh / +COPY android-system.sh remove_android_tests.py / RUN /android-system.sh arm -COPY qemu.sh / -RUN /qemu.sh arm - ENV CROSS_SYSROOT=/android-ndk/sysroot -RUN cp $CROSS_SYSROOT/usr/lib/arm-linux-androideabi/28/libz.so /system/lib/ +COPY android-symlink.sh / +RUN 
/android-symlink.sh arm arm-linux-androideabi COPY android-runner / @@ -29,8 +40,19 @@ COPY android-runner / # found in the build process of some crates, so we explicit set the DEP_Z_ROOT ENV CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_LINKER=arm-linux-androideabi-gcc \ CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_RUNNER="/android-runner arm" \ + AR_armv7_linux_androideabi=arm-linux-androideabi-ar \ + AS_armv7_linux_androideabi=arm-linux-androideabi-as \ CC_armv7_linux_androideabi=arm-linux-androideabi-gcc \ CXX_armv7_linux_androideabi=arm-linux-androideabi-g++ \ + LD_armv7_linux_androideabi=arm-linux-androideabi-ld \ + NM_armv7_linux_androideabi=arm-linux-androideabi-nm \ + OBJCOPY_armv7_linux_androideabi=arm-linux-androideabi-objcopy \ + OBJDUMP_armv7_linux_androideabi=arm-linux-androideabi-objdump \ + RANLIB_armv7_linux_androideabi=arm-linux-androideabi-ranlib \ + READELF_armv7_linux_androideabi=arm-linux-androideabi-readelf \ + SIZE_armv7_linux_androideabi=arm-linux-androideabi-size \ + STRINGS_armv7_linux_androideabi=arm-linux-androideabi-strings \ + STRIP_armv7_linux_androideabi=arm-linux-androideabi-strip \ BINDGEN_EXTRA_CLANG_ARGS_armv7_linux_androideabi="--sysroot=$CROSS_SYSROOT" \ DEP_Z_INCLUDE="$CROSS_SYSROOT/usr/include/" \ RUST_TEST_THREADS=1 \ diff --git a/docker/Dockerfile.i686-linux-android b/docker/Dockerfile.i686-linux-android index 1f4411860..8882af715 100644 --- a/docker/Dockerfile.i686-linux-android +++ b/docker/Dockerfile.i686-linux-android @@ -10,25 +10,37 @@ RUN /cmake.sh COPY xargo.sh / RUN /xargo.sh -COPY android-ndk.sh / -RUN /android-ndk.sh x86 28 -ENV PATH=$PATH:/android-ndk/bin - -COPY android-system.sh / -RUN /android-system.sh x86 - # We could supposedly directly run i686 binaries like we do for x86_64, but # doing so generates an assertion failure: # ... assertion failed: signal(libc::SIGPIPE, libc::SIG_IGN) != libc::SIG_ERR # ... 
src/libstd/sys/unix/mod.rs # fatal runtime error: failed to initiate panic, error 5 # -# Running with qemu works as expected +# Running with qemu works as expected. it also ensures that're we're +# running on a CPU common 32-bit x86 systems. COPY qemu.sh / RUN /qemu.sh i386 +ARG ANDROID_NDK=r21d +ARG ANDROID_SDK=28 +ARG ANDROID_VERSION=9.0.0_r1 +ARG ANDROID_SYSTEM_NONE=0 +ARG ANDROID_SYSTEM_COMPLETE=0 +ARG PYTHON_TMPDIR=/tmp/android + +COPY validate-android-args.sh / +RUN /validate-android-args.sh x86 + +COPY android-ndk.sh / +RUN /android-ndk.sh x86 +ENV PATH=$PATH:/android-ndk/bin + +COPY android-system.sh remove_android_tests.py / +RUN /android-system.sh x86 + ENV CROSS_SYSROOT=/android-ndk/sysroot -RUN cp $CROSS_SYSROOT/usr/lib/i686-linux-android/28/libz.so /system/lib/ +COPY android-symlink.sh / +RUN /android-symlink.sh i386 i686-linux-android COPY android-runner / @@ -36,8 +48,19 @@ COPY android-runner / # found in the build process of some crates, so we explicit set the DEP_Z_ROOT ENV CARGO_TARGET_I686_LINUX_ANDROID_LINKER=i686-linux-android-gcc \ CARGO_TARGET_I686_LINUX_ANDROID_RUNNER="/android-runner i686" \ + AR_i686_linux_android=i686-linux-android-ar \ + AS_i686_linux_android=i686-linux-android-as \ CC_i686_linux_android=i686-linux-android-gcc \ CXX_i686_linux_android=i686-linux-android-g++ \ + LD_i686_linux_android=i686-linux-android-ld \ + NM_i686_linux_android=i686-linux-android-nm \ + OBJCOPY_i686_linux_android=i686-linux-android-objcopy \ + OBJDUMP_i686_linux_android=i686-linux-android-objdump \ + RANLIB_i686_linux_android=i686-linux-android-ranlib \ + READELF_i686_linux_android=i686-linux-android-readelf \ + SIZE_i686_linux_android=i686-linux-android-size \ + STRINGS_i686_linux_android=i686-linux-android-strings \ + STRIP_i686_linux_android=i686-linux-android-strip \ BINDGEN_EXTRA_CLANG_ARGS_i686_linux_android="--sysroot=$CROSS_SYSROOT" \ DEP_Z_INCLUDE="$CROSS_SYSROOT/usr/include/" \ LIBZ_SYS_STATIC=1 \ diff --git 
a/docker/Dockerfile.thumbv7neon-linux-androideabi b/docker/Dockerfile.thumbv7neon-linux-androideabi index 3a3689560..d4e960871 100644 --- a/docker/Dockerfile.thumbv7neon-linux-androideabi +++ b/docker/Dockerfile.thumbv7neon-linux-androideabi @@ -10,18 +10,29 @@ RUN /cmake.sh COPY xargo.sh / RUN /xargo.sh +COPY qemu.sh / +RUN /qemu.sh arm + +ARG ANDROID_NDK=r21d +ARG ANDROID_SDK=28 +ARG ANDROID_VERSION=9.0.0_r1 +ARG ANDROID_SYSTEM_NONE=0 +ARG ANDROID_SYSTEM_COMPLETE=0 +ARG PYTHON_TMPDIR=/tmp/android + +COPY validate-android-args.sh / +RUN /validate-android-args.sh arm + COPY android-ndk.sh / -RUN /android-ndk.sh arm 28 +RUN /android-ndk.sh arm ENV PATH=$PATH:/android-ndk/bin -COPY android-system.sh / +COPY android-system.sh remove_android_tests.py / RUN /android-system.sh arm -COPY qemu.sh / -RUN /qemu.sh arm - ENV CROSS_SYSROOT=/android-ndk/sysroot -RUN cp $CROSS_SYSROOT/usr/lib/arm-linux-androideabi/28/libz.so /system/lib/ +COPY android-symlink.sh / +RUN /android-symlink.sh arm arm-linux-androideabi COPY android-runner / diff --git a/docker/Dockerfile.x86_64-linux-android b/docker/Dockerfile.x86_64-linux-android index 06698eeed..3295a9612 100644 --- a/docker/Dockerfile.x86_64-linux-android +++ b/docker/Dockerfile.x86_64-linux-android @@ -10,19 +10,30 @@ RUN /cmake.sh COPY xargo.sh / RUN /xargo.sh +# Using qemu allows older host cpus (without sse4) to execute the target binaries +COPY qemu.sh / +RUN /qemu.sh x86_64 + +ARG ANDROID_NDK=r21d +ARG ANDROID_SDK=28 +ARG ANDROID_VERSION=9.0.0_r1 +ARG ANDROID_SYSTEM_NONE=0 +ARG ANDROID_SYSTEM_COMPLETE=0 +ARG PYTHON_TMPDIR=/tmp/android + +COPY validate-android-args.sh / +RUN /validate-android-args.sh x86_64 + COPY android-ndk.sh / -RUN /android-ndk.sh x86_64 28 +RUN /android-ndk.sh x86_64 ENV PATH=$PATH:/android-ndk/bin -COPY android-system.sh / +COPY android-system.sh remove_android_tests.py / RUN /android-system.sh x86_64 -# Using qemu allows older host cpus (without sse4) to execute the target binaries -COPY qemu.sh / 
-RUN /qemu.sh x86_64 - ENV CROSS_SYSROOT=/android-ndk/sysroot -RUN cp $CROSS_SYSROOT/usr/lib/x86_64-linux-android/28/libz.so /system/lib/ +COPY android-symlink.sh / +RUN /android-symlink.sh x86_64 x86_64-linux-android COPY android-runner / @@ -30,8 +41,19 @@ COPY android-runner / # found in the build process of some crates, so we explicit set the DEP_Z_ROOT ENV CARGO_TARGET_X86_64_LINUX_ANDROID_LINKER=x86_64-linux-android-gcc \ CARGO_TARGET_X86_64_LINUX_ANDROID_RUNNER="/android-runner x86_64" \ + AR_x86_64_linux_android=x86_64-linux-android-ar \ + AS_x86_64_linux_android=x86_64-linux-android-as \ CC_x86_64_linux_android=x86_64-linux-android-gcc \ CXX_x86_64_linux_android=x86_64-linux-android-g++ \ + LD_x86_64_linux_android=x86_64-linux-android-ld \ + NM_x86_64_linux_android=x86_64-linux-android-nm \ + OBJCOPY_x86_64_linux_android=x86_64-linux-android-objcopy \ + OBJDUMP_x86_64_linux_android=x86_64-linux-android-objdump \ + RANLIB_x86_64_linux_android=x86_64-linux-android-ranlib \ + READELF_x86_64_linux_android=x86_64-linux-android-readelf \ + SIZE_x86_64_linux_android=x86_64-linux-android-size \ + STRINGS_x86_64_linux_android=x86_64-linux-android-strings \ + STRIP_x86_64_linux_android=x86_64-linux-android-strip \ BINDGEN_EXTRA_CLANG_ARGS_x86_64_linux_android="--sysroot=$CROSS_SYSROOT" \ DEP_Z_INCLUDE="$CROSS_SYSROOT/usr/include/" \ RUST_TEST_THREADS=1 \ diff --git a/docker/android-ndk.sh b/docker/android-ndk.sh index 49b3dff7e..6d26cbfa7 100755 --- a/docker/android-ndk.sh +++ b/docker/android-ndk.sh @@ -6,26 +6,51 @@ set -euo pipefail # shellcheck disable=SC1091 . lib.sh -NDK_URL=https://dl.google.com/android/repository/android-ndk-r21d-linux-x86_64.zip - main() { - local arch="${1}" \ - api="${2}" + local arch="${1}" + + if [[ "${ANDROID_SDK}" -eq 25 ]]; then + echo "Android SDK level 25 is not supported." 
1>&2 + exit 1 + fi - install_packages curl unzip python + # python3 is still needed for newer NDK versions, just since it + # simplifies making symlinks even though the toolchain is prebuilt + install_packages curl python python3 + get_ndk_info + if [[ "${NDK_VERSION}" -le 9 ]]; then + install_packages bzip2 + else + install_packages unzip + fi local td td="$(mktemp -d)" pushd "${td}" curl --retry 3 -sSfL "${NDK_URL}" -O - unzip -q android-ndk-*.zip - rm android-ndk-*.zip - pushd android-ndk-* - ./build/tools/make_standalone_toolchain.py \ - --install-dir /android-ndk \ - --arch "${arch}" \ - --api "${api}" + if [[ "${NDK_VERSION}" -le 9 ]]; then + tar -xjf "${NDK_FILENAME}" + else + unzip -q "${NDK_FILENAME}" + fi + rm "${NDK_FILENAME}" + pushd "android-ndk-${ANDROID_NDK}" + # android NDK versions <= 13 error without the verbose flag + local build_cmd= + local api= + if [[ "${NDK_VERSION}" -le 11 ]]; then + build_cmd=make-standalone-toolchain.sh + api=--platform="android-${ANDROID_SDK}" + else + build_cmd=make_standalone_toolchain.py + api=--api="${ANDROID_SDK}" + fi + "./build/tools/${build_cmd}" \ + --install-dir=/android-ndk \ + --arch="${arch}" \ + "${api}" \ + --verbose # clean up unused toolchains to reduce image size local triple @@ -60,4 +85,27 @@ main() { rm "${0}" } +get_ndk_info() { + local ndk_os=linux + local ndk_platform="${ndk_os}-x86_64" + # format is generally r21d, r25b, etc. it can however, be r24, for example. 
+ NDK_VERSION=$(echo "${ANDROID_NDK}" | tr -dc '0-9') + # android NDK 23 and higher moved from `linux-x86_64` to `linux` + if [[ "${NDK_VERSION}" -ge 23 ]]; then + NDK_FILENAME="android-ndk-${ANDROID_NDK}-${ndk_os}.zip" + elif [[ "${NDK_VERSION}" -le 9 ]]; then + NDK_FILENAME="android-ndk-${ANDROID_NDK}-${ndk_platform}.tar.bz2" + else + NDK_FILENAME="android-ndk-${ANDROID_NDK}-${ndk_platform}.zip" + fi + if [[ "${NDK_VERSION}" -le 9 ]]; then + NDK_URL="https://dl.google.com/android/ndk/${NDK_FILENAME}" + else + NDK_URL="https://dl.google.com/android/repository/${NDK_FILENAME}" + fi + export NDK_VERSION + export NDK_FILENAME + export NDK_URL +} + main "${@}" diff --git a/docker/android-symlink.sh b/docker/android-symlink.sh new file mode 100755 index 000000000..508079e51 --- /dev/null +++ b/docker/android-symlink.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# shellcheck disable=SC2125,SC2207 + +set -x +set -euo pipefail + +main() { + local arch="${1}" + local target="${2}" + local libdir="/android-ndk/lib64/clang/"*"/lib/linux/${arch}/" + local expanded=($(echo "/android-ndk/lib64/clang/"*"/lib/linux/${arch}/")) + + if [[ "${#expanded[@]}" == "1" ]] && [[ "${expanded[0]}" != "${libdir}" ]]; then + libdir=$(realpath "/android-ndk/lib64/clang/"*"/lib/linux/${arch}/") + + # In Android NDK versions r23-beta3, libgcc has been replaced by libunwind + # Older Rust versions always link to libgcc, so we need a symlink. + # https://github.com/rust-lang/rust/pull/85806 + if [[ -f "${libdir}/libunwind.a" ]]; then + ln -s "${libdir}/libunwind.a" "${libdir}/libgcc.a" + fi + fi + + # older SDK versions install the libraries directly in the lib directory. 
+ local sysroot=/android-ndk/sysroot + if [[ -d "${sysroot}/usr/lib/${target}/" ]]; then + cp "${sysroot}/usr/lib/${target}/${ANDROID_SDK}/libz.so" /system/lib/ + else + cp "${sysroot}/usr/lib/libz.so" /system/lib/ + fi + + # later NDK versions switch to using `llvm-${tool}` rather than `${target}-tool` + # want to ensure we just have backwards-compatible aliases + local tool= + local tool_src= + local tool_dst= + for tool in ar as nm objcopy objdump ranlib readelf size string strip; do + tool_src="/android-ndk/bin/llvm-${tool}" + tool_dst="/android-ndk/bin/${target}-${tool}" + if [[ ! -f "${tool_dst}" ]] && [[ -f "${tool_src}" ]]; then + ln -s "${tool_src}" "${tool_dst}" + elif [[ "${tool}" == "ld" ]] && [[ ! -f "${tool_dst}" ]]; then + ln -s "/android-ndk/bin/${tool}" "${tool_dst}" + fi + done + + rm "${0}" +} + +main "${@}" diff --git a/docker/android-system.sh b/docker/android-system.sh index 9a5ddb6b1..d989b5ddd 100755 --- a/docker/android-system.sh +++ b/docker/android-system.sh @@ -1,4 +1,30 @@ #!/usr/bin/env bash +# The API level details are mentioned here: +# https://developer.android.com/studio/releases/platforms +# These are controlled by `ANDROID_VERSION` and `ANDROID_SDK`, +# for example, `ANDROID_SDK=30` and `ANDROID_VERSION=11.0.0_r48`. +# +# You can also build the entire Android source tree with +# `ANDROID_SYSTEM_COMPLETE`, or skip it altogether with +# `ANDROID_SYSTEM_NONE`. Note that runners will not be +# available if the the Android system is not built. 
+# +# The versions are: +# 5.0: 21 (tested at NDK r13b, 5.0.0_r1) +# 5.1: 22 (tested at NDK r21d, 5.1.1_r38, unused DT) +# 6.0: 23 (tested at NDK r21dm 6.0.1_r81) +# 7.0: 24 (tested at NDK r21d, 7.0.0_r36) +# 7.1: 25 (tested at NDK r21d, 7.1.2_r39, not supported) +# 8.0: 26 (tested at NDK r21d, 8.0.0_r51) +# 8.1: 27 (tested at NDK r21d, 8.1.0_r81) +# 9.0: 28 (tested at NDK r21d and r25b) +# 10.0: 29 (tested at NDK r25b) +# 11.0: 30 +# 12.0: 31, 32 +# 13.0: 33 +# +# API level 25 seems to be missing from Android NDK versions, +# and therefore is not supported. set -x set -euo pipefail @@ -7,32 +33,36 @@ set -euo pipefail . lib.sh main() { - local arch="${1}" - local td - td="$(mktemp -d)" - pushd "${td}" + export ARCH="${1}" + MAJOR_VERSION=$(echo "${ANDROID_VERSION}" | cut -d '.' -f 1) + MINOR_VERSION=$(echo "${ANDROID_VERSION}" | cut -d '.' -f 2) + TAG="android-${ANDROID_VERSION}" - # fake java and javac, it is not necessary for what we build, but the build - # script ask for it - cat << EOF > /usr/bin/java -#!/usr/bin/env bash -echo "java version \"1.7.0\"" -echo "OpenJDK Runtime Environment (IcedTea 2.6.9)" -echo "OpenJDK 64-Bit Server VM (build 24.131-b00, mixed mode)" -EOF + export MAJOR_VERSION + export MINOR_VERSION + export TAG - cat << EOF > /usr/bin/javac -#!/usr/bin/env bash -echo "javac 1.7.0" -EOF + if [[ "${ANDROID_SYSTEM_NONE}" == "1" ]]; then + rm -rf "${PYTHON_TMPDIR}" + rm "${0}" + return + fi - chmod +x /usr/bin/java - chmod +x /usr/bin/javac + # TODO(ahuszagh) Remove this once we add complete support + if [[ "${ANDROID_SYSTEM_COMPLETE}" != "1" ]] && [[ "${MAJOR_VERSION}" -ge "11" ]]; then + echo "Only minimal Android builds for versions 10 or lower are supported" 1>&2 + echo "Preliminary support is currently being worked on." 1>&2 + exit 1 + elif [[ "${MAJOR_VERSION}" -eq 7 ]] && [[ "${MINOR_VERSION}" -eq 1 ]]; then + echo "Android version 7.1 is not supported." 
1>&2 + exit 1 + fi - # more faking - export ANDROID_JAVA_HOME=/tmp - mkdir /tmp/lib/ - touch /tmp/lib/tools.jar + local td + td="$(mktemp -d)" + pushd "${td}" + + fake_java install_packages ca-certificates \ curl \ @@ -46,64 +76,471 @@ EOF python \ python3 \ xz-utils - purge_list+=(default-jre) curl --retry 3 -sSfL https://storage.googleapis.com/git-repo-downloads/repo -O chmod +x repo + python3 ./repo init -u https://android.googlesource.com/platform/manifest -b "${TAG}" + + local tools=( + cat chmod chown cmp cp ctrlaltdel date df dmesg du hd id ifconfig + iftop insmod ioctl ionice kill ln log ls lsmod lsof lsusb md5 mkdir + mount mv nandread netstat notify printenv ps reboot renice rm rmdir + rmmod route schedtop sendevent setconsole setprop sleep smd start + stop sync top touch umount uptime vmstat watchprops wipe + ) + if [[ "${ANDROID_SYSTEM_COMPLETE}" == "1" ]]; then + android_repo_complete + else + case "${MAJOR_VERSION}" in + 5) + android_repo_v5 + tools+=(dd getevent getprop grep newfs_msdos) + ;; + 6) + android_repo_v6 + ;; + 7) + android_repo_v7 + ;; + 8) + android_repo_v8 + ;; + 9) + android_repo_v9 + ;; + 10) + android_repo_v10 + ;; +# 11) +# #android_repo_v11 +# # TODO(ahuszagh) Here. +# ;; +# 12) +# #android_repo_v12 +# # TODO(ahuszagh) Here. +# ;; +# 13) +# #android_repo_v13 +# # TODO(ahuszagh) Here. +# ;; + *) + echo "Currently unsupported Android version ${MAJOR_VERSION}." 1>&2 + echo "Please submit a feature request if you need support." 1>&2 + exit 1 + ;; + esac + fi + + build_android + install_android "${tools[@]}" + + purge_packages + + popd + + rm -rf "${td}" + rm -rf "${PYTHON_TMPDIR}" + rm "${0}" +} + +# java isn't required for the build, but the build expects to +# find a java compiler. 
the supported android versions are: +# https://source.android.com/docs/setup/start/older-versions +# Android 7: OpenJDK-8 +fake_java() { + local java_type= + local java_version= + local jre_info= + local build_info= + + case "${MAJOR_VERSION}" in + 5|6) + java_type=java + java_version=1.7.0 + jre_info="IcedTea 2.6.9" + build_info="build 24.131-b00, mixed mode" + ;; + *) + java_type=openjdk + java_version=1.8.0_342 + jre_info="build 1.8.0_342-8u342-b07-0ubuntu1~20.04-b07" + build_info="build 25.342-b07, mixed mode" + ;; + esac + + cat << EOF > /usr/bin/java +#!/usr/bin/env bash +echo "${java_type} version \"${java_version}\"" +echo "OpenJDK Runtime Environment (${jre_info})" +echo "OpenJDK 64-Bit Server VM (${build_info})" +EOF + + cat << EOF > /usr/bin/javac +#!/usr/bin/env bash +echo "javac ${java_version}" +EOF + + chmod +x /usr/bin/java + chmod +x /usr/bin/javac + + # more faking + export ANDROID_JAVA_HOME=/tmp + mkdir -p /tmp/lib/ + touch /tmp/lib/tools.jar +} + +build_android() { + if [[ "${ANDROID_SYSTEM_COMPLETE}" != "1" ]]; then + export ALLOW_MISSING_DEPENDENCIES=true + fi + + set +u + # shellcheck disable=SC1091 + source build/envsetup.sh + lunch "aosp_${ARCH}-user" + if [[ "${ANDROID_SYSTEM_COMPLETE}" != "1" ]]; then + mmma bionic/ + mmma external/mksh/ + mmma system/core/toolbox/ + else + mma + fi + + set -u +} + +install_android() { + local outdir= + if [[ "${ARCH}" = "arm" ]]; then + outdir=out/target/product/generic + else + outdir="out/target/product/generic_${ARCH}" + fi + mv "${outdir}/system/" / + if [[ "${ANDROID_SYSTEM_COMPLETE}" == "1" ]] && [[ -d "${outdir}/apex" ]]; then + # can use the APEX linker, no need to use the bootstrap one + mv "${outdir}/apex/" / + elif [[ "${MAJOR_VERSION}" -ge 10 ]]; then + symlink_bootstrap + fi + + # list from https://elinux.org/Android_toolbox + local tool= + for tool in "${@}"; do + if [[ ! 
-f "/system/bin/${tool}" ]]; then + ln -s /system/bin/toolbox "/system/bin/${tool}" + fi + done + + echo "127.0.0.1 localhost" > /system/etc/hosts +} + +symlink_bootstrap() { + # for Android 10+, we need to use the bootstrap linker rather than + # the APEX linker, which is gigantic. we also symlink the ASAN + # linker just in case using the bootstrapped one. + local linker + local file + for linker in /system/bin/bootstrap/*; do + file=$(basename "${linker}") + unlink "/system/bin/${file}" + ln -s "/system/bin/bootstrap/${file}" "/system/bin/${file}" + done + + # also need to ensure the shared libraries aren't symlinks + local lib + local libdir + for libdir in /system/lib{,64}; do + for lib in "${libdir}/bootstrap/"*; do + file=$(basename "${lib}") + unlink "${libdir}/${file}" + ln -s "${libdir}/bootstrap/${file}" "${libdir}/${file}" + done + done +} + +# this are the minimum set of modules that are need to build bionic +# this was created by trial and error. this is based on the minimum +# set of modules required for each android version, starting with +# a minimal number of dependencies. for android 10+ versions, we use +# the bootstrap linker rather than the APEX linker for non-complete +# system builds, as the APEX linker drags in nearly the entire Android +# runtime, requiring 60+GB images. 
+ +android_repo_complete() { + python3 ./repo sync -c +} + +# tested on 5.0.0_r1 (SDK 21) +# tested on 5.1.1_r38 (SDK 22) +android_repo_v5() { + sync bionic + sync build + sync external/compiler-rt + sync external/jemalloc + sync external/libcxx + sync external/libcxxabi + sync external/libselinux + sync external/mksh + sync external/openssl + sync external/pcre + sync external/stlport + sync prebuilts/clang/linux-x86/host/3.5 + sync system/core + + case "${ARCH}" in + arm) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.8 + ;; + arm64) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.8 + sync prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 + ;; + x86) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.8 + ;; + x86_64) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.8 + ;; + esac + + # avoid build tests + rm bionic/linker/tests/Android.mk + rm bionic/tests/Android.mk + rm bionic/benchmarks/Android.mk + + # patch the linker to avoid the error + # FATAL: kernel did not supply AT_SECURE + sed -i -e 's/if (!kernel_supplied_AT_SECURE)/if (false)/g' bionic/linker/linker_environ.cpp +} - # this is the minimum set of modules that are need to build bionic - # this was created by trial and error - python3 ./repo init -u https://android.googlesource.com/platform/manifest -b android-9.0.0_r1 - - python3 ./repo sync -c art - python3 ./repo sync -c bionic - python3 ./repo sync -c build/make - python3 ./repo sync -c build/blueprint - python3 ./repo sync -c build/soong - python3 ./repo sync -c external/clang - python3 ./repo sync -c external/compiler-rt - python3 ./repo sync -c external/elfutils - python3 ./repo sync -c external/jemalloc - python3 ./repo sync -c external/libcxx - python3 ./repo sync -c external/libcxxabi - python3 ./repo sync -c external/libunwind - python3 ./repo sync -c external/libunwind_llvm - python3 ./repo sync -c external/llvm - python3 ./repo sync -c external/lzma - python3 ./repo sync -c external/mksh - 
python3 ./repo sync -c external/safe-iop - python3 ./repo sync -c external/valgrind - python3 ./repo sync -c external/vixl - python3 ./repo sync -c external/zlib - python3 ./repo sync -c frameworks/hardware/interfaces - python3 ./repo sync -c hardware/interfaces - python3 ./repo sync -c libnativehelper - python3 ./repo sync -c prebuilts/build-tools - python3 ./repo sync -c prebuilts/clang/host/linux-x86 - python3 ./repo sync -c prebuilts/clang-tools - #python3 ./repo sync -c prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 - #python3 ./repo sync -c prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 - python3 ./repo sync -c prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8 - python3 ./repo sync -c prebuilts/go/linux-x86 - python3 ./repo sync -c prebuilts/misc - python3 ./repo sync -c prebuilts/sdk - python3 ./repo sync -c system/core - python3 ./repo sync -c system/libhidl - python3 ./repo sync -c system/tools/hidl - - case "${arch}" in +# tested on 6.0.1_r81 (SDK 23) +android_repo_v6() { + sync bionic + sync build + sync external/compiler-rt + sync external/libcxx + sync external/libcxxabi + sync external/libselinux + sync external/elfutils + sync external/jemalloc + sync external/mksh + sync external/pcre + sync external/safe-iop + sync external/zlib + sync libnativehelper + sync prebuilts/clang/linux-x86/host/3.6 + sync prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8 + sync prebuilts/misc + sync system/core + + case "${ARCH}" in arm) - python3 ./repo sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 ;; arm64) - python3 ./repo sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 - python3 ./repo sync prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + sync prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 ;; x86) - python3 ./repo sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + 
sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 ;; x86_64) - python3 ./repo sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + esac + + # avoid build tests + rm bionic/linker/tests/Android.mk + rm bionic/tests/Android.mk + rm bionic/benchmarks/Android.mk + # we don't need the relocation packer, and removing + # the unittests from it is a bit of work. + rm bionic/tools/relocation_packer/Android.mk +} + +# tested on 7.0.0_r36 (SDK 24) +# tested on 7.1.2_r39 (SDK 25, not supported) +# API level 25, required for Android 7.1, is not provided in NDKs +android_repo_v7() { + sync bionic + sync build + sync build/kati + sync external/boringssl + sync external/compiler-rt + sync external/elfutils + sync external/jemalloc + sync external/libcxx + sync external/libcxxabi + sync external/libselinux + sync external/libunwind + sync external/libunwind_llvm + sync external/llvm + sync external/mksh + sync external/pcre + sync external/safe-iop + sync external/zlib + sync prebuilts/clang/host/linux-x86 + sync prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8 + sync prebuilts/misc + sync prebuilts/ndk + sync prebuilts/ninja/linux-x86 + sync system/core + + case "${ARCH}" in + arm) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + ;; + arm64) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + sync prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 + ;; + x86) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + x86_64) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + esac + + # avoid build tests + rm bionic/linker/tests/Android.mk + rm bionic/tests/Android.mk + rm bionic/benchmarks/Android.mk + rm prebuilts/misc/common/android-support-test/Android.mk + # we don't need the relocation packer, and removing + # the unittests from it is a bit of work. 
+ rm bionic/tools/relocation_packer/Android.mk + + remove_tests +} + +# tested on 8.0.0_r51 (SDK 26) +# tested on 8.1.0_r81 (SDK 27) +android_repo_v8() { + # need to build LLVM components, or libLLVM is disabled. + export FORCE_BUILD_LLVM_COMPONENTS=true + + sync bionic + sync build/make + sync build/blueprint + sync build/soong + sync external/boringssl + sync external/clang + sync external/compiler-rt + sync external/elfutils + sync external/jemalloc + sync external/libcxx + sync external/libcxxabi + sync external/libevent + sync external/libunwind + sync external/libunwind_llvm + sync external/llvm + sync external/lzma + sync external/mksh + sync external/pcre + sync external/safe-iop + sync external/selinux + sync external/zlib + sync libnativehelper + sync prebuilts/build-tools + sync prebuilts/clang/host/linux-x86 + sync prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8 + sync prebuilts/go/linux-x86 + # we only need the relocation packer binary. everything else + # interferes with the build, so we remove the makefiles below. 
+ sync prebuilts/misc + sync prebuilts/ndk + sync system/core + sync toolchain/binutils + + case "${ARCH}" in + arm) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + ;; + arm64) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + sync prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 + ;; + x86) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + x86_64) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + esac + + # avoid build tests + rm bionic/linker/tests/Android.mk + rm bionic/tests/Android.mk + rm bionic/tests/Android.bp + rm bionic/benchmarks/Android.bp + rm bionic/tests/libs/Android.bp + + # remove extra utilities + rm system/core/libgrallocusage/Android.bp + rm system/core/libmemtrack/Android.bp + rm system/core/libsysutils/Android.bp + local path= + find prebuilts/misc/ -name 'Android.mk' | while IFS= read -r path; do + rm "${path}" + done + + # avoid java dependencies + rm external/lzma/Java/Tukaani/Android.mk + + remove_tests +} + +# tested on 9.0.0_r1 (SDK 28) +android_repo_v9() { + sync art + sync bionic + sync build/make + sync build/blueprint + sync build/soong + sync external/clang + sync external/compiler-rt + sync external/elfutils + sync external/jemalloc + sync external/libcxx + sync external/libcxxabi + sync external/libunwind + sync external/libunwind_llvm + sync external/llvm + sync external/lzma + sync external/mksh + sync external/safe-iop + sync external/valgrind + sync external/vixl + sync external/zlib + sync frameworks/hardware/interfaces + sync hardware/interfaces + sync libnativehelper + sync prebuilts/build-tools + sync prebuilts/clang/host/linux-x86 + sync prebuilts/clang-tools + sync prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8 + sync prebuilts/go/linux-x86 + sync prebuilts/misc + sync prebuilts/sdk + sync system/core + sync system/libhidl + sync system/tools/hidl + + case "${ARCH}" in + arm) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + ;; + 
arm64) + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + sync prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 + ;; + x86) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + x86_64) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 ;; esac @@ -116,49 +553,93 @@ EOF rm bionic/tests/headers/Android.bp rm bionic/tests/headers/posix/Android.bp - sed -i -z -e 's/cc_test {.*}//g' bionic/libc/malloc_debug/Android.bp - sed -i -z -e 's/cc_test {.*}//g' bionic/libc/malloc_hooks/Android.bp - sed -i -z -e 's/cc_test_host {.*}//g' bionic/tools/relocation_packer/Android.bp - - export ALLOW_MISSING_DEPENDENCIES=true + remove_tests +} - # patch the linker to avoid the error - # FATAL: kernel did not supply AT_SECURE - #sed -i -e 's/if (!kernel_supplied_AT_SECURE)/if (false)/g' bionic/linker/linker_environ.cpp +# tested on 10.0.0_r47 (SDK 29) +android_repo_v10() { + sync art + sync bionic + sync build/make + sync build/blueprint + sync build/soong + sync external/clang + sync external/compiler-rt + sync external/elfutils + sync external/jemalloc + sync external/jemalloc_new + sync external/libcxx + sync external/libcxxabi + sync external/libunwind + sync external/libunwind_llvm + sync external/llvm + sync external/lzma + sync external/mksh + sync external/vixl + sync external/zlib + sync external/golang-protobuf + sync libnativehelper + sync prebuilts/go/linux-x86 + sync prebuilts/build-tools + sync prebuilts/clang-tools + sync prebuilts/clang/host/linux-x86 + sync prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.17-4.8 + sync prebuilts/ndk + sync prebuilts/sdk + sync prebuilts/vndk/v28 + sync system/core + sync system/sepolicy - set +u - # shellcheck disable=SC1091 - source build/envsetup.sh - lunch "aosp_${arch}-user" - mmma bionic/ - mmma external/mksh/ - mmma system/core/toolbox/ - set -u + case "${ARCH}" in + arm) + sync external/arm-optimized-routines + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + ;; + arm64) + 
sync external/arm-optimized-routines + sync prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9 + sync prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9 + ;; + x86) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + x86_64) + sync prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9 + ;; + esac - if [[ "${arch}" = "arm" ]]; then - mv out/target/product/generic/system/ / - else - mv "out/target/product/generic_${arch}/system"/ / - fi + # avoid build tests + rm bionic/tests/Android.mk + rm bionic/tests/Android.bp + rm bionic/benchmarks/Android.bp + rm bionic/tests/libs/Android.bp + rm bionic/tests/headers/Android.bp + rm bionic/tests/headers/posix/Android.bp - # list from https://elinux.org/Android_toolbox - for tool in cat chmod chown cmp cp ctrlaltdel date df dmesg du \ - hd id ifconfig iftop insmod ioctl ionice kill ln log ls \ - lsmod lsof lsusb md5 mkdir mount mv nandread netstat notify \ - printenv ps reboot renice rm rmdir rmmod route schedtop sendevent \ - setconsole setprop sleep smd start stop sync top touch umount \ - uptime vmstat watchprops wipe; do - ln -s /system/bin/toolbox "/system/bin/${tool}" - done + remove_tests +} - echo "127.0.0.1 localhost" > /system/etc/hosts +remove_tests() { + install_packages python3-pip - purge_packages + local version= + version=$(python3 -c 'import sys +major = sys.version_info.major +minor = sys.version_info.minor +print(f"{major}.{minor}")') + set +u + export PYTHONPATH="${PYTHON_TMPDIR}/lib/python${version}/site-packages/:${PYTHONPATH}" + set -u + mkdir -p "${PYTHON_TMPDIR}" + python3 -m pip install sly==0.4.0 --prefix "${PYTHON_TMPDIR}" - popd + python3 "${PYTHON_TMPDIR}/scripts/build-system.py" \ + --remove-tests \ + --verbose +} - rm -rf "${td}" - rm "${0}" +sync() { + python3 ./repo sync -c --no-clone-bundle "${1}" } main "${@}" diff --git a/docker/android/README.md b/docker/android/README.md new file mode 100644 index 000000000..f433c2ad7 --- /dev/null +++ b/docker/android/README.md @@ 
-0,0 +1,4 @@ +android +======= + +Utilities for working with the Android project, particularly, for modifying and working with the Android build system. This facilitates modifying both Soong and Make build files, to remove unittests to speed up builds and minimize the number of dependencies. diff --git a/docker/android/android/__init__.py b/docker/android/android/__init__.py new file mode 100644 index 000000000..4b7224d20 --- /dev/null +++ b/docker/android/android/__init__.py @@ -0,0 +1,15 @@ +import sys + +# we run this script once every build, and we'd rather +# have much smaller image sizes, so copying without +# any bytecode is a better idea. +sys.dont_write_bytecode = True + +__version__ = '0.0.0-dev.0' +__version_info__ = (0, 0, 0, 'dev.0') +__license__ = 'MIT OR Apache-2.0' + +__all__ = [ + "make", + "soong", +] diff --git a/docker/android/android/make.py b/docker/android/android/make.py new file mode 100644 index 000000000..e6ae347b5 --- /dev/null +++ b/docker/android/android/make.py @@ -0,0 +1,475 @@ +''' + make + ==== + + utilities to process makefiles. this parser is not sophisticated + nor correct, but it tries to avoid a few common pitfalls by + handling conditional blocks, and first separating all conditional + blocks into sections, and then parsing comment blocks within those + sections. + + valid conditional directives are: + - ifeq + - ifneq + - ifdef + - ifndef + - else + - endif + + makefiles are whitespace-sensitive, but not with leading whitespace + for conditional directives. for example, this is valid (replacing the + spaces with tabs): + + # --------------- + # Section 1. + # --------------- + ifneq ($(USE_A),) + # ----------- + # Section 2. + # ----------- + ifneq ($(USE_B),) + SOURCES=b.cc + else + SOURCES=a.cc + endif + else + SOURCES=c.cc + endif + + our goals are fairly different from a regular parser: we want to detect + and excise sections based on the comments, while ensuring that we do + not produce invalid output. 
other than unbalanced conditional directives, + we do not actually care about the actual contents. + + for this, we use a 3 step parsing approach: + 1. break up document into blocks separated by directives + - each block can be a regular or directive block + - directive blocks have a start and end directive as well as contents + - directives can be infinitely nested: the contents can also be a list + 2. break each text block based on comment sections + 3. group blocks within comment sections + + for example, in the above, we want the entire makefile to be inside the + section 1 comment block, so removing it would remove that whole tree. + similarly, the inner directive block should be inside the section 2 + comment block. we would therefore produce something like this: + + CommentBlock: Section 1 + Directive Block: + start=ifneq ($(USE_A),) + end=endif + children: + CommentBlock: Section 2 + Directive Block: + start=ifneq ($(USE_B),) + end=endif + children: + Block: `SOURCES=b.cc\nelse\nSOURCES=a.cc` + Block: `else\nSOURCES=c.cc` +''' + +import re + +from . 
import util + + +def loads(contents, *_, **__): + return Makefile.loads(contents) + + +def load(fp, *_, **__): + return Makefile.load(fp) + + +def dumps(makefile, *_, **__): + return makefile.dumps() + + +def dump(makefile, fp, *_, **__): + return makefile.dump(fp) + + +class Makefile(list): + @staticmethod + def loads(contents, *_, **__): + directives = _split_directives(iter(contents.splitlines()))[0] + blocks = directives.split_comments() + blocks.group_comments() + + return Makefile(blocks) + + @staticmethod + def load(fp, *_, **__): + return Makefile.loads(fp.read()) + + def dumps(self, *_, **__): + return str(self) + + def dump(self, fp, *_, **__): + fp.write(self.dumps() + '\n') + + def filter(self, op): + return _filter_list(self, op) + + def recurse(self, max_depth=-1, depth=0): + yield from _recurse_list(self, max_depth, depth) + + def __repr__(self): + return f'Makefile({str(self)})' + + def __str__(self): + return '\n'.join([str(i) for i in self]) + + +class Node: + def is_block(self): + return False + + def is_block_list(self): + return False + + def is_comment(self): + return False + + def is_directive(self): + return False + + def is_test(self): + return False + + def is_benchmark(self): + return False + + def is_dev(self): + return self.is_test() or self.is_benchmark() + + def has_block_list(self): + return False + + def filter(self, op): + raise NotImplementedError + + def recurse(self, max_depth=-1, depth=0): + raise NotImplementedError + + +class Block(str, Node): + @property + def child(self): + return str(self) + + def __repr__(self): + return f'Block({str(self)})' + + def __str__(self): + return super().__str__() + + def is_block(self): + return True + + def split_comments(self): + return _split_comments(str(self)) + + def group_comments(self): + pass + + def filter(self, op): + return op(self) + + +class BlockList(list, Node): + def __init__(self, *args, **kwds): + super().__init__(*args, **kwds) + assert all([isinstance(i, Node) for i in 
self]) + + @property + def child(self): + return self + + def __repr__(self): + return f'BlockList({str(self)})' + + def __str__(self): + return '\n'.join([str(i) for i in self]) + + def is_block_list(self): + return True + + def split_comments(self): + return BlockList(util.flatten([i.split_comments() for i in self])) + + def group_comments(self): + self[:] = _group_comments(self) + + def filter(self, op): + return _filter_list(self, op) + + def recurse(self, max_depth=-1, depth=0): + yield from _recurse_list(self, max_depth, depth) + + +class CommentBlock(Node): + # the child is either a Block or BlockList + def __init__(self, comment, title, child): + assert isinstance(child, Node) + + self.comment = comment + self.title = title + self.child = child + + def __eq__(self, other): + return (self.comment, self.title, self.child) == (other.comment, other.title, other.child) + + def __repr__(self): + return f'CommentBlock({str(self)})' + + def __str__(self): + return f'{self.comment}\n{str(self.child)}' + + def is_comment(self): + return True + + def is_test(self): + return self.title is not None and 'test' in self.title.lower() + + def is_benchmark(self): + return self.title is not None and 'benchmark' in self.title.lower() + + def has_block_list(self): + return self.child.is_block_list() + + def split_comments(self): + raise NotImplementedError('cannot split comments in split comment block') + + def group_comments(self): + raise NotImplementedError('grouping comments should be done outside a comment block') + + def flatten_single(self): + if isinstance(self.child, list) and len(self.child) == 1: + self.child = self.child[0] + + def filter(self, op): + return op(self) and self.child.filter(op) + + +class DirectiveBlock(Node): + # the child is either a Block or BlockList + def __init__(self, start, end, child): + assert isinstance(child, Node) + if isinstance(child, list) and len(child) == 1: + child = child[0] + + self.start = start + self.end = end + self.child = 
child + + def __eq__(self, other): + return (self.start, self.end, self.child) == (other.start, other.end, other.child) + + def __repr__(self): + return f'DirectiveBlock({str(self)})' + + def __str__(self): + return f'{self.start}\n{str(self.child)}\n{self.end}' + + def is_directive(self): + return True + + def has_block_list(self): + return self.child.is_block_list() + + def split_comments(self): + child = self.child.split_comments() + # every caller expects a list, so we return a single-element list + return BlockList([DirectiveBlock(self.start, self.end, child)]) + + def group_comments(self): + self.child.group_comments() + self.flatten_single() + + def flatten_single(self): + if isinstance(self.child, list) and len(self.child) == 1: + self.child = self.child[0] + + def filter(self, op): + return op(self) and self.child.filter(op) + + +# split on comment sections, for example the below will split on the +# benchmarks section. +# +# LOCAL_PATH := $(call my-dir) +# +# # ----------------------------------------------------------------------------- +# # Benchmarks. +# # ----------------------------------------------------------------------------- +# +# test_tags := tests +def _split_comments(contents): + def new_comment(match, next=None): + comment = match.group(1) + title = match.group(2) or match.group(3) + if next is None: + data = contents[match.end():] + else: + data = contents[match.end():next.start()] + if next is not None: + assert data.endswith('\n') + data = data[:-1] + return CommentBlock(comment, title, Block(data)) + + # if we just have 1 or 2 characters, can falsely match + sep1 = r'#\s+={5,}' + sep2 = r'#\s+-{5,}' + sp = r'[ \t]' + nl = r'(?:\r\n|\r|\n)' + comment = r'[A-Za-z0-9._ -]+' + pat1 = fr'(?:{sp}*{sep1}{nl})?{sp}*#{sp}*({comment}){nl}{sp}*{sep1}' + pat2 = fr'(?:{sp}*{sep2}{nl})?{sp}*#{sp}*({comment}){nl}{sp}*{sep2}' + pattern = fr'(?m)^((?:{pat1})|(?:{pat2})){nl}?' 
+ + blocks = BlockList() + if not contents: + return blocks + + matches = list(re.finditer(pattern, contents)) + if len(matches) == 0: + blocks.append(Block(contents)) + else: + first = matches[0] + last = matches[-1] + if first.start() != 0: + assert contents[first.start() - 1] == '\n' + blocks.append(Block(contents[:first.start() - 1])) + for (match, next) in util.windows(matches, 2): + blocks.append(new_comment(match, next)) + blocks.append(new_comment(last)) + + return blocks + + +# lines is an iterable over each line in the content. splits like something +# above into a start token of `ifneq ($(ENV2),)`, and end of `endif`, +# and the internal contents as a `Block`. +# +# ifneq ($(ENV2),) +# benchmark_src_files += bench1.cc +# else +# benchmark_src_files += bench2.cc +# endif +def _split_directives(lines, in_scope=False): + def add_current(blocks, current): + if current: + blocks.append(Block('\n'.join(current))) + + # we ignore else since removing it won't actually affect the code + start_directives = ('ifeq', 'ifneq', 'ifdef', 'ifndef') + end_directives = ('endif',) + + blocks = BlockList() + current = [] + for line in lines: + trimmed = line.lstrip() + if trimmed.startswith(start_directives): + start = line + add_current(blocks, current) + child, end = _split_directives(lines, True) + directive = DirectiveBlock(start, end, child) + directive.flatten_single() + blocks.append(directive) + current = [] + elif in_scope and trimmed.startswith(end_directives): + end = line + add_current(blocks, current) + return blocks, end + else: + current.append(line) + + add_current(blocks, current) + + return blocks, None + + +# this groups directives and comments so any directives within a +# comment block are properly grouped. say i have the following: +# +# LOCAL_PATH := $(call my-dir) +# +# # ----------------------------------------------------------------------------- +# # Section 1. 
+# # ----------------------------------------------------------------------------- +# LOCAL_SRC_FILES := src.c +# ifneq ($(ENV2),) +# benchmark_src_files += bench1.cc +# else +# benchmark_src_files += bench2.cc +# endif +# +# # ----------------------------------------------------------------------------- +# # Section 2. +# # ----------------------------------------------------------------------------- +# LOCAL_CFLAGS := $(test_c_flags) +# +# normally, we'd have 5 sections: block, comment, directive, block, comment +# however, we want to group it in block, comment, comment, where the directive +# and subsequent block are in the comment. +def _group_comments(blocks): + def add_current(result, current): + if isinstance(current.child, list) and len(current.child) == 1: + current.child = current.child[0] + result.append(current) + + def new_comment(block): + current = CommentBlock(block.comment, block.title, BlockList()) + if block.child: + current.child.append(block.child) + return current + + result = BlockList() + current = BlockList() + for block in blocks: + # any comments cannot have been grouped already, so we assume str values + assert not block.is_comment() or isinstance(block.child, str) + assert not block.is_block_list() + if not block.is_comment(): + block.group_comments() + + if current.is_comment() and block.is_comment(): + # new comment replaces the old one + current.flatten_single() + result.append(current) + current = new_comment(block) + elif block.is_comment(): + # first comment block seen in the file + result += current + current = new_comment(block) + elif current.is_comment(): + # regular block after a comment block + current.child.append(block) + else: + # regular block before any comment blocks + current.append(block) + + if current.is_comment(): + current.flatten_single() + result.append(current) + else: + result += current + + return result + + +# retain all items matching the condition in a list +def _filter_list(lst, op): + # use slice 
 assignment to ensure this happens in-place + lst[:] = [i for i in lst if i.filter(op)] + return lst + + +# yield iteratively all child blocks +def _recurse_list(lst, max_depth=-1, depth=0): + if depth != max_depth: + for node in lst: + yield node + if node.has_block_list(): + yield from node.child.recurse(max_depth, depth + 1) diff --git a/docker/android/android/soong.py b/docker/android/android/soong.py new file mode 100644 index 000000000..24176817a --- /dev/null +++ b/docker/android/android/soong.py @@ -0,0 +1,646 @@ +''' + soong + ===== + + utilities to process soong blueprint files. these are a go-like, + json-like data file format. they support nested maps, lists, + bools, strings, and use of variables. for example: + + array = ["..."] + cc_defaults { + name: "target", + options: array, + flags: ["..."], + } + cc_test { + name: "test", + defaults: ["target"], + srcs: ["test.cc"], + nested: { + array: { + option: false, + }, + }, + } + + the specification can be found below: + https://source.android.com/docs/core/tests/development/blueprints + https://android.googlesource.com/platform/build/soong/+/refs/heads/master/README.md + + they also support single-line C++-style and multiline C-style comments. + the valid types are: + - bool (`true`, `false`) + - int + - string + - list (of strings) + - map + + both lists and maps support optional trailing commas. any value type + can be present in a map, while only strings are allowed in lists. + integers, strings, arrays and maps also support the `+` operator, + where `+` sums up integers. for strings and arrays, it appends the new + data. for maps, it produces the union of both keys, and for keys present + in both, it appends the value on the right-operand to the value in the + left one. + + variable assignment produces immutable types, except for the `+=` operator. + `+=` does the described operation above in-place. 
+ + this parser doesn't need to be exactly correct: it does not need to reject + subtley invalid input. for example `name = { }` may or may not be correct, + but it's fine to accept it as long as we output it identically. this is + supposed to handle all correct input and outputs it as correct output: + it doesn't need to validate type correctness. + + this uses LALR parsing since it makes the grammar very easy to define and + the parsing simple. since the build step and repository synchronization + is much slower, the performance here is practically irrelevant. +''' + +import json +import sys + +import sly + +# dictionaries got insertion order in 3.6, guaranteed in 3.7 +assert sys.version_info >= (3, 6) + +# base character defs +_H = r'[0-9a-f]' +_NL = r'\n|\r\n|\r|\f' +_UNICODE = fr'\\{_H}{1,6}(\r\n|[ \t\r\n\f])?' +_ESCAPE = r'{_UNICODE}|\\[^\r\n\f0-9a-f]' +_SINGLELINE_COMMENT = r'\/\/.*' +# can't use reflags without setting them for all, so do manual dotall +_MULTILINE_COMMENT = r'\/\*[\u0000-\U0010FFFF]*?\*\/' +_COMMENT = fr'(?:{_SINGLELINE_COMMENT})|(?:{_MULTILINE_COMMENT})' + + +def loads(contents, *_, **__): + return Ast.loads(contents) + + +def load(fp, *_, **__): + return Ast.load(fp) + + +def dumps(soong, pretty=True, indent=4, *_, **__): + return soong.dumps(pretty, indent) + + +def dump(soong, fp, pretty=True, indent=4, *_, **__): + return soong.dump(fp, pretty, indent) + + +class Lexer(sly.Lexer): + tokens = { + BOOL, + INTEGER, + IDENT, + STRING, + LBRACKET, + RBRACKET, + LBRACE, + RBRACE, + COLON, + COMMA, + EQUALS, + PLUS, + } + ignore = ' \t' + ignore_comment = _COMMENT + + # Tokens + # this uses a string regex based on the CSS2.1 grammar + STRING = fr'"([^\n\r\f\\"]|\\{_NL}|{_ESCAPE})*"' + INTEGER = r'\d+' + BOOL = '(?:true)|(?:false)' + IDENT = r'[a-zA-Z_][a-zA-Z0-9_]*' + LBRACKET = r'\[' + RBRACKET = r'\]' + LBRACE = r'\{' + RBRACE = r'\}' + COLON = r':' + COMMA = r',' + EQUALS = r'=' + PLUS = r'\+' + + @_(r'\n+') + def newline(self, token): + 
self.lineno += token.value.count('\n') + + def error(self, token): + raise ValueError(f'Illegal character \'{token.value[0]}\'') + + +class Parser(sly.Parser): + tokens = Lexer.tokens + + precedence = ( + ('left', PLUS), + ) + + @_('rules') + def ast(self, prod): + return Ast(prod.rules) + + @_('empty') + def ast(self, prod): + return Ast() + + @_('rules rule') + def rules(self, prod): + return prod.rules + [prod.rule] + + @_('rule') + def rules(self, prod): + return [prod.rule] + + @_('assignment', 'binary_operator_assignment', 'scope') + def rule(self, prod): + return prod[0] + + @_('ident EQUALS expr') + def assignment(self, prod): + return Assignment(prod.ident, prod.expr) + + @_('ident PLUS EQUALS expr') + def binary_operator_assignment(self, prod): + return BinaryOperatorAssignment( + prod.ident, + f'{prod[1]}{prod[2]}', + prod.expr, + ) + + @_('expr PLUS expr') + def binary_operator(self, prod): + return BinaryOperator(prod[0], prod[1], prod[2]) + + @_('ident map') + def scope(self, prod): + return Scope(prod.ident, prod.map) + + @_('LBRACE pairs RBRACE', 'LBRACE pairs COMMA RBRACE') + def map(self, prod): + return Map(prod.pairs) + + @_('LBRACE RBRACE') + def map(self, prod): + return Map() + + @_('pairs COMMA pair') + def pairs(self, prod): + return prod.pairs + [prod.pair] + + @_('pair') + def pairs(self, prod): + return [prod.pair] + + @_('ident COLON expr', 'ident EQUALS expr') + def pair(self, prod): + return (prod.ident, MapValue(prod[1], prod.expr)) + + @_('ident', 'binary_operator', 'map', 'list', 'string', 'integer', 'bool') + def expr(self, prod): + return prod[0] + + @_('LBRACKET sequence RBRACKET', 'LBRACKET sequence COMMA RBRACKET') + def list(self, prod): + return List(prod.sequence) + + @_('LBRACKET RBRACKET') + def list(self, prod): + return List() + + @_('sequence COMMA list_item') + def sequence(self, prod): + return prod.sequence + [prod.list_item] + + @_('list_item') + def sequence(self, prod): + return [prod.list_item] + + @_('list_item 
PLUS list_item') + def list_item(self, prod): + return BinaryOperator(prod[0], '+', prod[2]) + + @_('string', 'ident') + def list_item(self, prod): + return prod[0] + + @_('IDENT') + def ident(self, prod): + return Ident(prod.IDENT) + + @_('STRING') + def string(self, prod): + return String(prod.STRING) + + @_('INTEGER') + def integer(self, prod): + return Integer(prod.INTEGER) + + @_('BOOL') + def bool(self, prod): + return Bool(json.loads(prod.BOOL)) + + # needed in case no tokens are produced + @_('') + def empty(self, p): + pass + + def error(self, token): + raise ValueError(f'Illegal token {repr(token)}') + + +class Node: + def is_assignment(self): + return False + + def is_binary_operator_assignment(self): + return False + + def is_binary_operator(self): + return False + + def is_scope(self): + return False + + def is_map(self): + return False + + def is_list(self): + return False + + def is_map_value(self): + return False + + def is_ident(self): + return False + + def is_string(self): + return False + + def is_integer(self): + return False + + def is_bool(self): + return False + + +class Ast(list, Node): + def __init__(self, values=None): + if values is None: + values = [] + valid_nodes = (Assignment, BinaryOperatorAssignment, Scope) + assert all(isinstance(i, valid_nodes) for i in values) + super().__init__(values) + + def __repr__(self): + return f'Ast({str(self)})' + + def __str__(self): + return self.to_str(pretty=False) + + def to_str(self, pretty=True, indent=4, depth=0): + assert depth == 0 + return '\n'.join([i.to_str(pretty, indent, depth) for i in self]) + + @staticmethod + def loads(contents, *_, **__): + lexer = Lexer() + tokens = lexer.tokenize(contents) + parser = Parser() + return parser.parse(tokens) + + @staticmethod + def load(fp, *_, **__): + return Ast.loads(fp.read()) + + def dumps(self, pretty=True, indent=4, *_, **__): + return self.to_str(pretty, indent) + + def dump(self, fp, pretty=True, indent=4, *_, **__): + # always write a 
trailing newline + fp.write(self.dumps(pretty, indent) + '\n') + + def filter(self, op): + # use slice assignment to ensure this happens in-place + self[:] = [i for i in self if op(i)] + + +class Assignment(Node): + def __init__(self, name, expr): + self.name = name + self.expr = expr + + def __repr__(self): + return f'Assignment({str(self)})' + + def __str__(self): + return self.to_str(pretty=False) + + def to_str(self, pretty=True, indent=4, depth=0): + return f'{str(self.name)} = {self.expr.to_str(pretty, indent, depth)}' + + def is_assignment(self): + return True + + def __eq__(self, other): + return (self.name, self.expr) == (other.name, other.expr) + + +class BinaryOperatorAssignment(Node): + def __init__(self, name, op, expr): + self.name = name + self.op = op + self.expr = expr + + def __repr__(self): + return f'BinaryOperatorAssignment({str(self)})' + + def __str__(self): + return self.to_str(pretty=False) + + def to_str(self, pretty=True, indent=4, depth=0): + expr = self.expr.to_str(pretty, indent, depth) + return f'{str(self.name)} {self.op} {expr}' + + def is_binary_operator_assignment(self): + return True + + def __eq__(self, other): + return (self.name, self.op, self.expr) == (other.name, other.op, other.expr) + + +class BinaryOperator(Node): + def __init__(self, lhs, op, rhs): + self.lhs = lhs + self.op = op + self.rhs = rhs + + def __repr__(self): + return f'BinaryOperator({str(self)})' + + def __str__(self): + return self.to_str(pretty=False) + + def to_str(self, pretty=True, indent=4, depth=0): + lhs = self.lhs.to_str(pretty, indent, depth) + rhs = self.rhs.to_str(pretty, indent, depth) + return f'{lhs} {self.op} {rhs}' + + def is_binary_operator(self): + return True + + def str_op(self, cmp): + return ( + (self.lhs.is_string() and self.lhs.str_op(cmp)) + or (self.rhs.is_string() and self.rhs.str_op(cmp)) + ) + + def __eq__(self, other): + return (self.lhs, self.op, self.rhs) == (other.lhs, other.op, other.rhs) + + +class Scope(Node): + def 
__init__(self, name, map): + self.name = name + self.map = map + + def __repr__(self): + return f'Scope({str(self)})' + + def __str__(self): + return self.to_str(pretty=False) + + def to_str(self, pretty=True, indent=4, depth=0): + return f'{str(self.name)} {self.map.to_str(pretty, indent, depth)}' + + def is_scope(self): + return True + + def __eq__(self, other): + return (self.name, self.map) == (other.name, other.map) + + def is_art_check(self): + return 'art-check' in self.name.lower() or self.map.is_art_check() + + def is_test(self): + return 'test' in self.name.lower() or self.map.is_test() + + def is_benchmark(self): + return 'benchmark' in self.name.lower() or self.map.is_benchmark() + + def is_dev(self): + return self.is_art_check() or self.is_test() or self.is_benchmark() + + +class Map(dict, Node): + def __repr__(self): + return f'Map({str(self)})' + + def __str__(self): + return self.to_str(pretty=False) + + def to_str(self, pretty=True, indent=4, depth=0): + fmt = lambda x: x.to_str(pretty, indent, depth + 1) + result = '{' + pairs = [f'{fmt(k)}{fmt(v)}' for k, v in self.items()] + if len(self) == 0: + result += '}' + elif pretty: + result += '\n' + for pair in pairs: + result += _indent(indent, depth + 1) + f'{pair},\n' + result += _indent(indent, depth) + '}' + else: + result += ', '.join(pairs) + '}' + + return result + + def is_map(self): + return True + + def is_art_check(self): + name = self.get('name') + if name is None: + return False + return 'art-check' in name.value.lower() + + def is_test(self): + name = self.get('name') + if name is None: + return False + # cannot remove `py2-c-module-_ctypes_test` type tests, + # since they're needed to be linked in the final binary. 
+ lower = name.value.lower() + return 'test' in lower and 'py2-c-module' not in lower + + def is_benchmark(self): + name = self.get('name') + if name is None: + return False + return 'benchmark' in name.value.lower() + + def is_dev(self): + return self.is_test() or self.is_benchmark() + + def filter(self, op): + filtered = {k: v for k, v in self.items() if op(k, v)} + self.clear() + self.update(filtered) + + def recurse(self, max_depth=-1, depth=0): + # recursively find all key/value pairs in the current and any submaps + if depth != max_depth: + for key, value in self.items(): + yield (key, value, depth + 1, self) + if value.value.is_map(): + yield from value.value.recurse(max_depth, depth + 1) + + +class List(list, Node): + def __repr__(self): + return f'List({str(self)})' + + def __str__(self): + return self.to_str(pretty=False) + + def to_str(self, pretty=True, indent=4, depth=0): + fmt = lambda x: x.to_str(pretty, indent, depth + 1) + result = '[' + if len(self) <= 1 or not pretty: + result += ', '.join([fmt(i) for i in self]) + ']' + else: + result += '\n' + for element in self: + result += _indent(indent, depth + 1) + f'{fmt(element)},\n' + result += _indent(indent, depth) + ']' + + return result + + def is_list(self): + return True + + def filter(self, op): + # use slice assignment to ensure this happens in-place + self[:] = [i for i in self if op(i)] + + +class MapValue(Node): + def __init__(self, delimiter, value): + # map key/value separators can be `:` or `=`. 
+ assert delimiter in (':', '=') + self.delimiter = delimiter + self.value = value + + def __repr__(self): + return f'MapValue({str(self)})' + + def __str__(self): + return self.to_str(False) + + def __eq__(self, other): + # delimiter doesn't matter for equality comparison + if isinstance(other, MapValue): + return self.value == other.value + return self.value == other + + def __len__(self): + return len(self.value) + + def to_str(self, pretty=True, indent=4, depth=0): + value = self.value.to_str(pretty, indent, depth) + if self.delimiter == '=': + return f' = {value}' + return f': {value}' + + def is_map_value(self): + return True + + def filter(self, op): + self.value.filter(op) + + +class Ident(str, Node): + def __repr__(self): + return f'Ident({str(self)})' + + def __str__(self): + return super().__str__() + + def to_str(self, *_, **__): + return str(self) + + def is_ident(self): + return True + + +class String(str, Node): + def __repr__(self): + return f'String({self.to_str()})' + + def to_str(self, *_, **__): + return f'{super().__str__()}' + + def str_op(self, cmp): + return cmp(self) + + def __str__(self): + # `"target"` should be shown as `'target'`, not `'"target"'` + return super().__str__()[1:-1] + + def __eq__(self, other): + if type(other) is String: + return str(self) == str(other) + # we want to be compare equal to the string's value + return str(self) == other + + def __ne__(self, other): + # need to override `__ne__` which normally uses a pyslot + return not self.__eq__(other) + + def is_string(self): + return True + + +class Integer(int, Node): + def __repr__(self): + return f'Integer({str(self)})' + + def __str__(self): + return str(int(self)) + + def to_str(self, *_, **__): + return str(self) + + def is_integer(self): + return True + + +class Bool(Node): + def __init__(self, value=False): + self.value = value + + def __bool__(self): + return self.value + + def __repr__(self): + return f'Bool({json.dumps(self.value)})' + + def __str__(self): + 
return json.dumps(self.value) + + def to_str(self, *_, **__): + return str(self) + + def is_bool(self): + return True + + def __eq__(self, other): + return self.value == other.value + + +def _indent(indent=4, depth=0, char=' '): + return char * indent * depth diff --git a/docker/android/android/util.py b/docker/android/android/util.py new file mode 100644 index 000000000..51088e170 --- /dev/null +++ b/docker/android/android/util.py @@ -0,0 +1,7 @@ +def windows(sequence, count): + for i in range(len(sequence) - count + 1): + yield sequence[i:i + count] + + +def flatten(lst): + return [i for sublist in lst for i in sublist] diff --git a/docker/android/pyproject.toml b/docker/android/pyproject.toml new file mode 100644 index 000000000..4cf1680f8 --- /dev/null +++ b/docker/android/pyproject.toml @@ -0,0 +1,12 @@ +[project] +name = "android" +version = "0.0.0-dev.0" +license = { text = "MIT OR Apache-2.0" } +dependencies = ["sly==0.4"] + +[build-system] +requires = [ + "setuptools >= 35.0.2", + "setuptools_scm >= 2.0.0, <3" +] +build-backend = "setuptools.build_meta" diff --git a/docker/android/scripts/build-system.py b/docker/android/scripts/build-system.py new file mode 100644 index 000000000..e6c5b8820 --- /dev/null +++ b/docker/android/scripts/build-system.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python +''' + Remove most unittests from Android soong blueprint + files, most of which are identified via a `cc_test*` + scope identifier, as well as some additional `subdirs` + identifiers and Makefile specifiers. + + This also allows you to backup and restore these scripts. + The build files are automatically backed up by default. 
+''' + +import argparse +import glob +import os +import shutil +import subprocess +import sys + +SCRIPTS_DIR = os.path.dirname(os.path.realpath(__file__)) +PROJECT_DIR = os.path.dirname(SCRIPTS_DIR) +sys.path.insert(0, PROJECT_DIR) + +import android +import android.make +import android.soong + + +def print_verbose(message, verbose): + if verbose: + print(message) + + +def backup(src, args, *_): + dst = src + '.bak' + print_verbose(f'creating backup of file "{src}" at "{dst}"', args.verbose) + shutil.copy2(src, dst) + + +def restore(dst, args, *_): + src = dst + '.bak' + if os.path.exists(src): + print_verbose(f'restoring from backup "{src}" to "{dst}"', args.verbose) + shutil.copy2(src, dst) + + +def remove_soong_tests(path, args, *_): + def filter_tests(x, remove): + return x.str_op(lambda y: not any(i in y.lower() for i in remove)) + + print_verbose(f'removing soong tests from "{path}"', args.verbose) + with open(path) as file: + ast = android.soong.load(file) + # remove the test or benchmark scopes, i.e., those with `cc_test` + # or those with `{name: "test"}`, etc. + ast.filter(lambda x: not (x.is_scope() and x.is_dev())) + # need to remove test and benchmark subdirs + test_names = ('test', 'benchmark') + subdirs = [i for i in ast if i.name == 'subdirs'] + for sub in subdirs: + assert type(sub.expr) is android.soong.List + sub.expr.filter(lambda x: filter_tests(x, test_names)) + # remove gtest dependencies from regular targets. 
+ for node in ast: + map = None + if not node.is_scope() and not node.expr.is_map(): + continue + if node.is_scope(): + map = node.map + else: + map = node.expr + test_names = ('libgtest', 'test-proto') + for key, value, *_ in map.recurse(): + if value.value.is_list(): + if key == 'testSrcs': + value.value.clear() + else: + value.filter(lambda x: filter_tests(x, test_names)) + + with open(path, 'w') as file: + ast.dump(file) + + +def remove_makefile_tests(path, args, *_): + print_verbose(f'removing makefile tests from "{path}"', args.verbose) + with open(path) as file: + makefile = android.make.load(file) + makefile.filter(lambda x: not x.is_dev()) + with open(path, 'w') as file: + makefile.dump(file) + + +def remove_tests(path, args, processor): + if os.path.exists(path + '.bak'): + restore(path, args) + elif not args.disable_backup: + backup(path, args) + processor(path, args) + + +def stash(root): + git_glob = f'{root}/**/.git' + for path in glob.iglob(git_glob, recursive=True): + os.chdir(os.path.dirname(path)) + subprocess.check_call(['git', 'stash']) + + +def main(): + parser = argparse.ArgumentParser() + action_group = parser.add_mutually_exclusive_group(required=True) + action_group.add_argument( + '--backup', + help='backup build files', + action='store_true', + ) + action_group.add_argument( + '--restore', + help='restore build files', + action='store_true', + ) + action_group.add_argument( + '--remove-tests', + help='remove most tests from the build system.', + action='store_true', + ) + action_group.add_argument( + '--stash', + help='stash all local changes.', + action='store_true', + ) + parser.add_argument( + '--disable-backup', + help='disable automatic backup of build files during processing.', + action='store_true', + ) + flags_group = parser.add_mutually_exclusive_group() + flags_group.add_argument( + '--soong-only', + help='only process soong build files.', + action='store_true', + ) + flags_group.add_argument( + '--makefile-only', + help='only 
process makefiles.', + action='store_true', + ) + parser.add_argument( + '-V', + '--version', + action='version', + version=android.__version__ + ) + parser.add_argument( + '-v', + '--verbose', + help='display verbose diagnostic info.', + action='store_true', + ) + args = parser.parse_args() + if args.backup: + action = backup + elif args.restore: + action = restore + elif args.remove_tests: + action = remove_tests + elif args.stash: + action = stash + + # root_dir is only available 3.10+ + root = os.environ.get('ANDROID_ROOT') + if root is None: + root = os.getcwd() + if args.stash: + return stash(root) + + if not args.makefile_only: + soong_glob = f'{root}/**/Android.bp' + for path in glob.iglob(soong_glob, recursive=True): + action(path, args, remove_soong_tests) + + if not args.soong_only: + make_glob = f'{root}/**/Android.mk' + for path in glob.iglob(make_glob, recursive=True): + action(path, args, remove_makefile_tests) + + +if __name__ == '__main__': + main() diff --git a/docker/android/tests/Addition.bp b/docker/android/tests/Addition.bp new file mode 100644 index 000000000..03c2fb001 --- /dev/null +++ b/docker/android/tests/Addition.bp @@ -0,0 +1,62 @@ +// special file testing `+` and `+=` operators +// this is assignment + add assignment +list = ["value1"] +list += ["value2"] +number = 1 +number += 2 +string = "string" +string += "_suffix" +scope { + name: "target", +} +scope += { + name: "_suffix", + srcs: [ + // sequence items just have to evaluate to strings + "tree.cc" + string, + "lib.cc", + ], +} +// this is addition with lhs idents +lhs_sum = number + 4 +lhs_string = string + "_suffix" +lhs_list = list + ["value3"] +lhs_scope = scope + { + name: "_suffix", + cflags: [ + "-Wall", + ], +} +// this is addition with rhs idents +rhs_sum = 4 + number +rhs_string = "prefix_" + string +rhs_list = ["value0"] + list +rhs_scope = { + name: "_suffix", + cflags: [ + "-Wall", + ], +} + scope +// this is addition with both being non-idents +expr_sum = 4 + 1 
+expr_string = "prefix_" + "suffix" +expr_list = ["value0"] + ["value1"] +expr_scope = {} + { + name: "target", +} +// test multiple binary ops +tri_sum = 4 + 1 + 2 +tri_string = "prefix_" + "middle" + "_suffix" +tri_list = ["value0"] + ["value1"] + ["value2"] +tri_scope = {} + { + name: "target", +} + {} +// test sequence lhs and rhs strings +home = "dir/" +test = "test.c" + +files = [ + home + "file.c", + "test/" + test, + home + test, +] diff --git a/docker/android/tests/Android.bp b/docker/android/tests/Android.bp new file mode 100644 index 000000000..c0e5cf6ea --- /dev/null +++ b/docker/android/tests/Android.bp @@ -0,0 +1,80 @@ +// sample heading comment +sample_array = [ + "value1", + "value2", +] +/** + sample + multiline + comment + */ +cc_defaults { + name: "target", + cflags: [ + "-Wall", + "-fstrict-aliasing", + ], + option: true, + tidy_checks: sample_array, + tidy_checks_as_errors: sample_array, + array: [ + "-short", + "--root='/path/to/dir'", + ], +} +cc_library_static { + name: "static_lib", + srcs: [ + "tree.cc", + "lib.cc", + ], + include_dirs: ["bionic/libc"], + export_include_dirs: ["."], +} +cc_library { + name: "lib", + srcs: [ + "tree.cc", + "lib.cc", + ], + include_dirs: ["bionic/libc"], + export_include_dirs: ["."], +} +cc_test { + name: "test", + defaults: ["target"], + srcs: ["test.cc"], + nested: { + array: { + option: false, + }, + }, +} +cc_test_host { + name: "host_test", + include_dirs: ["path/to/lib"], + compile_multilib: "64", + static_libs: [ + "libm", + "libz", + ], + host_ldlibs: [ + "-ldl", + "-lzstd", + "-l" + "z", + ], + shared_libs: [], + cflags = [ + "-Wall", + "-fstrict-aliasing", + ], +} +cc_defaults { + name: "custom", + shared_libs: ["libcustom"], + whole_static_libs: [ + "libz", + "libgtest_main", + ], + host_ldlibs: ["-lgtest"], +} diff --git a/docker/android/tests/Android.mk b/docker/android/tests/Android.mk new file mode 100644 index 000000000..3a87ec550 --- /dev/null +++ b/docker/android/tests/Android.mk @@ -0,0 
+1,101 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := config.c +LOCAL_MODULE := config +LOCAL_SHARED_LIBRARIES := libcutils +LOCAL_CFLAGS := -Werror + +include $(BUILD_HOST_EXECUTABLE) + +LOCAL_PATH := $(call my-dir) + +# ----------------------------------------------------------------------------- +# Benchmarks. +# ----------------------------------------------------------------------------- + +test_tags := tests + +benchmark_c_flags := \ + -Wall -Wextra \ + -Werror \ + -fno-builtin \ + +benchmark_src_files := \ + benchmark_main.cc \ + bench.cc + +# Build benchmarks. +include $(CLEAR_VARS) +LOCAL_MODULE := benchmarks +LOCAL_MODULE_TAGS := tests +LOCAL_CFLAGS += $(benchmark_c_flags) +LOCAL_SHARED_LIBRARIES += libm libdl +LOCAL_SRC_FILES := $(benchmark_src_files) + +# ----------------------------------------------------------------------------- +# Unit tests. +# ----------------------------------------------------------------------------- + +test_c_flags := \ + -g \ + -Wall \ + -Werror + +################################## +# test executable +LOCAL_MODULE := module +LOCAL_SRC_FILES := src.c +LOCAL_SHARED_LIBRARIES := libcutils +LOCAL_CFLAGS := $(test_c_flags) +LOCAL_MODULE_RELATIVE_PATH := config-tests + +# Unit tests. +# ========================================================= + +include $(CLEAR_VARS) +LOCAL_MODULE := init_tests +LOCAL_SRC_FILES := \ + init_parser_test.cc \ + property_service_test.cc \ + service_test.cc \ + util_test.cc \ + +################################## +# test executable +LOCAL_MODULE := module +LOCAL_SRC_FILES := src.c +LOCAL_SHARED_LIBRARIES := libcutils +LOCAL_CFLAGS := $(test_c_flags) +LOCAL_MODULE_RELATIVE_PATH := config-tests +LOCAL_SHARED_LIBRARIES += \ + libcutils \ + libbase \ + +LOCAL_STATIC_LIBRARIES := libinit +LOCAL_SANITIZE := integer +LOCAL_CLANG := true +LOCAL_CPPFLAGS := -Wall -Wextra -Werror +include $(BUILD_NATIVE_TEST) + +# Other section. 
+# ========================================================= +include $(call all-makefiles-under,$(LOCAL_PATH)) + +# ============================================================================= +# Unit tests. +# ============================================================================= + +test_c_flags := \ + -g \ + -Wall \ + -Werror + +################################## +# test executable +LOCAL_MODULE := mod2 +LOCAL_SRC_FILES := mod.c +LOCAL_SHARED_LIBRARIES := libcutils +LOCAL_CFLAGS := $(test_c_flags) +LOCAL_MODULE_RELATIVE_PATH := mod2-tests diff --git a/docker/android/tests/Comments.mk b/docker/android/tests/Comments.mk new file mode 100644 index 000000000..e89ae9c22 --- /dev/null +++ b/docker/android/tests/Comments.mk @@ -0,0 +1,5 @@ +# 1) sample grouping: +# - text + suffix +# - some more text (the format) +# - API and policy info +# - more API + policy info diff --git a/docker/android/tests/Empty.bp b/docker/android/tests/Empty.bp new file mode 100644 index 000000000..30e5411e8 --- /dev/null +++ b/docker/android/tests/Empty.bp @@ -0,0 +1 @@ +// this file only has comments diff --git a/docker/android/tests/Empty.mk b/docker/android/tests/Empty.mk new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/docker/android/tests/Empty.mk @@ -0,0 +1 @@ + diff --git a/docker/android/tests/Grouped.mk b/docker/android/tests/Grouped.mk new file mode 100644 index 000000000..d50eebe98 --- /dev/null +++ b/docker/android/tests/Grouped.mk @@ -0,0 +1,22 @@ +LOCAL_PATH := $(call my-dir) +# ----------------------------------------------------------------------------- +# Section 1. +# ----------------------------------------------------------------------------- +LOCAL_SRC_FILES := src.c +ifneq ($(ENV1),) + # ----------------------------------------------------------------------------- + # Section 2. 
+ # ----------------------------------------------------------------------------- + ifneq ($(ENV2),) + benchmark_src_files += bench1.cc + else + benchmark_src_files += bench2.cc + endif +else + benchmark_src_files += bench3.cc +endif + +# ----------------------------------------------------------------------------- +# Section 3. +# ----------------------------------------------------------------------------- +LOCAL_CFLAGS := $(test_c_flags) diff --git a/docker/android/tests/Nested.mk b/docker/android/tests/Nested.mk new file mode 100644 index 000000000..98908ea71 --- /dev/null +++ b/docker/android/tests/Nested.mk @@ -0,0 +1,58 @@ +# this is a special makefile checking we handle nested +# conditionals properly, that removing sections won't +# cause unequal conditional blocks. it may still lead +# to missing definitions, but it won't fail due to +# unmatched if and endif directives. + +LOCAL_PATH := $(call my-dir) + +ifneq ($(ENV1),) + +# ----------------------------------------------------------------------------- +# Benchmarks. +# ----------------------------------------------------------------------------- + +test_tags := tests + +benchmark_c_flags := \ + -Wall -Wextra \ + -Werror \ + -fno-builtin \ + +benchmark_src_files := benchmark_main.cc +ifneq ($(ENV2),) + benchmark_src_files += bench1.cc +else + benchmark_src_files += bench2.cc +endif + +# Build benchmarks. +include $(CLEAR_VARS) +LOCAL_MODULE := benchmarks +LOCAL_MODULE_TAGS := tests +LOCAL_CFLAGS += $(benchmark_c_flags) +LOCAL_SHARED_LIBRARIES += libm libdl +LOCAL_SRC_FILES := $(benchmark_src_files) + +endif + +# Other section. +# ========================================================= +include $(call all-makefiles-under,$(LOCAL_PATH)) + +# ============================================================================= +# Unit tests. 
+# ============================================================================= + +test_c_flags := \ + -g \ + -Wall \ + -Werror + +################################## +# test executable +LOCAL_MODULE := mod2 +LOCAL_SRC_FILES := mod.c +LOCAL_SHARED_LIBRARIES := libcutils +LOCAL_CFLAGS := $(test_c_flags) +LOCAL_MODULE_RELATIVE_PATH := mod2-tests diff --git a/docker/android/tests/README.md b/docker/android/tests/README.md new file mode 100644 index 000000000..d9de0a096 --- /dev/null +++ b/docker/android/tests/README.md @@ -0,0 +1,13 @@ +android +======= + +Contains sample Soong blueprint files and Makefiles to test removal of unittests for build configurations. + +This requires a Python3 interpreter, and therefore is not run as part of the core test suite. Running the test suite requires: +- sly >= 0.4 +- pytest >= 7 +- toml >= 0.10 + +The module itself and the scripts only require: +- python >= 3.6 +- sly >= 0.4 diff --git a/docker/android/tests/Single.mk b/docker/android/tests/Single.mk new file mode 100644 index 000000000..e2e303279 --- /dev/null +++ b/docker/android/tests/Single.mk @@ -0,0 +1,22 @@ +# this is a special makefile without any blocks + +LOCAL_PATH := $(call my-dir) + +test_tags := tests + +benchmark_c_flags := \ + -Wall -Wextra \ + -Werror \ + -fno-builtin \ + +benchmark_src_files := \ + benchmark_main.cc \ + bench.cc + +# Build benchmarks. 
+include $(CLEAR_VARS) +LOCAL_MODULE := benchmarks +LOCAL_MODULE_TAGS := tests +LOCAL_CFLAGS += $(benchmark_c_flags) +LOCAL_SHARED_LIBRARIES += libm libdl +LOCAL_SRC_FILES := $(benchmark_src_files) diff --git a/docker/android/tests/test_make.py b/docker/android/tests/test_make.py new file mode 100644 index 000000000..d566d6e81 --- /dev/null +++ b/docker/android/tests/test_make.py @@ -0,0 +1,313 @@ +import copy +import os +import sys + +TEST_DIR = os.path.dirname(os.path.realpath(__file__)) +PROJECT_DIR = os.path.dirname(TEST_DIR) +sys.path.insert(0, PROJECT_DIR) + +from android import make + + +def test(): + path = os.path.join(TEST_DIR, 'Android.mk') + contents = open(path).read() + makefile = make.loads(contents) + stripped = contents[:-1] + assert repr(makefile) == f'Makefile({stripped})' + assert str(makefile) == stripped + assert len(makefile) == 6 + + assert not makefile[0].is_dev() + assert makefile[1].is_dev() + assert makefile[1].is_benchmark() + assert makefile[2].is_dev() + assert makefile[2].is_test() + assert makefile[4].title == 'Other section.' + + filtered = copy.deepcopy(makefile) + filtered.filter(lambda x: not x.is_dev()) + assert type(filtered) is make.Makefile + assert len(filtered) == 2 + assert not filtered[0].is_comment() + assert filtered[1].title == 'Other section.' + + assert makefile == make.load(open(path)) + assert contents == makefile.dumps() + '\n' + + +def test_nested(): + path = os.path.join(TEST_DIR, 'Nested.mk') + contents = open(path).read() + makefile = make.loads(contents) + assert str(makefile) + '\n' == contents + assert len(makefile) == 5 + + assert makefile[0].is_block() + assert makefile[0].child.startswith('# this is a special makefile') + + assert makefile[1].is_directive() + assert len(makefile[1].child) == 2 + assert makefile[1].child[0].is_block() + assert makefile[1].child[1].is_comment() + assert makefile[1].child[1].title == 'Benchmarks.' 
+ + outer = makefile[1].child[1] + assert len(outer.child) == 3 + assert outer.child[0].is_block() + assert outer.child[1].is_directive() + assert outer.child[2].is_block() + + inner = outer.child[1] + assert inner.child.is_block() + + +def test_comments(): + path = os.path.join(TEST_DIR, 'Comments.mk') + contents = open(path).read() + makefile = make.loads(contents) + assert str(makefile) + '\n' == contents + assert len(makefile) == 1 + + assert makefile[0].is_block() + assert makefile[0].child.startswith('# 1) sample grouping:') + + +def test_grouped(): + path = os.path.join(TEST_DIR, 'Grouped.mk') + contents = open(path).read() + makefile = make.loads(contents) + assert str(makefile) + '\n' == contents + assert len(makefile) == 3 + + assert makefile[0].is_block() + assert makefile[0].child.startswith('LOCAL_PATH := $(call my-dir)') + + comment = makefile[1] + assert comment.is_comment() + assert len(comment.child) == 3 + assert comment.child[0].child.startswith('LOCAL_SRC_FILES := src.c') + assert comment.child[1].is_directive() + assert len(comment.child[2].child) == 0 + + directives = comment.child[1] + inner_comment = directives.child + assert inner_comment.is_comment() + assert len(inner_comment.child) == 2 + assert inner_comment.child[0].is_directive() + assert inner_comment.child[1].child.startswith('else') + + inner = inner_comment.child[0] + assert inner.child.lstrip().startswith('benchmark_src_files') + + assert makefile[2].is_comment() + + +def test_recurse(): + path = os.path.join(TEST_DIR, 'Nested.mk') + contents = open(path).read() + makefile = make.loads(contents) + assert str(makefile) + '\n' == contents + nodes = list(makefile.recurse()) + assert len(nodes) == 10 + + assert nodes[0] == makefile[0] + assert nodes[1] == makefile[1] + assert nodes[2] == makefile[1].child[0] + assert nodes[3] == makefile[1].child[1] + assert nodes[4] == makefile[1].child[1].child[0] + assert nodes[5] == makefile[1].child[1].child[1] + assert nodes[6] == 
makefile[1].child[1].child[2] + assert nodes[7] == makefile[2] + assert nodes[8] == makefile[3] + assert nodes[9] == makefile[4] + + +def test_filter(): + path = os.path.join(TEST_DIR, 'Nested.mk') + contents = open(path).read() + makefile = make.loads(contents) + assert str(makefile) + '\n' == contents + assert len(makefile) == 5 + assert makefile[1].is_directive() + assert len(makefile[1].child) == 2 + + filtered = copy.deepcopy(makefile) + filtered.filter(lambda x: not x.is_dev()) + assert len(filtered) == 4 + assert filtered[0].is_block() + assert filtered[1].is_directive() + assert filtered[2].is_block() + assert filtered[3].is_comment() + + directive = filtered[1] + assert len(directive.child) == 1 + assert directive.child[0].is_block() + + assert filtered[3].title.lstrip().startswith('Other section.') + + +def test_split_directives(): + path = os.path.join(TEST_DIR, 'Nested.mk') + contents = open(path).read() + iterable = iter(contents.splitlines()) + blocks = make._split_directives(iterable)[0] + assert len(blocks) == 3 + + assert blocks[0].is_block() + assert blocks[0].startswith('# this is a special makefile') + + assert blocks[2].is_block() + assert blocks[2].lstrip().startswith('# Other section.') + + assert not blocks[1].is_comment() + assert blocks[1].is_directive() + assert blocks[1].has_block_list() + + directives = blocks[1].child + assert len(directives) == 3 + assert directives[0].is_block() + assert directives[1].is_directive() + assert directives[2].is_block() + + assert not directives[1].child.has_block_list() + assert directives[1].child.lstrip().startswith('benchmark_src_files') + + path = os.path.join(TEST_DIR, 'Grouped.mk') + contents = open(path).read() + iterable = iter(contents.splitlines()) + blocks = make._split_directives(iterable)[0] + assert len(blocks) == 3 + + assert blocks[0].is_block() + assert blocks[1].is_directive() + assert blocks[2].is_block() + + directives = blocks[1].child + assert len(directives) == 3 + assert 
directives[0].is_block() + assert directives[1].is_directive() + assert directives[2].is_block() + + +def test_split_comments(): + path = os.path.join(TEST_DIR, 'Android.mk') + contents = open(path).read() + blocks = make._split_comments(contents) + assert repr(blocks) == f'BlockList({contents})' + assert str(blocks) == contents + + assert not blocks[0].is_dev() + assert blocks[1].is_dev() + assert blocks[1].is_benchmark() + assert blocks[2].is_dev() + assert blocks[2].is_test() + assert blocks[4].title == 'Other section.' + + path = os.path.join(TEST_DIR, 'Empty.mk') + contents = open(path).read() + blocks = make._split_comments(contents) + assert len(blocks) == 1 + assert repr(blocks) == 'BlockList(\n)' + assert str(blocks) == '\n' + assert str(blocks[0]) == '\n' + + blocks = make._split_comments('') + assert len(blocks) == 0 + assert repr(blocks) == 'BlockList()' + assert str(blocks) == '' + + +def test_block(): + data = '''LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS)''' + block = make.Block(data) + assert repr(block) == f'Block({data})' + assert str(block) == data + assert block.is_block() + assert not block.is_block_list() + assert not block.is_comment() + assert not block.is_directive() + assert not block.is_dev() + + +def test_block_list(): + data1 = 'LOCAL_PATH := $(call my-dir)' + data2 = 'test_tags := tests' + blocks = make.BlockList([make.Block(data1), make.Block(data2)]) + assert repr(blocks) == f'BlockList({data1}\n{data2})' + assert str(blocks) == f'{data1}\n{data2}' + assert not blocks.is_block() + assert blocks.is_block_list() + assert not blocks.is_comment() + assert not blocks.is_directive() + assert not blocks.is_dev() + + +def test_comment_block(): + # single block + comment = '''# ----------------------------------------------------------------------------- +# Benchmarks. +# ----------------------------------------------------------------------------- +''' + title = 'Benchmarks.' 
+ data = 'test_tags := tests' + block = make.CommentBlock(comment, title, make.Block(data)) + assert repr(block) == f'CommentBlock({comment}\n{data})' + assert str(block) == f'{comment}\n{data}' + assert not block.is_block() + assert not block.is_block_list() + assert block.is_comment() + assert not block.is_directive() + assert block.is_dev() + + title = 'Other Section.' + blocks = make.BlockList([ + make.Block('LOCAL_PATH := $(call my-dir)'), + make.Block('test_tags := tests'), + ]) + block = make.CommentBlock(comment, title, blocks) + assert repr(block) == f'CommentBlock({comment}\n{str(blocks)})' + assert str(block) == f'{comment}\n{str(blocks)}' + assert not block.is_block() + assert not block.is_block_list() + assert block.is_comment() + assert not block.is_directive() + assert not block.is_dev() + + +def test_directive_block(): + start_inner = ' ifneq ($(USE_B),)' + end_inner = ' endif' + data_inner = ''' SOURCES=b.cc + else + SOURCES=a.cc''' + inner = make.DirectiveBlock(start_inner, end_inner, make.Block(data_inner)) + str_inner = f'{start_inner}\n{data_inner}\n{end_inner}' + assert repr(inner) == f'DirectiveBlock({str_inner})' + assert str(inner) == str_inner + assert not inner.is_block() + assert not inner.is_block_list() + assert not inner.is_comment() + assert inner.is_directive() + assert not inner.is_dev() + + data_else = '''else + SOURCES=c.cc''' + else_block = make.Block(data_else) + blocks = make.BlockList([inner, else_block]) + str_blocks = '\n'.join([str(i) for i in blocks]) + assert repr(blocks) == f'BlockList({str_blocks})' + assert str(blocks) == str_blocks + + start = 'ifneq ($(USE_A),)' + end = 'endif' + block = make.DirectiveBlock(start, end, blocks) + str_block = f'{start}\n{str_blocks}\n{end}' + assert repr(block) == f'DirectiveBlock({str_block})' + assert str(block) == str_block + assert not block.is_block() + assert not block.is_block_list() + assert not block.is_comment() + assert block.is_directive() + assert not block.is_dev() diff 
--git a/docker/android/tests/test_metadata.py b/docker/android/tests/test_metadata.py new file mode 100644 index 000000000..b12a38c3e --- /dev/null +++ b/docker/android/tests/test_metadata.py @@ -0,0 +1,24 @@ +import os +import sys + +import toml + +TEST_DIR = os.path.dirname(os.path.realpath(__file__)) +PROJECT_DIR = os.path.dirname(TEST_DIR) +sys.path.insert(0, PROJECT_DIR) + +import android + + +# ensure our pyproject and module metadata don't go out-of-date +def test_metadata(): + pyproject_path = open(os.path.join(PROJECT_DIR, 'pyproject.toml')) + pyproject = toml.load(pyproject_path) + project = pyproject['project'] + assert project['name'] == android.__name__ + assert project['version'] == android.__version__ + assert project['license']['text'] == android.__license__ + + version, dev = android.__version__.split('-') + major, minor, patch = [int(i) for i in version.split('.')] + assert (major, minor, patch, dev) == android.__version_info__ diff --git a/docker/android/tests/test_soong.py b/docker/android/tests/test_soong.py new file mode 100644 index 000000000..8e6accc3e --- /dev/null +++ b/docker/android/tests/test_soong.py @@ -0,0 +1,323 @@ +import copy +import os +import sys + +TEST_DIR = os.path.dirname(os.path.realpath(__file__)) +PROJECT_DIR = os.path.dirname(TEST_DIR) +sys.path.insert(0, PROJECT_DIR) + +from android import soong + + +def test(): + path = os.path.join(TEST_DIR, 'Android.bp') + contents = open(path).read() + lexer = soong.Lexer() + tokens = list(lexer.tokenize(contents)) + assert (tokens[0].type, tokens[0].value) == ('IDENT', 'sample_array') + assert (tokens[51].type, tokens[51].value) == ('IDENT', 'srcs') + assert (tokens[52].type, tokens[52].value) == ('COLON', ':') + assert (tokens[53].type, tokens[53].value) == ('LBRACKET', '[') + assert (tokens[54].type, tokens[54].value) == ('STRING', '"tree.cc"') + + parser = soong.Parser() + result = parser.parse(iter(tokens)) + assert len(result) == 7 + + assert result[0].is_assignment() + assert 
result[0].to_str() == '''sample_array = [ + "value1", + "value2", +]''' + + assert result[1].is_scope() + assert result[1].name == 'cc_defaults' + assert result[1].name.is_ident() + assert result[1].map['name'] == 'target' + assert result[1].map['tidy_checks'] == 'sample_array' + assert result[1].map.get('srcs') is None + assert result[1].map.is_map() + + assert result[2].is_scope() + assert result[2].name == 'cc_library_static' + assert result[2].map['name'] == 'static_lib' + + ast = soong.loads(contents) + assert ast == result + ast = soong.load(open(path)) + assert ast == result + lines = contents.splitlines() + assert ast.dumps() == '\n'.join(lines[1:5] + lines[10:]) + + assert ast[4].is_test() + assert ast[4].map.is_test() + + filtered = copy.deepcopy(ast) + filtered.filter(lambda x: not (x.is_scope() and x.is_dev())) + assert type(filtered) is soong.Ast + assert len(filtered) == 5 + assert filtered == ast[:4] + [ast[6]] + + map = filtered[1].map + assert 'cflags' in map + map.filter(lambda k, v: k != 'cflags') + assert 'cflags' not in map + assert len(map['array']) == 2 + map['array'].filter(lambda x: x != '-short') + assert len(map['array']) == 1 + + custom = filtered[4].map + assert 'whole_static_libs' in custom + custom['whole_static_libs'].filter(lambda x: x.str_op(lambda y: 'gtest' not in y.lower())) + assert custom['whole_static_libs'] == ['libz'] + + assert 'host_ldlibs' in custom + custom['host_ldlibs'].filter(lambda x: x.str_op(lambda y: 'gtest' not in y.lower())) + assert custom['host_ldlibs'] == [] + + +def test_addition(): + path = os.path.join(TEST_DIR, 'Addition.bp') + ast = soong.load(open(path)) + assert len(ast) == 27 + assert ast[0].is_assignment() + assert ast[1].is_binary_operator_assignment() + assert ast[2].is_assignment() + assert ast[3].is_binary_operator_assignment() + assert ast[4].is_assignment() + assert ast[5].is_binary_operator_assignment() + assert ast[6].is_scope() + assert ast[7].is_binary_operator_assignment() + assert 
ast[8].expr.is_binary_operator() + + assert ast[0].name == 'list' + assert ast[0].expr == ['value1'] + assert ast[1].name == 'list' + assert ast[1].op == '+=' + assert ast[1].expr == ['value2'] + + assert ast[8].expr.lhs == 'number' + assert ast[8].expr.op == '+' + assert ast[8].expr.rhs == 4 + assert ast[11].expr.lhs == 'scope' + assert ast[11].expr.op == '+' + assert ast[11].expr.rhs.is_map() + + assert ast[12].expr.lhs == 4 + assert ast[12].expr.op == '+' + assert ast[12].expr.rhs == 'number' + assert ast[15].expr.lhs.is_map() + assert ast[15].expr.op == '+' + assert ast[15].expr.rhs == 'scope' + + assert ast[16].expr.lhs == 4 + assert ast[16].expr.op == '+' + assert ast[16].expr.rhs == 1 + assert ast[19].expr.lhs == {} + assert ast[19].expr.op == '+' + assert ast[19].expr.rhs == {'name': 'target'} + + assert ast[20].expr.lhs.is_binary_operator() + assert ast[20].expr.lhs.lhs == 4 + assert ast[20].expr.lhs.rhs == 1 + assert ast[20].expr.op == '+' + assert ast[20].expr.rhs == 2 + + assert ast[26].name == 'files' + assert ast[26].expr.is_list() + assert len(ast[26].expr) == 3 + + assert ast[26].expr[0].lhs == 'home' + assert ast[26].expr[0].lhs.is_ident() + assert ast[26].expr[0].rhs == 'file.c' + assert ast[26].expr[0].rhs.is_string() + + assert ast[26].expr[1].lhs == 'test/' + assert ast[26].expr[1].lhs.is_string() + assert ast[26].expr[1].rhs == 'test' + assert ast[26].expr[1].rhs.is_ident() + + assert ast[26].expr[2].lhs == 'home' + assert ast[26].expr[2].lhs.is_ident() + assert ast[26].expr[2].rhs == 'test' + assert ast[26].expr[2].rhs.is_ident() + + # test a few binops, just in case + binop = ast[26].expr[1] + assert binop.str_op(lambda x: 'test' in x.lower()) + assert binop.lhs.str_op(lambda x: 'test' in x.lower()) + + +def test_empty(): + path = os.path.join(TEST_DIR, 'Empty.bp') + ast = soong.load(open(path)) + assert len(ast) == 0 + + +def test_ast(): + array = soong.List([soong.String('"value1"'), soong.String('"value2"')]) + assignment = 
soong.Assignment(soong.Ident('name'), array) + value = soong.MapValue('=', soong.String('"value"')) + map = soong.Map({soong.Ident('key'): value}) + scope = soong.Scope(soong.Ident('name'), map) + ast = soong.Ast([assignment, scope]) + assert repr(ast) == '''Ast(name = ["value1", "value2"] +name {key = "value"})''' + assert str(ast) == '''name = ["value1", "value2"] +name {key = "value"}''' + assert ast.to_str() == '''name = [ + "value1", + "value2", +] +name { + key = "value", +}''' + + +def test_assignment(): + array = soong.List([soong.String('"value1"'), soong.String('"value2"')]) + assignment = soong.Assignment(soong.Ident('name'), array) + assert repr(assignment) == 'Assignment(name = ["value1", "value2"])' + assert str(assignment) == 'name = ["value1", "value2"]' + assert assignment.to_str(pretty=False) == 'name = ["value1", "value2"]' + assert assignment.to_str() == '''name = [ + "value1", + "value2", +]''' + assert assignment.to_str(depth=1) == '''name = [ + "value1", + "value2", + ]''' + + +def test_binary_operator_assignment(): + ident = soong.Ident('name') + expr = soong.Integer('1') + assignment = soong.BinaryOperatorAssignment(ident, '+=', expr) + assert repr(assignment) == 'BinaryOperatorAssignment(name += 1)' + assert str(assignment) == 'name += 1' + assert assignment.to_str(pretty=False) == 'name += 1' + assert assignment.to_str() == 'name += 1' + + +def test_binary_operator(): + ident = soong.Ident('name') + expr = soong.Integer('1') + operator = soong.BinaryOperator(ident, '+', expr) + assert repr(operator) == 'BinaryOperator(name + 1)' + assert str(operator) == 'name + 1' + assert operator.to_str(pretty=False) == 'name + 1' + assert operator.to_str() == 'name + 1' + + +def test_scope(): + value = soong.MapValue(':', soong.String('"value"')) + map = soong.Map({soong.Ident('key'): value}) + scope = soong.Scope(soong.Ident('name'), map) + assert repr(scope) == 'Scope(name {key: "value"})' + assert str(scope) == 'name {key: "value"}' + assert 
scope.to_str(pretty=False) == 'name {key: "value"}' + assert scope.to_str() == '''name { + key: "value", +}''' + assert scope.to_str(depth=1) == '''name { + key: "value", + }''' + + +def test_map(): + value = soong.MapValue(':', soong.String('"value"')) + map = soong.Map({soong.Ident('key'): value}) + assert repr(map) == 'Map({key: "value"})' + assert str(map) == '{key: "value"}' + assert map.to_str(pretty=False) == '{key: "value"}' + assert map.to_str() == '''{ + key: "value", +}''' + assert map.to_str(depth=1) == '''{ + key: "value", + }''' + + map = soong.Map() + assert str(map) == '{}' + assert map.to_str() == '{}' + + +def test_recurse(): + path = os.path.join(TEST_DIR, 'Android.bp') + ast = soong.load(open(path)) + cc_defaults = ast[1] + assert cc_defaults.name == 'cc_defaults' + for (key, value, depth, parent) in cc_defaults.map.recurse(): + assert depth == 1 + + cc_test = ast[4] + assert cc_test.name == 'cc_test' + seen = [] + for (key, value, depth, parent) in cc_test.map.recurse(): + if depth > 1 and parent.is_map(): + seen.append(key) + assert seen == ['array', 'option'] + + +def test_list(): + sequence = soong.List([soong.String('"value1"'), soong.String('"value2"')]) + assert repr(sequence) == 'List(["value1", "value2"])' + assert str(sequence) == '["value1", "value2"]' + assert sequence.to_str(pretty=False) == '["value1", "value2"]' + assert sequence.to_str() == '''[ + "value1", + "value2", +]''' + assert sequence.to_str(depth=1) == '''[ + "value1", + "value2", + ]''' + + sequence = soong.List([soong.String('"value"')]) + assert repr(sequence) == 'List(["value"])' + assert str(sequence) == '["value"]' + assert sequence.to_str() == '["value"]' + + sequence = soong.List([]) + assert sequence.to_str() == '[]' + + +def test_map_value(): + value = soong.MapValue(':', soong.String('"value"')) + assert repr(value) == 'MapValue(: "value")' + assert str(value) == ': "value"' + assert value.to_str() == ': "value"' + + value = soong.MapValue('=', 
soong.String('"value"')) + assert repr(value) == 'MapValue( = "value")' + assert str(value) == ' = "value"' + assert value.to_str() == ' = "value"' + + +def test_ident(): + ident = soong.Ident('name') + assert repr(ident) == 'Ident(name)' + assert str(ident) == 'name' + assert ident.to_str() == 'name' + + +def test_string(): + string = soong.String('"value1"') + assert repr(string) == 'String("value1")' + assert str(string) == 'value1' + assert string.to_str() == '"value1"' + + +def test_integer(): + number = soong.Integer('3') + assert repr(number) == 'Integer(3)' + assert str(number) == '3' + assert number.to_str() == '3' + + +def test_bool(): + boolean = soong.Bool(True) + assert repr(boolean) == 'Bool(true)' + assert str(boolean) == 'true' + assert boolean.to_str() == 'true' diff --git a/docker/android/tox.ini b/docker/android/tox.ini new file mode 100644 index 000000000..c5886d373 --- /dev/null +++ b/docker/android/tox.ini @@ -0,0 +1,30 @@ +[tox] +envlist = py36,py3 +skip_missing_interpreters = True +isolated_build = True + +[testenv] +deps = + sly >= 0.4 + pytest + toml +commands = pytest -o cache_dir={toxworkdir}/.pytest_cache +passenv = + PYTHONDONTWRITEBYTECODE + PYTHONPYCACHEPREFIX + PYTHON_EGG_CACHE + +[flake8] +max-line-length = 100 +ignore = + # we use lambdas for short, one-line conditions and formatters + E731 + # opt-in to new behavior with operators after line breaks + W503 +per-file-ignores = + # the sly grammar uses variables before they are defined via a metaclass + # likewise, it uses redefinitions to extend parsers via SLR grammar + android/soong.py: F811 F821 + # need to add the project to the path for our tests and scripts + tests/*.py: E402 + scripts/*.py: E402 diff --git a/docker/validate-android-args.sh b/docker/validate-android-args.sh new file mode 100755 index 000000000..c573811c4 --- /dev/null +++ b/docker/validate-android-args.sh @@ -0,0 +1,213 @@ +#!/usr/bin/env bash +# Ensure the NDK, SDK, and Android versions match to exit +# 
before a build or even worse, a runner later fails. + +set -x +set -euo pipefail + +main() { + local arch="${1}" + + validate_ndk "${arch}" + validate_sdk + validate_system + validate_ndk_sdk "${arch}" + validate_sdk_system +} + +validate_ndk() { + local arch="${1}" + local ndk_version= + ndk_version=$(echo "${ANDROID_NDK}" | tr -dc '0-9') + + case "${arch}" in + mips|mips64) + if [[ "${ndk_version}" -ge 17 ]]; then + echo "Android NDKs r17+ removed support for MIPS architectures." 1>&2 + exit 1 + fi + ;; + *) + ;; + esac +} + +validate_sdk() { + local invalid_sdk_versions=(6 7 10 11 20 25) + # shellcheck disable=SC2076 + if [[ " ${invalid_sdk_versions[*]} " =~ " ${ANDROID_SDK} " ]]; then + echo "The Android SDK version ${ANDROID_SDK} is not provided by Android and therefore not supported." 1>&2 + exit 1 + fi +} + +validate_system() { + local major_version + major_version=$(echo "${ANDROID_VERSION}" | cut -d '.' -f 1) + if [[ "${major_version}" -lt 5 ]]; then + echo "Invalid Android version ${ANDROID_VERSION}, must be Android 5+." 1>&2 + exit 1 + fi +} + +validate_ndk_sdk() { + local arch="${1}" + local ndk_version= + ndk_version=$(echo "${ANDROID_NDK}" | tr -dc '0-9') + + # no minimum version for most 32-bit architectures + case "${arch}" in + arm|x86) + ;; + mips) + check_min_sdk_arch "${arch}" 9 + ;; + arm64|mips64|x86_64) + check_min_sdk_arch "${arch}" 21 + ;; + *) + echo "Unsupported architecture, got ${arch}."
1>&2 + exit 1 + ;; + esac + + case "${ndk_version}" in + 9) + check_sdk_range 3 19 + ;; + 10) + check_sdk_range 3 21 + ;; + 11) + check_sdk_range 3 24 + ;; + 12|13|14) + check_sdk_range 9 24 + ;; + 15) + check_sdk_range 14 26 + ;; + 16) + check_sdk_range 14 27 + ;; + 17) + check_sdk_range 14 28 + ;; + 18) + check_sdk_range 16 28 + ;; + 19) + check_sdk_range 16 28 + ;; + 20) + check_sdk_range 16 29 + ;; + 21|22) + check_sdk_range 21 30 + ;; + 23) + check_sdk_range 21 31 + ;; + 24) + check_sdk_range 21 32 + ;; + 25) + check_sdk_range 21 33 + ;; + *) + echo "Currently unsupported NDK version of ${ndk_version}." 1>&2 + echo "If you would like support, please file an issue." 1>&2 + exit 1 + ;; + esac +} + +check_min_sdk_arch() { + local arch="${1}" + local minimum="${2}" + if [[ "${ANDROID_SDK}" -lt "${minimum}" ]]; then + echo "Invalid SDK version ${ANDROID_SDK} for architecture ${arch}" 1>&2 + echo "The minimum supported SDK version is ${minimum}." 1>&2 + exit 1 + fi +} + +check_sdk_range() { + local lower="${1}" + local upper="${2}" + if [[ "${ANDROID_SDK}" -lt "${lower}" ]] || [[ "${ANDROID_SDK}" -gt "${upper}" ]]; then + echo "Invalid SDK version ${ANDROID_SDK} for NDK version ${ANDROID_NDK}" 1>&2 + echo "Valid SDK versions are ${lower}-${upper}." 1>&2 + exit 1 + fi +} + +validate_sdk_system() { + local major_version + local minor_version + major_version=$(echo "${ANDROID_VERSION}" | cut -d '.' -f 1) + minor_version=$(echo "${ANDROID_VERSION}" | cut -d '.' 
-f 2) + local system_version="${major_version}.${minor_version}" + case "${system_version}" in + 5.0) + check_sdk_system_equal 21 + ;; + 5.1) + check_sdk_system_equal 22 + ;; + 6.0) + check_sdk_system_equal 23 + ;; + 7.0) + check_sdk_system_equal 24 + ;; + 7.1) + check_sdk_system_equal 25 + ;; + 8.0) + check_sdk_system_equal 26 + ;; + 8.1) + check_sdk_system_equal 27 + ;; + 9.0) + check_sdk_system_equal 28 + ;; + 10.0) + check_sdk_system_equal 29 + ;; + 11.0) + check_sdk_system_equal 30 + ;; + 12.0) + check_sdk_system_equal 31 32 + ;; + 13.0) + check_sdk_system_equal 33 + ;; + *) + echo "Currently unsupported Android system version of ${system_version}." 1>&2 + echo "If you would like support, please file an issue." 1>&2 + exit 1 + ;; + esac +} + +check_sdk_system_equal() { + local expected=("$@") + local valid=0 + + for version in "${expected[@]}"; do + if [[ "${ANDROID_SDK}" == "${version}" ]]; then + valid=1 + fi + done + + if [[ "${valid}" -ne 1 ]]; then + # shellcheck disable=SC2145 + echo "Invalid SDK version, got ${ANDROID_SDK} and expected ${expected[@]}." 
1>&2 + exit 1 + fi +} + +main "${@}" diff --git a/src/rustc.rs b/src/rustc.rs index 86dc99b3b..c4795e7fc 100644 --- a/src/rustc.rs +++ b/src/rustc.rs @@ -265,7 +265,7 @@ impl QualifiedToolchain { Ok(_) | Err(_) if config.custom_toolchain() => { QualifiedToolchain::custom(toolchain, &sysroot, config, msg_info) } - Ok(_) => return Err(eyre::eyre!("toolchain is not fully qualified") + Ok(_) => Err(eyre::eyre!("toolchain is not fully qualified") .with_note(|| "cross expects the toolchain to be a rustup installed toolchain") .with_suggestion(|| { "if you're using a custom toolchain try setting `CROSS_CUSTOM_TOOLCHAIN=1` or install rust via rustup" diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 9b610f2b4..040352e47 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -17,6 +17,7 @@ clap = { version = "4.0", features = ["derive", "env"] } which = { version = "4.2", default_features = false } serde = { version = "1", features = ["derive"] } serde_json = "1.0" +shell-words = "1.1.0" toml = "0.5.9" once_cell = "1.15" semver = "1" diff --git a/xtask/src/hooks.rs b/xtask/src/hooks.rs index 2f5ec07cb..ededbbab5 100644 --- a/xtask/src/hooks.rs +++ b/xtask/src/hooks.rs @@ -1,12 +1,14 @@ use std::fs::File; use std::io::{BufRead, BufReader, ErrorKind}; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::process::Command; -use crate::util::{cargo, get_channel_prefer_nightly}; +use crate::util::{cargo, cargo_metadata, get_channel_prefer_nightly}; +use clap::builder::BoolishValueParser; use clap::Args; use cross::shell::MessageInfo; use cross::CommandExt; +use eyre::Context; const CARGO_FLAGS: &[&str] = &["--all-features", "--all-targets", "--workspace"]; @@ -24,6 +26,12 @@ pub struct Check { /// Run shellcheck on all files, not just staged files. #[clap(short, long)] all: bool, + /// Run Python linter checks. 
+ #[clap(short, long, env = "PYTHON", value_parser = BoolishValueParser::new())] + python: bool, + /// Flake8 command (either an executable or list of arguments) + #[clap(short, long, env = "FLAKE8")] + flake8: Option<String>, } #[derive(Args, Debug)] @@ -37,6 +45,12 @@ pub struct Test { /// Coloring: auto, always, never #[clap(long)] pub color: Option<String>, + /// Run Python test suite. + #[clap(short, long, env = "PYTHON", value_parser = BoolishValueParser::new())] + python: bool, + /// Tox command (either an executable or list of arguments) + #[clap(short, long, env = "TOX")] + tox: Option<String>, } fn cargo_fmt(msg_info: &mut MessageInfo, channel: Option<&str>) -> cross::Result<()> { @@ -122,27 +136,125 @@ fn shellcheck(all: bool, msg_info: &mut MessageInfo) -> cross::Result<()> { Ok(()) } +fn parse_command(value: &str) -> cross::Result<Vec<String>> { + shell_words::split(value).wrap_err_with(|| format!("could not parse command of {}", value)) +} + +fn python_dir(metadata: &cross::CargoMetadata) -> PathBuf { + metadata.workspace_root.join("docker").join("android") +} + +fn python_env(cmd: &mut Command, metadata: &cross::CargoMetadata) { + cmd.env("PYTHONDONTWRITEBYTECODE", "1"); + cmd.env( + "PYTHONPYCACHEPREFIX", + metadata.target_directory.join("__pycache__"), + ); +} + +fn python_lint(flake8: Option<&str>, msg_info: &mut MessageInfo) -> cross::Result<()> { + let metadata = cargo_metadata(msg_info)?; + let args = flake8 + .map(parse_command) + .unwrap_or_else(|| Ok(vec!["flake8".to_owned()]))?; + let mut cmd = Command::new( + args.get(0) + .ok_or_else(|| eyre::eyre!("empty string provided for flake8 command"))?, + ); + cmd.args(&args[1..]); + python_env(&mut cmd, &metadata); + if msg_info.is_verbose() { + cmd.arg("--verbose"); + } + cmd.current_dir(python_dir(&metadata)); + cmd.run(msg_info, false)?; + + Ok(()) +} + +fn python_test(tox: Option<&str>, msg_info: &mut MessageInfo) -> cross::Result<()> { + let metadata = cargo_metadata(msg_info)?; + let args = tox + .map(parse_command) + 
.unwrap_or_else(|| Ok(vec!["tox".to_owned()]))?; + let mut cmd = Command::new( + args.get(0) + .ok_or_else(|| eyre::eyre!("empty string provided for tox command"))?, + ); + cmd.args(&args[1..]); + cmd.args(["-e", "py3"]); + python_env(&mut cmd, &metadata); + cmd.arg("--workdir"); + cmd.arg(&metadata.target_directory); + if msg_info.is_verbose() { + cmd.arg("--verbose"); + } + cmd.current_dir(python_dir(&metadata)); + cmd.run(msg_info, false)?; + + Ok(()) +} + pub fn check( - Check { all, .. }: Check, + Check { + all, + python, + flake8, + .. + }: Check, toolchain: Option<&str>, msg_info: &mut MessageInfo, ) -> cross::Result<()> { - msg_info.info("Running rustfmt, clippy, and shellcheck checks.")?; + let mut checks = vec!["rustfmt", "clippy", "shellcheck"]; + if python { + checks.push("python"); + } + let join_to = checks.len() - 1; + let mut joined = checks[0..join_to].join(", "); + if join_to > 1 { + joined.push(','); + } + msg_info.info(format_args!( + "Running {joined} and {} checks.", + checks[join_to] + ))?; let channel = get_channel_prefer_nightly(msg_info, toolchain)?; cargo_fmt(msg_info, channel)?; cargo_clippy(msg_info, channel)?; shellcheck(all, msg_info)?; + if python { + python_lint(flake8.as_deref(), msg_info)?; + } Ok(()) } -pub fn test(toolchain: Option<&str>, msg_info: &mut MessageInfo) -> cross::Result<()> { - msg_info.info("Running cargo fmt and tests")?; +pub fn test( + Test { python, tox, .. 
}: Test, + toolchain: Option<&str>, + msg_info: &mut MessageInfo, +) -> cross::Result<()> { + let mut tests = vec!["rustfmt", "unit"]; + if python { + tests.push("python"); + } + let join_to = tests.len() - 1; + let mut joined = tests[0..join_to].join(", "); + if join_to > 1 { + joined.push(','); + } + msg_info.info(format_args!( + "Running {joined} and {} tests.", + tests[join_to] + ))?; let channel = get_channel_prefer_nightly(msg_info, toolchain)?; cargo_fmt(msg_info, channel)?; cargo_test(msg_info, channel)?; + if python { + python_test(tox.as_deref(), msg_info)?; + } Ok(()) } diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 35a306726..41779c61c 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -112,7 +112,7 @@ pub fn main() -> cross::Result<()> { } Commands::Test(args) => { let mut msg_info = get_msg_info!(args, args.verbose)?; - hooks::test(cli.toolchain.as_deref(), &mut msg_info)?; + hooks::test(args, cli.toolchain.as_deref(), &mut msg_info)?; } Commands::CiJob(args) => { let metadata = cargo_metadata(&mut Verbosity::Verbose(2).into())?;