From 5d401ec6240f849701d3d7827dd3d7a7e97ddbc1 Mon Sep 17 00:00:00 2001 From: phlax Date: Tue, 22 Sep 2020 18:58:13 +0100 Subject: [PATCH] Shellcheck: ci/ (2) (#13169) Signed-off-by: Ryan Northey --- ci/build_setup.sh | 48 +++++-- ci/do_ci.sh | 142 ++++++++++--------- ci/do_circle_ci.sh | 16 ++- ci/docker-entrypoint.sh | 2 + ci/docker_ci.sh | 33 +++-- ci/docker_rebuild_google-vrp.sh | 3 +- ci/envoy_build_sha.sh | 4 +- ci/filter_example_setup.sh | 8 +- ci/flaky_test/run_process_xml.sh | 5 +- ci/mac_ci_steps.sh | 20 ++- ci/run_clang_tidy.sh | 26 ++-- ci/run_envoy_docker.sh | 56 ++++++-- ci/run_envoy_docker_windows.sh | 37 +++-- ci/setup_cache.sh | 6 +- ci/upload_gcs_artifact.sh | 2 +- ci/windows_ci_steps.sh | 28 ++-- tools/code_format/check_shellcheck_format.sh | 2 +- 17 files changed, 277 insertions(+), 161 deletions(-) diff --git a/ci/build_setup.sh b/ci/build_setup.sh index ab8705edccce..b7ddcd90cf1f 100755 --- a/ci/build_setup.sh +++ b/ci/build_setup.sh @@ -6,17 +6,25 @@ set -e export PPROF_PATH=/thirdparty_build/bin/pprof -[ -z "${NUM_CPUS}" ] && NUM_CPUS=`grep -c ^processor /proc/cpuinfo` +[ -z "${NUM_CPUS}" ] && NUM_CPUS=$(grep -c ^processor /proc/cpuinfo) [ -z "${ENVOY_SRCDIR}" ] && export ENVOY_SRCDIR=/source [ -z "${ENVOY_BUILD_TARGET}" ] && export ENVOY_BUILD_TARGET=//source/exe:envoy-static [ -z "${ENVOY_BUILD_DEBUG_INFORMATION}" ] && export ENVOY_BUILD_DEBUG_INFORMATION=//source/exe:envoy-static.dwp -[ -z "${ENVOY_BUILD_ARCH}" ] && export ENVOY_BUILD_ARCH=$(uname -m) +[ -z "${ENVOY_BUILD_ARCH}" ] && { + ENVOY_BUILD_ARCH=$(uname -m) + export ENVOY_BUILD_ARCH +} + +read -ra BAZEL_BUILD_EXTRA_OPTIONS <<< "${BAZEL_BUILD_EXTRA_OPTIONS:-}" +read -ra BAZEL_EXTRA_TEST_OPTIONS <<< "${BAZEL_EXTRA_TEST_OPTIONS:-}" +read -ra BAZEL_OPTIONS <<< "${BAZEL_OPTIONS:-}" + echo "ENVOY_SRCDIR=${ENVOY_SRCDIR}" echo "ENVOY_BUILD_TARGET=${ENVOY_BUILD_TARGET}" echo "ENVOY_BUILD_ARCH=${ENVOY_BUILD_ARCH}" function setup_gcc_toolchain() { - if [[ ! -z "${ENVOY_STDLIB}" && "${ENVOY_STDLIB}" != "libstdc++" ]]; then + if [[ -n "${ENVOY_STDLIB}" && "${ENVOY_STDLIB}" != "libstdc++" ]]; then echo "gcc toolchain doesn't support ${ENVOY_STDLIB}." 
exit 1 fi @@ -26,7 +34,7 @@ function setup_gcc_toolchain() { export BAZEL_COMPILER=gcc echo "$CC/$CXX toolchain configured" else - export BAZEL_BUILD_OPTIONS="--config=remote-gcc ${BAZEL_BUILD_OPTIONS}" + BAZEL_BUILD_OPTIONS=("--config=remote-gcc" "${BAZEL_BUILD_OPTIONS[@]}") fi } @@ -34,15 +42,15 @@ function setup_clang_toolchain() { ENVOY_STDLIB="${ENVOY_STDLIB:-libc++}" if [[ -z "${ENVOY_RBE}" ]]; then if [[ "${ENVOY_STDLIB}" == "libc++" ]]; then - export BAZEL_BUILD_OPTIONS="--config=libc++ ${BAZEL_BUILD_OPTIONS}" + BAZEL_BUILD_OPTIONS=("--config=libc++" "${BAZEL_BUILD_OPTIONS[@]}") else - export BAZEL_BUILD_OPTIONS="--config=clang ${BAZEL_BUILD_OPTIONS}" + BAZEL_BUILD_OPTIONS=("--config=clang" "${BAZEL_BUILD_OPTIONS[@]}") fi else if [[ "${ENVOY_STDLIB}" == "libc++" ]]; then - export BAZEL_BUILD_OPTIONS="--config=remote-clang-libc++ ${BAZEL_BUILD_OPTIONS}" + BAZEL_BUILD_OPTIONS=("--config=remote-clang-libc++" "${BAZEL_BUILD_OPTIONS[@]}") else - export BAZEL_BUILD_OPTIONS="--config=remote-clang ${BAZEL_BUILD_OPTIONS}" + BAZEL_BUILD_OPTIONS=("--config=remote-clang" "${BAZEL_BUILD_OPTIONS[@]}") fi fi echo "clang toolchain with ${ENVOY_STDLIB} configured" @@ -61,7 +69,7 @@ export PATH=/opt/llvm/bin:${PATH} export CLANG_FORMAT="${CLANG_FORMAT:-clang-format}" if [[ -f "/etc/redhat-release" ]]; then - export BAZEL_BUILD_EXTRA_OPTIONS+="--copt=-DENVOY_IGNORE_GLIBCXX_USE_CXX11_ABI_ERROR=1" + BAZEL_BUILD_EXTRA_OPTIONS+=("--copt=-DENVOY_IGNORE_GLIBCXX_USE_CXX11_ABI_ERROR=1") fi function cleanup() { @@ -76,16 +84,25 @@ trap cleanup EXIT export LLVM_ROOT="${LLVM_ROOT:-/opt/llvm}" "$(dirname "$0")"/../bazel/setup_clang.sh "${LLVM_ROOT}" -[[ "${BUILD_REASON}" != "PullRequest" ]] && BAZEL_EXTRA_TEST_OPTIONS+=" --nocache_test_results" +[[ "${BUILD_REASON}" != "PullRequest" ]] && BAZEL_EXTRA_TEST_OPTIONS+=("--nocache_test_results") export BAZEL_QUERY_OPTIONS="${BAZEL_OPTIONS}" # Use https://docs.bazel.build/versions/master/command-line-reference.html#flag--experimental_repository_cache_hardlinks # to save disk space. -export BAZEL_BUILD_OPTIONS=" ${BAZEL_OPTIONS} --verbose_failures --show_task_finish --experimental_generate_json_trace_profile \ - --test_output=errors --repository_cache=${BUILD_DIR}/repository_cache --experimental_repository_cache_hardlinks \ - ${BAZEL_BUILD_EXTRA_OPTIONS} ${BAZEL_EXTRA_TEST_OPTIONS}" - -[[ "${ENVOY_BUILD_ARCH}" == "aarch64" ]] && BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS} --flaky_test_attempts=2 --test_env=HEAPCHECK=" +BAZEL_BUILD_OPTIONS=( + "${BAZEL_OPTIONS[@]}" + "--verbose_failures" + "--show_task_finish" + "--experimental_generate_json_trace_profile" + "--test_output=errors" + "--repository_cache=${BUILD_DIR}/repository_cache" + "--experimental_repository_cache_hardlinks" + "${BAZEL_BUILD_EXTRA_OPTIONS[@]}" + "${BAZEL_EXTRA_TEST_OPTIONS[@]}") + +[[ "${ENVOY_BUILD_ARCH}" == "aarch64" ]] && BAZEL_BUILD_OPTIONS+=( + "--flaky_test_attempts=2" + "--test_env=HEAPCHECK=") [[ "${BAZEL_EXPUNGE}" == "1" ]] && bazel clean --expunge @@ -119,6 +136,7 @@ export BUILDOZER_BIN="${BUILDOZER_BIN:-/usr/local/bin/buildozer}" # source tree is different than the current workspace, the setup step is # skipped. if [[ "$1" != "-nofetch" && "${ENVOY_SRCDIR}" == "$(bazel info workspace)" ]]; then + # shellcheck source=ci/filter_example_setup.sh . "$(dirname "$0")"/filter_example_setup.sh else echo "Skip setting up Envoy Filter Example." 
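
Note on the array conversions in build_setup.sh above: CI hands BAZEL_BUILD_EXTRA_OPTIONS and the related variables to the script as single space-separated strings, so the script now splits them once with `read -ra` and thereafter expands them only as quoted arrays. A minimal sketch of the pattern (the option values below are placeholders, not taken from this patch):

    # a single string arrives from the CI environment
    BAZEL_BUILD_EXTRA_OPTIONS="--jobs=8 --announce_rc"
    # split on whitespace into one array element per option
    read -ra extra_options <<< "${BAZEL_BUILD_EXTRA_OPTIONS:-}"
    # quoted expansion forwards each element as exactly one argument,
    # with no re-splitting or globbing of the individual options
    bazel build "${extra_options[@]}" //source/exe:envoy-static

The round trip assumes no single option itself contains whitespace, which holds for the flags used in these scripts.
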
diff --git a/ci/do_ci.sh b/ci/do_ci.sh index 2f5f183ea937..0ab1bd0f81f2 100755 --- a/ci/do_ci.sh +++ b/ci/do_ci.sh @@ -14,7 +14,9 @@ fi SRCDIR="${PWD}" NO_BUILD_SETUP="${NO_BUILD_SETUP:-}" if [[ -z "$NO_BUILD_SETUP" ]]; then + # shellcheck source=ci/setup_cache.sh . "$(dirname "$0")"/setup_cache.sh + # shellcheck source=ci/build_setup.sh . "$(dirname "$0")"/build_setup.sh $build_setup_args fi cd "${SRCDIR}" @@ -38,21 +40,21 @@ function collect_build_profile() { } function bazel_with_collection() { + local failed_logs declare -r BAZEL_OUTPUT="${ENVOY_SRCDIR}"/bazel.output.txt - bazel $* | tee "${BAZEL_OUTPUT}" + bazel "$@" | tee "${BAZEL_OUTPUT}" declare BAZEL_STATUS="${PIPESTATUS[0]}" if [ "${BAZEL_STATUS}" != "0" ] then - declare -r FAILED_TEST_LOGS="$(grep " /build.*test.log" "${BAZEL_OUTPUT}" | sed -e 's/ \/build.*\/testlogs\/\(.*\)/\1/')" pushd bazel-testlogs - for f in ${FAILED_TEST_LOGS} - do - cp --parents -f $f "${ENVOY_FAILED_TEST_LOGS}" - done + failed_logs=$(grep " /build.*test.log" "${BAZEL_OUTPUT}" | sed -e 's/ \/build.*\/testlogs\/\(.*\)/\1/') + while read -r f; do + cp --parents -f "$f" "${ENVOY_FAILED_TEST_LOGS}" + done <<< "$failed_logs" popd exit "${BAZEL_STATUS}" fi - collect_build_profile $1 + collect_build_profile "$1" run_process_test_result } @@ -112,9 +114,9 @@ function bazel_binary_build() { ENVOY_BIN=$(echo "${ENVOY_BUILD_TARGET}" | sed -e 's#^@\([^/]*\)/#external/\1#;s#^//##;s#:#/#') # This is a workaround for https://github.com/bazelbuild/bazel/issues/11834 - [[ ! -z "${ENVOY_RBE}" ]] && rm -rf bazel-bin/"${ENVOY_BIN}"* + [[ -n "${ENVOY_RBE}" ]] && rm -rf bazel-bin/"${ENVOY_BIN}"* - bazel build ${BAZEL_BUILD_OPTIONS} -c "${COMPILE_TYPE}" "${ENVOY_BUILD_TARGET}" ${CONFIG_ARGS} + bazel build "${BAZEL_BUILD_OPTIONS[@]}" -c "${COMPILE_TYPE}" "${ENVOY_BUILD_TARGET}" ${CONFIG_ARGS} collect_build_profile "${BINARY_TYPE}"_build # Copy the built envoy binary somewhere that we can access outside of the @@ -124,7 +126,7 @@ function bazel_binary_build() { if [[ "${COMPILE_TYPE}" == "dbg" || "${COMPILE_TYPE}" == "opt" ]]; then # Generate dwp file for debugging since we used split DWARF to reduce binary # size - bazel build ${BAZEL_BUILD_OPTIONS} -c "${COMPILE_TYPE}" "${ENVOY_BUILD_DEBUG_INFORMATION}" ${CONFIG_ARGS} + bazel build "${BAZEL_BUILD_OPTIONS[@]}" -c "${COMPILE_TYPE}" "${ENVOY_BUILD_DEBUG_INFORMATION}" ${CONFIG_ARGS} # Copy the debug information cp_debug_info_for_outside_access envoy fi @@ -142,12 +144,12 @@ CI_TARGET=$1 shift if [[ $# -ge 1 ]]; then - COVERAGE_TEST_TARGETS=$* - TEST_TARGETS="$COVERAGE_TEST_TARGETS" + COVERAGE_TEST_TARGETS=("$@") + TEST_TARGETS=("$@") else # Coverage test will add QUICHE tests by itself. - COVERAGE_TEST_TARGETS=//test/... - TEST_TARGETS="${COVERAGE_TEST_TARGETS} @com_googlesource_quiche//:ci_tests" + COVERAGE_TEST_TARGETS=("//test/...") + TEST_TARGETS=("${COVERAGE_TEST_TARGETS[@]}" "@com_googlesource_quiche//:ci_tests") fi if [[ "$CI_TARGET" == "bazel.release" ]]; then @@ -157,11 +159,11 @@ if [[ "$CI_TARGET" == "bazel.release" ]]; then # toolchain is kept consistent. This ifdef is checked in # test/common/stats/stat_test_utility.cc when computing # Stats::TestUtil::MemoryTest::mode(). 
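
Note on the bazel_with_collection() change above: `bazel $*` becomes `bazel "$@"`, and the pipeline's real exit code is read from PIPESTATUS because `| tee` would otherwise mask a bazel failure. The shape of that idiom, roughly (log name illustrative only):

    # "$@" forwards every argument verbatim; $* would re-split on spaces
    bazel "$@" | tee "${BAZEL_OUTPUT}"
    status="${PIPESTATUS[0]}"   # exit code of bazel, not of tee
    if [[ "${status}" != "0" ]]; then
        exit "${status}"
    fi
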
- [[ "${ENVOY_BUILD_ARCH}" == "x86_64" ]] && BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS} --test_env=ENVOY_MEMORY_TEST_EXACT=true" + [[ "${ENVOY_BUILD_ARCH}" == "x86_64" ]] && BAZEL_BUILD_OPTIONS+=("--test_env=ENVOY_MEMORY_TEST_EXACT=true") setup_clang_toolchain - echo "Testing ${TEST_TARGETS} with options: ${BAZEL_BUILD_OPTIONS}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} -c opt ${TEST_TARGETS} + echo "Testing ${TEST_TARGETS[*]} with options: ${BAZEL_BUILD_OPTIONS[*]}" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" -c opt "${TEST_TARGETS[@]}" echo "bazel release build with tests..." bazel_binary_build release @@ -178,26 +180,26 @@ elif [[ "$CI_TARGET" == "bazel.sizeopt.server_only" ]]; then exit 0 elif [[ "$CI_TARGET" == "bazel.sizeopt" ]]; then setup_clang_toolchain - echo "Testing ${TEST_TARGETS}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} --config=sizeopt ${TEST_TARGETS} + echo "Testing ${TEST_TARGETS[*]}" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" --config=sizeopt "${TEST_TARGETS[@]}" echo "bazel size optimized build with tests..." bazel_binary_build sizeopt exit 0 elif [[ "$CI_TARGET" == "bazel.gcc" ]]; then - BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS} --test_env=HEAPCHECK=" + BAZEL_BUILD_OPTIONS+=("--test_env=HEAPCHECK=") setup_gcc_toolchain - echo "Testing ${TEST_TARGETS}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} -c opt ${TEST_TARGETS} + echo "Testing ${TEST_TARGETS[*]}" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" -c opt "${TEST_TARGETS[@]}" echo "bazel release build with gcc..." bazel_binary_build release exit 0 elif [[ "$CI_TARGET" == "bazel.debug" ]]; then setup_clang_toolchain - echo "Testing ${TEST_TARGETS}" - bazel test ${BAZEL_BUILD_OPTIONS} -c dbg ${TEST_TARGETS} + echo "Testing ${TEST_TARGETS[*]}" + bazel test "${BAZEL_BUILD_OPTIONS[@]}" -c dbg "${TEST_TARGETS[@]}" echo "bazel debug build with tests..." bazel_binary_build debug @@ -209,14 +211,14 @@ elif [[ "$CI_TARGET" == "bazel.debug.server_only" ]]; then exit 0 elif [[ "$CI_TARGET" == "bazel.asan" ]]; then setup_clang_toolchain - BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS} -c dbg --config=clang-asan --build_tests_only" + BAZEL_BUILD_OPTIONS+=(-c dbg "--config=clang-asan" "--build_tests_only") echo "bazel ASAN/UBSAN debug build with tests" - echo "Building and testing envoy tests ${TEST_TARGETS}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} ${TEST_TARGETS} + echo "Building and testing envoy tests ${TEST_TARGETS[*]}" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" "${TEST_TARGETS[@]}" if [ "${ENVOY_BUILD_FILTER_EXAMPLE}" == "1" ]; then echo "Building and testing envoy-filter-example tests..." pushd "${ENVOY_FILTER_EXAMPLE_SRCDIR}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} ${ENVOY_FILTER_EXAMPLE_TESTS} + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" "${ENVOY_FILTER_EXAMPLE_TESTS[@]}" popd fi @@ -225,7 +227,7 @@ elif [[ "$CI_TARGET" == "bazel.asan" ]]; then # works. This requires that we set TAP_PATH. We do this under bazel.asan to # ensure a debug build in CI. echo "Validating integration test traffic tapping..." 
- bazel_with_collection test ${BAZEL_BUILD_OPTIONS} \ + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" \ --run_under=@envoy//bazel/test:verify_tap_test.sh \ //test/extensions/transport_sockets/tls/integration:ssl_integration_test fi @@ -233,12 +235,12 @@ elif [[ "$CI_TARGET" == "bazel.asan" ]]; then elif [[ "$CI_TARGET" == "bazel.tsan" ]]; then setup_clang_toolchain echo "bazel TSAN debug build with tests" - echo "Building and testing envoy tests ${TEST_TARGETS}" - bazel_with_collection test --config=rbe-toolchain-tsan ${BAZEL_BUILD_OPTIONS} -c dbg --build_tests_only ${TEST_TARGETS} + echo "Building and testing envoy tests ${TEST_TARGETS[*]}" + bazel_with_collection test --config=rbe-toolchain-tsan "${BAZEL_BUILD_OPTIONS[@]}" -c dbg --build_tests_only "${TEST_TARGETS[@]}" if [ "${ENVOY_BUILD_FILTER_EXAMPLE}" == "1" ]; then echo "Building and testing envoy-filter-example tests..." pushd "${ENVOY_FILTER_EXAMPLE_SRCDIR}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} -c dbg --config=clang-tsan ${ENVOY_FILTER_EXAMPLE_TESTS} + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" -c dbg --config=clang-tsan "${ENVOY_FILTER_EXAMPLE_TESTS[@]}" popd fi exit 0 @@ -246,10 +248,10 @@ elif [[ "$CI_TARGET" == "bazel.msan" ]]; then ENVOY_STDLIB=libc++ setup_clang_toolchain # rbe-toolchain-msan must comes as first to win library link order. - BAZEL_BUILD_OPTIONS="--config=rbe-toolchain-msan ${BAZEL_BUILD_OPTIONS} -c dbg --build_tests_only" + BAZEL_BUILD_OPTIONS=("--config=rbe-toolchain-msan" "${BAZEL_BUILD_OPTIONS[@]}" "-c dbg" "--build_tests_only") echo "bazel MSAN debug build with tests" - echo "Building and testing envoy tests ${TEST_TARGETS}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} ${TEST_TARGETS} + echo "Building and testing envoy tests ${TEST_TARGETS[*]}" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" "${TEST_TARGETS[@]}" exit 0 elif [[ "$CI_TARGET" == "bazel.dev" ]]; then setup_clang_toolchain @@ -258,8 +260,8 @@ elif [[ "$CI_TARGET" == "bazel.dev" ]]; then echo "Building..." bazel_binary_build fastbuild - echo "Building and testing ${TEST_TARGETS}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} -c fastbuild ${TEST_TARGETS} + echo "Building and testing ${TEST_TARGETS[*]}" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" -c fastbuild "${TEST_TARGETS[@]}" # TODO(foreseeable): consolidate this and the API tool tests in a dedicated target. bazel_with_collection //tools/envoy_headersplit:headersplit_test --spawn_strategy=local bazel_with_collection //tools/envoy_headersplit:replace_includes_test --spawn_strategy=local @@ -268,41 +270,41 @@ elif [[ "$CI_TARGET" == "bazel.compile_time_options" ]]; then # Right now, none of the available compile-time options conflict with each other. If this # changes, this build type may need to be broken up. # TODO(mpwarres): remove quiche=enabled once QUICHE is built by default. 
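
Note on the COMPILE_TIME_OPTIONS hunk just below: each `--define` and its value become separate array elements on purpose, because every element is handed to bazel as exactly one argv word. A combined element such as "--define signal_trace=disabled" would reach bazel as a single word rather than a flag followed by its value. Sketch of the difference:

    good=("--define" "signal_trace=disabled")
    bad=("--define signal_trace=disabled")
    printf '<%s>\n' "${good[@]}"   # two words: <--define> <signal_trace=disabled>
    printf '<%s>\n' "${bad[@]}"    # one word:  <--define signal_trace=disabled>
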
- COMPILE_TIME_OPTIONS="\ - --define signal_trace=disabled \ - --define hot_restart=disabled \ - --define google_grpc=disabled \ - --define boringssl=fips \ - --define log_debug_assert_in_release=enabled \ - --define quiche=enabled \ - --define path_normalization_by_default=true \ - --define deprecated_features=disabled \ - --define use_new_codecs_in_integration_tests=true \ - --define zlib=ng \ - " + COMPILE_TIME_OPTIONS=( + "--define" "signal_trace=disabled" + "--define" "hot_restart=disabled" + "--define" "google_grpc=disabled" + "--define" "boringssl=fips" + "--define" "log_debug_assert_in_release=enabled" + "--define" "quiche=enabled" + "--define" "path_normalization_by_default=true" + "--define" "deprecated_features=disabled" + "--define" "use_new_codecs_in_integration_tests=true" + "--define" "zlib=ng") + ENVOY_STDLIB="${ENVOY_STDLIB:-libstdc++}" setup_clang_toolchain # This doesn't go into CI but is available for developer convenience. echo "bazel with different compiletime options build with tests..." - if [[ "${TEST_TARGETS}" == "//test/..." ]]; then + if [[ "${TEST_TARGETS[*]}" == "//test/..." ]]; then cd "${ENVOY_FILTER_EXAMPLE_SRCDIR}" - TEST_TARGETS="@envoy//test/..." + TEST_TARGETS=("@envoy//test/...") fi # Building all the dependencies from scratch to link them against libc++. - echo "Building and testing ${TEST_TARGETS}" - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} ${COMPILE_TIME_OPTIONS} -c dbg ${TEST_TARGETS} --test_tag_filters=-nofips --build_tests_only + echo "Building and testing ${TEST_TARGETS[*]}" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" "${COMPILE_TIME_OPTIONS[@]}" -c dbg "${TEST_TARGETS[@]}" --test_tag_filters=-nofips --build_tests_only # Legacy codecs "--define legacy_codecs_in_integration_tests=true" should also be tested in # integration tests with asan. - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} ${COMPILE_TIME_OPTIONS} -c dbg @envoy//test/integration/... --config=clang-asan --build_tests_only + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" "${COMPILE_TIME_OPTIONS[@]}" -c dbg @envoy//test/integration/... --config=clang-asan --build_tests_only # "--define log_debug_assert_in_release=enabled" must be tested with a release build, so run only # these tests under "-c opt" to save time in CI. - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} ${COMPILE_TIME_OPTIONS} -c opt @envoy//test/common/common:assert_test @envoy//test/server:server_test + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" "${COMPILE_TIME_OPTIONS[@]}" -c opt @envoy//test/common/common:assert_test @envoy//test/server:server_test echo "Building binary..." - bazel build ${BAZEL_BUILD_OPTIONS} ${COMPILE_TIME_OPTIONS} -c dbg @envoy//source/exe:envoy-static --build_tag_filters=-nofips + bazel build "${BAZEL_BUILD_OPTIONS[@]}" "${COMPILE_TIME_OPTIONS[@]}" -c dbg @envoy//source/exe:envoy-static --build_tag_filters=-nofips collect_build_profile build exit 0 elif [[ "$CI_TARGET" == "bazel.api" ]]; then @@ -310,30 +312,30 @@ elif [[ "$CI_TARGET" == "bazel.api" ]]; then echo "Validating API structure..." ./tools/api/validate_structure.py echo "Building API..." - bazel build ${BAZEL_BUILD_OPTIONS} -c fastbuild @envoy_api_canonical//envoy/... + bazel build "${BAZEL_BUILD_OPTIONS[@]}" -c fastbuild @envoy_api_canonical//envoy/... echo "Testing API..." - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} -c fastbuild @envoy_api_canonical//test/... @envoy_api_canonical//tools/... 
\ + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" -c fastbuild @envoy_api_canonical//test/... @envoy_api_canonical//tools/... \ @envoy_api_canonical//tools:tap2pcap_test echo "Testing API boosting (unit tests)..." - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} -c fastbuild @envoy_dev//clang_tools/api_booster/... + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" -c fastbuild @envoy_dev//clang_tools/api_booster/... echo "Testing API boosting (golden C++ tests)..." # We use custom BAZEL_BUILD_OPTIONS here; the API booster isn't capable of working with libc++ yet. LLVM_CONFIG="${LLVM_ROOT}"/bin/llvm-config BAZEL_BUILD_OPTIONS="--config=clang" python3.8 ./tools/api_boost/api_boost_test.py exit 0 elif [[ "$CI_TARGET" == "bazel.coverage" || "$CI_TARGET" == "bazel.fuzz_coverage" ]]; then setup_clang_toolchain - echo "${CI_TARGET} build with tests ${COVERAGE_TEST_TARGETS}" + echo "${CI_TARGET} build with tests ${COVERAGE_TEST_TARGETS[*]}" [[ "$CI_TARGET" == "bazel.fuzz_coverage" ]] && export FUZZ_COVERAGE=true - test/run_envoy_bazel_coverage.sh ${COVERAGE_TEST_TARGETS} + BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS[*]}" test/run_envoy_bazel_coverage.sh "${COVERAGE_TEST_TARGETS[@]}" collect_build_profile coverage exit 0 elif [[ "$CI_TARGET" == "bazel.clang_tidy" ]]; then # clang-tidy will warn on standard library issues with libc++ ENVOY_STDLIB="libstdc++" setup_clang_toolchain - NUM_CPUS=$NUM_CPUS ci/run_clang_tidy.sh "$@" + BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS[*]}" NUM_CPUS=$NUM_CPUS ci/run_clang_tidy.sh "$@" exit 0 elif [[ "$CI_TARGET" == "bazel.coverity" ]]; then # Coverity Scan version 2017.07 fails to analyze the entirely of the Envoy @@ -343,7 +345,7 @@ elif [[ "$CI_TARGET" == "bazel.coverity" ]]; then setup_gcc_toolchain echo "bazel Coverity Scan build" echo "Building..." - /build/cov-analysis/bin/cov-build --dir "${ENVOY_BUILD_DIR}"/cov-int bazel build --action_env=LD_PRELOAD ${BAZEL_BUILD_OPTIONS} \ + /build/cov-analysis/bin/cov-build --dir "${ENVOY_BUILD_DIR}"/cov-int bazel build --action_env=LD_PRELOAD "${BAZEL_BUILD_OPTIONS[@]}" \ -c opt "${ENVOY_BUILD_TARGET}" # tar up the coverity results tar czvf "${ENVOY_BUILD_DIR}"/envoy-coverity-output.tgz -C "${ENVOY_BUILD_DIR}" cov-int @@ -354,10 +356,10 @@ elif [[ "$CI_TARGET" == "bazel.coverity" ]]; then exit 0 elif [[ "$CI_TARGET" == "bazel.fuzz" ]]; then setup_clang_toolchain - FUZZ_TEST_TARGETS="$(bazel query "attr('tags','fuzzer',${TEST_TARGETS})")" - echo "bazel ASAN libFuzzer build with fuzz tests ${FUZZ_TEST_TARGETS}" + FUZZ_TEST_TARGETS=("$(bazel query "attr('tags','fuzzer',${TEST_TARGETS[*]})")") + echo "bazel ASAN libFuzzer build with fuzz tests ${FUZZ_TEST_TARGETS[*]}" echo "Building envoy fuzzers and executing 100 fuzz iterations..." - bazel_with_collection test ${BAZEL_BUILD_OPTIONS} --config=asan-fuzzer ${FUZZ_TEST_TARGETS} --test_arg="-runs=10" + bazel_with_collection test "${BAZEL_BUILD_OPTIONS[@]}" --config=asan-fuzzer "${FUZZ_TEST_TARGETS[@]}" --test_arg="-runs=10" exit 0 elif [[ "$CI_TARGET" == "fix_format" ]]; then # proto_format.sh needs to build protobuf. @@ -405,8 +407,12 @@ elif [[ "$CI_TARGET" == "docs" ]]; then elif [[ "$CI_TARGET" == "verify_examples" ]]; then echo "verify examples..." 
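
Note on the coverage and clang-tidy targets above: the options array is flattened back into a single environment variable with "${BAZEL_BUILD_OPTIONS[*]}" because a bash array cannot be exported to a child process; the receiving script can then rebuild an array from the inherited string. A rough sketch of both sides (child_script.sh is a placeholder name):

    # parent: join the array with spaces and pass it through the environment
    BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS[*]}" ./child_script.sh

    # child_script.sh: rebuild the array from the inherited string
    read -ra BAZEL_BUILD_OPTIONS <<< "${BAZEL_BUILD_OPTIONS:-}"
    bazel test "${BAZEL_BUILD_OPTIONS[@]}" //test/...

As in build_setup.sh, this assumes no individual option contains whitespace.
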
docker load < "$ENVOY_DOCKER_BUILD_DIR/docker/envoy-docker-images.tar.xz" - images=($(docker image list --format "{{.Repository}}")) - tags=($(docker image list --format "{{.Tag}}")) + _images=$(docker image list --format "{{.Repository}}") + while read -r line; do images+=("$line"); done \ + <<< "$_images" + _tags=$(docker image list --format "{{.Tag}}") + while read -r line; do tags+=("$line"); done \ + <<< "$_tags" for i in "${!images[@]}"; do if [[ "${images[i]}" =~ "envoy" ]]; then docker tag "${images[$i]}:${tags[$i]}" "${images[$i]}:latest" diff --git a/ci/do_circle_ci.sh b/ci/do_circle_ci.sh index 29469a24b814..3602f6a00239 100755 --- a/ci/do_circle_ci.sh +++ b/ci/do_circle_ci.sh @@ -7,7 +7,7 @@ ulimit -s 16384 # bazel uses jgit internally and the default circle-ci .gitconfig says to # convert https://github.com to ssh://git@github.com, which jgit does not support. -if [[ -e "~/.gitconfig" ]]; then +if [[ -e "${HOME}/.gitconfig" ]]; then mv ~/.gitconfig ~/.gitconfig_save fi @@ -21,7 +21,8 @@ export HOME="${FAKE_HOME}" export PYTHONUSERBASE="${FAKE_HOME}" export USER=bazel -export ENVOY_SRCDIR="$(pwd)" +ENVOY_SRCDIR="$(pwd)" +export ENVOY_SRCDIR # xlarge resource_class. # See note: https://circleci.com/docs/2.0/configuration-reference/#resource_class for why we @@ -30,8 +31,13 @@ export NUM_CPUS=6 # CircleCI doesn't support IPv6 by default, so we run all tests with IPv4 only. # IPv6 tests are run with Azure Pipelines. -export BAZEL_BUILD_EXTRA_OPTIONS+="--test_env=ENVOY_IP_TEST_VERSIONS=v4only --local_cpu_resources=${NUM_CPUS} \ - --action_env=HOME --action_env=PYTHONUSERBASE --test_env=HOME --test_env=PYTHONUSERBASE" +export BAZEL_BUILD_EXTRA_OPTIONS+=" \ + --test_env=ENVOY_IP_TEST_VERSIONS=v4only \ + --local_cpu_resources=${NUM_CPUS} \ + --action_env=HOME \ + --action_env=PYTHONUSERBASE \ + --test_env=HOME \ + --test_env=PYTHONUSERBASE" function finish { echo "disk space at end of build:" @@ -42,4 +48,4 @@ trap finish EXIT echo "disk space at beginning of build:" df -h -ci/do_ci.sh $* +ci/do_ci.sh "$@" diff --git a/ci/docker-entrypoint.sh b/ci/docker-entrypoint.sh index 677e617e9fce..4815acb1956a 100755 --- a/ci/docker-entrypoint.sh +++ b/ci/docker-entrypoint.sh @@ -1,6 +1,8 @@ #!/usr/bin/env sh set -e +loglevel="${loglevel:-}" + # if the first argument look like a parameter (i.e. 
start with '-'), run Envoy if [ "${1#-}" != "$1" ]; then set -- envoy "$@" diff --git a/ci/docker_ci.sh b/ci/docker_ci.sh index d4bb8e5e20e2..1488ea726f98 100755 --- a/ci/docker_ci.sh +++ b/ci/docker_ci.sh @@ -12,7 +12,7 @@ config_env() { docker run --rm --privileged multiarch/qemu-user-static --reset -p yes # Remove older build instance - docker buildx rm multi-builder | true + docker buildx rm multi-builder || : docker buildx create --use --name multi-builder --platform linux/arm64,linux/amd64 } @@ -31,10 +31,11 @@ build_args() { TYPE=$1 FILE_SUFFIX="${TYPE/-debug/}" - echo "-f ci/Dockerfile-envoy${FILE_SUFFIX}" - [[ "${TYPE}" == *-debug ]] && echo "--build-arg ENVOY_BINARY_SUFFIX=" - if [[ "${TYPE}" == "-google-vrp" ]]; then - echo "--build-arg ENVOY_VRP_BASE_IMAGE=${VRP_BASE_IMAGE}" + printf ' -f ci/Dockerfile-envoy%s' "${FILE_SUFFIX}" + if [[ "${TYPE}" == *-debug ]]; then + printf ' --build-arg ENVOY_BINARY_SUFFIX=' + elif [[ "${TYPE}" == "-google-vrp" ]]; then + printf ' --build-arg ENVOY_VRP_BASE_IMAGE=%s' "${VRP_BASE_IMAGE}" fi } @@ -50,16 +51,18 @@ use_builder() { IMAGES_TO_SAVE=() build_images() { + local _args args=() TYPE=$1 BUILD_TAG=$2 use_builder "${TYPE}" - ARGS="$(build_args ${TYPE})" - PLATFORM="$(build_platforms ${TYPE})" + _args=$(build_args "${TYPE}") + read -ra args <<< "$_args" + PLATFORM="$(build_platforms "${TYPE}")" - docker buildx build --platform "${PLATFORM}" ${ARGS} -t "${BUILD_TAG}" . + docker buildx build --platform "${PLATFORM}" "${args[@]}" -t "${BUILD_TAG}" . - PLATFORM="$(build_platforms ${TYPE} | tr ',' ' ')" + PLATFORM="$(build_platforms "${TYPE}" | tr ',' ' ')" # docker buildx load cannot have multiple platform, load individually for ARCH in ${PLATFORM}; do if [[ "${ARCH}" == "linux/amd64" ]]; then @@ -67,20 +70,22 @@ build_images() { else IMAGE_TAG="${BUILD_TAG}-${ARCH/linux\//}" fi - docker buildx build --platform "${ARCH}" ${ARGS} -t "${IMAGE_TAG}" . --load + docker buildx build --platform "${ARCH}" "${args[@]}" -t "${IMAGE_TAG}" . --load IMAGES_TO_SAVE+=("${IMAGE_TAG}") done } push_images() { + local _args args=() TYPE=$1 BUILD_TAG=$2 use_builder "${TYPE}" - ARGS="$(build_args ${TYPE})" - PLATFORM="$(build_platforms ${TYPE})" + _args=$(build_args "${TYPE}") + read -ra args <<< "$_args" + PLATFORM="$(build_platforms "${TYPE}")" # docker buildx doesn't do push with default builder - docker buildx build --platform "${PLATFORM}" ${ARGS} -t ${BUILD_TAG} . --push || \ + docker buildx build --platform "${PLATFORM}" "${args[@]}" -t "${BUILD_TAG}" . --push || \ docker push "${BUILD_TAG}" } @@ -90,7 +95,7 @@ RELEASE_TAG_REGEX="^refs/tags/v.*" # For master builds and release branch builds use the dev repo. Otherwise we assume it's a tag and # we push to the primary repo. -if [[ "${AZP_BRANCH}" =~ "${RELEASE_TAG_REGEX}" ]]; then +if [[ "${AZP_BRANCH}" =~ ${RELEASE_TAG_REGEX} ]]; then IMAGE_POSTFIX="" IMAGE_NAME="${AZP_BRANCH/refs\/tags\//}" else diff --git a/ci/docker_rebuild_google-vrp.sh b/ci/docker_rebuild_google-vrp.sh index 3a9bb5f711dc..4f3149e6732f 100755 --- a/ci/docker_rebuild_google-vrp.sh +++ b/ci/docker_rebuild_google-vrp.sh @@ -23,7 +23,8 @@ set -e # this local dep which is fairly stable. 
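
Note on the docker_ci.sh change above that drops the quotes around RELEASE_TAG_REGEX on the right-hand side of `=~`: inside [[ ]], a quoted pattern is matched literally, while an unquoted variable is treated as a regular expression. For example (the ref value is illustrative):

    RELEASE_TAG_REGEX="^refs/tags/v.*"
    ref="refs/tags/v1.16.0"
    [[ "${ref}" =~ ${RELEASE_TAG_REGEX} ]] && echo "matches as a regex"
    # with quotes, the test looks for the literal text ^refs/tags/v.*
    [[ "${ref}" =~ "${RELEASE_TAG_REGEX}" ]] || echo "literal match fails"
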
BASE_DOCKER_IMAGE="envoyproxy/envoy-dev:latest" -declare -r BUILD_DIR="$(mktemp -d)" +BUILD_DIR="$(mktemp -d)" +declare -r BUILD_DIR cp ci/Dockerfile-envoy-google-vrp "${BUILD_DIR}" declare -r DOCKER_BUILD_FILE="${BUILD_DIR}"/Dockerfile-envoy-google-vrp diff --git a/ci/envoy_build_sha.sh b/ci/envoy_build_sha.sh index 6ea4600faeef..e2923189e35e 100644 --- a/ci/envoy_build_sha.sh +++ b/ci/envoy_build_sha.sh @@ -1,2 +1,4 @@ -ENVOY_BUILD_SHA=$(grep envoyproxy/envoy-build-ubuntu $(dirname $0)/../.bazelrc | sed -e 's#.*envoyproxy/envoy-build-ubuntu:\(.*\)#\1#' | uniq) +#!/bin/bash + +ENVOY_BUILD_SHA=$(grep envoyproxy/envoy-build-ubuntu "$(dirname "$0")"/../.bazelrc | sed -e 's#.*envoyproxy/envoy-build-ubuntu:\(.*\)#\1#' | uniq) [[ $(wc -l <<< "${ENVOY_BUILD_SHA}" | awk '{$1=$1};1') == 1 ]] || (echo ".bazelrc envoyproxy/envoy-build-ubuntu hashes are inconsistent!" && exit 1) diff --git a/ci/filter_example_setup.sh b/ci/filter_example_setup.sh index 4101c63445ee..0fd954bf2319 100644 --- a/ci/filter_example_setup.sh +++ b/ci/filter_example_setup.sh @@ -8,7 +8,11 @@ set -e ENVOY_FILTER_EXAMPLE_GITSHA="493e2e5bee10bbed1c3c097e09d83d7f672a9f2e" ENVOY_FILTER_EXAMPLE_SRCDIR="${BUILD_DIR}/envoy-filter-example" -export ENVOY_FILTER_EXAMPLE_TESTS="//:echo2_integration_test //http-filter-example:http_filter_integration_test //:envoy_binary_test" +# shellcheck disable=SC2034 +ENVOY_FILTER_EXAMPLE_TESTS=( + "//:echo2_integration_test" + "//http-filter-example:http_filter_integration_test" + "//:envoy_binary_test") if [[ ! -d "${ENVOY_FILTER_EXAMPLE_SRCDIR}/.git" ]]; then rm -rf "${ENVOY_FILTER_EXAMPLE_SRCDIR}" @@ -23,4 +27,4 @@ ln -sf "${ENVOY_SRCDIR}"/bazel/get_workspace_status "${ENVOY_FILTER_EXAMPLE_SRCD cp -f "${ENVOY_SRCDIR}"/.bazelrc "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/ cp -f "$(bazel info workspace)"/*.bazelrc "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/ -FILTER_WORKSPACE_SET=1 +export FILTER_WORKSPACE_SET=1 diff --git a/ci/flaky_test/run_process_xml.sh b/ci/flaky_test/run_process_xml.sh index a5c5043c92d4..85cab7c4711d 100755 --- a/ci/flaky_test/run_process_xml.sh +++ b/ci/flaky_test/run_process_xml.sh @@ -1,10 +1,11 @@ #!/bin/bash +# shellcheck source=tools/shell_utils.sh . "${ENVOY_SRCDIR}"/tools/shell_utils.sh if [[ "${ENVOY_BUILD_ARCH}" == "aarch64" ]]; then export MULTIDICT_NO_EXTENSIONS=1 - export YARL_NO_EXTENSIONS=1 + export YARL_NO_EXTENSIONS=1 fi -python_venv process_xml $1 +python_venv process_xml "$1" diff --git a/ci/mac_ci_steps.sh b/ci/mac_ci_steps.sh index 41e01d0fd134..41d21c0d6f57 100755 --- a/ci/mac_ci_steps.sh +++ b/ci/mac_ci_steps.sh @@ -11,13 +11,23 @@ trap finish EXIT echo "disk space at beginning of build:" df -h +# shellcheck source=ci/setup_cache.sh . "$(dirname "$0")"/setup_cache.sh +read -ra BAZEL_BUILD_EXTRA_OPTIONS <<< "${BAZEL_BUILD_EXTRA_OPTIONS:-}" +read -ra BAZEL_EXTRA_TEST_OPTIONS <<< "${BAZEL_EXTRA_TEST_OPTIONS:-}" + # TODO(zuercher): remove --flaky_test_attempts when https://github.com/envoyproxy/envoy/issues/2428 # is resolved. 
-BAZEL_BUILD_OPTIONS="--curses=no --show_task_finish --verbose_failures \ - --action_env=PATH=/usr/local/bin:/opt/local/bin:/usr/bin:/bin --test_output=all \ - --flaky_test_attempts=integration@2 ${BAZEL_BUILD_EXTRA_OPTIONS} ${BAZEL_EXTRA_TEST_OPTIONS}" +BAZEL_BUILD_OPTIONS=( + "--curses=no" + --show_task_finish + --verbose_failures + "--action_env=PATH=/usr/local/bin:/opt/local/bin:/usr/bin:/bin" + "--test_output=all" + "--flaky_test_attempts=integration@2" + "${BAZEL_BUILD_EXTRA_OPTIONS[@]}" + "${BAZEL_EXTRA_TEST_OPTIONS[@]}") # Build envoy and run tests as separate steps so that failure output # is somewhat more deterministic (rather than interleaving the build @@ -30,6 +40,6 @@ else fi if [[ "$TEST_TARGETS" == "//test/..." || "$TEST_TARGETS" == "//test/integration/..." ]]; then - bazel build ${BAZEL_BUILD_OPTIONS} //source/exe:envoy-static + bazel build "${BAZEL_BUILD_OPTIONS[@]}" //source/exe:envoy-static fi -bazel test ${BAZEL_BUILD_OPTIONS} ${TEST_TARGETS} +bazel test "${BAZEL_BUILD_OPTIONS[@]}" ${TEST_TARGETS} diff --git a/ci/run_clang_tidy.sh b/ci/run_clang_tidy.sh index d594553b6cb2..1146cd15ff3d 100755 --- a/ci/run_clang_tidy.sh +++ b/ci/run_clang_tidy.sh @@ -61,22 +61,22 @@ function filter_excludes() { function run_clang_tidy() { python3 "${LLVM_PREFIX}/share/clang/run-clang-tidy.py" \ - -clang-tidy-binary=${CLANG_TIDY} \ - -clang-apply-replacements-binary=${CLANG_APPLY_REPLACEMENTS} \ - -export-fixes=${FIX_YAML} -j ${NUM_CPUS:-0} -p ${SRCDIR} -quiet \ - ${APPLY_CLANG_TIDY_FIXES:+-fix} $@ + -clang-tidy-binary="${CLANG_TIDY}" \ + -clang-apply-replacements-binary="${CLANG_APPLY_REPLACEMENTS}" \ + -export-fixes=${FIX_YAML} -j "${NUM_CPUS:-0}" -p "${SRCDIR}" -quiet \ + ${APPLY_CLANG_TIDY_FIXES:+-fix} "$@" } function run_clang_tidy_diff() { - git diff $1 | filter_excludes | \ + git diff "$1" | filter_excludes | \ python3 "${LLVM_PREFIX}/share/clang/clang-tidy-diff.py" \ - -clang-tidy-binary=${CLANG_TIDY} \ - -export-fixes=${FIX_YAML} -j ${NUM_CPUS:-0} -p 1 -quiet + -clang-tidy-binary="${CLANG_TIDY}" \ + -export-fixes="${FIX_YAML}" -j "${NUM_CPUS:-0}" -p 1 -quiet } if [[ $# -gt 0 ]]; then - echo "Running clang-tidy on: $@" - run_clang_tidy $@ + echo "Running clang-tidy on: $*" + run_clang_tidy "$@" elif [[ "${RUN_FULL_CLANG_TIDY}" == 1 ]]; then echo "Running a full clang-tidy" run_clang_tidy @@ -87,15 +87,15 @@ else elif [[ "${BUILD_REASON}" == *CI ]]; then DIFF_REF="HEAD^" else - DIFF_REF=$(${ENVOY_SRCDIR}/tools/git/last_github_commit.sh) + DIFF_REF=$("${ENVOY_SRCDIR}"/tools/git/last_github_commit.sh) fi fi - echo "Running clang-tidy-diff against ${DIFF_REF} ($(git rev-parse ${DIFF_REF})), current HEAD ($(git rev-parse HEAD))" - run_clang_tidy_diff ${DIFF_REF} + echo "Running clang-tidy-diff against ${DIFF_REF} ($(git rev-parse "${DIFF_REF}")), current HEAD ($(git rev-parse HEAD))" + run_clang_tidy_diff "${DIFF_REF}" fi if [[ -s "${FIX_YAML}" ]]; then echo "clang-tidy check failed, potentially fixed by clang-apply-replacements:" - cat ${FIX_YAML} + cat "${FIX_YAML}" exit 1 fi diff --git a/ci/run_envoy_docker.sh b/ci/run_envoy_docker.sh index 5bafffb89522..30372f67a17a 100755 --- a/ci/run_envoy_docker.sh +++ b/ci/run_envoy_docker.sh @@ -2,7 +2,15 @@ set -e -. $(dirname $0)/envoy_build_sha.sh +# shellcheck source=ci/envoy_build_sha.sh +. 
"$(dirname "$0")"/envoy_build_sha.sh + +read -ra ENVOY_DOCKER_OPTIONS <<< "${ENVOY_DOCKER_OPTIONS:-}" + +# TODO(phlax): uppercase these env vars +export HTTP_PROXY="${http_proxy:-}" +export HTTPS_PROXY="${https_proxy:-}" +export NO_PROXY="${no_proxy:-}" # We run as root and later drop permissions. This is required to setup the USER # in useradd below, which is need for correct Python execution in the Docker @@ -16,21 +24,47 @@ USER_GROUP=root [[ -z "${IMAGE_ID}" ]] && IMAGE_ID="${ENVOY_BUILD_SHA}" [[ -z "${ENVOY_DOCKER_BUILD_DIR}" ]] && ENVOY_DOCKER_BUILD_DIR=/tmp/envoy-docker-build -[[ -t 1 ]] && ENVOY_DOCKER_OPTIONS+=" -it" -[[ -f .git ]] && [[ ! -d .git ]] && ENVOY_DOCKER_OPTIONS+=" -v $(git rev-parse --git-common-dir):$(git rev-parse --git-common-dir)" +[[ -t 1 ]] && ENVOY_DOCKER_OPTIONS+=("-it") +[[ -f .git ]] && [[ ! -d .git ]] && ENVOY_DOCKER_OPTIONS+=(-v "$(git rev-parse --git-common-dir):$(git rev-parse --git-common-dir)") export ENVOY_BUILD_IMAGE="${IMAGE_NAME}:${IMAGE_ID}" mkdir -p "${ENVOY_DOCKER_BUILD_DIR}" # Since we specify an explicit hash, docker-run will pull from the remote repo if missing. -docker run --rm ${ENVOY_DOCKER_OPTIONS} -e HTTP_PROXY=${http_proxy} -e HTTPS_PROXY=${https_proxy} -e NO_PROXY=${no_proxy} \ - -u "${USER}":"${USER_GROUP}" -v "${ENVOY_DOCKER_BUILD_DIR}":/build -v /var/run/docker.sock:/var/run/docker.sock \ - -e BAZEL_BUILD_EXTRA_OPTIONS -e BAZEL_EXTRA_TEST_OPTIONS -e BAZEL_REMOTE_CACHE -e ENVOY_STDLIB -e BUILD_REASON \ - -e BAZEL_REMOTE_INSTANCE -e GCP_SERVICE_ACCOUNT_KEY -e NUM_CPUS -e ENVOY_RBE -e FUZZIT_API_KEY -e ENVOY_BUILD_IMAGE \ - -e ENVOY_SRCDIR -e ENVOY_BUILD_TARGET -e SYSTEM_PULLREQUEST_TARGETBRANCH -e SYSTEM_PULLREQUEST_PULLREQUESTNUMBER \ - -e GCS_ARTIFACT_BUCKET -e BUILD_SOURCEBRANCHNAME -e BAZELISK_BASE_URL -e ENVOY_BUILD_ARCH -e SLACK_TOKEN -e BUILD_URI\ - -e REPO_URI -v "$PWD":/source --cap-add SYS_PTRACE --cap-add NET_RAW --cap-add NET_ADMIN "${ENVOY_BUILD_IMAGE}" \ - /bin/bash -lc "\ +docker run --rm \ + "${ENVOY_DOCKER_OPTIONS[@]}" \ + -u "${USER}":"${USER_GROUP}" \ + -v "${ENVOY_DOCKER_BUILD_DIR}":/build \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v "$PWD":/source \ + -e HTTP_PROXY \ + -e HTTPS_PROXY \ + -e NO_PROXY \ + -e BAZEL_BUILD_EXTRA_OPTIONS \ + -e BAZEL_EXTRA_TEST_OPTIONS \ + -e BAZEL_REMOTE_CACHE \ + -e ENVOY_STDLIB \ + -e BUILD_REASON \ + -e BAZEL_REMOTE_INSTANCE \ + -e GCP_SERVICE_ACCOUNT_KEY \ + -e NUM_CPUS \ + -e ENVOY_RBE \ + -e FUZZIT_API_KEY \ + -e ENVOY_BUILD_IMAGE \ + -e ENVOY_SRCDIR \ + -e ENVOY_BUILD_TARGET \ + -e SYSTEM_PULLREQUEST_TARGETBRANCH \ + -e SYSTEM_PULLREQUEST_PULLREQUESTNUMBER \ + -e GCS_ARTIFACT_BUCKET \ + -e BUILD_SOURCEBRANCHNAME \ + -e BAZELISK_BASE_URL \ + -e ENVOY_BUILD_ARCH \ + -e SLACK_TOKEN \ + -e BUILD_URI\ + -e REPO_URI \ + --cap-add SYS_PTRACE --cap-add NET_RAW --cap-add NET_ADMIN \ + "${ENVOY_BUILD_IMAGE}" \ + /bin/bash -lc "\ groupadd --gid $(id -g) -f envoygroup \ && useradd -o --uid $(id -u) --gid $(id -g) --no-create-home --home-dir /build envoybuild \ && usermod -a -G pcap envoybuild \ diff --git a/ci/run_envoy_docker_windows.sh b/ci/run_envoy_docker_windows.sh index a1f4e7372b52..0b5efc66df2a 100644 --- a/ci/run_envoy_docker_windows.sh +++ b/ci/run_envoy_docker_windows.sh @@ -3,7 +3,12 @@ set -e # The image tag for the Windows image is the same as the Linux one so we use the same mechanism to find it -. $(dirname $0)/envoy_build_sha.sh +# shellcheck source=ci/envoy_build_sha.sh +. 
"$(dirname "$0")/envoy_build_sha.sh" + +export HTTP_PROXY="${http_proxy:-}" +export HTTPS_PROXY="${https_proxy:-}" +DOCKER_OPTIONS=() [[ -z "${IMAGE_NAME}" ]] && IMAGE_NAME="envoyproxy/envoy-build-windows2019" # The IMAGE_ID defaults to the CI hash but can be set to an arbitrary image ID (found with 'docker @@ -12,16 +17,30 @@ set -e ENVOY_SOURCE_DIR=$(echo "${PWD}" | sed -E "s#/([a-zA-Z])/#\1:/#") -[[ -f .git ]] && [[ ! -d .git ]] && GIT_VOLUME_OPTION="-v $(git rev-parse --git-common-dir):$(git rev-parse --git-common-dir)" +[[ -t 1 ]] && DOCKER_OPTIONS+=(-it) -[[ -t 1 ]] && DOCKER_TTY_OPTION=-it +[[ -f .git ]] && [[ ! -d .git ]] && DOCKER_OPTIONS+=(-v "$(git rev-parse --git-common-dir):$(git rev-parse --git-common-dir)") export ENVOY_BUILD_IMAGE="${IMAGE_NAME}:${IMAGE_ID}" # Since we specify an explicit hash, docker-run will pull from the remote repo if missing. -docker run --rm ${DOCKER_TTY_OPTION} -e HTTP_PROXY=${http_proxy} -e HTTPS_PROXY=${https_proxy} \ - ${GIT_VOLUME_OPTION} -e BAZEL_BUILD_EXTRA_OPTIONS -e BAZEL_EXTRA_TEST_OPTIONS -e BAZEL_REMOTE_CACHE \ - -e ENVOY_STDLIB -e BUILD_REASON -e BAZEL_REMOTE_INSTANCE -e GCP_SERVICE_ACCOUNT_KEY -e NUM_CPUS -e ENVOY_RBE \ - -e ENVOY_BUILD_IMAGE -e ENVOY_SRCDIR -e ENVOY_BUILD_TARGET -e SYSTEM_PULLREQUEST_TARGETBRANCH -v ${ENVOY_SOURCE_DIR}:C:/source \ - "${ENVOY_BUILD_IMAGE}" \ - bash -c "cd source && $*" +docker run --rm \ + "${DOCKER_OPTIONS[@]}" \ + -e HTTP_PROXY \ + -e HTTPS_PROXY \ + -e BAZEL_BUILD_EXTRA_OPTIONS \ + -e BAZEL_EXTRA_TEST_OPTIONS \ + -e BAZEL_REMOTE_CACHE \ + -e ENVOY_STDLIB \ + -e BUILD_REASON \ + -e BAZEL_REMOTE_INSTANCE \ + -e GCP_SERVICE_ACCOUNT_KEY \ + -e NUM_CPUS \ + -e ENVOY_RBE \ + -e ENVOY_BUILD_IMAGE \ + -e ENVOY_SRCDIR \ + -e ENVOY_BUILD_TARGET \ + -e SYSTEM_PULLREQUEST_TARGETBRANCH \ + -v "${ENVOY_SOURCE_DIR}":C:/source \ + "${ENVOY_BUILD_IMAGE}" \ + bash -c "cd source && $*" diff --git a/ci/setup_cache.sh b/ci/setup_cache.sh index f615b8b41d5d..0733f679b784 100755 --- a/ci/setup_cache.sh +++ b/ci/setup_cache.sh @@ -2,7 +2,7 @@ set -e -if [[ ! -z "${GCP_SERVICE_ACCOUNT_KEY:0:1}" ]]; then +if [[ -n "${GCP_SERVICE_ACCOUNT_KEY:0:1}" ]]; then # mktemp will create a tempfile with u+rw permission minus umask, it will not be readable by all # users by default. GCP_SERVICE_ACCOUNT_KEY_FILE=$(mktemp -t gcp_service_account.XXXXXX.json) @@ -20,11 +20,11 @@ if [[ ! -z "${GCP_SERVICE_ACCOUNT_KEY:0:1}" ]]; then fi -if [[ ! -z "${BAZEL_REMOTE_CACHE}" ]]; then +if [[ -n "${BAZEL_REMOTE_CACHE}" ]]; then export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_cache=${BAZEL_REMOTE_CACHE}" echo "Set up bazel remote read/write cache at ${BAZEL_REMOTE_CACHE}." - if [[ ! -z "${BAZEL_REMOTE_INSTANCE}" ]]; then + if [[ -n "${BAZEL_REMOTE_INSTANCE}" ]]; then export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_instance_name=${BAZEL_REMOTE_INSTANCE}" echo "instance_name: ${BAZEL_REMOTE_INSTANCE}." elif [[ -z "${ENVOY_RBE}" ]]; then diff --git a/ci/upload_gcs_artifact.sh b/ci/upload_gcs_artifact.sh index 7bd5b0201359..755abf3a39d5 100755 --- a/ci/upload_gcs_artifact.sh +++ b/ci/upload_gcs_artifact.sh @@ -22,5 +22,5 @@ BRANCH=${SYSTEM_PULLREQUEST_PULLREQUESTNUMBER:-${BUILD_SOURCEBRANCHNAME}} GCS_LOCATION="${GCS_ARTIFACT_BUCKET}/${BRANCH}/${TARGET_SUFFIX}" echo "Uploading to gs://${GCS_LOCATION} ..." 
-gsutil -mq rsync -dr ${SOURCE_DIRECTORY} gs://${GCS_LOCATION} +gsutil -mq rsync -dr "${SOURCE_DIRECTORY}" "gs://${GCS_LOCATION}" echo "Artifacts uploaded to: https://storage.googleapis.com/${GCS_LOCATION}/index.html" diff --git a/ci/windows_ci_steps.sh b/ci/windows_ci_steps.sh index 498445d9b949..a7a000bdb6fa 100755 --- a/ci/windows_ci_steps.sh +++ b/ci/windows_ci_steps.sh @@ -11,38 +11,46 @@ trap finish EXIT echo "disk space at beginning of build:" df -h +# shellcheck source=ci/setup_cache.sh . "$(dirname "$0")"/setup_cache.sh +read -ra BAZEL_BUILD_EXTRA_OPTIONS <<< "${BAZEL_BUILD_EXTRA_OPTIONS:---config=msvc-cl}" +read -ra BAZEL_EXTRA_TEST_OPTIONS <<< "${BAZEL_EXTRA_TEST_OPTIONS:-}" + # Set up TMPDIR so bash and non-bash can access # e.g. TMPDIR=/d/tmp, make a link from /d/d to /d so both bash and Windows programs resolve the # same path # This is due to this issue: https://github.com/bazelbuild/rules_foreign_cc/issues/334 # rules_foreign_cc does not currently use bazel output/temp directories by default, it uses mktemp # which respects the value of the TMPDIR environment variable -drive="$(readlink -f $TMPDIR | cut -d '/' -f2)" +drive="$(readlink -f "$TMPDIR" | cut -d '/' -f2)" if [ ! -e "/$drive/$drive" ]; then /c/windows/system32/cmd.exe /c "mklink /d $drive:\\$drive $drive:\\" fi BAZEL_STARTUP_OPTIONS="--output_base=c:/_eb" # Default to msvc-cl if not overridden -BAZEL_BUILD_EXTRA_OPTIONS=${BAZEL_BUILD_EXTRA_OPTIONS:---config=msvc-cl} -BAZEL_BUILD_OPTIONS="-c opt --show_task_finish --verbose_failures \ - --test_output=errors ${BAZEL_BUILD_EXTRA_OPTIONS} ${BAZEL_EXTRA_TEST_OPTIONS}" +BAZEL_BUILD_OPTIONS=( + -c opt + --show_task_finish + --verbose_failures + "--test_output=errors" + "${BAZEL_BUILD_EXTRA_OPTIONS[@]}" + "${BAZEL_EXTRA_TEST_OPTIONS[@]}") # Test to validate updates of all dependency libraries in bazel/external and bazel/foreign_cc # bazel ${BAZEL_STARTUP_OPTIONS} build ${BAZEL_BUILD_OPTIONS} //bazel/... --build_tag_filters=-skip_on_windows # Complete envoy-static build (nothing needs to be skipped, build failure indicates broken dependencies) -bazel ${BAZEL_STARTUP_OPTIONS} build ${BAZEL_BUILD_OPTIONS} //source/exe:envoy-static +bazel "${BAZEL_STARTUP_OPTIONS}" build "${BAZEL_BUILD_OPTIONS[@]}" //source/exe:envoy-static # Test invocations of known-working tests on Windows -bazel ${BAZEL_STARTUP_OPTIONS} test ${BAZEL_BUILD_OPTIONS} //test/... --test_tag_filters=-skip_on_windows,-fails_on_windows,-flaky_on_windows --build_tests_only +bazel "${BAZEL_STARTUP_OPTIONS}" test "${BAZEL_BUILD_OPTIONS[@]}" //test/... --test_tag_filters=-skip_on_windows,-fails_on_windows,-flaky_on_windows --build_tests_only # Build tests that are known-flaky or known-failing to ensure no compilation regressions -bazel ${BAZEL_STARTUP_OPTIONS} build ${BAZEL_BUILD_OPTIONS} //test/... --test_tag_filters=-skip_on_windows,fails_on_windows,flaky_on_windows --build_tests_only +bazel "${BAZEL_STARTUP_OPTIONS}" build "${BAZEL_BUILD_OPTIONS[@]}" //test/... 
--test_tag_filters=-skip_on_windows,fails_on_windows,flaky_on_windows --build_tests_only # Summarize tests bypasssed to monitor the progress of porting to Windows -echo Tests bypassed as skip_on_windows: `bazel query 'kind(".*test rule", attr("tags", "skip_on_windows", //test/...))' 2>/dev/null | sort | wc -l` known unbuildable or inapplicable tests -echo Tests bypassed as fails_on_windows: `bazel query 'kind(".*test rule", attr("tags", "fails_on_windows", //test/...))' 2>/dev/null | sort | wc -l` known incompatible tests -echo Tests bypassed as flaky_on_windows: `bazel query 'kind(".*test rule", attr("tags", "flaky_on_windows", //test/...))' 2>/dev/null | sort | wc -l` known unstable tests +echo "Tests bypassed as skip_on_windows: $(bazel query 'kind(".*test rule", attr("tags", "skip_on_windows", //test/...))' 2>/dev/null | sort | wc -l) known unbuildable or inapplicable tests" +echo "Tests bypassed as fails_on_windows: $(bazel query 'kind(".*test rule", attr("tags", "fails_on_windows", //test/...))' 2>/dev/null | sort | wc -l) known incompatible tests" +echo "Tests bypassed as flaky_on_windows: $(bazel query 'kind(".*test rule", attr("tags", "flaky_on_windows", //test/...))' 2>/dev/null | sort | wc -l) known unstable tests" diff --git a/tools/code_format/check_shellcheck_format.sh b/tools/code_format/check_shellcheck_format.sh index d4ac10d0508b..8b26e7064ed4 100755 --- a/tools/code_format/check_shellcheck_format.sh +++ b/tools/code_format/check_shellcheck_format.sh @@ -1,6 +1,6 @@ #!/bin/bash -e -EXCLUDED_SHELLFILES=${EXCLUDED_SHELLFILES:-"^examples|^ci|^bin|^source|^bazel|^.github"} +EXCLUDED_SHELLFILES=${EXCLUDED_SHELLFILES:-"^examples|^bin|^source|^bazel|^.github"} find_shell_files () {
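
Note on the final hunk: with "^ci" removed from EXCLUDED_SHELLFILES, the ci/ scripts are now linted like the rest of the tree. The project's entry point for that is tools/code_format/check_shellcheck_format.sh; a rough local equivalent for just these files, assuming shellcheck is installed, would be:

    # -x lets shellcheck follow sourced files, using the
    # "# shellcheck source=..." directives added in this patch
    find ci -name '*.sh' -print0 | xargs -0 shellcheck -x
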