From c14bf25b84f9295d685c8bee24a7efba9c99c1de Mon Sep 17 00:00:00 2001 From: Romain Grecourt Date: Wed, 31 Jul 2024 18:53:46 -0700 Subject: [PATCH] ShellCheck - Add a new job to run shellcheck - Fix existing scripts to pass the validation - Remove unused scripts - Inline some scripts in validate.yaml - etc/scripts/docs.sh - etc/scripts/github-build.sh - etc/scripts/github-compile.sh - etc/scripts/mp-tck.sh - etc/scripts/spotbugs.sh - etc/scripts/test-archetypes.sh - Remove etc/scripts/BUILDDOCS.md (invalid) - Update etc/scripts/RELEASE.md - Move etc/scripts/actions/*.sh to etc/scripts --- .github/workflows/assign-issue-to-project.yml | 2 +- .github/workflows/create-backport-issues.yml | 2 +- .github/workflows/validate.yml | 80 ++++- etc/scripts/BUILDDOCS.md | 49 --- etc/scripts/RELEASE.md | 108 +++--- .../{actions => }/assign-issue-to-project.sh | 73 ++-- etc/scripts/build.sh | 35 -- etc/scripts/checkstyle.sh | 49 ++- etc/scripts/copyright.sh | 56 +-- .../{actions => }/create-backport-issues.sh | 72 ++-- etc/scripts/docs.sh | 38 --- etc/scripts/gen-javadoc-packagelist.sh | 31 +- etc/scripts/github-build.sh | 32 -- etc/scripts/github-compile.sh | 31 -- etc/scripts/github-release.sh | 23 +- etc/scripts/includes/docker-env.sh | 83 ----- etc/scripts/includes/error_handlers.sh | 55 --- etc/scripts/includes/mysql.sh | 38 --- etc/scripts/includes/pgsql.sh | 36 -- etc/scripts/includes/pipeline-env.sh | 157 --------- etc/scripts/mp-tck.sh | 37 -- etc/scripts/owasp-dependency-check.sh | 45 ++- etc/scripts/release.sh | 323 +++++++++--------- etc/scripts/shellcheck.sh | 53 +++ etc/scripts/smoketest.sh | 141 ++++---- etc/scripts/spotbugs.sh | 31 -- etc/scripts/test-archetypes.sh | 37 -- etc/scripts/test-integ-dbclient.sh | 116 ------- etc/scripts/test-integ-example.sh | 98 ------ etc/scripts/test-integ-mysql.sh | 48 --- etc/scripts/test-integ-pgsql.sh | 48 --- etc/scripts/test-integ-vault.sh | 40 --- etc/scripts/test-nightly.sh | 32 +- etc/scripts/test-packaging-jar.sh | 40 ++- etc/scripts/test-packaging-jlink.sh | 57 ++-- etc/scripts/test-packaging-native.sh | 50 ++- etc/scripts/test-quickstarts.sh | 32 +- .../messaging/docker/kafka/init_topics.sh | 18 +- .../messaging/docker/kafka/start_kafka.sh | 4 +- .../docker/oracle-aq-18-xe/buildAndRun.sh | 4 +- .../createAndStartEmptyDomain.sh | 59 ++-- .../etc/unsupported-cert-tools/create-keys.sh | 35 +- .../etc/unsupported-cert-tools/rotate-keys.sh | 18 +- .../etc/unsupported-cert-tools/utils.sh | 80 ++--- .../mutual-tls/automatic-store-generator.sh | 93 ++--- tests/integration/dbclient/test.sh | 138 -------- .../store/automatic-store-generator.sh | 138 ++++---- 47 files changed, 1013 insertions(+), 1852 deletions(-) delete mode 100644 etc/scripts/BUILDDOCS.md rename etc/scripts/{actions => }/assign-issue-to-project.sh (51%) delete mode 100755 etc/scripts/build.sh rename etc/scripts/{actions => }/create-backport-issues.sh (67%) delete mode 100755 etc/scripts/docs.sh delete mode 100755 etc/scripts/github-build.sh delete mode 100755 etc/scripts/github-compile.sh delete mode 100644 etc/scripts/includes/docker-env.sh delete mode 100644 etc/scripts/includes/error_handlers.sh delete mode 100644 etc/scripts/includes/mysql.sh delete mode 100644 etc/scripts/includes/pgsql.sh delete mode 100644 etc/scripts/includes/pipeline-env.sh delete mode 100755 etc/scripts/mp-tck.sh create mode 100755 etc/scripts/shellcheck.sh delete mode 100755 etc/scripts/spotbugs.sh delete mode 100755 etc/scripts/test-archetypes.sh delete mode 100755 etc/scripts/test-integ-dbclient.sh delete 
mode 100755 etc/scripts/test-integ-example.sh delete mode 100755 etc/scripts/test-integ-mysql.sh delete mode 100755 etc/scripts/test-integ-pgsql.sh delete mode 100644 etc/scripts/test-integ-vault.sh delete mode 100755 tests/integration/dbclient/test.sh diff --git a/.github/workflows/assign-issue-to-project.yml b/.github/workflows/assign-issue-to-project.yml index d639c270277..c728e810958 100644 --- a/.github/workflows/assign-issue-to-project.yml +++ b/.github/workflows/assign-issue-to-project.yml @@ -13,4 +13,4 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@v4 - - run: etc/scripts/actions/assign-issue-to-project.sh $GITHUB_REPOSITORY ${{ github.event.issue.number }} Backlog Triage + - run: etc/scripts/assign-issue-to-project.sh $GITHUB_REPOSITORY ${{ github.event.issue.number }} Backlog Triage diff --git a/.github/workflows/create-backport-issues.yml b/.github/workflows/create-backport-issues.yml index a42abfdbe49..ec0fb5d79bf 100644 --- a/.github/workflows/create-backport-issues.yml +++ b/.github/workflows/create-backport-issues.yml @@ -38,4 +38,4 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@v4 - - run: etc/scripts/actions/create-backport-issues.sh $GITHUB_REPOSITORY ${{ github.event.inputs.issue }} ${{ github.event.inputs.version }} ${{ github.event.inputs.target-2 }} ${{ github.event.inputs.target-3 }} ${{ github.event.inputs.target-4 }} + - run: etc/scripts/create-backport-issues.sh $GITHUB_REPOSITORY ${{ github.event.inputs.issue }} ${{ github.event.inputs.version }} ${{ github.event.inputs.target-2 }} ${{ github.event.inputs.target-3 }} ${{ github.event.inputs.target-4 }} diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 6f895265ee3..8a9767e19c3 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -6,13 +6,17 @@ name: "Validate" on: pull_request: + push: + branches-ignore: [ 'main', 'helidon-*.x' ] workflow_call: env: JAVA_VERSION: '21' JAVA_DISTRO: 'oracle' HELIDON_PIPELINES: 'true' - MAVEN_HTTP_ARGS: '-Dmaven.wagon.httpconnectionManager.ttlSeconds=60 -Dmaven.wagon.http.retryHandler.count=3' + MAVEN_ARGS: | + -Dmaven.wagon.httpconnectionManager.ttlSeconds=60 + -Dmaven.wagon.http.retryHandler.count=3 concurrency: group: Validate-${{ github.ref }} @@ -47,6 +51,13 @@ jobs: cache: maven - name: Checkstyle run: etc/scripts/checkstyle.sh + shellcheck: + timeout-minutes: 5 + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v4 + - name: ShellCheck + run: etc/scripts/shellcheck.sh spotbugs: timeout-minutes: 45 runs-on: ubuntu-20.04 @@ -59,7 +70,12 @@ jobs: java-version: ${{ env.JAVA_VERSION }} cache: maven - name: Spotbugs - run: etc/scripts/spotbugs.sh + run: | + mvn ${MAVEN_ARGS} -e \ + -DskipTests \ + -Dmaven.test.skip=true \ + -Pspotbugs,pipeline \ + install docs: timeout-minutes: 30 runs-on: ubuntu-20.04 @@ -72,7 +88,16 @@ jobs: java-version: ${{ env.JAVA_VERSION }} cache: maven - name: Docs - run: etc/scripts/docs.sh + run: | + mvn ${MAVEN_ARGS} -e \ + -Dmaven.test.skip=true \ + -DskipTests \ + -Ppipeline \ + install + mvn ${MAVEN_ARGS} -e \ + -f docs/pom.xml \ + -Pjavadoc \ + install build: timeout-minutes: 60 strategy: @@ -88,7 +113,11 @@ jobs: java-version: ${{ env.JAVA_VERSION }} cache: maven - name: Maven build - run: etc/scripts/github-build.sh + run: | + mvn ${MAVEN_ARGS} -e \ + -Dmaven.test.failure.ignore=false \ + -Pjavadoc,sources,tests,pipeline \ + install examples: timeout-minutes: 40 strategy: @@ -112,9 +141,15 @@ jobs: cache: maven - name: Maven 
build run: | - mvn -B -e "-Dmaven.test.skip=true" $MAVEN_HTTP_ARGS -DskipTests -Ppipeline install - cd examples - mvn -B verify + # prime build + mvn ${MAVEN_ARGS} -e \ + -Dmaven.test.skip=true \ + -DskipTests \ + -Ppipeline \ + install + mvn ${MAVEN_ARGS} -e \ + -f examples/pom.xml \ + verify - name: Test quickstarts native image run: etc/scripts/test-quickstarts.sh mp-tck: @@ -133,7 +168,17 @@ jobs: java-version: ${{ env.JAVA_VERSION }} cache: maven - name: Maven build - run: etc/scripts/mp-tck.sh + run: | + # prime build + mvn ${MAVEN_ARGS} -e \ + -Dmaven.test.skip=true \ + -DskipTests \ + -Ppipeline \ + install + mvn ${MAVEN_ARGS} -e \ + -f microprofile/tests/tck/pom.xml \ + -Ptck-ft \ + verify archetypes: timeout-minutes: 45 strategy: @@ -149,7 +194,16 @@ jobs: java-version: ${{ env.JAVA_VERSION }} cache: maven - name: Test archetypes - run: etc/scripts/test-archetypes.sh + run: | + # prime build + mvn ${MAVEN_ARGS} -e \ + -Dmaven.test.skip=true \ + -DskipTests \ + -Ppipeline \ + install + mvn ${MAVEN_ARGS} -e \ + -f archetypes/pom.xml \ + install packaging: timeout-minutes: 60 strategy: @@ -166,7 +220,13 @@ jobs: native-image-job-reports: true cache: maven - name: Build Helidon - run: etc/scripts/github-compile.sh + run: | + # prime build + mvn ${MAVEN_ARGS} -e \ + -Dmaven.test.skip=true \ + -DskipTests \ + -Ppipeline \ + install - name: JAR packaging run: etc/scripts/test-packaging-jar.sh - name: JLink packaging diff --git a/etc/scripts/BUILDDOCS.md b/etc/scripts/BUILDDOCS.md deleted file mode 100644 index 72df8548d15..00000000000 --- a/etc/scripts/BUILDDOCS.md +++ /dev/null @@ -1,49 +0,0 @@ - -# Building Docs - -If you want to do a local build of the documentation and javadocs -so you can preview them, here is what you do. - -## Build - -If you're behind a proxy, you'll want to set `JAVA_TOOL_OPTIONS` to -pass proxy system properties to javadoc: - -``` -export JAVA_TOOL_OPTIONS="-DproxyHost=yourproxy.com -DproxyPort=80 -DnonProxyHosts=localhost|127.0.0.1" -``` - -First do a priming build to ensure your local Maven repo cache is populated with -Helidon artifacts. These are needed to build the aggregated javadocs. - -``` -mvn clean install -DskipTests -``` - -Next build the docs (including aggregated javadocs): - -``` -mvn site -``` - -Without javadocs: - -``` -mvn site -Dmaven.javadoc.skip -``` - -## View the docs - -The built docs will be in the top level target directory: - -``` -cd target/site -python -m SimpleHTTPServer 8000 -``` - -View them in your browser: - -``` -http://localhost:8000 -``` - diff --git a/etc/scripts/RELEASE.md b/etc/scripts/RELEASE.md index d44c1066695..61e3fbd999b 100644 --- a/etc/scripts/RELEASE.md +++ b/etc/scripts/RELEASE.md @@ -1,12 +1,11 @@ - # Releasing Helidon These are the steps for doing a release of Helidon. These steps -will use release 0.7.0 in examples. Of course you are not releasing +will use release 0.7.0 in examples. Of course, you are not releasing 0.7.0, so make sure to change that release number to your release number when copy/pasting. -# Overview +## Overview The Helidon release pipeline is triggered when a change is pushed to a branch that starts with `release-`. The release pipeline performs @@ -21,16 +20,15 @@ is the overall flow: 5. Create GitHub release 6. Increment version in master and update changelog -# Steps in detail +## Steps in detail -``` +```shell # Set this to the version you are releasing export VERSION="0.7.0" ``` - 1. 
Create local release branch - ``` + ```shell git clone git@github.com:oracle/helidon.git git checkout -b release-${VERSION} ``` @@ -66,7 +64,7 @@ export VERSION="0.7.0" 1. Do quick smoke test by trying an archetype that is in the staging repo (see staging repository profile at end of this document) - ``` + ```shell mvn -U archetype:generate -DinteractiveMode=false \ -DarchetypeGroupId=io.helidon.archetypes \ -DarchetypeArtifactId=helidon-quickstart-se \ @@ -81,87 +79,85 @@ export VERSION="0.7.0" mvn package -Possrh-staging ``` - 2. Do full smoke test using test script (this requires staging profile to - be configured): - ``` + 2. Do full smoke test using test script (this requires staging profile to be configured): + ```shell smoketest.sh --giturl=https://github.com/oracle/helidon.git --version=${VERSION} --clean --staged full ``` 3. The smoketest script will leave its work in `/var/tmp/helidon-smoke.XXXX`. - Go there, into the quickstarts and test the native builds and Docker - builds (for Docker builds you'll need to update the pom to include - the staging repositories. + Go there, into the quickstarts and test the native builds and Docker builds. + For Docker builds you'll need to update the pom to include the staging repositories. -6. Release repository: Select repository then click Release (up at the top) - 1. In description you can put something like "Helidon 0.7.0 Release" +7. Release repository: Select repository then click Release (up at the top) + 1. In the description you can put something like "Helidon 0.7.0 Release" 2. It might take a while (possibly hours) before the release appears in Maven Central 3. To check on progress look at https://repo1.maven.org/maven2/io/helidon/helidon-bom/ -6. Create GitHub release +8. Create GitHub release 1. Create a fragment of the change log that you want used for the release description on the GitHub Releases page. Assume it is in `/tmp/change-frag.md` 2. Set your API key (you generate this on your GitHub Settings): - ``` + ```shell export GITHUB_API_KEY= ``` 3. Run script to create release in GitHub: - ``` + ```shell etc/scripts/github-release.sh --changelog=/tmp/change-frag.md --version=${VERSION} ``` 4. Go to https://github.com/oracle/helidon/releases and verify release looks like you expect. You can edit it if you need to. -7. Update version and CHANGELOG in master +9. Update version and CHANGELOG in master 1. Create post release branch: `git checkout -b post-release-${VERSION}` 2. Copy CHANGELOG from your release branch. Add empty Unrelease section. 3. Update SNAPSHOT version number. Remember to use your version number! - ``` + ```shell etc/scripts/release.sh --version=0.7.1-SNAPSHOT update_version ``` If you perfromed a Milestone release you will likely leave the SNAPSHOT version in master alone. 4. Add and commit changes then push - ``` + ```shell git push origin post-release-${VERSION} ``` 5. Create PR and merge into master -8. Now go to helidon-site and look at the RELEASE.md there to release the website with updated docs +10. 
Now go to helidon-site and look at the RELEASE.md there to release the website with updated docs # Staging Repository Profile To pull artifacts from the sonatype staging repository add this profile to your `settings.xml`: -``` - - ossrh-staging - - false - - - - ossrh-staging - OSS Sonatype Staging - https://oss.sonatype.org/content/groups/staging/ - - false - - - true - - - - - - ossrh-staging - OSS Sonatype Staging - https://oss.sonatype.org/content/groups/staging/ - - false - - - true - - - - +```xml + + ossrh-staging + + false + + + + ossrh-staging + OSS Sonatype Staging + https://oss.sonatype.org/content/groups/staging/ + + false + + + true + + + + + + ossrh-staging + OSS Sonatype Staging + https://oss.sonatype.org/content/groups/staging/ + + false + + + true + + + + ``` diff --git a/etc/scripts/actions/assign-issue-to-project.sh b/etc/scripts/assign-issue-to-project.sh similarity index 51% rename from etc/scripts/actions/assign-issue-to-project.sh rename to etc/scripts/assign-issue-to-project.sh index f34d2be2fc6..e8c30950eca 100755 --- a/etc/scripts/actions/assign-issue-to-project.sh +++ b/etc/scripts/assign-issue-to-project.sh @@ -1,6 +1,6 @@ #!/bin/bash -l # -# Copyright (c) 2021, 2022 Oracle and/or its affiliates. +# Copyright (c) 2021, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,29 +28,28 @@ set -e # Immediately exit if any command has a non-zero exit status set -u # Immediately exit if an uninitialized variable is referenced -readonly REPOSITORY_FULL_NAME="$1" -readonly ISSUE_NUMBER="$2" -readonly PROJECT_NAME="$3" -readonly COLUMN_NAME="$4" +readonly REPOSITORY_FULL_NAME="${1}" +readonly ISSUE_NUMBER="${2}" +readonly PROJECT_NAME="${3}" +readonly COLUMN_NAME="${4}" -if [ -z "${REPOSITORY_FULL_NAME}" -o -z "${ISSUE_NUMBER}" -o -z "${PROJECT_NAME}" -o -z "${COLUMN_NAME}" ]; then - echo "usage: $0 " +if [ -z "${REPOSITORY_FULL_NAME}" ] || [ -z "${ISSUE_NUMBER}" ] || [ -z "${PROJECT_NAME}" ] || [ -z "${COLUMN_NAME}" ]; then + echo "usage: ${0} " exit 1 fi #echo "DEBUG: $1 $2 $3 $4" -readonly OWNER_NAME=$(echo ${REPOSITORY_FULL_NAME} | cut -d/ -f1) -readonly REPOSITORY_NAME=$(echo ${REPOSITORY_FULL_NAME} | cut -d/ -f2) - -readonly GITHUB_API="https://api.github.com" +OWNER_NAME=$(echo "${REPOSITORY_FULL_NAME}" | cut -d/ -f1) +GITHUB_API="https://api.github.com" +GET_ISSUE_URL="${GITHUB_API}/repos/${REPOSITORY_FULL_NAME}/issues/${ISSUE_NUMBER}" +readonly OWNER_NAME GITHUB_API GET_ISSUE_URL # Verify issue number is valid -readonly GET_ISSUE_URL="${GITHUB_API}/repos/${REPOSITORY_FULL_NAME}/issues/${ISSUE_NUMBER}" HTTP_CODE=$(curl -o /dev/null -X GET -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ -s -w "%{http_code}" \ -H 'Accept: application/vnd.github.inertia-preview+json' \ - "$GET_ISSUE_URL") + "${GET_ISSUE_URL}") if [ "${HTTP_CODE}" == "404" ]; then echo "Could not find issue number ${ISSUE_NUMBER} in ${REPOSITORY_FULL_NAME}" @@ -58,16 +57,18 @@ if [ "${HTTP_CODE}" == "404" ]; then fi # Get issue's ID -readonly ISSUE=$(curl -s -X GET -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ +ISSUE=$(curl -s -X GET -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ -H 'Accept: application/vnd.github.inertia-preview+json' \ - "$GET_ISSUE_URL") -readonly ISSUEID=$(echo "$ISSUE" | jq -r ".id") + "${GET_ISSUE_URL}") +ISSUE_ID=$(echo "${ISSUE}" | jq -r ".id") +readonly ISSUE ISSUE_ID # Get list of all projects. Assume there are less than 100! 
-readonly LIST_PROJECTS_URL="${GITHUB_API}/repos/${REPOSITORY_FULL_NAME}/projects?per_page=100" -readonly PROJECTS=$(curl -s -X GET -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ +LIST_PROJECTS_URL="${GITHUB_API}/repos/${REPOSITORY_FULL_NAME}/projects?per_page=100" +PROJECTS=$(curl -s -X GET -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ -H 'Accept: application/vnd.github.inertia-preview+json' \ - "$LIST_PROJECTS_URL") + "${LIST_PROJECTS_URL}") +readonly LIST_PROJECTS_URL PROJECTS if [ -z "${PROJECTS}" ]; then echo "Found no projects in ${REPOSITORY_FULL_NAME}" @@ -75,27 +76,30 @@ if [ -z "${PROJECTS}" ]; then fi # Extract projectid for the given project name -readonly PROJECTID=$(echo "$PROJECTS" | jq -r ".[] | select(.name == \"$PROJECT_NAME\").id") -if [ -z "${PROJECTID}" ]; then - echo "Could not find project ${PROJECT_NAME} in ${REPOSITORY_FULL_NAME}" +PROJECT_ID=$(echo "${PROJECTS}" | jq -r ".[] | select(.name == \"${PROJECT_NAME}\").id") +readonly PROJECT_ID +if [ -z "${PROJECT_ID}" ]; then + echo "Could not find project ${PROJECT_NAME} in ${REPOSITORY_FULL_NAME}" exit 1 fi # Get list of columns on the project -readonly LIST_COLUMNS_URL="${GITHUB_API}/projects/${PROJECTID}/columns" -readonly COLUMNS=$(curl -s -X GET -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ +LIST_COLUMNS_URL="${GITHUB_API}/projects/${PROJECT_ID}/columns" +COLUMNS=$(curl -s -X GET -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ -H 'Accept: application/vnd.github.inertia-preview+json' \ - "$LIST_COLUMNS_URL") + "${LIST_COLUMNS_URL}") +readonly LIST_COLUMNS_URL COLUMNS # Extract columnid for the given column -readonly COLUMNID=$(echo "$COLUMNS" | jq -r ".[] | select(.name == \"$COLUMN_NAME\").id") -if [ -z "${COLUMNID}" ]; then +COLUMN_ID=$(echo "$COLUMNS" | jq -r ".[] | select(.name == \"${COLUMN_NAME}\").id") +readonly COLUMN_ID +if [ -z "${COLUMN_ID}" ]; then echo "Could not find column named ${COLUMN_NAME} in project ${PROJECT_NAME} in ${REPOSITORY_FULL_NAME}" exit 1 fi -echo "Assigning issue ${ISSUE_NUMBER}:${ISSUEID} to column ${COLUMN_NAME}:${COLUMNID} in project ${PROJECT_NAME}:${PROJECTID} in ${REPOSITORY_FULL_NAME}" +echo "Assigning issue ${ISSUE_NUMBER}:${ISSUE_ID} to column ${COLUMN_NAME}:${COLUMN_ID} in project ${PROJECT_NAME}:${PROJECT_ID} in ${REPOSITORY_FULL_NAME}" # Add issue to project board column @@ -103,18 +107,17 @@ HTTP_CODE=$(curl -s -X POST -u "${OWNER_NAME}:${GITHUB_API_KEY}" --retry 3 \ -o /dev/null \ -w "%{http_code}" \ -H 'Accept: application/vnd.github.inertia-preview+json' \ - -d "{\"content_type\": \"Issue\", \"content_id\": $ISSUEID}" \ - "${GITHUB_API}/projects/columns/$COLUMNID/cards") + -d "{\"content_type\": \"Issue\", \"content_id\": ${ISSUE_ID}}" \ + "${GITHUB_API}/projects/columns/${COLUMN_ID}/cards") if [ "${HTTP_CODE}" == "422" ]; then - echo "Issue ${ISSUE_NUMBER}:${ISSUEID} already exists in ${COLUMN_NAME}:${COLUMNID} in project ${PROJECT_NAME}:${PROJECTID} in ${REPOSITORY_FULL_NAME}" + echo "Issue ${ISSUE_NUMBER}:${ISSUE_ID} already exists in ${COLUMN_NAME}:${COLUMN_ID} in project ${PROJECT_NAME}:${PROJECT_ID} in ${REPOSITORY_FULL_NAME}" exit 0 fi -if [ "${HTTP_CODE}" == "200" -o "${HTTP_CODE}" == 201 -o "${HTTP_CODE}" == 204 ]; then +if [ "${HTTP_CODE}" == "200" ] || [ "${HTTP_CODE}" == 201 ] || [ "${HTTP_CODE}" == 204 ]; then exit 0 fi -echo "Error adding ${ISSUE_NUMBER}:${ISSUEID} to ${COLUMN_NAME}:${COLUMNID} in project ${PROJECT_NAME}:${PROJECTID} in ${REPOSITORY_FULL_NAME}: ${HTTP_CODE}" -exit ${HTTP_CODE} - +echo "Error adding ${ISSUE_NUMBER}:${ISSUE_ID} to 
${COLUMN_NAME}:${COLUMN_ID} in project ${PROJECT_NAME}:${PROJECT_ID} in ${REPOSITORY_FULL_NAME}: ${HTTP_CODE}" +exit "${HTTP_CODE}" diff --git a/etc/scripts/build.sh b/etc/scripts/build.sh deleted file mode 100755 index 7d75e58f900..00000000000 --- a/etc/scripts/build.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2018, 2024 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} --version - -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - clean install -e \ - -Dmaven.test.failure.ignore=true \ - -Pexamples,archetypes,spotbugs,javadoc,sources,tck,tests,pipeline - -# Build site and agregated javadocs -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml site diff --git a/etc/scripts/checkstyle.sh b/etc/scripts/checkstyle.sh index 0ea1448c781..96ede1599f5 100755 --- a/etc/scripts/checkstyle.sh +++ b/etc/scripts/checkstyle.sh @@ -1,6 +1,6 @@ -#!/bin/bash -e +#!/bin/bash # -# Copyright (c) 2018, 2022 Oracle and/or its affiliates. +# Copyright (c) 2018, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,26 +15,49 @@ # limitations under the License. # +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR + # Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + # shellcheck disable=SC155 + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. 
; pwd -P) +readonly WS_DIR -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +LOG_FILE=$(mktemp -t XXXcheckstyle-log) +readonly LOG_FILE -readonly LOG_FILE=$(mktemp -t XXXcheckstyle-log) +RESULT_FILE=$(mktemp -t XXXcheckstyle-result) +readonly RESULT_FILE -readonly RESULT_FILE=$(mktemp -t XXXcheckstyle-result) +die(){ echo "${1}" ; exit 1 ;} -die() { echo "${1}" ; exit 1 ;} +# Remove cache +rm -f "${WS_DIR}"/target/checkstyle-* +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} checkstyle:checkstyle-aggregate \ - -f ${WS_DIR}/pom.xml \ + -f "${WS_DIR}"/pom.xml \ -Dcheckstyle.output.format="plain" \ -Dcheckstyle.output.file="${RESULT_FILE}" \ - -Pexamples,ossrh-releases > ${LOG_FILE} 2>&1 || (cat ${LOG_FILE} ; exit 1) + > ${LOG_FILE} 2>&1 || (cat ${LOG_FILE} ; exit 1) -grep "^\[ERROR\]" ${RESULT_FILE} \ +grep "^\[ERROR\]" "${RESULT_FILE}" \ && die "CHECKSTYLE ERROR" || echo "CHECKSTYLE OK" diff --git a/etc/scripts/copyright.sh b/etc/scripts/copyright.sh index 0bc45dc0b66..5ac5e0120e4 100755 --- a/etc/scripts/copyright.sh +++ b/etc/scripts/copyright.sh @@ -1,6 +1,6 @@ -#!/bin/bash -e +#!/bin/bash # -# Copyright (c) 2018, 2022 Oracle and/or its affiliates. +# Copyright (c) 2018, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,29 +15,47 @@ # limitations under the License. # -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +# Path to this script +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. 
; pwd -P) +readonly WS_DIR -readonly LOG_FILE=$(mktemp -t XXXcopyright-log) +LOG_FILE=$(mktemp -t XXXcopyright-log) +readonly LOG_FILE -readonly RESULT_FILE=$(mktemp -t XXXcopyright-result) +RESULT_FILE=$(mktemp -t XXXcopyright-result) +readonly RESULT_FILE die() { echo "${1}" ; exit 1 ;} +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} \ - -f ${WS_DIR}/pom.xml \ - -Dhelidon.enforcer.output.file="${RESULT_FILE}" \ - -Dhelidon.enforcer.rules=copyright \ - -Dhelidon.enforcer.failOnError=false \ - -Pcopyright \ - -N \ - validate > ${LOG_FILE} 2>&1 || (cat ${LOG_FILE} ; exit 1) - -grep "^\[ERROR\]" ${RESULT_FILE} \ + -N -f ${WS_DIR}/pom.xml \ + -Dhelidon.enforcer.output.file="${RESULT_FILE}" \ + -Dhelidon.enforcer.rules=copyright \ + -Dhelidon.enforcer.failOnError=false \ + -Pcopyright \ + validate > ${LOG_FILE} 2>&1 || (cat ${LOG_FILE} ; exit 1) + +grep "^\[ERROR\]" "${RESULT_FILE}" \ && die "COPYRIGHT ERROR" || echo "COPYRIGHT OK" diff --git a/etc/scripts/actions/create-backport-issues.sh b/etc/scripts/create-backport-issues.sh similarity index 67% rename from etc/scripts/actions/create-backport-issues.sh rename to etc/scripts/create-backport-issues.sh index 12e1ae9cf16..ad36d3f487c 100755 --- a/etc/scripts/actions/create-backport-issues.sh +++ b/etc/scripts/create-backport-issues.sh @@ -34,28 +34,28 @@ function join_by { echo "$*" } -readonly REPOSITORY_FULL_NAME="$1" -readonly ISSUE_NUMBER="$2" -readonly HELIDON_VERSION="$3" +readonly REPOSITORY_FULL_NAME="${1}" +readonly ISSUE_NUMBER="${2}" +readonly HELIDON_VERSION="${3}" -if [ -z "${REPOSITORY_FULL_NAME}" -o -z "${ISSUE_NUMBER}" -o -z "${HELIDON_VERSION}" -o $# -le 3 ]; then - echo "usage: $0 " +if [ -z "${REPOSITORY_FULL_NAME}" ] || [ -z "${ISSUE_NUMBER}" ] || [ -z "${HELIDON_VERSION}" ] || [ ${#} -le 3 ]; then + echo "usage: ${0} " exit 1 fi -readonly OWNER_NAME=$(echo ${REPOSITORY_FULL_NAME} | cut -d/ -f1) -readonly REPOSITORY_NAME=$(echo ${REPOSITORY_FULL_NAME} | cut -d/ -f2) - -readonly GITHUB_API="https://api.github.com" +OWNER_NAME=$(echo "${REPOSITORY_FULL_NAME}" | cut -d/ -f1) +REPOSITORY_NAME=$(echo "${REPOSITORY_FULL_NAME}" | cut -d/ -f2) +GITHUB_API="https://api.github.com" +GET_ISSUE_URL="${GITHUB_API}/repos/${REPOSITORY_FULL_NAME}/issues/${ISSUE_NUMBER}" +readonly OWNER_NAME REPOSITORY_NAME GITHUB_API GET_ISSUE_URL # Verify issue number is valid -readonly GET_ISSUE_URL="${GITHUB_API}/repos/${REPOSITORY_FULL_NAME}/issues/${ISSUE_NUMBER}" HTTP_CODE=$(curl -o /dev/null -X GET \ -H "Authorization: Bearer ${GITHUB_API_KEY}" \ --retry 3 \ -s -w "%{http_code}" \ -H 'Accept: application/vnd.github.inertia-preview+json' \ - "$GET_ISSUE_URL") + "${GET_ISSUE_URL}") if [ "${HTTP_CODE}" == "404" ]; then echo "Could not find issue number ${ISSUE_NUMBER} in ${REPOSITORY_FULL_NAME}" @@ -63,29 +63,32 @@ if [ "${HTTP_CODE}" == "404" ]; then fi # Get issue information -readonly ISSUE=$(curl -s -X GET \ +ISSUE=$(curl -s -X GET \ -H "Authorization: Bearer ${GITHUB_API_KEY}" \ --retry 3 \ -H 'Accept: application/vnd.github.inertia-preview+json' \ - "$GET_ISSUE_URL") + "${GET_ISSUE_URL}") +readonly ISSUE # Get issue information issue_title=$(echo "$ISSUE" | jq -r ".title") -readonly ISSUE_ASSIGNEE=$(echo "$ISSUE" | jq -r ".assignee.login") -readonly ISSUE_LABELS=$(echo "$ISSUE" | jq -r ".labels") # JSON Array +ISSUE_ASSIGNEE=$(echo "$ISSUE" | jq -r ".assignee.login") +ISSUE_LABELS=$(echo "$ISSUE" | jq -r ".labels") # JSON Array +readonly ISSUE_ASSIGNEE ISSUE_LABELS # Create an issue for each version that is not used -readonly VERSIONS=("2.x" "3.x" 
"4.x") +VERSIONS=("2.x" "3.x" "4.x") +readonly VERSIONS ############################################################ # If original issue does not have a version label, add it ############################################################ version_labels=() for row in $(echo "${ISSUE_LABELS}" | jq -r '.[] | @base64'); do - label=$(echo ${row} | base64 --decode) - label_text=$(echo $label | jq -r ".name") + label=$(echo "${row}" | base64 --decode) + label_text=$(echo "${label}" | jq -r ".name") - if [[ " ${VERSIONS[*]} " =~ " ${label_text} " ]]; then + if [[ " ${VERSIONS[*]} " =~ [:blank:]${label_text}[:blank:] ]]; then version_labels+=("\"${label_text}\"") fi done @@ -99,7 +102,7 @@ if [ ${#version_labels[@]} -eq 0 ]; then --retry 3 \ -H "Authorization: Bearer ${GITHUB_API_KEY}" \ -H "Accept: application/vnd.github+json" \ - https://api.github.com/repos/${OWNER_NAME}/${REPOSITORY_NAME}/issues/${ISSUE_NUMBER}/labels \ + "https://api.github.com/repos/${OWNER_NAME}/${REPOSITORY_NAME}/issues/${ISSUE_NUMBER}/labels" \ -d "{\"labels\":[\"${HELIDON_VERSION}\"]}") if [ "${HTTP_CODE}" != "200" ]; then @@ -109,19 +112,18 @@ if [ ${#version_labels[@]} -eq 0 ]; then fi # Replace all instances of " with ' in the Issue Title to avoid JSON parsing issue -issue_title=$(sed "s/\"/'/g" <<< "$issue_title") +issue_title=${issue_title//\"/\'} ############################################################ # For each version the caller specified add a porting issue. ############################################################ -version_targets=() next_version_to_check=2 for is_version_selected in "${@:4}"; do version=${next_version_to_check}.x next_version_to_check=$((next_version_to_check+1)) - if [ "$version" != "$HELIDON_VERSION" -a "$is_version_selected" = "true" ]; then + if [ "${version}" != "${HELIDON_VERSION}" ] && [ "${is_version_selected}" = "true" ]; then # Create issue for other indicated versions and add the same labels and assignee - new_issue_title="[$version] ${issue_title}" + new_issue_title="[${version}] ${issue_title}" new_issue_text="Backport of #${ISSUE_NUMBER} for Helidon ${version}" # by default, add label for the version we are backporting into, and for backport itself @@ -129,10 +131,10 @@ for is_version_selected in "${@:4}"; do # then add all labels from the issue that are not version labels for row in $(echo "${ISSUE_LABELS}" | jq -r '.[] | @base64'); do - label=$(echo ${row} | base64 --decode) - label_text=$(echo $label | jq -r ".name") + label=$(echo "${row}" | base64 --decode) + label_text=$(echo "${label}" | jq -r ".name") - if [[ ! " ${VERSIONS[*]} " =~ " ${label_text} " ]]; then + if [[ ! 
" ${VERSIONS[*]} " =~ [:blank:]${label_text}[:blank:] ]]; then labels_to_add+=("\"${label_text}\"") fi done @@ -142,9 +144,9 @@ for is_version_selected in "${@:4}"; do # create request JSON (if original issue does not have an assignee, do not add it to new issue) if [ "${ISSUE_ASSIGNEE}" == "null" ]; then - new_issue_json="{\"title\":\"$new_issue_title\",\"body\":\"$new_issue_text\",\"labels\":[$labels_text]}" + new_issue_json="{\"title\":\"${new_issue_title}\",\"body\":\"${new_issue_text}\",\"labels\":[${labels_text}]}" else - new_issue_json="{\"title\":\"$new_issue_title\",\"body\":\"$new_issue_text\",\"assignees\":[\"$ISSUE_ASSIGNEE\"],\"labels\":[$labels_text]}" + new_issue_json="{\"title\":\"${new_issue_title}\",\"body\":\"${new_issue_text}\",\"assignees\":[\"${ISSUE_ASSIGNEE}\"],\"labels\":[${labels_text}]}" fi # create the issue using Github API @@ -154,17 +156,17 @@ for is_version_selected in "${@:4}"; do --retry 3 \ -H "Authorization: Bearer ${GITHUB_API_KEY}" \ -H "Accept: application/vnd.github+json" \ - https://api.github.com/repos/${OWNER_NAME}/${REPOSITORY_NAME}/issues \ - -d "$new_issue_json") + "https://api.github.com/repos/${OWNER_NAME}/${REPOSITORY_NAME}/issues" \ + -d "${new_issue_json}") - new_issue_number=$(echo $new_issue | jq -r ".number") - new_issue_url=$(echo $new_issue | jq -r ".html_url") + new_issue_number=$(echo "${new_issue}" | jq -r ".number") + new_issue_url=$(echo "${new_issue}" | jq -r ".html_url") # Print out the Github API Server response if unable to parse the issue number. Also display the # json payload that was sent so it can be inspected for problems if such issue occur. if [ "${new_issue_number}" == "null" ]; then - echo "Encountered an error while attempting to create an issue: $new_issue" - echo "Json payload: $new_issue_json" + echo "Encountered an error while attempting to create an issue: ${new_issue}" + echo "Json payload: ${new_issue_json}" else echo "Created issue for version ${version}, issue number: ${new_issue_number}, url: ${new_issue_url}" fi diff --git a/etc/scripts/docs.sh b/etc/scripts/docs.sh deleted file mode 100755 index 773e59fe1b9..00000000000 --- a/etc/scripts/docs.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2022, 2023 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' 
- -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} --version - -# Temporary workaround until job stages will share maven repository -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - install -e \ - -Dmaven.test.skip=true \ - -DskipTests \ - -Ppipeline - -mvn ${MAVEN_ARGS} -f ${WS_DIR}/docs/pom.xml \ - install -e \ - -Pjavadoc diff --git a/etc/scripts/gen-javadoc-packagelist.sh b/etc/scripts/gen-javadoc-packagelist.sh index 102da41ba3a..fa810df9ae1 100755 --- a/etc/scripts/gen-javadoc-packagelist.sh +++ b/etc/scripts/gen-javadoc-packagelist.sh @@ -1,6 +1,6 @@ #!/bin/bash -e # -# Copyright (c) 2019, 2023 Oracle and/or its affiliates. +# Copyright (c) 2019, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -32,13 +32,20 @@ # # Path to this script -# shellcheck disable=SC2015 -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH # Path to the root of the workspace -readonly WS_DIR=$(cd "$(dirname -- "${SCRIPT_PATH}")" ; cd ../.. ; pwd -P) +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. ; pwd -P) +readonly WS_DIR -readonly tmpOutputDir=/tmp +TMP_OUTPUT_DIR=/tmp +readonly TMP_OUTPUT_DIR # The project pom file has a set of properties that look like this: # @@ -54,7 +61,7 @@ linkPropNames=() execArgs="" i=0 while read -r linkProp ; do - linkPropNames[${i}]=${linkProp} + linkPropNames[i]=${linkProp} execArgs+="\${javadoc.link.${linkProp}} " i=$((i+1)) done < <(grep "' -f1 | cut -d '.' -f3) @@ -79,14 +86,14 @@ for ((i=0;i<${#linkPropNames[@]};i++)) # Go get package-list file! We save in a temp file so we don't overwrite # anything in the workspace until we know the request is good - code=$(curl -L -s --user-agent '' -o ${tmpOutputDir}/package-list -w "%{http_code}" "${value}/package-list") + code=$(curl -L -s --user-agent '' -o ${TMP_OUTPUT_DIR}/package-list -w "%{http_code}" "${value}/package-list") if [ "$code" -ne "200" ]; then # No package-list. Try element-list - rm -f ${tmpOutputDir}/package-list - code=$(curl -L -s --user-agent '' -o ${tmpOutputDir}/element-list -w "%{http_code}" "${value}/element-list") + rm -f ${TMP_OUTPUT_DIR}/package-list + code=$(curl -L -s --user-agent '' -o ${TMP_OUTPUT_DIR}/element-list -w "%{http_code}" "${value}/element-list") if [ "$code" -ne "200" ]; then - rm -f ${tmpOutputDir}/element-list + rm -f ${TMP_OUTPUT_DIR}/element-list echo "${code} ${name} ${value}" echo "WARNING! Could not download package-list nor element-list for" >&2 echo "${value}" >&2 @@ -94,11 +101,11 @@ for ((i=0;i<${#linkPropNames[@]};i++)) echo "${outputDir}/" >&2 else echo "${code} ${name} ${value}/element-list" - mv ${tmpOutputDir}/element-list "${outputDir}/" + mv ${TMP_OUTPUT_DIR}/element-list "${outputDir}/" fi else echo "${code} ${name} ${value}/package-list" - mv ${tmpOutputDir}/package-list "${outputDir}/" + mv ${TMP_OUTPUT_DIR}/package-list "${outputDir}/" fi } diff --git a/etc/scripts/github-build.sh b/etc/scripts/github-build.sh deleted file mode 100755 index f160b5b6d54..00000000000 --- a/etc/scripts/github-build.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2018, 2022 Oracle and/or its affiliates. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} --version - -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - install -e \ - -Dmaven.test.failure.ignore=false \ - -Pjavadoc,sources,tests,pipeline diff --git a/etc/scripts/github-compile.sh b/etc/scripts/github-compile.sh deleted file mode 100755 index 2c01d8d8277..00000000000 --- a/etc/scripts/github-compile.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2022 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - install -e \ - -Dmaven.test.skip=true \ - -DskipTests \ - -Ppipeline diff --git a/etc/scripts/github-release.sh b/etc/scripts/github-release.sh index 66af2354ce4..5c16be53a6b 100755 --- a/etc/scripts/github-release.sh +++ b/etc/scripts/github-release.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2020, 2023 Oracle and/or its affiliates. +# Copyright (c) 2020, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,14 +15,17 @@ # limitations under the License. # -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value -# Load error handling functions and define error handling -. 
$(dirname -- "${SCRIPT_PATH}")/includes/error_handlers.sh - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR usage(){ cat <> ${HOME}/.npmrc - fi - if [ -n "${https_proxy}" ] ; then - echo "https-proxy = ${https_proxy}" >> ${HOME}/.npmrc - fi - if [ -n "${http_proxy}" ] ; then - echo "proxy = ${http_proxy}" >> ${HOME}/.npmrc - fi - if [ -n "${NO_PROXY}" ] ; then - echo "noproxy = ${NO_PROXY}" >> ${HOME}/.npmrc - fi - fi - - if [ -n "${GPG_PUBLIC_KEY}" ] ; then - gpg --import --no-tty --batch ${GPG_PUBLIC_KEY} - fi - if [ -n "${GPG_PRIVATE_KEY}" ] ; then - gpg --allow-secret-key-import --import --no-tty --batch ${GPG_PRIVATE_KEY} - fi - if [ -n "${GPG_PASSPHRASE}" ] ; then - echo "allow-preset-passphrase" >> ~/.gnupg/gpg-agent.conf - gpg-connect-agent reloadagent /bye - GPG_KEYGRIP=$(gpg --with-keygrip -K | grep "Keygrip" | head -1 | awk '{print $3}') - /usr/lib/gnupg/gpg-preset-passphrase --preset "${GPG_KEYGRIP}" <<< "${GPG_PASSPHRASE}" - fi - fi - - if [ -n "${RELEASE_WORKFLOW}" ] ; then - if [ -n "${MAVEN_SETTINGS}" ] ; then - export MAVEN_SETTINGS_FILE="${HOME}/.m2/settings.xml" - echo "${MAVEN_SETTINGS}" > "${MAVEN_SETTINGS_FILE}" - MAVEN_ARGS="${MAVEN_ARGS} -s ${MAVEN_SETTINGS_FILE}" - fi - if [ -n "${GPG_PUBLIC_KEY}" ] ; then - tmpfile=$(mktemp /tmp/pub.XXXXXX.key) - echo "${GPG_PUBLIC_KEY}" > "${tmpfile}" - gpg --import --no-tty --batch "${tmpfile}" - rm "$tmpfile" - fi - if [ -n "${GPG_PRIVATE_KEY}" ] ; then - tmpfile=$(mktemp /tmp/pri.XXXXXX.key) - echo "${GPG_PRIVATE_KEY}" > "${tmpfile}" - gpg --allow-secret-key-import --import --no-tty --batch "${tmpfile}" - rm "$tmpfile" - fi - if [ -n "${GPG_PASSPHRASE}" ] ; then - echo "allow-preset-passphrase" >> ~/.gnupg/gpg-agent.conf - gpg-connect-agent reloadagent /bye - GPG_KEYGRIP=$(gpg --with-keygrip -K | grep "Keygrip" | head -1 | awk '{print $3}') - /usr/lib/gnupg/gpg-preset-passphrase --preset "${GPG_KEYGRIP}" <<< "${GPG_PASSPHRASE}" - fi - fi - export MAVEN_ARGS - export MAVEN_OPTS -else - echo "WARNING: ${WS_DIR}/etc/scripts/includes/pipeline-env.sh included multiple times." -fi diff --git a/etc/scripts/mp-tck.sh b/etc/scripts/mp-tck.sh deleted file mode 100755 index 8edf13077b5..00000000000 --- a/etc/scripts/mp-tck.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2022 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' 
- -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - install -e \ - -Dmaven.test.skip=true \ - -DskipTests \ - -Ppipeline - -# Run MicroProfile TCK tests -cd ${WS_DIR}/microprofile/tests/tck - -# Prime build all native-image tests -mvn ${MAVEN_ARGS} verify -Ptck-ft diff --git a/etc/scripts/owasp-dependency-check.sh b/etc/scripts/owasp-dependency-check.sh index dcf2941230a..34e00014f1f 100755 --- a/etc/scripts/owasp-dependency-check.sh +++ b/etc/scripts/owasp-dependency-check.sh @@ -1,6 +1,6 @@ #!/bin/bash -e # -# Copyright (c) 2020, 2023 Oracle and/or its affiliates. +# Copyright (c) 2020, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,31 +15,50 @@ # limitations under the License. # +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + # Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + # shellcheck disable=SC155 + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. ; pwd -P) -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -readonly RESULT_FILE=$(mktemp -t XXXdependency-check-result) +RESULT_FILE=$(mktemp -t XXXdependency-check-result) +readonly RESULT_FILE -die() { cat ${RESULT_FILE} ; echo "Dependency report in ${WS_DIR}/target" ; echo "${1}" ; exit 1 ;} +die() { cat "${RESULT_FILE}" ; echo "Dependency report in ${WS_DIR}/target" ; echo "${1}" ; exit 1 ;} if [ "${PIPELINE}" = "true" ] ; then # If in pipeline do a priming build before scan - mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml clean install -DskipTests + # shellcheck disable=SC2086 + mvn ${MAVEN_ARGS} -f "${WS_DIR}"/pom.xml clean install -DskipTests fi # Setting NVD_API_KEY is not required but improves behavior of NVD API throttling +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN org.owasp:dependency-check-maven:aggregate \ - -f ${WS_DIR}/pom.xml \ + -f "${WS_DIR}"/pom.xml \ -Dtop.parent.basedir="${WS_DIR}" \ - -Dnvd-api-key=${NVD_API_KEY} \ - > ${RESULT_FILE} || die "Error running the Maven command" + -Dnvd-api-key="${NVD_API_KEY}" \ + > "${RESULT_FILE}" || die "Error running the Maven command" -grep -i "One or more dependencies were identified with known vulnerabilities" ${RESULT_FILE} \ +grep -i "One or more dependencies were identified with known vulnerabilities" "${RESULT_FILE}" \ && die "CVE SCAN ERROR" || echo "CVE SCAN OK" diff --git a/etc/scripts/release.sh b/etc/scripts/release.sh index 72917c36fce..f719394be30 100755 --- a/etc/scripts/release.sh +++ b/etc/scripts/release.sh @@ -15,14 +15,31 @@ # limitations under the License. 
# -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +# Path to this script +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + # shellcheck disable=SC155 + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. ; pwd -P) +readonly WS_DIR usage(){ cat < 0 )); do + case ${1} in "--version="*) - VERSION=${ARG#*=} + VERSION=${1#*=} + shift ;; "--help") usage exit 0 ;; + "update_version"|"release_version"|"create_tag"|"release_build"|"deploy_snapshot") + COMMAND="${1}" + shift + ;; *) - if [ "${ARG}" = "update_version" ] || [ "${ARG}" = "release_build" ] || [ "${ARG}" = "deploy_snapshot" ] ; then - readonly COMMAND="${ARG}" - else - echo "ERROR: unknown argument: ${ARG}" - exit 1 - fi + ARGS+=( "${1}" ) + shift ;; esac -} +done +readonly ARGS +readonly COMMAND -if [ -z "${COMMAND}" ] ; then - echo "ERROR: no command provided" - usage - exit 1 +if [ -z "${COMMAND+x}" ] ; then + echo "ERROR: no command provided" + exit 1 fi -# Hooks for version substitution work -readonly PREPARE_HOOKS=( ) +# copy stdout as fd 6 and redirect stdout to stderr +# this allows us to use fd 6 for returning data exec 6>&1 1>&2 -# Hooks for deployment work -readonly PERFORM_HOOKS=( ) - -# Resolve FULL_VERSION -if [ -z "${VERSION+x}" ]; then - - # get maven version - MVN_VERSION=$(mvn ${MAVEN_ARGS} \ - -q \ - -f ${WS_DIR}/pom.xml \ current_version() { # shellcheck disable=SC2086 mvn ${MAVEN_ARGS} -q \ -f "${WS_DIR}"/pom.xml \ -Dexec.executable="echo" \ -Dexec.args="\${project.version}" \ --non-recursive \ - org.codehaus.mojo:exec-maven-plugin:1.3.1:exec) + org.codehaus.mojo:exec-maven-plugin:1.3.1:exec +} - # strip qualifier - readonly VERSION="${MVN_VERSION%-*}" - readonly FULL_VERSION="${VERSION}" -else - readonly FULL_VERSION="${VERSION}" -fi +release_version() { + local current_version + current_version=$(current_version) + echo "${current_version%-*}" +} -export FULL_VERSION -printf "\n%s: FULL_VERSION=%s\n\n" "$(basename ${0})" "${FULL_VERSION}" +replace() { + local pattern value include + pattern="${1}" + value="${2}" + include="${3}" + for file in $(grep "${pattern}" -Er . 
--include "${include}" | cut -d ':' -f 1 | sort | uniq); do + echo "Updating ${file}" + sed -e s@"${pattern}"@"${pattern/\.\*/${value}}"@g \ + < "${file}" \ + > "${file}.tmp" + mv "${file}.tmp" "${file}" + done +} update_version(){ - # Update version - mvn ${MAVEN_ARGS} -f ${WS_DIR}/parent/pom.xml versions:set versions:set-property \ - -DgenerateBackupPoms=false \ - -DnewVersion="${FULL_VERSION}" \ - -Dproperty=helidon.version \ - -DprocessAllModules=true + local version + version=${1-${VERSION}} + if [ -z "${version+x}" ] ; then + echo "ERROR: version required" + usage + exit 1 + fi + + # shellcheck disable=SC2086 + mvn ${MAVEN_ARGS} "${ARGS[@]}" \ + -f ${WS_DIR}/parent/pom.xml versions:set versions:set-property \ + -DgenerateBackupPoms="false" \ + -DnewVersion="${version}" \ + -Dproperty="helidon.version" \ + -DprocessFromLocalAggregationRoot="false" \ + -DupdateMatchingVersions="false" # Hack to update helidon.version - for pom in `egrep ".*" -r . --include pom.xml | cut -d ':' -f 1 | sort | uniq ` - do - echo "Updating helidon.version property in ${pom} to ${FULL_VERSION}" - cat ${pom} | \ - sed -e s@'.*'@"${FULL_VERSION}"@g \ - > ${pom}.tmp - mv ${pom}.tmp ${pom} - done + replace ".*" "${version}" "pom.xml" # Hack to update helidon.version in build.gradle files - for bfile in `egrep "helidonversion = .*" -r . --include build.gradle | cut -d ':' -f 1 | sort | uniq ` - do - echo "Updating helidonversion property in ${bfile} to ${FULL_VERSION}" - cat ${bfile} | \ - sed -e s@'helidonversion = .*'@"helidonversion = \'${FULL_VERSION}\'"@g \ - > ${bfile}.tmp - mv ${bfile}.tmp ${bfile} - done + replace "helidonversion = .*" "${version}" "build.gradle" # Hack to update helidon-version in doc files - for dfile in `egrep ":helidon-version: .*" -r . --include attributes.adoc | cut -d ':' -f 1 | sort | uniq ` - do - echo "Updating helidon-version property in ${dfile} to ${FULL_VERSION}" - cat ${dfile} | \ - sed -e s@':helidon-version: .*'@":helidon-version: ${FULL_VERSION}"@g \ - > ${dfile}.tmp - mv ${dfile}.tmp ${dfile} - done + replace ":helidon-version: .*" "${version}" "attributes.adoc" # Hack to update helidon-version-is-release in doc files # We are a released version if we are not a SNAPSHOT version - if [[ ${HELIDON_VERSION} == *-SNAPSHOT ]]; then - readonly IS_RELEASED="false" + local is_released + if [[ ${version} == *-SNAPSHOT ]]; then + is_released="false" else - readonly IS_RELEASED="true" - fi - for dfile in `egrep ":helidon-version-is-release: .*" -r . --include attributes.adoc | cut -d ':' -f 1 | sort | uniq ` - do - echo "Updating helidon-version-is-release property in ${dfile} to ${IS_RELEASED}" - cat ${dfile} | \ - sed -e s@':helidon-version-is-release: .*'@":helidon-version-is-release: ${IS_RELEASED}"@g \ - > ${dfile}.tmp - mv ${dfile}.tmp ${dfile} - done - - # Invoke prepare hook - if [ -n "${PREPARE_HOOKS}" ]; then - for prepare_hook in ${PREPARE_HOOKS} ; do - bash "${prepare_hook}" - done + is_released="true" fi + replace ":helidon-version-is-release: .*" "${is_released}" "attributes.adoc" } -release_build(){ +create_tag() { + local git_branch version - VERSION_FROM_BRANCH_NAME=$(git branch --show-current | cut -d- -f2) - if [ "${FULL_VERSION}" != "${VERSION_FROM_BRANCH_NAME}" ]; then - echo "ERROR: version derived from pom files (${FULL_VERSION}) does not match version used in branch name (${VERSION_FROM_BRANCH_NAME})." 
- echo "Failing release build" - exit 1 - fi + version=$(release_version) + git_branch="release/${version}" - # Do the release work in a branch - local GIT_BRANCH="release/${FULL_VERSION}" - git branch -D "${GIT_BRANCH}" > /dev/null 2>&1 || true - git checkout -b "${GIT_BRANCH}" + # Use a separate branch + git branch -D "${git_branch}" > /dev/null 2>&1 || true + git checkout -b "${git_branch}" # Invoke update_version - update_version - - # Update scm/tag entry in the parent pom - cat parent/pom.xml | \ - sed -e s@'HEAD'@"${FULL_VERSION}"@g \ - > parent/pom.xml.tmp - mv parent/pom.xml.tmp parent/pom.xml + update_version "${version}" # Git user info git config user.email || git config --global user.email "info@helidon.io" git config user.name || git config --global user.name "Helidon Robot" # Commit version changes - git commit -a -m "Release ${FULL_VERSION} [ci skip]" - - # Create the nexus staging repository - local STAGING_DESC="Helidon v${FULL_VERSION}" - mvn ${MAVEN_ARGS} nexus-staging:rc-open \ - -DstagingProfileId="6026dab46eed94" \ - -DstagingDescription="${STAGING_DESC}" - - export STAGING_REPO_ID=$(mvn ${MAVEN_ARGS} nexus-staging:rc-list | \ - egrep "^[0-9:,]*[ ]?\[INFO\] iohelidon\-[0-9]+[ ]+OPEN[ ]+${STAGING_DESC}" | \ - awk '{print $2" "$3}' | \ - sed -e s@'\[INFO\] '@@g -e s@'OPEN'@@g | \ - head -1) - echo "Nexus staging repository ID: ${STAGING_REPO_ID}" - - # Perform deployment - mvn ${MAVEN_ARGS} clean deploy \ - -Prelease,archetypes,javadoc,docs \ - -DskipTests \ - -DstagingRepositoryId="${STAGING_REPO_ID}" \ - -DretryFailedDeploymentCount="10" - - # Invoke perform hooks - if [ -n "${PERFORM_HOOKS}" ]; then - for perform_hook in ${PERFORM_HOOKS} ; do - bash "${perform_hook}" - done - fi - - # Close the nexus staging repository - mvn ${MAVEN_ARGS} nexus-staging:rc-close \ - -DstagingRepositoryId="${STAGING_REPO_ID}" \ - -DstagingDescription="${STAGING_DESC}" + git commit -a -m "Release ${version}" # Create and push a git tag - git tag -f "${FULL_VERSION}" - if [ -n "${JENKINS_HOME}" ] ; then - # In Jenkins use SSH to access remote - local GIT_REMOTE=$(git config --get remote.origin.url | \ - sed "s,https://\([^/]*\)/,git@\1:,") + git tag -f "${version}" + git push --force origin refs/tags/"${version}":refs/tags/"${version}" - git remote add release "${GIT_REMOTE}" > /dev/null 2>&1 || \ - git remote set-url release "${GIT_REMOTE}" + echo "tag=refs/tags/${version}" >&6 +} - git push --force release refs/tags/"${FULL_VERSION}":refs/tags/"${FULL_VERSION}" - else - git push --force origin refs/tags/"${FULL_VERSION}":refs/tags/"${FULL_VERSION}" +credentials() { + local tmpfile + + # Bootstrap credentials from environment + if [ -n "${MAVEN_SETTINGS}" ] ; then + tmpfile=$(mktemp XXXXXXsettings.xml) + echo "${MAVEN_SETTINGS}" > "${tmpfile}" + MAVEN_ARGS="${MAVEN_ARGS} -s ${tmpfile}" + fi + if [ -n "${GPG_PRIVATE_KEY}" ] ; then + tmpfile=$(mktemp XXXXXX.key) + echo "${GPG_PRIVATE_KEY}" > "${tmpfile}" + gpg --allow-secret-key-import --import --no-tty --batch "${tmpfile}" + rm "${tmpfile}" + fi + if [ -n "${GPG_PASSPHRASE}" ] ; then + echo "allow-preset-passphrase" >> ~/.gnupg/gpg-agent.conf + gpg-connect-agent reloadagent /bye + GPG_KEYGRIP=$(gpg --with-keygrip -K | grep "Keygrip" | head -1 | awk '{print $3}') + /usr/lib/gnupg/gpg-preset-passphrase --preset "${GPG_KEYGRIP}" <<< "${GPG_PASSPHRASE}" fi } -deploy_snapshot(){ +release_build(){ + local tmpfile version + + credentials + + # Perform local deployment + # shellcheck disable=SC2086 + mvn ${MAVEN_ARGS} "${ARGS[@]}" \ + deploy \ 
+ -Prelease \ + -DskipTests \ + -DskipRemoteStaging=true + + # Upload all artifacts to nexus + version=$(release_version) + # shellcheck disable=SC2086 + mvn ${MAVEN_ARGS} -N nexus-staging:deploy-staged \ + -DstagingDescription="Helidon v${version}" +} + +readonly NEXUS_SNAPSHOT_URL="https://oss.sonatype.org/content/repositories/snapshots/" + +deploy_snapshot() { + local version + version=$(current_version) # Make sure version ends in -SNAPSHOT - if [[ ${MVN_VERSION} != *-SNAPSHOT ]]; then - echo "Helidon version ${MVN_VERSION} is not a SNAPSHOT version. Failing snapshot release." + if [[ ${version} != *-SNAPSHOT ]]; then + echo "Helidon version ${version} is not a SNAPSHOT version. Failing snapshot release." exit 1 fi - readonly NEXUS_SNAPSHOT_URL="https://oss.sonatype.org/content/repositories/snapshots/" - echo "Deploying snapshot build ${MVN_VERSION} to ${NEXUS_SNAPSHOT_URL}" + echo "Deploying snapshot build ${version} to ${NEXUS_SNAPSHOT_URL}" + + credentials # The nexus-staging-maven-plugin had issues deploying the module # helidon-applications because the distributionManagement section is empty. @@ -269,7 +270,7 @@ deploy_snapshot(){ # property. The deployAtEnd option requires version 3.0.0 of maven-deploy-plugin # or newer to work correctly on multi-module systems set -x - mvn ${MAVEN_ARGS} -e clean deploy \ + mvn "${MAVEN_ARGS}" -e clean deploy \ -Parchetypes \ -DskipTests \ -DaltDeploymentRepository="ossrh::${NEXUS_SNAPSHOT_URL}" \ diff --git a/etc/scripts/shellcheck.sh b/etc/scripts/shellcheck.sh new file mode 100755 index 00000000000..74f4dfe464f --- /dev/null +++ b/etc/scripts/shellcheck.sh @@ -0,0 +1,53 @@ +#!/bin/bash -e +# +# Copyright (c) 2024 Oracle and/or its affiliates. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +BASE_URL="https://github.com/koalaman/shellcheck/releases/download" +readonly BASE_URL + +VERSION=0.9.0 +readonly VERSION + +CACHE_DIR="${HOME}/.shellcheck" +readonly CACHE_DIR + +# Caching the shellcheck +mkdir -p "${CACHE_DIR}" +if [ ! -e "${CACHE_DIR}/${VERSION}/shellcheck" ] ; then + ARCH=$(uname -m | tr "[:upper:]" "[:lower:]") + PLATFORM=$(uname -s | tr "[:upper:]" "[:lower:]") + curl -Lso "${CACHE_DIR}/sc.tar.xz" "${BASE_URL}/v${VERSION}/shellcheck-v${VERSION}.${PLATFORM}.${ARCH}.tar.xz" + tar -xf "${CACHE_DIR}/sc.tar.xz" -C "${CACHE_DIR}" + mkdir "${CACHE_DIR}/${VERSION}" + mv "${CACHE_DIR}/shellcheck-v${VERSION}/shellcheck" "${CACHE_DIR}/${VERSION}/shellcheck" + rm -rf "${CACHE_DIR}/shellcheck-v${VERSION}" "${CACHE_DIR}/sc.tar.xz" +fi +export PATH="${CACHE_DIR}/${VERSION}:${PATH}" + +echo "ShellCheck version" +shellcheck --version + +status_code=0 +# shellcheck disable=SC2044 +for file in $(find . 
-name "*.sh") ; do + # only check tracked files + if git ls-files --error-unmatch "${file}" > /dev/null 2>&1 ; then + printf "\n-- Checking file: %s --\n" "${file}" + shellcheck "${file}" || status_code=${?} + fi +done + +exit ${status_code} diff --git a/etc/scripts/smoketest.sh b/etc/scripts/smoketest.sh index d0913618714..a64cf6bba96 100755 --- a/etc/scripts/smoketest.sh +++ b/etc/scripts/smoketest.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2019, 2023 Oracle and/or its affiliates. +# Copyright (c) 2019, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,16 +22,34 @@ # https://oss.sonatype.org/content/groups/staging/ as a repository # See bottom of RELEASE.md for details +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR + # Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH + -# Load error handling functions and define error handling -. $(dirname -- "${SCRIPT_PATH}")/includes/error_handlers.sh +SCRIPT_DIR=$(dirname "${SCRIPT_PATH}") +readonly SCRIPT_DIR # Local error handler smoketest_on_error(){ on_error - echo "===== Log file: ${OUTPUTFILE} =====" + echo "===== Log file: ${OUTPUT_FILE} =====" # In case there is a process left running } @@ -45,7 +63,7 @@ DESCRIPTION: Helidon Smoke Test Script USAGE: -$(basename ${0}) [ --staged ] [ --giturl=URL ] [ --clean ] [--help ] --version=V CMD +$(basename "${0}") [ --staged ] [ --giturl=URL ] [ --clean ] [--help ] --version=V CMD --staged Use the OSS Sonatype Staging repository at @@ -131,16 +149,18 @@ if [ -z "${VERSION}" ] ; then exit 1 fi -readonly MAVEN_ARGS="" +MAVEN_ARGS="" +readonly MAVEN_ARGS -readonly SCRIPT_DIR=$(dirname ${SCRIPT_PATH}) +DATESTAMP=$(date +%Y-%m-%d-%H-%M-%S) +readonly DATESTAMP -readonly DATESTAMP=$(date +%Y-%m-%d-%H-%M-%S) mkdir -p /var/tmp/helidon-smoke -readonly SCRATCH=$(mktemp -d /var/tmp/helidon-smoke/${VERSION}-${DATESTAMP}.XXXX) +SCRATCH=$(mktemp -d "/var/tmp/helidon-smoke/${VERSION}-${DATESTAMP}.XXXX") +readonly SCRATCH if [ -z "${GIT_URL}" ] ; then - cd ${SCRIPT_DIR} + cd "${SCRIPT_DIR}" GIT_URL=$(git remote get-url origin) fi @@ -153,41 +173,41 @@ set -u full(){ echo "===== Full Test =====" - cd ${SCRATCH} + cd "${SCRATCH}" quick - cd ${SCRATCH} + cd "${SCRATCH}" if [[ "${VERSION}" =~ .*SNAPSHOT ]]; then echo "WARNING! SNAPSHOT version. 
Skipping tag checkout" else echo "===== Cloning Workspace ${GIT_URL} =====" - git clone ${GIT_URL} - cd ${SCRATCH}/helidon + git clone "${GIT_URL}" + cd "${SCRATCH}/helidon" echo "===== Checking out tags/${VERSION} =====" - git checkout tags/${VERSION} + git checkout "tags/${VERSION}" fi echo "===== Building examples =====" - cd ${SCRATCH}/helidon/examples + cd "${SCRATCH}/helidon/examples" # XXX we exclude todo-app frontend due to the issues with npm behind firewall - mvn ${MAVEN_ARGS} clean install -pl '!todo-app/frontend' ${STAGED_PROFILE} - cd ${SCRATCH} + mvn "${MAVEN_ARGS}" clean install -pl '!todo-app/frontend' ${STAGED_PROFILE} + cd "${SCRATCH}" echo "===== Building test support =====" - cd ${SCRATCH}/helidon/microprofile/tests/ - mvn -N ${MAVEN_ARGS} clean install ${STAGED_PROFILE} - cd ${SCRATCH}/helidon/microprofile/tests/junit5 - mvn ${MAVEN_ARGS} clean install ${STAGED_PROFILE} - cd ${SCRATCH}/helidon/microprofile/tests/junit5-tests - mvn ${MAVEN_ARGS} clean install ${STAGED_PROFILE} + cd "${SCRATCH}/helidon/microprofile/tests/" + mvn -N "${MAVEN_ARGS}" clean install ${STAGED_PROFILE} + cd "${SCRATCH}/helidon/microprofile/tests/junit5" + mvn "${MAVEN_ARGS}" clean install ${STAGED_PROFILE} + cd "${SCRATCH}/helidon/microprofile/tests/junit5-tests" + mvn "${MAVEN_ARGS}" clean install ${STAGED_PROFILE} echo "===== Running tests =====" - cd ${SCRATCH}/helidon/tests - mvn ${MAVEN_ARGS} clean install ${STAGED_PROFILE} + cd "${SCRATCH}/helidon/tests" + mvn "${MAVEN_ARGS}" clean install ${STAGED_PROFILE} # Primes dependencies for native-image builds - cd ${SCRATCH}/helidon/tests/integration/native-image - mvn ${MAVEN_ARGS} clean install ${STAGED_PROFILE} + cd "${SCRATCH}/helidon/tests/integration/native-image" + mvn "${MAVEN_ARGS}" clean install ${STAGED_PROFILE} echo "===== Running native image tests =====" if [ -z "${GRAALVM_HOME}" ]; then @@ -196,12 +216,12 @@ full(){ echo "GRAALVM_HOME=${GRAALVM_HOME}" readonly native_image_tests="mp-1 mp-2 mp-3" for native_test in ${native_image_tests}; do - cd ${SCRATCH}/helidon/tests/integration/native-image/${native_test} - mvn ${MAVEN_ARGS} clean package -Pnative-image ${STAGED_PROFILE} + cd "${SCRATCH}/helidon/tests/integration/native-image/${native_test}" + mvn "${MAVEN_ARGS}" clean package -Pnative-image ${STAGED_PROFILE} done # Run this one because it has no pre-reqs and self-tests - cd ${SCRATCH}/helidon/tests/integration/native-image/mp-1 + cd "${SCRATCH}/helidon/tests/integration/native-image/mp-1" target/helidon-tests-native-image-mp-1 fi @@ -218,10 +238,10 @@ waituntilready() { testGET() { echo "GET $1" - http_code=`curl -s -o /dev/null -w "%{http_code}" -X GET $1` - if [ ${http_code} -ne "200" ]; then - echo "ERROR: Bad HTTP code. Expected 200 got ${http_code}. GET $1" - kill ${PID} + http_code=$(curl -s -o /dev/null -w "%{http_code}" -X GET "${1}") + if [ "${http_code}" -ne "200" ]; then + echo "ERROR: Bad HTTP code. Expected 200 got ${http_code}. 
GET ${1}" + kill "${PID}" return 1 fi return 0 @@ -230,37 +250,37 @@ testGET() { # # $1 = archetype name: "quickstart-se" buildAndTestArchetype(){ - archetype_name=$1 - archetype_pkg=`echo ${archetype_name} | tr "\-" "\."` + archetype_name=${1} + archetype_pkg=$(echo "${archetype_name}" | tr "\-" "\.") echo "===== Testing Archetype ${archetype_name} =====" - mvn ${MAVEN_ARGS} -U archetype:generate -DinteractiveMode=false \ + mvn "${MAVEN_ARGS}" -U archetype:generate -DinteractiveMode=false \ -DarchetypeGroupId=io.helidon.archetypes \ - -DarchetypeArtifactId=helidon-${archetype_name} \ - -DarchetypeVersion=${VERSION} \ + -DarchetypeArtifactId="helidon-${archetype_name}" \ + -DarchetypeVersion="${VERSION}" \ -DgroupId=io.helidon.examples \ - -DartifactId=helidon-${archetype_name} \ - -Dpackage=io.helidon.examples.${archetype_pkg} \ + -DartifactId=helidon-"${archetype_name}" \ + -Dpackage=io.helidon.examples."${archetype_pkg}" \ ${STAGED_PROFILE} echo "===== ${archetype_name}: building jar =====" - mvn ${MAVEN_ARGS} -f helidon-${archetype_name}/pom.xml ${STAGED_PROFILE} clean package + mvn "${MAVEN_ARGS}" -f helidon-"${archetype_name}"/pom.xml ${STAGED_PROFILE} clean package echo "===== Running and pinging ${archetype_name} app using jar =====" - java -jar helidon-${archetype_name}/target/helidon-${archetype_name}.jar & + java -jar "helidon-${archetype_name}/target/helidon-${archetype_name}.jar" & PID=$! - testApp ${archetype_name} + testApp "${archetype_name}" kill ${PID} echo "===== ${archetype_name}: building jlink image =====" - mvn ${MAVEN_ARGS} -f helidon-${archetype_name}/pom.xml ${STAGED_PROFILE} -Pjlink-image package -DskipTests + mvn "${MAVEN_ARGS}" -f "helidon-${archetype_name}/pom.xml" ${STAGED_PROFILE} -Pjlink-image package -DskipTests echo "===== Running and pinging ${archetype_name} app using jlink image =====" - helidon-${archetype_name}/target/helidon-${archetype_name}-jri/bin/start & + "helidon-${archetype_name}/target/helidon-${archetype_name}-jri/bin/start" & PID=$! - testApp ${archetype_name} + testApp "${archetype_name}" kill ${PID} sleep 1 } @@ -270,7 +290,7 @@ testApp(){ waituntilready # Hit some endpoints - if [ "${archetype_name}" = "quickstart-se" -o "${archetype_name}" = "quickstart-mp" ]; then + if [ "${archetype_name}" = "quickstart-se" ] || [ "${archetype_name}" = "quickstart-mp" ]; then testGET http://localhost:8080/greet testGET http://localhost:8080/greet/Joe fi @@ -289,29 +309,30 @@ quick(){ " echo "===== Quick Test =====" - cd ${SCRATCH} + cd "${SCRATCH}" echo "===== Testing Archetypes =====" for a in ${archetypes}; do - buildAndTestArchetype $a + buildAndTestArchetype "${a}" done } -cd ${SCRATCH} +cd "${SCRATCH}" -readonly OUTPUTFILE=${SCRATCH}/helidon-smoketest-log.txt -readonly LOCAL_MVN_REPO=$(mvn ${MAVEN_ARGS} help:evaluate -Dexpression=settings.localRepository | grep -v '\[INFO\]') +OUTPUT_FILE=${SCRATCH}/helidon-smoketest-log.txt +LOCAL_MVN_REPO=$(mvn "${MAVEN_ARGS}" help:evaluate -Dexpression=settings.localRepository | grep -v '\[INFO\]') +readonly OUTPUT_FILE LOCAL_MVN_REPO echo "===== Running in ${SCRATCH} =====" -echo "===== Log file: ${OUTPUTFILE} =====" +echo "===== Log file: ${OUTPUT_FILE} =====" -if [ ! 
-z "${CLEAN_MVN_REPO}" -a -d "${LOCAL_MVN_REPO}" ]; then +if [ -n "${CLEAN_MVN_REPO}" ] && [ -d "${LOCAL_MVN_REPO}" ]; then echo "===== Cleaning release from local maven repository ${LOCAL_MVN_REPO} =====" - find ${LOCAL_MVN_REPO}/io/helidon -depth -name ${VERSION} -type d -exec rm -rf {} \; + find "${LOCAL_MVN_REPO}/io/helidon" -depth -name "${VERSION}" -type d -exec rm -rf {} \; fi # Invoke command -${COMMAND} | tee $OUTPUTFILE +${COMMAND} | tee "${OUTPUT_FILE}" -echo "===== Log file: ${OUTPUTFILE} =====" +echo "===== Log file: ${OUTPUT_FILE} =====" diff --git a/etc/scripts/spotbugs.sh b/etc/scripts/spotbugs.sh deleted file mode 100755 index 8a81cd77832..00000000000 --- a/etc/scripts/spotbugs.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2022 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - install -e \ - -DskipTests \ - -Dmaven.test.skip=true \ - -Pspotbugs,pipeline diff --git a/etc/scripts/test-archetypes.sh b/etc/scripts/test-archetypes.sh deleted file mode 100755 index 2875775f8f1..00000000000 --- a/etc/scripts/test-archetypes.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2022 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' 
- -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} --version - -# Temporary workaround until job stages will share maven repository -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - install -e \ - -Dmaven.test.skip=true \ - -DskipTests \ - -Ppipeline - -cd ${WS_DIR}/archetypes -mvn ${MAVEN_ARGS} -e clean install diff --git a/etc/scripts/test-integ-dbclient.sh b/etc/scripts/test-integ-dbclient.sh deleted file mode 100755 index 5ecac9cfdfa..00000000000 --- a/etc/scripts/test-integ-dbclient.sh +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2021 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -# Set Graal VM into JAVA_HOME and PATH (defined in includes/pipeline-env.sh) -graalvm - -print_help() { - echo 'Usage: test-integ-dbclient.sh [-hsjn] -d ' - echo '' - echo ' -h print this help and exit' - echo ' -s execute simple tests (default when no -s, -j or -n is passed)' - echo ' -j execute remote application tests in Java VM mode' - echo ' -n execute remote application tests in native image mode' - echo ' -d select database' - echo ' :: mysql | pgsql' -} - -# Evaluate command line arguments -if [ "$#" -gt '0' ]; then - while getopts 'hsjnd:' flag 2> /dev/null; do - case "${flag}" in - h) print_help && exit;; - d) readonly FLAG_D=${OPTARG};; - s) readonly FLAG_S='1';; - j) readonly FLAG_J='1';; - n) readonly FLAG_N='1';; - esac - done -fi - -# Load database setup -if [ -n "${FLAG_D}" ]; then - case "${FLAG_D}" in - mysql) . ${WS_DIR}/etc/scripts/includes/mysql.sh;; - pgsql) . ${WS_DIR}/etc/scripts/includes/pgsql.sh;; - *) echo 'ERROR: Unknown database name, exitting.' && exit 1;; - esac -else - echo 'ERROR: No database was selected, exitting.' 
- exit 1 -fi - -# Turn simple tests on when no test was selected -[ -z "${FLAG_J}" -a -z "${FLAG_N}" -a -z "${FLAG_S}" ] && \ - readonly FLAG_S='1' - -# Run simple JDBC tests -[ -n "${FLAG_S}" ] && \ - (cd ${WS_DIR}/tests/integration/dbclient && \ - echo mvn -P${DB_PROFILE} -pl common,jdbc \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify && \ - mvn -P${DB_PROFILE} -pl common,jdbc \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify) - -# Run remote application tests in Java VM mode -[ -n "${FLAG_J}" ] && \ - (cd ${WS_DIR}/tests/integration/dbclient && \ - echo mvn -P${DB_PROFILE} \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify && \ - mvn -P${DB_PROFILE} \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify) - -# Run remote application tests in native image mode -[ -n "${FLAG_N}" ] && \ - (cd ${WS_DIR}/tests/integration/dbclient && \ - echo mvn -P${DB_PROFILE} -Pnative-image \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify && \ - mvn -P${DB_PROFILE} -Pnative-image \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify) diff --git a/etc/scripts/test-integ-example.sh b/etc/scripts/test-integ-example.sh deleted file mode 100755 index 497eb66ba1b..00000000000 --- a/etc/scripts/test-integ-example.sh +++ /dev/null @@ -1,98 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2021 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -# Set Graal VM into JAVA_HOME and PATH (defined in includes/pipeline-env.sh) -graalvm - -print_help() { - echo 'Usage: test-integ-example.sh [-hjn] -d ' - echo '' - echo ' -h print this help and exit' - echo ' -j execute remote application tests in Java VM mode (default)' - echo ' -n execute remote application tests in native image mode' - echo ' -d select database' - echo ' :: mysql | pgsql' -} - -# Evaluate command line arguments -if [ "$#" -gt '0' ]; then - while getopts 'hjnd:' flag 2> /dev/null; do - case "${flag}" in - h) print_help && exit;; - d) readonly FLAG_D=${OPTARG};; - j) readonly FLAG_J='1';; - n) readonly FLAG_N='1';; - esac - done -fi - -# Load database setup -if [ -n "${FLAG_D}" ]; then - case "${FLAG_D}" in - mysql) . ${WS_DIR}/etc/scripts/includes/mysql.sh;; - pgsql) . 
${WS_DIR}/etc/scripts/includes/pgsql.sh;; - *) echo 'ERROR: Unknown database name, exitting.' && exit 1;; - esac -else - echo 'ERROR: No database was selected, exitting.' - exit 1 -fi - -# Turn simple tests on when no test was selected -[ -z "${FLAG_J}" -a -z "${FLAG_N}" ] && \ - readonly FLAG_J='1' - -# Run remote application tests in Java VM mode -[ -n "${FLAG_J}" ] && \ - (cd ${WS_DIR}/tests/integration/tools/example && \ - echo mvn -P${DB_PROFILE} \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify && \ - mvn -P${DB_PROFILE} \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify) - -# Run remote application tests in native image mode -[ -n "${FLAG_N}" ] && \ - (cd ${WS_DIR}/tests/integration/tools/example && \ - echo mvn -P${DB_PROFILE} -Pnative-image \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify && \ - mvn -P${DB_PROFILE} -Pnative-image \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify) diff --git a/etc/scripts/test-integ-mysql.sh b/etc/scripts/test-integ-mysql.sh deleted file mode 100755 index e4384e23ce1..00000000000 --- a/etc/scripts/test-integ-mysql.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2018, 2022 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -# Set Graal VM into JAVA_HOME and PATH (defined in includes/pipeline-env.sh) -graalvm - -mvn ${MAVEN_ARGS} --version - -# Temporary workaround until job stages will share maven repository -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - clean install -e \ - -DskipTests \ - -Ppipeline - -# Run tests in Java VM application -(cd tests/integration/jpa && \ - mvn ${MAVEN_ARGS} clean verify \ - -Dmaven.test.failure.ignore=true -Dmysql \ - -pl model,appl) - -# Run tests in native image application -(cd tests/integration/jpa && \ - mvn ${MAVEN_ARGS} clean verify \ - -Dmaven.test.failure.ignore=true -Dmysql \ - -Pnative-image -Dnative-image -pl model,appl) diff --git a/etc/scripts/test-integ-pgsql.sh b/etc/scripts/test-integ-pgsql.sh deleted file mode 100755 index 02ddb4404bb..00000000000 --- a/etc/scripts/test-integ-pgsql.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2018, 2022 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -# Set Graal VM into JAVA_HOME and PATH (defined in includes/pipeline-env.sh) -graalvm - -mvn ${MAVEN_ARGS} --version - -# Temporary workaround until job stages will share maven repository -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - clean install -e \ - -DskipTests \ - -Ppipeline - -# Run tests in Java VM application -(cd tests/integration/jpa && \ - mvn ${MAVEN_ARGS} clean verify \ - -Dmaven.test.failure.ignore=true -Dpgsql \ - -pl model,appl) - -# Run tests in native image application -(cd tests/integration/jpa && \ - mvn ${MAVEN_ARGS} clean verify \ - -Dmaven.test.failure.ignore=true -Dpgsql \ - -Pnative-image -Dnative-image -pl model,appl) diff --git a/etc/scripts/test-integ-vault.sh b/etc/scripts/test-integ-vault.sh deleted file mode 100644 index 121d7cf32c6..00000000000 --- a/etc/scripts/test-integ-vault.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2021, 2022 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup - -mvn ${MAVEN_ARGS} --version - -# Temporary workaround until job stages will share maven repository -mvn ${MAVEN_ARGS} -f ${WS_DIR}/pom.xml \ - install -e \ - -DskipTests \ - -Dmaven.test.skip=true \ - -Ppipeline - -# Run integrations tests for Vault -cd tests/integration/vault - -mvn ${MAVEN_ARGS} clean verify \ - -Dmaven.test.failure.ignore=true diff --git a/etc/scripts/test-nightly.sh b/etc/scripts/test-nightly.sh index 93a14bd9877..3d658e13136 100755 --- a/etc/scripts/test-nightly.sh +++ b/etc/scripts/test-nightly.sh @@ -1,6 +1,6 @@ #!/bin/bash -e # -# Copyright (c) 2022 Oracle and/or its affiliates. +# Copyright (c) 2022, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,20 +15,36 @@ # limitations under the License. 
# -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +# Path to this script +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH + +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. ; pwd -P) +readonly WS_DIR readonly MODULES_TO_BUILD="\ microprofile/tests/tck/tck-fault-tolerance\ " -cd ${WS_DIR} +cd "${WS_DIR}" # Do priming build mvn clean install -DskipTests diff --git a/etc/scripts/test-packaging-jar.sh b/etc/scripts/test-packaging-jar.sh index c648b3bf180..e3deeebfb5f 100755 --- a/etc/scripts/test-packaging-jar.sh +++ b/etc/scripts/test-packaging-jar.sh @@ -1,6 +1,6 @@ -#!/bin/bash -e +#!/bin/bash # -# Copyright (c) 2021, 2023 Oracle and/or its affiliates. +# Copyright (c) 2021, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,19 +15,37 @@ # limitations under the License. # -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +# Path to this script +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + # shellcheck disable=SC2155 + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. 
; pwd -P) +readonly WS_DIR # Run native image tests -cd ${WS_DIR}/tests/integration/native-image +cd "${WS_DIR}/tests/integration/native-image" # Prime build all native-image tests +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} -e clean install # Run tests with classpath and then module path @@ -35,7 +53,7 @@ mvn ${MAVEN_ARGS} -e clean install # # Run MP-1 # -cd ${WS_DIR}/tests/integration/native-image/mp-1 +cd "${WS_DIR}/tests/integration/native-image/mp-1" # Classpath java -jar target/helidon-tests-native-image-mp-1.jar @@ -46,7 +64,7 @@ java --module-path target/helidon-tests-native-image-mp-1.jar:target/libs \ # # Run MP-3 (just start and stop) # -cd ${WS_DIR}/tests/integration/native-image/mp-3 +cd "${WS_DIR}/tests/integration/native-image/mp-3" # Classpath java -Dexit.on.started=! -jar target/helidon-tests-native-image-mp-3.jar diff --git a/etc/scripts/test-packaging-jlink.sh b/etc/scripts/test-packaging-jlink.sh index 7557ca49375..3675332168e 100755 --- a/etc/scripts/test-packaging-jlink.sh +++ b/etc/scripts/test-packaging-jlink.sh @@ -1,6 +1,6 @@ -#!/bin/bash -e +#!/bin/bash # -# Copyright (c) 2021, 2023 Oracle and/or its affiliates. +# Copyright (c) 2021, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,52 +15,71 @@ # limitations under the License. # -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +# Path to this script +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + # shellcheck disable=SC2155 + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. 
; pwd -P) +readonly WS_DIR # Run native image tests -cd ${WS_DIR}/tests/integration/native-image +cd "${WS_DIR}/tests/integration/native-image" # Prime build all native-image tests +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} -e clean install # Build jlink images # mp-2 fails because of https://github.com/oracle/helidon-build-tools/issues/478 readonly native_image_tests="mp-1 mp-3" for native_test in ${native_image_tests}; do - cd ${WS_DIR}/tests/integration/native-image/${native_test} + cd "${WS_DIR}/tests/integration/native-image/${native_test}" + # shellcheck disable=SC2086 mvn ${MAVEN_ARGS} package -e -Pjlink-image,staging -Djlink.image.addClassDataSharingArchive=false -Djlink.image.testImage=false done # Run tests with classpath and then module path # Run MP-1 -cd ${WS_DIR}/tests/integration/native-image/mp-1 -jri_dir=${WS_DIR}/tests/integration/native-image/mp-1/target/helidon-tests-native-image-mp-1-jri +cd "${WS_DIR}/tests/integration/native-image/mp-1" +jri_dir="${WS_DIR}/tests/integration/native-image/mp-1/target/helidon-tests-native-image-mp-1-jri" # Classpath -${jri_dir}/bin/start +"${jri_dir}"/bin/start # Module Path -${jri_dir}/bin/java \ - --module-path ${jri_dir}/app/helidon-tests-native-image-mp-1.jar:${jri_dir}/app/libs \ +"${jri_dir}"/bin/java \ + --module-path "${jri_dir}/app/helidon-tests-native-image-mp-1.jar:${jri_dir}/app/libs" \ --module helidon.tests.nimage.mp # Run MP-3 (just start and stop) -cd ${WS_DIR}/tests/integration/native-image/mp-3 +cd "${WS_DIR}/tests/integration/native-image/mp-3" jri_dir=${WS_DIR}/tests/integration/native-image/mp-3/target/helidon-tests-native-image-mp-3-jri # Classpath -${jri_dir}/bin/start --test +"${jri_dir}"/bin/start --test # Module Path -${jri_dir}/bin/java -Dexit.on.started=! \ - --module-path ${jri_dir}/app/helidon-tests-native-image-mp-3.jar:${jri_dir}/app/libs \ +"${jri_dir}"/bin/java -Dexit.on.started=! \ + --module-path "${jri_dir}/app/helidon-tests-native-image-mp-3.jar:${jri_dir}/app/libs" \ --add-modules helidon.tests.nimage.quickstartmp \ --module io.helidon.microprofile.cdi/io.helidon.microprofile.cdi.Main diff --git a/etc/scripts/test-packaging-native.sh b/etc/scripts/test-packaging-native.sh index 067edb143e7..e5b5da63f33 100755 --- a/etc/scripts/test-packaging-native.sh +++ b/etc/scripts/test-packaging-native.sh @@ -1,6 +1,6 @@ -#!/bin/bash -e +#!/bin/bash # -# Copyright (c) 2021, 2023 Oracle and/or its affiliates. +# Copyright (c) 2021, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,14 +15,31 @@ # limitations under the License. # -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' 
+# Path to this script +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + # shellcheck disable=SC2155 + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. ; pwd -P) +readonly WS_DIR if [ -z "${GRAALVM_HOME}" ]; then echo "ERROR: GRAALVM_HOME is not set"; @@ -34,29 +51,32 @@ if [ ! -x "${GRAALVM_HOME}/bin/native-image" ]; then exit 1 fi +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} --version echo "GRAALVM_HOME=${GRAALVM_HOME}"; -${GRAALVM_HOME}/bin/native-image --version; +"${GRAALVM_HOME}"/bin/native-image --version; # Run native image tests -cd ${WS_DIR}/tests/integration/native-image +cd "${WS_DIR}/tests/integration/native-image" # Prime build all native-image tests +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} -e clean install # Build native images readonly native_image_tests="se-1 mp-1 mp-3" for native_test in ${native_image_tests}; do - cd ${WS_DIR}/tests/integration/native-image/${native_test} + cd "${WS_DIR}/tests/integration/native-image/${native_test}" + # shellcheck disable=SC2086 mvn ${MAVEN_ARGS} -e clean package -Pnative-image done # Run this one because it has no pre-reqs and self-tests # Uses relative path to read configuration -cd ${WS_DIR}/tests/integration/native-image/mp-1 -${WS_DIR}/tests/integration/native-image/mp-1/target/helidon-tests-native-image-mp-1 || true +cd "${WS_DIR}/tests/integration/native-image/mp-1" +./target/helidon-tests-native-image-mp-1 || true # Run se-1 exiting on started -cd ${WS_DIR}/tests/integration/native-image/se-1 -${WS_DIR}/tests/integration/native-image/se-1/target/helidon-tests-native-image-se-1 -Dexit.on.started=! || true +cd "${WS_DIR}/tests/integration/native-image/se-1" +./target/helidon-tests-native-image-se-1 -Dexit.on.started=! || true diff --git a/etc/scripts/test-quickstarts.sh b/etc/scripts/test-quickstarts.sh index 520a0f8617a..78d878257ef 100755 --- a/etc/scripts/test-quickstarts.sh +++ b/etc/scripts/test-quickstarts.sh @@ -15,14 +15,30 @@ # limitations under the License. # -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" +set -o pipefail || true # trace ERR through pipes +set -o errtrace || true # trace ERR through commands and functions +set -o errexit || true # exit the script if any statement returns a non-true return value + +on_error(){ + CODE="${?}" && \ + set +x && \ + printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ + "${CODE}" "${BASH_SOURCE[0]}" "${LINENO}" "${BASH_COMMAND}" +} +trap on_error ERR -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/includes/pipeline-env.sh "${SCRIPT_PATH}" '../..' +# Path to this script +if [ -h "${0}" ] ; then + SCRIPT_PATH="$(readlink "${0}")" +else + SCRIPT_PATH="${0}" +fi +readonly SCRIPT_PATH -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup +# Path to the root of the workspace +# shellcheck disable=SC2046 +WS_DIR=$(cd $(dirname -- "${SCRIPT_PATH}") ; cd ../.. ; pwd -P) +readonly WS_DIR if [ -z "${GRAALVM_HOME}" ]; then echo "ERROR: GRAALVM_HOME is not set"; @@ -34,14 +50,16 @@ if [ ! 
-x "${GRAALVM_HOME}/bin/native-image" ]; then exit 1 fi +# shellcheck disable=SC2086 mvn ${MAVEN_ARGS} --version -${GRAALVM_HOME}/bin/native-image --version; +"${GRAALVM_HOME}"/bin/native-image --version; # Build quickstart native-image executable and run jar file readonly quickstarts="helidon-quickstart-mp helidon-quickstart-se" for quickstart in ${quickstarts}; do cd "${WS_DIR}"/examples/quickstarts/"${quickstart}" + # shellcheck disable=SC2086 mvn ${MAVEN_ARGS} -e clean install -Pnative-image -DskipTests ./target/"${quickstart}" -Dexit.on.started=! done diff --git a/examples/messaging/docker/kafka/init_topics.sh b/examples/messaging/docker/kafka/init_topics.sh index 4957126f588..90864ce5fb5 100644 --- a/examples/messaging/docker/kafka/init_topics.sh +++ b/examples/messaging/docker/kafka/init_topics.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2020, 2023 Oracle and/or its affiliates. +# Copyright (c) 2020, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,44 +21,44 @@ # ZOOKEEPER_URL=localhost:2181 -KAFKA_TOPICS="/opt/kafka/bin/kafka-topics.sh --if-not-exists --zookeeper $ZOOKEEPER_URL" +KAFKA_TOPICS="/opt/kafka/bin/kafka-topics.sh --if-not-exists --zookeeper ${ZOOKEEPER_URL}" while sleep 2; do - brokers=$(echo dump | nc localhost 2181 | grep brokers | wc -l) + brokers=$(echo dump | nc localhost 2181 | grep -c brokers) echo "Checking if Kafka is up: ${brokers}" if [[ "$brokers" -gt "0" ]]; then echo "KAFKA IS UP !!!" echo "Creating test topics" - bash $KAFKA_TOPICS \ + bash "${KAFKA_TOPICS}" \ --create \ --replication-factor 1 \ --partitions 10 \ --topic messaging-test-topic-1 - bash $KAFKA_TOPICS \ + bash "${KAFKA_TOPICS}" \ --create \ --replication-factor 1 \ --partitions 10 \ --topic messaging-test-topic-2 - bash $KAFKA_TOPICS \ + bash "${KAFKA_TOPICS}" \ --create \ --replication-factor 1 \ --partitions 10 \ --config compression.type=snappy \ --topic messaging-test-topic-snappy-compressed - bash $KAFKA_TOPICS \ + bash "${KAFKA_TOPICS}" \ --create \ --replication-factor 1 \ --partitions 10 \ --config compression.type=lz4 \ --topic messaging-test-topic-lz4-compressed - bash $KAFKA_TOPICS \ + bash "${KAFKA_TOPICS}" \ --create \ --replication-factor 1 \ --partitions 10 \ --config compression.type=zstd \ --topic messaging-test-topic-zstd-compressed - bash $KAFKA_TOPICS \ + bash "${KAFKA_TOPICS}" \ --create \ --replication-factor 1 \ --partitions 10 \ diff --git a/examples/messaging/docker/kafka/start_kafka.sh b/examples/messaging/docker/kafka/start_kafka.sh index e1d1178658c..0bf0323acd1 100644 --- a/examples/messaging/docker/kafka/start_kafka.sh +++ b/examples/messaging/docker/kafka/start_kafka.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2020, 2021 Oracle and/or its affiliates. +# Copyright (c) 2020, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -46,4 +46,4 @@ if [ $state -ne 0 ]; then fi # Keep Kafka up till Ctrl+C -read ; +read -r ; diff --git a/examples/messaging/docker/oracle-aq-18-xe/buildAndRun.sh b/examples/messaging/docker/oracle-aq-18-xe/buildAndRun.sh index 0d9d3334c70..6c0095563e1 100755 --- a/examples/messaging/docker/oracle-aq-18-xe/buildAndRun.sh +++ b/examples/messaging/docker/oracle-aq-18-xe/buildAndRun.sh @@ -39,7 +39,7 @@ if [[ "$(docker images -q ${BASE_IMAGE_NAME} 2>/dev/null)" == "" ]]; then rm -f ${TEMP_DIR}/ora-images.zip # download official oracle docker images - curl -LJ -o ${TEMP_DIR}/ora-images.zip ${IMAGES_ZIP_URL} + curl -LJ -o ${TEMP_DIR}/ora-images.zip "${IMAGES_ZIP_URL}" # unzip only image for Oracle database 18.4.0 unzip -qq ${TEMP_DIR}/ora-images.zip "${IMAGES_ZIP_DIR}/*" -d ${IMAGES_DIR} mv ${IMAGES_DIR}/${IMAGES_ZIP_DIR}/${ORA_DB_VERSION} ${IMAGES_DIR}/ @@ -53,7 +53,7 @@ if [[ "$(docker images -q ${BASE_IMAGE_NAME} 2>/dev/null)" == "" ]]; then # can take long(15 minutes or so) cd ${IMAGES_DIR} || exit bash ./buildContainerImage.sh -v ${ORA_DB_VERSION} -x || exit - cd ${CURR_DIR} || exit + cd "${CURR_DIR}" || exit else printf "OK\n" fi diff --git a/examples/messaging/weblogic-jms-mp/weblogic/container-scripts/createAndStartEmptyDomain.sh b/examples/messaging/weblogic-jms-mp/weblogic/container-scripts/createAndStartEmptyDomain.sh index 1d1a3e4eaff..a87a296315b 100644 --- a/examples/messaging/weblogic-jms-mp/weblogic/container-scripts/createAndStartEmptyDomain.sh +++ b/examples/messaging/weblogic-jms-mp/weblogic/container-scripts/createAndStartEmptyDomain.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2022 Oracle and/or its affiliates. +# Copyright (c) 2022, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,66 +22,59 @@ function _term() { echo "Stopping container." echo "SIGTERM received, shutting down the server!" - ${DOMAIN_HOME}/bin/stopWebLogic.sh -} - -########### SIGKILL handler ############ -function _kill() { - echo "SIGKILL received, shutting down the server!" - kill -9 $childPID + "${DOMAIN_HOME}/bin/stopWebLogic.sh" } # Set SIGTERM handler trap _term SIGTERM -# Set SIGKILL handler -trap _kill SIGKILL +DOMAIN_NAME="base_domain" +DOMAIN_HOME="/u01/oracle/user_projects/domains/${DOMAIN_NAME}" +export DOMAIN_NAME DOMAIN_HOME +echo "Domain Home is: " "${DOMAIN_HOME}" -#Define DOMAIN_HOME -export DOMAIN_HOME=/u01/oracle/user_projects/domains/$DOMAIN_NAME -echo "Domain Home is: " $DOMAIN_HOME - -mkdir -p $ORACLE_HOME/properties +mkdir -p "${ORACLE_HOME}/properties" # Create Domain only if 1st execution -if [ ! -e ${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log ]; then +if [ ! -e "${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log" ]; then echo "Create Domain" - PROPERTIES_FILE=/u01/oracle/properties/domain.properties - if [ ! -e "$PROPERTIES_FILE" ]; then + PROPERTIES_FILE="/u01/oracle/properties/domain.properties" + if [ ! -e "${PROPERTIES_FILE}" ]; then echo "A properties file with the username and password needs to be supplied." exit fi # Get Username - USER=`awk '{print $1}' $PROPERTIES_FILE | grep username | cut -d "=" -f2` - if [ -z "$USER" ]; then + USER=$(awk '{print $1}' ${PROPERTIES_FILE} | grep username | cut -d "=" -f2) + if [ -z "${USER}" ]; then echo "The domain username is blank. The Admin username must be set in the properties file." 
exit fi + # Get Password - PASS=`awk '{print $1}' $PROPERTIES_FILE | grep password | cut -d "=" -f2` - if [ -z "$PASS" ]; then + PASS=$(awk '{print $1}' ${PROPERTIES_FILE} | grep password | cut -d "=" -f2) + if [ -z "${PASS}" ]; then echo "The domain password is blank. The Admin password must be set in the properties file." exit fi # Create an empty domain wlst.sh -skipWLSModuleScanning -loadProperties $PROPERTIES_FILE /u01/oracle/create-wls-domain.py - mkdir -p ${DOMAIN_HOME}/servers/${ADMIN_NAME}/security/ - chmod -R g+w ${DOMAIN_HOME} - echo "username=${USER}" >> $DOMAIN_HOME/servers/${ADMIN_NAME}/security/boot.properties - echo "password=${PASS}" >> $DOMAIN_HOME/servers/${ADMIN_NAME}/security/boot.properties - ${DOMAIN_HOME}/bin/setDomainEnv.sh + mkdir -p "${DOMAIN_HOME}/servers/${ADMIN_NAME}/security/" + chmod -R g+w "${DOMAIN_HOME}" + echo "username=${USER}" >> "${DOMAIN_HOME}/servers/${ADMIN_NAME}/security/boot.properties" + echo "password=${PASS}" >> "${DOMAIN_HOME}/servers/${ADMIN_NAME}/security/boot.properties" + "${DOMAIN_HOME}/bin/setDomainEnv.sh" # Setup JMS examples # wlst.sh -skipWLSModuleScanning -loadProperties $PROPERTIES_FILE /u01/oracle/setupTestJMSQueue.py fi # Start Admin Server and tail the logs -${DOMAIN_HOME}/startWebLogic.sh -if [ -e ${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log ]; then +"${DOMAIN_HOME}/startWebLogic.sh" +if [ -e "${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log" ]; then echo "${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log" fi -touch ${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log -tail -f ${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log +touch "${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log" +tail -f "${DOMAIN_HOME}/servers/${ADMIN_NAME}/logs/${ADMIN_NAME}.log" -childPID=$! -wait $childPID +childPID=${!} +wait ${childPID} diff --git a/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/create-keys.sh b/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/create-keys.sh index 16cb5edebca..49ba4e63c84 100644 --- a/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/create-keys.sh +++ b/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/create-keys.sh @@ -1,7 +1,7 @@ #!/bin/bash # -# Copyright (c) 2023 Oracle and/or its affiliates. +# Copyright (c) 2023, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,7 +18,9 @@ set -e +# shellcheck disable=SC1091 source ./config.sh +# shellcheck disable=SC1091 source ./utils.sh # Cleanup @@ -28,32 +30,33 @@ mkdir -p server client CDIR=$(pwd) # Rotate server cert and key -cd ${CDIR}/server +cd "${CDIR}/server" genCertAndCSR server -NEW_SERVER_CERT_OCID=$(uploadNewCert server $DISPLAY_NAME_PREFIX) +NEW_SERVER_CERT_OCID=$(uploadNewCert server "${DISPLAY_NAME_PREFIX}") prepareKeyToUpload server -NEW_SERVER_KEY_OCID=$(createKeyInVault server $DISPLAY_NAME_PREFIX) +# shellcheck disable=SC2086 +NEW_SERVER_KEY_OCID=$(createKeyInVault server ${DISPLAY_NAME_PREFIX}) # Rotate client cert and key -cd ${CDIR}/client +cd "${CDIR}/client" genCertAndCSR client -NEW_CLIENT_CERT_OCID=$(uploadNewCert client $DISPLAY_NAME_PREFIX) +NEW_CLIENT_CERT_OCID=$(uploadNewCert client "${DISPLAY_NAME_PREFIX}") prepareKeyToUpload client -NEW_CLIENT_KEY_OCID=$(createKeyInVault client $DISPLAY_NAME_PREFIX) +NEW_CLIENT_KEY_OCID=$(createKeyInVault client "${DISPLAY_NAME_PREFIX}") echo "======= ALL done! =======" echo "Newly created OCI resources:" -echo "Server certificate OCID: $NEW_SERVER_CERT_OCID" -echo "Server private key OCID: $NEW_SERVER_KEY_OCID" -echo "Client certificate OCID: $NEW_CLIENT_CERT_OCID" -echo "Client private key OCID: $NEW_CLIENT_KEY_OCID" +echo "Server certificate OCID: ${NEW_SERVER_CERT_OCID}" +echo "Server private key OCID: ${NEW_SERVER_KEY_OCID}" +echo "Client certificate OCID: ${NEW_CLIENT_CERT_OCID}" +echo "Client private key OCID: ${NEW_CLIENT_KEY_OCID}" echo "Saving to gen-config.sh" -tee ${CDIR}/generated-config.sh << EOF +tee "${CDIR}/generated-config.sh" << EOF #!/bin/bash ## Content of this file gets rewritten by create-keys.sh -export SERVER_CERT_OCID=$NEW_SERVER_CERT_OCID -export SERVER_KEY_OCID=$NEW_SERVER_KEY_OCID +export SERVER_CERT_OCID=${NEW_SERVER_CERT_OCID} +export SERVER_KEY_OCID=${NEW_SERVER_KEY_OCID} -export CLIENT_CERT_OCID=$NEW_CLIENT_CERT_OCID -export CLIENT_KEY_OCID=$NEW_CLIENT_KEY_OCID +export CLIENT_CERT_OCID=${NEW_CLIENT_CERT_OCID} +export CLIENT_KEY_OCID=${NEW_CLIENT_KEY_OCID} EOF diff --git a/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/rotate-keys.sh b/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/rotate-keys.sh index 27e4f3b27b0..e709c9db331 100644 --- a/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/rotate-keys.sh +++ b/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/rotate-keys.sh @@ -1,7 +1,7 @@ #!/bin/bash # -# Copyright (c) 2023 Oracle and/or its affiliates. +# Copyright (c) 2023, 2024 Oracle and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,8 +18,11 @@ set -e +# shellcheck disable=SC1091 source ./config.sh +# shellcheck disable=SC1091 source ./generated-config.sh +# shellcheck disable=SC1091 source ./utils.sh # Cleanup @@ -29,17 +32,18 @@ mkdir -p server client CDIR=$(pwd) # Rotate server cert and key -cd ${CDIR}/server +cd "${CDIR}/server" genCertAndCSR server -rotateCert server $SERVER_CERT_OCID +rotateCert server "${SERVER_CERT_OCID}" prepareKeyToUpload server -rotateKeyInVault server $SERVER_KEY_OCID +rotateKeyInVault server "${SERVER_KEY_OCID}" # Rotate client cert and key -cd ${CDIR}/client +cd "${CDIR}/client" genCertAndCSR client -rotateCert client $CLIENT_CERT_OCID +rotateCert client "${CLIENT_CERT_OCID}" prepareKeyToUpload client -rotateKeyInVault client $CLIENT_KEY_OCID +# shellcheck disable=SC2086 +rotateKeyInVault client ${CLIENT_KEY_OCID} echo "ALL done!" diff --git a/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/utils.sh b/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/utils.sh index 684f9304504..8955c80ee40 100644 --- a/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/utils.sh +++ b/examples/microprofile/oci-tls-certificates/etc/unsupported-cert-tools/utils.sh @@ -33,18 +33,18 @@ prepareKeyToUpload() { oci kms management wrapping-key get \ --query 'data."public-key"' \ --raw-output \ - --endpoint ${VAULT_MANAGEMENT_ENDPOINT} \ - >$VAULT_PUBLIC_WRAPPING_KEY_PATH + --endpoint "${VAULT_MANAGEMENT_ENDPOINT}" \ + > ${VAULT_PUBLIC_WRAPPING_KEY_PATH} # Extract server/client private key - openssl pkcs12 -in "$KEYSTORE_FILE" \ + openssl pkcs12 -in "${KEYSTORE_FILE}" \ -nocerts \ -passin pass:changeit -passout pass:changeit \ -out $PRIVATE_KEY_AS_PEM ## Upload server/client private key to vault # Generate a temporary AES key - openssl rand -out $TEMPORARY_AES_KEY_PATH 32 + openssl rand -out ${TEMPORARY_AES_KEY_PATH} 32 # Wrap the temporary AES key with the public wrapping key using RSA-OAEP with SHA-256: openssl pkeyutl -encrypt -in $TEMPORARY_AES_KEY_PATH \ @@ -60,100 +60,102 @@ prepareKeyToUpload() { openssl pkcs8 -topk8 -nocrypt \ -inform PEM -outform DER \ -passin pass:changeit -passout pass:changeit \ - -in $PRIVATE_KEY_AS_PEM -out $PRIVATE_KEY_AS_DER + -in ${PRIVATE_KEY_AS_PEM} -out ${PRIVATE_KEY_AS_DER} # Wrap RSA private key with the temporary AES key: - openssl enc -id-aes256-wrap-pad -iv A65959A6 -K "${TEMPORARY_AES_KEY_HEXDUMP}" -in $PRIVATE_KEY_AS_DER -out $WRAPPED_TARGET_KEY_FILE + openssl enc -id-aes256-wrap-pad -iv A65959A6 -K "${TEMPORARY_AES_KEY_HEXDUMP}" -in ${PRIVATE_KEY_AS_DER} -out ${WRAPPED_TARGET_KEY_FILE} # Create the wrapped key material by concatenating both wrapped keys: - cat $WRAPPED_TEMPORARY_AES_KEY_FILE $WRAPPED_TARGET_KEY_FILE >$WRAPPED_KEY_MATERIAL_FILE + cat ${WRAPPED_TEMPORARY_AES_KEY_FILE} ${WRAPPED_TARGET_KEY_FILE} >${WRAPPED_KEY_MATERIAL_FILE} # linux # KEY_MATERIAL_AS_BASE64=$(base64 -w 0 readyToUpload.der) # macOS KEY_MATERIAL_AS_BASE64=$(base64 -i readyToUpload.der) - JSON_KEY_MATERIAL="{\"keyMaterial\": \"$KEY_MATERIAL_AS_BASE64\",\"wrappingAlgorithm\": \"RSA_OAEP_AES_SHA256\"}" + JSON_KEY_MATERIAL="{\"keyMaterial\": \"${KEY_MATERIAL_AS_BASE64}\",\"wrappingAlgorithm\": \"RSA_OAEP_AES_SHA256\"}" - echo $JSON_KEY_MATERIAL >key-material.json + echo "${JSON_KEY_MATERIAL}" >key-material.json } createKeyInVault() { - TYPE=$1 + TYPE=${1} KEY_NAME=${2} - export NEW_KEY_OCID=$(oci kms management key import \ - --compartment-id ${COMPARTMENT_OCID} \ - --display-name ${KEY_NAME}-${TYPE} \ + NEW_KEY_OCID=$(oci kms 
management key import \ + --compartment-id "${COMPARTMENT_OCID}" \ + --display-name "${KEY_NAME}-${TYPE}" \ --key-shape '{"algorithm": "RSA", "length": 256}' \ --protection-mode SOFTWARE \ - --endpoint ${VAULT_MANAGEMENT_ENDPOINT} \ + --endpoint "${VAULT_MANAGEMENT_ENDPOINT}" \ --wrapped-import-key file://key-material.json \ --query 'data.id' \ --raw-output) + export NEW_KEY_OCID - echo "$NEW_KEY_OCID" + echo "${NEW_KEY_OCID}" } rotateKeyInVault() { - TYPE=$1 + TYPE=${1} KEY_OCID=${2} oci kms management key-version import \ - --key-id $KEY_OCID \ - --endpoint ${VAULT_MANAGEMENT_ENDPOINT} \ + --key-id "${KEY_OCID}" \ + --endpoint "${VAULT_MANAGEMENT_ENDPOINT}" \ --wrapped-import-key file://key-material.json } genCertAndCSR() { - TYPE=$1 + TYPE=${1} # Get CA cert oci certificates certificate-authority-bundle get --query 'data."certificate-pem"' \ --raw-output \ - --certificate-authority-id ${CA_OCID} \ - >ca.pem + --certificate-authority-id "${CA_OCID}" \ + > ca.pem # Generating new server key store keytool -genkeypair -keyalg RSA -keysize 2048 \ - -alias ${TYPE} \ + -alias "${TYPE}" \ -dname "CN=localhost" \ -validity 60 \ - -keystore ${TYPE}.jks \ + -keystore "${TYPE}.jks" \ -storepass password -keypass password \ -deststoretype pkcs12 # Create CSR keytool -certreq -keystore "${TYPE}.jks" \ - -alias ${TYPE} \ + -alias "${TYPE}" \ -keypass password \ -storepass password \ -validity 60 \ -keyalg rsa \ - -file ${TYPE}.csr + -file "${TYPE}.csr" } uploadNewCert() { - TYPE=$1 - CERT_NAME=$2 + TYPE=${1} + CERT_NAME=${2} ## Create server/client certificate in OCI - export NEW_CERT_OCID=$(oci certs-mgmt certificate create-certificate-managed-externally-issued-by-internal-ca \ - --compartment-id ${COMPARTMENT_OCID} \ - --issuer-certificate-authority-id ${CA_OCID} \ - --name ${CERT_NAME}-${TYPE} \ - --csr-pem "$(cat ${TYPE}.csr)" \ - --query 'data.id' \ - --raw-output) - - echo "$NEW_CERT_OCID" + NEW_CERT_OCID=$(oci certs-mgmt certificate create-certificate-managed-externally-issued-by-internal-ca \ + --compartment-id "${COMPARTMENT_OCID}" \ + --issuer-certificate-authority-id "${CA_OCID}" \ + --name "${CERT_NAME}-${TYPE}" \ + --csr-pem "$(cat "${TYPE}.csr")" \ + --query 'data.id' \ + --raw-output) + export NEW_CERT_OCID + + echo "${NEW_CERT_OCID}" } rotateCert() { - TYPE=$1 - CERT_OCID=$2 + TYPE=${1} + CERT_OCID=${2} ## Renew server certificate in OCI oci certs-mgmt certificate update-certificate-managed-externally \ --certificate-id "${CERT_OCID}" \ - --csr-pem "$(cat ${TYPE}.csr)" + --csr-pem "$(cat "${TYPE}.csr")" } diff --git a/examples/webserver/mutual-tls/automatic-store-generator.sh b/examples/webserver/mutual-tls/automatic-store-generator.sh index 03d7da1ca75..35ff6449b2c 100644 --- a/examples/webserver/mutual-tls/automatic-store-generator.sh +++ b/examples/webserver/mutual-tls/automatic-store-generator.sh @@ -52,16 +52,16 @@ createCertificatesAndStores() { openssl pkcs12 -export -in server-signed.cer -inkey server-private.key -out server-signed.p12 -name server -passout pass:changeit keytool -delete -alias server -keystore server.jks -storepass changeit keytool -importkeystore -srckeystore server-signed.p12 -srcstoretype PKCS12 -destkeystore server.jks -srcstorepass changeit -deststorepass changeit - + echo "Importing CA cert to the client and server stores..." 
- if [ "$SINGLE" = true ] ; then + if [ "${SINGLE}" = true ] ; then keytool -v -trustcacerts -keystore client.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt keytool -v -trustcacerts -keystore server.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt - else + else keytool -v -trustcacerts -keystore client-truststore.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt keytool -v -trustcacerts -keystore server-truststore.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt fi - + echo "Changing aliases to 1..." keytool -changealias -alias server -destalias 1 -keypass changeit -keystore server.jks -storepass changeit keytool -changealias -alias client -destalias 1 -keypass changeit -keystore client.jks -storepass changeit @@ -70,14 +70,14 @@ createCertificatesAndStores() { if [ "$TYPE" = PKCS12 ] || [ "$TYPE" = P12 ] ; then keytool -importkeystore -srckeystore client.jks -destkeystore out/client.p12 -srcstoretype JKS -deststoretype PKCS12 -srcstorepass changeit -deststorepass changeit keytool -importkeystore -srckeystore server.jks -destkeystore out/server.p12 -srcstoretype JKS -deststoretype PKCS12 -srcstorepass changeit -deststorepass changeit - if [ "$SINGLE" = false ] ; then + if [ "${SINGLE}" = false ] ; then keytool -importkeystore -srckeystore server-truststore.jks -destkeystore out/server-truststore.p12 -srcstoretype JKS -deststoretype PKCS12 -srcstorepass changeit -deststorepass changeit keytool -importkeystore -srckeystore client-truststore.jks -destkeystore out/client-truststore.p12 -srcstoretype JKS -deststoretype PKCS12 -srcstorepass changeit -deststorepass changeit fi - else + else mv client.jks out/client.jks mv server.jks out/server.jks - if [ "$SINGLE" = false ] ; then + if [ "${SINGLE}" = false ] ; then mv client-truststore.jks out/client-truststore.jks mv server-truststore.jks out/server-truststore.jks fi @@ -85,7 +85,7 @@ createCertificatesAndStores() { } removeAllPreviouslyCreatedStores() { - echo 'Removing all of previously created items...' + echo "Removing all of previously created items..." 
rm -fv ca.key rm -fv ca.jks @@ -110,53 +110,58 @@ removeAllPreviouslyCreatedStores() { rm -fv client-truststore.jks rm -rf out - echo 'Clean up finished' + echo "Clean up finished" } while [ "$1" != "" ]; do case $1 in - -n | --name ) shift - NAME=$1 - ;; - -t | --type ) shift - TYPE=$1 - ;; - -s | --single ) shift - SINGLE=$1 - ;; - -h | --help ) echo "Some cool help" - exit - ;; - * ) echo "ERROR: Invalid parameter" $1 - exit 1 + -n | --name) + shift + NAME=$1 + ;; + -t | --type ) + shift + TYPE="${1}" + ;; + -s | --single ) + shift + SINGLE="${1}" + ;; + -h | --help ) + echo "Some cool help" + exit + ;; + * ) + echo "ERROR: Invalid parameter ${1}" + exit 1 esac - shift + shift done -if [ -z "$NAME" ]; then +if [ -z "${NAME}" ]; then echo "ERROR: Please specify the name of Organization/Application by parameter -n | --name" exit 1 else - echo "Generating certs for Organization/Application "$NAME + echo "Generating certs for Organization/Application ${NAME}" fi -case $TYPE in - JKS | P12 | PKCS12 ) - echo "Output file will be of type" $TYPE - ;; - *) - echo 'ERROR: Invalid output type' $TYPE - echo 'Only JKS | P12 | PKCS12 supported' - return 1 +case ${TYPE} in + JKS | P12 | PKCS12 ) + echo "Output file will be of type ${TYPE}" + ;; + *) + echo "ERROR: Invalid output type ${TYPE}" + echo "Only JKS | P12 | PKCS12 supported" + return 1 esac -case $SINGLE in - true ) - echo "Truststore and private key will be in single file" - ;; - false ) - echo "Truststore and private key will be in separate files" - ;; - *) - echo "ERROR: Only value true/false valid in single parameter! Current " $SINGLE - exit 1 +case ${SINGLE} in + true) + echo "Truststore and private key will be in single file" + ;; + false) + echo "Truststore and private key will be in separate files" + ;; + *) + echo "ERROR: Only value true/false valid in single parameter! Current ${SINGLE}" + exit 1 esac removeAllPreviouslyCreatedStores diff --git a/tests/integration/dbclient/test.sh b/tests/integration/dbclient/test.sh deleted file mode 100755 index bebc1602d29..00000000000 --- a/tests/integration/dbclient/test.sh +++ /dev/null @@ -1,138 +0,0 @@ -#!/bin/bash -e -# -# Copyright (c) 2021, 2023 Oracle and/or its affiliates. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Path to this script -[ -h "${0}" ] && readonly SCRIPT_PATH="$(readlink "${0}")" || readonly SCRIPT_PATH="${0}" - -# Load pipeline environment setup and define WS_DIR -. $(dirname -- "${SCRIPT_PATH}")/../../../etc/scripts/includes/docker-env.sh "${SCRIPT_PATH}" '../../..' 
- -# Local error handler -test_on_error() { - CODE="${?}" && \ - set +x && \ - printf "[ERROR] Error(code=%s) occurred at %s:%s command: %s\n" \ - "${CODE}" "${BASH_SOURCE}" "${LINENO}" "${BASH_COMMAND}" - docker_stop "${DOCKER_CONT_NAME}" 'FLAG_C_RUN' -} - -# Setup error handling using default settings (defined in includes/error_handlers.sh) -error_trap_setup 'test_on_error' - -print_help() { - echo 'Usage: test.sh [-hcsjn] -d ' - echo '' - echo ' -h print this help and exit' - echo ' -c start and stop Docker containers' - echo ' -s execute simple tests (default when no -s, -j or -n is passed)' - echo ' -j execute remote application tests in Java VM mode' - echo ' -n execute remote application tests in native image mode' - echo ' -d select database' - echo ' :: mysql | pgsql' -} - -# Evaluate command line arguments -if [ "$#" -gt '0' ]; then - while getopts 'hcsjnd:' flag 2> /dev/null; do - case "${flag}" in - h) print_help && exit;; - c) readonly FLAG_C='1';; - d) readonly FLAG_D=${OPTARG};; - s) readonly FLAG_S='1';; - j) readonly FLAG_J='1';; - n) readonly FLAG_N='1';; - esac - done -fi - -# Load database setup -if [ -n "${FLAG_D}" ]; then - case "${FLAG_D}" in - mysql) . ${WS_DIR}/etc/scripts/includes/mysql.sh;; - pgsql) . ${WS_DIR}/etc/scripts/includes/pgsql.sh;; - *) echo 'ERROR: Unknown database name, exitting.' && exit 1;; - esac -else - echo 'ERROR: No database was selected, exitting.' - exit 1 -fi - -# Start docker Container -if [ -n "${FLAG_C}" ]; then - readonly DOCKER_CONT_NAME="helidon-tests-dbclient-${FLAG_D}" - docker_start "${DOCKER_IMG}" \ - "${DOCKER_CONT_NAME}" \ - "${DB_HOST}:${DB_PORT}:${DB_PORT}" \ - "${DOCKER_ENV}" \ - 'FLAG_C_RUN' \ - 'FLAG_C' -fi - -# Turn simple tests on when no test was selected -[ -z "${FLAG_J}" -a -z "${FLAG_N}" -a -z "${FLAG_S}" ] && \ - readonly FLAG_S='1' - -# Run simple JDBC tests -[ -n "${FLAG_S}" ] && \ - (cd ${WS_DIR}/tests/integration/dbclient && \ - echo mvn -D${DB_PROPERTY} -pl common,jdbc \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify && \ - mvn -D${DB_PROPERTY} -pl common,jdbc \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - verify) - -# Run remote application tests in Java VM mode -[ -n "${FLAG_J}" ] && \ - (cd ${WS_DIR}/tests/integration/dbclient && \ - echo mvn -D${DB_PROPERTY} \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify && \ - mvn -D${DB_PROPERTY} \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify) - -# Run remote application tests in native image mode -[ -n "${FLAG_N}" ] && \ - (cd ${WS_DIR}/tests/integration/dbclient && \ - echo mvn -D${DB_PROPERTY} -Pnative-image \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify && \ - mvn -D${DB_PROPERTY} -Pnative-image \ - -Dapp.config=${TEST_CONFIG} \ - -Ddb.user=${DB_USER} \ - -Ddb.password=${DB_PASSWORD} \ - -Ddb.url="${DB_URL}" \ - -pl appl verify) - -# Stop docker Container -docker_stop "${DOCKER_CONT_NAME}" 'FLAG_C_RUN' diff --git a/tests/integration/tls-revocation-config/store/automatic-store-generator.sh b/tests/integration/tls-revocation-config/store/automatic-store-generator.sh index 7b54606ff7c..21ae361e405 100644 --- 
a/tests/integration/tls-revocation-config/store/automatic-store-generator.sh +++ b/tests/integration/tls-revocation-config/store/automatic-store-generator.sh @@ -55,16 +55,16 @@ createCertificatesAndStores() { openssl pkcs12 -export -in server-signed.cer -inkey server-private.key -out server-signed.p12 -name server -passout pass:changeit keytool -delete -alias server -keystore server.jks -storepass changeit keytool -importkeystore -srckeystore server-signed.p12 -srcstoretype PKCS12 -destkeystore server.jks -srcstorepass changeit -deststorepass changeit - + echo "Importing CA cert to the client and server stores..." if [ "$SINGLE" = true ] ; then keytool -v -trustcacerts -keystore client.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt keytool -v -trustcacerts -keystore server.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt - else + else keytool -v -trustcacerts -keystore client-truststore.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt keytool -v -trustcacerts -keystore server-truststore.jks -importcert -file ca.pem -alias root-ca -storepass changeit -noprompt fi - + echo "Changing aliases to 1..." keytool -changealias -alias server -destalias 1 -keypass changeit -keystore server.jks -storepass changeit keytool -changealias -alias client -destalias 1 -keypass changeit -keystore client.jks -storepass changeit @@ -77,7 +77,7 @@ createCertificatesAndStores() { keytool -importkeystore -srckeystore server-truststore.jks -destkeystore out/server-truststore.p12 -srcstoretype JKS -deststoretype PKCS12 -srcstorepass changeit -deststorepass changeit keytool -importkeystore -srckeystore client-truststore.jks -destkeystore out/client-truststore.p12 -srcstoretype JKS -deststoretype PKCS12 -srcstorepass changeit -deststorepass changeit fi - else + else mv client.jks out/client.jks mv server.jks out/server.jks if [ "$SINGLE" = false ] ; then @@ -92,89 +92,93 @@ createCertificatesAndStores() { echo 01 > crlnumber openssl ca -config ca.conf -revoke client-signed.cer -keyfile ca.key -cert ca.crt -passin pass:changeit - openssl ca -config ca.conf -gencrl -keyfile ca.key -cert ca.crt -out rt.crl.pem -passin pass:changeit openssl crl -inform PEM -in rt.crl.pem -outform DER -out out/ca.crl rm rt.crl.pem } removeAllPreviouslyCreatedStores() { - echo 'Removing all of previously created items...' + echo 'Removing all of previously created items...' 
- rm -fv ca.key - rm -fv ca.crt - rm -fv ca.jks - rm -fv ca.p12 - rm -fv ca.pem - rm -fv ca.srl - rm -fv server.jks - rm -fv server.cer - rm -fv server.csr - rm -fv server.p12 - rm -fv server-private.key - rm -fv server-signed.cer - rm -fv server-signed.p12 - rm -fv server-truststore.jks - rm -fv client.cer - rm -fv client.csr - rm -fv client.p12 - rm -fv client-private.key - rm -fv client-signed.cer - rm -fv client-signed.p12 - rm -fv client.jks - rm -fv client-truststore.jks - rm -fv root.crl - rm -rf out - rm -rf certindex - rm -rf certserial - rm -rf crlnumber + rm -fv ca.key + rm -fv ca.crt + rm -fv ca.jks + rm -fv ca.p12 + rm -fv ca.pem + rm -fv ca.srl + rm -fv server.jks + rm -fv server.cer + rm -fv server.csr + rm -fv server.p12 + rm -fv server-private.key + rm -fv server-signed.cer + rm -fv server-signed.p12 + rm -fv server-truststore.jks + rm -fv client.cer + rm -fv client.csr + rm -fv client.p12 + rm -fv client-private.key + rm -fv client-signed.cer + rm -fv client-signed.p12 + rm -fv client.jks + rm -fv client-truststore.jks + rm -fv root.crl + rm -rf out + rm -rf certindex + rm -rf certserial + rm -rf crlnumber - echo 'Clean up finished' + echo 'Clean up finished' } while [ "$1" != "" ]; do case $1 in - -n | --name ) shift - NAME=$1 - ;; - -t | --type ) shift - TYPE=$1 - ;; - -s | --single ) shift - SINGLE=$1 - ;; - -h | --help ) echo "Some cool help" - exit - ;; - * ) echo "ERROR: Invalid parameter" $1 - exit 1 + -n | --name) + shift + NAME="${1}" + ;; + -t | --type) + shift + TYPE="${1}" + ;; + -s | --single) + shift + SINGLE="${1}" + ;; + -h | --help) + echo "Some cool help" + exit + ;; + *) + echo "ERROR: Invalid parameter" "${1}" + exit 1 esac shift done if [ -z "$NAME" ]; then NAME="Helidon" else - echo "Generating certs for Organization/Application "$NAME + echo "Generating certs for Organization/Application ${NAME}" fi -case $TYPE in - JKS | P12 | PKCS12 ) - echo "Output file will be of type" $TYPE - ;; - *) - echo 'ERROR: Invalid output type' $TYPE - echo 'Only JKS | P12 | PKCS12 supported' - return 1 +case ${TYPE} in + JKS | P12 | PKCS12 ) + echo "Output file will be of type ${TYPE}" + ;; + *) + echo "ERROR: Invalid output type ${TYPE}" + echo "Only JKS | P12 | PKCS12 supported" + return 1 esac -case $SINGLE in - true ) - echo "Truststore and private key will be in single file" - ;; - false ) - echo "Truststore and private key will be in separate files" - ;; - *) - echo "ERROR: Only value true/false valid in single parameter! Current " $SINGLE - exit 1 +case ${SINGLE} in + true) + echo "Truststore and private key will be in single file" + ;; + false) + echo "Truststore and private key will be in separate files" + ;; + *) + echo "ERROR: Only value true/false valid in single parameter! Current ${SINGLE}" + exit 1 esac removeAllPreviouslyCreatedStores
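
The two ShellCheck findings behind most of the quoting changes above are SC2086 (an unquoted expansion such as $CLIENT_KEY_OCID is subject to word splitting and globbing, hence the "${VAR}" form) and SC2155 (export VAR=$(cmd) masks the exit status of cmd, hence assigning first and exporting on a separate line, as done for NEW_KEY_OCID and NEW_CERT_OCID). A minimal standalone sketch of both patterns; the OCID value and the stand-in command are hypothetical, not taken from the repository:

    #!/bin/bash
    # Sketch of the ShellCheck patterns applied in the hunks above.
    set -e

    KEY_OCID="ocid1.key.oc1..example"          # hypothetical value

    # SC2086: quote expansions so they are passed as a single word.
    # Before:  rotateKeyInVault client $CLIENT_KEY_OCID
    # After:   rotateKeyInVault client "${CLIENT_KEY_OCID}"
    printf 'key id: %s\n' "${KEY_OCID}"

    # SC2155: 'export VAR=$(cmd)' hides a failure of cmd from 'set -e' and '$?'.
    # Assign first, then export separately.
    NEW_KEY_OCID=$(printf '%s' "${KEY_OCID}")  # stand-in for the 'oci kms ...' call
    export NEW_KEY_OCID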
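
For reference, the rewritten option parsing in automatic-store-generator.sh accepts -n | --name, -t | --type (JKS | P12 | PKCS12) and -s | --single (true | false), as shown in the case blocks above. A hypothetical invocation (the organization name is made up):

    # Truststore and private key in separate files, PKCS12 output.
    ./automatic-store-generator.sh --name helidon-test --type P12 --single false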