From 3da276dbbc4b9b79261bc77379c52df1c82b184c Mon Sep 17 00:00:00 2001
From: Benedikt Labrenz
Date: Tue, 15 Apr 2025 10:00:45 +0200
Subject: [PATCH 01/27] build nifi-opa-plugin from source with workaround

---
 nifi/Dockerfile | 26 +++++++++++++++++++++++++-
 1 file changed, 25 insertions(+), 1 deletion(-)

diff --git a/nifi/Dockerfile b/nifi/Dockerfile
index 93daaa019..ff30691fa 100644
--- a/nifi/Dockerfile
+++ b/nifi/Dockerfile
@@ -80,6 +80,28 @@ rm -rf /stackable/nifi-${PRODUCT}/docs
 chmod -R g=u /stackable
 EOF
 
+FROM stackable/image/java-devel AS opa-authorizer-builder
+
+ARG STACKABLE_USER_UID
+ARG PRODUCT
+
+USER ${STACKABLE_USER_UID}
+WORKDIR /stackable
+
+COPY --chown=${STACKABLE_USER_UID}:0 nifi/stackable/opa-authorizer/nifi-framework-api-version.patch /stackable/nifi-framework-api-version.patch
+
+# TODO: Set to tag after new release of nifi-opa-plugin
+RUN git clone --depth 1 https://github.com/DavidGitter/nifi-opa-plugin.git && \
+    cd nifi-opa-plugin && \
+    git reset --hard cdf8c36 && \
+    git apply /stackable/nifi-framework-api-version.patch && \
+    cd authorizer && \
+    mvn \
+    --batch-mode \
+    --no-transfer-progress \
+    install \
+    -DskipTests
+
 FROM stackable/image/java-base AS final
 
 ARG PRODUCT
@@ -97,6 +119,8 @@ LABEL name="Apache NiFi" \
 COPY --chown=${STACKABLE_USER_UID}:0 --from=nifi-builder /stackable/nifi-${PRODUCT} /stackable/nifi-${PRODUCT}/
 COPY --chown=${STACKABLE_USER_UID}:0 --from=nifi-builder /stackable/stackable-bcrypt.jar /stackable/stackable-bcrypt.jar
 
+COPY --chown=${STACKABLE_USER_UID}:0 --from=opa-authorizer-builder /stackable/nifi-opa-plugin/authorizer/target/opa-authorizer.nar /stackable/nifi-${PRODUCT}/extensions/opa-authorizer.nar
+
 COPY --chown=${STACKABLE_USER_UID}:0 nifi/stackable/bin /stackable/bin
 COPY --chown=${STACKABLE_USER_UID}:0 nifi/licenses /licenses
 COPY --chown=${STACKABLE_USER_UID}:0 nifi/python /stackable/python
@@ -127,7 +151,7 @@ ln -s /stackable/nifi-${PRODUCT} /stackable/nifi
 chown --no-dereference ${STACKABLE_USER_UID}:0 /stackable/nifi
 chmod --recursive g=u /stackable/python
 chmod --recursive g=u /stackable/bin
-chmod g=u /stackable/nifi-${PRODUCT}
+chmod --recursive g=u /stackable/nifi-${PRODUCT}
 EOF
 
 # ----------------------------------------

From 66ec5ccaa00f47b7c7cb3a2c2abe63757f23ae68 Mon Sep 17 00:00:00 2001
From: Benedikt Labrenz
Date: Tue, 15 Apr 2025 10:13:15 +0200
Subject: [PATCH 02/27] add changelog entry

---
 CHANGELOG.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a15846112..a6a6f611a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -31,6 +31,7 @@ All notable changes to this project will be documented in this file.
   `check-permissions-ownership.sh` provided in stackable-base image ([#1025]).
 - zookeeper: check for correct permissions and ownerships in /stackable folder via
   `check-permissions-ownership.sh` provided in stackable-base image ([#1043]).
+- nifi: Add OPA authorizer plugin with workaround ([#1058]).
 
 ### Changed
 
@@ -75,6 +76,7 @@ All notable changes to this project will be documented in this file.
 [#1054]: https://github.com/stackabletech/docker-images/pull/1054
 [#1055]: https://github.com/stackabletech/docker-images/pull/1055
 [#1056]: https://github.com/stackabletech/docker-images/pull/1056
+[#1058]: https://github.com/stackabletech/docker-images/pull/1058
 
 ## [25.3.0] - 2025-03-21
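For context on the "workaround": the builder stage above pins an untagged commit and patches the plugin's pom before building. A quick local check that the patch still applies at the pinned commit could look like this (a sketch; the patch path is assumed to sit next to the checkout, and a full clone is used because a `--depth 1` clone only contains the branch tip, not necessarily cdf8c36):

# Verify the workaround patch still applies cleanly at the pinned commit
git clone https://github.com/DavidGitter/nifi-opa-plugin.git
cd nifi-opa-plugin
git checkout cdf8c36
# --check only tests applicability, it does not modify the tree
git apply --check ../nifi-framework-api-version.patch && echo "patch applies cleanly"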
From 625ed2407c1153859901841800ebb354613faf51 Mon Sep 17 00:00:00 2001
From: Benedikt Labrenz
Date: Tue, 22 Apr 2025 16:54:02 +0200
Subject: [PATCH 03/27] add patch

---
 .../opa-authorizer/nifi-framework-api-version.patch | 13 +++++++++++++
 1 file changed, 13 insertions(+)
 create mode 100644 nifi/stackable/opa-authorizer/nifi-framework-api-version.patch

diff --git a/nifi/stackable/opa-authorizer/nifi-framework-api-version.patch b/nifi/stackable/opa-authorizer/nifi-framework-api-version.patch
new file mode 100644
index 000000000..0147ccb87
--- /dev/null
+++ b/nifi/stackable/opa-authorizer/nifi-framework-api-version.patch
@@ -0,0 +1,13 @@
+diff --git a/authorizer/pom.xml b/authorizer/pom.xml
+index da074d7..55e76c3 100644
+--- a/authorizer/pom.xml
++++ b/authorizer/pom.xml
+@@ -19,7 +19,7 @@
+         UTF-8
+         11
+         11
+-        2.3.0
++        1.28.1
+     
+ 
+ 

From fe883b99271db99c741830ec332cd3f4d1ff5c73 Mon Sep 17 00:00:00 2001
From: Benedikt Labrenz
Date: Fri, 2 May 2025 10:21:40 +0200
Subject: [PATCH 04/27] build nifi-opa-plugin from branch
 feat/reworked-opa-response

---
 nifi/Dockerfile                                      | 17 ++++++++---------
 .../nifi-framework-api-version.patch                 | 13 -------------
 2 files changed, 8 insertions(+), 22 deletions(-)
 delete mode 100644 nifi/stackable/opa-authorizer/nifi-framework-api-version.patch

diff --git a/nifi/Dockerfile b/nifi/Dockerfile
index ff30691fa..4b64c88b6 100644
--- a/nifi/Dockerfile
+++ b/nifi/Dockerfile
@@ -88,19 +88,18 @@ ARG PRODUCT
 USER ${STACKABLE_USER_UID}
 WORKDIR /stackable
 
-COPY --chown=${STACKABLE_USER_UID}:0 nifi/stackable/opa-authorizer/nifi-framework-api-version.patch /stackable/nifi-framework-api-version.patch
-
 # TODO: Set to tag after new release of nifi-opa-plugin
-RUN git clone --depth 1 https://github.com/DavidGitter/nifi-opa-plugin.git && \
-    cd nifi-opa-plugin && \
-    git reset --hard cdf8c36 && \
-    git apply /stackable/nifi-framework-api-version.patch && \
-    cd authorizer && \
+RUN git clone --depth 1 --branch feat/reworked-opa-response https://github.com/DavidGitter/nifi-opa-plugin.git && \
+    cd nifi-opa-plugin/authorizer && \
     mvn \
     --batch-mode \
     --no-transfer-progress \
     install \
-    -DskipTests
+    -DskipTests \
+    -Pnifi-${PRODUCT}
+
+# Set correct permissions
+RUN chmod g=u /stackable/nifi-opa-plugin/authorizer/target/opa-authorizer.nar
 
 FROM stackable/image/java-base AS final
 
@@ -151,7 +150,7 @@ ln -s /stackable/nifi-${PRODUCT} /stackable/nifi
 chown --no-dereference ${STACKABLE_USER_UID}:0 /stackable/nifi
 chmod --recursive g=u /stackable/python
 chmod --recursive g=u /stackable/bin
-chmod --recursive g=u /stackable/nifi-${PRODUCT}
+chmod g=u /stackable/nifi-${PRODUCT}
 EOF
 
 # ----------------------------------------

diff --git a/nifi/stackable/opa-authorizer/nifi-framework-api-version.patch b/nifi/stackable/opa-authorizer/nifi-framework-api-version.patch
deleted file mode 100644
index 0147ccb87..000000000
--- a/nifi/stackable/opa-authorizer/nifi-framework-api-version.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/authorizer/pom.xml b/authorizer/pom.xml
-index da074d7..55e76c3 100644
---- a/authorizer/pom.xml
-+++ b/authorizer/pom.xml
-@@ -19,7 +19,7 @@
-        UTF-8
-        11
-        11
--        2.3.0
-+        1.28.1
-    
- 
- 
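The `-Pnifi-${PRODUCT}` flag above selects a Maven profile named after the NiFi release, which is what makes the pom patch from the previous commits unnecessary. Assuming PRODUCT=1.28.1, the build step boils down to the following (a sketch; the profile name is inferred from the -P flag, not verified against the upstream pom):

# Same invocation as in the Dockerfile, with the ARG expanded
mvn --batch-mode --no-transfer-progress install -DskipTests -Pnifi-1.28.1
# To see which NiFi versions the pom actually defines profiles for:
mvn help:all-profiles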
From 999b8080bb85b955b049004b8f960e36dffaead6 Mon Sep 17 00:00:00 2001
From: Benedikt Labrenz
Date: Thu, 8 May 2025 14:09:40 +0200
Subject: [PATCH 05/27] Update nifi/Dockerfile

Co-authored-by: Malte Sander
---
 nifi/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nifi/Dockerfile b/nifi/Dockerfile
index 4b64c88b6..6f79ef8db 100644
--- a/nifi/Dockerfile
+++ b/nifi/Dockerfile
@@ -119,7 +119,7 @@ COPY --chown=${STACKABLE_USER_UID}:0 --from=nifi-builder /stackable/nifi-${PRODU
 COPY --chown=${STACKABLE_USER_UID}:0 --from=nifi-builder /stackable/stackable-bcrypt.jar /stackable/stackable-bcrypt.jar
 
 COPY --chown=${STACKABLE_USER_UID}:0 --from=opa-authorizer-builder /stackable/nifi-opa-plugin/authorizer/target/opa-authorizer.nar /stackable/nifi-${PRODUCT}/extensions/opa-authorizer.nar
-
+COPY --chown=${STACKABLE_USER_UID}:0 --from=opa-authorizer-builder /stackable/nifi-opa-plugin/LICENSE /licenses/NIFI_OPA_PLUGIN_LICENSE
 COPY --chown=${STACKABLE_USER_UID}:0 nifi/stackable/bin /stackable/bin
 COPY --chown=${STACKABLE_USER_UID}:0 nifi/licenses /licenses
 COPY --chown=${STACKABLE_USER_UID}:0 nifi/python /stackable/python
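With both the NAR and its license in the final image, a built image can be smoke-tested from outside (the image tag is illustrative; building the Stackable images themselves goes through their own tooling, which is not shown here):

# The symlink /stackable/nifi -> /stackable/nifi-${PRODUCT} is created in the final stage
docker run --rm localhost/nifi:test \
  ls -l /stackable/nifi/extensions/opa-authorizer.nar /licenses/NIFI_OPA_PLUGIN_LICENSE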
"https://github.com/async-profiler/async-profiler/releases/download/v$VERSION/$file" echo "Uploading $file to Nexus" - curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" \ + curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" \ --upload-file "$file" \ 'https://repo.stackable.tech/repository/packages/async-profiler/' done diff --git a/.scripts/upload_new_inotify-tools_version.sh b/.scripts/upload_new_inotify-tools_version.sh index 8d5294a77..18f1683f6 100755 --- a/.scripts/upload_new_inotify-tools_version.sh +++ b/.scripts/upload_new_inotify-tools_version.sh @@ -49,10 +49,10 @@ for arch in "${ARCHITECTURES[@]}"; do file=inotify-tools-$VERSION.$arch.rpm echo "Downloading $file from dl.fedoraproject.org" - curl --fail -LOs "https://dl.fedoraproject.org/pub/epel/$EPEL_VERSION/Everything/$arch/Packages/i/$file" + curl --fail -LO --progress-bar "https://dl.fedoraproject.org/pub/epel/$EPEL_VERSION/Everything/$arch/Packages/i/$file" echo "Uploading $file to Nexus" - curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" \ + curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" \ --upload-file "$file" \ 'https://repo.stackable.tech/repository/packages/inotify-tools/' done diff --git a/.scripts/upload_new_jmx_exporter_version.sh b/.scripts/upload_new_jmx_exporter_version.sh index 33386270c..8ad909d2a 100755 --- a/.scripts/upload_new_jmx_exporter_version.sh +++ b/.scripts/upload_new_jmx_exporter_version.sh @@ -35,15 +35,15 @@ JAR_FILE="jmx_prometheus_javaagent-$VERSION.jar" SUM_FILE="$JAR_FILE.sha256" echo "Downloading JMX Exporter" -curl --fail -LOs "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$JAR_FILE" -curl --fail -LOs "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$SUM_FILE" +curl --fail -LO --progress-bar "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$JAR_FILE" +curl --fail -LO --progress-bar "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$SUM_FILE" # Check that sha256 sum matches before uploading sha256sum --check --status "$SUM_FILE" && echo "SHA256 Sum matches" echo "Uploading to Nexus" -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$JAR_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/' -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$SUM_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/' +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$JAR_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/' +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$SUM_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/' echo "Successfully uploaded new version of the JMX Exporter ($VERSION) Jar to Nexus" echo "https://repo.stackable.tech/service/rest/repository/browse/packages/jmx-exporter/" diff --git a/.scripts/upload_new_kcat_version.sh b/.scripts/upload_new_kcat_version.sh index a6b91b3b3..5915d8858 100755 --- a/.scripts/upload_new_kcat_version.sh +++ b/.scripts/upload_new_kcat_version.sh @@ -38,7 +38,7 @@ echo "Downloading kcat" curl --fail -Ls -o "kcat-$VERSION.tar.gz" "https://github.com/edenhill/kcat/archive/refs/tags/$VERSION.tar.gz" echo "Uploading to Nexus" -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "kcat-$VERSION.tar.gz" 'https://repo.stackable.tech/repository/packages/kcat/' +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "kcat-$VERSION.tar.gz" 
diff --git a/.scripts/upload_new_async-profiler_version.sh b/.scripts/upload_new_async-profiler_version.sh
index e70e27ee2..209625785 100755
--- a/.scripts/upload_new_async-profiler_version.sh
+++ b/.scripts/upload_new_async-profiler_version.sh
@@ -43,10 +43,10 @@ for arch in "${ARCHITECTURES[@]}"; do
   file=async-profiler-$VERSION-linux-$arch.tar.gz
 
   echo "Downloading $file from github.com"
-  curl --fail -LOs "https://github.com/async-profiler/async-profiler/releases/download/v$VERSION/$file"
+  curl --fail -LO --progress-bar "https://github.com/async-profiler/async-profiler/releases/download/v$VERSION/$file"
 
   echo "Uploading $file to Nexus"
-  curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" \
+  curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" \
     --upload-file "$file" \
     'https://repo.stackable.tech/repository/packages/async-profiler/'
 done

diff --git a/.scripts/upload_new_inotify-tools_version.sh b/.scripts/upload_new_inotify-tools_version.sh
index 8d5294a77..18f1683f6 100755
--- a/.scripts/upload_new_inotify-tools_version.sh
+++ b/.scripts/upload_new_inotify-tools_version.sh
@@ -49,10 +49,10 @@ for arch in "${ARCHITECTURES[@]}"; do
   file=inotify-tools-$VERSION.$arch.rpm
 
   echo "Downloading $file from dl.fedoraproject.org"
-  curl --fail -LOs "https://dl.fedoraproject.org/pub/epel/$EPEL_VERSION/Everything/$arch/Packages/i/$file"
+  curl --fail -LO --progress-bar "https://dl.fedoraproject.org/pub/epel/$EPEL_VERSION/Everything/$arch/Packages/i/$file"
 
   echo "Uploading $file to Nexus"
-  curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" \
+  curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" \
     --upload-file "$file" \
     'https://repo.stackable.tech/repository/packages/inotify-tools/'
 done

diff --git a/.scripts/upload_new_jmx_exporter_version.sh b/.scripts/upload_new_jmx_exporter_version.sh
index 33386270c..8ad909d2a 100755
--- a/.scripts/upload_new_jmx_exporter_version.sh
+++ b/.scripts/upload_new_jmx_exporter_version.sh
@@ -35,15 +35,15 @@ JAR_FILE="jmx_prometheus_javaagent-$VERSION.jar"
 SUM_FILE="$JAR_FILE.sha256"
 
 echo "Downloading JMX Exporter"
-curl --fail -LOs "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$JAR_FILE"
-curl --fail -LOs "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$SUM_FILE"
+curl --fail -LO --progress-bar "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$JAR_FILE"
+curl --fail -LO --progress-bar "https://github.com/prometheus/jmx_exporter/releases/download/$VERSION/$SUM_FILE"
 
 # Check that sha256 sum matches before uploading
 sha256sum --check --status "$SUM_FILE" && echo "SHA256 Sum matches"
 
 echo "Uploading to Nexus"
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$JAR_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/'
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$SUM_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/'
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$JAR_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/'
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$SUM_FILE" 'https://repo.stackable.tech/repository/packages/jmx-exporter/'
 
 echo "Successfully uploaded new version of the JMX Exporter ($VERSION) Jar to Nexus"
 echo "https://repo.stackable.tech/service/rest/repository/browse/packages/jmx-exporter/"
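The upload change repeated throughout this patch is easiest to read with the flags unpacked (annotation of the existing command, not a new variant; "example" stands in for the per-product folder):

# --fail          turn HTTP errors into a non-zero exit code, so `|| EXIT_STATUS=$?` catches them
# -o /dev/null    discard the response body so it cannot interleave with the progress bar
# --progress-bar  render a compact single-line meter instead of curl's default transfer table
# --upload-file   HTTP PUT of the file under its basename
curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" \
  --upload-file "$file" \
  'https://repo.stackable.tech/repository/packages/example/'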
by using "curl https://archive.apache.org/dist/druid/KEYS | gpg --import")' - if ! (gpg --verify "${src_file}.asc" "${src_file}" 2> /dev/null); then echo "ERROR: The signature could not be verified" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/druid/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.asc" 'https://repo.stackable.tech/repository/packages/druid/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha512" 'https://repo.stackable.tech/repository/packages/druid/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/druid/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.asc" 'https://repo.stackable.tech/repository/packages/druid/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha512" 'https://repo.stackable.tech/repository/packages/druid/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/hadoop/upload_new_hadoop_version.sh b/hadoop/upload_new_hadoop_version.sh index 15f58a9f9..9c390c967 100755 --- a/hadoop/upload_new_hadoop_version.sh +++ b/hadoop/upload_new_hadoop_version.sh @@ -39,16 +39,16 @@ cd "$WORK_DIR" || exit bin_file=hadoop-$VERSION.tar.gz src_file=hadoop-$VERSION-src.tar.gz -echo "Downloading Hadoop (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" +echo "Downloading Hadoop binary (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hadoop-$VERSION/$bin_file" curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hadoop-$VERSION/$bin_file.asc" curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hadoop-$VERSION/$bin_file.sha512" +echo "Downloading Hadoop source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hadoop-$VERSION/$src_file" curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hadoop-$VERSION/$src_file.asc" curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hadoop-$VERSION/$src_file.sha512" - # It is probably redundant to check both the checksum and the signature but it's cheap and why not echo "Validating SHA512 Checksums" if ! (sha512sum --tag "$bin_file" | diff - "$bin_file.sha512" && sha512sum --tag "$src_file" | diff - "$src_file.sha512"); then @@ -58,22 +58,22 @@ if ! (sha512sum --tag "$bin_file" | diff - "$bin_file.sha512" && sha512sum --tag fi echo "Validating signatures" -echo "--> NOTE: Make sure you have downloaded and added the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\")" - if ! 
(gpg --verify "$bin_file.asc" "$bin_file" 2> /dev/null && gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then echo "ERROR: One of the signatures could not be verified" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha512" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha512" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/hadoop/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/hbase/upload_new_hbase-operator-tools_version.sh b/hbase/upload_new_hbase-operator-tools_version.sh index 0872e2ee3..aa33a3379 100755 --- a/hbase/upload_new_hbase-operator-tools_version.sh +++ b/hbase/upload_new_hbase-operator-tools_version.sh @@ -5,6 +5,11 @@ set -euo pipefail VERSION=${1:?"Missing version number argument (arg 1)"} NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"} +# We prefer fast downloads... 
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/hbase" +# However, if the version is not available, use the slow archive instead: +# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/hbase" + read -r -s -p "Nexus Password: " NEXUS_PASSWORD echo "" @@ -33,11 +38,10 @@ cd "$WORK_DIR" || exit src_file=hbase-operator-tools-$VERSION-src.tar.gz -echo "Downloading hbase-operator-tools (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)" -curl --fail -LOs "https://archive.apache.org/dist/hbase/hbase-operator-tools-$VERSION/$src_file" -curl --fail -LOs "https://archive.apache.org/dist/hbase/hbase-operator-tools-$VERSION/$src_file.asc" -curl --fail -LOs "https://archive.apache.org/dist/hbase/hbase-operator-tools-$VERSION/$src_file.sha512" - +echo "Downloading hbase-operator-tools source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hbase-operator-tools-$VERSION/$src_file" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hbase-operator-tools-$VERSION/$src_file.asc" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hbase-operator-tools-$VERSION/$src_file.sha512" # It is probably redundant to check both the checksum and the signature but it's cheap and why not echo "Validating SHA512 Checksums" @@ -47,9 +51,10 @@ if ! (gpg --print-md SHA512 "$src_file" | diff - "$src_file.sha512"); then fi echo "Validating signatures" -echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://downloads.apache.org/hbase/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using "curl https://downloads.apache.org/hbase/KEYS | gpg --import")' if ! (gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then echo "ERROR: One of the signatures could not be verified" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi @@ -57,9 +62,9 @@ fi echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hbase-operator-tools/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hbase-operator-tools/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/hbase-operator-tools/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hbase-operator-tools/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hbase-operator-tools/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/hbase-operator-tools/' || EXIT_STATUS=$? 
diff --git a/hbase/upload_new_hbase_version.sh b/hbase/upload_new_hbase_version.sh
index 80594d4d5..9b0c9ceef 100755
--- a/hbase/upload_new_hbase_version.sh
+++ b/hbase/upload_new_hbase_version.sh
@@ -5,6 +5,11 @@ set -euo pipefail
 VERSION=${1:?"Missing version number argument (arg 1)"}
 NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"}
 
+# We prefer fast downloads...
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/hbase"
+# However, if the version is not available, use the slow archive instead:
+# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/hbase"
+
 read -r -s -p "Nexus Password: " NEXUS_PASSWORD
 echo ""
 
@@ -33,11 +38,10 @@ cd "$WORK_DIR" || exit
 
 src_file=hbase-$VERSION-src.tar.gz
 
-echo "Downloading HBase source (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)"
-curl --fail -LOs "https://archive.apache.org/dist/hbase/$VERSION/$src_file"
-curl --fail -LOs "https://archive.apache.org/dist/hbase/$VERSION/$src_file.asc"
-curl --fail -LOs "https://archive.apache.org/dist/hbase/$VERSION/$src_file.sha512"
-
+echo "Downloading HBase source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$src_file"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$src_file.asc"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$src_file.sha512"
 
 # It is probably redundant to check both the checksum and the signature but it's cheap and why not
 echo "Validating SHA512 Checksums"
@@ -47,17 +51,18 @@ if ! (gpg --print-md SHA512 "$src_file" | diff - "$src_file.sha512"); then
 fi
 
 echo "Validating signatures"
-echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://downloads.apache.org/hbase/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using "curl https://downloads.apache.org/hbase/KEYS | gpg --import")'
 if ! (gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then
   echo "ERROR: One of the signatures could not be verified"
+  echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html"
+  echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\""
   exit 1
 fi
 
 echo "Uploading everything to Nexus"
 EXIT_STATUS=0
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hbase/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hbase/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/hbase/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hbase/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hbase/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/hbase/' || EXIT_STATUS=$?
 
 if [ $EXIT_STATUS -ne 0 ]; then
   echo "ERROR: Upload failed"
diff --git a/hbase/upload_new_phoenix_version.sh b/hbase/upload_new_phoenix_version.sh
index 83c099df3..921af8625 100755
--- a/hbase/upload_new_phoenix_version.sh
+++ b/hbase/upload_new_phoenix_version.sh
@@ -5,6 +5,11 @@ set -euo pipefail
 VERSION=${1:?"Missing version number argument (arg 1)"}
 NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"}
 
+# We prefer fast downloads...
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/phoenix"
+# However, if the version is not available, use the slow archive instead:
+# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/phoenix"
+
 read -r -s -p "Nexus Password: " NEXUS_PASSWORD
 echo ""
 
@@ -33,11 +38,10 @@ cd "$WORK_DIR" || exit
 
 src_file=phoenix-$VERSION-src.tar.gz
 
-echo "Downloading phoenix (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)"
-curl --fail -LOs "https://archive.apache.org/dist/phoenix/phoenix-$VERSION/$src_file"
-curl --fail -LOs "https://archive.apache.org/dist/phoenix/phoenix-$VERSION/$src_file.asc"
-curl --fail -LOs "https://archive.apache.org/dist/phoenix/phoenix-$VERSION/$src_file.sha512"
-
+echo "Downloading phoenix source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-$VERSION/$src_file"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-$VERSION/$src_file.asc"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-$VERSION/$src_file.sha512"
 
 # It is probably redundant to check both the checksum and the signature but it's cheap and why not
 echo "Validating SHA512 Checksums"
@@ -47,9 +51,10 @@ if ! (gpg --print-md SHA512 "$src_file" | diff - "$src_file.sha512"); then
 fi
 
 echo "Validating signatures"
-echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://downloads.apache.org/phoenix/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using "curl https://downloads.apache.org/phoenix/KEYS | gpg --import")'
 if ! (gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then
   echo "ERROR: One of the signatures could not be verified"
+  echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html"
+  echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\""
   exit 1
 fi
 
@@ -57,9 +62,9 @@ fi
 
 echo "Uploading everything to Nexus"
 EXIT_STATUS=0
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/phoenix/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/phoenix/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/phoenix/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/phoenix/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/phoenix/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/phoenix/' || EXIT_STATUS=$?
 
 if [ $EXIT_STATUS -ne 0 ]; then
diff --git a/hive/upload_new_hive_version.sh b/hive/upload_new_hive_version.sh
index 1ed8bbb59..2c7e51804 100755
--- a/hive/upload_new_hive_version.sh
+++ b/hive/upload_new_hive_version.sh
@@ -5,6 +5,11 @@ set -euo pipefail
 VERSION=${1:?"Missing version number argument (arg 1)"}
 NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"}
 
+# We prefer fast downloads...
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/hive"
+# However, if the version is not available, use the slow archive instead:
+# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/hive"
+
 read -r -s -p "Nexus Password: " NEXUS_PASSWORD
 echo ""
 
@@ -34,16 +39,15 @@ cd "$WORK_DIR" || exit
 bin_file="apache-hive-${VERSION}-bin.tar.gz"
 src_file="apache-hive-$VERSION-src.tar.gz"
 
-echo "Downloading Hive (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)"
-curl --fail -LOs "https://dlcdn.apache.org/hive/hive-${VERSION}/${bin_file}"
-curl --fail -LOs "https://dlcdn.apache.org/hive/hive-${VERSION}/${bin_file}.asc"
-curl --fail -LOs "https://dlcdn.apache.org/hive/hive-${VERSION}/${bin_file}.sha256"
-
-echo "Downloading Hive (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)"
-curl --fail -LOs "https://dlcdn.apache.org/hive/hive-${VERSION}/${src_file}"
-curl --fail -LOs "https://dlcdn.apache.org/hive/hive-${VERSION}/${src_file}.asc"
-curl --fail -LOs "https://dlcdn.apache.org/hive/hive-${VERSION}/${src_file}.sha256"
+echo "Downloading Hive binary (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hive-${VERSION}/${bin_file}"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hive-${VERSION}/${bin_file}.asc"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hive-${VERSION}/${bin_file}.sha256"
 
+echo "Downloading Hive source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hive-${VERSION}/${src_file}"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hive-${VERSION}/${src_file}.asc"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/hive-${VERSION}/${src_file}.sha256"
 
 # It is probably redundant to check both the checksum and the signature but it's cheap and why not
 echo "Validating SHA256 Checksums"
@@ -53,22 +57,22 @@ if ! (sha256sum "${bin_file}" | diff - "${bin_file}.sha256" && sha256sum "${src_
 fi
 
 echo "Validating signatures"
-echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://dlcdn.apache.org/hive/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using "curl https://dlcdn.apache.org/hive/KEYS | gpg --import")'
-
 if ! (gpg --verify "$bin_file.asc" "$bin_file" 2> /dev/null && gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then
   echo "ERROR: Signature could not be verified"
+  echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html"
+  echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\""
   exit 1
 fi
 
 echo "Uploading everything to Nexus"
 EXIT_STATUS=0
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
 
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$?
 
 if [ $EXIT_STATUS -ne 0 ]; then
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha256" 'https://repo.stackable.tech/repository/packages/hive/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then diff --git a/kafka/upload_new_kafka_version.sh b/kafka/upload_new_kafka_version.sh index 546880da7..8145f672e 100755 --- a/kafka/upload_new_kafka_version.sh +++ b/kafka/upload_new_kafka_version.sh @@ -5,6 +5,11 @@ set -euo pipefail VERSION=${1:?"Missing version number argument (arg 1)"} NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"} +# We prefer fast downloads... 
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/kafka" +# However, if the version is not available, use the slow archive instead: +# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/kafka" + read -r -s -p "Nexus Password: " NEXUS_PASSWORD echo "" @@ -34,14 +39,15 @@ cd "$WORK_DIR" || exit bin_file=kafka_2.13-$VERSION.tgz src_file=kafka-$VERSION-src.tgz -echo "Downloading Kafka (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)" -curl --fail -LOs "https://archive.apache.org/dist/kafka/$VERSION/$bin_file" -curl --fail -LOs "https://archive.apache.org/dist/kafka/$VERSION/$bin_file.asc" -curl --fail -LOs "https://archive.apache.org/dist/kafka/$VERSION/$bin_file.sha512" +echo "Downloading Kafka binary (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$bin_file" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$bin_file.asc" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$bin_file.sha512" -curl --fail -LOs "https://archive.apache.org/dist/kafka/$VERSION/$src_file" -curl --fail -LOs "https://archive.apache.org/dist/kafka/$VERSION/$src_file.asc" -curl --fail -LOs "https://archive.apache.org/dist/kafka/$VERSION/$src_file.sha512" +echo "Downloading Kafka source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$src_file" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$src_file.asc" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/$VERSION/$src_file.sha512" # It is probably redundant to check both the checksum and the signature but it's cheap and why not echo "Validating SHA512 Checksum" @@ -51,22 +57,22 @@ if ! (gpg --print-md SHA512 "$bin_file" | diff - "$bin_file.sha512" && gpg --pri fi echo "Validating signatures" -echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://downloads.apache.org/kafka/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using "curl https://downloads.apache.org/kafka/KEYS | gpg --import")' - if ! (gpg --verify "$bin_file.asc" "$bin_file" 2> /dev/null && gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then echo "ERROR: One of the signatures could not be verified" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" 'https://repo.stackable.tech/repository/packages/kafka/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" 'https://repo.stackable.tech/repository/packages/kafka/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha512" 'https://repo.stackable.tech/repository/packages/kafka/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" 'https://repo.stackable.tech/repository/packages/kafka/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" 'https://repo.stackable.tech/repository/packages/kafka/' || EXIT_STATUS=$? 
diff --git a/omid/upload_new_omid_version.sh b/omid/upload_new_omid_version.sh
index cbfc5db94..d3d9ea7b5 100755
--- a/omid/upload_new_omid_version.sh
+++ b/omid/upload_new_omid_version.sh
@@ -5,6 +5,11 @@ set -e
 VERSION=${1:?"Missing version number argument (arg 1)"}
 NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"}
 
+# We prefer fast downloads...
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/phoenix"
+# However, if the version is not available, use the slow archive instead:
+# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/phoenix"
+
 read -r -s -p "Nexus Password: " NEXUS_PASSWORD
 echo ""
 
@@ -32,11 +37,10 @@ trap cleanup EXIT
 cd "$WORK_DIR" || exit
 
 src_file=phoenix-omid-$VERSION-src.tar.gz
-echo "Downloading Omid (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)"
-curl --fail -LOs "https://downloads.apache.org/phoenix/phoenix-omid-${VERSION}/${src_file}"
-curl --fail -LOs "https://downloads.apache.org/phoenix/phoenix-omid-$VERSION/$src_file.asc"
-curl --fail -LOs "https://downloads.apache.org/phoenix/phoenix-omid-$VERSION/$src_file.sha512"
-
+echo "Downloading Omid source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-omid-${VERSION}/${src_file}"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-omid-$VERSION/$src_file.asc"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-omid-$VERSION/$src_file.sha512"
 
 # It is probably redundant to check both the checksum and the signature but it's cheap and why not
 echo "Validating SHA512 Checksums"
@@ -46,17 +50,18 @@ if ! (gpg --print-md SHA512 "$src_file" | diff - "$src_file.sha512"); then
 fi
 
 echo "Validating signatures"
-echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://downloads.apache.org/phoenix/KEYS) to GPG: https://www.apache.org/info/verification.html'
 if ! (gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then
   echo "ERROR: One of the signatures could not be verified"
+  echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html"
+  echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\""
   exit 1
 fi
 
 echo "Uploading everything to Nexus"
 EXIT_STATUS=0
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$?
 
 if [ $EXIT_STATUS -ne 0 ]; then
   echo "ERROR: Upload failed"
\"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/nifi/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.asc" 'https://repo.stackable.tech/repository/packages/nifi/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha512" 'https://repo.stackable.tech/repository/packages/nifi/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/nifi/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.asc" 'https://repo.stackable.tech/repository/packages/nifi/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha512" 'https://repo.stackable.tech/repository/packages/nifi/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/omid/upload_new_omid_version.sh b/omid/upload_new_omid_version.sh index cbfc5db94..d3d9ea7b5 100755 --- a/omid/upload_new_omid_version.sh +++ b/omid/upload_new_omid_version.sh @@ -5,6 +5,11 @@ set -e VERSION=${1:?"Missing version number argument (arg 1)"} NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"} +# We prefer fast downloads... +BASE_DOWNLOAD_URL="https://dlcdn.apache.org/phoenix" +# However, if the version is not available, use the slow archive instead: +# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/phoenix" + read -r -s -p "Nexus Password: " NEXUS_PASSWORD echo "" @@ -32,11 +37,10 @@ trap cleanup EXIT cd "$WORK_DIR" || exit src_file=phoenix-omid-$VERSION-src.tar.gz -echo "Downloading Omid (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)" -curl --fail -LOs "https://downloads.apache.org/phoenix/phoenix-omid-${VERSION}/${src_file}" -curl --fail -LOs "https://downloads.apache.org/phoenix/phoenix-omid-$VERSION/$src_file.asc" -curl --fail -LOs "https://downloads.apache.org/phoenix/phoenix-omid-$VERSION/$src_file.sha512" - +echo "Downloading Omid source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-omid-${VERSION}/${src_file}" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-omid-$VERSION/$src_file.asc" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/phoenix-omid-$VERSION/$src_file.sha512" # It is probably redundant to check both the checksum and the signature but it's cheap and why not echo "Validating SHA512 Checksums" @@ -46,17 +50,18 @@ if ! (gpg --print-md SHA512 "$src_file" | diff - "$src_file.sha512"); then fi echo "Validating signatures" -echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://downloads.apache.org/phoenix/KEYS) to GPG: https://www.apache.org/info/verification.html' if ! (gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then echo "ERROR: One of the signatures could not be verified" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. 
\"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" 'https://repo.stackable.tech/repository/packages/omid/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/opa/upload_new_opa_version.sh b/opa/upload_new_opa_version.sh index d26ead135..0f2b797ef 100755 --- a/opa/upload_new_opa_version.sh +++ b/opa/upload_new_opa_version.sh @@ -40,7 +40,7 @@ curl --fail -L -o "${tar_gz_file}" "${download_url}" echo "Uploading OPA source to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${tar_gz_file}" 'https://repo.stackable.tech/repository/packages/opa/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${tar_gz_file}" 'https://repo.stackable.tech/repository/packages/opa/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/spark-k8s/upload_new_hbase-connector_version.sh b/spark-k8s/upload_new_hbase-connector_version.sh index 6cc74533a..46a19af66 100755 --- a/spark-k8s/upload_new_hbase-connector_version.sh +++ b/spark-k8s/upload_new_hbase-connector_version.sh @@ -40,7 +40,7 @@ curl --fail -L -o "${tar_gz_file}" "${download_url}" echo "Uploading hbase-connectors source to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${tar_gz_file}" 'https://repo.stackable.tech/repository/packages/hbase-connectors/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${tar_gz_file}" 'https://repo.stackable.tech/repository/packages/hbase-connectors/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/spark-k8s/upload_new_spark_version.sh b/spark-k8s/upload_new_spark_version.sh index 3fe0994c3..4ed6906c7 100755 --- a/spark-k8s/upload_new_spark_version.sh +++ b/spark-k8s/upload_new_spark_version.sh @@ -5,6 +5,11 @@ set -euo pipefail VERSION=${1:?"Missing version number argument (arg 1)"} NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"} +# We prefer fast downloads... 
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/spark" +# However, if the version is not available, use the slow archive instead: +# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/spark" + read -r -s -p "Nexus Password: " NEXUS_PASSWORD echo "" @@ -33,10 +38,10 @@ cd "$WORK_DIR" || exit src_file="spark-${VERSION}.tgz" -echo "Downloading Spark (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)" -curl --fail -LOs "https://archive.apache.org/dist/spark/spark-${VERSION}/${src_file}" -curl --fail -LOs "https://archive.apache.org/dist/spark/spark-${VERSION}/${src_file}.asc" -curl --fail -LOs "https://archive.apache.org/dist/spark/spark-${VERSION}/${src_file}.sha512" +echo "Downloading Spark source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/spark-${VERSION}/${src_file}" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/spark-${VERSION}/${src_file}.asc" +curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/spark-${VERSION}/${src_file}.sha512" # It is probably redundant to check both the checksum and the signature but it's cheap and why not echo "Validating SHA512 Checksum" @@ -46,18 +51,18 @@ if ! (sha512sum "${src_file}" | diff - "${src_file}.sha512"); then fi echo "Validating signature" -echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://archive.apache.org/dist/spark/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using "curl https://archive.apache.org/dist/spark/KEYS | gpg --import")' - if ! (gpg --verify "${src_file}.asc" "${src_file}" 2>/dev/null); then echo "ERROR: The signature could not be verified" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/spark/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.asc" 'https://repo.stackable.tech/repository/packages/spark/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha512" 'https://repo.stackable.tech/repository/packages/spark/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/spark/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.asc" 'https://repo.stackable.tech/repository/packages/spark/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha512" 'https://repo.stackable.tech/repository/packages/spark/' || EXIT_STATUS=$? 
diff --git a/trino-storage-connector/upload_new_trino_storage_connector_version.sh b/trino-storage-connector/upload_new_trino_storage_connector_version.sh
index d260e823d..737b063d5 100755
--- a/trino-storage-connector/upload_new_trino_storage_connector_version.sh
+++ b/trino-storage-connector/upload_new_trino_storage_connector_version.sh
@@ -43,8 +43,8 @@ sha256sum "${src_file}" | cut --delimiter=' ' --field=1 > "${src_file}.sha256"
 
 echo "Uploading everything to Nexus"
 EXIT_STATUS=0
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$?
 
 if [ $EXIT_STATUS -ne 0 ]; then
   echo "ERROR: Upload failed"

diff --git a/trino/upload_new_trino_version.sh b/trino/upload_new_trino_version.sh
index 4386e50b6..2dfda18ea 100755
--- a/trino/upload_new_trino_version.sh
+++ b/trino/upload_new_trino_version.sh
@@ -43,8 +43,8 @@ sha256sum "${src_file}" | cut --delimiter=' ' --field=1 > "${src_file}.sha256"
 
 echo "Uploading everything to Nexus"
 EXIT_STATUS=0
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$?
-curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$?
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$?
 
 if [ $EXIT_STATUS -ne 0 ]; then
   echo "ERROR: Upload failed"
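The vector change below applies the same pattern to RPM packages, where signature checking goes through the RPM keyring rather than GPG directly; the NOTE being moved into the error branch refers to a setup along these lines (a sketch using the names from the script itself; $RPM_PACKAGE_DB_PATH and $file are defined there):

# Import Datadog's public key into the package database the script uses,
# then check the signature of a downloaded package
rpmkeys --import --dbpath "$RPM_PACKAGE_DB_PATH" DATADOG_APT_KEY_CURRENT.public
rpmkeys --checksig --dbpath "$RPM_PACKAGE_DB_PATH" "$file"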
+curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${bin_file}.sha1" 'https://repo.stackable.tech/repository/packages/trino-cli/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/trino-storage-connector/upload_new_trino_storage_connector_version.sh b/trino-storage-connector/upload_new_trino_storage_connector_version.sh index d260e823d..737b063d5 100755 --- a/trino-storage-connector/upload_new_trino_storage_connector_version.sh +++ b/trino-storage-connector/upload_new_trino_storage_connector_version.sh @@ -43,8 +43,8 @@ sha256sum "${src_file}" | cut --delimiter=' ' --field=1 > "${src_file}.sha256" echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-storage/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/trino/upload_new_trino_version.sh b/trino/upload_new_trino_version.sh index 4386e50b6..2dfda18ea 100755 --- a/trino/upload_new_trino_version.sh +++ b/trino/upload_new_trino_version.sh @@ -43,8 +43,8 @@ sha256sum "${src_file}" | cut --delimiter=' ' --field=1 > "${src_file}.sha256" echo "Uploading everything to Nexus" EXIT_STATUS=0 -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "${src_file}.sha256" 'https://repo.stackable.tech/repository/packages/trino-server/' || EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" diff --git a/vector/upload_new_vector_version.sh b/vector/upload_new_vector_version.sh index 4ec13cf40..2d71eb08c 100755 --- a/vector/upload_new_vector_version.sh +++ b/vector/upload_new_vector_version.sh @@ -32,11 +32,6 @@ for arch in "${ARCHITECTURES[@]}"; do "https://yum.vector.dev/stable/vector-$major_version/$arch/$file" echo "Validating signature" - echo "--> NOTE: Make sure you have downloaded and added Datadog's \ -public key (https://keys.datadoghq.com/DATADOG_RPM_KEY_B01082D3.public) \ -to the RPM package database: -rpmkeys --import --dbpath $RPM_PACKAGE_DB_PATH DATADOG_APT_KEY_CURRENT.public" - EXIT_STATUS=0 # `rpmkeys --checksig` also succeeds if the digests of an unsigned # package are okay. Therefore, test explicitly if the output @@ -50,11 +45,15 @@ rpmkeys --import --dbpath $RPM_PACKAGE_DB_PATH DATADOG_APT_KEY_CURRENT.public" EXIT_STATUS=$? if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: The signature could not be verified." 
+  echo "--> NOTE: Make sure you have downloaded and added Datadog's \
+public key (https://keys.datadoghq.com/DATADOG_RPM_KEY_B01082D3.public) \
+to the RPM package database:
+rpmkeys --import --dbpath $RPM_PACKAGE_DB_PATH DATADOG_RPM_KEY_B01082D3.public"
     exit 1
   fi
 
   echo "Uploading $file to Nexus"
-  curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" \
+  curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" \
     --upload-file "$file" \
     'https://repo.stackable.tech/repository/packages/vector/'
 
diff --git a/zookeeper/upload_new_zookeeper_version.sh b/zookeeper/upload_new_zookeeper_version.sh
index efaa6ccac..d3b74f65a 100755
--- a/zookeeper/upload_new_zookeeper_version.sh
+++ b/zookeeper/upload_new_zookeeper_version.sh
@@ -5,6 +5,11 @@ set -euo pipefail
 VERSION=${1:?"Missing version number argument (arg 1)"}
 NEXUS_USER=${2:?"Missing Nexus username argument (arg 2)"}
 
+# We prefer fast downloads...
+BASE_DOWNLOAD_URL="https://dlcdn.apache.org/zookeeper"
+# However, if the version is not available, use the slow archive instead:
+# BASE_DOWNLOAD_URL="https://archive.apache.org/dist/zookeeper"
+
 read -r -s -p "Nexus Password: " NEXUS_PASSWORD
 echo ""
 
@@ -33,18 +38,16 @@ cd "$WORK_DIR" || exit
 bin_file=apache-zookeeper-$VERSION-bin.tar.gz
 src_file=apache-zookeeper-$VERSION.tar.gz
 
-download_url=https://archive.apache.org/dist/zookeeper
-
-echo "Downloading ZooKeeper (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)"
-curl --fail -LOs "$download_url/zookeeper-$VERSION/$bin_file"
-curl --fail -LOs "$download_url/zookeeper-$VERSION/$bin_file.asc"
-curl --fail -LOs "$download_url/zookeeper-$VERSION/$bin_file.sha512"
-echo "Downloading ZooKeeper sources (this can take a while, it is intentionally downloading from a slow mirror that contains all old versions)"
-curl --fail -LOs "$download_url/zookeeper-$VERSION/$src_file"
-curl --fail -LOs "$download_url/zookeeper-$VERSION/$src_file.asc"
-curl --fail -LOs "$download_url/zookeeper-$VERSION/$src_file.sha512"
+echo "Downloading ZooKeeper binary (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/zookeeper-$VERSION/$bin_file"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/zookeeper-$VERSION/$bin_file.asc"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/zookeeper-$VERSION/$bin_file.sha512"
+echo "Downloading ZooKeeper source (if this fails, try switching the BASE_DOWNLOAD_URL to the archive)"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/zookeeper-$VERSION/$src_file"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/zookeeper-$VERSION/$src_file.asc"
+curl --fail -LO --progress-bar "${BASE_DOWNLOAD_URL}/zookeeper-$VERSION/$src_file.sha512"
 
 # It is probably redundant to check both the checksum and the signature but it's cheap and why not
 echo "Validating SHA512 Checksums for binary releases"
@@ -59,30 +62,32 @@ if ! (sha512sum "$src_file" | diff -Z - "$src_file.sha512"); then
 fi
 
 echo "Validating signatures for binary releases"
-echo '--> NOTE: Make sure you have downloaded and added the KEYS file (https://archive.apache.org/dist/zookeeper/KEYS) to GPG: https://www.apache.org/info/verification.html (e.g. by using "curl https://archive.apache.org/dist/zookeeper/KEYS | gpg --import")'
-
 if ! 
(gpg --verify "$bin_file.asc" "$bin_file" 2> /dev/null); then echo "ERROR: One of the signatures could not be verified for a binary release" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" exit 1 fi echo "Validating signatures for source releases" if ! (gpg --verify "$src_file.asc" "$src_file" 2> /dev/null); then - echo "ERROR: One of the signatures could not be verified for a source release" - exit 1 + echo "ERROR: One of the signatures could not be verified for a source release" + echo "--> Make sure you have imported the KEYS file (${BASE_DOWNLOAD_URL}/KEYS) into GPG: https://www.apache.org/info/verification.html" + echo "--> e.g. \"curl ${BASE_DOWNLOAD_URL}/KEYS | gpg --import\"" + exit 1 fi echo "Uploading everything to Nexus" EXIT_STATUS=0 repo_url=https://repo.stackable.tech/repository/packages/zookeeper/ -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" "$repo_url" || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" "$repo_url" || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha512" "$repo_url" || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file" "$repo_url" || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.asc" "$repo_url" || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$bin_file.sha512" "$repo_url" || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" "$repo_url" || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" "$repo_url" || EXIT_STATUS=$? -curl --fail -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" "$repo_url" || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file" "$repo_url" || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.asc" "$repo_url" || EXIT_STATUS=$? +curl --fail -o /dev/null --progress-bar -u "$NEXUS_USER:$NEXUS_PASSWORD" --upload-file "$src_file.sha512" "$repo_url" || EXIT_STATUS=$? 
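
The `BASE_DOWNLOAD_URL` comments above describe a manual switch between the fast CDN and the slow archive. If that ever becomes tedious, the fallback could also be probed automatically; a hedged sketch (not part of the actual scripts, with ZooKeeper paths as the example):

```bash
#!/usr/bin/env bash
set -euo pipefail

VERSION="3.9.2" # example version
bin_file="apache-zookeeper-$VERSION-bin.tar.gz"

# Prefer the fast CDN, but fall back to the complete (and slower) archive
# when the requested version has already been rotated off the CDN.
BASE_DOWNLOAD_URL="https://dlcdn.apache.org/zookeeper"
if ! curl --fail --silent --head "$BASE_DOWNLOAD_URL/zookeeper-$VERSION/$bin_file" > /dev/null; then
  echo "Version $VERSION is not on the CDN (anymore), falling back to the archive"
  BASE_DOWNLOAD_URL="https://archive.apache.org/dist/zookeeper"
fi

curl --fail -LO --progress-bar "$BASE_DOWNLOAD_URL/zookeeper-$VERSION/$bin_file"
```
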
if [ $EXIT_STATUS -ne 0 ]; then echo "ERROR: Upload failed" From ecb9ab7dc17028f4546d51e5d6f3be9b243ccb53 Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Thu, 24 Apr 2025 13:02:22 +0200 Subject: [PATCH 07/27] =?UTF-8?q?chore(=E2=8F=B2):=20Add=20missing=20tool?= =?UTF-8?q?=20update=20tasks=20to=20templates=20(#1062)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(issue_templates): Use YY.M.X placeholders * chore(issue_templates): Remove tasklists :sob: * chore(issue_templates): Add missing tool update tasks for cyclonedx and auditable --- .github/ISSUE_TEMPLATE/add-product.md | 5 +- .github/ISSUE_TEMPLATE/early-pre-release.md | 57 ++++++++++--------- .github/ISSUE_TEMPLATE/update-base-java.md | 21 ++++--- .../ISSUE_TEMPLATE/update-base-stackable.md | 21 ++++--- .../update-base-ubi-rust-builders.md | 21 ++++--- .github/ISSUE_TEMPLATE/update-base-vector.md | 27 +++++---- .../ISSUE_TEMPLATE/update-product-airflow.md | 27 +++++---- .../ISSUE_TEMPLATE/update-product-druid.md | 27 +++++---- .../update-product-hbase-phoenix-omid.md | 34 +++++------ .github/ISSUE_TEMPLATE/update-product-hdfs.md | 27 +++++---- .github/ISSUE_TEMPLATE/update-product-hive.md | 27 +++++---- .../ISSUE_TEMPLATE/update-product-kafka.md | 35 ++++++------ .github/ISSUE_TEMPLATE/update-product-nifi.md | 27 +++++---- .github/ISSUE_TEMPLATE/update-product-opa.md | 27 +++++---- .../ISSUE_TEMPLATE/update-product-spark.md | 27 +++++---- .../ISSUE_TEMPLATE/update-product-superset.md | 27 +++++---- .../ISSUE_TEMPLATE/update-product-trino.md | 34 +++++------ .../update-product-zookeeper.md | 27 +++++---- .github/pull_request_template.md | 2 - 19 files changed, 243 insertions(+), 257 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/add-product.md b/.github/ISSUE_TEMPLATE/add-product.md index 72b90c663..7828dd229 100644 --- a/.github/ISSUE_TEMPLATE/add-product.md +++ b/.github/ISSUE_TEMPLATE/add-product.md @@ -10,8 +10,8 @@ projects: ['stackabletech/10'] assignees: '' --- -```[tasklist] -### Tasks +## Tasks + - [ ] Create a new top-level folder for the product. The name of the folder must use the lowercase product name. - [ ] Create a README.md file outlining special considerations required to @@ -24,7 +24,6 @@ assignees: '' `.github/workflows` folder. Use existing local action whenever possible or consider creating a new one when there is no fitting action available. - [ ] Run `.scripts/update_readme_badges.sh` to generate the new status badge. -``` _Please consider updating this template if these instructions are wrong, or could be made clearer._ diff --git a/.github/ISSUE_TEMPLATE/early-pre-release.md b/.github/ISSUE_TEMPLATE/early-pre-release.md index 345ab8f83..0ccbc9dd3 100644 --- a/.github/ISSUE_TEMPLATE/early-pre-release.md +++ b/.github/ISSUE_TEMPLATE/early-pre-release.md @@ -1,7 +1,7 @@ --- name: Early Pre-Release Container Image Updates about: This template can be used to track the container image updates leading up to the next Stackable release -title: "chore: Update Container Images for Stackable Release XX.(X)X" +title: "chore: Update Container Images for Stackable Release YY.M.X" labels: ['epic'] assignees: '' --- @@ -14,49 +14,50 @@ assignees: '' Part of stackabletech/issues#xxx. -## Container Image Updates for Stackable Release XX.(X)X - > [!NOTE] > Update the product versions based on what has been decided upon in the _Product Spreadsheet[^1]_. 
[^1]: Currently this is a private spreadsheet -Replace the items in the task lists below with the subsequent tracking issue. +> [!IMPORTANT] +> Replace the items in the task lists below with the subsequent tracking issue. + +## Product Container Images -```[tasklist] -### Product Container Images -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-base-java.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-base-stackable.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-base-ubi-rust-builders.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-base-vector.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-airflow.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-druid.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-hbase-phoenix-omid.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-hdfs.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-hive.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-kafka.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-nifi.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-opa.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-spark.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-superset.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-trino.md -- [ ] https://github.com/stackabletech/docker-images/issues/new?template=update-product-zookeeper.md -``` - -```[tasklist] -### Additional items which don't have a tracking issue + +- [ ] [Create issue from template: update-base-java.md](https://github.com/stackabletech/docker-images/issues/new?template=update-base-java.md) +- [ ] [Create issue from template: update-base-stackable.md](https://github.com/stackabletech/docker-images/issues/new?template=update-base-stackable.md) +- [ ] [Create issue from template: update-base-ubi-rust-builders.md](https://github.com/stackabletech/docker-images/issues/new?template=update-base-ubi-rust-builders.md) +- [ ] [Create issue from template: update-base-vector.md](https://github.com/stackabletech/docker-images/issues/new?template=update-base-vector.md) +- [ ] [Create issue from template: update-product-airflow.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-airflow.md) +- [ ] [Create issue from template: update-product-druid.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-druid.md) +- [ ] [Create issue from template: update-product-hbase-phoenix-omid.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-hbase-phoenix-omid.md) +- [ ] [Create issue from template: update-product-hdfs.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-hdfs.md) +- [ ] [Create issue from template: update-product-hive.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-hive.md) +- [ ] [Create issue from template: update-product-kafka.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-kafka.md) +- [ ] [Create issue 
from template: update-product-nifi.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-nifi.md) +- [ ] [Create issue from template: update-product-opa.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-opa.md) +- [ ] [Create issue from template: update-product-spark.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-spark.md) +- [ ] [Create issue from template: update-product-superset.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-superset.md) +- [ ] [Create issue from template: update-product-trino.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-trino.md) +- [ ] [Create issue from template: update-product-zookeeper.md](https://github.com/stackabletech/docker-images/issues/new?template=update-product-zookeeper.md) + +## Additional items which don't have a tracking issue + - [ ] hello-world - [ ] krb5 - [ ] tools - [ ] testing-tools - [ ] statsd_exporter -``` +- [ ] cyclonedx-bom (pip) +- [ ] cargo-cyclonedx (Look for: `CARGO_CYCLONEDX_CRATE_VERSION`) +- [ ] cargo-auditable (Look for: `CARGO_AUDITABLE_CRATE_VERSION`) diff --git a/.github/ISSUE_TEMPLATE/update-base-java.md b/.github/ISSUE_TEMPLATE/update-base-java.md index 95fc8b38b..b29a47cb0 100644 --- a/.github/ISSUE_TEMPLATE/update-base-java.md +++ b/.github/ISSUE_TEMPLATE/update-base-java.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(java-bases): Update container images ahead of Stackable Release XX.(X)X + chore(java-bases): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -39,25 +39,24 @@ we should also make new versions of Java available for use. > term `openjdk-headless`. > _It isn't perfect, as it will depend on what is available via microdnf._ -```[tasklist] -### Update tasks +## Update tasks + - [ ] Add any new versions of java to both `java-base/versions.py` and `java-devel/versions.py` - [ ] Remove versions when there are no long any references (eg: `grep java- **/versions.py | grep "1.8.0"`) -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + - [ ] _Link to the docker-images PR (product update)_ -``` -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +## Acceptance + +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build a product image that uses the new version(s) - [ ] Both `java-base` and `java-devel` have the same Java versions in `versions.py` - [ ] Kuttl smoke test passes locally for a product using the new Java version -```
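
A small caveat on the `grep java- **/versions.py | grep "1.8.0"` example in the Java template above: `**` only recurses into subdirectories when bash's `globstar` option is enabled; otherwise it behaves like a single `*`. Two equivalent ways to run the check (`1.8.0` is just the template's example version):

```bash
# Variant 1: enable recursive globbing first (bash 4+).
shopt -s globstar
grep "java-" **/versions.py | grep "1.8.0"

# Variant 2: let grep do the recursion instead.
grep -rn --include='versions.py' "java-" . | grep "1.8.0"
```
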
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-base-stackable.md b/.github/ISSUE_TEMPLATE/update-base-stackable.md index 934ed9ea1..1d20147cf 100644 --- a/.github/ISSUE_TEMPLATE/update-base-stackable.md +++ b/.github/ISSUE_TEMPLATE/update-base-stackable.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(stackable-base): Update container images ahead of Stackable Release XX.(X)X + chore(stackable-base): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,28 +25,27 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update UBI version hash in the Dockerfile (`FROM`) - [ ] Update `RUST_DEFAULT_TOOLCHAIN_VERSION` - [ ] Update `CARGO_CYCLONEDX_CRATE_VERSION` - [ ] Update `CARGO_AUDITABLE_CRATE_VERSION` - [ ] Update `PROTOC_VERSION` - [ ] Update `CONFIG_UTILS_VERSION` -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + - [ ] _Link to the docker-images PR (product update)_ -``` -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +## Acceptance + +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build the image locally - [ ] Can build the vector image -```
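
For version pins like `RUST_DEFAULT_TOOLCHAIN_VERSION` or `CARGO_CYCLONEDX_CRATE_VERSION` in the stackable-base tasks above, it helps to locate every definition and consumer before bumping anything. A sketch, assuming the pins appear as plain `ARG`/`ENV` lines in the Dockerfiles (which is how the task names read):

```bash
# List every occurrence of each pin across the repository's Dockerfiles.
for pin in RUST_DEFAULT_TOOLCHAIN_VERSION CARGO_CYCLONEDX_CRATE_VERSION \
           CARGO_AUDITABLE_CRATE_VERSION PROTOC_VERSION CONFIG_UTILS_VERSION; do
  echo "=== $pin ==="
  grep -rn "$pin" --include='Dockerfile' .
done
```
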
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md b/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md index 226173607..6daed3bed 100644 --- a/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md +++ b/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(ubi-rust-builders): Update container images ahead of Stackable Release XX.(X)X + chore(ubi-rust-builders): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -31,31 +31,30 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update UBI version hash in the Dockerfile (`FROM`) - [ ] Update `RUST_DEFAULT_TOOLCHAIN_VERSION` - [ ] Update `CARGO_CYCLONEDX_CRATE_VERSION` - [ ] Update `CARGO_AUDITABLE_CRATE_VERSION` - [ ] Update `PROTOC_VERSION` -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Bump rust toolchain in operator-rs_ - [ ] _Bump rust toolchain in operator-templating_ -``` -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +## Acceptance + +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - Done for [ubi8-rust-builder/Dockerfile](https://github.com/stackabletech/docker-images/blob/main/ubi8-rust-builder/Dockerfile) - Done for [ubi9-rust-builder/Dockerfile](https://github.com/stackabletech/docker-images/blob/main/ubi9-rust-builder/Dockerfile) - [ ] Can build the image locally - [ ] Can build an operator image -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-base-vector.md b/.github/ISSUE_TEMPLATE/update-base-vector.md index 0a994c976..43ea66ea6 100644 --- a/.github/ISSUE_TEMPLATE/update-base-vector.md +++ b/.github/ISSUE_TEMPLATE/update-base-vector.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(vector): Update container images ahead of Stackable Release XX.(X)X + chore(vector): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -31,8 +31,8 @@ Add/Change/Remove anything that isn't applicable anymore > - Uses [stackable-base](https://github.com/stackabletech/docker-images/blob/main/stackable-base/Dockerfile). > - Used as a base for [java-base](https://github.com/stackabletech/docker-images/blob/main/java-base/Dockerfile). -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Update all `versions.py` files which reference vector. - [ ] Upload new version (see `vector/upload_new_vector_version.sh`). @@ -40,31 +40,30 @@ Add/Change/Remove anything that isn't applicable anymore - [ ] Update other dependencies if applicable (eg: inotify_tools, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-airflow.md b/.github/ISSUE_TEMPLATE/update-product-airflow.md index 0351f3339..0672a95c8 100644 --- a/.github/ISSUE_TEMPLATE/update-product-airflow.md +++ b/.github/ISSUE_TEMPLATE/update-product-airflow.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(airflow): Update container images ahead of Stackable Release XX.(X)X + chore(airflow): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,38 +25,37 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Download new constraints file (see `airflow/download_constraints.sh`). - [ ] Update other dependencies if applicable (eg: python, statsd_exporter, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to operator PR (getting_started / kuttl)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-druid.md b/.github/ISSUE_TEMPLATE/update-product-druid.md index 4db8638cd..479c369f0 100644 --- a/.github/ISSUE_TEMPLATE/update-product-druid.md +++ b/.github/ISSUE_TEMPLATE/update-product-druid.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(druid): Update container images ahead of Stackable Release XX.(X)X + chore(druid): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,8 +25,8 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `druid/upload_new_druid_version.sh`). - [ ] Create a file: `druid/stackable/patches/x.y.z/.gitkeep`, add patches if applicable. @@ -35,32 +35,31 @@ Add/Change/Remove anything that isn't applicable anymore - [ ] Update the [druid-opa-authorizer](https://github.com/stackabletech/druid-opa-authorizer/) with the new set of versions. - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to [druid-opa-authorizer](https://github.com/stackabletech/druid-opa-authorizer/) PR_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-hbase-phoenix-omid.md b/.github/ISSUE_TEMPLATE/update-product-hbase-phoenix-omid.md index 205a5afa4..88a5768db 100644 --- a/.github/ISSUE_TEMPLATE/update-product-hbase-phoenix-omid.md +++ b/.github/ISSUE_TEMPLATE/update-product-hbase-phoenix-omid.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(hbase-phoenix-omid): Update container images ahead of Stackable Release XX.(X)X + chore(hbase-phoenix-omid): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,46 +25,46 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks (HBase, Phoenix) +## Update tasks + +### HBase and Phoenix + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new versions (see the `hbase/*.sh` scripts). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: phoenix, opa_authorizer, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR to the list below. -``` -```[tasklist] -### Update tasks (Omid) +### Omid + - [ ] Update `omid/versions.py`to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `omid/upload_new_omid_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: jmx_exporter, etc). -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to operator PR (getting_started / kuttl)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-hdfs.md b/.github/ISSUE_TEMPLATE/update-product-hdfs.md index 7481f5656..1f9b13419 100644 --- a/.github/ISSUE_TEMPLATE/update-product-hdfs.md +++ b/.github/ISSUE_TEMPLATE/update-product-hdfs.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(hdfs): Update container images ahead of Stackable Release XX.(X)X + chore(hdfs): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,40 +25,39 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `hadoop/upload_new_hadoop_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: hdfs_utils, jmx_exporter, protobuf, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to [hdfs-utils](https://github.com/stackabletech/hdfs-utils/) PR (if applicable)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-hive.md b/.github/ISSUE_TEMPLATE/update-product-hive.md index fb90030b3..ae3adcf6e 100644 --- a/.github/ISSUE_TEMPLATE/update-product-hive.md +++ b/.github/ISSUE_TEMPLATE/update-product-hive.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(hive): Update container images ahead of Stackable Release XX.(X)X + chore(hive): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,39 +25,38 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `hive/upload_new_hive_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: jmx_exporter, aws_java_sdk_bundle, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-kafka.md b/.github/ISSUE_TEMPLATE/update-product-kafka.md index c526e30b0..cff61c59c 100644 --- a/.github/ISSUE_TEMPLATE/update-product-kafka.md +++ b/.github/ISSUE_TEMPLATE/update-product-kafka.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(kafka): Update container images ahead of Stackable Release XX.(X)X + chore(kafka): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,50 +25,51 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks (kafka) +## Update tasks + +### Kafka + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `kafka/upload_new_kafka_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: jmx_exporter, kcat, scala, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` + +### kcat and kafka-testing-tools -```[tasklist] -### Update tasks (kcat and kafka-testing-tools) + - [ ] Update `kcat/versions.py`. - [ ] Update `kafka-testing-tools/versions.py`. - [ ] Upload new version (see `.scripts/upload_new_kcat_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-nifi.md b/.github/ISSUE_TEMPLATE/update-product-nifi.md index aa90495dc..078bc1680 100644 --- a/.github/ISSUE_TEMPLATE/update-product-nifi.md +++ b/.github/ISSUE_TEMPLATE/update-product-nifi.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(nifi): Update container images ahead of Stackable Release XX.(X)X + chore(nifi): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,39 +25,38 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `nifi/upload_new_nifi_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: jmx_exporter, kcat, scala, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-opa.md b/.github/ISSUE_TEMPLATE/update-product-opa.md index b0a48a473..b49776b42 100644 --- a/.github/ISSUE_TEMPLATE/update-product-opa.md +++ b/.github/ISSUE_TEMPLATE/update-product-opa.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(opa): Update container images ahead of Stackable Release XX.(X)X + chore(opa): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,38 +25,37 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `opa/upload_new_opa_version.sh`). - [ ] Update other dependencies if applicable (eg: opa_bundle_builder, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-spark.md b/.github/ISSUE_TEMPLATE/update-product-spark.md index 02dcd3022..30e4be6ff 100644 --- a/.github/ISSUE_TEMPLATE/update-product-spark.md +++ b/.github/ISSUE_TEMPLATE/update-product-spark.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(spark): Update container images ahead of Stackable Release XX.(X)X + chore(spark): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,8 +25,8 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `spark/upload_new_spark_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). @@ -34,31 +34,30 @@ Add/Change/Remove anything that isn't applicable anymore - [ ] Update other dependencies if applicable (eg: python, jmx_exporter, aws_java_sdk_bundle, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-superset.md b/.github/ISSUE_TEMPLATE/update-product-superset.md index e56bb273f..5cb248ae2 100644 --- a/.github/ISSUE_TEMPLATE/update-product-superset.md +++ b/.github/ISSUE_TEMPLATE/update-product-superset.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(superset): Update container images ahead of Stackable Release XX.(X)X + chore(superset): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,8 +25,8 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Create a new constraints file (see `superset/README.md`). - [ ] Create a file: `superset/stackable/patches/x.y.z/.gitkeep`, add patches if applicable. @@ -34,31 +34,30 @@ Add/Change/Remove anything that isn't applicable anymore - [ ] Update other dependencies if applicable (eg: python, auth_lib, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-trino.md b/.github/ISSUE_TEMPLATE/update-product-trino.md index 7f328f218..06656a38e 100644 --- a/.github/ISSUE_TEMPLATE/update-product-trino.md +++ b/.github/ISSUE_TEMPLATE/update-product-trino.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(trino): Update container images ahead of Stackable Release XX.(X)X + chore(trino): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,48 +25,48 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks (trino) +## Update tasks + +### Trino + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `trino/*.sh` scripts). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: jmx_exporter, opa_authorizer, storage_connector, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Update tasks (trino-cli) +### trino-cli + - [ ] Update `trino-cli/versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `trino-cli/upload_new_trino_version.sh` scripts). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/ISSUE_TEMPLATE/update-product-zookeeper.md b/.github/ISSUE_TEMPLATE/update-product-zookeeper.md index 3bc459017..0f2f5ca44 100644 --- a/.github/ISSUE_TEMPLATE/update-product-zookeeper.md +++ b/.github/ISSUE_TEMPLATE/update-product-zookeeper.md @@ -4,7 +4,7 @@ about: >- This template contains instructions specific to updating this product and/or container image(s). title: >- - chore(zookeeper): Update container images ahead of Stackable Release XX.(X)X + chore(zookeeper): Update container images ahead of Stackable Release YY.M.X labels: [] # Currently, projects cannot be assigned via front-matter. projects: ['stackabletech/10'] @@ -25,39 +25,38 @@ Add/Change/Remove anything that isn't applicable anymore > > [1]: https://github.com/orgs/stackabletech/projects/10 -```[tasklist] -### Update tasks +## Update tasks + - [ ] Update `versions.py` to reflect the agreed upon versions in the spreadsheet (including the removal of old versions). - [ ] Upload new version (see `zookeeper/upload_new_zookeeper_version.sh`). - [ ] Update `versions.py` to the latest supported version of JVM (base and devel). - [ ] Update other dependencies if applicable (eg: jmx_exporter, etc). - [ ] Check other operators (getting_started / kuttl / supported-versions) for usage of the versions. Add the PR(s) to the list below. - [ ] Update the version in demos. Add the PR(s) to the list below. -``` -```[tasklist] -### Related Pull Requests +## Related Pull Requests + +> [!TIP] +> Delete any items that do not apply so that all applicable items can be checked. +> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. + - [ ] _Link to the docker-images PR (product update)_ - [ ] _Link to the operator PR (getting_started / kuttl / supported-versions)_ - [ ] _Link to any other operator PRs (getting_started / kuttl)_ - [ ] _Link to demo PR (raise against the `main` branch)_ - [ ] _Link to the Release Notes PR in the documentation repo (if not a comment below)_ -``` -> [!TIP] -> Delete any items that do not apply so that all applicable items can be checked. -> For example, if you add release notes to the documentation repository, you do not need the latter two criteria. +## Acceptance -This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been checked, the issue can be moved into _Development: Done_. +> [!TIP] +> This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been +> checked, the issue can be moved into _Development: Done_. -```[tasklist] -### Acceptance - [ ] Can build image (either locally, or in CI) - [ ] Kuttl smoke tests passes (either locally, or in CI) - [ ] Release notes added to documentation and linked as a PR above - [ ] Release notes written in a comment below - [ ] Applicable `release-note` label added to this issue -```
Testing instructions diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index f1258c6b8..0060c7a10 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -7,14 +7,12 @@ - Not all of these items are applicable to all PRs, the author should update this template to only leave the boxes in that are relevant - Please make sure all these things are done and tick the boxes -```[tasklist] - [ ] Changes are OpenShift compatible - [ ] All added packages (via microdnf or otherwise) have a comment on why they are added - [ ] Things not downloaded from Red Hat repositories should be mirrored in the Stackable repository and downloaded from there - [ ] All packages should have (if available) signatures/hashes verified - [ ] Add an entry to the CHANGELOG.md file - [ ] Integration tests ran successfully -```
TIP: Running integration tests with a new product image From 0fe577d384017b4e32ab736bf88dcc1341517138 Mon Sep 17 00:00:00 2001 From: Lukas Krug Date: Thu, 24 Apr 2025 15:58:52 +0200 Subject: [PATCH 08/27] feat: move patch apply logic to patchable (#1032) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * wip * Update druid/Dockerfile Co-authored-by: Natalie Klestrup Röijezon * fix: remove unnecessary check / shadow repo root var * fix: druid src path * fix: druid src path * feat: introduce stackable-devel image * fix: use PathBuf in ProductVersionContext * chore: align zookeeper patch directory structure * fix: stackable-devel dnf and shell config * chore: switch patch process in other products * fix: hive build * fix: trino build * fix: spark build * chore: make hadolint happy * fix: remove hbase intermediate sources / remove unnecessary and operator * fix: permissions in patchable build process * chore: remove unnecessary curl command in build process * chore: move adding of JMX config and start-metastore script from builder stage to final stage * chore: remove git repo in trino and hbase-operator-tools to avoid maven commit plugin bug --------- Co-authored-by: Natalie Klestrup Röijezon --- conf.py | 2 + druid/Dockerfile | 12 +- druid/stackable/patches/apply_patches.sh | 44 ------- hadoop/Dockerfile | 7 +- hadoop/stackable/patches/apply_patches.sh | 43 ------- hbase/Dockerfile | 53 ++++---- hbase/stackable/patches/apply_patches.sh | 49 -------- hive/Dockerfile | 46 +++---- hive/stackable/patches/apply_patches.sh | 50 -------- java-devel/Dockerfile | 4 +- java-devel/versions.py | 12 +- kafka/Dockerfile | 21 +--- kafka/stackable/patches/apply_patches.sh | 44 ------- nifi/Dockerfile | 29 +---- nifi/stackable/patches/apply_patches.sh | 43 ------- omid/Dockerfile | 7 +- omid/stackable/patches/apply_patches.sh | 44 ------- rust/patchable/src/main.rs | 30 +++-- spark-k8s/Dockerfile | 74 ++---------- spark-k8s/hbase-connectors/apply_patches.sh | 44 ------- spark-k8s/stackable/patches/apply_patches.sh | 44 ------- stackable-base/Dockerfile | 44 +------ stackable-base/versions.py | 1 + stackable-devel/Dockerfile | 114 ++++++++++++++++++ stackable-devel/versions.py | 5 + superset/Dockerfile | 3 - superset/stackable/patches/4.0.2/.gitkeep | 0 .../stackable/patches/4.0.2/patchable.toml | 2 - superset/stackable/patches/4.1.1/.gitkeep | 0 .../stackable/patches/4.1.1/patchable.toml | 2 - superset/stackable/patches/apply_patches.sh | 54 --------- trino-storage-connector/Dockerfile | 18 +-- .../stackable/patches/apply_patches.sh | 44 ------- trino/Dockerfile | 31 ++--- trino/stackable/patches/apply_patches.sh | 44 ------- zookeeper/Dockerfile | 19 ++- zookeeper/stackable/patches/apply_patches.sh | 50 -------- 37 files changed, 249 insertions(+), 884 deletions(-) delete mode 100755 druid/stackable/patches/apply_patches.sh delete mode 100755 hadoop/stackable/patches/apply_patches.sh delete mode 100644 hbase/stackable/patches/apply_patches.sh delete mode 100755 hive/stackable/patches/apply_patches.sh delete mode 100755 kafka/stackable/patches/apply_patches.sh delete mode 100644 nifi/stackable/patches/apply_patches.sh delete mode 100755 omid/stackable/patches/apply_patches.sh delete mode 100755 spark-k8s/hbase-connectors/apply_patches.sh delete mode 100755 spark-k8s/stackable/patches/apply_patches.sh create mode 100644 stackable-devel/Dockerfile create mode 100644 stackable-devel/versions.py delete mode 100644 superset/stackable/patches/4.0.2/.gitkeep delete mode 
100644 superset/stackable/patches/4.0.2/patchable.toml delete mode 100644 superset/stackable/patches/4.1.1/.gitkeep delete mode 100644 superset/stackable/patches/4.1.1/patchable.toml delete mode 100755 superset/stackable/patches/apply_patches.sh delete mode 100755 trino-storage-connector/stackable/patches/apply_patches.sh delete mode 100755 trino/stackable/patches/apply_patches.sh delete mode 100755 zookeeper/stackable/patches/apply_patches.sh diff --git a/conf.py b/conf.py index 85a818d58..c3a016e16 100644 --- a/conf.py +++ b/conf.py @@ -26,6 +26,7 @@ opa = importlib.import_module("opa.versions") spark_k8s = importlib.import_module("spark-k8s.versions") stackable_base = importlib.import_module("stackable-base.versions") +stackable_devel = importlib.import_module("stackable-devel.versions") superset = importlib.import_module("superset.versions") trino_cli = importlib.import_module("trino-cli.versions") trino = importlib.import_module("trino.versions") @@ -55,6 +56,7 @@ {"name": "opa", "versions": opa.versions}, {"name": "spark-k8s", "versions": spark_k8s.versions}, {"name": "stackable-base", "versions": stackable_base.versions}, + {"name": "stackable-devel", "versions": stackable_devel.versions}, {"name": "superset", "versions": superset.versions}, {"name": "trino-cli", "versions": trino_cli.versions}, {"name": "trino", "versions": trino.versions}, diff --git a/druid/Dockerfile b/druid/Dockerfile index c4c928f7e..a45d6b02a 100644 --- a/druid/Dockerfile +++ b/druid/Dockerfile @@ -23,10 +23,7 @@ microdnf update # This requirement is documented in docs/development/build.md and version 5.1 or later is required. # UBI 9 ships with 5.4.x so that should be fine # -# patch: Required for the apply-patches.sh script -microdnf install \ - python-pyyaml \ - patch +microdnf install python-pyyaml microdnf clean all rm -rf /var/cache/yum @@ -35,8 +32,7 @@ EOF USER ${STACKABLE_USER_UID} WORKDIR /stackable -COPY --chown=${STACKABLE_USER_UID}:0 druid/stackable/patches/apply_patches.sh /stackable/apache-druid-${PRODUCT}-src/patches/apply_patches.sh -COPY --chown=${STACKABLE_USER_UID}:0 druid/stackable/patches/${PRODUCT} /stackable/apache-druid-${PRODUCT}-src/patches/${PRODUCT} +COPY --chown=${STACKABLE_USER_UID}:0 druid/stackable/patches/${PRODUCT} /stackable/src/druid/stackable/patches/${PRODUCT} # Cache mounts are owned by root by default # We need to explicitly give the uid to use which is hardcoded to "1000" in stackable-base @@ -50,9 +46,7 @@ RUN --mount=type=cache,id=maven-${PRODUCT},uid=${STACKABLE_USER_UID},target=/sta --mount=type=cache,id=npm-${PRODUCT},uid=${STACKABLE_USER_UID},target=/stackable/.npm \ --mount=type=cache,id=cache-${PRODUCT},uid=${STACKABLE_USER_UID},target=/stackable/.cache \ < /stackable/bin/hbck2 chmod +x /stackable/bin/hbck2 @@ -246,20 +241,13 @@ ARG STACKABLE_USER_UID # This can be used to speed up builds when disk space is of no concern. 
ARG DELETE_CACHES="true" -COPY --chown=${STACKABLE_USER_UID}:0 hbase/phoenix/stackable/patches /stackable/patches -COPY --chown=${STACKABLE_USER_UID}:0 hbase/stackable/patches/apply_patches.sh /stackable/patches +COPY --chown=${STACKABLE_USER_UID}:0 hbase/phoenix/stackable/patches/${PHOENIX} /stackable/src/phoenix/stackable/patches/${PHOENIX} USER ${STACKABLE_USER_UID} WORKDIR /stackable RUN --mount=type=cache,id=maven-phoenix-${PHOENIX},uid=${STACKABLE_USER_UID},target=/stackable/.m2/repository < { +struct ProductVersionContext { pv: ProductVersion, - images_repo_root: &'a Path, + images_repo_root: PathBuf, } -impl ProductVersionContext<'_> { +impl ProductVersionContext { fn load_config(&self) -> Result { let path = &self.config_path(); tracing::info!( @@ -107,6 +103,10 @@ impl ProductVersionContext<'_> { struct Opts { #[clap(subcommand)] cmd: Cmd, + + /// Specify a custom root directory for the images repository + #[clap(long)] + images_repo_root: Option, } #[derive(clap::Parser)] @@ -165,7 +165,7 @@ enum Cmd { pv: ProductVersion, }, - /// Shwos the images repository root + /// Shows the images repository root ImagesDir, } @@ -263,8 +263,16 @@ fn main() -> Result<()> { .context(ConfigureGitLoggingSnafu)?; let opts = ::parse(); - let images_repo = repo::discover_images_repo(".").context(FindImagesRepoSnafu)?; - let images_repo_root = images_repo.workdir().context(NoImagesRepoWorkdirSnafu)?; + let images_repo_root = match opts.images_repo_root { + Some(path) => path, + None => { + let images_repo = repo::discover_images_repo(".").context(FindImagesRepoSnafu)?; + images_repo + .workdir() + .context(NoImagesRepoWorkdirSnafu)? + .to_owned() + } + }; match opts.cmd { Cmd::Checkout { pv, base_only } => { let ctx = ProductVersionContext { diff --git a/spark-k8s/Dockerfile b/spark-k8s/Dockerfile index 4a1276c9d..4f20dc1bb 100644 --- a/spark-k8s/Dockerfile +++ b/spark-k8s/Dockerfile @@ -7,43 +7,17 @@ FROM stackable/image/hadoop AS hadoop-builder # hbase-builder: Provides HBase libraries FROM stackable/image/hbase AS hbase-builder -# spark-source-builder: Download the Spark source code into -# /stackable/spark and apply the patches +# spark-source-builder: Clone the Spark source code and apply patches FROM stackable/image/java-devel AS spark-source-builder ARG PRODUCT ARG STACKABLE_USER_UID -RUN <>> Build spark diff --git a/spark-k8s/hbase-connectors/apply_patches.sh b/spark-k8s/hbase-connectors/apply_patches.sh deleted file mode 100755 index 833b3e9c7..000000000 --- a/spark-k8s/hbase-connectors/apply_patches.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash - -# Enable error handling and unset variable checking -set -eu -set -o pipefail - -# Check if $1 (VERSION) is provided -if [ -z "${1-}" ]; then - echo "Please provide a value for VERSION as the first argument." - exit 1 -fi - -VERSION="$1" -PATCH_DIR="patches/$VERSION" - -# Check if version-specific patches directory exists -if [ ! -d "$PATCH_DIR" ]; then - echo "Patches directory '$PATCH_DIR' does not exist." 
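The patchable changes above replace git-based discovery of the docker-images checkout with an optional explicit root, which is what lets the Dockerfiles run patchable against a plain /stackable/src tree inside the build container. A minimal sketch of how the new flag composes with the subcommands visible in the diff; --images-repo-root, checkout and ImagesDir come from the diff itself, while the product/version argument form and the paths are illustrative assumptions:

    # Sketch, not the canonical invocation: the flag and subcommand names
    # appear in the diff above; argument form and paths are illustrative.
    patchable --images-repo-root /stackable/src checkout druid/30.0.0
    patchable --images-repo-root /stackable/src images-dir
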
- exit 1 -fi - -# Create an array to hold the patches in sorted order -declare -a patch_files=() - -echo "Applying patches from ${PATCH_DIR}" now - -# Read the patch files into the array -while IFS= read -r -d $'\0' file; do - patch_files+=("$file") -done < <(find "$PATCH_DIR" -name "*.patch" -print0 | sort -zV) - -echo "Found ${#patch_files[@]} patches, applying now" - -# Iterate through sorted patch files -for patch_file in "${patch_files[@]}"; do - echo "Applying $patch_file" - # We can not use Git here, as we are not within a Git repo - patch --directory "." --strip=1 < "$patch_file" || { - echo "Failed to apply $patch_file" - exit 1 - } -done - -echo "All patches applied successfully." diff --git a/spark-k8s/stackable/patches/apply_patches.sh b/spark-k8s/stackable/patches/apply_patches.sh deleted file mode 100755 index 833b3e9c7..000000000 --- a/spark-k8s/stackable/patches/apply_patches.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash - -# Enable error handling and unset variable checking -set -eu -set -o pipefail - -# Check if $1 (VERSION) is provided -if [ -z "${1-}" ]; then - echo "Please provide a value for VERSION as the first argument." - exit 1 -fi - -VERSION="$1" -PATCH_DIR="patches/$VERSION" - -# Check if version-specific patches directory exists -if [ ! -d "$PATCH_DIR" ]; then - echo "Patches directory '$PATCH_DIR' does not exist." - exit 1 -fi - -# Create an array to hold the patches in sorted order -declare -a patch_files=() - -echo "Applying patches from ${PATCH_DIR}" now - -# Read the patch files into the array -while IFS= read -r -d $'\0' file; do - patch_files+=("$file") -done < <(find "$PATCH_DIR" -name "*.patch" -print0 | sort -zV) - -echo "Found ${#patch_files[@]} patches, applying now" - -# Iterate through sorted patch files -for patch_file in "${patch_files[@]}"; do - echo "Applying $patch_file" - # We can not use Git here, as we are not within a Git repo - patch --directory "." --strip=1 < "$patch_file" || { - echo "Failed to apply $patch_file" - exit 1 - } -done - -echo "All patches applied successfully." diff --git a/stackable-base/Dockerfile b/stackable-base/Dockerfile index f38d5b614..b40b09788 100644 --- a/stackable-base/Dockerfile +++ b/stackable-base/Dockerfile @@ -1,61 +1,29 @@ # syntax=docker/dockerfile:1.10.0@sha256:865e5dd094beca432e8c0a1d5e1c465db5f998dca4e439981029b3b81fb39ed5 # check=error=true -# Find the latest version at https://catalog.redhat.com/software/containers/ubi9/ubi-minimal/615bd9b4075b022acc111bf5?container-tabs=gti -# IMPORTANT: Make sure to use the "Manifest List Digest" that references the images for multiple architectures -# rather than just the "Image Digest" that references the image for the selected architecture. 
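The comment block deleted here was the only in-tree note on how that UBI digest was chosen. For reference, one way to resolve a tag to its multi-arch manifest-list digest (as opposed to a per-architecture image digest) is to hash the raw top-level manifest the registry returns for the tag, for example with skopeo; the tag below is illustrative:

    # Fetch the top-level manifest (the manifest list / OCI index for a
    # multi-arch tag) and print its sha256 digest. Tag is illustrative.
    skopeo inspect --raw docker://registry.access.redhat.com/ubi9/ubi-minimal:latest | sha256sum
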
-FROM registry.access.redhat.com/ubi9/ubi-minimal@sha256:c0e70387664f30cd9cf2795b547e4a9a51002c44a4a86aa9335ab030134bf392 AS product-utils-builder +FROM stackable/image/stackable-devel AS config-utils # Find the latest version here: https://github.com/stackabletech/config-utils/tags # renovate: datasource=github-tags packageName=stackabletech/config-utils ENV CONFIG_UTILS_VERSION=0.2.0 -# Find the latest version here: https://github.com/stackabletech/containerdebug/tags -# renovate: datasource=github-tags packageName=stackabletech/containerdebug -ENV CONTAINERDEBUG_VERSION=0.1.1 -# This SHOULD be kept in sync with operator-templating and other tools to reduce build times -# Find the latest version here: https://doc.rust-lang.org/stable/releases.html -# renovate: datasource=github-releases packageName=rust-lang/rust -ENV RUST_DEFAULT_TOOLCHAIN_VERSION=1.84.1 -# Find the latest version here: https://crates.io/crates/cargo-cyclonedx -# renovate: datasource=crate packageName=cargo-cyclonedx -ENV CARGO_CYCLONEDX_CRATE_VERSION=0.5.7 -# Find the latest version here: https://crates.io/crates/cargo-auditable -# renovate: datasource=crate packageName=cargo-auditable -ENV CARGO_AUDITABLE_CRATE_VERSION=0.6.6 - -RUN < Date: Thu, 24 Apr 2025 18:16:45 +0200 Subject: [PATCH 09/27] fix: add missing patchable config for Kafka 3.8.0 (#1065) --- ...ugin.patch => 0001-Add-CycloneDX-plugin.patch} | 11 ++++++++++- ...ange-Gradle-to-use-the-Nexus-Build-Repo.patch} | 15 ++++++--------- kafka/stackable/patches/3.8.0/patchable.toml | 2 ++ 3 files changed, 18 insertions(+), 10 deletions(-) rename kafka/stackable/patches/3.8.0/{001-cyclonedx-plugin.patch => 0001-Add-CycloneDX-plugin.patch} (86%) rename kafka/stackable/patches/3.8.0/{002-use-stackable-repo.patch => 0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch} (68%) create mode 100644 kafka/stackable/patches/3.8.0/patchable.toml diff --git a/kafka/stackable/patches/3.8.0/001-cyclonedx-plugin.patch b/kafka/stackable/patches/3.8.0/0001-Add-CycloneDX-plugin.patch similarity index 86% rename from kafka/stackable/patches/3.8.0/001-cyclonedx-plugin.patch rename to kafka/stackable/patches/3.8.0/0001-Add-CycloneDX-plugin.patch index f4587320f..588cfe3ba 100644 --- a/kafka/stackable/patches/3.8.0/001-cyclonedx-plugin.patch +++ b/kafka/stackable/patches/3.8.0/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From 9bc39c677cb825c4ddae900005e41420bb63a48c Mon Sep 17 00:00:00 2001 +From: dervoeti +Date: Thu, 24 Apr 2025 17:24:47 +0200 +Subject: Add CycloneDX plugin + +--- + build.gradle | 42 ++++++++++++++++++++++++++++++++++++++++++ + 1 file changed, 42 insertions(+) + diff --git a/build.gradle b/build.gradle -index 92082fe..e3d6c72 100644 +index 92082fe7cf..e3d6c722b6 100644 --- a/build.gradle +++ b/build.gradle @@ -48,6 +48,48 @@ plugins { diff --git a/kafka/stackable/patches/3.8.0/002-use-stackable-repo.patch b/kafka/stackable/patches/3.8.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch similarity index 68% rename from kafka/stackable/patches/3.8.0/002-use-stackable-repo.patch rename to kafka/stackable/patches/3.8.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch index 6c18efdaa..3f8fe00ea 100644 --- a/kafka/stackable/patches/3.8.0/002-use-stackable-repo.patch +++ b/kafka/stackable/patches/3.8.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch @@ -1,14 +1,14 @@ -From e5102449fe825cfbba20ce6ace1f51cd91550780 Mon Sep 17 00:00:00 2001 -From: Lars Francke -Date: Thu, 12 Dec 2024 10:09:47 +0100 -Subject: [PATCH] Change Gradle to use the Nexus Build Repo +From 
5cead6a5c6cc28dbfbb2cd4088d9598989287feb Mon Sep 17 00:00:00 2001 +From: dervoeti +Date: Thu, 24 Apr 2025 17:25:17 +0200 +Subject: Change Gradle to use the Nexus Build Repo --- build.gradle | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle -index 92082fe7cf..3b56a2ad98 100644 +index e3d6c722b6..8eec9a4a96 100644 --- a/build.gradle +++ b/build.gradle @@ -20,7 +20,9 @@ import java.nio.charset.StandardCharsets @@ -22,7 +22,7 @@ index 92082fe7cf..3b56a2ad98 100644 } apply from: "$rootDir/gradle/dependencies.gradle" -@@ -126,7 +128,9 @@ ext { +@@ -168,7 +170,9 @@ ext { allprojects { repositories { @@ -33,6 +33,3 @@ index 92082fe7cf..3b56a2ad98 100644 } dependencyUpdates { --- -2.47.1 - diff --git a/kafka/stackable/patches/3.8.0/patchable.toml b/kafka/stackable/patches/3.8.0/patchable.toml new file mode 100644 index 000000000..e24b68512 --- /dev/null +++ b/kafka/stackable/patches/3.8.0/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/kafka.git" +base = "771b9576b00ecf5b64ab6e8bedf04156fbdb5cd6" From 14fb8ba16481e4dad725961d950aaa21a3ab94b9 Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Fri, 2 May 2025 17:01:23 +0200 Subject: [PATCH 10/27] fix(pr-template): Fix the rendering of the list (#1073) --- .github/pull_request_template.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 0060c7a10..8e88a3faf 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -4,8 +4,10 @@ ## Definition of Done Checklist -- Not all of these items are applicable to all PRs, the author should update this template to only leave the boxes in that are relevant -- Please make sure all these things are done and tick the boxes +> [!NOTE] +> Not all of these items are applicable to all PRs, the author should update this template to only leave the boxes in that are relevant. 
+ +Please make sure all these things are done and tick the boxes - [ ] Changes are OpenShift compatible - [ ] All added packages (via microdnf or otherwise) have a comment on why they are added From 692f0bad26ab80654e75346cc6dd13b3b40d9a94 Mon Sep 17 00:00:00 2001 From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com> Date: Fri, 2 May 2025 20:58:50 +0200 Subject: [PATCH 11/27] feat: make image namespace a workflow input (default sdp) (#1072) * feat: make image namespace a workflow input (default sdp) * feat: add registry namespace input to build workflows and remove default --- .github/workflows/build_airflow.yaml | 1 + .github/workflows/build_druid.yaml | 1 + .github/workflows/build_hadoop.yaml | 1 + .github/workflows/build_hbase.yaml | 1 + .github/workflows/build_hello-world.yaml | 1 + .github/workflows/build_hive.yaml | 1 + .github/workflows/build_java-base.yaml | 1 + .github/workflows/build_java-devel.yaml | 1 + .github/workflows/build_kafka-testing-tools.yaml | 1 + .github/workflows/build_kafka.yaml | 1 + .github/workflows/build_kcat.yaml | 1 + .github/workflows/build_krb5.yaml | 1 + .github/workflows/build_nifi.yaml | 1 + .github/workflows/build_omid.yaml | 1 + .github/workflows/build_opa.yaml | 1 + .github/workflows/build_spark-connect-client.yaml | 1 + .github/workflows/build_spark-k8s.yaml | 1 + .github/workflows/build_stackable-base.yaml | 1 + .github/workflows/build_superset.yaml | 1 + .github/workflows/build_testing-tools.yaml | 1 + .github/workflows/build_tools.yaml | 1 + .github/workflows/build_trino-cli.yaml | 1 + .github/workflows/build_trino.yaml | 1 + .github/workflows/build_vector.yaml | 1 + .github/workflows/build_zookeeper.yaml | 1 + .github/workflows/reusable_build_image.yaml | 7 +++++-- 26 files changed, 30 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_airflow.yaml b/.github/workflows/build_airflow.yaml index bc53a8907..a59ed2ef6 100644 --- a/.github/workflows/build_airflow.yaml +++ b/.github/workflows/build_airflow.yaml @@ -30,3 +30,4 @@ jobs: with: product-name: airflow sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_druid.yaml b/.github/workflows/build_druid.yaml index 424b74bb8..33e0973a1 100644 --- a/.github/workflows/build_druid.yaml +++ b/.github/workflows/build_druid.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: druid sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_hadoop.yaml b/.github/workflows/build_hadoop.yaml index 2a77fb2e9..95b50d06c 100644 --- a/.github/workflows/build_hadoop.yaml +++ b/.github/workflows/build_hadoop.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: hadoop sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_hbase.yaml b/.github/workflows/build_hbase.yaml index 9e8b1cb95..e53b28c14 100644 --- a/.github/workflows/build_hbase.yaml +++ b/.github/workflows/build_hbase.yaml @@ -33,3 +33,4 @@ jobs: with: product-name: hbase sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_hello-world.yaml b/.github/workflows/build_hello-world.yaml index 0a5bf28c5..5bf41506e 100644 --- a/.github/workflows/build_hello-world.yaml +++ b/.github/workflows/build_hello-world.yaml @@ -28,3 +28,4 @@ jobs: with: product-name: hello-world sdp-version: ${{ github.ref_type 
== 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_hive.yaml b/.github/workflows/build_hive.yaml index a13f26be8..688f6eeaa 100644 --- a/.github/workflows/build_hive.yaml +++ b/.github/workflows/build_hive.yaml @@ -33,3 +33,4 @@ jobs: with: product-name: hive sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_java-base.yaml b/.github/workflows/build_java-base.yaml index 07795d94c..658632c6a 100644 --- a/.github/workflows/build_java-base.yaml +++ b/.github/workflows/build_java-base.yaml @@ -28,3 +28,4 @@ jobs: with: product-name: java-base sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_java-devel.yaml b/.github/workflows/build_java-devel.yaml index 78eaab939..cf8357534 100644 --- a/.github/workflows/build_java-devel.yaml +++ b/.github/workflows/build_java-devel.yaml @@ -28,3 +28,4 @@ jobs: with: product-name: java-devel sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_kafka-testing-tools.yaml b/.github/workflows/build_kafka-testing-tools.yaml index 881e6bc56..d62f9d2e8 100644 --- a/.github/workflows/build_kafka-testing-tools.yaml +++ b/.github/workflows/build_kafka-testing-tools.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: kafka-testing-tools sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_kafka.yaml b/.github/workflows/build_kafka.yaml index b3e2434e2..bcde914df 100644 --- a/.github/workflows/build_kafka.yaml +++ b/.github/workflows/build_kafka.yaml @@ -34,3 +34,4 @@ jobs: with: product-name: kafka sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_kcat.yaml b/.github/workflows/build_kcat.yaml index 431e673fc..8cc26f9c7 100644 --- a/.github/workflows/build_kcat.yaml +++ b/.github/workflows/build_kcat.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: kcat sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_krb5.yaml b/.github/workflows/build_krb5.yaml index 443fbdeb6..b475ee2af 100644 --- a/.github/workflows/build_krb5.yaml +++ b/.github/workflows/build_krb5.yaml @@ -28,3 +28,4 @@ jobs: with: product-name: krb5 sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_nifi.yaml b/.github/workflows/build_nifi.yaml index d13e76eab..07b8a08f7 100644 --- a/.github/workflows/build_nifi.yaml +++ b/.github/workflows/build_nifi.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: nifi sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_omid.yaml b/.github/workflows/build_omid.yaml index b88a685d8..3ec294889 100644 --- a/.github/workflows/build_omid.yaml +++ b/.github/workflows/build_omid.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: omid sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_opa.yaml b/.github/workflows/build_opa.yaml index 17c516a6a..c426731c8 100644 --- a/.github/workflows/build_opa.yaml +++ 
b/.github/workflows/build_opa.yaml @@ -30,3 +30,4 @@ jobs: with: product-name: opa sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_spark-connect-client.yaml b/.github/workflows/build_spark-connect-client.yaml index f3f25879d..6ee247c11 100644 --- a/.github/workflows/build_spark-connect-client.yaml +++ b/.github/workflows/build_spark-connect-client.yaml @@ -31,3 +31,4 @@ jobs: with: product-name: spark-connect-client sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: stackable diff --git a/.github/workflows/build_spark-k8s.yaml b/.github/workflows/build_spark-k8s.yaml index f9886f7f9..e5d614413 100644 --- a/.github/workflows/build_spark-k8s.yaml +++ b/.github/workflows/build_spark-k8s.yaml @@ -33,3 +33,4 @@ jobs: with: product-name: spark-k8s sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_stackable-base.yaml b/.github/workflows/build_stackable-base.yaml index 502ef66af..ba3051d9f 100644 --- a/.github/workflows/build_stackable-base.yaml +++ b/.github/workflows/build_stackable-base.yaml @@ -29,3 +29,4 @@ jobs: with: product-name: stackable-base sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_superset.yaml b/.github/workflows/build_superset.yaml index ac181c8b7..8ea11984d 100644 --- a/.github/workflows/build_superset.yaml +++ b/.github/workflows/build_superset.yaml @@ -30,3 +30,4 @@ jobs: with: product-name: superset sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_testing-tools.yaml b/.github/workflows/build_testing-tools.yaml index 42cd701cc..33fddb16e 100644 --- a/.github/workflows/build_testing-tools.yaml +++ b/.github/workflows/build_testing-tools.yaml @@ -28,3 +28,4 @@ jobs: with: product-name: testing-tools sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_tools.yaml b/.github/workflows/build_tools.yaml index 34ed34f84..8da2375b5 100644 --- a/.github/workflows/build_tools.yaml +++ b/.github/workflows/build_tools.yaml @@ -29,3 +29,4 @@ jobs: with: product-name: tools sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_trino-cli.yaml b/.github/workflows/build_trino-cli.yaml index e28f10b2e..549412a39 100644 --- a/.github/workflows/build_trino-cli.yaml +++ b/.github/workflows/build_trino-cli.yaml @@ -31,3 +31,4 @@ jobs: with: product-name: trino-cli sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_trino.yaml b/.github/workflows/build_trino.yaml index 850234033..7397cafc3 100644 --- a/.github/workflows/build_trino.yaml +++ b/.github/workflows/build_trino.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: trino sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_vector.yaml b/.github/workflows/build_vector.yaml index 2d237f771..e5044377d 100644 --- a/.github/workflows/build_vector.yaml +++ b/.github/workflows/build_vector.yaml @@ -28,3 +28,4 @@ jobs: with: product-name: vector sdp-version: ${{ github.ref_type == 'tag' 
&& github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/build_zookeeper.yaml b/.github/workflows/build_zookeeper.yaml index e761eb0da..887afb21c 100644 --- a/.github/workflows/build_zookeeper.yaml +++ b/.github/workflows/build_zookeeper.yaml @@ -32,3 +32,4 @@ jobs: with: product-name: zookeeper sdp-version: ${{ github.ref_type == 'tag' && github.ref_name || '0.0.0-dev' }} + registry-namespace: sdp diff --git a/.github/workflows/reusable_build_image.yaml b/.github/workflows/reusable_build_image.yaml index ed226d60e..2491bd057 100644 --- a/.github/workflows/reusable_build_image.yaml +++ b/.github/workflows/reusable_build_image.yaml @@ -7,6 +7,9 @@ on: sdp-version: required: true type: string + registry-namespace: + required: true + type: string secrets: harbor-robot-secret: description: The secret for the Harbor robot user used to push images and manifest @@ -66,7 +69,7 @@ jobs: image-registry-uri: oci.stackable.tech image-registry-username: robot$sdp+github-action-build image-registry-password: ${{ secrets.harbor-robot-secret }} - image-repository: sdp/${{ inputs.product-name }} + image-repository: ${{ inputs.registry-namespace }}/${{ inputs.product-name }} image-manifest-tag: ${{ steps.build.outputs.image-manifest-tag }} source-image-uri: localhost/${{ inputs.product-name }}:${{ steps.build.outputs.image-manifest-tag }} @@ -92,7 +95,7 @@ jobs: image-registry-uri: oci.stackable.tech image-registry-username: robot$sdp+github-action-build image-registry-password: ${{ secrets.harbor-robot-secret }} - image-repository: sdp/${{ inputs.product-name }} + image-repository: ${{ inputs.registry-namespace }}/${{ inputs.product-name }} image-index-manifest-tag: ${{ matrix.versions }}-stackable${{ inputs.sdp-version }} notify: From 7bce6970cfc69014b3a31582cea5611a65356e79 Mon Sep 17 00:00:00 2001 From: Techassi Date: Mon, 5 May 2025 11:45:21 +0200 Subject: [PATCH 12/27] ci(mirror): Include image name and version in run-name (#1089) * ci(mirror): Include image and version in run-name * chore: Use colon --- .github/workflows/mirror.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/mirror.yaml b/.github/workflows/mirror.yaml index c9a4b700e..b34aa7366 100644 --- a/.github/workflows/mirror.yaml +++ b/.github/workflows/mirror.yaml @@ -1,7 +1,7 @@ --- name: Mirror Container Image run-name: | - Mirror Container Image (attempt #${{ github.run_attempt }}) + Mirror Container Image (${{ inputs.image-repository-uri }}:${{ inputs.image-index-manifest-tag }}, attempt #${{ github.run_attempt }}) on: workflow_dispatch: From 51c5f8cfcdb55670becfb384aaf25920b03f2db2 Mon Sep 17 00:00:00 2001 From: Lukas Krug Date: Mon, 5 May 2025 12:02:40 +0200 Subject: [PATCH 13/27] fix: spark connect client Harbor credentials (#1088) --- .github/workflows/build_spark-connect-client.yaml | 2 +- .github/workflows/reusable_build_image.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_spark-connect-client.yaml b/.github/workflows/build_spark-connect-client.yaml index 6ee247c11..eda499f28 100644 --- a/.github/workflows/build_spark-connect-client.yaml +++ b/.github/workflows/build_spark-connect-client.yaml @@ -26,7 +26,7 @@ jobs: name: Reusable Workflow uses: ./.github/workflows/reusable_build_image.yaml secrets: - harbor-robot-secret: ${{ secrets.HARBOR_ROBOT_SDP_GITHUB_ACTION_BUILD_SECRET }} + harbor-robot-secret: ${{ secrets.HARBOR_ROBOT_STACKABLE_GITHUB_ACTION_BUILD_SECRET }} slack-token: ${{ 
secrets.SLACK_CONTAINER_IMAGE_TOKEN }} with: product-name: spark-connect-client diff --git a/.github/workflows/reusable_build_image.yaml b/.github/workflows/reusable_build_image.yaml index 2491bd057..6d31ba711 100644 --- a/.github/workflows/reusable_build_image.yaml +++ b/.github/workflows/reusable_build_image.yaml @@ -67,7 +67,7 @@ jobs: uses: stackabletech/actions/publish-image@2d3d7ddad981ae09901d45a0f6bf30c2658b1b78 # 0.7.0 with: image-registry-uri: oci.stackable.tech - image-registry-username: robot$sdp+github-action-build + image-registry-username: robot$${{ inputs.registry-namespace }}+github-action-build image-registry-password: ${{ secrets.harbor-robot-secret }} image-repository: ${{ inputs.registry-namespace }}/${{ inputs.product-name }} image-manifest-tag: ${{ steps.build.outputs.image-manifest-tag }} From d779cfc1ac4832963624488594ebb298c9c9e523 Mon Sep 17 00:00:00 2001 From: Lukas Krug Date: Mon, 5 May 2025 13:52:05 +0200 Subject: [PATCH 14/27] fix: spark connect client Harbor credentials (#1092) --- .github/workflows/reusable_build_image.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_build_image.yaml b/.github/workflows/reusable_build_image.yaml index 6d31ba711..a16be2aa0 100644 --- a/.github/workflows/reusable_build_image.yaml +++ b/.github/workflows/reusable_build_image.yaml @@ -93,7 +93,7 @@ jobs: uses: stackabletech/actions/publish-index-manifest@2d3d7ddad981ae09901d45a0f6bf30c2658b1b78 # 0.7.0 with: image-registry-uri: oci.stackable.tech - image-registry-username: robot$sdp+github-action-build + image-registry-username: robot$${{ inputs.registry-namespace }}+github-action-build image-registry-password: ${{ secrets.harbor-robot-secret }} image-repository: ${{ inputs.registry-namespace }}/${{ inputs.product-name }} image-index-manifest-tag: ${{ matrix.versions }}-stackable${{ inputs.sdp-version }} From e4f3866b02727f2a3a0257ced9340c2d195db229 Mon Sep 17 00:00:00 2001 From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com> Date: Mon, 5 May 2025 14:02:01 +0200 Subject: [PATCH 15/27] feat: connect client image includes JupyterLab (#1071) * feat: install demo dependencies * spark-connect-client is now built directly off of spark-k8s * run pre-commit hooks * fix shellcheck sc2102 --- CHANGELOG.md | 1 + spark-connect-client/Dockerfile | 36 +- .../.jupyter/jupyter_server_config.py | 1833 +++++++++++++++++ 3 files changed, 1848 insertions(+), 22 deletions(-) create mode 100644 spark-connect-client/stackable/.jupyter/jupyter_server_config.py diff --git a/CHANGELOG.md b/CHANGELOG.md index a6a6f611a..ccf39138a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,7 @@ All notable changes to this project will be documented in this file. - ubi-rust-builder: Bump Rust toolchain to 1.85.0, cargo-cyclonedx to 0.5.7, and cargo-auditable to 0.6.6 ([#1050]). - spark-k8s: Include spark-connect jars. Replace OpenJDK with Temurin JDK. Cleanup. 
([#1034]) +- spark-connect-client: Image is now completely based on spark-k8s and includes JupyterLab and other demo dependencies ([#1071]) ### Fixed diff --git a/spark-connect-client/Dockerfile b/spark-connect-client/Dockerfile index 6b8bb6955..89f2ba525 100644 --- a/spark-connect-client/Dockerfile +++ b/spark-connect-client/Dockerfile @@ -3,8 +3,6 @@ # spark-builder: provides client libs for spark-connect FROM stackable/image/spark-k8s AS spark-builder -FROM stackable/image/java-base - ARG PRODUCT ARG PYTHON ARG RELEASE @@ -18,42 +16,36 @@ LABEL name="Stackable Spark Connect Examples" \ summary="Spark Connect Examples" \ description="Spark Connect client libraries for Python and the JVM, including some examples." +# Need root to install setuptools +USER root -ENV HOME=/stackable - -COPY spark-connect-client/stackable/spark-connect-examples /stackable/spark-connect-examples -COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark/connect /stackable/spark/connect +COPY --chown=${STACKABLE_USER_UID}:0 spark-connect-client/stackable/spark-connect-examples /stackable/spark-connect-examples +COPY --chown=${STACKABLE_USER_UID}:0 spark-connect-client/stackable/.jupyter /stackable/.jupyter RUN <", +# } +# }, +# "loggers": { +# "": { +# "level": "DEBUG", +# # NOTE: if you don't list the default "console" +# # handler here then it will be disabled +# "handlers": ["console", "file"], +# }, +# }, +# } +# Default: {} +# c.Application.logging_config = {} + +## Instead of starting the Application, dump configuration to stdout +# Default: False +# c.Application.show_config = False + +## Instead of starting the Application, dump configuration to stdout (as JSON) +# Default: False +# c.Application.show_config_json = False + +# ------------------------------------------------------------------------------ +# JupyterApp(Application) configuration +# ------------------------------------------------------------------------------ +## Base class for Jupyter applications + +## Answer yes to any prompts. +# Default: False +# c.JupyterApp.answer_yes = False + +## Full path of a config file. +# Default: '' +# c.JupyterApp.config_file = '' + +## Specify a config file to load. +# Default: '' +# c.JupyterApp.config_file_name = '' + +## Generate default config file. +# Default: False +# c.JupyterApp.generate_config = False + +## The date format used by logging formatters for %(asctime)s +# See also: Application.log_datefmt +# c.JupyterApp.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# See also: Application.log_format +# c.JupyterApp.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# See also: Application.log_level +# c.JupyterApp.log_level = 30 + +## +# See also: Application.logging_config +# c.JupyterApp.logging_config = {} + +## Instead of starting the Application, dump configuration to stdout +# See also: Application.show_config +# c.JupyterApp.show_config = False + +## Instead of starting the Application, dump configuration to stdout (as JSON) +# See also: Application.show_config_json +# c.JupyterApp.show_config_json = False + +# ------------------------------------------------------------------------------ +# ServerApp(JupyterApp) configuration +# ------------------------------------------------------------------------------ +## The Jupyter Server application class. 
+ +## Set the Access-Control-Allow-Credentials: true header +# Default: False +# c.ServerApp.allow_credentials = False + +## Whether or not to allow external kernels, whose connection files are placed in +# external_connection_dir. +# Default: False +# c.ServerApp.allow_external_kernels = False + +## Set the Access-Control-Allow-Origin header +# +# Use '*' to allow any origin to access your server. +# +# Takes precedence over allow_origin_pat. +# Default: '' +# c.ServerApp.allow_origin = '' + +## Use a regular expression for the Access-Control-Allow-Origin header +# +# Requests from an origin matching the expression will get replies with: +# +# Access-Control-Allow-Origin: origin +# +# where `origin` is the origin of the request. +# +# Ignored if allow_origin is set. +# Default: '' +# c.ServerApp.allow_origin_pat = '' + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.allow_password_change +# Default: True +# c.ServerApp.allow_password_change = True + +## Allow requests where the Host header doesn't point to a local server +# +# By default, requests get a 403 forbidden response if the 'Host' header +# shows that the browser thinks it's on a non-local domain. +# Setting this option to True disables this check. +# +# This protects against 'DNS rebinding' attacks, where a remote web server +# serves you a page and then changes its DNS to send later requests to a +# local IP, bypassing same-origin checks. +# +# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, +# along with hostnames configured in local_hostnames. +# Default: False +# c.ServerApp.allow_remote_access = False + +## Whether to allow the user to run the server as root. +# Default: False +# c.ServerApp.allow_root = False + +## Allow unauthenticated access to endpoints without authentication rule. +# +# When set to `True` (default in jupyter-server 2.0, subject to change +# in the future), any request to an endpoint without an authentication rule +# (either `@tornado.web.authenticated`, or `@allow_unauthenticated`) +# will be permitted, regardless of whether user has logged in or not. +# +# When set to `False`, logging in will be required for access to each endpoint, +# excluding the endpoints marked with `@allow_unauthenticated` decorator. +# +# This option can be configured using `JUPYTER_SERVER_ALLOW_UNAUTHENTICATED_ACCESS` +# environment variable: any non-empty value other than "true" and "yes" will +# prevent unauthenticated access to endpoints without `@allow_unauthenticated`. +# Default: True +# c.ServerApp.allow_unauthenticated_access = True + +## Answer yes to any prompts. +# See also: JupyterApp.answer_yes +# c.ServerApp.answer_yes = False + +## " +# Require authentication to access prometheus metrics. +# Default: True +# c.ServerApp.authenticate_prometheus = True + +## The authorizer class to use. +# Default: 'jupyter_server.auth.authorizer.AllowAllAuthorizer' +# c.ServerApp.authorizer_class = 'jupyter_server.auth.authorizer.AllowAllAuthorizer' + +## Reload the webapp when changes are made to any Python src files. +# Default: False +# c.ServerApp.autoreload = False + +## The base URL for the Jupyter server. +# +# Leading and trailing slashes can be omitted, +# and will automatically be added. +# Default: '/' +# c.ServerApp.base_url = '/' + +## Specify what command to use to invoke a web +# browser when starting the server. 
If not specified, the +# default browser will be determined by the `webbrowser` +# standard library module, which allows setting of the +# BROWSER environment variable to override it. +# Default: '' +# c.ServerApp.browser = '' + +## The full path to an SSL/TLS certificate file. +# Default: '' +# c.ServerApp.certfile = '' + +## The full path to a certificate authority certificate for SSL/TLS client +# authentication. +# Default: '' +# c.ServerApp.client_ca = '' + +## Full path of a config file. +# See also: JupyterApp.config_file +# c.ServerApp.config_file = '' + +## Specify a config file to load. +# See also: JupyterApp.config_file_name +# c.ServerApp.config_file_name = '' + +## The config manager class to use +# Default: 'jupyter_server.services.config.manager.ConfigManager' +# c.ServerApp.config_manager_class = 'jupyter_server.services.config.manager.ConfigManager' + +## The content manager class to use. +# Default: 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' +# c.ServerApp.contents_manager_class = 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' + +## DEPRECATED. Use IdentityProvider.cookie_options +# Default: {} +# c.ServerApp.cookie_options = {} + +## The random bytes used to secure cookies. +# By default this is generated on first start of the server and persisted across server +# sessions by writing the cookie secret into the `cookie_secret_file` file. +# When using an executable config file you can override this to be random at each server restart. +# +# Note: Cookie secrets should be kept private, do not share config files with +# cookie_secret stored in plaintext (you can read the value from a file). +# Default: b'' +# c.ServerApp.cookie_secret = b'' + +## The file where the cookie secret is stored. +# Default: '' +# c.ServerApp.cookie_secret_file = '' + +## Override URL shown to users. +# +# Replace actual URL, including protocol, address, port and base URL, +# with the given value when displaying URL to the users. Do not change +# the actual connection URL. If authentication token is enabled, the +# token is added to the custom URL automatically. +# +# This option is intended to be used when the URL to display to the user +# cannot be determined reliably by the Jupyter server (proxified +# or containerized setups for example). +# Default: '' +# c.ServerApp.custom_display_url = '' + +## The default URL to redirect to from `/` +# Default: '/' +# c.ServerApp.default_url = '/' + +## Disable cross-site-request-forgery protection +# +# Jupyter server includes protection from cross-site request forgeries, +# requiring API requests to either: +# +# - originate from pages served by this server (validated with XSRF cookie and token), or +# - authenticate with a token +# +# Some anonymous compute resources still desire the ability to run code, +# completely without authentication. +# These services can disable all authentication and security checks, +# with the full knowledge of what that implies. +# Default: False +# c.ServerApp.disable_check_xsrf = False + +## The directory to look at for external kernel connection files, if +# allow_external_kernels is True. Defaults to Jupyter +# runtime_dir/external_kernels. Make sure that this directory is not filled with +# left-over connection files, that could result in unnecessary kernel manager +# creations. 
+# Default: None +# c.ServerApp.external_connection_dir = None + +## handlers that should be loaded at higher priority than the default services +# Default: [] +# c.ServerApp.extra_services = [] + +## Extra paths to search for serving static files. +# +# This allows adding javascript/css to be available from the Jupyter server machine, +# or overriding individual files in the IPython +# Default: [] +# c.ServerApp.extra_static_paths = [] + +## Extra paths to search for serving jinja templates. +# +# Can be used to override templates from jupyter_server.templates. +# Default: [] +# c.ServerApp.extra_template_paths = [] + +## Open the named file when the application is launched. +# Default: '' +# c.ServerApp.file_to_run = '' + +## The URL prefix where files are opened directly. +# Default: 'notebooks' +# c.ServerApp.file_url_prefix = 'notebooks' + +## Generate default config file. +# See also: JupyterApp.generate_config +# c.ServerApp.generate_config = False + +## DEPRECATED. Use IdentityProvider.get_secure_cookie_kwargs +# Default: {} +# c.ServerApp.get_secure_cookie_kwargs = {} + +## The identity provider class to use. +# Default: 'jupyter_server.auth.identity.PasswordIdentityProvider' +# c.ServerApp.identity_provider_class = 'jupyter_server.auth.identity.PasswordIdentityProvider' + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_data_rate_limit +# Default: 0.0 +# c.ServerApp.iopub_data_rate_limit = 0.0 + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_msg_rate_limit +# Default: 0.0 +# c.ServerApp.iopub_msg_rate_limit = 0.0 + +## The IP address the Jupyter server will listen on. +# Default: 'localhost' +c.ServerApp.ip = "0.0.0.0" + +## Supply extra arguments that will be passed to Jinja environment. +# Default: {} +# c.ServerApp.jinja_environment_options = {} + +## Extra variables to supply to jinja templates when rendering. +# Default: {} +# c.ServerApp.jinja_template_vars = {} + +## Dict of Python modules to load as Jupyter server extensions.Entry values can +# be used to enable and disable the loading ofthe extensions. The extensions +# will be loaded in alphabetical order. +# Default: {} +# c.ServerApp.jpserver_extensions = {} + +## The kernel manager class to use. +# Default: 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager' +# c.ServerApp.kernel_manager_class = 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager' + +## The kernel spec manager class to use. Should be a subclass of +# `jupyter_client.kernelspec.KernelSpecManager`. +# +# The Api of KernelSpecManager is provisional and might change without warning +# between this version of Jupyter and the next stable one. +# Default: 'builtins.object' +# c.ServerApp.kernel_spec_manager_class = 'builtins.object' + +## The kernel websocket connection class to use. +# Default: 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' +# c.ServerApp.kernel_websocket_connection_class = 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.kernel_ws_protocol +# Default: '' +# c.ServerApp.kernel_ws_protocol = '' + +## The full path to a private key file for usage with SSL/TLS. +# Default: '' +# c.ServerApp.keyfile = '' + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.limit_rate +# Default: False +# c.ServerApp.limit_rate = False + +## Hostnames to allow as local when allow_remote_access is False. 
+# +# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted +# as local as well. +# Default: ['localhost'] +# c.ServerApp.local_hostnames = ['localhost'] + +## The date format used by logging formatters for %(asctime)s +# See also: Application.log_datefmt +# c.ServerApp.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# See also: Application.log_format +# c.ServerApp.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# See also: Application.log_level +# c.ServerApp.log_level = 30 + +## +# See also: Application.logging_config +# c.ServerApp.logging_config = {} + +## The login handler class to use. +# Default: 'jupyter_server.auth.login.LegacyLoginHandler' +# c.ServerApp.login_handler_class = 'jupyter_server.auth.login.LegacyLoginHandler' + +## The logout handler class to use. +# Default: 'jupyter_server.auth.logout.LogoutHandler' +# c.ServerApp.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' + +## Sets the maximum allowed size of the client request body, specified in the +# Content-Length request header field. If the size in a request exceeds the +# configured value, a malformed HTTP message is returned to the client. +# +# Note: max_body_size is applied even in streaming mode. +# Default: 536870912 +# c.ServerApp.max_body_size = 536870912 + +## Gets or sets the maximum amount of memory, in bytes, that is allocated for use +# by the buffer manager. +# Default: 536870912 +# c.ServerApp.max_buffer_size = 536870912 + +## Gets or sets a lower bound on the open file handles process resource limit. +# This may need to be increased if you run into an OSError: [Errno 24] Too many +# open files. This is not applicable when running on Windows. +# Default: 0 +# c.ServerApp.min_open_files_limit = 0 + +## DEPRECATED, use root_dir. +# Default: '' +# c.ServerApp.notebook_dir = '' + +## Whether to open in a browser after starting. +# The specific browser used is platform dependent and +# determined by the python standard library `webbrowser` +# module, unless it is overridden using the --browser +# (ServerApp.browser) configuration option. +# Default: False +# c.ServerApp.open_browser = False + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.hashed_password +# Default: '' +# c.ServerApp.password = '' + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.password_required +# Default: False +# c.ServerApp.password_required = False + +## The port the server will listen on (env: JUPYTER_PORT). +# Default: 0 +c.ServerApp.port = 8888 + +## The number of additional ports to try if the specified port is not available +# (env: JUPYTER_PORT_RETRIES). +# Default: 50 +# c.ServerApp.port_retries = 50 + +## Preferred starting directory to use for notebooks and kernels. +# ServerApp.preferred_dir is deprecated in jupyter-server 2.0. Use +# FileContentsManager.preferred_dir instead +# Default: '' +# c.ServerApp.preferred_dir = '' + +## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. +# Default: 'disabled' +# c.ServerApp.pylab = 'disabled' + +## If True, display controls to shut down the Jupyter server, such as menu items +# or buttons. +# Default: True +# c.ServerApp.quit_button = True + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.rate_limit_window +# Default: 0.0 +# c.ServerApp.rate_limit_window = 0.0 + +## Reraise exceptions encountered loading server extensions? 
+# Default: False
+# c.ServerApp.reraise_server_extension_failures = False
+
+## The directory to use for notebooks and kernels.
+# Default: ''
+# c.ServerApp.root_dir = ''
+
+## The session manager class to use.
+# Default: 'builtins.object'
+# c.ServerApp.session_manager_class = 'builtins.object'
+
+## Instead of starting the Application, dump configuration to stdout
+# See also: Application.show_config
+# c.ServerApp.show_config = False
+
+## Instead of starting the Application, dump configuration to stdout (as JSON)
+# See also: Application.show_config_json
+# c.ServerApp.show_config_json = False
+
+## Shut down the server after N seconds with no kernels running and no activity.
+# This can be used together with culling idle kernels
+# (MappingKernelManager.cull_idle_timeout) to shut down the Jupyter server when
+# it's not in use. This is not precisely timed: it may shut down up to a minute
+# later. 0 (the default) disables this automatic shutdown.
+# Default: 0
+# c.ServerApp.shutdown_no_activity_timeout = 0
+
+## The UNIX socket the Jupyter server will listen on.
+# Default: ''
+# c.ServerApp.sock = ''
+
+## The permissions mode for UNIX socket creation (default: 0600).
+# Default: '0600'
+# c.ServerApp.sock_mode = '0600'
+
+## Supply SSL options for the tornado HTTPServer.
+# See the tornado docs for details.
+# Default: {}
+# c.ServerApp.ssl_options = {}
+
+## Paths to set up static files as immutable.
+#
+# This allows setting the cache control of static files to immutable. It
+# should be used for static files named with a hash, for instance.
+# Default: []
+# c.ServerApp.static_immutable_cache = []
+
+## Supply overrides for terminado. Currently only supports "shell_command".
+# Default: {}
+# c.ServerApp.terminado_settings = {}
+
+## Set to False to disable terminals.
+#
+# This does *not* make the server more secure by itself.
+# Anything the user can do in a terminal, they can also do in a notebook.
+#
+# Terminals may also be automatically disabled if the terminado package
+# is not available.
+# Default: False
+# c.ServerApp.terminals_enabled = False
+
+## DEPRECATED. Use IdentityProvider.token
+# Default: ''
+# c.ServerApp.token = ''
+
+## Supply overrides for the tornado.web.Application that the Jupyter server uses.
+# Default: {}
+# c.ServerApp.tornado_settings = {}
+
+## Whether or not to trust X-Scheme/X-Forwarded-Proto and X-Real-Ip/
+# X-Forwarded-For headers sent by the upstream reverse proxy. Necessary if the
+# proxy handles SSL.
+# Default: False
+# c.ServerApp.trust_xheaders = False
+
+## Disable launching browser by redirect file.
+# For versions of notebook > 5.7.2, a security measure was added that
+# prevents the authentication token used to launch the browser from being visible.
+# This makes it difficult for other users on a multi-user system to
+# run code in your Jupyter session as you.
+# However, in some environments (like Windows Subsystem for Linux (WSL) and Chromebooks),
+# launching a browser using a redirect file can lead to the browser failing to load.
+# This is because of the difference in file structures/paths between the runtime and
+# the browser.
+#
+# Setting this to False will disable this behavior, allowing the browser
+# to launch by using a URL and visible token (as before).
+# Default: True
+# c.ServerApp.use_redirect_file = True
+
+## Specify where to open the server on startup. This is the
+# `new` argument passed to the standard library method `webbrowser.open`.
+# The behaviour is not guaranteed, but depends on browser support. Valid +# values are: +# +# - 2 opens a new tab, +# - 1 opens a new window, +# - 0 opens in an existing window. +# +# See the `webbrowser.open` documentation for details. +# Default: 2 +# c.ServerApp.webbrowser_open_new = 2 + +## Set the tornado compression options for websocket connections. +# +# This value will be returned from +# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable +# compression. A dict (even an empty one) will enable compression. +# +# See the tornado docs for WebSocketHandler.get_compression_options for details. +# Default: None +# c.ServerApp.websocket_compression_options = None + +## Configure the websocket ping interval in seconds. +# +# Websockets are long-lived connections that are used by some Jupyter Server +# extensions. +# +# Periodic pings help to detect disconnected clients and keep the connection +# active. If this is set to None, then no pings will be performed. +# +# When a ping is sent, the client has ``websocket_ping_timeout`` seconds to +# respond. If no response is received within this period, the connection will be +# closed from the server side. +# Default: 0 +# c.ServerApp.websocket_ping_interval = 0 + +## Configure the websocket ping timeout in seconds. +# +# See ``websocket_ping_interval`` for details. +# Default: 0 +# c.ServerApp.websocket_ping_timeout = 0 + +## The base URL for websockets, +# if it differs from the HTTP server (hint: it almost certainly doesn't). +# +# Should be in the form of an HTTP origin: ws[s]://hostname[:port] +# Default: '' +# c.ServerApp.websocket_url = '' + +# ------------------------------------------------------------------------------ +# ConnectionFileMixin(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ +## Mixin for configurable classes that work with connection files + +## JSON file in which to store connection info [default: kernel-.json] +# +# This file will contain the IP, ports, and authentication key needed to connect +# clients to this kernel. By default, this file will be created in the security dir +# of the current profile, but can be specified by absolute path. +# Default: '' +# c.ConnectionFileMixin.connection_file = '' + +## set the control (ROUTER) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.control_port = 0 + +## set the heartbeat port [default: random] +# Default: 0 +# c.ConnectionFileMixin.hb_port = 0 + +## set the iopub (PUB) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.iopub_port = 0 + +## Set the kernel's IP address [default localhost]. +# If the IP address is something other than localhost, then +# Consoles on other machines will be able to connect +# to the Kernel, so be careful! +# Default: '' +# c.ConnectionFileMixin.ip = '' + +## set the shell (ROUTER) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.shell_port = 0 + +## set the stdin (ROUTER) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.stdin_port = 0 + +# Choices: any of ['tcp', 'ipc'] (case-insensitive) +# Default: 'tcp' +# c.ConnectionFileMixin.transport = 'tcp' + +# ------------------------------------------------------------------------------ +# KernelManager(ConnectionFileMixin) configuration +# ------------------------------------------------------------------------------ +## Manages a single kernel in a subprocess on this host. +# +# This version starts kernels with Popen. 
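+
+# KernelManager inherits the connection-file options above. For reference, the
+# connection info stored in such a file is plain JSON; a rough sketch (the
+# port numbers and key below are illustrative, not defaults):
+#
+# {
+#   "ip": "127.0.0.1",
+#   "transport": "tcp",
+#   "shell_port": 53794, "iopub_port": 53795, "stdin_port": 53796,
+#   "control_port": 53797, "hb_port": 53798,
+#   "key": "a0436f6c-1916-498b-8eb9-e81ab9368e84",
+#   "signature_scheme": "hmac-sha256"
+# }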
+
+## Should we autorestart the kernel if it dies.
+# Default: True
+# c.KernelManager.autorestart = True
+
+## True if the MultiKernelManager should cache ports for this KernelManager
+# instance
+# Default: False
+# c.KernelManager.cache_ports = False
+
+## JSON file in which to store connection info [default: kernel-.json]
+# See also: ConnectionFileMixin.connection_file
+# c.KernelManager.connection_file = ''
+
+## set the control (ROUTER) port [default: random]
+# See also: ConnectionFileMixin.control_port
+# c.KernelManager.control_port = 0
+
+## set the heartbeat port [default: random]
+# See also: ConnectionFileMixin.hb_port
+# c.KernelManager.hb_port = 0
+
+## set the iopub (PUB) port [default: random]
+# See also: ConnectionFileMixin.iopub_port
+# c.KernelManager.iopub_port = 0
+
+## Set the kernel's IP address [default localhost].
+# See also: ConnectionFileMixin.ip
+c.KernelManager.ip = "0.0.0.0"
+
+## set the shell (ROUTER) port [default: random]
+# See also: ConnectionFileMixin.shell_port
+# c.KernelManager.shell_port = 0
+
+## Time to wait for a kernel to terminate before killing it, in seconds. When a
+# shutdown request is initiated, the kernel will be immediately sent an
+# interrupt (SIGINT), followed by a shutdown_request message; after 1/2 of
+# `shutdown_wait_time` it will be sent a terminate (SIGTERM) request, and finally
+# at the end of `shutdown_wait_time` it will be killed (SIGKILL). terminate and
+# kill may be equivalent on Windows. Note that this value can be overridden by
+# the in-use kernel provisioner since shutdown times may vary by provisioned
+# environment.
+# Default: 5.0
+# c.KernelManager.shutdown_wait_time = 5.0
+
+## set the stdin (ROUTER) port [default: random]
+# See also: ConnectionFileMixin.stdin_port
+# c.KernelManager.stdin_port = 0
+
+# See also: ConnectionFileMixin.transport
+# c.KernelManager.transport = 'tcp'
+
+# ------------------------------------------------------------------------------
+# Session(Configurable) configuration
+# ------------------------------------------------------------------------------
+## Object for handling serialization and sending of messages.
+#
+# The Session object handles building messages and sending them
+# with ZMQ sockets or ZMQStream objects. Objects can communicate with each
+# other over the network via Session objects, and only need to work with the
+# dict-based IPython message spec. The Session will handle
+# serialization/deserialization, security, and metadata.
+#
+# Sessions support configurable serialization via packer/unpacker traits,
+# and signing with HMAC digests via the key/keyfile traits.
+#
+# Parameters
+# ----------
+#
+# debug : bool
+# whether to trigger extra debugging statements
+# packer/unpacker : str : 'json', 'pickle' or import_string
+# importstrings for methods to serialize message parts. If just
+# 'json' or 'pickle', predefined JSON and pickle packers will be used.
+# Otherwise, the entire importstring must be used.
+#
+# The functions must accept at least valid JSON input, and output
+# *bytes*.
+#
+# For example, to use msgpack:
+# packer = 'msgpack.packb', unpacker='msgpack.unpackb'
+# pack/unpack : callables
+# You can also set the pack/unpack callables for serialization directly.
+# session : bytes
+# the ID of this Session object. The default is to generate a new UUID.
+# username : unicode
+# username added to message headers. The default is to ask the OS.
+# key : bytes
+# The key used to initialize an HMAC signature.
If unset, messages +# will not be signed or checked. +# keyfile : filepath +# The file containing a key. If this is set, `key` will be initialized +# to the contents of the file. + +## Threshold (in bytes) beyond which an object's buffer should be extracted to +# avoid pickling. +# Default: 1024 +# c.Session.buffer_threshold = 1024 + +## Whether to check PID to protect against calls after fork. +# +# This check can be disabled if fork-safety is handled elsewhere. +# Default: True +# c.Session.check_pid = True + +## Threshold (in bytes) beyond which a buffer should be sent without copying. +# Default: 65536 +# c.Session.copy_threshold = 65536 + +## Debug output in the Session +# Default: False +# c.Session.debug = False + +## The maximum number of digests to remember. +# +# The digest history will be culled when it exceeds this value. +# Default: 65536 +# c.Session.digest_history_size = 65536 + +## The maximum number of items for a container to be introspected for custom serialization. +# Containers larger than this are pickled outright. +# Default: 64 +# c.Session.item_threshold = 64 + +## execution key, for signing messages. +# Default: b'' +# c.Session.key = b'' + +## path to file containing execution key. +# Default: '' +# c.Session.keyfile = '' + +## Metadata dictionary, which serves as the default top-level metadata dict for +# each message. +# Default: {} +# c.Session.metadata = {} + +## The name of the packer for serializing messages. +# Should be one of 'json', 'pickle', or an import name +# for a custom callable serializer. +# Default: 'json' +# c.Session.packer = 'json' + +## The UUID identifying this session. +# Default: '' +# c.Session.session = '' + +## The digest scheme used to construct the message signatures. +# Must have the form 'hmac-HASH'. +# Default: 'hmac-sha256' +# c.Session.signature_scheme = 'hmac-sha256' + +## The name of the unpacker for unserializing messages. +# Only used with custom functions for `packer`. +# Default: 'json' +# c.Session.unpacker = 'json' + +## Username for the Session. Default is your system username. +# Default: 'username' +# c.Session.username = 'username' + +# ------------------------------------------------------------------------------ +# MultiKernelManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ +## A class for managing multiple kernels. + +## The name of the default kernel to start +# Default: 'python3' +# c.MultiKernelManager.default_kernel_name = 'python3' + +## The kernel manager class. This is configurable to allow +# subclassing of the KernelManager for customized behavior. +# Default: 'jupyter_client.ioloop.IOLoopKernelManager' +# c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' + +## Share a single zmq.Context to talk to all my kernels +# Default: True +# c.MultiKernelManager.shared_context = True + +# ------------------------------------------------------------------------------ +# MappingKernelManager(MultiKernelManager) configuration +# ------------------------------------------------------------------------------ +## A KernelManager that handles +# - File mapping +# - HTTP error handling +# - Kernel message filtering + +## Whether to send tracebacks to clients on exceptions. +# Default: True +# c.MappingKernelManager.allow_tracebacks = True + +## White list of allowed kernel message types. +# When the list is empty, all message types are allowed. 
+# Default: [] +# c.MappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# +# When True (default), messages are buffered and replayed on reconnect, +# avoiding lost messages due to interrupted connectivity. +# +# Disable if long-running kernels will produce too much output while +# no frontends are connected. +# Default: True +# c.MappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. +# Only effective if cull_idle_timeout > 0. +# Default: False +# c.MappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. +# Only effective if cull_idle_timeout > 0. +# Default: False +# c.MappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be culled. +# Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled +# for users with poor network connections. +# Default: 0 +# c.MappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# Default: 300 +# c.MappingKernelManager.cull_interval = 300 + +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.MappingKernelManager.default_kernel_name = 'python3' + +## Timeout for giving up on a kernel (in seconds). +# +# On starting and restarting kernels, we check whether the +# kernel is running and responsive by sending kernel_info_requests. +# This sets the timeout in seconds for how long the kernel can take +# before being presumed dead. +# This affects the MappingKernelManager (which handles kernel restarts) +# and the ZMQChannelsHandler (which handles the startup). +# Default: 60 +# c.MappingKernelManager.kernel_info_timeout = 60 + +## The kernel manager class. This is configurable to allow +# See also: MultiKernelManager.kernel_manager_class +# c.MappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' + +# Default: '' +# c.MappingKernelManager.root_dir = '' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.MappingKernelManager.shared_context = True + +## Message to print when allow_tracebacks is False, and an exception occurs +# Default: 'An exception occurred at runtime, which is not shown due to security reasons.' +# c.MappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' + +## List of kernel message types excluded from user activity tracking. +# +# This should be a superset of the message types sent on any channel other +# than the shell channel. 
+# Default: ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] +# c.MappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] + +# ------------------------------------------------------------------------------ +# KernelSpecManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ +## A manager for kernel specs. + +## List of allowed kernel names. +# +# By default, all installed kernels are allowed. +# Default: set() +# c.KernelSpecManager.allowed_kernelspecs = set() + +## If there is no Python kernelspec registered and the IPython +# kernel is available, ensure it is added to the spec list. +# Default: True +# c.KernelSpecManager.ensure_native_kernel = True + +## The kernel spec class. This is configurable to allow +# subclassing of the KernelSpecManager for customized behavior. +# Default: 'jupyter_client.kernelspec.KernelSpec' +# c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' + +## Deprecated, use `KernelSpecManager.allowed_kernelspecs` +# Default: set() +# c.KernelSpecManager.whitelist = set() + +# ------------------------------------------------------------------------------ +# AsyncMultiKernelManager(MultiKernelManager) configuration +# ------------------------------------------------------------------------------ +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.AsyncMultiKernelManager.default_kernel_name = 'python3' + +## The kernel manager class. This is configurable to allow +# subclassing of the AsyncKernelManager for customized behavior. +# Default: 'jupyter_client.ioloop.AsyncIOLoopKernelManager' +# c.AsyncMultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.AsyncMultiKernelManager.shared_context = True + +## Whether to make kernels available before the process has started. The +# kernel has a `.ready` future which can be awaited before connecting +# Default: False +# c.AsyncMultiKernelManager.use_pending_kernels = False + +# ------------------------------------------------------------------------------ +# AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager) configuration +# ------------------------------------------------------------------------------ +## An asynchronous mapping kernel manager. + +## Whether to send tracebacks to clients on exceptions. +# See also: MappingKernelManager.allow_tracebacks +# c.AsyncMappingKernelManager.allow_tracebacks = True + +## White list of allowed kernel message types. 
+# See also: MappingKernelManager.allowed_message_types +# c.AsyncMappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# See also: MappingKernelManager.buffer_offline_messages +# c.AsyncMappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. +# See also: MappingKernelManager.cull_busy +# c.AsyncMappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. +# See also: MappingKernelManager.cull_connected +# c.AsyncMappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be +# culled. +# See also: MappingKernelManager.cull_idle_timeout +# c.AsyncMappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# See also: MappingKernelManager.cull_interval +# c.AsyncMappingKernelManager.cull_interval = 300 + +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.AsyncMappingKernelManager.default_kernel_name = 'python3' + +## Timeout for giving up on a kernel (in seconds). +# See also: MappingKernelManager.kernel_info_timeout +# c.AsyncMappingKernelManager.kernel_info_timeout = 60 + +## The kernel manager class. This is configurable to allow +# See also: AsyncMultiKernelManager.kernel_manager_class +# c.AsyncMappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' + +# See also: MappingKernelManager.root_dir +# c.AsyncMappingKernelManager.root_dir = '' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.AsyncMappingKernelManager.shared_context = True + +## Message to print when allow_tracebacks is False, and an exception occurs +# See also: MappingKernelManager.traceback_replacement_message +# c.AsyncMappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' + +## List of kernel message types excluded from user activity tracking. +# See also: MappingKernelManager.untracked_message_types +# c.AsyncMappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] + +## Whether to make kernels available before the process has started. The +# See also: AsyncMultiKernelManager.use_pending_kernels +# c.AsyncMappingKernelManager.use_pending_kernels = False + +# ------------------------------------------------------------------------------ +# ContentsManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ +## Base class for serving files and directories. +# +# This serves any text or binary file, +# as well as directories, +# with special handling for JSON notebook documents. +# +# Most APIs take a path argument, +# which is always an API-style unicode path, +# and always refers to a directory. 
+#
+# - unicode, not url-escaped
+# - '/'-separated
+# - leading and trailing '/' will be stripped
+# - if unspecified, path defaults to '',
+# indicating the root path.
+
+## Allow access to hidden files
+# Default: False
+# c.ContentsManager.allow_hidden = False
+
+# Default: None
+# c.ContentsManager.checkpoints = None
+
+# Default: 'jupyter_server.services.contents.checkpoints.Checkpoints'
+# c.ContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.Checkpoints'
+
+# Default: {}
+# c.ContentsManager.checkpoints_kwargs = {}
+
+# Default: None
+# c.ContentsManager.event_logger = None
+
+## handler class to use when serving raw file requests.
+#
+# Default is a fallback that talks to the ContentsManager API,
+# which may be inefficient, especially for large files.
+#
+# Local files-based ContentsManagers can use a StaticFileHandler subclass,
+# which will be much more efficient.
+#
+# Access to these files should be authenticated.
+# Default: 'jupyter_server.files.handlers.FilesHandler'
+# c.ContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler'
+
+## Extra parameters to pass to files_handler_class.
+#
+# For example, StaticFileHandlers generally expect a `path` argument
+# specifying the root directory from which to serve files.
+# Default: {}
+# c.ContentsManager.files_handler_params = {}
+
+## Glob patterns to hide in file and directory listings.
+# Default: ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~']
+# c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~']
+
+## Python callable or importstring thereof
+#
+# to be called on the path of a file just saved.
+#
+# This can be used to process the file on disk,
+# such as converting the notebook to a script or HTML via nbconvert.
+#
+# It will be called as (all arguments passed by keyword)::
+#
+# hook(os_path=os_path, model=model, contents_manager=instance)
+#
+# - os_path: the filesystem path to the file just written
+# - model: the model representing the file
+# - contents_manager: this ContentsManager instance
+# Default: None
+# c.ContentsManager.post_save_hook = None
+
+## Python callable or importstring thereof
+#
+# To be called on a contents model prior to save.
+#
+# This can be used to process the structure,
+# such as removing notebook outputs or other side effects that
+# should not be saved.
+#
+# It will be called as (all arguments passed by keyword)::
+#
+# hook(path=path, model=model, contents_manager=self)
+#
+# - model: the model to be saved. Includes file contents.
+# Modifying this dict will affect the file that is stored.
+# - path: the API path of the save destination
+# - contents_manager: this ContentsManager instance
+# Default: None
+# c.ContentsManager.pre_save_hook = None
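+
+# For illustration, a minimal sketch of such a pre-save hook that strips
+# code-cell outputs before saving (the function name is our own, not part of
+# Jupyter Server):
+#
+# def scrub_output_pre_save(path, model, contents_manager):
+#     """Remove outputs and execution counts from notebooks prior to saving."""
+#     if model["type"] != "notebook":
+#         return
+#     for cell in model["content"]["cells"]:
+#         if cell["cell_type"] == "code":
+#             cell["outputs"] = []
+#             cell["execution_count"] = None
+#
+# c.ContentsManager.pre_save_hook = scrub_output_pre_save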
+
+## Preferred starting directory to use for notebooks. This is an API path (`/`
+# separated, relative to root dir)
+# Default: ''
+# c.ContentsManager.preferred_dir = ''
+
+# Default: '/'
+# c.ContentsManager.root_dir = '/'
+
+## The base name used when creating untitled directories.
+# Default: 'Untitled Folder'
+# c.ContentsManager.untitled_directory = 'Untitled Folder'
+
+## The base name used when creating untitled files.
+# Default: 'untitled'
+# c.ContentsManager.untitled_file = 'untitled'
+
+## The base name used when creating untitled notebooks.
+# Default: 'Untitled'
+# c.ContentsManager.untitled_notebook = 'Untitled'
+
+# ------------------------------------------------------------------------------
+# FileManagerMixin(LoggingConfigurable, Configurable) configuration
+# ------------------------------------------------------------------------------
+## Mixin for ContentsAPI classes that interact with the filesystem.
+#
+# Provides facilities for reading, writing, and copying files.
+#
+# Shared by FileContentsManager and FileCheckpoints.
+#
+# Note: Classes using this mixin must provide the following attributes:
+#
+# root_dir : unicode
+# A directory against which API-style paths are to be resolved.
+#
+# log : logging.Logger
+
+## Hash algorithm to use for file content, supported by hashlib
+# Choices: any of ['sha512', 'shake_256', 'sha512_256', 'sha256', 'shake_128', 'sha3_224', 'ripemd160', 'sha3_256', 'sha3_384', 'sha512_224', 'sha3_512', 'md5-sha1', 'blake2b', 'md5', 'sha384', 'sm3', 'blake2s', 'sha1', 'sha224']
+# Default: 'sha256'
+# c.FileManagerMixin.hash_algorithm = 'sha256'
+
+## By default, notebooks are saved to a temporary file on disk and, if successfully written, that file then replaces the old one.
+# This procedure, namely 'atomic_writing', causes some bugs on file systems without operation order enforcement (like some networked file systems).
+# If set to False, the new notebook is written directly over the old one, which could fail (e.g. full filesystem or quota).
+# Default: True
+# c.FileManagerMixin.use_atomic_writing = True
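+
+# Conceptually, atomic writing is the classic "write to a temporary file, then
+# rename it over the target" pattern; a rough sketch of the idea (not the
+# server's actual implementation):
+#
+# import os
+# import tempfile
+#
+# def atomic_write(path, data):
+#     fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
+#     with os.fdopen(fd, "w") as f:
+#         f.write(data)
+#     os.replace(tmp, path)  # atomic rename; swaps in the new file in one step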
+
+# ------------------------------------------------------------------------------
+# FileContentsManager(FileManagerMixin, ContentsManager) configuration
+# ------------------------------------------------------------------------------
+## A file contents manager.
+
+## Allow access to hidden files
+# See also: ContentsManager.allow_hidden
+# c.FileContentsManager.allow_hidden = False
+
+## If True, deleting a non-empty directory will always be allowed.
+# WARNING: this may result in files being permanently removed; e.g. on Windows,
+# if the data size is too big for the trash/recycle bin, the directory will be permanently
+# deleted. If False (default), the non-empty directory will be sent to the trash only
+# if safe. And if ``delete_to_trash`` is True, the directory won't be deleted.
+# Default: False
+# c.FileContentsManager.always_delete_dir = False
+
+# See also: ContentsManager.checkpoints
+# c.FileContentsManager.checkpoints = None
+
+# See also: ContentsManager.checkpoints_class
+# c.FileContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.Checkpoints'
+
+# See also: ContentsManager.checkpoints_kwargs
+# c.FileContentsManager.checkpoints_kwargs = {}
+
+## If True (default), deleting files will send them to the
+# platform's trash/recycle bin, where they can be recovered. If False,
+# deleting files really deletes them.
+# Default: True
+# c.FileContentsManager.delete_to_trash = True
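+
+# The server uses the send2trash package for this; the equivalent standalone
+# call is roughly (a sketch, assuming send2trash is installed):
+#
+# from send2trash import send2trash
+# send2trash("Untitled.ipynb")  # moves the file to the platform trash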
+
+# See also: ContentsManager.event_logger
+# c.FileContentsManager.event_logger = None
+
+## handler class to use when serving raw file requests.
+# See also: ContentsManager.files_handler_class
+# c.FileContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler'
+
+## Extra parameters to pass to files_handler_class.
+# See also: ContentsManager.files_handler_params
+# c.FileContentsManager.files_handler_params = {}
+
+## Hash algorithm to use for file content, supported by hashlib
+# See also: FileManagerMixin.hash_algorithm
+# c.FileContentsManager.hash_algorithm = 'sha256'
+
+##
+# See also: ContentsManager.hide_globs
+# c.FileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~']
+
+## The max folder size that can be copied
+# Default: 500
+# c.FileContentsManager.max_copy_folder_size_mb = 500
+
+## Python callable or importstring thereof
+# See also: ContentsManager.post_save_hook
+# c.FileContentsManager.post_save_hook = None
+
+## Python callable or importstring thereof
+# See also: ContentsManager.pre_save_hook
+# c.FileContentsManager.pre_save_hook = None
+
+## Preferred starting directory to use for notebooks. This is an API path (`/`
+# separated, relative to root dir)
+# See also: ContentsManager.preferred_dir
+# c.FileContentsManager.preferred_dir = ''
+
+# Default: ''
+# c.FileContentsManager.root_dir = ''
+
+## The base name used when creating untitled directories.
+# See also: ContentsManager.untitled_directory
+# c.FileContentsManager.untitled_directory = 'Untitled Folder'
+
+## The base name used when creating untitled files.
+# See also: ContentsManager.untitled_file
+# c.FileContentsManager.untitled_file = 'untitled'
+
+## The base name used when creating untitled notebooks.
+# See also: ContentsManager.untitled_notebook
+# c.FileContentsManager.untitled_notebook = 'Untitled'
+
+## By default, notebooks are saved to a temporary file on disk and, if
+# successfully written, that file then replaces the old one.
+# See also: FileManagerMixin.use_atomic_writing
+# c.FileContentsManager.use_atomic_writing = True
+
+# ------------------------------------------------------------------------------
+# AsyncContentsManager(ContentsManager) configuration
+# ------------------------------------------------------------------------------
+## Base class for serving files and directories asynchronously.
+
+## Allow access to hidden files
+# See also: ContentsManager.allow_hidden
+# c.AsyncContentsManager.allow_hidden = False
+
+# Default: None
+# c.AsyncContentsManager.checkpoints = None
+
+# Default: 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints'
+# c.AsyncContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints'
+
+# Default: {}
+# c.AsyncContentsManager.checkpoints_kwargs = {}
+
+# See also: ContentsManager.event_logger
+# c.AsyncContentsManager.event_logger = None
+
+## handler class to use when serving raw file requests.
+# See also: ContentsManager.files_handler_class
+# c.AsyncContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler'
+
+## Extra parameters to pass to files_handler_class.
+# See also: ContentsManager.files_handler_params
+# c.AsyncContentsManager.files_handler_params = {}
+
+##
+# See also: ContentsManager.hide_globs
+# c.AsyncContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~']
+
+## Python callable or importstring thereof
+# See also: ContentsManager.post_save_hook
+# c.AsyncContentsManager.post_save_hook = None
+
+## Python callable or importstring thereof
+# See also: ContentsManager.pre_save_hook
+# c.AsyncContentsManager.pre_save_hook = None
+
+## Preferred starting directory to use for notebooks. This is an API path (`/`
+# separated, relative to root dir)
+# See also: ContentsManager.preferred_dir
+# c.AsyncContentsManager.preferred_dir = ''
+
+# See also: ContentsManager.root_dir
+# c.AsyncContentsManager.root_dir = '/'
+
+## The base name used when creating untitled directories.
+# See also: ContentsManager.untitled_directory
+# c.AsyncContentsManager.untitled_directory = 'Untitled Folder'
+
+## The base name used when creating untitled files.
+# See also: ContentsManager.untitled_file
+# c.AsyncContentsManager.untitled_file = 'untitled'
+
+## The base name used when creating untitled notebooks.
+# See also: ContentsManager.untitled_notebook
+# c.AsyncContentsManager.untitled_notebook = 'Untitled'
+
+# ------------------------------------------------------------------------------
+# AsyncFileManagerMixin(FileManagerMixin) configuration
+# ------------------------------------------------------------------------------
+## Mixin for ContentsAPI classes that interact with the filesystem
+# asynchronously.
+
+## Hash algorithm to use for file content, supported by hashlib
+# See also: FileManagerMixin.hash_algorithm
+# c.AsyncFileManagerMixin.hash_algorithm = 'sha256'
+
+## By default, notebooks are saved to a temporary file on disk and, if
+# successfully written, that file then replaces the old one.
+# See also: FileManagerMixin.use_atomic_writing
+# c.AsyncFileManagerMixin.use_atomic_writing = True
+
+# ------------------------------------------------------------------------------
+# AsyncFileContentsManager(FileContentsManager, AsyncFileManagerMixin, AsyncContentsManager) configuration
+# ------------------------------------------------------------------------------
+## An async file contents manager.
+
+## Allow access to hidden files
+# See also: ContentsManager.allow_hidden
+# c.AsyncFileContentsManager.allow_hidden = False
+
+## If True, deleting a non-empty directory will always be allowed.
+# See also: FileContentsManager.always_delete_dir
+# c.AsyncFileContentsManager.always_delete_dir = False
+
+# See also: AsyncContentsManager.checkpoints
+# c.AsyncFileContentsManager.checkpoints = None
+
+# See also: AsyncContentsManager.checkpoints_class
+# c.AsyncFileContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints'
+
+# See also: AsyncContentsManager.checkpoints_kwargs
+# c.AsyncFileContentsManager.checkpoints_kwargs = {}
+
+## If True (default), deleting files will send them to the
+# See also: FileContentsManager.delete_to_trash
+# c.AsyncFileContentsManager.delete_to_trash = True
+
+# See also: ContentsManager.event_logger
+# c.AsyncFileContentsManager.event_logger = None
+
+## handler class to use when serving raw file requests.
+# See also: ContentsManager.files_handler_class
+# c.AsyncFileContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler'
+
+## Extra parameters to pass to files_handler_class.
+# See also: ContentsManager.files_handler_params
+# c.AsyncFileContentsManager.files_handler_params = {}
+
+## Hash algorithm to use for file content, supported by hashlib
+# See also: FileManagerMixin.hash_algorithm
+# c.AsyncFileContentsManager.hash_algorithm = 'sha256'
+
+##
+# See also: ContentsManager.hide_globs
+# c.AsyncFileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~']
+
+## The max folder size that can be copied
+# See also: FileContentsManager.max_copy_folder_size_mb
+# c.AsyncFileContentsManager.max_copy_folder_size_mb = 500
+
+## Python callable or importstring thereof
+# See also: ContentsManager.post_save_hook
+# c.AsyncFileContentsManager.post_save_hook = None
+
+## Python callable or importstring thereof
+# See also: ContentsManager.pre_save_hook
+# c.AsyncFileContentsManager.pre_save_hook = None
+
+## Preferred starting directory to use for notebooks. This is an API path (`/`
+# separated, relative to root dir)
+# See also: ContentsManager.preferred_dir
+# c.AsyncFileContentsManager.preferred_dir = ''
+
+# See also: FileContentsManager.root_dir
+# c.AsyncFileContentsManager.root_dir = ''
+
+## The base name used when creating untitled directories.
+# See also: ContentsManager.untitled_directory
+# c.AsyncFileContentsManager.untitled_directory = 'Untitled Folder'
+
+## The base name used when creating untitled files.
+# See also: ContentsManager.untitled_file
+# c.AsyncFileContentsManager.untitled_file = 'untitled'
+
+## The base name used when creating untitled notebooks.
+# See also: ContentsManager.untitled_notebook
+# c.AsyncFileContentsManager.untitled_notebook = 'Untitled'
+
+## By default, notebooks are saved to a temporary file on disk and, if
+# successfully written, that file then replaces the old one.
+# See also: FileManagerMixin.use_atomic_writing
+# c.AsyncFileContentsManager.use_atomic_writing = True
+
+# ------------------------------------------------------------------------------
+# NotebookNotary(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+## A class for computing and verifying notebook signatures.
+
+## The hashing algorithm used to sign notebooks.
+# Choices: any of ['sha3_224', 'blake2b', 'sha512', 'sha3_256', 'md5', 'sha384', 'sha3_384', 'sha3_512', 'blake2s', 'sha1', 'sha224', 'sha256']
+# Default: 'sha256'
+# c.NotebookNotary.algorithm = 'sha256'
+
+## The storage directory for notary secret and database.
+# Default: ''
+# c.NotebookNotary.data_dir = ''
+
+## The sqlite file in which to store notebook signatures.
+# By default, this will be in your Jupyter data directory.
+# You can set it to ':memory:' to disable sqlite writing to the filesystem.
+# Default: ''
+# c.NotebookNotary.db_file = ''
+
+## The secret key with which notebooks are signed.
+# Default: b''
+# c.NotebookNotary.secret = b''
+
+## The file where the secret key is stored.
+# Default: ''
+# c.NotebookNotary.secret_file = ''
+
+## A callable returning the storage backend for notebook signatures.
+# The default uses an SQLite database.
+# Default: traitlets.Undefined
+# c.NotebookNotary.store_factory = traitlets.Undefined
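+
+# Signatures are what mark notebooks as "trusted" so that their outputs are
+# rendered without prompting. A notebook can be (re)signed from the command
+# line, for example:
+#
+#   jupyter trust /path/to/notebook.ipynb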
+
+# ------------------------------------------------------------------------------
+# GatewayMappingKernelManager(AsyncMappingKernelManager) configuration
+# ------------------------------------------------------------------------------
+## Kernel manager that supports remote kernels hosted by Jupyter Kernel or
+# Enterprise Gateway.
+ +## Whether to send tracebacks to clients on exceptions. +# See also: MappingKernelManager.allow_tracebacks +# c.GatewayMappingKernelManager.allow_tracebacks = True + +## White list of allowed kernel message types. +# See also: MappingKernelManager.allowed_message_types +# c.GatewayMappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# See also: MappingKernelManager.buffer_offline_messages +# c.GatewayMappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. +# See also: MappingKernelManager.cull_busy +# c.GatewayMappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. +# See also: MappingKernelManager.cull_connected +# c.GatewayMappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be +# culled. +# See also: MappingKernelManager.cull_idle_timeout +# c.GatewayMappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# See also: MappingKernelManager.cull_interval +# c.GatewayMappingKernelManager.cull_interval = 300 + +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.GatewayMappingKernelManager.default_kernel_name = 'python3' + +## Timeout for giving up on a kernel (in seconds). +# See also: MappingKernelManager.kernel_info_timeout +# c.GatewayMappingKernelManager.kernel_info_timeout = 60 + +## The kernel manager class. This is configurable to allow +# See also: AsyncMultiKernelManager.kernel_manager_class +# c.GatewayMappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' + +# See also: MappingKernelManager.root_dir +# c.GatewayMappingKernelManager.root_dir = '' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.GatewayMappingKernelManager.shared_context = True + +## Message to print when allow_tracebacks is False, and an exception occurs +# See also: MappingKernelManager.traceback_replacement_message +# c.GatewayMappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' + +## List of kernel message types excluded from user activity tracking. +# See also: MappingKernelManager.untracked_message_types +# c.GatewayMappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] + +## Whether to make kernels available before the process has started. The +# See also: AsyncMultiKernelManager.use_pending_kernels +# c.GatewayMappingKernelManager.use_pending_kernels = False + +# ------------------------------------------------------------------------------ +# GatewayKernelSpecManager(KernelSpecManager) configuration +# ------------------------------------------------------------------------------ +## A gateway kernel spec manager. + +## List of allowed kernel names. 
+# See also: KernelSpecManager.allowed_kernelspecs
+# c.GatewayKernelSpecManager.allowed_kernelspecs = set()
+
+## If there is no Python kernelspec registered and the IPython
+# See also: KernelSpecManager.ensure_native_kernel
+# c.GatewayKernelSpecManager.ensure_native_kernel = True
+
+## The kernel spec class. This is configurable to allow
+# See also: KernelSpecManager.kernel_spec_class
+# c.GatewayKernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec'
+
+## Deprecated, use `KernelSpecManager.allowed_kernelspecs`
+# See also: KernelSpecManager.whitelist
+# c.GatewayKernelSpecManager.whitelist = set()
+
+# ------------------------------------------------------------------------------
+# SessionManager(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+## A session manager.
+
+## The filesystem path to the SQLite database file (e.g.
+# /path/to/session_database.db). By default, the session database is stored in-
+# memory (i.e. `:memory:` setting from sqlite3) and does not persist when the
+# current Jupyter Server shuts down.
+# Default: ':memory:'
+# c.SessionManager.database_filepath = ':memory:'
+
+# ------------------------------------------------------------------------------
+# GatewaySessionManager(SessionManager) configuration
+# ------------------------------------------------------------------------------
+## A gateway session manager.
+
+## The filesystem path to the SQLite database file (e.g.
+# /path/to/session_database.db). By default, the session database is stored in-
+# memory (i.e. `:memory:` setting from sqlite3) and does not persist when the
+# current Jupyter Server shuts down.
+# See also: SessionManager.database_filepath
+# c.GatewaySessionManager.database_filepath = ':memory:'
+
+# ------------------------------------------------------------------------------
+# BaseKernelWebsocketConnection(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+## A configurable base class for connecting Kernel WebSockets to ZMQ sockets.
+
+## Preferred kernel message protocol over websocket to use (default: None). If an
+# empty string is passed, select the legacy protocol. If None, the selected
+# protocol will depend on what the front-end supports (usually the most recent
+# protocol supported by the back-end and the front-end).
+# Default: None
+# c.BaseKernelWebsocketConnection.kernel_ws_protocol = None
+
+# Default: None
+# c.BaseKernelWebsocketConnection.session = None
+
+# ------------------------------------------------------------------------------
+# GatewayWebSocketConnection(BaseKernelWebsocketConnection) configuration
+# ------------------------------------------------------------------------------
+## Web socket connection that proxies to a kernel/enterprise gateway.
+
+# Default: ''
+# c.GatewayWebSocketConnection.kernel_ws_protocol = ''
+
+# See also: BaseKernelWebsocketConnection.session
+# c.GatewayWebSocketConnection.session = None
+
+# ------------------------------------------------------------------------------
+# GatewayClient(SingletonConfigurable) configuration
+# ------------------------------------------------------------------------------
+## This class manages the configuration. It's its own singleton class so
+# that we can share these values across all objects. It also contains some
+# helper methods to build request arguments out of the various config options.
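+
+# A minimal sketch of proxying kernels to a remote gateway (the URL and token
+# below are placeholders, not defaults):
+#
+# c.GatewayClient.url = "http://my-gateway-server:8888"
+# c.GatewayClient.auth_token = "<token issued by the gateway>"
+# c.GatewayClient.request_timeout = 60.0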
+
+## Accept and manage cookies sent by the service side. This is often useful
+# for load balancers to decide which backend node to use.
+# (JUPYTER_GATEWAY_ACCEPT_COOKIES env var)
+# Default: False
+# c.GatewayClient.accept_cookies = False
+
+## A comma-separated list of environment variable names that will be included,
+# along with their values, in the kernel startup request. The corresponding
+# `client_envs` configuration value must also be set on the Gateway server -
+# since that configuration value indicates which environment variables to make
+# available to the kernel. (JUPYTER_GATEWAY_ALLOWED_ENVS env var)
+# Default: ''
+# c.GatewayClient.allowed_envs = ''
+
+## The authorization header's key name (typically 'Authorization') used in the
+# HTTP headers. The header will be formatted as::
+#
+# {'{auth_header_key}': '{auth_scheme} {auth_token}'}
+#
+# If the authorization header key takes a single value, `auth_scheme` should be
+# set to None and 'auth_token' should be configured to use the appropriate
+# value.
+#
+# (JUPYTER_GATEWAY_AUTH_HEADER_KEY env var)
+# Default: ''
+# c.GatewayClient.auth_header_key = ''
+
+## The auth scheme, added as a prefix to the authorization token used in the HTTP
+# headers. (JUPYTER_GATEWAY_AUTH_SCHEME env var)
+# Default: ''
+# c.GatewayClient.auth_scheme = ''
+
+## The authorization token used in the HTTP headers. The header will be formatted
+# as::
+#
+# {'{auth_header_key}': '{auth_scheme} {auth_token}'}
+#
+# (JUPYTER_GATEWAY_AUTH_TOKEN env var)
+# Default: None
+# c.GatewayClient.auth_token = None
+
+## The filename of CA certificates or None to use defaults.
+# (JUPYTER_GATEWAY_CA_CERTS env var)
+# Default: None
+# c.GatewayClient.ca_certs = None
+
+## The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT
+# env var)
+# Default: None
+# c.GatewayClient.client_cert = None
+
+## The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var)
+# Default: None
+# c.GatewayClient.client_key = None
+
+## The time allowed for HTTP connection establishment with the Gateway server.
+# (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)
+# Default: 40.0
+# c.GatewayClient.connect_timeout = 40.0
+
+## Deprecated, use `GatewayClient.allowed_envs`
+# Default: ''
+# c.GatewayClient.env_whitelist = ''
+
+# Default: None
+# c.GatewayClient.event_logger = None
+
+## The time allowed for the first HTTP reconnection attempt with the Gateway
+# server. Subsequent attempts wait JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by
+# two per retry, but no longer than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX.
+# (JUPYTER_GATEWAY_RETRY_INTERVAL env var)
+# Default: 1.0
+# c.GatewayClient.gateway_retry_interval = 1.0
+
+## The maximum time allowed for HTTP reconnection retry with the Gateway server.
+# (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)
+# Default: 30.0
+# c.GatewayClient.gateway_retry_interval_max = 30.0
+
+## The maximum retries allowed for HTTP reconnection with the Gateway server.
+# (JUPYTER_GATEWAY_RETRY_MAX env var)
+# Default: 5
+# c.GatewayClient.gateway_retry_max = 5
+
+## The class to use for Gateway token renewal.
+# (JUPYTER_GATEWAY_TOKEN_RENEWER_CLASS env var)
+# Default: 'jupyter_server.gateway.gateway_client.GatewayTokenRenewerBase'
+# c.GatewayClient.gateway_token_renewer_class = 'jupyter_server.gateway.gateway_client.GatewayTokenRenewerBase'
+
+## Additional HTTP headers to pass on the request. This value will be converted to a dict.
+# (JUPYTER_GATEWAY_HEADERS env var) +# Default: '{}' +# c.GatewayClient.headers = '{}' + +## The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) +# Default: None +# c.GatewayClient.http_pwd = None + +## The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) +# Default: None +# c.GatewayClient.http_user = None + +## The gateway API endpoint for accessing kernel resources +# (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var) +# Default: '/api/kernels' +# c.GatewayClient.kernels_endpoint = '/api/kernels' + +## The gateway API endpoint for accessing kernelspecs +# (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var) +# Default: '/api/kernelspecs' +# c.GatewayClient.kernelspecs_endpoint = '/api/kernelspecs' + +## The gateway endpoint for accessing kernelspecs resources +# (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var) +# Default: '/kernelspecs' +# c.GatewayClient.kernelspecs_resource_endpoint = '/kernelspecs' + +## Timeout pad to be ensured between KERNEL_LAUNCH_TIMEOUT and request_timeout +# such that request_timeout >= KERNEL_LAUNCH_TIMEOUT + launch_timeout_pad. +# (JUPYTER_GATEWAY_LAUNCH_TIMEOUT_PAD env var) +# Default: 2.0 +# c.GatewayClient.launch_timeout_pad = 2.0 + +## The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT +# env var) +# Default: 42.0 +# c.GatewayClient.request_timeout = 42.0 + +## The url of the Kernel or Enterprise Gateway server where kernel specifications +# are defined and kernel management takes place. If defined, this Notebook +# server acts as a proxy for all kernel management and kernel specification +# retrieval. (JUPYTER_GATEWAY_URL env var) +# Default: None +# c.GatewayClient.url = None + +## For HTTPS requests, determines if server's certificate should be validated or +# not. (JUPYTER_GATEWAY_VALIDATE_CERT env var) +# Default: True +# c.GatewayClient.validate_cert = True + +## The websocket url of the Kernel or Enterprise Gateway server. If not +# provided, this value will correspond to the value of the Gateway url with 'ws' +# in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) +# Default: None +# c.GatewayClient.ws_url = None + +# ------------------------------------------------------------------------------ +# EventLogger(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ +## An Event logger for emitting structured events. +# +# Event schemas must be registered with the EventLogger using the +# `register_schema` or `register_schema_file` methods. Every schema will be +# validated against Jupyter Event's metaschema. + +## A list of logging.Handler instances to send events to. +# +# When set to None (the default), all events are discarded. +# Default: None +# c.EventLogger.handlers = None + +# ------------------------------------------------------------------------------ +# ZMQChannelsWebsocketConnection(BaseKernelWebsocketConnection) configuration +# ------------------------------------------------------------------------------ +## A Jupyter Server Websocket Connection + +## (bytes/sec) +# Maximum rate at which stream output can be sent on iopub before they are +# limited. +# Default: 1000000 +# c.ZMQChannelsWebsocketConnection.iopub_data_rate_limit = 1000000 + +## (msgs/sec) +# Maximum rate at which messages can be sent on iopub before they are +# limited. +# Default: 1000 +# c.ZMQChannelsWebsocketConnection.iopub_msg_rate_limit = 1000 + +## Preferred kernel message protocol over websocket to use (default: None). 
If an +# empty string is passed, select the legacy protocol. If None, the selected +# protocol will depend on what the front-end supports (usually the most recent +# protocol supported by the back-end and the front-end). +# See also: BaseKernelWebsocketConnection.kernel_ws_protocol +# c.ZMQChannelsWebsocketConnection.kernel_ws_protocol = None + +## Whether to limit the rate of IOPub messages (default: True). If True, use +# iopub_msg_rate_limit, iopub_data_rate_limit and/or rate_limit_window to tune +# the rate. +# Default: True +# c.ZMQChannelsWebsocketConnection.limit_rate = True + +## (sec) Time window used to +# check the message and data rate limits. +# Default: 3 +# c.ZMQChannelsWebsocketConnection.rate_limit_window = 3 + +# See also: BaseKernelWebsocketConnection.session +# c.ZMQChannelsWebsocketConnection.session = None From 5bed54dfd02d8d59f709cc25442c43ed523458fd Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Mon, 5 May 2025 16:24:07 +0200 Subject: [PATCH 16/27] chore: Bump tools ahead of 25.7.0 (#1090) * chore(jmx_exporter): Bump products to use 1.2.0 * chore(tools): Bump kubectl (1.33.0 and yq (4.45.2) * chore(cyclonedx-bom): Bump to 6.0.0 * chore: Update changelog * chore: Changelog formatting --- CHANGELOG.md | 9 +++++++-- airflow/Dockerfile | 4 +++- airflow/versions.py | 4 ++++ hadoop/versions.py | 8 ++++---- hbase/versions.py | 2 +- hive/versions.py | 6 +++--- kafka/versions.py | 8 ++++---- omid/versions.py | 4 ++-- spark-k8s/versions.py | 4 ++-- superset/Dockerfile | 3 ++- superset/versions.py | 2 ++ tools/versions.py | 4 ++-- trino/versions.py | 6 +++--- zookeeper/versions.py | 4 ++-- 14 files changed, 41 insertions(+), 27 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ccf39138a..e5e0e9289 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,8 +36,12 @@ All notable changes to this project will be documented in this file. ### Changed - ubi-rust-builder: Bump Rust toolchain to 1.85.0, cargo-cyclonedx to 0.5.7, and cargo-auditable to 0.6.6 ([#1050]). -- spark-k8s: Include spark-connect jars. Replace OpenJDK with Temurin JDK. Cleanup. ([#1034]) -- spark-connect-client: Image is now completely based on spark-k8s and includes JupyterLab and other demo dependencies ([#1071]) +- spark-k8s: Include spark-connect jars. Replace OpenJDK with Temurin JDK. Cleanup ([#1034]). +- spark-connect-client: Image is now completely based on spark-k8s and includes JupyterLab and other demo dependencies ([#1071]). +- jmx_exporter: Bump products to use `1.2.0` ([#1090]). +- kubectl: Bump products to use `1.33.0` ([#1090]). +- yq: Bump products to use `4.45.2` ([#1090]). +- cyclonedx-bom: Bump airflow and superset to use `6.0.0` ([#1090]). ### Fixed @@ -78,6 +82,7 @@ All notable changes to this project will be documented in this file. 
[#1055]: https://github.com/stackabletech/docker-images/pull/1055 [#1056]: https://github.com/stackabletech/docker-images/pull/1056 [#1058]: https://github.com/stackabletech/docker-images/pull/1058 +[#1090]: https://github.com/stackabletech/docker-images/pull/1090 ## [25.3.0] - 2025-03-21 diff --git a/airflow/Dockerfile b/airflow/Dockerfile index 951f07ea9..fffa6fc25 100644 --- a/airflow/Dockerfile +++ b/airflow/Dockerfile @@ -29,6 +29,8 @@ ARG STATSD_EXPORTER ARG PYTHON ARG TARGETARCH ARG STACKABLE_USER_UID +ARG S3FS +ARG CYCLONEDX_BOM COPY airflow/constraints-${PRODUCT}-python${PYTHON}.txt /tmp/constraints.txt COPY --from=opa-auth-manager-builder /tmp/opa-auth-manager/dist/opa_auth_manager-0.1.0-py3-none-any.whl /tmp/ @@ -68,7 +70,7 @@ source /stackable/app/bin/activate pip install --no-cache-dir --upgrade pip pip install --no-cache-dir apache-airflow[${AIRFLOW_EXTRAS}]==${PRODUCT} --constraint /tmp/constraints.txt # Needed for pandas S3 integration to e.g. write and read csv and parquet files to/from S3 -pip install --no-cache-dir s3fs==2024.9.0 cyclonedx-bom==5.0.0 +pip install --no-cache-dir s3fs==${S3FS} cyclonedx-bom==${CYCLONEDX_BOM} # Needed for OIDC pip install --no-cache-dir Flask_OIDC==2.2.0 Flask-OpenID==1.3.1 diff --git a/airflow/versions.py b/airflow/versions.py index 21b34efb8..b48301b36 100644 --- a/airflow/versions.py +++ b/airflow/versions.py @@ -3,6 +3,8 @@ "product": "2.9.3", "python": "3.9", "git_sync": "v4.4.0", + "s3fs": "2024.9.0", + "cyclonedx_bom": "6.0.0", "statsd_exporter": "0.28.0", "tini": "0.19.0", "vector": "0.43.1", @@ -11,6 +13,8 @@ "product": "2.10.4", "python": "3.12", "git_sync": "v4.4.0", + "s3fs": "2024.9.0", + "cyclonedx_bom": "6.0.0", "statsd_exporter": "0.28.0", "tini": "0.19.0", "vector": "0.43.1", diff --git a/hadoop/versions.py b/hadoop/versions.py index cb506b7b9..94f7a33dd 100644 --- a/hadoop/versions.py +++ b/hadoop/versions.py @@ -4,7 +4,7 @@ "java-base": "11", "java-devel": "11", "async_profiler": "2.9", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "protobuf": "3.7.1", "hdfs_utils": "0.4.0", }, @@ -13,7 +13,7 @@ "java-base": "11", "java-devel": "11", "async_profiler": "2.9", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "protobuf": "3.7.1", "hdfs_utils": "0.4.0", }, @@ -22,7 +22,7 @@ "java-base": "11", "java-devel": "11", "async_profiler": "2.9", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "protobuf": "3.7.1", "hdfs_utils": "0.4.0", }, @@ -31,7 +31,7 @@ "java-base": "11", "java-devel": "11", "async_profiler": "2.9", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "protobuf": "3.7.1", "hdfs_utils": "0.4.1", }, diff --git a/hbase/versions.py b/hbase/versions.py index 9cec0dd50..326b83c4d 100644 --- a/hbase/versions.py +++ b/hbase/versions.py @@ -11,7 +11,7 @@ "phoenix": "5.2.1", "hbase_profile": "2.4", "hadoop": "3.3.6", - "jmx_exporter": "1.1.0", # update the stackable/jmx/config folder too + "jmx_exporter": "1.2.0", # update the stackable/jmx/config folder too "opa_authorizer": "", # only for HBase 2.6.1 "delete_caches": "true", }, diff --git a/hive/versions.py b/hive/versions.py index 921c32b96..0e00a7d9f 100644 --- a/hive/versions.py +++ b/hive/versions.py @@ -1,7 +1,7 @@ versions = [ { "product": "3.1.3", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", # Hive 3 must be built with Java 8 but will run on Java 11 "java-base": "11", "java-devel": "8", @@ -13,7 +13,7 @@ }, { "product": "4.0.0", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", # Hive 4 must be built with Java 8 (according to GitHub 
README) but seems to run on Java 11 "java-base": "11", "java-devel": "8", @@ -25,7 +25,7 @@ }, { "product": "4.0.1", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", # Hive 4 must be built with Java 8 (according to GitHub README) but seems to run on Java 11 "java-base": "11", "java-devel": "8", diff --git a/kafka/versions.py b/kafka/versions.py index 5c0e64120..e58ff8d53 100644 --- a/kafka/versions.py +++ b/kafka/versions.py @@ -6,7 +6,7 @@ "scala": "2.13", "kcat": "1.7.0", "opa_authorizer": "1.5.1", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, { "product": "3.7.2", @@ -15,7 +15,7 @@ "scala": "2.13", "kcat": "1.7.0", "opa_authorizer": "1.5.1", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, { "product": "3.8.0", @@ -24,7 +24,7 @@ "scala": "2.13", "kcat": "1.7.0", "opa_authorizer": "1.5.1", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, { "product": "3.9.0", @@ -33,6 +33,6 @@ "scala": "2.13", "kcat": "1.7.0", "opa_authorizer": "1.5.1", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, ] diff --git a/omid/versions.py b/omid/versions.py index e52294db1..6ae6947b5 100644 --- a/omid/versions.py +++ b/omid/versions.py @@ -3,12 +3,12 @@ "product": "1.1.2", "java-base": "11", "java-devel": "11", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, { "product": "1.1.3-SNAPSHOT", "java-base": "11", "java-devel": "11", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, ] diff --git a/spark-k8s/versions.py b/spark-k8s/versions.py index d00619a16..035e206fc 100644 --- a/spark-k8s/versions.py +++ b/spark-k8s/versions.py @@ -13,7 +13,7 @@ "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 "vector": "0.43.1", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "tini": "0.19.0", "hbase_connector": "1.0.1", }, @@ -31,7 +31,7 @@ "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 "vector": "0.43.1", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "tini": "0.19.0", "hbase_connector": "1.0.1", }, diff --git a/superset/Dockerfile b/superset/Dockerfile index 2ce4ea76b..285e0c9b0 100644 --- a/superset/Dockerfile +++ b/superset/Dockerfile @@ -40,6 +40,7 @@ ARG PYTHON ARG AUTHLIB ARG TARGETARCH ARG TARGETOS +ARG CYCLONEDX_BOM COPY superset/constraints-${PRODUCT}.txt /tmp/constraints.txt COPY --from=opa-authorizer-builder /tmp/opa-authorizer/dist/opa_authorizer-0.1.0-py3-none-any.whl /tmp/ @@ -109,7 +110,7 @@ RUN python3 -m venv /stackable/app \ --no-cache-dir \ --upgrade \ python-json-logger \ - cyclonedx-bom \ + cyclonedx-bom==${CYCLONEDX_BOM} \ && if [ -n "$AUTHLIB" ]; then pip install Authlib==${AUTHLIB}; fi && \ pip install --no-cache-dir /tmp/opa_authorizer-0.1.0-py3-none-any.whl diff --git a/superset/versions.py b/superset/versions.py index 793966157..b76d08621 100644 --- a/superset/versions.py +++ b/superset/versions.py @@ -2,6 +2,7 @@ { "product": "4.0.2", "python": "3.9", + "cyclonedx_bom": "6.0.0", "vector": "0.43.1", "statsd_exporter": "0.28.0", "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.4.1/requirements/extra.txt#L7 @@ -10,6 +11,7 @@ { "product": "4.1.1", "python": "3.9", # 3.11 support was merged in January 2025 (two 
months after 4.1.1 release), 3.10 is not available in our UBI image, so we need to stay on 3.9 for now + "cyclonedx_bom": "6.0.0", "vector": "0.43.1", "statsd_exporter": "0.28.0", "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.5.0/requirements/extra.txt#L7 diff --git a/tools/versions.py b/tools/versions.py index 6fd96cda0..78269452c 100644 --- a/tools/versions.py +++ b/tools/versions.py @@ -1,9 +1,9 @@ versions = [ { "product": "1.0.0", - "kubectl_version": "1.31.1", + "kubectl_version": "1.33.0", "jq_version": "1.7.1", "stackable-base": "1.0.0", - "yq_version": "4.44.3", + "yq_version": "4.45.2", }, ] diff --git a/trino/versions.py b/trino/versions.py index 9205bd2dd..daaba71a9 100644 --- a/trino/versions.py +++ b/trino/versions.py @@ -3,21 +3,21 @@ "product": "451", "java-base": "22", "java-devel": "22", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "trino-storage-connector": "451", }, { "product": "455", "java-base": "22", "java-devel": "22", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "trino-storage-connector": "455", }, { "product": "470", "java-base": "23", "java-devel": "23", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", "trino-storage-connector": "470", }, ] diff --git a/zookeeper/versions.py b/zookeeper/versions.py index f7b7387fd..067e9d3f9 100644 --- a/zookeeper/versions.py +++ b/zookeeper/versions.py @@ -7,7 +7,7 @@ # zookeeper: Execution spotbugs of goal com.github.spotbugs:spotbugs-maven-plugin:4.0.0:spotbugs failed: Java # returned: 1 -> [Help 1] "java-devel": "11", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, { "product": "3.9.3", @@ -17,6 +17,6 @@ # zookeeper: Execution spotbugs of goal com.github.spotbugs:spotbugs-maven-plugin:4.0.0:spotbugs failed: Java # returned: 1 -> [Help 1] "java-devel": "11", - "jmx_exporter": "1.1.0", + "jmx_exporter": "1.2.0", }, ] From 24c4afa45533af9f743ff792a9d5abaa1122c138 Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Mon, 5 May 2025 21:09:15 +0200 Subject: [PATCH 17/27] chore(java): Add JDK 24 (#1097) * chore(java): Add JDK 24 * chore: Update changelog --- CHANGELOG.md | 2 ++ java-base/versions.py | 4 ++++ java-devel/versions.py | 4 ++++ 3 files changed, 10 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e5e0e9289..fa6b734f4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ All notable changes to this project will be documented in this file. - zookeeper: check for correct permissions and ownerships in /stackable folder via `check-permissions-ownership.sh` provided in stackable-base image ([#1043]). - nifi: Add OPA authorizer plugin with workaround ([#1058]). +- java: Add JDK 24 ([#1097]). ### Changed @@ -83,6 +84,7 @@ All notable changes to this project will be documented in this file. 
[#1056]: https://github.com/stackabletech/docker-images/pull/1056 [#1058]: https://github.com/stackabletech/docker-images/pull/1058 [#1090]: https://github.com/stackabletech/docker-images/pull/1090 +[#1097]: https://github.com/stackabletech/docker-images/pull/1097 ## [25.3.0] - 2025-03-21 diff --git a/java-base/versions.py b/java-base/versions.py index 58e38ba05..cb0977a68 100644 --- a/java-base/versions.py +++ b/java-base/versions.py @@ -23,4 +23,8 @@ "product": "23", "vector": "0.43.1", }, + { + "product": "24", + "vector": "0.43.1", + }, ] diff --git a/java-devel/versions.py b/java-devel/versions.py index 0ef5fa87d..cd0838e4f 100644 --- a/java-devel/versions.py +++ b/java-devel/versions.py @@ -23,4 +23,8 @@ "product": "23", "stackable-devel": "1.0.0", }, + { + "product": "24", + "stackable-devel": "1.0.0", + }, ] From 27c527ce75905cc0d0d594871c2764ba2f5585ce Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Mon, 5 May 2025 21:15:04 +0200 Subject: [PATCH 18/27] chore(vector): Bump to 0.46.1 (#1098) * chore(vector): Bump to 0.46.1 * chore(vector): Bump products to use 0.46.1 --- CHANGELOG.md | 2 ++ airflow/versions.py | 4 ++-- java-base/versions.py | 12 ++++++------ opa/versions.py | 4 ++-- spark-k8s/versions.py | 4 ++-- superset/versions.py | 4 ++-- vector/versions.py | 2 +- 7 files changed, 17 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa6b734f4..c86b4d7fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,6 +43,7 @@ All notable changes to this project will be documented in this file. - kubectl: Bump products to use `1.33.0` ([#1090]). - yq: Bump products to use `4.45.2` ([#1090]). - cyclonedx-bom: Bump airflow and superset to use `6.0.0` ([#1090]). +- vector: Bump to `0.46.1` ([#1098]). ### Fixed @@ -85,6 +86,7 @@ All notable changes to this project will be documented in this file. 
[#1058]: https://github.com/stackabletech/docker-images/pull/1058 [#1090]: https://github.com/stackabletech/docker-images/pull/1090 [#1097]: https://github.com/stackabletech/docker-images/pull/1097 +[#1098]: https://github.com/stackabletech/docker-images/pull/1098 ## [25.3.0] - 2025-03-21 diff --git a/airflow/versions.py b/airflow/versions.py index b48301b36..5d442f5c5 100644 --- a/airflow/versions.py +++ b/airflow/versions.py @@ -7,7 +7,7 @@ "cyclonedx_bom": "6.0.0", "statsd_exporter": "0.28.0", "tini": "0.19.0", - "vector": "0.43.1", + "vector": "0.46.1", }, { "product": "2.10.4", @@ -17,6 +17,6 @@ "cyclonedx_bom": "6.0.0", "statsd_exporter": "0.28.0", "tini": "0.19.0", - "vector": "0.43.1", + "vector": "0.46.1", }, ] diff --git a/java-base/versions.py b/java-base/versions.py index cb0977a68..1c6f7706d 100644 --- a/java-base/versions.py +++ b/java-base/versions.py @@ -1,27 +1,27 @@ versions = [ { "product": "8", - "vector": "0.43.1", + "vector": "0.46.1", }, { "product": "11", - "vector": "0.43.1", + "vector": "0.46.1", }, { "product": "17", - "vector": "0.43.1", + "vector": "0.46.1", }, { "product": "21", - "vector": "0.43.1", + "vector": "0.46.1", }, { "product": "22", - "vector": "0.43.1", + "vector": "0.46.1", }, { "product": "23", - "vector": "0.43.1", + "vector": "0.46.1", }, { "product": "24", diff --git a/opa/versions.py b/opa/versions.py index b5340fe53..ae22cb29e 100644 --- a/opa/versions.py +++ b/opa/versions.py @@ -1,13 +1,13 @@ versions = [ { "product": "1.0.1", - "vector": "0.43.1", + "vector": "0.46.1", "bundle_builder_version": "1.1.2", "stackable-base": "1.0.0", }, { "product": "0.67.1", - "vector": "0.43.1", + "vector": "0.46.1", "bundle_builder_version": "1.1.2", "stackable-base": "1.0.0", }, diff --git a/spark-k8s/versions.py b/spark-k8s/versions.py index 035e206fc..0fedd8b23 100644 --- a/spark-k8s/versions.py +++ b/spark-k8s/versions.py @@ -12,7 +12,7 @@ "jackson_dataformat_xml": "2.15.2", # https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.13/3.5.1 "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 - "vector": "0.43.1", + "vector": "0.46.1", "jmx_exporter": "1.2.0", "tini": "0.19.0", "hbase_connector": "1.0.1", @@ -30,7 +30,7 @@ "jackson_dataformat_xml": "2.15.2", # https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.13/3.5.2 "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 - "vector": "0.43.1", + "vector": "0.46.1", "jmx_exporter": "1.2.0", "tini": "0.19.0", "hbase_connector": "1.0.1", diff --git a/superset/versions.py b/superset/versions.py index b76d08621..4615098d6 100644 --- a/superset/versions.py +++ b/superset/versions.py @@ -3,7 +3,7 @@ "product": "4.0.2", "python": "3.9", "cyclonedx_bom": "6.0.0", - "vector": "0.43.1", + "vector": "0.46.1", "statsd_exporter": "0.28.0", "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.4.1/requirements/extra.txt#L7 "stackable-base": "1.0.0", @@ -12,7 +12,7 @@ "product": "4.1.1", "python": "3.9", # 3.11 support was merged in January 2025 (two months after 4.1.1 release), 3.10 is not available in our UBI image, so we need to stay on 3.9 for now "cyclonedx_bom": "6.0.0", - 
"vector": "0.43.1", + "vector": "0.46.1", "statsd_exporter": "0.28.0", "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.5.0/requirements/extra.txt#L7 "stackable-base": "1.0.0", diff --git a/vector/versions.py b/vector/versions.py index 674cc454b..f66006719 100644 --- a/vector/versions.py +++ b/vector/versions.py @@ -1,6 +1,6 @@ versions = [ { - "product": "0.43.1", + "product": "0.46.1", "rpm_release": "1", "stackable-base": "1.0.0", "inotify_tools": "3.22.1.0-1.el9", From 439d9fe83e42906e096d16bc59865add782c4da9 Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Mon, 5 May 2025 21:18:26 +0200 Subject: [PATCH 19/27] chore(zookeeper): Remove 3.9.2 (#1093) * chore: Changelog formatting * chore(zookeeper): Remove 2.9.2 * chore: Update changelog * chore: Update changelog --- CHANGELOG.md | 2 ++ zookeeper/stackable/patches/3.9.2/.gitkeep | 0 .../3.9.2/0001-Add-CycloneDX-plugin.patch | 34 ------------------- .../stackable/patches/3.9.2/patchable.toml | 2 -- zookeeper/versions.py | 10 ------ 5 files changed, 2 insertions(+), 46 deletions(-) delete mode 100644 zookeeper/stackable/patches/3.9.2/.gitkeep delete mode 100644 zookeeper/stackable/patches/3.9.2/0001-Add-CycloneDX-plugin.patch delete mode 100644 zookeeper/stackable/patches/3.9.2/patchable.toml diff --git a/CHANGELOG.md b/CHANGELOG.md index c86b4d7fc..d93973c9e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -64,6 +64,7 @@ All notable changes to this project will be documented in this file. - ci: Remove Nexus steps from build, mirror and release workflows ([#1056]). Also remove the old release workflow. +- zookeeper: Remove 3.9.2 ([#1093]). [#1025]: https://github.com/stackabletech/docker-images/pull/1025 [#1027]: https://github.com/stackabletech/docker-images/pull/1027 @@ -85,6 +86,7 @@ All notable changes to this project will be documented in this file. 
[#1056]: https://github.com/stackabletech/docker-images/pull/1056 [#1058]: https://github.com/stackabletech/docker-images/pull/1058 [#1090]: https://github.com/stackabletech/docker-images/pull/1090 +[#1093]: https://github.com/stackabletech/docker-images/pull/1093 [#1097]: https://github.com/stackabletech/docker-images/pull/1097 [#1098]: https://github.com/stackabletech/docker-images/pull/1098 diff --git a/zookeeper/stackable/patches/3.9.2/.gitkeep b/zookeeper/stackable/patches/3.9.2/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/zookeeper/stackable/patches/3.9.2/0001-Add-CycloneDX-plugin.patch b/zookeeper/stackable/patches/3.9.2/0001-Add-CycloneDX-plugin.patch deleted file mode 100644 index a37700a52..000000000 --- a/zookeeper/stackable/patches/3.9.2/0001-Add-CycloneDX-plugin.patch +++ /dev/null @@ -1,34 +0,0 @@ -From 0ac6dd909cbcb2d6d16ec7120ad65d5874ea0e8e Mon Sep 17 00:00:00 2001 -From: Lukas Voetmand -Date: Fri, 6 Sep 2024 17:53:52 +0200 -Subject: Add CycloneDX plugin - ---- - pom.xml | 7 ++++++- - 1 file changed, 6 insertions(+), 1 deletion(-) - -diff --git a/pom.xml b/pom.xml -index 743b87f7..3873e403 100644 ---- a/pom.xml -+++ b/pom.xml -@@ -925,7 +925,7 @@ - - org.cyclonedx - cyclonedx-maven-plugin -- 2.7.9 -+ 2.8.0 - - - -@@ -1200,6 +1200,11 @@ - - org.cyclonedx - cyclonedx-maven-plugin -+ -+ application -+ 1.5 -+ false -+ - - - diff --git a/zookeeper/stackable/patches/3.9.2/patchable.toml b/zookeeper/stackable/patches/3.9.2/patchable.toml deleted file mode 100644 index f3ebf6062..000000000 --- a/zookeeper/stackable/patches/3.9.2/patchable.toml +++ /dev/null @@ -1,2 +0,0 @@ -upstream = "https://github.com/apache/zookeeper.git" -base = "e454e8c7283100c7caec6dcae2bc82aaecb63023" diff --git a/zookeeper/versions.py b/zookeeper/versions.py index 067e9d3f9..d38b73b96 100644 --- a/zookeeper/versions.py +++ b/zookeeper/versions.py @@ -1,14 +1,4 @@ versions = [ - { - "product": "3.9.2", - "java-base": "17", - # NOTE (@NickLarsenNZ): Builds fail on Java 17, with the output: - # [ERROR] Failed to execute goal com.github.spotbugs:spotbugs-maven-plugin:4.0.0:spotbugs (spotbugs) on project - # zookeeper: Execution spotbugs of goal com.github.spotbugs:spotbugs-maven-plugin:4.0.0:spotbugs failed: Java - # returned: 1 -> [Help 1] - "java-devel": "11", - "jmx_exporter": "1.2.0", - }, { "product": "3.9.3", "java-base": "17", From 3b0674fe805c44594d83350fc6ee3e7aaadbc986 Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Mon, 5 May 2025 21:47:56 +0200 Subject: [PATCH 20/27] chore(ubi-rust-builders): Update container images ahead of Stackable Release 25.7.0 (#1091) * chore(ubi-rust-builders): Remove ubi8-rust-builder * chore(ubi-rust-builders): Update base image and use protoc 30.2 * Apply suggestions from code review Co-authored-by: Techassi Co-authored-by: Sebastian Bernauer --------- Co-authored-by: Techassi Co-authored-by: Sebastian Bernauer --- .../update-base-ubi-rust-builders.md | 3 +- .github/workflows/ubi-rust-builder.yml | 4 +- CHANGELOG.md | 5 +- ubi8-rust-builder/Dockerfile | 111 ------------------ ubi9-rust-builder/Dockerfile | 4 +- 5 files changed, 9 insertions(+), 118 deletions(-) delete mode 100644 ubi8-rust-builder/Dockerfile diff --git a/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md b/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md index 6daed3bed..7e3a3e26e 100644 --- a/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md +++ b/.github/ISSUE_TEMPLATE/update-base-ubi-rust-builders.md @@ -51,8 
+51,7 @@ Add/Change/Remove anything that isn't applicable anymore > This list should be completed by the assignee(s), once respective PRs have been merged. Once all items have been > checked, the issue can be moved into _Development: Done_. -- Done for [ubi8-rust-builder/Dockerfile](https://github.com/stackabletech/docker-images/blob/main/ubi8-rust-builder/Dockerfile) -- Done for [ubi9-rust-builder/Dockerfile](https://github.com/stackabletech/docker-images/blob/main/ubi9-rust-builder/Dockerfile) +- [ ] Done for [ubi9-rust-builder/Dockerfile](https://github.com/stackabletech/docker-images/blob/main/ubi9-rust-builder/Dockerfile) - [ ] Can build the image locally - [ ] Can build an operator image diff --git a/.github/workflows/ubi-rust-builder.yml b/.github/workflows/ubi-rust-builder.yml index 132bc7152..91254d182 100644 --- a/.github/workflows/ubi-rust-builder.yml +++ b/.github/workflows/ubi-rust-builder.yml @@ -19,7 +19,7 @@ jobs: fail-fast: false matrix: runner: ["ubuntu-latest", "ubicloud-standard-8-arm"] - ubi-version: ["ubi8", "ubi9"] + ubi-version: ["ubi9"] runs-on: ${{ matrix.runner }} steps: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 @@ -59,7 +59,7 @@ jobs: strategy: fail-fast: false matrix: - ubi-version: ["ubi8", "ubi9"] + ubi-version: ["ubi9"] runs-on: ubuntu-latest needs: ["build"] steps: diff --git a/CHANGELOG.md b/CHANGELOG.md index d93973c9e..936f1b03a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,7 +37,8 @@ All notable changes to this project will be documented in this file. ### Changed - ubi-rust-builder: Bump Rust toolchain to 1.85.0, cargo-cyclonedx to 0.5.7, and cargo-auditable to 0.6.6 ([#1050]). -- spark-k8s: Include spark-connect jars. Replace OpenJDK with Temurin JDK. Cleanup ([#1034]). +- ubi9-rust-builder: Bump base image and update protoc to `30.2` ([#1091]). +- spark-k8s: Include spark-connect jars, replace OpenJDK with Temurin JDK, cleanup ([#1034]). - spark-connect-client: Image is now completely based on spark-k8s and includes JupyterLab and other demo dependencies ([#1071]). - jmx_exporter: Bump products to use `1.2.0` ([#1090]). - kubectl: Bump products to use `1.33.0` ([#1090]). @@ -65,6 +66,7 @@ All notable changes to this project will be documented in this file. - ci: Remove Nexus steps from build, mirror and release workflows ([#1056]). Also remove the old release workflow. - zookeeper: Remove 3.9.2 ([#1093]). +- Remove ubi8-rust-builder image ([#1091]). [#1025]: https://github.com/stackabletech/docker-images/pull/1025 [#1027]: https://github.com/stackabletech/docker-images/pull/1027 @@ -86,6 +88,7 @@ All notable changes to this project will be documented in this file. 
[#1056]: https://github.com/stackabletech/docker-images/pull/1056 [#1058]: https://github.com/stackabletech/docker-images/pull/1058 [#1090]: https://github.com/stackabletech/docker-images/pull/1090 +[#1091]: https://github.com/stackabletech/docker-images/pull/1091 [#1093]: https://github.com/stackabletech/docker-images/pull/1093 [#1097]: https://github.com/stackabletech/docker-images/pull/1097 [#1098]: https://github.com/stackabletech/docker-images/pull/1098 diff --git a/ubi8-rust-builder/Dockerfile b/ubi8-rust-builder/Dockerfile deleted file mode 100644 index 4a30f0e97..000000000 --- a/ubi8-rust-builder/Dockerfile +++ /dev/null @@ -1,111 +0,0 @@ -# syntax=docker/dockerfile:1.10.0@sha256:865e5dd094beca432e8c0a1d5e1c465db5f998dca4e439981029b3b81fb39ed5 -# check=error=true - -# Deprecation notice: Stackable has moved to UBI9 as of its 24.7 release -# This builder is kept around and updated until the last SDP release is EOL that uses UBI8 (which is 24.3) so we'll remove this sometime in the summer of 2025 - -# Find the latest version at https://catalog.redhat.com/software/containers/ubi8-minimal/5c64772edd19c77a158ea216?container-tabs=gti -# IMPORTANT: Be sure to use the Manifest List Digest for multi-arch support -FROM registry.access.redhat.com/ubi8-minimal@sha256:7583ca0ea52001562bd81a961da3f75222209e6192e4e413ee226cff97dbd48c AS builder - -LABEL maintainer="Stackable GmbH" - -# This SHOULD be kept in sync with operator-templating and other tools to reduce build times -# Find the latest version here: https://doc.rust-lang.org/stable/releases.html -# renovate: datasource=github-releases packageName=rust-lang/rust -ENV RUST_DEFAULT_TOOLCHAIN_VERSION=1.85.0 -# Find the latest version here: https://crates.io/crates/cargo-cyclonedx -# renovate: datasource=crate packageName=cargo-cyclonedx -ENV CARGO_CYCLONEDX_CRATE_VERSION=0.5.7 -# Find the latest version here: https://crates.io/crates/cargo-auditable -# renovate: datasource=crate packageName=cargo-auditable -ENV CARGO_AUDITABLE_CRATE_VERSION=0.6.6 -# Find the latest version here: https://github.com/protocolbuffers/protobuf/releases -# Upload any newer version to nexus with ./.scripts/upload_new_protoc_version.sh -# renovate: datasource=github-releases packageName=protocolbuffers/protobuf -ENV PROTOC_VERSION=27.3 - -# Sets the default shell to Bash with strict error handling and robust pipeline processing. -# "-e": Exits immediately if a command exits with a non-zero status -# "-u": Treats unset variables as an error, preventing unexpected behavior from undefined variables. -# "-o pipefail": Causes a pipeline to return the exit status of the last command in the pipe that failed, ensuring errors in any part of a pipeline are not ignored. -# "-c": Allows the execution of commands passed as a string -# This is automatically inherited in all other Dockerfiles that use this unless it is overwritten -SHELL ["/bin/bash", "-euo", "pipefail", "-c"] - -# We configure microdnf to not install weak dependencies in this file -# Not doing this caused the content of images to become unpredictable because -# based on which packages get updated by `microdnf update` new weak dependencies -# might be installed that were not present earlier (the ubi base image doesn't -# seem to install weak dependencies) -# This also affects the packages that are installed in our Dockerfiles (java as prime -# example). 
-# https://github.com/stackabletech/docker-images/pull/533
-COPY stackable-base/stackable/dnf.conf /etc/dnf/dnf.conf
-
-# Update image and install everything needed for Rustup & Rust
-RUN microdnf update \
-  && microdnf install \
-  clang \
-  cmake \
-  curl \
-  findutils \
-  gcc \
-  gcc-c++ \
-  krb5-libs \
-  libkadm5 \
-  make \
-  openssl-devel \
-  pkg-config \
-  systemd-devel \
-  unzip \
-  && microdnf clean all \
-  && rm -rf /var/cache/yum
-
-# Container Storage Interface is defined using GRPC/Protobuf, our operators that use it (secret-operator/listener-operator) require
-# protoc via Prost (https://github.com/tokio-rs/prost).
-WORKDIR /opt/protoc
-# Prost does not document which version of protoc it expects (https://docs.rs/prost-build/0.12.4/prost_build/), so this should be the latest upstream version
-# (within reason).
-RUN ARCH=$(arch | sed 's/^aarch64$/aarch_64/') \
-  && curl --fail --location --output protoc.zip "https://repo.stackable.tech/repository/packages/protoc/protoc-${PROTOC_VERSION}-linux-${ARCH}.zip" \
-  && unzip protoc.zip \
-  && rm protoc.zip
-ENV PROTOC=/opt/protoc/bin/protoc
-WORKDIR /
-
-# IMPORTANT
-# If you change the toolchain version here, make sure to also change the "rust_version"
-# property in operator-templating/config/rust.yaml
-RUN < Date: Wed, 7 May 2025 12:50:37 +0200
Subject: [PATCH 21/27] chore(opa): Update versions ahead of 25.7.0 (#1103)

* chore(opa): Remove 0.67.1

* chore(opa): Remove legacy bundle-builder

* chore(opa): Bump ubi9 base image

* chore(opa): Add 1.4.2

* fix(opa): Manually install Go version

NOTE: The dnf package was 1.23.6 and OPA required 1.23.8
NOTE: I tried making the version parameterized, but bake wouldn't allow
it (it worked fine with `docker build` and `docker buildx build`).

* ci(mirror): Add golang

* chore(opa): Use mirrored golang image

* chore(opa): Parameterise golang container version

* chore: Update changelog
---
 .github/workflows/mirror.yaml |  1 +
 CHANGELOG.md                  |  7 ++++++
 opa/Dockerfile                | 43 ++++++-----------------------------
 opa/versions.py               |  8 +++----
 stackable-devel/Dockerfile    |  2 +-
 5 files changed, 20 insertions(+), 41 deletions(-)

diff --git a/.github/workflows/mirror.yaml b/.github/workflows/mirror.yaml
index b34aa7366..fa8ef3721 100644
--- a/.github/workflows/mirror.yaml
+++ b/.github/workflows/mirror.yaml
@@ -14,6 +14,7 @@ on:
           - registry.k8s.io/sig-storage/csi-node-driver-registrar
           - registry.k8s.io/sig-storage/csi-provisioner
           - registry.k8s.io/git-sync/git-sync
+          - registry-1.docker.io/library/golang
         image-index-manifest-tag:
           description: |
             The image index manifest tag, like 1.0.14 or v1.0.14

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 936f1b03a..2a4010b53 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -23,6 +23,7 @@ All notable changes to this project will be documented in this file.
   `check-permissions-ownership.sh` provided in stackable-base image ([#1027]).
 - opa: check for correct permissions and ownerships in /stackable folder via
   `check-permissions-ownership.sh` provided in stackable-base image ([#1038]).
+- opa: Add `1.4.2` ([#1103]).
 - spark-k8s: check for correct permissions and ownerships in /stackable folder via
   `check-permissions-ownership.sh` provided in stackable-base image ([#1055]).
 - superset: check for correct permissions and ownerships in /stackable folder via
@@ -33,11 +34,13 @@ All notable changes to this project will be documented in this file.
   `check-permissions-ownership.sh` provided in stackable-base image ([#1043]).
 - nifi: Add OPA authorizer plugin with workaround ([#1058]).
- java: Add JDK 24 ([#1097]). +- ci: Add golang image to mirror workflow ([#1103]). ### Changed - ubi-rust-builder: Bump Rust toolchain to 1.85.0, cargo-cyclonedx to 0.5.7, and cargo-auditable to 0.6.6 ([#1050]). - ubi9-rust-builder: Bump base image and update protoc to `30.2` ([#1091]). +- stackable-devel: Bump ubi9 base image ([#1103]). - spark-k8s: Include spark-connect jars, replace OpenJDK with Temurin JDK, cleanup ([#1034]). - spark-connect-client: Image is now completely based on spark-k8s and includes JupyterLab and other demo dependencies ([#1071]). - jmx_exporter: Bump products to use `1.2.0` ([#1090]). @@ -56,6 +59,7 @@ All notable changes to this project will be documented in this file. - Add `--locked` flag to `cargo install` commands for reproducible builds ([#1044]). - nifi: reduce docker image size by removing the recursive chown/chmods in the final image ([#1027]). - opa: reduce docker image size by removing the recursive chown/chmods in the final image ([#1038]). +- opa: Manually install Go 1.23.9 ([#1103]). - spark-k8s: reduce docker image size by removing the recursive chown/chmods in the final image ([#1042]). - trino: reduce docker image size by removing the recursive chown/chmods in the final image ([#1025]). - zookeeper: reduce docker image size by removing the recursive chown/chmods in the final image ([#1043]). @@ -67,6 +71,8 @@ All notable changes to this project will be documented in this file. Also remove the old release workflow. - zookeeper: Remove 3.9.2 ([#1093]). - Remove ubi8-rust-builder image ([#1091]). +- opa: Remove `0.67.1` ([#1103]). +- opa: Remove legacy bundle-builder from container build ([#1103]). [#1025]: https://github.com/stackabletech/docker-images/pull/1025 [#1027]: https://github.com/stackabletech/docker-images/pull/1027 @@ -92,6 +98,7 @@ All notable changes to this project will be documented in this file. 
[#1093]: https://github.com/stackabletech/docker-images/pull/1093 [#1097]: https://github.com/stackabletech/docker-images/pull/1097 [#1098]: https://github.com/stackabletech/docker-images/pull/1098 +[#1103]: https://github.com/stackabletech/docker-images/pull/1103 ## [25.3.0] - 2025-03-21 diff --git a/opa/Dockerfile b/opa/Dockerfile index 0437fb466..0d6678fb2 100644 --- a/opa/Dockerfile +++ b/opa/Dockerfile @@ -1,39 +1,9 @@ # syntax=docker/dockerfile:1.10.0@sha256:865e5dd094beca432e8c0a1d5e1c465db5f998dca4e439981029b3b81fb39ed5 -# check=error=true +# check=error=true;skip=InvalidDefaultArgInFrom -FROM stackable/image/stackable-base AS opa-bundle-builder +ARG GOLANG -ARG BUNDLE_BUILDER_VERSION - -# Update image and install everything needed for Rustup & Rust -RUN <= 1.23.1) go install github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@v1.7.0 @@ -137,7 +109,6 @@ LABEL name="Open Policy Agent" \ COPY --chown=${STACKABLE_USER_UID}:0 opa/licenses /licenses COPY --from=opa-builder --chown=${STACKABLE_USER_UID}:0 /stackable/opa /stackable/opa -COPY --from=opa-bundle-builder --chown=${STACKABLE_USER_UID}:0 /opa-bundle-builder/target/release/stackable-opa-bundle-builder /stackable/opa-bundle-builder COPY --from=multilog-builder --chown=${STACKABLE_USER_UID}:0 /daemontools/admin/daemontools/command/multilog /stackable/multilog RUN < Date: Wed, 7 May 2025 12:54:36 +0200 Subject: [PATCH 22/27] fix: Bump vector to 0.46.1 for java-base:24 (#1104) --- java-base/versions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/java-base/versions.py b/java-base/versions.py index 1c6f7706d..d4a1b095f 100644 --- a/java-base/versions.py +++ b/java-base/versions.py @@ -25,6 +25,6 @@ }, { "product": "24", - "vector": "0.43.1", + "vector": "0.46.1", }, ] From 3203717cb25fe91f635eb8a8df803b13b2d87630 Mon Sep 17 00:00:00 2001 From: Nick <10092581+NickLarsenNZ@users.noreply.github.com> Date: Wed, 7 May 2025 13:31:46 +0200 Subject: [PATCH 23/27] chore(superset): Add 4.1.2 (#1102) * chore(superset): Add 4.1.2 * fix(superset): Remove invalid parts of the file * chore: Update changelog --- CHANGELOG.md | 2 + superset/constraints-4.1.2.txt | 397 +++++++++++++++++++++++++++++++++ superset/versions.py | 9 + 3 files changed, 408 insertions(+) create mode 100644 superset/constraints-4.1.2.txt diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a4010b53..2bc2b088a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,6 +28,7 @@ All notable changes to this project will be documented in this file. `check-permissions-ownership.sh` provided in stackable-base image ([#1055]). - superset: check for correct permissions and ownerships in /stackable folder via `check-permissions-ownership.sh` provided in stackable-base image ([#1053]). +- superset: Add version `4.1.2` ([#1102]). - trino: check for correct permissions and ownerships in /stackable folder via `check-permissions-ownership.sh` provided in stackable-base image ([#1025]). - zookeeper: check for correct permissions and ownerships in /stackable folder via @@ -98,6 +99,7 @@ All notable changes to this project will be documented in this file. 
[#1093]: https://github.com/stackabletech/docker-images/pull/1093 [#1097]: https://github.com/stackabletech/docker-images/pull/1097 [#1098]: https://github.com/stackabletech/docker-images/pull/1098 +[#1102]: https://github.com/stackabletech/docker-images/pull/1102 [#1103]: https://github.com/stackabletech/docker-images/pull/1103 ## [25.3.0] - 2025-03-21 diff --git a/superset/constraints-4.1.2.txt b/superset/constraints-4.1.2.txt new file mode 100644 index 000000000..859a9ebc7 --- /dev/null +++ b/superset/constraints-4.1.2.txt @@ -0,0 +1,397 @@ +# from https://raw.githubusercontent.com/apache/superset/refs/tags/4.1.2/requirements/base.txt +alembic==1.13.1 + # via flask-migrate +amqp==5.2.0 + # via kombu +apispec==6.3.0 + # via flask-appbuilder +apsw==3.46.0.0 + # via shillelagh +attrs==23.2.0 + # via + # cattrs + # jsonschema + # requests-cache +babel==2.15.0 + # via flask-babel +backoff==2.2.1 + # via apache-superset +bcrypt==4.1.3 + # via paramiko +billiard==4.2.0 + # via celery +blinker==1.8.2 + # via flask +bottleneck==1.3.8 + # via pandas +brotli==1.1.0 + # via flask-compress +cachelib==0.9.0 + # via + # flask-caching + # flask-session +cachetools==5.3.3 + # via google-auth +cattrs==23.2.3 + # via requests-cache +celery==5.4.0 + # via apache-superset +certifi==2024.2.2 + # via requests +cffi==1.16.0 + # via + # cryptography + # pynacl +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # apache-superset + # celery + # click-didyoumean + # click-option-group + # click-plugins + # click-repl + # flask + # flask-appbuilder +click-didyoumean==0.3.1 + # via celery +click-option-group==0.5.6 + # via apache-superset +click-plugins==1.1.1 + # via celery +click-repl==0.3.0 + # via celery +colorama==0.4.6 + # via + # apache-superset + # flask-appbuilder +cron-descriptor==1.4.3 + # via apache-superset +croniter==2.0.5 + # via apache-superset +cryptography==42.0.7 + # via + # apache-superset + # paramiko + # pyopenssl +deprecated==1.2.14 + # via limits +deprecation==2.1.0 + # via apache-superset +dnspython==2.6.1 + # via email-validator +email-validator==2.1.1 + # via flask-appbuilder +flask==2.3.3 + # via + # apache-superset + # flask-appbuilder + # flask-babel + # flask-caching + # flask-compress + # flask-jwt-extended + # flask-limiter + # flask-login + # flask-migrate + # flask-session + # flask-sqlalchemy + # flask-wtf +flask-appbuilder==4.5.0 + # via apache-superset +flask-babel==2.0.0 + # via flask-appbuilder +flask-caching==2.3.0 + # via apache-superset +flask-compress==1.15 + # via apache-superset +flask-jwt-extended==4.6.0 + # via flask-appbuilder +flask-limiter==3.7.0 + # via flask-appbuilder +flask-login==0.6.3 + # via + # apache-superset + # flask-appbuilder +flask-migrate==3.1.0 + # via apache-superset +flask-session==0.8.0 + # via apache-superset +flask-sqlalchemy==2.5.1 + # via + # flask-appbuilder + # flask-migrate +flask-talisman==1.1.0 + # via apache-superset +flask-wtf==1.2.1 + # via + # apache-superset + # flask-appbuilder +func-timeout==4.3.5 + # via apache-superset +geographiclib==2.0 + # via geopy +geopy==2.4.1 + # via apache-superset +google-auth==2.29.0 + # via shillelagh +greenlet==3.0.3 + # via shillelagh +gunicorn==22.0.0 + # via apache-superset +hashids==1.3.1 + # via apache-superset +holidays==0.25 + # via apache-superset +humanize==4.9.0 + # via apache-superset +idna==3.7 + # via + # email-validator + # requests +importlib-metadata==7.1.0 + # via apache-superset +importlib-resources==6.4.0 + # via limits +isodate==0.6.1 + # via apache-superset 
+itsdangerous==2.2.0 + # via + # flask + # flask-wtf +jinja2==3.1.4 + # via + # flask + # flask-babel +jsonpath-ng==1.6.1 + # via apache-superset +jsonschema==4.17.3 + # via flask-appbuilder +kombu==5.3.7 + # via celery +korean-lunar-calendar==0.3.1 + # via holidays +limits==3.12.0 + # via flask-limiter +llvmlite==0.42.0 + # via numba +mako==1.3.5 + # via + # alembic + # apache-superset +markdown==3.6 + # via apache-superset +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.5 + # via + # jinja2 + # mako + # werkzeug + # wtforms +marshmallow==3.21.2 + # via + # flask-appbuilder + # marshmallow-sqlalchemy +marshmallow-sqlalchemy==0.28.2 + # via flask-appbuilder +mdurl==0.1.2 + # via markdown-it-py +msgpack==1.0.8 + # via apache-superset +msgspec==0.18.6 + # via flask-session +nh3==0.2.17 + # via apache-superset +numba==0.59.1 + # via pandas +numexpr==2.10.0 + # via + # -r requirements/base.in + # pandas +numpy==1.23.5 + # via + # apache-superset + # bottleneck + # numba + # numexpr + # pandas + # pyarrow +ordered-set==4.1.0 + # via flask-limiter +packaging==23.2 + # via + # apache-superset + # apispec + # deprecation + # gunicorn + # limits + # marshmallow + # marshmallow-sqlalchemy + # shillelagh +pandas==2.0.3 + # via apache-superset +paramiko==3.4.0 + # via + # apache-superset + # sshtunnel +parsedatetime==2.6 + # via apache-superset +pgsanity==0.2.9 + # via apache-superset +platformdirs==3.8.1 + # via requests-cache +ply==3.11 + # via jsonpath-ng +polyline==2.0.2 + # via apache-superset +prison==0.2.1 + # via flask-appbuilder +prompt-toolkit==3.0.44 + # via click-repl +pyarrow==14.0.2 + # via apache-superset +pyasn1==0.6.0 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.0 + # via google-auth +pycparser==2.22 + # via cffi +pygments==2.18.0 + # via rich +pyjwt==2.8.0 + # via + # apache-superset + # flask-appbuilder + # flask-jwt-extended +pynacl==1.5.0 + # via paramiko +pyopenssl==24.1.0 + # via shillelagh +pyparsing==3.1.2 + # via apache-superset +pyrsistent==0.20.0 + # via jsonschema +python-dateutil==2.9.0.post0 + # via + # apache-superset + # celery + # croniter + # flask-appbuilder + # holidays + # pandas + # shillelagh +python-dotenv==1.0.1 + # via apache-superset +python-geohash==0.8.5 + # via apache-superset +pytz==2024.1 + # via + # croniter + # flask-babel + # pandas +pyyaml==6.0.1 + # via + # apache-superset + # apispec +redis==4.6.0 + # via apache-superset +requests==2.32.2 + # via + # requests-cache + # shillelagh +requests-cache==1.2.0 + # via shillelagh +rich==13.7.1 + # via flask-limiter +rsa==4.9 + # via google-auth +selenium==3.141.0 + # via apache-superset +shillelagh==1.2.18 + # via apache-superset +shortid==0.1.2 + # via apache-superset +simplejson==3.19.2 + # via apache-superset +six==1.16.0 + # via + # isodate + # prison + # python-dateutil + # url-normalize + # wtforms-json +slack-sdk==3.27.2 + # via apache-superset +sqlalchemy==1.4.52 + # via + # alembic + # apache-superset + # flask-appbuilder + # flask-sqlalchemy + # marshmallow-sqlalchemy + # shillelagh + # sqlalchemy-utils +sqlalchemy-utils==0.38.3 + # via + # apache-superset + # flask-appbuilder +sqlglot==25.24.0 + # via apache-superset +sqlparse==0.5.0 + # via apache-superset +sshtunnel==0.4.0 + # via apache-superset +tabulate==0.8.10 + # via apache-superset +typing-extensions==4.12.0 + # via + # alembic + # apache-superset + # flask-limiter + # limits + # shillelagh +tzdata==2024.1 + # via + # celery + # pandas +url-normalize==1.4.3 + # via requests-cache +urllib3==1.26.18 + # via + # -r 
requirements/base.in + # requests + # requests-cache + # selenium +vine==5.1.0 + # via + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via prompt-toolkit +werkzeug==3.0.6 + # via + # -r requirements/base.in + # flask + # flask-appbuilder + # flask-jwt-extended + # flask-login +wrapt==1.16.0 + # via deprecated +wtforms==3.1.2 + # via + # apache-superset + # flask-appbuilder + # flask-wtf + # wtforms-json +wtforms-json==0.3.5 + # via apache-superset +xlsxwriter==3.0.9 + # via apache-superset +zipp==3.19.0 + # via importlib-metadata +zstandard==0.22.0 + # via flask-compress diff --git a/superset/versions.py b/superset/versions.py index 4615098d6..34caff7a4 100644 --- a/superset/versions.py +++ b/superset/versions.py @@ -17,4 +17,13 @@ "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.5.0/requirements/extra.txt#L7 "stackable-base": "1.0.0", }, + { + "product": "4.1.2", + "python": "3.9", + "cyclonedx_bom": "6.0.0", + "vector": "0.46.1", + "statsd_exporter": "0.28.0", + "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.5.0/requirements/extra.txt#L7 + "stackable-base": "1.0.0", + }, ] From 31490f8218083bd75a1ebfa63bd3b5db434f00e0 Mon Sep 17 00:00:00 2001 From: Sebastian Bernauer Date: Wed, 7 May 2025 13:50:48 +0200 Subject: [PATCH 24/27] feat(nifi): Add nifi-iceberg-bundle (#1060) * feat(nifi): Add nifi-iceberg-bundle * changelog * Add SBOM to final image * Use version 0.0.1 * hadolint * Bump to 0.0.2 * Bump to 0.0.3 --- CHANGELOG.md | 2 ++ nifi/Dockerfile | 84 ++++++++++++++++++++++++++++++++++++++++++++++++ nifi/versions.py | 1 + 3 files changed, 87 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2bc2b088a..5620509a2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,6 +34,7 @@ All notable changes to this project will be documented in this file. - zookeeper: check for correct permissions and ownerships in /stackable folder via `check-permissions-ownership.sh` provided in stackable-base image ([#1043]). - nifi: Add OPA authorizer plugin with workaround ([#1058]). +- nifi: Add [nifi-iceberg-bundle](https://github.com/stackabletech/nifi-iceberg-bundle) for NiFi `2.2.0` ([#1060]). - java: Add JDK 24 ([#1097]). - ci: Add golang image to mirror workflow ([#1103]). @@ -94,6 +95,7 @@ All notable changes to this project will be documented in this file. 
[#1055]: https://github.com/stackabletech/docker-images/pull/1055 [#1056]: https://github.com/stackabletech/docker-images/pull/1056 [#1058]: https://github.com/stackabletech/docker-images/pull/1058 +[#1060]: https://github.com/stackabletech/docker-images/pull/1060 [#1090]: https://github.com/stackabletech/docker-images/pull/1090 [#1091]: https://github.com/stackabletech/docker-images/pull/1091 [#1093]: https://github.com/stackabletech/docker-images/pull/1093 diff --git a/nifi/Dockerfile b/nifi/Dockerfile index 2e54b3ca5..db2c50db6 100644 --- a/nifi/Dockerfile +++ b/nifi/Dockerfile @@ -63,6 +63,88 @@ rm -rf /stackable/nifi-${PRODUCT}/docs chmod -R g=u /stackable EOF +FROM stackable/image/java-devel AS nifi-iceberg-bundle-builder + +ARG NIFI_ICEBERG_BUNDLE +ARG PRODUCT +ARG STACKABLE_USER_UID + +USER ${STACKABLE_USER_UID} +WORKDIR /build + +RUN < Date: Wed, 7 May 2025 14:13:28 +0200 Subject: [PATCH 25/27] fix(nifi): Delete correct intermediate folder (#1106) * fix(nifi): Delete correct intermediate folder * changelog --- CHANGELOG.md | 3 ++- nifi/Dockerfile | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5620509a2..e2917809d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,7 +34,7 @@ All notable changes to this project will be documented in this file. - zookeeper: check for correct permissions and ownerships in /stackable folder via `check-permissions-ownership.sh` provided in stackable-base image ([#1043]). - nifi: Add OPA authorizer plugin with workaround ([#1058]). -- nifi: Add [nifi-iceberg-bundle](https://github.com/stackabletech/nifi-iceberg-bundle) for NiFi `2.2.0` ([#1060]). +- nifi: Add [nifi-iceberg-bundle](https://github.com/stackabletech/nifi-iceberg-bundle) for NiFi `2.2.0` ([#1060], [#1106]). - java: Add JDK 24 ([#1097]). - ci: Add golang image to mirror workflow ([#1103]). @@ -103,6 +103,7 @@ All notable changes to this project will be documented in this file. [#1098]: https://github.com/stackabletech/docker-images/pull/1098 [#1102]: https://github.com/stackabletech/docker-images/pull/1102 [#1103]: https://github.com/stackabletech/docker-images/pull/1103 +[#1106]: https://github.com/stackabletech/docker-images/pull/1106 ## [25.3.0] - 2025-03-21 diff --git a/nifi/Dockerfile b/nifi/Dockerfile index db2c50db6..abf905c3f 100644 --- a/nifi/Dockerfile +++ b/nifi/Dockerfile @@ -96,7 +96,7 @@ if [[ "${PRODUCT}" != 1.* ]] ; then cd .. # Save disk space, even for intermediate images - rm -rf nifi-iceberg-bundle-main + rm -rf nifi-iceberg-bundle-${NIFI_ICEBERG_BUNDLE} # Set correct groups chmod g=u /stackable/*.nar From 7d027daeeabb9463d84b2f3993c748d3be224dba Mon Sep 17 00:00:00 2001 From: Benedikt Labrenz Date: Thu, 8 May 2025 14:28:32 +0200 Subject: [PATCH 26/27] fix Dockerfile --- nifi/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nifi/Dockerfile b/nifi/Dockerfile index cda460444..3d3bbf069 100644 --- a/nifi/Dockerfile +++ b/nifi/Dockerfile @@ -96,7 +96,7 @@ if [[ "${PRODUCT}" != 1.* ]] ; then cd .. 
# Save disk space, even for intermediate images - rm -rf nifi-iceberg-bundle-main + rm -rf nifi-iceberg-bundle-${NIFI_ICEBERG_BUNDLE} # Set correct groups chmod g=u /stackable/*.nar From f1d05ffbaa7b0dbfa996c2b705dc83389714af7c Mon Sep 17 00:00:00 2001 From: Benedikt Labrenz Date: Fri, 9 May 2025 14:40:41 +0200 Subject: [PATCH 27/27] build nifi opa plugin from tag --- nifi/Dockerfile | 29 +++++++++++++++++++---------- nifi/versions.py | 3 +++ 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/nifi/Dockerfile b/nifi/Dockerfile index 3d3bbf069..7ad645dba 100644 --- a/nifi/Dockerfile +++ b/nifi/Dockerfile @@ -77,7 +77,7 @@ mkdir -p /stackable # NiFI 1.x natively supports Iceberg, no need to build an iceberg-bundle for it if [[ "${PRODUCT}" != 1.* ]] ; then - curl "https://github.com/stackabletech/nifi-iceberg-bundle/archive/refs/tags/${NIFI_ICEBERG_BUNDLE}.tar.gz" | tar -xzC . + curl -L "https://github.com/stackabletech/nifi-iceberg-bundle/archive/refs/tags/${NIFI_ICEBERG_BUNDLE}.tar.gz" | tar -xzC . cd nifi-iceberg-bundle-${NIFI_ICEBERG_BUNDLE} || exit mvn \ @@ -106,24 +106,33 @@ EOF FROM stackable/image/java-devel AS opa-authorizer-builder +ARG NIFI_OPA_AUTHORIZER_PLUGIN ARG STACKABLE_USER_UID ARG PRODUCT USER ${STACKABLE_USER_UID} -WORKDIR /stackable +WORKDIR /build -# TODO: Set to tag after new release of nifi-opa-plugin -RUN git clone --depth 1 --branch feat/reworked-opa-response https://github.com/DavidGitter/nifi-opa-plugin.git && \ - cd nifi-opa-plugin/authorizer && \ - mvn \ +RUN <