diff --git a/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml b/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml index 7bc99a76..dd650853 100644 --- a/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml +++ b/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: start-pyspark-job - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 # N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly # restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps # below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that diff --git a/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml b/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml index f3c05ee6..b5e9ba8d 100644 --- a/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml +++ b/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: start-date-job - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 # N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly # restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. 
The wait/watch steps # below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml index c00b8644..277c6005 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml @@ -9,11 +9,11 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-kafka - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/instance=kafka -l app.kubernetes.io/name=kafka"] containers: - name: create-nifi-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/LakehouseKafkaIngest.xml && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml index 09653e8c..8915daf5 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml @@ -12,11 +12,11 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-kafka - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ["bash", "-c", "echo 'Waiting for all 
kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=kafka -l app.kubernetes.io/instance=kafka"] containers: - name: create-spark-ingestion-job - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ingestion-job.yaml"] volumeMounts: - name: manifest diff --git a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml index 8503ada8..0c2a42d9 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml @@ -9,11 +9,11 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-testdata - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ["bash", "-c", "echo 'Waiting for job load-test-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-test-data"] containers: - name: create-tables-in-trino - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml b/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml index 519b4086..d5fdff67 100644 --- a/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml +++ b/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: 
docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/end-to-end-security/create-spark-report.yaml b/demos/end-to-end-security/create-spark-report.yaml index 3a3a1764..da142e43 100644 --- a/demos/end-to-end-security/create-spark-report.yaml +++ b/demos/end-to-end-security/create-spark-report.yaml @@ -12,7 +12,7 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-trino-tables - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: - bash - -euo @@ -23,7 +23,7 @@ spec: kubectl wait --timeout=30m --for=condition=complete job/create-tables-in-trino containers: - name: create-spark-report - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: - bash - -euo diff --git a/demos/end-to-end-security/create-trino-tables.yaml b/demos/end-to-end-security/create-trino-tables.yaml index 65e03a9d..7c488d5f 100644 --- a/demos/end-to-end-security/create-trino-tables.yaml +++ b/demos/end-to-end-security/create-trino-tables.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-tables-in-trino - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable23.11.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml b/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml index 28da5466..67bc539b 100644 --- a/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml +++ 
b/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml @@ -9,7 +9,7 @@ spec: spec: containers: - name: create-hfile-and-import-to-hbase - image: docker.stackable.tech/stackable/hbase:2.4.17-stackable24.3.0 + image: docker.stackable.tech/stackable/hbase:2.4.18-stackable24.7.0 env: - name: HADOOP_USER_NAME value: stackable diff --git a/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml b/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml index d97b58c5..d3a82873 100644 --- a/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml +++ b/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: distcp-cycling-data - image: docker.stackable.tech/stackable/hadoop:3.3.6-stackable24.3.0 + image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable24.7.0 env: - name: HADOOP_USER_NAME value: stackable diff --git a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/Dockerfile b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/Dockerfile index 4129cb46..9895fc28 100644 --- a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/Dockerfile +++ b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/Dockerfile @@ -1,6 +1,6 @@ # docker build -t docker.stackable.tech/demos/pyspark-k8s-with-scikit-learn:3.3.0-stackable0.0.0-dev . -FROM docker.stackable.tech/stackable/pyspark-k8s:3.5.0-stackable24.3.0 +FROM docker.stackable.tech/stackable/pyspark-k8s:3.5.0-stackable24.7.0 COPY requirements.txt . 
diff --git a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml index e7f6b68a..3e1c8c3a 100644 --- a/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml +++ b/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: load-ny-taxi-data - image: docker.stackable.tech/stackable/hadoop:3.3.6-stackable24.3.0 + image: docker.stackable.tech/stackable/hadoop:3.4.0-stackable24.7.0 command: ["bash", "-c", "/stackable/hadoop/bin/hdfs dfs -mkdir -p /ny-taxi-data/raw \ && cd /tmp \ && for month in 2020-09; do \ diff --git a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml index 1e9941ca..3416ed91 100644 --- a/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml +++ b/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-druid-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor"] volumeMounts: - name: ingestion-job-spec diff --git a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml index 75cc8c3a..231d8818 100644 --- a/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml +++ b/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-nifi-ingestion-job - image: 
docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/IngestEarthquakesToKafka.xml && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml index e6de9b61..b52a2adf 100644 --- a/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml +++ b/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml index e7b137d7..3c2d6208 100644 --- a/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml +++ b/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-druid-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/stations-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d 
@/tmp/ingestion-job-spec/measurements-ingestion-job-spec.json https://druid-coordinator:8281/druid/indexer/v1/supervisor && curl -X POST --insecure -H 'Content-Type: application/json' -d @/tmp/ingestion-job-spec/measurements-compaction-job-spec.json https://druid-coordinator:8281/druid/coordinator/v1/config/compaction"] volumeMounts: - name: ingestion-job-spec diff --git a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml index ebf967c9..6795a681 100644 --- a/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml +++ b/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-nifi-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/IngestWaterLevelsToKafka.xml && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml index 55740b24..6cf44c53 100644 --- a/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml +++ b/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/signal-processing/Dockerfile-nifi 
b/demos/signal-processing/Dockerfile-nifi index 3515d923..be1c2594 100644 --- a/demos/signal-processing/Dockerfile-nifi +++ b/demos/signal-processing/Dockerfile-nifi @@ -1,5 +1,5 @@ -# docker build -f ./Dockerfile-nifi -t docker.stackable.tech/demos/nifi:1.25.0-postgresql . +# docker build -f ./Dockerfile-nifi -t docker.stackable.tech/demos/nifi:1.27.0-postgresql . -FROM docker.stackable.tech/stackable/nifi:1.25.0-stackable24.3.0 +FROM docker.stackable.tech/stackable/nifi:1.27.0-stackable24.7.0 RUN curl --fail -o /stackable/nifi/postgresql-42.6.0.jar "https://repo.stackable.tech/repository/misc/postgresql-timescaledb/postgresql-42.6.0.jar" diff --git a/demos/signal-processing/create-nifi-ingestion-job.yaml b/demos/signal-processing/create-nifi-ingestion-job.yaml index c6adaf00..51179a50 100644 --- a/demos/signal-processing/create-nifi-ingestion-job.yaml +++ b/demos/signal-processing/create-nifi-ingestion-job.yaml @@ -9,13 +9,13 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-timescale-job - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ["bash", "-c", "echo 'Waiting for timescaleDB tables to be ready' && kubectl wait --for=condition=complete job/create-timescale-tables-job" ] containers: - name: create-nifi-ingestion-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "export PGPASSWORD=$(cat /timescale-admin-credentials/password) && \ curl -O https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/DownloadAndWriteToDB.xml && \ sed -i \"s/PLACEHOLDERPGPASSWORD/$PGPASSWORD/g\" DownloadAndWriteToDB.xml && \ diff --git a/demos/signal-processing/create-timescale-tables.yaml b/demos/signal-processing/create-timescale-tables.yaml index 96acb874..61089f34 100644 --- 
a/demos/signal-processing/create-timescale-tables.yaml +++ b/demos/signal-processing/create-timescale-tables.yaml @@ -9,7 +9,7 @@ spec: serviceAccountName: demo-serviceaccount initContainers: - name: wait-for-timescale - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ["bash", "-c", "echo 'Waiting for timescaleDB to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=postgresql-timescaledb" ] diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml index 3481a0fd..39cf03a3 100644 --- a/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml +++ b/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml @@ -8,11 +8,11 @@ spec: spec: initContainers: - name: wait-for-testdata - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "echo 'Waiting for job load-ny-taxi-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-ny-taxi-data"] containers: - name: create-spark-anomaly-detection-job - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ad-job.yaml"] volumeMounts: - name: manifest @@ -37,7 +37,7 @@ data: name: spark-ad spec: sparkImage: - productVersion: 3.5.0 + productVersion: 3.5.1 mode: cluster mainApplicationFile: local:///spark-scripts/spark-ad.py deps: diff --git a/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml b/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml index 
15adcd85..36aba951 100644 --- a/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml +++ b/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/trino-taxi-data/create-table-in-trino.yaml b/demos/trino-taxi-data/create-table-in-trino.yaml index 162f855a..d45ce7d9 100644 --- a/demos/trino-taxi-data/create-table-in-trino.yaml +++ b/demos/trino-taxi-data/create-table-in-trino.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: create-ny-taxi-data-table-in-trino - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/demos/trino-taxi-data/setup-superset.yaml b/demos/trino-taxi-data/setup-superset.yaml index f1744d8c..2c94efda 100644 --- a/demos/trino-taxi-data/setup-superset.yaml +++ b/demos/trino-taxi-data/setup-superset.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: setup-superset - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/superset-assets.zip && python -u /tmp/script/script.py"] volumeMounts: - name: script diff --git a/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc 
b/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc index 7d1e3ac7..0b007674 100644 --- a/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc +++ b/docs/modules/demos/pages/data-lakehouse-iceberg-trino-spark.adoc @@ -34,7 +34,7 @@ $ stackablectl demo install data-lakehouse-iceberg-trino-spark [#system-requirements] == System requirements -The demo was developed and tested on a kubernetes cluster with 10 nodes (4 cores (8 threads), 20GiB RAM and 30GB HDD). +The demo was developed and tested on a kubernetes cluster with about 12 nodes (4 cores with hyperthreading/SMT, 20GiB RAM and 30GB HDD). Instance types that loosely correspond to this on the Hyperscalers are: - *Google*: `e2-standard-8` @@ -179,7 +179,7 @@ As you can see, the table `house-sales` is partitioned by year. Go ahead and cli image::data-lakehouse-iceberg-trino-spark/minio_5.png[] -You can see that Trino has placed a single file into the selected folder containing all the house sales of that +You can see that Trino has placed data into the selected folder containing all the house sales of that particular year. == NiFi @@ -491,7 +491,7 @@ Here you can see all the available Trino catalogs. * `tpcds`: https://trino.io/docs/current/connector/tpcds.html[TPCDS connector] providing a set of schemas to support the http://www.tpc.org/tpcds/[TPC Benchmark™ DS] * `tpch`: https://trino.io/docs/current/connector/tpch.html[TPCH connector] providing a set of schemas to support the - http://www.tpc.org/tpcds/[TPC Benchmark™ DS] + http://www.tpc.org/tpch/[TPC Benchmark™ H] * `lakehouse`: The lakehouse area containing the enriched and performant accessible data == Superset @@ -541,7 +541,7 @@ image::data-lakehouse-iceberg-trino-spark/superset_7.png[] On the left, select the database `Trino lakehouse`, the schema `house_sales`, and set `See table schema` to `house_sales`. 
-image::data-lakehouse-iceberg-trino-spark/superset_8.png[] - -[NOTE] +[IMPORTANT] ==== -This older screenshot shows how the table preview would look like. Currently, there is an https://github.com/apache/superset/issues/25307[open issue] +The older screenshot below shows what the table preview would look like. Currently, there is an https://github.com/apache/superset/issues/25307[open issue] with previewing trino tables using the Iceberg connector. This doesn't affect the execution the following execution of the SQL statement. ==== +image::data-lakehouse-iceberg-trino-spark/superset_8.png[] + In the right textbox, you can enter the desired SQL statement. If you want to avoid making one up, use the following: [source,sql] diff --git a/docs/modules/demos/pages/logging.adoc b/docs/modules/demos/pages/logging.adoc index 12cd0aad..ddb87631 100644 --- a/docs/modules/demos/pages/logging.adoc +++ b/docs/modules/demos/pages/logging.adoc @@ -46,10 +46,10 @@ This demo will synchronization, and providing group services. This demo makes its log data observable in OpenSearch Dashboards. ** *Vector*: A tool for building observability pipelines. This demo uses Vector as a log agent to gather and transform the logs and as an aggregator to forward the collected logs to OpenSearch. -** *OpenSearch*: A data store and search engine. This demo uses it to store and index the of the log data. +** *OpenSearch*: A data store and search engine. This demo uses it to store and index the log data. ** *OpenSearch Dashboards*: A visualization and user interface. This demo uses it to make the log data easily accessible to the user. -* Create a view in OpenSearch Dashboards for convenient browsing the log data. +* Create a view in OpenSearch Dashboards to conveniently browse the log data. 
You can see the deployed products and their relationship in the following diagram: diff --git a/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc b/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc index 6b212922..6308a3a7 100644 --- a/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc +++ b/docs/modules/demos/pages/spark-k8s-anomaly-detection-taxi-data.adoc @@ -23,7 +23,7 @@ This demo should not be run alongside other demos. To run this demo, your system needs at least: -* 8 {k8s-cpu}[cpu units] (core/hyperthread) +* 10 {k8s-cpu}[cpu units] (core/hyperthread) * 32GiB memory * 35GiB disk storage diff --git a/stacks/_templates/jupyterhub.yaml b/stacks/_templates/jupyterhub.yaml index 2de6e693..81481c9a 100644 --- a/stacks/_templates/jupyterhub.yaml +++ b/stacks/_templates/jupyterhub.yaml @@ -3,7 +3,7 @@ name: jupyterhub repo: name: jupyterhub url: https://jupyterhub.github.io/helm-chart/ -version: 3.2.1 +version: 3.3.7 options: hub: config: @@ -49,7 +49,7 @@ options: HADOOP_CONF_DIR: "/home/jovyan/hdfs" initContainers: - name: download-notebook - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/jupyterhub-pyspark-hdfs/notebook.ipynb -o /notebook/notebook.ipynb'] volumeMounts: - mountPath: /notebook diff --git a/stacks/_templates/keycloak.yaml b/stacks/_templates/keycloak.yaml index ca987432..c7d7085e 100644 --- a/stacks/_templates/keycloak.yaml +++ b/stacks/_templates/keycloak.yaml @@ -48,7 +48,7 @@ spec: - name: tls mountPath: /tls/ - name: create-auth-class - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable0.0.0-dev # We need 0.0.0-dev, so we get kubectl + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: ["/bin/bash", "-c"] args: - | diff --git 
a/stacks/_templates/minio-distributed-small.yaml b/stacks/_templates/minio-distributed-small.yaml index 6f734431..2f7ee3d3 100644 --- a/stacks/_templates/minio-distributed-small.yaml +++ b/stacks/_templates/minio-distributed-small.yaml @@ -3,7 +3,7 @@ name: minio repo: name: minio url: https://charts.min.io/ -version: 5.0.14 +version: 5.2.0 options: additionalLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/minio-distributed.yaml b/stacks/_templates/minio-distributed.yaml index c357d5c8..656177a5 100644 --- a/stacks/_templates/minio-distributed.yaml +++ b/stacks/_templates/minio-distributed.yaml @@ -3,7 +3,7 @@ name: minio repo: name: minio url: https://charts.min.io/ -version: 5.0.14 +version: 5.2.0 options: additionalLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/minio.yaml b/stacks/_templates/minio.yaml index 7937eb4a..f75581d3 100644 --- a/stacks/_templates/minio.yaml +++ b/stacks/_templates/minio.yaml @@ -3,7 +3,7 @@ name: minio repo: name: minio url: https://charts.min.io/ -version: 5.0.14 +version: 5.2.0 options: additionalLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/postgresql-airflow.yaml b/stacks/_templates/postgresql-airflow.yaml index 77b23187..63421bab 100644 --- a/stacks/_templates/postgresql-airflow.yaml +++ b/stacks/_templates/postgresql-airflow.yaml @@ -3,7 +3,7 @@ name: postgresql repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 13.2.18 +version: 15.5.16 options: commonLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/postgresql-druid.yaml b/stacks/_templates/postgresql-druid.yaml index fec0f4ad..2e36ceb5 100644 --- a/stacks/_templates/postgresql-druid.yaml +++ b/stacks/_templates/postgresql-druid.yaml @@ -3,7 +3,7 @@ name: postgresql repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 13.2.18 +version: 15.5.16 options: commonLabels: stackable.tech/vendor: Stackable diff --git 
a/stacks/_templates/postgresql-hive-iceberg.yaml b/stacks/_templates/postgresql-hive-iceberg.yaml index a0b9d631..098fa4a5 100644 --- a/stacks/_templates/postgresql-hive-iceberg.yaml +++ b/stacks/_templates/postgresql-hive-iceberg.yaml @@ -3,7 +3,7 @@ name: postgresql repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 13.2.18 +version: 15.5.16 options: commonLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/postgresql-hive.yaml b/stacks/_templates/postgresql-hive.yaml index 2958339b..4feee781 100644 --- a/stacks/_templates/postgresql-hive.yaml +++ b/stacks/_templates/postgresql-hive.yaml @@ -3,7 +3,7 @@ name: postgresql repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 13.2.18 +version: 15.5.16 options: commonLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/postgresql-hivehdfs.yaml b/stacks/_templates/postgresql-hivehdfs.yaml index 67504d96..a2cf58a7 100644 --- a/stacks/_templates/postgresql-hivehdfs.yaml +++ b/stacks/_templates/postgresql-hivehdfs.yaml @@ -3,7 +3,7 @@ name: postgresql repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 13.2.18 +version: 15.5.16 options: commonLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/postgresql-hives3.yaml b/stacks/_templates/postgresql-hives3.yaml index e73aeb62..c1fd8d55 100644 --- a/stacks/_templates/postgresql-hives3.yaml +++ b/stacks/_templates/postgresql-hives3.yaml @@ -3,7 +3,7 @@ name: postgresql repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 13.2.18 +version: 15.5.16 options: commonLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/postgresql-superset.yaml b/stacks/_templates/postgresql-superset.yaml index 4d9d1b4e..485337de 100644 --- a/stacks/_templates/postgresql-superset.yaml +++ b/stacks/_templates/postgresql-superset.yaml @@ -3,7 +3,7 @@ name: postgresql repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 13.2.18 
+version: 15.5.16 options: commonLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/redis-airflow.yaml b/stacks/_templates/redis-airflow.yaml index 2381f61f..19f37cfa 100644 --- a/stacks/_templates/redis-airflow.yaml +++ b/stacks/_templates/redis-airflow.yaml @@ -3,7 +3,7 @@ name: redis repo: name: bitnami url: https://charts.bitnami.com/bitnami/ -version: 18.1.6 +version: 19.6.1 options: commonLabels: stackable.tech/vendor: Stackable diff --git a/stacks/_templates/vector-aggregator.yaml b/stacks/_templates/vector-aggregator.yaml index 2b05dfdb..d7dd458a 100644 --- a/stacks/_templates/vector-aggregator.yaml +++ b/stacks/_templates/vector-aggregator.yaml @@ -3,7 +3,7 @@ name: vector repo: name: vector url: https://helm.vector.dev -version: 0.26.0 +version: 0.34.0 options: commonLabels: stackable.tech/vendor: Stackable @@ -25,7 +25,7 @@ options: - https://opensearch-cluster-master.default.svc.cluster.local:9200 mode: bulk # The auto-detection of the API version does not work in Vector - # 0.33.0 for OpenSearch, so the version must be set explicitly + # 0.39.0 for OpenSearch, so the version must be set explicitly # (see https://github.com/vectordotdev/vector/issues/17690). 
api_version: v8 tls: diff --git a/stacks/airflow/airflow.yaml b/stacks/airflow/airflow.yaml index 5c1a3544..03d2b278 100644 --- a/stacks/airflow/airflow.yaml +++ b/stacks/airflow/airflow.yaml @@ -6,7 +6,7 @@ metadata: name: airflow spec: image: - productVersion: 2.8.1 + productVersion: 2.9.2 clusterConfig: listenerClass: external-unstable loadExamples: false @@ -272,7 +272,7 @@ data: spec: version: "1.0" sparkImage: - productVersion: 3.5.0 + productVersion: 3.5.1 mode: cluster mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py job: diff --git a/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml b/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml index a86ccb46..10274bd2 100644 --- a/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml +++ b/stacks/data-lakehouse-iceberg-trino-spark/trino.yaml @@ -5,7 +5,7 @@ metadata: name: trino spec: image: - productVersion: "442" + productVersion: "451" clusterConfig: listenerClass: external-unstable catalogLabelSelector: @@ -114,7 +114,7 @@ metadata: name: opa spec: image: - productVersion: 0.61.0 + productVersion: 0.66.0 servers: roleGroups: default: {} diff --git a/stacks/dual-hive-hdfs-s3/hdfs.yaml b/stacks/dual-hive-hdfs-s3/hdfs.yaml index db840268..5c6b6d83 100644 --- a/stacks/dual-hive-hdfs-s3/hdfs.yaml +++ b/stacks/dual-hive-hdfs-s3/hdfs.yaml @@ -5,7 +5,7 @@ metadata: name: hdfs-zk spec: image: - productVersion: 3.8.1 + productVersion: 3.9.2 servers: roleGroups: default: @@ -25,7 +25,7 @@ metadata: name: hdfs spec: image: - productVersion: 3.3.4 + productVersion: 3.4.0 clusterConfig: listenerClass: external-unstable dfsReplication: 1 diff --git a/stacks/dual-hive-hdfs-s3/trino.yaml b/stacks/dual-hive-hdfs-s3/trino.yaml index 5af415fe..0ac7bb9f 100644 --- a/stacks/dual-hive-hdfs-s3/trino.yaml +++ b/stacks/dual-hive-hdfs-s3/trino.yaml @@ -61,7 +61,7 @@ metadata: name: trino spec: image: - productVersion: "442" + productVersion: "451" clusterConfig: authorization: opa: diff --git 
a/stacks/end-to-end-security/hdfs.yaml b/stacks/end-to-end-security/hdfs.yaml index b67efeb2..8bcae913 100644 --- a/stacks/end-to-end-security/hdfs.yaml +++ b/stacks/end-to-end-security/hdfs.yaml @@ -4,7 +4,7 @@ metadata: name: hdfs spec: image: - productVersion: 3.3.6 + productVersion: 3.4.0 clusterConfig: zookeeperConfigMapName: hdfs-znode authentication: diff --git a/stacks/end-to-end-security/hive-metastore.yaml b/stacks/end-to-end-security/hive-metastore.yaml index 91ab2f93..e8c49f71 100644 --- a/stacks/end-to-end-security/hive-metastore.yaml +++ b/stacks/end-to-end-security/hive-metastore.yaml @@ -9,8 +9,7 @@ spec: clusterConfig: database: connString: jdbc:postgresql://postgresql-hive-iceberg:5432/hive - user: hive - password: hive + credentialsSecret: postgres-credentials dbType: postgres hdfs: configMap: hdfs @@ -21,3 +20,12 @@ spec: roleGroups: default: replicas: 1 +--- +apiVersion: v1 +kind: Secret +metadata: + name: postgres-credentials +type: Opaque +stringData: + username: hive + password: hive diff --git a/stacks/end-to-end-security/krb5.yaml b/stacks/end-to-end-security/krb5.yaml index 23432787..329c1bd2 100644 --- a/stacks/end-to-end-security/krb5.yaml +++ b/stacks/end-to-end-security/krb5.yaml @@ -14,7 +14,7 @@ spec: spec: initContainers: - name: init - image: docker.stackable.tech/stackable/krb5:1.18.2-stackable0.0.0-dev + image: docker.stackable.tech/stackable/krb5:1.18.2-stackable24.7.0 # TODO: bump to 1.21.1? args: - sh - -euo @@ -35,7 +35,7 @@ spec: name: data containers: - name: kdc - image: docker.stackable.tech/stackable/krb5:1.18.2-stackable0.0.0-dev + image: docker.stackable.tech/stackable/krb5:1.18.2-stackable24.7.0 # TODO: bump to 1.21.1? args: - krb5kdc - -n @@ -48,7 +48,7 @@ spec: - mountPath: /var/kerberos/krb5kdc name: data - name: kadmind - image: docker.stackable.tech/stackable/krb5:1.18.2-stackable0.0.0-dev + image: docker.stackable.tech/stackable/krb5:1.18.2-stackable24.7.0 # TODO: bump to 1.21.1? 
args: - kadmind - -nofork @@ -61,7 +61,7 @@ spec: - mountPath: /var/kerberos/krb5kdc name: data - name: client - image: docker.stackable.tech/stackable/krb5:1.18.2-stackable0.0.0-dev + image: docker.stackable.tech/stackable/krb5:1.18.2-stackable24.7.0 # TODO: bump to 1.21.1? tty: true stdin: true env: diff --git a/stacks/end-to-end-security/opa.yaml b/stacks/end-to-end-security/opa.yaml index 73987ca1..543a8f06 100644 --- a/stacks/end-to-end-security/opa.yaml +++ b/stacks/end-to-end-security/opa.yaml @@ -5,7 +5,7 @@ metadata: name: opa spec: image: - productVersion: 0.61.0 + productVersion: 0.66.0 clusterConfig: userInfo: backend: diff --git a/stacks/end-to-end-security/superset.yaml b/stacks/end-to-end-security/superset.yaml index 2400002b..9226aa10 100644 --- a/stacks/end-to-end-security/superset.yaml +++ b/stacks/end-to-end-security/superset.yaml @@ -5,7 +5,7 @@ metadata: name: superset spec: image: - productVersion: 3.1.0 + productVersion: 3.1.3 clusterConfig: listenerClass: external-unstable credentialsSecret: superset-credentials diff --git a/stacks/end-to-end-security/trino.yaml b/stacks/end-to-end-security/trino.yaml index 12edae96..4aebad31 100644 --- a/stacks/end-to-end-security/trino.yaml +++ b/stacks/end-to-end-security/trino.yaml @@ -5,7 +5,7 @@ metadata: name: trino spec: image: - productVersion: "442" + productVersion: "451" clusterConfig: listenerClass: external-unstable tls: diff --git a/stacks/end-to-end-security/zookeeper.yaml b/stacks/end-to-end-security/zookeeper.yaml index 2f93fb8d..2b2699d7 100644 --- a/stacks/end-to-end-security/zookeeper.yaml +++ b/stacks/end-to-end-security/zookeeper.yaml @@ -5,7 +5,7 @@ metadata: name: zookeeper spec: image: - productVersion: 3.8.3 + productVersion: 3.9.2 servers: roleGroups: default: diff --git a/stacks/hdfs-hbase/hbase.yaml b/stacks/hdfs-hbase/hbase.yaml index 5d6d6041..c6f6797e 100644 --- a/stacks/hdfs-hbase/hbase.yaml +++ b/stacks/hdfs-hbase/hbase.yaml @@ -5,7 +5,7 @@ metadata: name: hbase spec: 
image: - productVersion: 2.4.17 + productVersion: 2.4.18 clusterConfig: listenerClass: external-unstable hdfsConfigMapName: hdfs diff --git a/stacks/hdfs-hbase/hdfs.yaml b/stacks/hdfs-hbase/hdfs.yaml index addd16ff..9b2f355a 100644 --- a/stacks/hdfs-hbase/hdfs.yaml +++ b/stacks/hdfs-hbase/hdfs.yaml @@ -4,7 +4,7 @@ metadata: name: hdfs spec: image: - productVersion: 3.3.6 + productVersion: 3.4.0 clusterConfig: dfsReplication: 1 zookeeperConfigMapName: hdfs-znode diff --git a/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml b/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml index 4a3bb988..54b0ad69 100644 --- a/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml +++ b/stacks/jupyterhub-pyspark-hdfs/hdfs.yaml @@ -13,7 +13,7 @@ metadata: name: hdfs spec: image: - productVersion: 3.3.6 + productVersion: 3.4.0 clusterConfig: dfsReplication: 1 zookeeperConfigMapName: hdfs-znode diff --git a/stacks/keycloak-opa-poc/druid.yaml b/stacks/keycloak-opa-poc/druid.yaml index 2dd9a0d8..514761ae 100644 --- a/stacks/keycloak-opa-poc/druid.yaml +++ b/stacks/keycloak-opa-poc/druid.yaml @@ -5,7 +5,7 @@ metadata: name: druid spec: image: - productVersion: 27.0.0 + productVersion: 28.0.1 clusterConfig: listenerClass: external-unstable deepStorage: diff --git a/stacks/keycloak-opa-poc/hdfs.yaml b/stacks/keycloak-opa-poc/hdfs.yaml index 40d34d25..30222c36 100644 --- a/stacks/keycloak-opa-poc/hdfs.yaml +++ b/stacks/keycloak-opa-poc/hdfs.yaml @@ -5,7 +5,7 @@ metadata: name: hdfs spec: image: - productVersion: 3.3.6 + productVersion: 3.4.0 clusterConfig: dfsReplication: 1 zookeeperConfigMapName: hdfs-znode diff --git a/stacks/keycloak-opa-poc/keycloak.yaml b/stacks/keycloak-opa-poc/keycloak.yaml index 61c14428..a6c2e225 100644 --- a/stacks/keycloak-opa-poc/keycloak.yaml +++ b/stacks/keycloak-opa-poc/keycloak.yaml @@ -70,7 +70,7 @@ spec: spec: containers: - name: propagate-keycloak-address - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: 
docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 command: - bash - -x diff --git a/stacks/keycloak-opa-poc/opa.yaml b/stacks/keycloak-opa-poc/opa.yaml index 15fd2c2b..41615844 100644 --- a/stacks/keycloak-opa-poc/opa.yaml +++ b/stacks/keycloak-opa-poc/opa.yaml @@ -5,7 +5,7 @@ metadata: name: opa spec: image: - productVersion: 0.61.0 + productVersion: 0.66.0 servers: roleGroups: default: {} diff --git a/stacks/keycloak-opa-poc/policies.yaml b/stacks/keycloak-opa-poc/policies.yaml index 6ba37fc1..c2c0ebec 100644 --- a/stacks/keycloak-opa-poc/policies.yaml +++ b/stacks/keycloak-opa-poc/policies.yaml @@ -8,15 +8,15 @@ metadata: data: trino.rego: | package trino - import future.keywords.in + import rego.v1 default allow = false - allow { + allow if { input.context.identity.user in ["alice", "admin"] } - allow { + allow if { input.action.operation == "ImpersonateUser" input.action.resource.user.name == input.context.identity.user } @@ -31,14 +31,17 @@ data: druid.rego: | package druid import data.bundles.opagroups.admins - import future.keywords.in - import future.keywords.if + import rego.v1 default allow = false - allow if input.user in admins + allow if { + input.user in admins + } - allow if input.user == "druid_system" + allow if { + input.user == "druid_system" + } # A CM like this is created by the setup keycloak Job # It is used for Druid roles, as we currently need to write them based on the user uuids. 
# --- diff --git a/stacks/keycloak-opa-poc/setup-keycloak.yaml b/stacks/keycloak-opa-poc/setup-keycloak.yaml index 72a39e13..62d538a2 100644 --- a/stacks/keycloak-opa-poc/setup-keycloak.yaml +++ b/stacks/keycloak-opa-poc/setup-keycloak.yaml @@ -28,7 +28,7 @@ spec: spec: containers: - name: setup-keycloak - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 env: - name: KEYCLOAK_ADMIN_PASSWORD valueFrom: diff --git a/stacks/keycloak-opa-poc/trino.yaml b/stacks/keycloak-opa-poc/trino.yaml index af941163..315ec5b0 100644 --- a/stacks/keycloak-opa-poc/trino.yaml +++ b/stacks/keycloak-opa-poc/trino.yaml @@ -5,7 +5,7 @@ metadata: name: trino spec: image: - productVersion: "428" + productVersion: "451" clusterConfig: listenerClass: external-unstable tls: diff --git a/stacks/keycloak-opa-poc/zookeeper.yaml b/stacks/keycloak-opa-poc/zookeeper.yaml index 0c133c90..4e6c0b27 100644 --- a/stacks/keycloak-opa-poc/zookeeper.yaml +++ b/stacks/keycloak-opa-poc/zookeeper.yaml @@ -5,7 +5,7 @@ metadata: name: zk spec: image: - productVersion: 3.8.3 + productVersion: 3.9.2 servers: roleGroups: default: diff --git a/stacks/logging/setup-opensearch-dashboards.yaml b/stacks/logging/setup-opensearch-dashboards.yaml index a0cd5d19..c3b4330d 100644 --- a/stacks/logging/setup-opensearch-dashboards.yaml +++ b/stacks/logging/setup-opensearch-dashboards.yaml @@ -8,7 +8,7 @@ spec: spec: containers: - name: setup-opensearch-dashboards - image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.3.0 + image: docker.stackable.tech/stackable/testing-tools:0.2.0-stackable24.7.0 env: - name: OPEN_SEARCH_ADMIN_PASSWORD valueFrom: diff --git a/stacks/nifi-kafka-druid-superset-s3/druid.yaml b/stacks/nifi-kafka-druid-superset-s3/druid.yaml index c2ee0369..b7630d99 100644 --- a/stacks/nifi-kafka-druid-superset-s3/druid.yaml +++ b/stacks/nifi-kafka-druid-superset-s3/druid.yaml @@ -14,8 +14,6 @@ 
spec: connString: jdbc:postgresql://postgresql-druid/druid host: postgresql-druid port: 5432 - user: druid - password: druid deepStorage: s3: bucket: @@ -95,3 +93,11 @@ metadata: stringData: accessKey: admin secretKey: {{ minioAdminPassword }} +--- +apiVersion: v1 +kind: Secret +metadata: + name: druid-db-credentials +stringData: + username: druid + password: druid \ No newline at end of file diff --git a/stacks/nifi-kafka-druid-superset-s3/kafka.yaml b/stacks/nifi-kafka-druid-superset-s3/kafka.yaml index dca4dc71..e0b85399 100644 --- a/stacks/nifi-kafka-druid-superset-s3/kafka.yaml +++ b/stacks/nifi-kafka-druid-superset-s3/kafka.yaml @@ -13,7 +13,7 @@ metadata: name: kafka spec: image: - productVersion: 3.6.1 + productVersion: 3.7.1 clusterConfig: zookeeperConfigMapName: kafka-znode authentication: diff --git a/stacks/nifi-kafka-druid-superset-s3/nifi.yaml b/stacks/nifi-kafka-druid-superset-s3/nifi.yaml index 741eacaa..8105be96 100644 --- a/stacks/nifi-kafka-druid-superset-s3/nifi.yaml +++ b/stacks/nifi-kafka-druid-superset-s3/nifi.yaml @@ -5,7 +5,7 @@ metadata: name: nifi spec: image: - productVersion: 1.25.0 + productVersion: 1.27.0 clusterConfig: authentication: - authenticationClass: nifi-admin-credentials diff --git a/stacks/nifi-kafka-druid-superset-s3/superset.yaml b/stacks/nifi-kafka-druid-superset-s3/superset.yaml index 122a22b3..d70644a1 100644 --- a/stacks/nifi-kafka-druid-superset-s3/superset.yaml +++ b/stacks/nifi-kafka-druid-superset-s3/superset.yaml @@ -5,7 +5,7 @@ metadata: name: superset spec: image: - productVersion: 3.1.0 + productVersion: 4.0.2 clusterConfig: listenerClass: external-unstable credentialsSecret: superset-credentials diff --git a/stacks/signal-processing/jupyterhub.yaml b/stacks/signal-processing/jupyterhub.yaml index 3595737c..6b086a44 100644 --- a/stacks/signal-processing/jupyterhub.yaml +++ b/stacks/signal-processing/jupyterhub.yaml @@ -38,7 +38,7 @@ options: stackable.tech/vendor: Stackable initContainers: - name: 
download-notebook - image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.3.0 + image: docker.stackable.tech/stackable/tools:1.0.0-stackable24.7.0 command: ['sh', '-c', 'curl https://raw.githubusercontent.com/stackabletech/demos/main/stacks/signal-processing/tsdb.ipynb -o /notebook/tsdb.ipynb'] volumeMounts: - mountPath: /notebook diff --git a/stacks/signal-processing/nifi.yaml b/stacks/signal-processing/nifi.yaml index bdb9cede..19e88cda 100644 --- a/stacks/signal-processing/nifi.yaml +++ b/stacks/signal-processing/nifi.yaml @@ -5,8 +5,8 @@ metadata: name: nifi spec: image: - productVersion: 1.25.0 - custom: docker.stackable.tech/demos/nifi:1.25.0-postgresql + productVersion: 1.27.0 + custom: docker.stackable.tech/demos/nifi:1.27.0-postgresql clusterConfig: listenerClass: external-unstable zookeeperConfigMapName: nifi-znode diff --git a/stacks/stacks-v2.yaml b/stacks/stacks-v2.yaml index ec899364..757dd40f 100644 --- a/stacks/stacks-v2.yaml +++ b/stacks/stacks-v2.yaml @@ -1,7 +1,7 @@ stacks: monitoring: description: Stack containing Prometheus and Grafana - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -24,7 +24,7 @@ stacks: default: adminadmin logging: description: Stack containing OpenSearch, OpenSearch Dashboards (Kibana) and Vector aggregator - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -84,7 +84,7 @@ stacks: default: adminadmin airflow: description: Stack containing Airflow scheduling platform - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -111,7 +111,7 @@ stacks: default: airflowSecretKey data-lakehouse-iceberg-trino-spark: description: Data lakehouse using Iceberg lakehouse on S3, Trino as query engine, Spark for streaming ingest and Superset for data visualization - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -168,7 +168,7 @@ stacks: default: supersetSecretKey 
hdfs-hbase: description: HBase cluster using HDFS as underlying storage - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -191,7 +191,7 @@ stacks: parameters: [] nifi-kafka-druid-superset-s3: description: Stack containing NiFi, Kafka, Druid, MinIO and Superset for data visualization - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -237,7 +237,7 @@ stacks: default: adminadmin spark-trino-superset-s3: description: Stack containing MinIO, Trino and Superset for data visualization - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -282,7 +282,7 @@ stacks: default: supersetSecretKey trino-superset-s3: description: Stack containing MinIO, Trino and Superset for data visualization - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -324,7 +324,7 @@ stacks: default: supersetSecretKey trino-iceberg: description: Stack containing Trino using Apache Iceberg as a S3 data lakehouse - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -358,7 +358,7 @@ stacks: default: adminadmin jupyterhub-pyspark-hdfs: description: Jupyterhub with PySpark and HDFS integration - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -388,7 +388,7 @@ stacks: default: adminadmin dual-hive-hdfs-s3: description: Dual stack Hive on HDFS and S3 for Hadoop/Hive to Trino migration - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -425,7 +425,7 @@ stacks: The bind user credentials are: ldapadmin:ldapadminpassword. No AuthenticationClass is configured, The AuthenticationClass is created manually in the tutorial. Use the 'openldap' Stack for an OpenLDAD with an AuthenticationClass already installed. 
- stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -448,7 +448,7 @@ stacks: The bind user credentials are: ldapadmin:ldapadminpassword. The LDAP AuthenticationClass is called 'ldap' and the SecretClass for the bind credentials is called 'ldap-bind-credentials'. The stack already creates an appropriate Secret, so referring to the 'ldap' AuthenticationClass in your ProductCluster should be enough. - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -474,7 +474,7 @@ stacks: 3 users are created in Keycloak: admin:adminadmin, alice:alicealice, bob:bobbob. admin and alice are admins with full authorization in Druid and Trino, bob is not authorized. This is a proof-of-concept and the mechanisms used here are subject to change. - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -540,7 +540,7 @@ stacks: Please note that this stack is tightly coupled with the demo. So if you install the stack you will get demo-specific parts (such as Keycloak users or regorules). 
- stackableRelease: end-to-end-security-release + stackableRelease: 24.7 stackableOperators: - commons - listener @@ -610,7 +610,7 @@ stacks: signal-processing: description: >- A stack used for creating, streaming and processing in-flight data and persisting it to TimescaleDB before it is displayed in Grafana - stackableRelease: 24.3 + stackableRelease: 24.7 stackableOperators: - commons - listener diff --git a/stacks/trino-iceberg/trino.yaml b/stacks/trino-iceberg/trino.yaml index 2619a05b..4f15796a 100644 --- a/stacks/trino-iceberg/trino.yaml +++ b/stacks/trino-iceberg/trino.yaml @@ -5,7 +5,7 @@ metadata: name: trino spec: image: - productVersion: "442" + productVersion: "451" clusterConfig: listenerClass: external-unstable catalogLabelSelector: @@ -99,7 +99,7 @@ metadata: name: opa spec: image: - productVersion: 0.61.0 + productVersion: 0.66.0 servers: roleGroups: default: diff --git a/stacks/trino-superset-s3/superset.yaml b/stacks/trino-superset-s3/superset.yaml index b4768220..24a8cfd4 100644 --- a/stacks/trino-superset-s3/superset.yaml +++ b/stacks/trino-superset-s3/superset.yaml @@ -5,7 +5,7 @@ metadata: name: superset spec: image: - productVersion: 3.1.0 + productVersion: 4.0.2 clusterConfig: listenerClass: external-unstable credentialsSecret: superset-credentials diff --git a/stacks/trino-superset-s3/trino.yaml b/stacks/trino-superset-s3/trino.yaml index 8beb0e31..c8df87b2 100644 --- a/stacks/trino-superset-s3/trino.yaml +++ b/stacks/trino-superset-s3/trino.yaml @@ -5,7 +5,7 @@ metadata: name: trino spec: image: - productVersion: "442" + productVersion: "451" clusterConfig: listenerClass: external-unstable catalogLabelSelector: @@ -71,7 +71,7 @@ metadata: name: opa spec: image: - productVersion: 0.61.0 + productVersion: 0.66.0 servers: roleGroups: default: {}