From 61e6915afa3abd6cc8bbfced9bacf0a81aa97c27 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Mari=C3=A1n=20Macik?=
Date: Wed, 13 Dec 2023 13:41:18 +0100
Subject: [PATCH 1/4] RHOAIENG-905 - Create a PR check for testing README file (without gitops)

---
 .../aws-env.yaml                            |  2 +-
 test/shell-pipeline-tests/common.sh         | 59 +++++++++++++
 test/shell-pipeline-tests/pipelines-test.sh | 83 +++++++++++++++++++
 3 files changed, 143 insertions(+), 1 deletion(-)
 create mode 100644 test/shell-pipeline-tests/common.sh
 create mode 100755 test/shell-pipeline-tests/pipelines-test.sh

diff --git a/pipelines/tekton/build-container-image-pipeline/aws-env.yaml b/pipelines/tekton/build-container-image-pipeline/aws-env.yaml
index 3c199c50..6382ae46 100644
--- a/pipelines/tekton/build-container-image-pipeline/aws-env.yaml
+++ b/pipelines/tekton/build-container-image-pipeline/aws-env.yaml
@@ -5,4 +5,4 @@ metadata:
 stringData:
   aws-storage-config: |+
-    { "type": "s3", "access_key_id": "$", "secret_access_key": "$", "endpoint_url": "https://example.amazonaws.com/", "region": "us-west-1" }
+    { "type": "s3", "access_key_id": "{{ YOUR_AWS_ACCESS_KEY }}", "secret_access_key": "{{ YOUR_AWS_SECRET_KEY }}", "endpoint_url": "{{ S3_ENDPOINT__https://example.amazonaws.com/ }}", "region": "{{ S3_REGION__us-west-1 }}" }
diff --git a/test/shell-pipeline-tests/common.sh b/test/shell-pipeline-tests/common.sh
new file mode 100644
index 00000000..042e74c5
--- /dev/null
+++ b/test/shell-pipeline-tests/common.sh
@@ -0,0 +1,59 @@
+#!/usr/bin/env bash
+
+function waitForOpResult() {
+  max_retry=$1
+  shift
+  expected=$1
+  shift
+  unexpected=$1
+  shift
+  counter=0
+  cmd=$1
+  echo "waitForOpResult waiting for command '$cmd' to finish with expected result '$expected' or unexpected result '$unexpected'"
+  res=$(eval "$cmd")
+  until [ "$res" == "$expected" ]
+  do
+    [[ $counter -eq $max_retry ]] && echo "Failed! waitForOpResult running command '$cmd' and waiting for expected output '$expected' reached max retry count '$max_retry'." >&2 && return 1
+    [[ "$res" == "$unexpected" ]] && echo "Failed! waitForOpResult running command '$cmd' and waiting for expected output '$expected' finished with unexpected result '$res'." >&2 && return 1
+    echo "Waiting for another try"
+    sleep 5
+    ((counter++))
+    echo "Trying again. Try #$counter out of $max_retry" >&2
+    res=$(eval "$cmd")
+  done
+  echo "waitForOpResult running command '$cmd' finished with expected result '$res'"
+  #TODO return code
+}
+
+function saveArtifacts() {
+  ## Backup all Pipeline Runs, Task Runs
+  local PIPELINE_RUN_NAME=$1
+  local LOGS_DIR="${ARTIFACT_DIR}/$PIPELINE_RUN_NAME"
+  mkdir -p "$LOGS_DIR"
+  echo "Archiving YAML definitions and logs for '$PIPELINE_RUN_NAME' to '$LOGS_DIR'"
+  oc get pipeline -o yaml > "${LOGS_DIR}"/pipelines.txt
+  oc get pipelinerun $PIPELINE_RUN_NAME -o yaml > "${LOGS_DIR}"/pipelineRuns.txt
+  oc get task -o yaml > "${LOGS_DIR}"/tasks.txt
+  oc get taskrun -l "tekton.dev/pipelineRun=$PIPELINE_RUN_NAME" -o yaml > "${LOGS_DIR}"/taskRuns.txt
+  oc logs -l "tekton.dev/pipelineRun=$PIPELINE_RUN_NAME" --all-containers --prefix --tail=-1 > "${LOGS_DIR}"/logs.txt
+}
+
+function createS3Secret() {
+  local AWS_SECRET_PATH_TEMPLATE=$1
+  local AWS_SECRET_PATH=$2
+
+  local AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH
+  local AWS_ACCESS_KEY
+  local AWS_SECRET_KEY
+
+  AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH="${CUSTOM_AWS_SECRET_PATH:-/secrets/ai-edge-aws}"
+  AWS_ACCESS_KEY=$(cat "$AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH"/accessKey)
+  AWS_SECRET_KEY=$(cat "$AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH"/secretAccessKey)
+
+  cp "$AWS_SECRET_PATH_TEMPLATE" "$AWS_SECRET_PATH"
+
+  sed -i "s|{{ YOUR_AWS_ACCESS_KEY }}|${AWS_ACCESS_KEY}|" "$AWS_SECRET_PATH"
+  sed -i "s|{{ YOUR_AWS_SECRET_KEY }}|${AWS_SECRET_KEY}|" "$AWS_SECRET_PATH"
+  sed -i "s|{{ S3_ENDPOINT__https://example.amazonaws.com/ }}|https://s3.us-west-1.amazonaws.com|" "$AWS_SECRET_PATH"
+  sed -i "s|{{ S3_REGION__us-west-1 }}|us-west-1|" "$AWS_SECRET_PATH"
+}
diff --git a/test/shell-pipeline-tests/pipelines-test.sh b/test/shell-pipeline-tests/pipelines-test.sh
new file mode 100755
index 00000000..23e3c211
--- /dev/null
+++ b/test/shell-pipeline-tests/pipelines-test.sh
@@ -0,0 +1,83 @@
+#!/usr/bin/env bash
+REPO_ROOT_DIR=$(dirname "${BASH_SOURCE[0]}")/../..
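+# The repository root is resolved relative to this script's own location, so the test can be launched from any working directory.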
+PIPELINES_DIR="$REPO_ROOT_DIR/pipelines"
+
+source "$REPO_ROOT_DIR"/test/shell-pipeline-tests/common.sh
+
+NAMESPACE="pipeline-test"
+oc delete project "$NAMESPACE" --ignore-not-found --timeout=60s
+oc new-project "$NAMESPACE"
+
+##### BUILD CONTAINER IMAGE PIPELINE #####
+BUILD_CONTAINER_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/build-container-image-pipeline
+
+AWS_SECRET_PATH_TEMPLATE="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env.yaml
+AWS_SECRET_PATH="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env-overridden.yaml
+
+createS3Secret "$AWS_SECRET_PATH_TEMPLATE" "$AWS_SECRET_PATH"
+
+oc create -f "$AWS_SECRET_PATH"
+
+## oc apply -k pipelines
+oc apply -k "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/
+
+## prepare parameters
+cp "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing.yaml "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing-overridden.yaml
+sed -i "s|value: rhoai-edge-models|value: rhoai-edge-models-ci|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing-overridden.yaml
+
+## oc create pipeline run
+oc create -f "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing-overridden.yaml
+sleep 5 # Just to have the startTime field available
+
+PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "build.*housing" | tail -n 1)
+
+if [[ $PIPELINE_RUN_NAME == "" ]]; then
+  echo "Could not find any pipeline run"
+  exit 1
+fi
+
+## wait for the result
+waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
+PIPELINE_RUN_RESULT=$?
+
+saveArtifacts "$PIPELINE_RUN_NAME"
+
+if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
+  echo "Build pipeline failed, aborting further tests"
+  exit 1
+fi
+
+
+##### TEST MLFLOW IMAGE PIPELINE #####
+TEST_MLFLOW_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/test-mlflow-image-pipeline
+
+AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH="${CUSTOM_QUAY_SECRET_PATH:-/secrets/ai-edge-quay}"
+oc create secret generic rhoai-edge-openshift-ci-secret --from-file=.dockerconfigjson="$AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH"/dockerconfigjson --type=kubernetes.io/dockerconfigjson --dry-run=client -o yaml | oc apply -f -
+oc secret link pipeline rhoai-edge-openshift-ci-secret
+
+## oc apply -k pipelines
+oc apply -k "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/
+
+## oc create pipeline run
+oc create -f "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/test-mlflow-image-pipelinerun-tensorflow-housing.yaml
+sleep 5 # Just to have the startTime field available
+
+PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "test.*housing" | tail -n 1)
+
+if [[ $PIPELINE_RUN_NAME == "" ]]; then
+  echo "Could not find any pipeline run"
+  exit 1
+fi
+
+## wait for the result
+waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
+PIPELINE_RUN_RESULT=$?
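+# $? is waitForOpResult's exit code: 0 when the run reported Succeeded=True, 1 on timeout or failure. Artifacts are archived either way.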
+
+saveArtifacts "$PIPELINE_RUN_NAME"
+
+if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
+  echo "Test pipeline failed, aborting further tests"
+  exit 1
+fi
+
+echo "All pipelines finished successfully"

From 79334d522c5aa2375a2b6e91abe5eb228a131600 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Mari=C3=A1n=20Macik?=
Date: Wed, 13 Dec 2023 15:21:33 +0100
Subject: [PATCH 2/4] Wait for OpenShift Pipelines operator

---
 test/shell-pipeline-tests/common.sh         | 2 +-
 test/shell-pipeline-tests/pipelines-test.sh | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/test/shell-pipeline-tests/common.sh b/test/shell-pipeline-tests/common.sh
index 042e74c5..40ce8207 100644
--- a/test/shell-pipeline-tests/common.sh
+++ b/test/shell-pipeline-tests/common.sh
@@ -28,7 +28,7 @@ function waitForOpResult() {
 function saveArtifacts() {
   ## Backup all Pipeline Runs, Task Runs
   local PIPELINE_RUN_NAME=$1
-  local LOGS_DIR="${ARTIFACT_DIR}/$PIPELINE_RUN_NAME"
+  local LOGS_DIR="${ARTIFACT_DIR}/$PIPELINE_RUN_NAME" # ARTIFACT_DIR is an env var on OpenShift-CI
   mkdir -p "$LOGS_DIR"
   echo "Archiving YAML definitions and logs for '$PIPELINE_RUN_NAME' to '$LOGS_DIR'"
   oc get pipeline -o yaml > "${LOGS_DIR}"/pipelines.txt
diff --git a/test/shell-pipeline-tests/pipelines-test.sh b/test/shell-pipeline-tests/pipelines-test.sh
index 23e3c211..c1ba564d 100755
--- a/test/shell-pipeline-tests/pipelines-test.sh
+++ b/test/shell-pipeline-tests/pipelines-test.sh
@@ -8,6 +8,9 @@ NAMESPACE="pipeline-test"
 oc delete project "$NAMESPACE" --ignore-not-found --timeout=60s
 oc new-project "$NAMESPACE"
 
+echo "Waiting for OpenShift Pipelines operator to be fully installed"
+waitForOpResult 60 "True" "N/A" "oc get tektonconfig -n openshift-operators config -o jsonpath={.status.conditions[?\(@.type==\'Ready\'\)].status}"
+
 ##### BUILD CONTAINER IMAGE PIPELINE #####
 BUILD_CONTAINER_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/build-container-image-pipeline
 

From 1f7c42d7d26397227121212766edc25d2c3ce603 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Mari=C3=A1n=20Macik?=
Date: Thu, 14 Dec 2023 15:42:27 +0100
Subject: [PATCH 3/4] Add the housing pipeline

---
 Makefile                                    |  2 +
 test/shell-pipeline-tests/README.md         | 35 ++++++++
 test/shell-pipeline-tests/common.sh         |  1 -
 .../pipelines-test-openvino-bike-rentals.sh | 88 +++++++++++++++++++
 .../pipelines-test-tensorflow-housing.sh}   |  4 +-
 5 files changed, 127 insertions(+), 3 deletions(-)
 create mode 100644 test/shell-pipeline-tests/README.md
 create mode 100755 test/shell-pipeline-tests/openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh
 rename test/shell-pipeline-tests/{pipelines-test.sh => tensorflow-housing/pipelines-test-tensorflow-housing.sh} (97%)

diff --git a/Makefile b/Makefile
index 50ca5aa2..ae06c6b0 100644
--- a/Makefile
+++ b/Makefile
@@ -32,4 +32,6 @@ GO=go
 GOFLAGS=""
 
 test:
+	@(./test/shell-pipeline-tests/openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh)
+	@(./test/shell-pipeline-tests/tensorflow-housing/pipelines-test-tensorflow-housing.sh)
 	@(cd test/e2e-tests/tests && ${GO} test)
diff --git a/test/shell-pipeline-tests/README.md b/test/shell-pipeline-tests/README.md
new file mode 100644
index 00000000..0ac41256
--- /dev/null
+++ b/test/shell-pipeline-tests/README.md
@@ -0,0 +1,35 @@
+# Shell Pipeline Tests
+
+This directory contains two shell pipeline tests:
+* openvino-bike-rentals - OpenVINO version using the bike rentals model
+* tensorflow-housing - TensorFlow version using the housing model
+
+Both tests currently run the [build-container-image-pipeline](../../pipelines/tekton/build-container-image-pipeline)
+and the [test-mlflow-image-pipeline](../../pipelines/tekton/test-mlflow-image-pipeline). In the near future, the tests will be switched to the full [aiedge-e2e](../../pipelines/tekton/aiedge-e2e) version of the pipeline.
+After that, [GitOps pipeline](../../pipelines/tekton/gitops-update-pipeline) tests will be added as well.
+
+The scripts are primarily run in the OpenShift CI environment, so they make use of
+OpenShift CI secrets. To run them locally, you need to configure the credentials yourself; see the next section.
+
+## Local execution
+
+For local execution, these environment variables need to be set:
+
+* **ARTIFACT_DIR** - Directory where logs and YAML files from the namespace are stored for easier debugging.
+* **CUSTOM_AWS_SECRET_PATH** - Directory where credentials for the AWS S3 bucket are stored. The S3 bucket is used as the source of the AI model. The directory should contain 2 files:
+  * accessKey - containing the access key, sometimes also called the access key ID
+  * secretAccessKey - containing the secret access key
+* **CUSTOM_QUAY_SECRET_PATH** - Directory where credentials for the Quay repository are stored. The repository is used to publish the image after it is tested. The directory should contain the file:
+  * dockerconfigjson - without the leading '.' (dot), containing the full docker config.json with authentication to Quay.io or another registry
+
+After the credentials are configured, you can run the pipeline tests using:
+
+```shell
+ARTIFACT_DIR=./artifacts CUSTOM_AWS_SECRET_PATH=./secrets CUSTOM_QUAY_SECRET_PATH=./secrets ./openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh
+```
+and
+```shell
+ARTIFACT_DIR=./artifacts CUSTOM_AWS_SECRET_PATH=./secrets CUSTOM_QUAY_SECRET_PATH=./secrets ./tensorflow-housing/pipelines-test-tensorflow-housing.sh
+```
+
+This puts all the logs into the `$PWD/artifacts` directory and expects all the credential files to be stored under the `$PWD/secrets` directory.
diff --git a/test/shell-pipeline-tests/common.sh b/test/shell-pipeline-tests/common.sh
index 40ce8207..a8d5ab7c 100644
--- a/test/shell-pipeline-tests/common.sh
+++ b/test/shell-pipeline-tests/common.sh
@@ -22,7 +22,6 @@ function waitForOpResult() {
     res=$(eval "$cmd")
   done
   echo "waitForOpResult running command '$cmd' finished with expected result '$res'"
-  #TODO return code
 }
 
 function saveArtifacts() {
diff --git a/test/shell-pipeline-tests/openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh b/test/shell-pipeline-tests/openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh
new file mode 100755
index 00000000..cbe2d285
--- /dev/null
+++ b/test/shell-pipeline-tests/openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh
@@ -0,0 +1,88 @@
+#!/usr/bin/env bash
+REPO_ROOT_DIR=$(dirname "${BASH_SOURCE[0]}")/../../..
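+# Three levels up to the repo root: this script lives in test/shell-pipeline-tests/openvino-bike-rentals/.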
+PIPELINES_DIR="$REPO_ROOT_DIR/pipelines"
+
+source "$REPO_ROOT_DIR"/test/shell-pipeline-tests/common.sh
+
+NAMESPACE="pipeline-test-openvino-bike-rentals"
+oc delete project "$NAMESPACE" --ignore-not-found --timeout=60s
+oc new-project "$NAMESPACE"
+
+echo "Waiting for OpenShift Pipelines operator to be fully installed"
+waitForOpResult 60 "True" "N/A" "oc get tektonconfig -n openshift-operators config -o jsonpath={.status.conditions[?\(@.type==\'Ready\'\)].status}"
+
+##### BUILD CONTAINER IMAGE PIPELINE #####
+BUILD_CONTAINER_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/build-container-image-pipeline
+
+AWS_SECRET_PATH_TEMPLATE="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env.yaml
+AWS_SECRET_PATH="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env-overridden.yaml
+
+createS3Secret "$AWS_SECRET_PATH_TEMPLATE" "$AWS_SECRET_PATH"
+
+oc create -f "$AWS_SECRET_PATH"
+
+## oc apply -k pipelines
+oc apply -k "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/
+
+## prepare parameters
+cp "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals.yaml "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
+sed -i "s|value: rhoai-edge-models|value: rhoai-edge-models-ci|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
+sed -i "s|value: \"git\"|value: \"s3\"|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
+sed -i "s|value: pipelines/models/|value: \"\"|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
+
+## oc create pipeline run
+oc create -f "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
+sleep 5 # Just to have the startTime field available
+
+PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "build.*bike" | tail -n 1)
+
+if [[ $PIPELINE_RUN_NAME == "" ]]; then
+  echo "Could not find any pipeline run"
+  exit 1
+fi
+
+## wait for the result
+waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
+PIPELINE_RUN_RESULT=$?
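+# Capture the exit code immediately; the saveArtifacts call below runs further commands and would overwrite $?.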
+
+saveArtifacts "$PIPELINE_RUN_NAME"
+
+if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
+  echo "Build pipeline failed, aborting further tests"
+  exit 1
+fi
+
+
+##### TEST MLFLOW IMAGE PIPELINE #####
+TEST_MLFLOW_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/test-mlflow-image-pipeline
+
+AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH="${CUSTOM_QUAY_SECRET_PATH:-/secrets/ai-edge-quay}"
+oc create secret generic rhoai-edge-openshift-ci-secret --from-file=.dockerconfigjson="$AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH"/dockerconfigjson --type=kubernetes.io/dockerconfigjson --dry-run=client -o yaml | oc apply -f -
+oc secret link pipeline rhoai-edge-openshift-ci-secret
+
+## oc apply -k pipelines
+oc apply -k "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/
+
+## oc create pipeline run
+oc create -f "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/test-mlflow-image-pipelinerun-bike-rental.yaml
+sleep 5 # Just to have the startTime field available
+
+PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "test.*bike" | tail -n 1)
+
+if [[ $PIPELINE_RUN_NAME == "" ]]; then
+  echo "Could not find any pipeline run"
+  exit 1
+fi
+
+## wait for the result
+waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
+PIPELINE_RUN_RESULT=$?
+
+saveArtifacts "$PIPELINE_RUN_NAME"
+
+if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
+  echo "Test pipeline failed, aborting further tests"
+  exit 1
+fi
+
+echo "All pipelines finished successfully"
diff --git a/test/shell-pipeline-tests/pipelines-test.sh b/test/shell-pipeline-tests/tensorflow-housing/pipelines-test-tensorflow-housing.sh
similarity index 97%
rename from test/shell-pipeline-tests/pipelines-test.sh
rename to test/shell-pipeline-tests/tensorflow-housing/pipelines-test-tensorflow-housing.sh
index c1ba564d..1b7a0533 100755
--- a/test/shell-pipeline-tests/pipelines-test.sh
+++ b/test/shell-pipeline-tests/tensorflow-housing/pipelines-test-tensorflow-housing.sh
@@ -1,10 +1,10 @@
 #!/usr/bin/env bash
-REPO_ROOT_DIR=$(dirname "${BASH_SOURCE[0]}")/../..
+REPO_ROOT_DIR=$(dirname "${BASH_SOURCE[0]}")/../../..
 PIPELINES_DIR="$REPO_ROOT_DIR/pipelines"
 
 source "$REPO_ROOT_DIR"/test/shell-pipeline-tests/common.sh
 
-NAMESPACE="pipeline-test"
+NAMESPACE="pipeline-test-tensorflow-housing"
 oc delete project "$NAMESPACE" --ignore-not-found --timeout=60s
 oc new-project "$NAMESPACE"
 

From 77be5caa034379546c265eaa19a542101f789ea3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Mari=C3=A1n=20Macik?=
Date: Thu, 14 Dec 2023 13:05:46 +0100
Subject: [PATCH 4/4] Make pipeline runnable

---
 .../build-container-image-pipeline.yaml                       | 1 +
 .../build-container-image-pipelinerun-tensorflow-housing.yaml | 3 +++
 2 files changed, 4 insertions(+)

diff --git a/pipelines/tekton/build-container-image-pipeline/build-container-image-pipeline.yaml b/pipelines/tekton/build-container-image-pipeline/build-container-image-pipeline.yaml
index 6c5a873d..5557f10a 100644
--- a/pipelines/tekton/build-container-image-pipeline/build-container-image-pipeline.yaml
+++ b/pipelines/tekton/build-container-image-pipeline/build-container-image-pipeline.yaml
@@ -97,6 +97,7 @@ spec:
       runAfter:
         - kserve-download-model
         - git-clone-model-repo
+        - copy-model-from-pvc
       taskRef:
         kind: ClusterTask
         name: git-clone
diff --git a/pipelines/tekton/build-container-image-pipeline/build-container-image-pipelinerun-tensorflow-housing.yaml b/pipelines/tekton/build-container-image-pipeline/build-container-image-pipelinerun-tensorflow-housing.yaml
index be72120a..4b1db8d7 100644
--- a/pipelines/tekton/build-container-image-pipeline/build-container-image-pipelinerun-tensorflow-housing.yaml
+++ b/pipelines/tekton/build-container-image-pipeline/build-container-image-pipelinerun-tensorflow-housing.yaml
@@ -32,6 +32,9 @@ spec:
     - name: buildah-cache
       persistentVolumeClaim:
         claimName: buildah-cache-pvc
+    - name: model-workspace
+      persistentVolumeClaim:
+        claimName: basic-pvc
     - name: aws-secret
       secret:
         secretName: aws-env
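
Note: the `model-workspace` workspace added above binds to a PersistentVolumeClaim named `basic-pvc`, which must already exist in the namespace before the PipelineRun starts. A minimal sketch of creating such a claim, assuming a 1Gi ReadWriteOnce volume is enough for the model files (the size and access mode are assumptions, not part of this patch):

```shell
# Hypothetical setup step: create the PVC that the PipelineRun's model-workspace expects.
oc apply -f - <<'EOF'
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: basic-pvc
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 1Gi
EOF
```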