Merge pull request #191 from MarianMacik/RHOAIENG-905
RHOAIENG-905 - Create a PR check for testing README file (without gitops)
openshift-merge-bot[bot] authored Dec 19, 2023
2 parents 4c2169d + 77be5ca commit 70b248f
Showing 8 changed files with 274 additions and 1 deletion.
2 changes: 2 additions & 0 deletions Makefile
@@ -32,4 +32,6 @@ GO=go
GOFLAGS=""

test:
	@(./test/shell-pipeline-tests/openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh)
	@(./test/shell-pipeline-tests/tensorflow-housing/pipelines-test-tensorflow-housing.sh)
	@(cd test/e2e-tests/tests && ${GO} test)
2 changes: 1 addition & 1 deletion pipelines/tekton/build-container-image-pipeline/aws-env.yaml
@@ -5,4 +5,4 @@ metadata:

stringData:
  aws-storage-config: |+
    { "type": "s3", "access_key_id": "$", "secret_access_key": "$", "endpoint_url": "https://example.amazonaws.com/", "region": "us-west-1" }
    { "type": "s3", "access_key_id": "{{ YOUR_AWS_ACCESS_KEY }}", "secret_access_key": "{{ YOUR_AWS_SECRET_KEY }}", "endpoint_url": "{{ S3_ENDPOINT__https://example.amazonaws.com/ }}", "region": "{{ S3_REGION__us-west-1 }}" }
@@ -97,6 +97,7 @@ spec:
    runAfter:
      - kserve-download-model
      - git-clone-model-repo
      - copy-model-from-pvc
    taskRef:
      kind: ClusterTask
      name: git-clone
@@ -32,6 +32,9 @@ spec:
    - name: buildah-cache
      persistentVolumeClaim:
        claimName: buildah-cache-pvc
    - name: model-workspace
      persistentVolumeClaim:
        claimName: basic-pvc
    - name: aws-secret
      secret:
        secretName: aws-env
35 changes: 35 additions & 0 deletions test/shell-pipeline-tests/README.md
@@ -0,0 +1,35 @@
# Shell Pipeline Tests

This directory contains two shell pipeline tests:
* openvino-bike-rentals - an OpenVINO version using the bike rentals model
* tensorflow-housing - a TensorFlow version using the housing model

Both tests currently run the [build-container-image-pipeline](../../pipelines/tekton/build-container-image-pipeline)
and the [test-mlflow-image-pipeline](../../pipelines/tekton/test-mlflow-image-pipeline). In the near future the tests will be switched to the full [aiedge-e2e](../../pipelines/tekton/aiedge-e2e) version of the pipeline.
After that, [GitOps pipeline](../../pipelines/tekton/gitops-update-pipeline) tests will be added as well.

The scripts are primarily run in the OpenShift CI environment, so they make use of
OpenShift CI secrets. To run them locally, you need to configure these secrets yourself; see the next section.

## Local execution

For local execution, these environment variables need to be set:

* **ARTIFACT_DIR** - Directory where logs and YAML files from the test namespace are stored for easier debugging.
* **CUSTOM_AWS_SECRET_PATH** - Directory with the credentials for the AWS S3 bucket that serves as the source of the AI model. It should contain two files:
  * accessKey - the access key, sometimes also called the access key ID
  * secretAccessKey - the secret access key
* **CUSTOM_QUAY_SECRET_PATH** - Directory with the credentials for the Quay repository to which the image is published after it is tested. It should contain one file:
  * dockerconfigjson - the full Docker config.json with authentication for Quay.io or another registry; note the file name has no leading '.' (dot)
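
For example, a local `./secrets` directory can be prepared along these lines (a minimal sketch; the key values are placeholders, and `podman login --authfile` is just one way to produce a Docker-style config file with registry credentials):

```shell
mkdir -p ./artifacts ./secrets
# AWS S3 credentials (placeholder values)
echo -n "YOUR_ACCESS_KEY_ID" > ./secrets/accessKey
echo -n "YOUR_SECRET_ACCESS_KEY" > ./secrets/secretAccessKey
# Registry credentials; --authfile writes a docker config.json-style file
podman login --authfile ./secrets/dockerconfigjson quay.io
```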

After the credentials are configured, you can run the pipeline tests using:

```shell
ARTIFACT_DIR=./artifacts CUSTOM_AWS_SECRET_PATH=./secrets CUSTOM_QUAY_SECRET_PATH=./secrets ./openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh
```
and
```shell
ARTIFACT_DIR=./artifacts CUSTOM_AWS_SECRET_PATH=./secrets CUSTOM_QUAY_SECRET_PATH=./secrets ./tensorflow-housing/pipelines-test-tensorflow-housing.sh
```

This puts all logs into the `$PWD/artifacts` directory and expects all credential files to be stored under the `$PWD/secrets` directory.
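
Alternatively, the `test` target in the repository `Makefile` runs both shell pipeline tests in sequence (a sketch, assuming the same variables are exported first; note the target also runs the Go e2e tests):

```shell
export ARTIFACT_DIR=./artifacts
export CUSTOM_AWS_SECRET_PATH=./secrets
export CUSTOM_QUAY_SECRET_PATH=./secrets
make test
```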
58 changes: 58 additions & 0 deletions test/shell-pipeline-tests/common.sh
@@ -0,0 +1,58 @@
#!/usr/bin/env bash

function waitForOpResult() {
  # Poll a command until it prints the expected result; fail after max_retry
  # attempts or as soon as the unexpected result appears.
  max_retry=$1
  shift
  expected=$1
  shift
  unexpected=$1
  shift
  cmd=$1
  counter=0
  echo "waitForOpResult waiting for command '$cmd' to finish with expected result '$expected' or unexpected result '$unexpected'"
  res=$(eval "$cmd")
  until [ "$res" == "$expected" ]
  do
    [[ $counter -eq $max_retry ]] && echo "Failed! waitForOpResult running command '$cmd' and waiting for expected output '$expected' reached max retry count '$max_retry'." >&2 && return 1
    [[ "$res" == "$unexpected" ]] && echo "Failed! waitForOpResult running command '$cmd' and waiting for expected output '$expected' finished with unexpected result '$res'." >&2 && return 1
    sleep 5
    ((counter++))
    echo "Trying again. Try #$counter out of $max_retry" >&2
    res=$(eval "$cmd")
  done
  echo "waitForOpResult running command '$cmd' finished with expected result '$res'"
}
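
# Example usage (hypothetical deployment name; polls up to 60 times, 5s apart):
#   waitForOpResult 60 "True" "False" "oc get deployment my-app -o jsonpath={.status.conditions[0].status}"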

function saveArtifacts() {
  # Back up the YAML definitions and logs of all Pipelines, Tasks, and the
  # given PipelineRun with its TaskRuns, for debugging after the CI run.
  local PIPELINE_RUN_NAME=$1
  local LOGS_DIR="${ARTIFACT_DIR}/$PIPELINE_RUN_NAME" # ARTIFACT_DIR is an env var on OpenShift CI
  mkdir -p "$LOGS_DIR"
  echo "Archiving YAML definitions and logs for '$PIPELINE_RUN_NAME' to '$LOGS_DIR'"
  oc get pipeline -o yaml > "${LOGS_DIR}"/pipelines.txt
  oc get pipelinerun "$PIPELINE_RUN_NAME" -o yaml > "${LOGS_DIR}"/pipelineRuns.txt
  oc get task -o yaml > "${LOGS_DIR}"/tasks.txt
  oc get taskrun -l "tekton.dev/pipelineRun=$PIPELINE_RUN_NAME" -o yaml > "${LOGS_DIR}"/taskRuns.txt
  oc logs -l "tekton.dev/pipelineRun=$PIPELINE_RUN_NAME" --all-containers --prefix --tail=-1 > "${LOGS_DIR}"/logs.txt
}
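
# Example usage (ARTIFACT_DIR must be set, as it is in OpenShift CI):
#   saveArtifacts "$PIPELINE_RUN_NAME"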

function createS3Secret() {
  # Render the aws-env.yaml Secret template into a usable manifest by
  # substituting the {{ ... }} placeholders with real S3 credentials.
  local AWS_SECRET_PATH_TEMPLATE=$1
  local AWS_SECRET_PATH=$2

  local AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH
  local AWS_ACCESS_KEY
  local AWS_SECRET_KEY

  AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH="${CUSTOM_AWS_SECRET_PATH:-/secrets/ai-edge-aws}"
  AWS_ACCESS_KEY=$(cat "$AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH"/accessKey)
  AWS_SECRET_KEY=$(cat "$AI_EDGE_AWS_VAULT_OPENSHIFT_CI_SECRET_PATH"/secretAccessKey)

  cp "$AWS_SECRET_PATH_TEMPLATE" "$AWS_SECRET_PATH"

  sed -i "s|{{ YOUR_AWS_ACCESS_KEY }}|${AWS_ACCESS_KEY}|" "$AWS_SECRET_PATH"
  sed -i "s|{{ YOUR_AWS_SECRET_KEY }}|${AWS_SECRET_KEY}|" "$AWS_SECRET_PATH"
  sed -i "s|{{ S3_ENDPOINT__https://example.amazonaws.com/ }}|https://s3.us-west-1.amazonaws.com|" "$AWS_SECRET_PATH"
  sed -i "s|{{ S3_REGION__us-west-1 }}|us-west-1|" "$AWS_SECRET_PATH"
}
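
# Example usage (template and rendered output paths, as used by the pipeline test scripts):
#   createS3Secret path/to/aws-env.yaml path/to/aws-env-overridden.yaml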
88 changes: 88 additions & 0 deletions test/shell-pipeline-tests/openvino-bike-rentals/pipelines-test-openvino-bike-rentals.sh
@@ -0,0 +1,88 @@
#!/usr/bin/env bash
REPO_ROOT_DIR=$(dirname "${BASH_SOURCE[0]}")/../../..
PIPELINES_DIR="$REPO_ROOT_DIR/pipelines"

source "$REPO_ROOT_DIR"/test/shell-pipeline-tests/common.sh

NAMESPACE="pipeline-test-openvino-bike-rentals"
oc delete project "$NAMESPACE" --ignore-not-found --timeout=60s
oc new-project "$NAMESPACE"

echo "Waiting for OpenShift Pipelines operator to be fully installed"
waitForOpResult 60 "True" "N/A" "oc get tektonconfig -n openshift-operators config -o jsonpath={.status.conditions[?\(@.type==\'Ready\'\)].status}"

##### BUILD CONTAINER IMAGE PIPELINE #####
BUILD_CONTAINER_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/build-container-image-pipeline

AWS_SECRET_PATH_TEMPLATE="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env.yaml
AWS_SECRET_PATH="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env-overridden.yaml

createS3Secret "$AWS_SECRET_PATH_TEMPLATE" "$AWS_SECRET_PATH"

oc create -f "$AWS_SECRET_PATH"

## oc apply -k pipelines
oc apply -k "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/

## prepare parameters
cp "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals.yaml "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
sed -i "s|value: rhoai-edge-models|value: rhoai-edge-models-ci|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
sed -i "s|value: \"git\"|value: \"s3\"|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
sed -i "s|value: pipelines/models/|value: \"\"|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml

## oc create pipeline run
oc create -f "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-bike-rentals-overridden.yaml
sleep 5 # Just to have the startTime field available

PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "build.*bike" | tail -n 1)

if [[ $PIPELINE_RUN_NAME == "" ]]; then
  echo "Could not find any pipeline run"
  exit 1
fi

## wait for the result
waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
PIPELINE_RUN_RESULT=$?

saveArtifacts "$PIPELINE_RUN_NAME"

if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
  echo "Build pipeline failed, aborting further tests"
  exit 1
fi


##### TEST MLFLOW IMAGE PIPELINE #####
TEST_MLFLOW_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/test-mlflow-image-pipeline

AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH="${CUSTOM_QUAY_SECRET_PATH:-/secrets/ai-edge-quay}"
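## Create the registry secret and link it to the 'pipeline' service account so Tekton task pods can push the tested image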
oc create secret generic rhoai-edge-openshift-ci-secret --from-file=.dockerconfigjson="$AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH"/dockerconfigjson --type=kubernetes.io/dockerconfigjson --dry-run=client -o yaml | oc apply -f -
oc secret link pipeline rhoai-edge-openshift-ci-secret

## oc apply -k pipelines
oc apply -k "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/

## oc create pipeline run
oc create -f "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/test-mlflow-image-pipelinerun-bike-rental.yaml
sleep 5 # Just to have the startTime field available

PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "test.*bike" | tail -n 1)

if [[ $PIPELINE_RUN_NAME == "" ]]; then
  echo "Could not find any pipeline run"
  exit 1
fi

## wait for the result
waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
PIPELINE_RUN_RESULT=$?

saveArtifacts "$PIPELINE_RUN_NAME"

if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
  echo "Test pipeline failed, aborting further tests"
  exit 1
fi

echo "All pipelines finished successfully"
86 changes: 86 additions & 0 deletions test/shell-pipeline-tests/tensorflow-housing/pipelines-test-tensorflow-housing.sh
@@ -0,0 +1,86 @@
#!/usr/bin/env bash
REPO_ROOT_DIR=$(dirname "${BASH_SOURCE[0]}")/../../..
PIPELINES_DIR="$REPO_ROOT_DIR/pipelines"

source "$REPO_ROOT_DIR"/test/shell-pipeline-tests/common.sh

NAMESPACE="pipeline-test-tensorflow-housing"
oc delete project "$NAMESPACE" --ignore-not-found --timeout=60s
oc new-project "$NAMESPACE"

echo "Waiting for OpenShift Pipelines operator to be fully installed"
waitForOpResult 60 "True" "N/A" "oc get tektonconfig -n openshift-operators config -o jsonpath={.status.conditions[?\(@.type==\'Ready\'\)].status}"

##### BUILD CONTAINER IMAGE PIPELINE #####
BUILD_CONTAINER_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/build-container-image-pipeline

AWS_SECRET_PATH_TEMPLATE="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env.yaml
AWS_SECRET_PATH="$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/aws-env-overridden.yaml

createS3Secret "$AWS_SECRET_PATH_TEMPLATE" "$AWS_SECRET_PATH"

oc create -f "$AWS_SECRET_PATH"

## oc apply -k pipelines
oc apply -k "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/

## prepare parameters
cp "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing.yaml "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing-overridden.yaml
sed -i "s|value: rhoai-edge-models|value: rhoai-edge-models-ci|" "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing-overridden.yaml

## oc create pipeline run
oc create -f "$BUILD_CONTAINER_IMAGE_PIPELINE_PATH"/build-container-image-pipelinerun-tensorflow-housing-overridden.yaml
sleep 5 # Just to have the startTime field available

PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "build.*housing" | tail -n 1)

if [[ $PIPELINE_RUN_NAME == "" ]]; then
  echo "Could not find any pipeline run"
  exit 1
fi

## wait for the result
waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
PIPELINE_RUN_RESULT=$?

saveArtifacts "$PIPELINE_RUN_NAME"

if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
  echo "Build pipeline failed, aborting further tests"
  exit 1
fi


##### TEST MLFLOW IMAGE PIPELINE #####
TEST_MLFLOW_IMAGE_PIPELINE_PATH="$PIPELINES_DIR"/tekton/test-mlflow-image-pipeline

AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH="${CUSTOM_QUAY_SECRET_PATH:-/secrets/ai-edge-quay}"
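## Create the registry secret and link it to the 'pipeline' service account so Tekton task pods can push the tested image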
oc create secret generic rhoai-edge-openshift-ci-secret --from-file=.dockerconfigjson="$AI_EDGE_QUAY_SECRET_OPENSHIFT_CI_PATH"/dockerconfigjson --type=kubernetes.io/dockerconfigjson --dry-run=client -o yaml | oc apply -f -
oc secret link pipeline rhoai-edge-openshift-ci-secret

## oc apply -k pipelines
oc apply -k "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/

## oc create pipeline run
oc create -f "$TEST_MLFLOW_IMAGE_PIPELINE_PATH"/test-mlflow-image-pipelinerun-tensorflow-housing.yaml
sleep 5 # Just to have the startTime field available

PIPELINE_RUN_NAME=$(oc get pipelinerun --sort-by={.status.startTime} -o=custom-columns=NAME:.metadata.name | grep "test.*housing" | tail -n 1)

if [[ $PIPELINE_RUN_NAME == "" ]]; then
  echo "Could not find any pipeline run"
  exit 1
fi

## wait for the result
waitForOpResult 200 "True" "False" "oc get pipelinerun $PIPELINE_RUN_NAME -o jsonpath={.status.conditions[?\(@.type==\'Succeeded\'\)].status}"
PIPELINE_RUN_RESULT=$?

saveArtifacts "$PIPELINE_RUN_NAME"

if [[ $PIPELINE_RUN_RESULT != 0 ]]; then
  echo "Test pipeline failed, aborting further tests"
  exit 1
fi

echo "All pipelines finished successfully"
