diff --git a/.gitignore b/.gitignore index 40fd440a0f..e170017db5 100644 --- a/.gitignore +++ b/.gitignore @@ -71,7 +71,6 @@ examples/models/keras_mnist/build/ examples/models/mean_classifier/build/ notebooks/my-ml-deployment/ -wrappers/s2i/python/Dockerfile wrappers/s2i/python/_wrappers .Rhistory diff --git a/ci_build_and_push_images.sh b/ci_build_and_push_images.sh index a13cb207a6..74aba271b8 100755 --- a/ci_build_and_push_images.sh +++ b/ci_build_and_push_images.sh @@ -37,7 +37,8 @@ function build_push_operator { -C operator \ docker-build \ docker-push \ - docker-build-redhat + docker-build-redhat \ + docker-push-redhat OPERATOR_EXIT_VALUE=$? } @@ -46,7 +47,8 @@ function build_push_executor { -C executor \ docker-build \ docker-push \ - docker-build-redhat + docker-build-redhat \ + docker-push-redhat EXECUTOR_EXIT_VALUE=$? } @@ -58,7 +60,8 @@ function build_push_engine { -C engine \ build_image \ push_to_registry \ - docker-build-redhat + docker-build-redhat \ + docker-push-redhat ENGINE_EXIT_VALUE=$? } @@ -68,8 +71,7 @@ function build_push_mock { build_rest \ build_grpc \ push_rest \ - push_grpc \ - docker-build-redhat + push_grpc MOCK_MODEL_EXIT_VALUE=$? } @@ -77,8 +79,7 @@ function build_push_alibi_detect { make \ -C components/alibi-detect-server \ docker-build \ - docker-push \ - docker-build-redhat + docker-push ALIBI_DETECT_EXIT_VALUE=$? } @@ -86,11 +87,59 @@ function build_push_request_logger { make \ -C components/seldon-request-logger \ build_image \ - push_image \ - docker-build-redhat + push_image LOGGER_EXIT_VALUE=$? } +function build_push_sklearnserver { + make \ + -C servers/sklearnserver \ + build_all \ + push_all + SKLEARN_EXIT_VALUE=$? +} + +function build_push_mlflowserver { + make \ + -C servers/mlflowserver \ + build_all \ + push_all + MLFLOW_EXIT_VALUE=$? +} + +function build_push_xgboostserver { + make \ + -C servers/xgboostserver \ + build_all \ + push_all + XGBOOST_EXIT_VALUE=$? +} + +function build_push_tfproxy { + make \ + -C integrations/tfserving \ + build_all \ + push_all + TFPROXY_EXIT_VALUE=$? +} + +function build_push_alibi_explainer { + make \ + -C components/alibi-explain-server \ + docker-build \ + docker-push + EXPLAIN_EXIT_VALUE=$? +} + +function build_push_storage_initializer { + make \ + -C components/storage-initializer \ + docker-build \ + docker-push + STORAGE_INITIALIZER_EXIT_VALUE=$? 
+} + + build_push_python build_push_operator build_push_executor @@ -98,6 +147,12 @@ build_push_engine build_push_mock build_push_alibi_detect build_push_request_logger +build_push_sklearnserver +build_push_mlflowserver +build_push_xgboostserver +build_push_tfproxy +build_push_alibi_explainer +build_push_storage_initializer ####################################### # EXIT STOPS COMMANDS FROM HERE ONWARDS @@ -122,6 +177,12 @@ exit $((${PYTHON_EXIT_VALUE} \ + ${EXECUTOR_EXIT_VALUE} \ + ${MOCK_MODEL_EXIT_VALUE} \ + ${ALIBI_DETECT_EXIT_VALUE} \ - + ${LOGGER_EXIT_VALUE})) + + ${LOGGER_EXIT_VALUE} \ + + ${SKLEARN_EXIT_VALUE} \ + + ${MLFLOW_EXIT_VALUE} \ + + ${XGBOOST_EXIT_VALUE} \ + + ${TFPROXY_EXIT_VALUE} \ + + ${STORAGE_INITIALIZER_EXIT_VALUE} \ + + ${EXPLAIN_EXIT_VALUE})) diff --git a/components/alibi-detect-server/Dockerfile b/components/alibi-detect-server/Dockerfile index 1ee71a2f6a..b309ff154e 100644 --- a/components/alibi-detect-server/Dockerfile +++ b/components/alibi-detect-server/Dockerfile @@ -1,14 +1,15 @@ -FROM python:3.7 - -RUN apt-get update \ - && apt-get install -y --no-install-recommends git \ - && apt-get purge -y --auto-remove \ - && rm -rf /var/lib/apt/lists/* - -WORKDIR /workspace +FROM registry.access.redhat.com/ubi8/python-36 +LABEL name="Seldon Alibi Detect Server" \ + vendor="Seldon Technologies" \ + version="1.1.1-rc" \ + release="1" \ + summary="Alibi Detect Server for Seldon Core" \ + description="The Alibi Detect Server provides outlier, drift and adversarial detection services for Seldon Core" ADD requirements_server.txt . +RUN pip install pip -U + RUN pip install -r requirements_server.txt # Fix cloudevents bug: https://github.com/cloudevents/sdk-python/issues/24 @@ -21,4 +22,12 @@ COPY setup.py . RUN pip install -e . +# Add licences +RUN pip install pip-licenses +RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ + pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt +USER root +RUN mv ./licenses /licenses +USER default + ENTRYPOINT ["python", "-m", "adserver"] diff --git a/components/alibi-detect-server/Dockerfile.redhat b/components/alibi-detect-server/Dockerfile.redhat deleted file mode 100644 index bdaeb1ef14..0000000000 --- a/components/alibi-detect-server/Dockerfile.redhat +++ /dev/null @@ -1,33 +0,0 @@ -FROM registry.access.redhat.com/ubi8/python-36 -LABEL name="Seldon Alibi Detect Server" \ - vendor="Seldon Technologies" \ - version="v1.1.0" \ - release="1" \ - summary="Alibi Detect Server for Seldon Core" \ - description="The Alibi Detect Server provides outlier, drift and adversarial detection services for Seldon Core" - -ADD requirements_server.txt . - -RUN pip install pip -U - -RUN pip install -r requirements_server.txt - -# Fix cloudevents bug: https://github.com/cloudevents/sdk-python/issues/24 -RUN git clone --branch 24-extensions https://github.com/ryandawsonuk/sdk-python.git && \ - cd sdk-python && \ - pip install -e . - -COPY adserver adserver -COPY setup.py . - -RUN pip install -e . 
- -# Add licences -RUN pip install pip-licenses -RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ - pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt -USER root -RUN mv ./licenses /licenses -USER default - -ENTRYPOINT ["python", "-m", "adserver"] diff --git a/components/alibi-detect-server/Makefile b/components/alibi-detect-server/Makefile index cb0da1b306..5690555ad6 100644 --- a/components/alibi-detect-server/Makefile +++ b/components/alibi-detect-server/Makefile @@ -1,3 +1,4 @@ +SHELL := /bin/bash VERSION := $(shell cat ../../version.txt) IMAGE=alibi-detect-server @@ -17,12 +18,6 @@ test: type_check lint: black . -docker-build: - docker build -f Dockerfile -t seldonio/${IMAGE}:${VERSION} . - -docker-push: - docker push seldonio/${IMAGE}:${VERSION} - # # Local Run # @@ -52,24 +47,23 @@ curl-outlier-detector-scores: curl -v localhost:8080/ -d @./input.json -H "Alibi-Detect-Return-Feature-Score: true" -H "Alibi-Detect-Return-Instance-Score: true" -# -# RedHat -# +docker-build: + docker build -f Dockerfile -t seldonio/${IMAGE}:${VERSION} . -IMAGE_NAME_BASE=alibi-detect-server -IMG_VERSION_REDHAT ?= ${IMAGE_NAME_BASE}-ubi8:${VERSION} -IMG_REDHAT ?= seldonio/${IMG_VERSION_REDHAT} +docker-push: + docker push seldonio/${IMAGE}:${VERSION} -# Build the docker image for Redhat -docker-build-redhat: - docker build . -f Dockerfile.redhat -t ${IMG_REDHAT} +kind_load: docker-build + kind load docker-image seldonio/${IMAGE}:${VERSION} -# Push the docker image -docker-push-redhat: - docker push ${IMG_REDHAT} +# +# RedHat +# # password can be found at: https://connect.redhat.com/project/3993461/view redhat-image-scan: - docker login -u unused scan.connect.redhat.com - docker tag ${IMG_REDHAT} scan.connect.redhat.com/ospid-32ed6498-bce5-4c3b-9486-fe1c6e2582d3/${IMG_VERSION_REDHAT} - docker push scan.connect.redhat.com/ospid-32ed6498-bce5-4c3b-9486-fe1c6e2582d3/${IMG_VERSION_REDHAT} + docker pull seldonio/${IMAGE}:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_alibi_detect} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag seldonio/${IMAGE}:${VERSION} scan.connect.redhat.com/ospid-32ed6498-bce5-4c3b-9486-fe1c6e2582d3/${IMAGE}:${VERSION} + docker push scan.connect.redhat.com/ospid-32ed6498-bce5-4c3b-9486-fe1c6e2582d3/${IMAGE}:${VERSION} diff --git a/components/alibi-explain-server/.gitignore b/components/alibi-explain-server/.gitignore new file mode 100644 index 0000000000..628cd96f3b --- /dev/null +++ b/components/alibi-explain-server/.gitignore @@ -0,0 +1 @@ +kfserving diff --git a/components/alibi-explain-server/Dockerfile b/components/alibi-explain-server/Dockerfile new file mode 100644 index 0000000000..9a5ae81a0c --- /dev/null +++ b/components/alibi-explain-server/Dockerfile @@ -0,0 +1,30 @@ +FROM registry.access.redhat.com/ubi8/python-36 +LABEL name="Seldon Alibi Wrapper" \ + vendor="Seldon Technologies" \ + version="1.1.1-rc" \ + release="1" \ + summary="Alibi Explainer Wrapper for Seldon Core" \ + description="Allows Seldon Core inference models to run with a black box model explanation model from the Alibi:Explain project" + + +COPY kfserving/python/alibiexplainer alibiexplainer +COPY kfserving/python/kfserving kfserving +COPY kfserving/python/third_party third_party + +USER root +RUN pip install --upgrade pip && pip install -e ./kfserving +RUN git clone 
https://github.com/SeldonIO/alibi.git && \ + cd alibi && \ + pip install . +RUN pip install -e ./alibiexplainer + +RUN chmod -R a+rwx /opt/app-root/lib/python3.6 + +# Add licences +RUN pip install pip-licenses +RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ + pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt +RUN mv ./licenses /licenses +USER default + +ENTRYPOINT ["python", "-m", "alibiexplainer"] diff --git a/components/alibi-explain-server/Makefile b/components/alibi-explain-server/Makefile new file mode 100644 index 0000000000..ecefabc8a4 --- /dev/null +++ b/components/alibi-explain-server/Makefile @@ -0,0 +1,26 @@ +SHELL := /bin/bash +VERSION := $(shell cat ../../version.txt) +IMAGE=alibiexplainer + +kfserving: + git clone -b seldon_grpc_explainer https://github.com/seldonio/kfserving.git + +docker-build: kfserving + docker build --file=Dockerfile --force-rm=true -t seldonio/${IMAGE}:${VERSION} . + +docker-push: + docker push seldonio/${IMAGE}:${VERSION} + +kind_load: docker-build + kind load docker-image seldonio/${IMAGE}:${VERSION} + +# password can be found at: https://connect.redhat.com/project/3987291/view +redhat-image-scan: + docker pull seldonio/${IMAGE}:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_alibi_explain} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag seldonio/${IMAGE}:${VERSION} scan.connect.redhat.com/ospid-02f3e15b-c16f-4353-affa-61d5f3c6408b/${IMAGE}:${VERSION} + docker push scan.connect.redhat.com/ospid-02f3e15b-c16f-4353-affa-61d5f3c6408b/${IMAGE}:${VERSION} + +clean: + rm -rf kfserving diff --git a/components/seldon-request-logger/Dockerfile b/components/seldon-request-logger/Dockerfile index 3f6943d04c..ba3a597002 100644 --- a/components/seldon-request-logger/Dockerfile +++ b/components/seldon-request-logger/Dockerfile @@ -1,8 +1,24 @@ -FROM python:3.7-slim -COPY . 
/app -WORKDIR /app +FROM registry.access.redhat.com/ubi8/python-36 +LABEL name="Seldon Request Logger" \ + vendor="Seldon Technologies" \ + version="1.1.1-rc" \ + release="1" \ + summary="The payload logger for Seldon Core" \ + description="The Seldon Payload Logger allows request and response payloads from a Seldon Core inference graph to be processed and sent to an ELK endpoint" + +COPY app app +COPY requirements.txt requirements.txt RUN pip install -r requirements.txt RUN pip install gunicorn + +# Add licences +RUN pip install pip-licenses +RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ + pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt +USER root +RUN mv ./licenses /licenses +USER default + EXPOSE 8080 ENTRYPOINT ["python"] CMD ["app/default_logger.py"] \ No newline at end of file diff --git a/components/seldon-request-logger/Dockerfile.redhat b/components/seldon-request-logger/Dockerfile.redhat deleted file mode 100644 index 7b753c7585..0000000000 --- a/components/seldon-request-logger/Dockerfile.redhat +++ /dev/null @@ -1,24 +0,0 @@ -FROM registry.access.redhat.com/ubi8/python-36 -LABEL name="Seldon Request Logger" \ - vendor="Seldon Technologies" \ - version="v1.1.0" \ - release="1" \ - summary="The payload logger for Seldon Core" \ - description="The Seldon Payload Logger allows request and response payloads from a Seldon Core inference graph to be processed and sent to an ELK endpoint" - -COPY app app -COPY requirements.txt requirements.txt -RUN pip install -r requirements.txt -RUN pip install gunicorn - -# Add licences -RUN pip install pip-licenses -RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ - pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt -USER root -RUN mv ./licenses /licenses -USER default - -EXPOSE 8080 -ENTRYPOINT ["python"] -CMD ["app/default_logger.py"] \ No newline at end of file diff --git a/components/seldon-request-logger/Makefile b/components/seldon-request-logger/Makefile index 176712ed15..467d531739 100644 --- a/components/seldon-request-logger/Makefile +++ b/components/seldon-request-logger/Makefile @@ -1,11 +1,6 @@ +SHELL := /bin/bash VERSION := $(shell cat ../../version.txt) -build_image: - docker build . -t seldonio/seldon-request-logger:${VERSION} - -push_image: - docker push seldonio/seldon-request-logger:${VERSION} - run_container: docker run -p 2222:8080 seldonio/seldon-request-logger:latest @@ -40,24 +35,26 @@ test_single_dim: -d '{"request": {"meta": {"puid": "71dlk7k1rhmci0cd8g5rmeolmn", "tags": {}, "routing": {}, "requestPath": {}, "metrics": []}, "data": {"names": ["f0", "f1"], "ndarray": [0.77, 0.63]}, "date": "2019-06-17T10:59:55.693Z[GMT]"}, "response": {"meta": {"puid": "71dlk7k1rhmci0cd8g5rmeolmn", "tags": {}, "routing": {}, "requestPath": {"classifier": "seldonio/mock_classifier:1.0"}, "metrics": []}, "data": {"names": ["proba"], "ndarray": [0.09826376903346358]}, "date": "2019-06-17T10:59:55.696Z[GMT]"}, "sdepName": "seldon-single-model"}' -# -# RedHat -# +IMAGE=seldon-request-logger -IMAGE_NAME_BASE=seldon-request-logger -IMG_VERSION_REDHAT ?= ${IMAGE_NAME_BASE}-ubi8:${VERSION} -IMG_REDHAT ?= seldonio/${IMG_VERSION_REDHAT} +build_image: + docker build . 
-t seldonio/${IMAGE}:${VERSION} + +push_image: + docker push seldonio/${IMAGE}:${VERSION} + +kind_load: build_image + kind load docker-image seldonio/${IMAGE}:${VERSION} -# Build the docker image for Redhat -docker-build-redhat: - docker build . -f Dockerfile.redhat -t ${IMG_REDHAT} -# Push the docker image -docker-push-redhat: - docker push ${IMG_REDHAT} +# +# RedHat +# # password can be found at: https://connect.redhat.com/project/3993051/view redhat-image-scan: - docker login -u unused scan.connect.redhat.com - docker tag ${IMG_REDHAT} scan.connect.redhat.com/ospid-62dca88a-015b-4d08-8c52-b709e55a7790/${IMG_VERSION_REDHAT} - docker push scan.connect.redhat.com/ospid-62dca88a-015b-4d08-8c52-b709e55a7790/${IMG_VERSION_REDHAT} + docker pull seldonio/${IMAGE}:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_request_logger} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag seldonio/${IMAGE}:${VERSION} scan.connect.redhat.com/ospid-62dca88a-015b-4d08-8c52-b709e55a7790/${IMAGE}:${VERSION} + docker push scan.connect.redhat.com/ospid-62dca88a-015b-4d08-8c52-b709e55a7790/${IMAGE}:${VERSION} diff --git a/components/storage-initializer/.gitignore b/components/storage-initializer/.gitignore new file mode 100644 index 0000000000..628cd96f3b --- /dev/null +++ b/components/storage-initializer/.gitignore @@ -0,0 +1 @@ +kfserving diff --git a/components/storage-initializer/Dockerfile b/components/storage-initializer/Dockerfile new file mode 100644 index 0000000000..100db2f559 --- /dev/null +++ b/components/storage-initializer/Dockerfile @@ -0,0 +1,23 @@ +FROM registry.access.redhat.com/ubi8/python-36 +LABEL name="Storage Initializer" \ + vendor="Seldon Technologies" \ + version="1.1.1-rc" \ + release="1" \ + summary="Storage Initializer for Seldon Core" \ + description="Allows Seldon Core to download artifacts from cloud and local storage to a local volume" + +USER root +COPY ./kfserving/python/kfserving kfserving +RUN pip install --upgrade pip && pip install ./kfserving + +COPY ./kfserving/python/storage-initializer storage-initializer +COPY ./kfserving/python/third_party third_party + +# Add licences +RUN pip install pip-licenses +RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ + pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt +RUN mv ./licenses /licenses +USER default + +ENTRYPOINT ["./storage-initializer/scripts/initializer-entrypoint"] diff --git a/components/storage-initializer/Makefile b/components/storage-initializer/Makefile new file mode 100644 index 0000000000..abfe8fa6a6 --- /dev/null +++ b/components/storage-initializer/Makefile @@ -0,0 +1,26 @@ +SHELL := /bin/bash +VERSION := $(shell cat ../../version.txt) +IMAGE=storage-initializer + +kfserving: + git clone -b seldon_grpc_explainer https://github.com/seldonio/kfserving.git + +docker-build: kfserving + docker build --file=Dockerfile --force-rm=true -t seldonio/${IMAGE}:${VERSION} . 
+ +docker-push: + docker push seldonio/${IMAGE}:${VERSION} + +kind_load: docker-build + kind load docker-image seldonio/${IMAGE}:${VERSION} + +# password can be found at: https://connect.redhat.com/project/3986991/view +redhat-image-scan: + docker pull seldonio/${IMAGE}:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_storage_initializer} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag seldonio/${IMAGE}:${VERSION} scan.connect.redhat.com/ospid-dc132f00-5370-46ed-b506-737bb66d3f34/${IMAGE}:${VERSION} + docker push scan.connect.redhat.com/ospid-dc132f00-5370-46ed-b506-737bb66d3f34/${IMAGE}:${VERSION} + +clean: + rm -rf kfserving diff --git a/doc/source/python/python_wrapping_s2i.md b/doc/source/python/python_wrapping_s2i.md index 40ea364668..e3b1e3a35d 100644 --- a/doc/source/python/python_wrapping_s2i.md +++ b/doc/source/python/python_wrapping_s2i.md @@ -17,7 +17,7 @@ If you are not familiar with s2i you can read [general instructions on using s2i To check everything is working you can run ```bash -s2i usage seldonio/seldon-core-s2i-python3:0.18 +s2i usage seldonio/seldon-core-s2i-python3:1.1.1-rc ``` @@ -129,14 +129,14 @@ These values can also be provided or overridden on the command line when buildin ## Step 3 - Build your image Use ```s2i build``` to create your Docker image from source code. You will need Docker installed on the machine and optionally git if your source code is in a public git repo. You can choose from three python builder images - * Python 3.6 : seldonio/seldon-core-s2i-python36:0.18, seldonio/seldon-core-s2i-python3:0.18 + * Python 3.6 : seldonio/seldon-core-s2i-python36:1.1.1-rc seldonio/seldon-core-s2i-python3:1.1.1-rc * Note there are [issues running TensorFlow under Python 3.7](https://github.com/tensorflow/tensorflow/issues/20444) (Nov 2018) and Python 3.7 is not officially supported by TensorFlow (Dec 2018). * Python 3.6 plus ONNX support via [Intel nGraph](https://github.com/NervanaSystems/ngraph) : seldonio/seldon-core-s2i-python3-ngraph-onnx:0.1 Using s2i you can build directly from a git repo or from a local source folder. See the [s2i docs](https://github.com/openshift/source-to-image/blob/master/docs/cli.md#s2i-build) for further details. The general format is: ```bash -s2i build seldonio/seldon-core-s2i-python3:0.18 +s2i build seldonio/seldon-core-s2i-python3:1.1.1-rc ``` Change to seldonio/seldon-core-s2i-python3 if using python 3. @@ -144,7 +144,7 @@ Change to seldonio/seldon-core-s2i-python3 if using python 3. 
An example invocation using the test template model inside seldon-core: ```bash -s2i build https://github.com/seldonio/seldon-core.git --context-dir=wrappers/s2i/python/test/model-template-app seldonio/seldon-core-s2i-python3:0.18 seldon-core-template-model +s2i build https://github.com/seldonio/seldon-core.git --context-dir=wrappers/s2i/python/test/model-template-app seldonio/seldon-core-s2i-python3:1.1.1-rc seldon-core-template-model ``` The above s2i build invocation: @@ -159,13 +159,13 @@ For building from a local source folder, an example where we clone the seldon-co ```bash git clone https://github.com/seldonio/seldon-core.git cd seldon-core -s2i build wrappers/s2i/python/test/model-template-app seldonio/seldon-core-s2i-python3:0.18 seldon-core-template-model +s2i build wrappers/s2i/python/test/model-template-app seldonio/seldon-core-s2i-python3:1.1.1-rc seldon-core-template-model ``` For more help see: ```bash -s2i usage seldonio/seldon-core-s2i-python3:0.18 +s2i usage seldonio/seldon-core-s2i-python3:1.1.1-rc s2i build --help ``` @@ -275,7 +275,7 @@ The allowable ```type``` values for the parameters are defined in the [proto buf To use a private repository for installing Python dependencies use the following build command: ```bash -s2i build -i :/whl seldonio/seldon-core-s2i-python3:0.18 +s2i build -i :/whl seldonio/seldon-core-s2i-python3:1.1.1-rc ``` This command will look for local Python wheels in the `````` and use these before searching PyPI. diff --git a/doc/source/workflow/quickstart.md b/doc/source/workflow/quickstart.md index d1ac80fac2..e8b94d357b 100644 --- a/doc/source/workflow/quickstart.md +++ b/doc/source/workflow/quickstart.md @@ -217,7 +217,7 @@ Now we can use the Seldon Core utilities to convert our python class into a full The result below is a container with the name `sklearn_iris` and the tag `0.1` which we will be able to deploy using Seldon Core. ```console -s2i build . seldonio/seldon-core-s2i-python3:0.18 sklearn_iris:0.1 +s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc sklearn_iris:0.1 ``` **5. Deploy to Kubernetes** diff --git a/engine/Dockerfile.redhat b/engine/Dockerfile.redhat index 705a31a1d5..73fc5db43a 100644 --- a/engine/Dockerfile.redhat +++ b/engine/Dockerfile.redhat @@ -14,7 +14,7 @@ RUN yum install -y java-11-openjdk.x86_64 LABEL name="Seldon Engine" \ vendor="Seldon Technologies" \ - version="v1.1.0" \ + version="1.1.1-rc" \ release="1" \ summary="The Seldon Engine that is deployed for each model to handle the data flow of each ML computational graph." \ description="The Seldon Engine that is deployed for each model to handle the data flow of each ML computational graph." diff --git a/engine/Makefile b/engine/Makefile index 7c4ab66300..9d222414ce 100644 --- a/engine/Makefile +++ b/engine/Makefile @@ -1,7 +1,8 @@ +SHELL := /bin/bash VERSION := $(shell cat ../version.txt) IMAGE_NAME_BASE=engine IMG ?= seldonio/${IMAGE_NAME_BASE}:${VERSION} -IMG_VERSION_REDHAT ?= ${IMAGE_NAME_BASE}-ubi8:1.1.0 +IMG_VERSION_REDHAT ?= ${IMAGE_NAME_BASE}-ubi8:${VERSION} IMG_REDHAT ?= seldonio/${IMG_VERSION_REDHAT} version: @@ -16,9 +17,14 @@ build_image: update_proto update_swagger docker-build-redhat: update_proto update_swagger docker build -f Dockerfile.redhat -t ${IMG_REDHAT} . 
+docker-push-redhat: + docker push ${IMG_REDHAT} + # password can be found at: https://connect.redhat.com/project/1366491/view redhat-image-scan: - docker login -u unused scan.connect.redhat.com + docker pull ${IMG_REDHAT} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_engine} | docker login -u unused scan.connect.redhat.com --password-stdin docker tag ${IMG_REDHAT} scan.connect.redhat.com/ospid-0d5322f9-b490-461e-a93f-1a88f69c1251/${IMG_VERSION_REDHAT} docker push scan.connect.redhat.com/ospid-0d5322f9-b490-461e-a93f-1a88f69c1251/${IMG_VERSION_REDHAT} diff --git a/engine/custom-images/Dockerfile-redhat b/engine/custom-images/Dockerfile-redhat deleted file mode 100644 index 8c3a65d422..0000000000 --- a/engine/custom-images/Dockerfile-redhat +++ /dev/null @@ -1,26 +0,0 @@ -FROM openjdk:8u201-jre-alpine3.9 as builder - -ARG APP_VERSION=UNKOWN_VERSION - -RUN apk add curl - -COPY /target/seldon-engine-${APP_VERSION}.jar /app.jar -COPY /target/generated-resources /licenses/ - -# Copy the controller-manager into a thin image -FROM registry.access.redhat.com/ubi8/ubi - -RUN yum install java-1.8.0-openjdk.x86_64 -y - -LABEL name="Seldon Engine" \ - vendor="Seldon Technologies" \ - version="v0.3.1" \ - release="1" \ - summary="The Seldon Engine that is deployed for each model to handle the data flow of each ML computational graph." \ - description="The Seldon Engine that is deployed for each model to handle the data flow of each ML computational graph." - -WORKDIR / -COPY --from=builder /app.jar app.jar -COPY --from=builder /licenses/ /licenses/ - -entrypoint [ "sh", "-c", "java -Djava.security.egd=file:/dev/./urandom $java_opts -jar app.jar" ] diff --git a/engine/pom.xml b/engine/pom.xml index ae9b7d2ff5..e3cd1a634e 100644 --- a/engine/pom.xml +++ b/engine/pom.xml @@ -10,7 +10,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs io.seldon.engine seldon-engine - 1.1.1-SNAPSHOT + 1.1.1-rc jar engine http://maven.apache.org diff --git a/examples/cicd/sig-mlops-jenkins-classic/models/image_classifier/seldon_custom_server_README.ipynb b/examples/cicd/sig-mlops-jenkins-classic/models/image_classifier/seldon_custom_server_README.ipynb index 9891664c1d..5ad2a27bfd 100644 --- a/examples/cicd/sig-mlops-jenkins-classic/models/image_classifier/seldon_custom_server_README.ipynb +++ b/examples/cicd/sig-mlops-jenkins-classic/models/image_classifier/seldon_custom_server_README.ipynb @@ -494,7 +494,7 @@ ], "source": [ "%%bash\n", - "SELDON_BASE_WRAPPER=\"seldonio/seldon-core-s2i-python36:0.12\"\n", + "SELDON_BASE_WRAPPER=\"seldonio/seldon-core-s2i-python36:1.1.1-rc "s2i build src/. $SELDON_BASE_WRAPPER sklearn-server:0.1 \\\n", " --environment-file src/seldon_model.conf" ] diff --git a/examples/cicd/sig-mlops-jenkins-classic/models/news_classifier/seldon_custom_server_README.ipynb b/examples/cicd/sig-mlops-jenkins-classic/models/news_classifier/seldon_custom_server_README.ipynb index 9891664c1d..5ad2a27bfd 100644 --- a/examples/cicd/sig-mlops-jenkins-classic/models/news_classifier/seldon_custom_server_README.ipynb +++ b/examples/cicd/sig-mlops-jenkins-classic/models/news_classifier/seldon_custom_server_README.ipynb @@ -494,7 +494,7 @@ ], "source": [ "%%bash\n", - "SELDON_BASE_WRAPPER=\"seldonio/seldon-core-s2i-python36:0.12\"\n", + "SELDON_BASE_WRAPPER=\"seldonio/seldon-core-s2i-python36:1.1.1-rc "s2i build src/. 
$SELDON_BASE_WRAPPER sklearn-server:0.1 \\\n", " --environment-file src/seldon_model.conf" ] diff --git a/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/Makefile b/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/Makefile index 96bb94a7a5..54501427f2 100644 --- a/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/Makefile +++ b/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/Makefile @@ -3,13 +3,13 @@ FOLDER=torchserver IMAGE_BASE=seldonio/${FOLDER} build_rest: - s2i build -E environment_rest ./${FOLDER} seldonio/seldon-core-s2i-python37:0.12-SNAPSHOT ${IMAGE_BASE}_rest:${VERSION} + s2i build -E environment_rest ./${FOLDER} seldonio/seldon-core-s2i-python37:1.1.1-rc ${IMAGE_BASE}_rest:${VERSION} push_rest: docker push ${IMAGE_BASE}_rest:${VERSION} build_grpc: - s2i build -E environment_grpc ./${FOLDER} seldonio/seldon-core-s2i-python37:0.12-SNAPSHOT ${IMAGE_BASE}_grpc:${VERSION} + s2i build -E environment_grpc ./${FOLDER} seldonio/seldon-core-s2i-python37:1.1.1-rc ${IMAGE_BASE}_grpc:${VERSION} push_grpc: docker push ${IMAGE_BASE}_grpc:${VERSION} diff --git a/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/test/sklearn_iris.ipynb b/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/test/sklearn_iris.ipynb index a1ffc1bb09..5168e0a1fa 100644 --- a/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/test/sklearn_iris.ipynb +++ b/examples/cicd/sig-mlops-jenkins-classic/servers/torchserver/test/sklearn_iris.ipynb @@ -86,7 +86,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "s2i build -E environment_rest ./sklearnserver seldonio/seldon-core-s2i-python37:0.11-SNAPSHOT seldonio/sklearnserver_rest:0.1\n", + "s2i build -E environment_rest ./sklearnserver seldonio/seldon-core-s2i-python37:1.1.1-rc seldonio/sklearnserver_rest:0.1\n", "---> Installing application source...\n", "---> Installing dependencies ...\n", "Looking in links: /whl\n", @@ -281,7 +281,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "s2i build -E environment_grpc ./sklearnserver seldonio/seldon-core-s2i-python37:0.11-SNAPSHOT seldonio/sklearnserver_grpc:0.1\n", + "s2i build -E environment_grpc ./sklearnserver seldonio/seldon-core-s2i-python37:1.1.1-rc seldonio/sklearnserver_grpc:0.1\n", "---> Installing application source...\n", "---> Installing dependencies ...\n", "Looking in links: /whl\n", diff --git a/examples/cicd/sig-mlops-seldon-jenkins-x/seldon_custom_server_README.ipynb b/examples/cicd/sig-mlops-seldon-jenkins-x/seldon_custom_server_README.ipynb index 95076a7230..c2a8352a44 100644 --- a/examples/cicd/sig-mlops-seldon-jenkins-x/seldon_custom_server_README.ipynb +++ b/examples/cicd/sig-mlops-seldon-jenkins-x/seldon_custom_server_README.ipynb @@ -494,7 +494,7 @@ ], "source": [ "%%bash\n", - "SELDON_BASE_WRAPPER=\"seldonio/seldon-core-s2i-python36:0.12\"\n", + "SELDON_BASE_WRAPPER=\"seldonio/seldon-core-s2i-python36:1.1.1-rc "s2i build src/. 
$SELDON_BASE_WRAPPER sklearn-server:0.1 \\\n", " --environment-file src/seldon_model.conf" ] diff --git a/examples/combiners/spam_clf_combiner/spam-classification.ipynb b/examples/combiners/spam_clf_combiner/spam-classification.ipynb index c17f947fef..95a067ae71 100644 --- a/examples/combiners/spam_clf_combiner/spam-classification.ipynb +++ b/examples/combiners/spam_clf_combiner/spam-classification.ipynb @@ -321,7 +321,7 @@ } ], "source": [ - "!s2i build keras-spam-classifier/ seldonio/seldon-core-s2i-python3:0.7 spam-classifier:1.0.0.1" + "!s2i build keras-spam-classifier/ seldonio/seldon-core-s2i-python3:1.1.1-rc spam-classifier:1.0.0.1" ] }, { @@ -415,7 +415,7 @@ } ], "source": [ - "!s2i build keras-spam-classifier/ seldonio/seldon-core-s2i-python3:0.7 keras-spam-classifier:1.0.0.1" + "!s2i build keras-spam-classifier/ seldonio/seldon-core-s2i-python3:1.1.1-rc keras-spam-classifier:1.0.0.1" ] }, { @@ -469,7 +469,7 @@ } ], "source": [ - "!s2i build Translator/ seldonio/seldon-core-s2i-python3:0.7 translator:1.0.0.1" + "!s2i build Translator/ seldonio/seldon-core-s2i-python3:1.1.1-rc translator:1.0.0.1" ] }, { @@ -538,7 +538,7 @@ } ], "source": [ - "!s2i build Combiner/ seldonio/seldon-core-s2i-python3:0.7 combiner:1.0.0.1" + "!s2i build Combiner/ seldonio/seldon-core-s2i-python3:1.1.1-rc combiner:1.0.0.1" ] }, { diff --git a/examples/explainers/imagenet/resources/transformer/Makefile b/examples/explainers/imagenet/resources/transformer/Makefile index 2f586864f6..2ea20b3c52 100644 --- a/examples/explainers/imagenet/resources/transformer/Makefile +++ b/examples/explainers/imagenet/resources/transformer/Makefile @@ -2,10 +2,10 @@ IMAGE_VERSION=0.1 IMAGE_NAME=docker.io/seldonio/imagenet-transformer build_grpc: - s2i build -E environment_grpc . seldonio/seldon-core-s2i-python36:0.13-SNAPSHOT $(IMAGE_NAME):$(IMAGE_VERSION) + s2i build -E environment_grpc . seldonio/seldon-core-s2i-python36:1.1.1-rc $(IMAGE_NAME):$(IMAGE_VERSION) build_rest: - s2i build -E environment_rest . seldonio/seldon-core-s2i-python36:0.13-SNAPSHOT $(IMAGE_NAME):$(IMAGE_VERSION) + s2i build -E environment_rest . 
seldonio/seldon-core-s2i-python36:1.1.1-rc $(IMAGE_NAME):$(IMAGE_VERSION) push_to_dockerhub: docker push $(IMAGE_NAME):$(IMAGE_VERSION) diff --git a/examples/input_tranformer/Spam Classification.ipynb b/examples/input_tranformer/Spam Classification.ipynb deleted file mode 100644 index 083ba419b9..0000000000 --- a/examples/input_tranformer/Spam Classification.ipynb +++ /dev/null @@ -1,402 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Spam Classification Model (Sklearn)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- Wrap a ML model for use as a prediction microservice in seldon-core\n", - "- Run locally on Docker to test\n", - "- Deploy on seldon-core running on k8s cluster" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Train Locally" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np \n", - "import pandas as pd\n", - "from sklearn.externals import joblib\n", - "from pathlib import Path\n", - "import string\n", - "from nltk.stem import SnowballStemmer\n", - "from nltk.corpus import stopwords\n", - "from sklearn.feature_extraction.text import TfidfVectorizer\n", - "from sklearn.model_selection import train_test_split\n", - "import pickle\n", - "from sklearn.svm import SVC\n", - "from sklearn.metrics import accuracy_score\n", - "model_path: Path=Path('./')" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "data = pd.read_csv(\"spam.csv\",encoding='latin-1')\n", - "data = data.drop([\"Unnamed: 2\", \"Unnamed: 3\", \"Unnamed: 4\"], axis=1)\n", - "data = data.rename(columns={\"v1\":\"class\", \"v2\":\"text\"})\n", - "data.head()\n", - "\n", - "def pre_process(text):\n", - " text = text.translate(str.maketrans('', '', string.punctuation))\n", - " text = [word for word in text.split() if word.lower() not in stopwords.words('english')]\n", - " words = \"\"\n", - " for i in text:\n", - " stemmer = SnowballStemmer(\"english\")\n", - " words += (stemmer.stem(i))+\" \"\n", - " return words\n", - "\n", - "features = data['text'].copy()\n", - "features = features.apply(pre_process)\n", - "\n", - "vectorizer = TfidfVectorizer(\"english\")\n", - "_features = vectorizer.fit_transform(features)\n", - "with open('Spam-Classifier/model/vectorizer.pkl', 'wb') as vect:\n", - " pickle.dump(vectorizer, vect)" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,\n", - " decision_function_shape='ovr', degree=3, gamma=1.0, kernel='sigmoid',\n", - " max_iter=-1, probability=True, random_state=None, shrinking=True, tol=0.001,\n", - " verbose=False)" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "vectorizer = joblib.load(model_path.joinpath('Spam-Classifier/model/vectorizer.pkl'))\n", - "train_x, test_x, train_y, test_y = train_test_split(_features, data['class'], test_size=0.3, random_state=0)\n", - "svc = SVC(kernel='sigmoid', gamma=1.0, probability=True)\n", - "svc.fit(train_x,train_y)\n", - "# save the model to disk\n", - "filename = 'Spam-Classifier/model/model.pkl'\n", - "pickle.dump(svc, open(filename, 'wb'))" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "clf = 
joblib.load(model_path.joinpath(filename))" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0.9730861244019139" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "prediction = clf.predict(test_x)\n", - "accuracy_score(test_y,prediction)" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[0.0220629, 0.9779371]])" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "message = np.array(['click here to win the price'])\n", - "data = vectorizer.transform(message).todense()\n", - "probas = clf.predict_proba(data)\n", - "probas" - ] - }, - { - "cell_type": "code", - "execution_count": 53, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array(['ham', 'spam'], dtype=object)" - ] - }, - "execution_count": 53, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "clf.classes_" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### wrap each model component using s2i" - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "---> Installing application source...\n", - "---> Installing dependencies ...\n", - "Looking in links: /whl\n", - "Collecting scikit-learn==0.21.2 (from -r requirements.txt (line 1))\n", - " Url '/whl' is ignored. It is either a non-existing path or lacks a specific scheme.\n", - "Downloading https://files.pythonhosted.org/packages/85/04/49633f490f726da6e454fddc8e938bbb5bfed2001681118d3814c219b723/scikit_learn-0.21.2-cp36-cp36m-manylinux1_x86_64.whl (6.7MB)\n", - "Requirement already satisfied: numpy>=1.9.2 in /usr/local/lib/python3.6/site-packages (from -r requirements.txt (line 2)) (1.16.3)\n", - "Collecting scipy>=0.17.0 (from scikit-learn==0.21.2->-r requirements.txt (line 1))\n", - " Url '/whl' is ignored. It is either a non-existing path or lacks a specific scheme.\n", - "Downloading https://files.pythonhosted.org/packages/29/50/a552a5aff252ae915f522e44642bb49a7b7b31677f9580cfd11bcc869976/scipy-1.3.1-cp36-cp36m-manylinux1_x86_64.whl (25.2MB)\n", - "Collecting joblib>=0.11 (from scikit-learn==0.21.2->-r requirements.txt (line 1))\n", - " Url '/whl' is ignored. It is either a non-existing path or lacks a specific scheme.\n", - "Downloading https://files.pythonhosted.org/packages/8f/42/155696f85f344c066e17af287359c9786b436b1bf86029bb3411283274f3/joblib-0.14.0-py2.py3-none-any.whl (294kB)\n", - "Installing collected packages: scipy, joblib, scikit-learn\n", - "Successfully installed joblib-0.14.0 scikit-learn-0.21.2 scipy-1.3.1\n", - "Url '/whl' is ignored. 
It is either a non-existing path or lacks a specific scheme.\n", - "You are using pip version 18.1, however version 19.3.1 is available.\n", - "You should consider upgrading via the 'pip install --upgrade pip' command.\n", - "Build completed successfully\n" - ] - } - ], - "source": [ - "!s2i build Spam-Classifier/ seldonio/seldon-core-s2i-python3:0.7 spam-classifier:1.0.0.1" - ] - }, - { - "cell_type": "code", - "execution_count": 49, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1b8159f67b7ddbd2de26833411303ebee8e08331097e28754f04688c1fb86d3c\r\n" - ] - } - ], - "source": [ - "!docker run --name \"spam-classifier\" -d --rm -p 5000:5000 spam-classifier:1.0.0.1" - ] - }, - { - "cell_type": "code", - "execution_count": 51, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"data\":{\"ndarray\":[\"0.9779371008528993\",\"spam\"]},\"meta\":{}}\r\n" - ] - } - ], - "source": [ - "!curl -g http://localhost:5000/predict --data-urlencode 'json={\"data\": {\"names\": [\"message\"], \"ndarray\": [\"click here to win the price\"]}}'\n" - ] - }, - { - "cell_type": "code", - "execution_count": 52, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "spam-classifier\r\n" - ] - } - ], - "source": [ - "!docker rm spam-classifier --force" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "---> Installing application source...\n", - "---> Installing dependencies ...\n", - "Looking in links: /whl\n", - "Collecting goslate (from -r requirements.txt (line 1))\n", - " Url '/whl' is ignored. It is either a non-existing path or lacks a specific scheme.\n", - "Downloading https://files.pythonhosted.org/packages/39/0b/50af938a1c3d4f4c595b6a22d37af11ebe666246b05a1a97573e8c8944e5/goslate-1.5.1.tar.gz\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.6/site-packages (from -r requirements.txt (line 2)) (1.16.3)\n", - "Collecting futures (from goslate->-r requirements.txt (line 1))\n", - " Url '/whl' is ignored. It is either a non-existing path or lacks a specific scheme.\n", - "Downloading https://files.pythonhosted.org/packages/05/80/f41cca0ea1ff69bce7e7a7d76182b47bb4e1a494380a532af3e8ee70b9ec/futures-3.1.1-py3-none-any.whl\n", - "Building wheels for collected packages: goslate\n", - "Running setup.py bdist_wheel for goslate: started\n", - "Running setup.py bdist_wheel for goslate: finished with status 'done'\n", - "Stored in directory: /root/.cache/pip/wheels/4f/7f/28/6f52271012a7649b54b1a7adaae329b4246bbbf9d1e4f6e51a\n", - "Successfully built goslate\n", - "Installing collected packages: futures, goslate\n", - "Successfully installed futures-3.1.1 goslate-1.5.1\n", - "Url '/whl' is ignored. 
It is either a non-existing path or lacks a specific scheme.\n", - "You are using pip version 18.1, however version 19.3.1 is available.\n", - "You should consider upgrading via the 'pip install --upgrade pip' command.\n", - "Build completed successfully\n" - ] - } - ], - "source": [ - "!s2i build Translator/ seldonio/seldon-core-s2i-python3:0.7 translator:1.0.0.1" - ] - }, - { - "cell_type": "code", - "execution_count": 55, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ca18617eed4ee5b12c1ce835d94a677007e3c095166b8e4e5d0f9fd164757814\r\n" - ] - } - ], - "source": [ - "!docker run --name \"eng-translator\" -d --rm -p 5000:5000 translator:1.0.0.1" - ] - }, - { - "cell_type": "code", - "execution_count": 57, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"data\":{\"names\":[\"message\"],\"ndarray\":[\"How is your day\"]},\"meta\":{}}\r\n" - ] - } - ], - "source": [ - "!curl -g http://localhost:5000/transform-input --data-urlencode 'json={\"data\": {\"names\": [\"message\"], \"ndarray\": [\"Wie läuft dein Tag\"]}}'" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "eng-translator\r\n" - ] - } - ], - "source": [ - "!docker rm eng-translator --force" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Assuming you have kubernetes cluster running and seldon-core installed, you can deploy your Machine Learning model using:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "kubectl apply -f deploy.yaml" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/examples/input_tranformer/spam-classification.ipynb b/examples/input_tranformer/spam-classification.ipynb index eaeae65444..c1941cc137 100644 --- a/examples/input_tranformer/spam-classification.ipynb +++ b/examples/input_tranformer/spam-classification.ipynb @@ -214,7 +214,7 @@ } ], "source": [ - "!s2i build Spam-Classifier/ seldonio/seldon-core-s2i-python3:0.7 spam-classifier:1.0.0.1" + "!s2i build Spam-Classifier/ seldonio/seldon-core-s2i-python3:1.1.1-rc spam-classifier:1.0.0.1" ] }, { @@ -302,7 +302,7 @@ } ], "source": [ - "!s2i build Translator/ seldonio/seldon-core-s2i-python3:0.7 translator:1.0.0.1" + "!s2i build Translator/ seldonio/seldon-core-s2i-python3:1.1.1-rc translator:1.0.0.1" ] }, { diff --git a/examples/kubeflow/kubeflow_seldon_e2e_pipeline.ipynb b/examples/kubeflow/kubeflow_seldon_e2e_pipeline.ipynb index c193b71eac..dffb3274b1 100644 --- a/examples/kubeflow/kubeflow_seldon_e2e_pipeline.ipynb +++ b/examples/kubeflow/kubeflow_seldon_e2e_pipeline.ipynb @@ -414,7 +414,7 @@ "text": [ "#!/bin/bash\r\n", "\r\n", - "s2i build . seldonio/seldon-core-s2i-python3:0.6 clean_text_transformer:0.1\r\n", + "s2i build . 
seldonio/seldon-core-s2i-python3:1.1.1-rc clean_text_transformer:0.1\r\n", "\r\n" ] } diff --git a/examples/models/alibaba_ack_deep_mnist/alibaba_cloud_ack_deep_mnist.ipynb b/examples/models/alibaba_ack_deep_mnist/alibaba_cloud_ack_deep_mnist.ipynb index 8292bf81d5..920821927e 100644 --- a/examples/models/alibaba_ack_deep_mnist/alibaba_cloud_ack_deep_mnist.ipynb +++ b/examples/models/alibaba_ack_deep_mnist/alibaba_cloud_ack_deep_mnist.ipynb @@ -285,7 +285,7 @@ } ], "source": [ - "!s2i build . seldonio/seldon-core-s2i-python36:0.18 deep-mnist:0.1" + "!s2i build . seldonio/seldon-core-s2i-python36:1.1.1-rc deep-mnist:0.1" ] }, { diff --git a/examples/models/aws_eks_deep_mnist/aws_eks_deep_mnist.ipynb b/examples/models/aws_eks_deep_mnist/aws_eks_deep_mnist.ipynb index ab3b510539..ad02dd1246 100644 --- a/examples/models/aws_eks_deep_mnist/aws_eks_deep_mnist.ipynb +++ b/examples/models/aws_eks_deep_mnist/aws_eks_deep_mnist.ipynb @@ -182,7 +182,7 @@ } ], "source": [ - "!s2i build . seldonio/seldon-core-s2i-python36:0.18 deep-mnist:0.1" + "!s2i build . seldonio/seldon-core-s2i-python36:1.1.1-rc deep-mnist:0.1" ] }, { diff --git a/examples/models/azure_aks_deep_mnist/azure_aks_deep_mnist.ipynb b/examples/models/azure_aks_deep_mnist/azure_aks_deep_mnist.ipynb index 81b4b48a41..d2529f4ea9 100644 --- a/examples/models/azure_aks_deep_mnist/azure_aks_deep_mnist.ipynb +++ b/examples/models/azure_aks_deep_mnist/azure_aks_deep_mnist.ipynb @@ -215,7 +215,7 @@ } ], "source": [ - "!s2i build . seldonio/seldon-core-s2i-python36:0.18 deep-mnist:0.1" + "!s2i build . seldonio/seldon-core-s2i-python36:1.1.1-rc deep-mnist:0.1" ] }, { diff --git a/examples/models/chainer_mnist/chainer_mnist.ipynb b/examples/models/chainer_mnist/chainer_mnist.ipynb index afb850b6c0..26f09ecc6b 100644 --- a/examples/models/chainer_mnist/chainer_mnist.ipynb +++ b/examples/models/chainer_mnist/chainer_mnist.ipynb @@ -786,7 +786,7 @@ } ], "source": [ - "!s2i build . seldonio/seldon-core-s2i-python3:0.10 chainer-mnist:0.1" + "!s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc chainer-mnist:0.1" ] }, { @@ -1095,7 +1095,7 @@ } ], "source": [ - "!eval $(minikube docker-env) && s2i build . seldonio/seldon-core-s2i-python3:0.10 chainer-mnist:0.1" + "!eval $(minikube docker-env) && s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc chainer-mnist:0.1" ] }, { diff --git a/examples/models/deep_mnist/Makefile b/examples/models/deep_mnist/Makefile index 5e114747c6..f4bd3c1e94 100644 --- a/examples/models/deep_mnist/Makefile +++ b/examples/models/deep_mnist/Makefile @@ -2,7 +2,7 @@ IMAGE_NAME=seldonio/tf-example-mnist IMAGE_VERSION=0.2 build_image: - s2i build . seldonio/seldon-core-s2i-python3:0.19-SNAPSHOT ${IMAGE_NAME}:${IMAGE_VERSION} + s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc ${IMAGE_NAME}:${IMAGE_VERSION} push_image: docker push $(IMAGE_NAME):$(IMAGE_VERSION) diff --git a/examples/models/mean_classifier/.s2i/bin/assemble b/examples/models/mean_classifier/.s2i/bin/assemble new file mode 100644 index 0000000000..f3cac492de --- /dev/null +++ b/examples/models/mean_classifier/.s2i/bin/assemble @@ -0,0 +1,15 @@ +#!/bin/bash +echo "Before assembling" + +/s2i/bin/assemble +rc=$? 
+ +if [ $rc -eq 0 ]; then + echo "After successful assembling" +else + echo "After failed assembling" + exit $rc +fi + +mkdir -p /tmp/.s2i/ +cp image_metadata.json /tmp/.s2i/image_metadata.json diff --git a/examples/models/mean_classifier/Dockerfile.redhat b/examples/models/mean_classifier/Dockerfile.redhat deleted file mode 100644 index c2e2f7a0bc..0000000000 --- a/examples/models/mean_classifier/Dockerfile.redhat +++ /dev/null @@ -1,33 +0,0 @@ -FROM registry.access.redhat.com/ubi8/python-36 -LABEL name="Seldon Core Dummy Model" \ - vendor="Seldon Technologies" \ - version="v1.1.0" \ - release="1" \ - summary="Dummy Model for Seldon Core" \ - description="A Dummy Model used to smoke test Seldon Core" - -RUN pip install pip -U - -COPY requirements.txt . -COPY MeanClassifier.py . -COPY model.npy . -RUN pip install -r requirements.txt -EXPOSE 5000 - -# Add licences -RUN pip install pip-licenses -RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ - pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt -USER root -RUN mv ./licenses /licenses -USER default - -# Define environment variable -ENV MODEL_NAME MeanClassifier -ENV API_TYPE REST -ENV SERVICE_TYPE MODEL -ENV PERSISTENCE 0 - -CMD exec seldon-core-microservice $MODEL_NAME $API_TYPE --service-type $SERVICE_TYPE --persistence $PERSISTENCE - - diff --git a/examples/models/mean_classifier/Makefile b/examples/models/mean_classifier/Makefile index 4d6b21ee3f..6d248da9f5 100644 --- a/examples/models/mean_classifier/Makefile +++ b/examples/models/mean_classifier/Makefile @@ -1,19 +1,19 @@ +SHELL := /bin/bash VERSION := $(shell cat ../../../version.txt) IMAGE_NAME_BASE=mock_classifier IMAGE_BASE=seldonio/${IMAGE_NAME_BASE} -build_rest: - s2i build -E environment_rest . seldonio/seldon-core-s2i-python36:${VERSION} ${IMAGE_BASE}_rest:${VERSION} +build_%: + s2i build -E environment_$* . seldonio/seldon-core-s2i-python37-ubi8:${VERSION} ${IMAGE_BASE}_$*:${VERSION} -push_rest: - docker push ${IMAGE_BASE}_rest:${VERSION} +push_%: + docker push ${IMAGE_BASE}_$*:${VERSION} -build_grpc: - s2i build -E environment_grpc . 
seldonio/seldon-core-s2i-python36:${VERSION} ${IMAGE_BASE}_grpc:${VERSION} - -push_grpc: - docker push ${IMAGE_BASE}_grpc:${VERSION} +.PHONY: build_all +build_all: build_rest build_grpc +.PHONY: push_all +push_all:push_rest push_grpc run_rest_local: export PREDICTIVE_UNIT_SERVICE_PORT=9000 && TRACING=1 JAEGER_AGENT_HOST=localhost JAEGER_AGENT_PORT=6831 JAEGER_SAMPLER_TYPE=const JAEGER_SAMPLER_PARAM=1 seldon-core-microservice --service-type MODEL MeanClassifier REST @@ -22,24 +22,26 @@ run_grpc_local: export PREDICTIVE_UNIT_SERVICE_PORT=9000 && TRACING=1 JAEGER_AGENT_HOST=localhost JAEGER_AGENT_PORT=6831 JAEGER_SAMPLER_TYPE=const JAEGER_SAMPLER_PARAM=1 seldon-core-microservice --service-type MODEL MeanClassifier GRPC -kind_load_rest: - kind load -v 3 docker-image ${IMAGE_BASE}_rest:${VERSION} - -kind_load_grpc: - kind load -v 3 docker-image ${IMAGE_BASE}_grpc:${VERSION} +kind_load_%: + kind load -v 3 docker-image ${IMAGE_BASE}_$*:${VERSION} +.PHONY: kind_load_all +kind_load_all: kind_load_rest kind_load_grpc # # Redhat # -IMG_VERSION_REDHAT ?= ${IMAGE_NAME_BASE}-ubi8:${VERSION} -IMG_REDHAT ?= seldonio/${IMG_VERSION_REDHAT} +# https://connect.redhat.com/project/4035711/view +scan_rest=ospid-c90fdfe6-d054-4598-baa8-7f7aac3ed63a +# no groc at present +scan_grpc= +redhat-image-scan-%: + docker pull ${IMAGE_BASE}_$*:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_mock_model_$*} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag ${IMAGE_BASE}_$*:${VERSION} scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + docker push scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} -docker-build-redhat: - docker build . -f Dockerfile.redhat -t ${IMG_REDHAT} +.PHONY: redhat-image-scan-all +redhat-image-scan: redhat-image-scan-rest -# password can be found at: https://connect.redhat.com/project/4035711/view -redhat-image-scan: - docker login -u unused scan.connect.redhat.com - docker tag ${IMG_REDHAT} scan.connect.redhat.com/ospid-c90fdfe6-d054-4598-baa8-7f7aac3ed63a/${IMG_VERSION_REDHAT} - docker push scan.connect.redhat.com/ospid-c90fdfe6-d054-4598-baa8-7f7aac3ed63a/${IMG_VERSION_REDHAT} diff --git a/examples/models/mean_classifier/image_metadata.json b/examples/models/mean_classifier/image_metadata.json new file mode 100644 index 0000000000..0192a02edc --- /dev/null +++ b/examples/models/mean_classifier/image_metadata.json @@ -0,0 +1 @@ +{"labels": [{"name": "Seldon Mock Model"}, {"vendor": "Seldon Technologies"}, {"version": "1.1.1-rc"}, {"release": "1"}, {"summary": "A mock model for testing Seldon Core"}, {"description": "A mock REST model for testing Seldon Core"}]} \ No newline at end of file diff --git a/examples/models/nvidia-mnist/Makefile b/examples/models/nvidia-mnist/Makefile index a70b3b574c..582f47fa7d 100644 --- a/examples/models/nvidia-mnist/Makefile +++ b/examples/models/nvidia-mnist/Makefile @@ -7,7 +7,7 @@ clean: rm -f tmp.json build_transformer: - s2i build . seldonio/seldon-core-s2i-python3:0.4 ${TRANSFORMER_IMAGE} + s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc ${TRANSFORMER_IMAGE} push_transformer: docker push ${TRANSFORMER_IMAGE} diff --git a/examples/models/nvidia-mnist/nvidia_mnist.ipynb b/examples/models/nvidia-mnist/nvidia_mnist.ipynb index 3f7669e1bd..7fc39bcab3 100644 --- a/examples/models/nvidia-mnist/nvidia_mnist.ipynb +++ b/examples/models/nvidia-mnist/nvidia_mnist.ipynb @@ -339,7 +339,7 @@ } ], "source": [ - "!s2i build . 
seldonio/seldon-core-s2i-python3:0.18 mnist-caffe2-transformer:0.1" + "!s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc mnist-caffe2-transformer:0.1" ] }, { diff --git a/examples/models/resnet/Makefile b/examples/models/resnet/Makefile index 2f193233e5..38efeec833 100644 --- a/examples/models/resnet/Makefile +++ b/examples/models/resnet/Makefile @@ -1,7 +1,7 @@ build_image: - s2i build -E environment_grpc . seldonio/seldon-core-s2i-python36:0.4 seldon-resnet2.4 + s2i build -E environment_grpc . seldonio/seldon-core-s2i-python36:1.1.1-rc seldon-resnet2.4 clean: diff --git a/examples/models/resnet/reset.ipynb b/examples/models/resnet/reset.ipynb index 1be5eb3e74..a083484fb6 100644 --- a/examples/models/resnet/reset.ipynb +++ b/examples/models/resnet/reset.ipynb @@ -110,7 +110,7 @@ } ], "source": [ - "!s2i build -E environment_grpc . seldonio/seldon-core-s2i-python36:0.18 seldon-resnet2.4" + "!s2i build -E environment_grpc . seldonio/seldon-core-s2i-python36:1.1.1-rc seldon-resnet2.4" ] }, { diff --git a/examples/models/sk_mnist/Makefile b/examples/models/sk_mnist/Makefile index deec712e91..2210733288 100644 --- a/examples/models/sk_mnist/Makefile +++ b/examples/models/sk_mnist/Makefile @@ -5,7 +5,7 @@ train: python train.py build_image: - s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:0.19-SNAPSHOT ${IMAGE_NAME}:${IMAGE_VERSION} + s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:1.1.1-rc ${IMAGE_NAME}:${IMAGE_VERSION} push_image: docker push $(IMAGE_NAME):$(IMAGE_VERSION) diff --git a/examples/models/sklearn_iris/Makefile b/examples/models/sklearn_iris/Makefile index f9d188d41a..1439c5e7c0 100644 --- a/examples/models/sklearn_iris/Makefile +++ b/examples/models/sklearn_iris/Makefile @@ -2,7 +2,7 @@ IMAGE_NAME=seldonio/sklearn-iris IMAGE_VERSION=0.1 build_image: train - s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:0.18 ${IMAGE_NAME}:${IMAGE_VERSION} + s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:1.1.1-rc ${IMAGE_NAME}:${IMAGE_VERSION} push_image: docker push $(IMAGE_NAME):$(IMAGE_VERSION) diff --git a/examples/models/sklearn_iris/sklearn_iris.ipynb b/examples/models/sklearn_iris/sklearn_iris.ipynb index 893e697ab7..9b87994388 100644 --- a/examples/models/sklearn_iris/sklearn_iris.ipynb +++ b/examples/models/sklearn_iris/sklearn_iris.ipynb @@ -109,7 +109,7 @@ "metadata": {}, "outputs": [], "source": [ - "!s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:0.18 seldonio/sklearn-iris:0.1" + "!s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:1.1.1-rc seldonio/sklearn-iris:0.1" ] }, { diff --git a/examples/models/sklearn_iris_customdata/Makefile b/examples/models/sklearn_iris_customdata/Makefile index 2ac337027b..c893087e10 100644 --- a/examples/models/sklearn_iris_customdata/Makefile +++ b/examples/models/sklearn_iris_customdata/Makefile @@ -2,7 +2,7 @@ IMAGE_NAME=seldonio/sklearn-iris-customdata IMAGE_VERSION=0.1 build_image: train - s2i build . seldonio/seldon-core-s2i-python3:0.19 ${IMAGE_NAME}:${IMAGE_VERSION} + s2i build . 
seldonio/seldon-core-s2i-python3:1.1.1-rc ${IMAGE_NAME}:${IMAGE_VERSION} push_image: docker push $(IMAGE_NAME):$(IMAGE_VERSION) diff --git a/examples/models/sklearn_iris_customdata/sklearn_iris_customdata.ipynb b/examples/models/sklearn_iris_customdata/sklearn_iris_customdata.ipynb index b7d50ad91b..fdbde27938 100644 --- a/examples/models/sklearn_iris_customdata/sklearn_iris_customdata.ipynb +++ b/examples/models/sklearn_iris_customdata/sklearn_iris_customdata.ipynb @@ -132,7 +132,7 @@ }, "outputs": [], "source": [ - "!s2i build . seldonio/seldon-core-s2i-python37:0.19-SNAPSHOT seldonio/sklearn-iris-customdata:0.1" + "!s2i build . seldonio/seldon-core-s2i-python37:1.1.1-rc seldonio/sklearn-iris-customdata:0.1" ] }, { diff --git a/examples/models/sklearn_iris_jsondata/Makefile b/examples/models/sklearn_iris_jsondata/Makefile index d6a42a60b5..5aaa74aea6 100644 --- a/examples/models/sklearn_iris_jsondata/Makefile +++ b/examples/models/sklearn_iris_jsondata/Makefile @@ -2,7 +2,7 @@ IMAGE_NAME=seldonio/sklearn-iris-jsondata IMAGE_VERSION=0.1 build_image: train - s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:0.18 ${IMAGE_NAME}:${IMAGE_VERSION} + s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:1.1.1-rc ${IMAGE_NAME}:${IMAGE_VERSION} push_image: docker push $(IMAGE_NAME):$(IMAGE_VERSION) diff --git a/examples/models/sklearn_iris_jsondata/sklearn_iris_jsondata.ipynb b/examples/models/sklearn_iris_jsondata/sklearn_iris_jsondata.ipynb index 0461d5f5c5..b00b4f56f6 100644 --- a/examples/models/sklearn_iris_jsondata/sklearn_iris_jsondata.ipynb +++ b/examples/models/sklearn_iris_jsondata/sklearn_iris_jsondata.ipynb @@ -118,7 +118,7 @@ "metadata": {}, "outputs": [], "source": [ - "!s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:0.18 seldonio/sklearn-iris-jsondata:0.1" + "!s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:1.1.1-rc seldonio/sklearn-iris-jsondata:0.1" ] }, { diff --git a/examples/models/sklearn_spacy_text/sklearn_spacy_text_classifier_example.ipynb b/examples/models/sklearn_spacy_text/sklearn_spacy_text_classifier_example.ipynb index e32a2708c8..ef34edbe55 100644 --- a/examples/models/sklearn_spacy_text/sklearn_spacy_text_classifier_example.ipynb +++ b/examples/models/sklearn_spacy_text/sklearn_spacy_text_classifier_example.ipynb @@ -465,7 +465,7 @@ }, "outputs": [], "source": [ - "!s2i build . seldonio/seldon-core-s2i-python3:0.18 reddit-classifier:0.1" + "!s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc reddit-classifier:0.1" ] }, { diff --git a/examples/models/xss/Makefile b/examples/models/xss/Makefile index 56d4f24e60..d416a5e0b6 100644 --- a/examples/models/xss/Makefile +++ b/examples/models/xss/Makefile @@ -2,5 +2,5 @@ IMAGE_NAME=xss-model IMAGE_VERSION=0.1 build_image: - s2i build . seldonio/seldon-core-s2i-python3:0.7 ${IMAGE_NAME}:${IMAGE_VERSION} + s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc ${IMAGE_NAME}:${IMAGE_VERSION} diff --git a/examples/models/xss/xss-example.ipynb b/examples/models/xss/xss-example.ipynb index a90f39d54c..395114b1a3 100644 --- a/examples/models/xss/xss-example.ipynb +++ b/examples/models/xss/xss-example.ipynb @@ -10012,7 +10012,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "s2i build . seldonio/seldon-core-s2i-python3:0.7 xss-model:0.1\n", + "s2i build . 
seldonio/seldon-core-s2i-python3:1.1.1-rc xss-model:0.1\n", "error: Unable to load docker config: json: cannot unmarshal string into Go value of type docker.dockerConfig\n", "---> Installing application source...\n", "Build completed successfully\n" diff --git a/examples/output_transformers/Makefile b/examples/output_transformers/Makefile index 652e1208b3..511d1343d4 100644 --- a/examples/output_transformers/Makefile +++ b/examples/output_transformers/Makefile @@ -2,4 +2,4 @@ IMAGE_VERSION=0.1 IMAGE_NAME = seldonio/output_transformer build: - s2i build . seldonio/seldon-core-s2i-python3:0.4 $(IMAGE_NAME):$(IMAGE_VERSION) + s2i build . seldonio/seldon-core-s2i-python3:1.1.1-rc $(IMAGE_NAME):$(IMAGE_VERSION) diff --git a/executor/Dockerfile.redhat b/executor/Dockerfile.redhat index 5f00a11a60..8d893314bf 100644 --- a/executor/Dockerfile.redhat +++ b/executor/Dockerfile.redhat @@ -23,7 +23,7 @@ RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 GO111MODULE=on go build -a -o executor FROM registry.access.redhat.com/ubi8/ubi-minimal LABEL name="Seldon Executor" \ vendor="Seldon Technologies" \ - version="v1.1.0" \ + version="1.1.1-rc" \ release="1" \ summary="The service orchestrator for Seldon Core" \ description="The service orchestrator for Seldon Core which manages the request/response flow through the Seldon Core inference graphs deployed via the Seldon Core Operator as SeldonDeployments custom resources" diff --git a/executor/Makefile b/executor/Makefile index d340196918..3464463cc2 100644 --- a/executor/Makefile +++ b/executor/Makefile @@ -1,3 +1,4 @@ +SHELL := /bin/bash VERSION := $(shell cat ../version.txt) # Image URL to use all building/pushing image targets IMAGE_NAME_BASE=seldon-core-executor @@ -80,7 +81,9 @@ docker-push-redhat: # password can be found at: https://connect.redhat.com/project/3977851/view redhat-image-scan: - docker login -u unused scan.connect.redhat.com + docker pull ${IMG_REDHAT} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_executor} | docker login -u unused scan.connect.redhat.com --password-stdin docker tag ${IMG_REDHAT} scan.connect.redhat.com/ospid-fffb6f6b-90b5-4f1d-be69-71baa8fb16cb/${IMG_VERSION_REDHAT} docker push scan.connect.redhat.com/ospid-fffb6f6b-90b5-4f1d-be69-71baa8fb16cb/${IMG_VERSION_REDHAT} diff --git a/executor/api/rest/client_test.go b/executor/api/rest/client_test.go index 981bb8e9a4..76d88a03dd 100644 --- a/executor/api/rest/client_test.go +++ b/executor/api/rest/client_test.go @@ -330,3 +330,15 @@ func TestTimeout(t *testing.T) { _, err = seldonRestClient.Status(createTestContext(), "model", host, int32(port), nil, map[string][]string{}) g.Expect(err).ToNot(BeNil()) } + + +func TestIsJson(t *testing.T) { + g := NewGomegaWithT(t) + badJson := "ab" + res := isJSON([]byte(badJson)) + g.Expect(res).To(Equal(false)) + + goodJson := "{\"foo\":\"bar\"}" + res = isJSON([]byte(goodJson)) + g.Expect(res).To(Equal(true)) +} \ No newline at end of file diff --git a/hack/update_python_version.sh b/hack/update_python_version.sh new file mode 100755 index 0000000000..98cee47c1d --- /dev/null +++ b/hack/update_python_version.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -o nounset +set -o errexit +set -o pipefail + +STARTUP_DIR="$( cd "$( dirname "$0" )" && pwd )" + +if [ "$#" -ne 1 ]; then + echo "You must enter the new version" + exit 1 +fi + +NEW_VERSION=$1 + +cd ${STARTUP_DIR}/.. && find . 
-type f \( -path './examples/*.ipynb' -or -path './doc/*.md' -or -path './examples/*Makefile' \) -exec grep -El 'seldon-core-s2i-python3[67]?:[^\$ ]+' \{\} \; | xargs -n1 -r sed -Ei "s/(seldon-core-s2i-python3)([67]?:)([^\$ ]+)/\1\2${NEW_VERSION}/g" + + + diff --git a/helm-charts/seldon-core-analytics/Chart.yaml b/helm-charts/seldon-core-analytics/Chart.yaml index ff62ac4631..a8f8a6cf5b 100644 --- a/helm-charts/seldon-core-analytics/Chart.yaml +++ b/helm-charts/seldon-core-analytics/Chart.yaml @@ -18,4 +18,4 @@ keywords: name: seldon-core-analytics sources: - https://github.com/SeldonIO/seldon-core -version: 1.1.1-SNAPSHOT +version: 1.1.1-rc diff --git a/helm-charts/seldon-core-operator/Chart.yaml b/helm-charts/seldon-core-operator/Chart.yaml index 0e6d90cb4f..c5674335c1 100644 --- a/helm-charts/seldon-core-operator/Chart.yaml +++ b/helm-charts/seldon-core-operator/Chart.yaml @@ -6,4 +6,4 @@ keywords: name: seldon-core-operator sources: - https://github.com/SeldonIO/seldon-core -version: 1.1.1-SNAPSHOT +version: 1.1.1-rc diff --git a/helm-charts/seldon-core-operator/templates/configmap_seldon-config.yaml b/helm-charts/seldon-core-operator/templates/configmap_seldon-config.yaml index b9aca6d891..6ce7dc5659 100644 --- a/helm-charts/seldon-core-operator/templates/configmap_seldon-config.yaml +++ b/helm-charts/seldon-core-operator/templates/configmap_seldon-config.yaml @@ -2,6 +2,7 @@ apiVersion: v1 data: credentials: '{{ .Values.credentials | toJson }}' + explainer: '{{ .Values.explainer | toJson }}' predictor_servers: '{{ .Values.predictor_servers | toJson }}' storageInitializer: '{{ .Values.storageInitializer | toJson }}' kind: ConfigMap diff --git a/helm-charts/seldon-core-operator/templates/deployment_seldon-controller-manager.yaml b/helm-charts/seldon-core-operator/templates/deployment_seldon-controller-manager.yaml index 8d1c766fbd..53a4c31527 100644 --- a/helm-charts/seldon-core-operator/templates/deployment_seldon-controller-manager.yaml +++ b/helm-charts/seldon-core-operator/templates/deployment_seldon-controller-manager.yaml @@ -45,6 +45,28 @@ spec: value: '' - name: RELATED_IMAGE_ENGINE value: '' + - name: RELATED_IMAGE_STORAGE_INITIALIZER + value: '' + - name: RELATED_IMAGE_SKLEARNSERVER_REST + value: '' + - name: RELATED_IMAGE_SKLEARNSERVER_GRPC + value: '' + - name: RELATED_IMAGE_XGBOOSTSERVER_REST + value: '' + - name: RELATED_IMAGE_XGBOOSTSERVER_GRPC + value: '' + - name: RELATED_IMAGE_MLFLOWSERVER_REST + value: '' + - name: RELATED_IMAGE_MLFLOWSERVER_GRPC + value: '' + - name: RELATED_IMAGE_TFPROXY_REST + value: '' + - name: RELATED_IMAGE_TFPROXY_GRPC + value: '' + - name: RELATED_IMAGE_TENSORFLOW + value: '' + - name: RELATED_IMAGE_EXPLAINER + value: '' - name: CREATE_RESOURCES value: '{{ .Values.createResources }}' - name: POD_NAMESPACE diff --git a/helm-charts/seldon-core-operator/values.yaml b/helm-charts/seldon-core-operator/values.yaml index fc5259cc94..0b17fe8cba 100644 --- a/helm-charts/seldon-core-operator/values.yaml +++ b/helm-charts/seldon-core-operator/values.yaml @@ -32,7 +32,7 @@ createResources: false # Use this to ensure all container run as non-root by default # For openshift leave blank as usually this will be injected automatically on an openshift cluster # to all pods. 
-defaultUserID: "" +defaultUserID: "8888" # ## Service Orchestrator (Executor) # The executor is the default service orchestrator which has superceeded the "Java Engine" @@ -46,7 +46,7 @@ executor: pullPolicy: IfNotPresent registry: docker.io repository: seldonio/seldon-core-executor - tag: 1.1.1-SNAPSHOT + tag: 1.1.1-rc prometheus: path: /prometheus serviceAccount: @@ -61,7 +61,7 @@ image: pullPolicy: IfNotPresent registry: docker.io repository: seldonio/seldon-core-operator - tag: 1.1.1-SNAPSHOT + tag: 1.1.1-rc manager: cpuLimit: 500m cpuRequest: 100m @@ -97,33 +97,33 @@ predictiveUnit: predictor_servers: MLFLOW_SERVER: grpc: - defaultImageVersion: "0.5" + defaultImageVersion: "1.1.1-rc" image: seldonio/mlflowserver_grpc rest: - defaultImageVersion: "0.5" + defaultImageVersion: "1.1.1-rc" image: seldonio/mlflowserver_rest SKLEARN_SERVER: grpc: - defaultImageVersion: "0.3" + defaultImageVersion: "1.1.1-rc" image: seldonio/sklearnserver_grpc rest: - defaultImageVersion: "0.3" + defaultImageVersion: "1.1.1-rc" image: seldonio/sklearnserver_rest TENSORFLOW_SERVER: grpc: - defaultImageVersion: "0.7" + defaultImageVersion: "1.1.1-rc" image: seldonio/tfserving-proxy_grpc rest: - defaultImageVersion: "0.7" + defaultImageVersion: "1.1.1-rc" image: seldonio/tfserving-proxy_rest tensorflow: true tfImage: tensorflow/serving:2.1.0 XGBOOST_SERVER: grpc: - defaultImageVersion: "0.4" + defaultImageVersion: "1.1.1-rc" image: seldonio/xgboostserver_grpc rest: - defaultImageVersion: "0.4" + defaultImageVersion: "1.1.1-rc" image: seldonio/xgboostserver_rest # ## Other @@ -152,7 +152,7 @@ engine: pullPolicy: IfNotPresent registry: docker.io repository: seldonio/engine - tag: 1.1.1-SNAPSHOT + tag: 1.1.1-rc logMessagesExternally: false port: 8000 prometheus: @@ -160,3 +160,8 @@ engine: serviceAccount: name: default user: 8888 + + +# Explainer image +explainer: + image: seldonio/alibiexplainer:1.1.1-rc diff --git a/integrations/tfserving/.s2i/bin/assemble b/integrations/tfserving/.s2i/bin/assemble new file mode 100644 index 0000000000..f3cac492de --- /dev/null +++ b/integrations/tfserving/.s2i/bin/assemble @@ -0,0 +1,15 @@ +#!/bin/bash +echo "Before assembling" + +/s2i/bin/assemble +rc=$? + +if [ $rc -eq 0 ]; then + echo "After successful assembling" +else + echo "After failed assembling" + exit $rc +fi + +mkdir -p /tmp/.s2i/ +cp image_metadata.json /tmp/.s2i/image_metadata.json diff --git a/integrations/tfserving/Makefile b/integrations/tfserving/Makefile index a732771fe0..09e39ac69b 100644 --- a/integrations/tfserving/Makefile +++ b/integrations/tfserving/Makefile @@ -1,20 +1,41 @@ -IMAGE_VERSION=0.7 -IMAGE_NAME = docker.io/seldonio/tfserving-proxy +SHELL := /bin/bash +VERSION := $(shell cat ../../version.txt) +IMAGE_NAME_BASE=tfserving-proxy +IMAGE_BASE=seldonio/${IMAGE_NAME_BASE} +KIND_NAME ?= kind -SELDON_CORE_DIR=../../.. +build_%: + s2i build \ + -E environment_$* \ + . \ + seldonio/seldon-core-s2i-python37-ubi8:${VERSION} \ + ${IMAGE_BASE}_$*:${VERSION} -.PHONY: build_rest -build_rest: - s2i build -E environment_rest . seldonio/seldon-core-s2i-python3:0.18 $(IMAGE_NAME)_rest:$(IMAGE_VERSION) +push_%: + docker push ${IMAGE_BASE}_$*:${VERSION} -.PHONY: build_grpc -build_grpc: - s2i build -E environment_grpc . 
seldonio/seldon-core-s2i-python3:0.18 $(IMAGE_NAME)_grpc:$(IMAGE_VERSION) +kind_load_%: + kind load -v 3 docker-image ${IMAGE_BASE}_$*:${VERSION} --name ${KIND_NAME} +.PHONY: build_all +build_all: build_rest build_grpc -push_to_dockerhub_rest: - docker push $(IMAGE_NAME)_rest:$(IMAGE_VERSION) +.PHONY: push_all +push_all: push_rest push_grpc -push_to_dockerhub_grpc: - docker push $(IMAGE_NAME)_grpc:$(IMAGE_VERSION) +.PHONY: kind_load_all +kind_load: build_all kind_load_rest kind_load_grpc +# https://connect.redhat.com/project/4098981/view +scan_rest=ospid-2cbfde5f-10d2-4cc1-9b1d-0dff7f6d1021 +# https://connect.redhat.com/project/4100321/view +scan_grpc=ospid-da09f299-64d1-4e19-86c9-af60a0ddb851 +redhat-image-scan-%: + docker pull ${IMAGE_BASE}_$*:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_tfproxy_$*} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag ${IMAGE_BASE}_$*:${VERSION} scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + docker push scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + +.PHONY: redhat-image-scan +redhat-image-scan: redhat-image-scan-rest redhat-image-scan-grpc diff --git a/integrations/tfserving/image_metadata.json b/integrations/tfserving/image_metadata.json new file mode 100644 index 0000000000..821258fe20 --- /dev/null +++ b/integrations/tfserving/image_metadata.json @@ -0,0 +1 @@ +{"labels": [{"name": "Seldon Tensorflow Proxy Server"}, {"vendor": "Seldon Technologies"}, {"version": "1.1.1-rc"}, {"release": "1"}, {"summary": "A Tensorflow Proxy Server for Seldon Core"}, {"description": "A proxy for Tensorflow Serving to allow Seldon data plane to call a Tensorflow Server"}]} \ No newline at end of file diff --git a/marketplaces/redhat/scan-images.py b/marketplaces/redhat/scan-images.py new file mode 100644 index 0000000000..4061925d0b --- /dev/null +++ b/marketplaces/redhat/scan-images.py @@ -0,0 +1,63 @@ +from subprocess import Popen, PIPE +import os + +def run_command(args, debug=False): + err, out = None, None + if debug: + print("cwd[{}]".format(os.getcwd())) + print("Executing: " + repr(args)) + p = Popen(args, stdout=PIPE, stderr=PIPE) + if p.wait() == 0: + out = p.stdout.read() + out = out.strip() + else: + err = {} + if p.stderr != None: + err["stderr"] = p.stderr.read() + err["stderr"] = err["stderr"].strip() + if p.stdout != None: + err["stdout"] = p.stdout.read() + err["stdout"] = err["stdout"].strip() + return err, out + +def scan_images(debug=False): + paths = [ + "operator", + "executor", + "engine", + "examples/models/mean_classifier", + "components/alibi-detect-server", + "components/seldon-request-logger", + "servers/sklearnserver", + "servers/mlflowserver", + "servers/xgboostserver", + "integrations/tfserving", + "components/alibi-explain-server", + "components/storage-initializer", + "servers/tfserving", + ] + + for path in paths: + args = [ + "make", + "-C", + "../../"+path, + "redhat-image-scan" + ] + err, out = run_command(args, debug) + if err == None: + print("updated {path}".format(**locals())) + else: + errStr = str(err["stderr"]) + if "The image tag you are pushing already exists." in errStr: + print(f"Warning: Image already exists for {path}.") + else: + print("error updating {path}".format(**locals())) + print(err) + + +if __name__ == "__main__": + with open('../../version.txt', 'r') as file: + version = file.read() + print(f"Will scan and upload images for {version}") + scan_images() diff --git 
a/operator/Dockerfile.redhat b/operator/Dockerfile.redhat index 5e8f8ecb5b..e88957383d 100644 --- a/operator/Dockerfile.redhat +++ b/operator/Dockerfile.redhat @@ -23,7 +23,7 @@ RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 GO111MODULE=on go build -a -o manager FROM registry.access.redhat.com/ubi8/ubi-minimal LABEL name="Seldon Operator" \ vendor="Seldon Technologies" \ - version="v1.1.0" \ + version="1.1.1-rc" \ release="1" \ summary="An Operator to deploy, monitor, explain and manage production machine learning models at scale." \ description="The Seldon operator for management, monitoring and operations of machine learning systems through the Seldon Engine. Once installed, the Seldon Operator provides multiple functions which facilitate the productisation, monitoring and maintenance of machine learning systems at scale." diff --git a/operator/Makefile b/operator/Makefile index b8884468ca..e937506130 100644 --- a/operator/Makefile +++ b/operator/Makefile @@ -1,3 +1,4 @@ +SHELL := /bin/bash VERSION := $(shell cat ../version.txt) # Image URL to use all building/pushing image targets IMAGE_NAME_BASE=seldon-core-operator @@ -137,7 +138,9 @@ docker-push-redhat: # password can be found at: https://connect.redhat.com/project/1366481/view redhat-image-scan: - docker login -u unused scan.connect.redhat.com + docker pull ${IMG_REDHAT} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_operator} | docker login -u unused scan.connect.redhat.com --password-stdin docker tag ${IMG_REDHAT} scan.connect.redhat.com/ospid-7f50cebe-122b-495a-a143-41426dfcb6c9/${IMG_VERSION_REDHAT} docker push scan.connect.redhat.com/ospid-7f50cebe-122b-495a-a143-41426dfcb6c9/${IMG_VERSION_REDHAT} diff --git a/operator/apis/machinelearning.seldon.io/v1/prepack.go b/operator/apis/machinelearning.seldon.io/v1/prepack.go new file mode 100644 index 0000000000..a575dd4d63 --- /dev/null +++ b/operator/apis/machinelearning.seldon.io/v1/prepack.go @@ -0,0 +1,169 @@ +package v1 + +import ( + "context" + "encoding/json" + "fmt" + corev1 "k8s.io/api/core/v1" + k8types "k8s.io/apimachinery/pkg/types" + "os" +) + +const ( + EnvSklearnServerImageRestRelated = "RELATED_IMAGE_SKLEARNSERVER_REST" + EnvSklearnserverImageGrpcRelated = "RELATED_IMAGE_SKLEARNSERVER_GRPC" + EnvXgboostserverImageRestRelated = "RELATED_IMAGE_XGBOOSTSERVER_REST" + EnvXgboostserverImageGrpcRelated = "RELATED_IMAGE_XGBOOSTSERVER_GRPC" + EnvMlflowserverImageRestRelated = "RELATED_IMAGE_MLFLOWSERVER_REST" + EnvMlflowserverImageGrpcRelated = "RELATED_IMAGE_MLFLOWSERVER_GRPC" + EnvTensorflowImageRelated = "RELATED_IMAGE_TENSORFLOW" + EnvTfproxyImageRestRelated = "RELATED_IMAGE_TFPROXY_REST" + EnvTfproxyImageGrpcRelated = "RELATED_IMAGE_TFPROXY_GRPC" + PrepackTensorflowName = "TENSORFLOW_SERVER" + PrepackSklearnName = "SKLEARN_SERVER" + PrepackXgboostName = "XGBOOST_SERVER" + PrepackMlflowName = "MLFLOW_SERVER" +) + +const PredictorServerConfigMapKeyName = "predictor_servers" + +type PredictorImageConfig struct { + ContainerImage string `json:"image"` + DefaultImageVersion string `json:"defaultImageVersion"` +} + +type PredictorServerConfig struct { + Tensorflow bool `json:"tensorflow,omitempty"` + TensorflowImage string `json:"tfImage,omitempty"` + RestConfig PredictorImageConfig `json:"rest,omitempty"` + GrpcConfig PredictorImageConfig `json:"grpc,omitempty"` +} + +var ( + ControllerConfigMapName = "seldon-config" + envSklearnServerRestImageRelated = os.Getenv(EnvSklearnServerImageRestRelated) + envSklearnServerGrpcImageRelated = 
os.Getenv(EnvSklearnserverImageGrpcRelated) + envXgboostServerRestImageRelated = os.Getenv(EnvXgboostserverImageRestRelated) + envXgboostServerGrpcImageRelated = os.Getenv(EnvXgboostserverImageGrpcRelated) + envMlflowServerRestImageRelated = os.Getenv(EnvMlflowserverImageRestRelated) + envMlflowServerGrpcImageRelated = os.Getenv(EnvMlflowserverImageGrpcRelated) + envTfserverServerImageRelated = os.Getenv(EnvTensorflowImageRelated) + envTfproxyServerRestImageRelated = os.Getenv(EnvTfproxyImageRestRelated) + envTfproxyServerGrpcImageRelated = os.Getenv(EnvTfproxyImageGrpcRelated) + relatedImageConfig = map[string]PredictorServerConfig{} +) + +func init() { + if envSklearnServerRestImageRelated != "" { + relatedImageConfig[PrepackSklearnName] = PredictorServerConfig{ + RestConfig: PredictorImageConfig{ + ContainerImage: envSklearnServerRestImageRelated, + }, + GrpcConfig: PredictorImageConfig{ + ContainerImage: envSklearnServerGrpcImageRelated, + }, + } + } + if envXgboostServerRestImageRelated != "" { + relatedImageConfig[PrepackXgboostName] = PredictorServerConfig{ + RestConfig: PredictorImageConfig{ + ContainerImage: envXgboostServerRestImageRelated, + }, + GrpcConfig: PredictorImageConfig{ + ContainerImage: envXgboostServerGrpcImageRelated, + }, + } + } + if envMlflowServerRestImageRelated != "" { + relatedImageConfig[PrepackMlflowName] = PredictorServerConfig{ + RestConfig: PredictorImageConfig{ + ContainerImage: envMlflowServerRestImageRelated, + }, + GrpcConfig: PredictorImageConfig{ + ContainerImage: envMlflowServerGrpcImageRelated, + }, + } + } + if envTfserverServerImageRelated != "" { + relatedImageConfig[PrepackTensorflowName] = PredictorServerConfig{ + Tensorflow: true, + TensorflowImage: envTfserverServerImageRelated, + RestConfig: PredictorImageConfig{ + ContainerImage: envTfproxyServerRestImageRelated, + }, + GrpcConfig: PredictorImageConfig{ + ContainerImage: envTfproxyServerGrpcImageRelated, + }, + } + } +} + +func IsPrepack(pu *PredictiveUnit) bool { + isPrepack := len(*pu.Implementation) > 0 && *pu.Implementation != SIMPLE_MODEL && *pu.Implementation != SIMPLE_ROUTER && *pu.Implementation != RANDOM_ABTEST && *pu.Implementation != AVERAGE_COMBINER && *pu.Implementation != UNKNOWN_IMPLEMENTATION + return isPrepack +} + +func getPredictorServerConfigs() (map[string]PredictorServerConfig, error) { + configMap := &corev1.ConfigMap{} + + err := C.Get(context.TODO(), k8types.NamespacedName{Name: ControllerConfigMapName, Namespace: ControllerNamespace}, configMap) + + if err != nil { + fmt.Println("Failed to find config map " + ControllerConfigMapName) + fmt.Println(err) + return map[string]PredictorServerConfig{}, err + } + return getPredictorServerConfigsFromMap(configMap) +} + +func getPredictorServerConfigsFromMap(configMap *corev1.ConfigMap) (map[string]PredictorServerConfig, error) { + predictorServerConfig := make(map[string]PredictorServerConfig) + if predictorConfig, ok := configMap.Data[PredictorServerConfigMapKeyName]; ok { + err := json.Unmarshal([]byte(predictorConfig), &predictorServerConfig) + if err != nil { + panic(fmt.Errorf("Unable to unmarshall %v json string due to %v ", PredictorServerConfigMapKeyName, err)) + } + } + + return predictorServerConfig, nil +} + +func getPrepackServerConfigWithRelated(serverName string, relatedImages map[string]PredictorServerConfig) *PredictorServerConfig { + //Use related images if present + if val, ok := relatedImages[serverName]; ok { + return &val + } + + ServersConfigs, err := getPredictorServerConfigs() + if err != nil { + 
seldondeploymentlog.Error(err, "Failed to read prepacked model servers from configmap") + return nil + } + ServerConfig, ok := ServersConfigs[serverName] + if !ok { + seldondeploymentlog.Error(nil, "No entry in predictors map for "+serverName) + return nil + } + return &ServerConfig +} + +func GetPrepackServerConfig(serverName string) *PredictorServerConfig { + return getPrepackServerConfigWithRelated(serverName, relatedImageConfig) +} + +func SetImageNameForPrepackContainer(pu *PredictiveUnit, c *corev1.Container, serverConfig *PredictorServerConfig) { + // Add image: ignore version if empty + if c.Image == "" { + if pu.Endpoint.Type == REST { + c.Image = serverConfig.RestConfig.ContainerImage + if serverConfig.RestConfig.DefaultImageVersion != "" { + c.Image = c.Image + ":" + serverConfig.RestConfig.DefaultImageVersion + } + } else { + c.Image = serverConfig.GrpcConfig.ContainerImage + if serverConfig.GrpcConfig.DefaultImageVersion != "" { + c.Image = c.Image + ":" + serverConfig.GrpcConfig.DefaultImageVersion + } + } + } +} diff --git a/operator/apis/machinelearning.seldon.io/v1/prepack_test.go b/operator/apis/machinelearning.seldon.io/v1/prepack_test.go new file mode 100644 index 0000000000..3e5b4c2292 --- /dev/null +++ b/operator/apis/machinelearning.seldon.io/v1/prepack_test.go @@ -0,0 +1,85 @@ +package v1 + +import ( + . "github.com/onsi/gomega" + corev1 "k8s.io/api/core/v1" + "testing" +) + +func TestImageSetNormal(t *testing.T) { + g := NewGomegaWithT(t) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) + + scenarios := map[string]struct { + pu *PredictiveUnit + config *PredictorServerConfig + desiredImageName string + }{ + "GRPC image with version": { + pu: &PredictiveUnit{Endpoint: &Endpoint{Type: GRPC}}, + config: &PredictorServerConfig{ + GrpcConfig: PredictorImageConfig{ContainerImage: "a", DefaultImageVersion: "1"}, + }, + desiredImageName: "a:1", + }, + "GRPC image with no version": { + pu: &PredictiveUnit{Endpoint: &Endpoint{Type: GRPC}}, + config: &PredictorServerConfig{ + GrpcConfig: PredictorImageConfig{ContainerImage: "a"}, + }, + desiredImageName: "a", + }, + "REST image with version": { + pu: &PredictiveUnit{Endpoint: &Endpoint{Type: REST}}, + config: &PredictorServerConfig{ + RestConfig: PredictorImageConfig{ContainerImage: "a", DefaultImageVersion: "1"}, + }, + desiredImageName: "a:1", + }, + "REST image with no version": { + pu: &PredictiveUnit{Endpoint: &Endpoint{Type: REST}}, + config: &PredictorServerConfig{ + RestConfig: PredictorImageConfig{ContainerImage: "a"}, + }, + desiredImageName: "a", + }, + } + + for name, scenario := range scenarios { + t.Logf("Scenario: %s", name) + con := &corev1.Container{} + SetImageNameForPrepackContainer(scenario.pu, con, scenario.config) + g.Expect(con.Image).To(Equal(scenario.desiredImageName)) + } +} + +func TestGetPredictorConfig(t *testing.T) { + g := NewGomegaWithT(t) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) + serverConfigs, err := getPredictorServerConfigs() + g.Expect(err).To(BeNil()) + scenarios := map[string]struct { + serverName string + relatedImageMap map[string]PredictorServerConfig + desiredConfig PredictorServerConfig + }{ + + "related image sklearn": { + serverName: PrepackSklearnName, + relatedImageMap: map[string]PredictorServerConfig{PrepackSklearnName: {RestConfig: PredictorImageConfig{ContainerImage: "a"}}}, + desiredConfig: PredictorServerConfig{RestConfig: PredictorImageConfig{ContainerImage: "a"}}, + }, + "default image sklearn": { + serverName: PrepackSklearnName, + 
relatedImageMap: map[string]PredictorServerConfig{}, + desiredConfig: serverConfigs[PrepackSklearnName], + }, + } + for name, scenario := range scenarios { + t.Logf("Scenario: %s", name) + config := getPrepackServerConfigWithRelated(scenario.serverName, scenario.relatedImageMap) + g.Expect(*config).To(Equal(scenario.desiredConfig)) + } +} diff --git a/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook.go b/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook.go index 1b28f0c230..b1d3fc8fc6 100644 --- a/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook.go +++ b/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook.go @@ -17,51 +17,31 @@ limitations under the License. package v1 import ( - "context" - "encoding/json" - "fmt" - "os" - "strconv" - "github.com/seldonio/seldon-core/operator/constants" corev1 "k8s.io/api/core/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" - k8types "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/validation/field" + "os" ctrl "sigs.k8s.io/controller-runtime" "sigs.k8s.io/controller-runtime/pkg/client" logf "sigs.k8s.io/controller-runtime/pkg/runtime/log" "sigs.k8s.io/controller-runtime/pkg/webhook" + "strconv" ) var ( // log is for logging in this package. seldondeploymentlog = logf.Log.WithName("seldondeployment") ControllerNamespace = GetEnv("POD_NAMESPACE", "seldon-system") - ControllerConfigMapName = "seldon-config" C client.Client envPredictiveUnitServicePort = os.Getenv(ENV_PREDICTIVE_UNIT_SERVICE_PORT) envPredictiveUnitServicePortMetrics = os.Getenv(ENV_PREDICTIVE_UNIT_SERVICE_PORT_METRICS) envPredictiveUnitMetricsPortName = GetEnv(ENV_PREDICTIVE_UNIT_METRICS_PORT_NAME, constants.DefaultMetricsPortName) ) -const PredictorServerConfigMapKeyName = "predictor_servers" - -type PredictorImageConfig struct { - ContainerImage string `json:"image"` - DefaultImageVersion string `json:"defaultImageVersion"` -} - -type PredictorServerConfig struct { - Tensorflow bool `json:"tensorflow,omitempty"` - TensorflowImage string `json:"tfImage,omitempty"` - RestConfig PredictorImageConfig `json:"rest,omitempty"` - GrpcConfig PredictorImageConfig `json:"grpc,omitempty"` -} - // Get an environment variable given by key or return the fallback. 
func GetEnv(key, fallback string) string { if value, ok := os.LookupEnv(key); ok { @@ -70,31 +50,6 @@ func GetEnv(key, fallback string) string { return fallback } -func getPredictorServerConfigs() (map[string]PredictorServerConfig, error) { - configMap := &corev1.ConfigMap{} - - err := C.Get(context.TODO(), k8types.NamespacedName{Name: ControllerConfigMapName, Namespace: ControllerNamespace}, configMap) - - if err != nil { - fmt.Println("Failed to find config map " + ControllerConfigMapName) - fmt.Println(err) - return map[string]PredictorServerConfig{}, err - } - return getPredictorServerConfigsFromMap(configMap) -} - -func getPredictorServerConfigsFromMap(configMap *corev1.ConfigMap) (map[string]PredictorServerConfig, error) { - predictorServerConfig := make(map[string]PredictorServerConfig) - if predictorConfig, ok := configMap.Data[PredictorServerConfigMapKeyName]; ok { - err := json.Unmarshal([]byte(predictorConfig), &predictorServerConfig) - if err != nil { - panic(fmt.Errorf("Unable to unmarshall %v json string due to %v ", PredictorServerConfigMapKeyName, err)) - } - } - - return predictorServerConfig, nil -} - func (r *SeldonDeployment) SetupWebhookWithManager(mgr ctrl.Manager) error { C = mgr.GetClient() return ctrl.NewWebhookManagedBy(mgr). @@ -126,40 +81,6 @@ func GetPort(name string, ports []corev1.ContainerPort) *corev1.ContainerPort { return nil } -func IsPrepack(pu *PredictiveUnit) bool { - isPrepack := len(*pu.Implementation) > 0 && *pu.Implementation != SIMPLE_MODEL && *pu.Implementation != SIMPLE_ROUTER && *pu.Implementation != RANDOM_ABTEST && *pu.Implementation != AVERAGE_COMBINER && *pu.Implementation != UNKNOWN_IMPLEMENTATION - return isPrepack -} - -func GetPrepackServerConfig(serverName string) PredictorServerConfig { - ServersConfigs, err := getPredictorServerConfigs() - - if err != nil { - seldondeploymentlog.Error(err, "Failed to read prepacked model servers from configmap") - } - ServerConfig, ok := ServersConfigs[serverName] - if !ok { - seldondeploymentlog.Error(nil, "No entry in predictors map for "+serverName) - } - return ServerConfig -} - -func SetImageNameForPrepackContainer(pu *PredictiveUnit, c *corev1.Container) { - //Add missing fields - // Add image - if c.Image == "" { - - ServerConfig := GetPrepackServerConfig(string(*pu.Implementation)) - - if pu.Endpoint.Type == REST { - c.Image = ServerConfig.RestConfig.ContainerImage + ":" + ServerConfig.RestConfig.DefaultImageVersion - } else { - c.Image = ServerConfig.GrpcConfig.ContainerImage + ":" + ServerConfig.GrpcConfig.DefaultImageVersion - } - - } -} - // ----- func addDefaultsToGraph(pu *PredictiveUnit) { @@ -329,11 +250,17 @@ func (r *SeldonDeploymentSpec) DefaultSeldonDeployment(mldepName string, namespa } } - getUpdatePortNumMap(con.Name, &nextPortNum, portMap) + getUpdatePortNumMap(pu.Name, &nextPortNum, portMap) portNum := portMap[pu.Name] r.setContainerPredictiveUnitDefaults(0, portNum, &nextMetricsPortNum, mldepName, namespace, &p, pu, con) - SetImageNameForPrepackContainer(pu, con) + //Only set image default for non tensorflow graphs + if r.Protocol != ProtocolTensorflow { + serverConfig := GetPrepackServerConfig(string(*pu.Implementation)) + if serverConfig != nil { + SetImageNameForPrepackContainer(pu, con, serverConfig) + } + } // if new Add container to componentSpecs if !existing { @@ -362,7 +289,7 @@ func (r *SeldonDeploymentSpec) DefaultSeldonDeployment(mldepName string, namespa // --- Validating // Check the predictive units to ensure the graph matches up with defined containers. 
-func checkPredictiveUnits(pu *PredictiveUnit, p *PredictorSpec, fldPath *field.Path, allErrs field.ErrorList) field.ErrorList { +func (r *SeldonDeploymentSpec) checkPredictiveUnits(pu *PredictiveUnit, p *PredictorSpec, fldPath *field.Path, allErrs field.ErrorList) field.ErrorList { if *pu.Implementation == UNKNOWN_IMPLEMENTATION { if GetContainerForPredictiveUnit(p, pu.Name) == nil { @@ -379,6 +306,11 @@ } c := GetContainerForPredictiveUnit(p, pu.Name) + //Current non tensorflow serving prepack servers can not handle tensorflow protocol + if r.Protocol == ProtocolTensorflow && (*pu.Implementation == PrepackSklearnName || *pu.Implementation == PrepackXgboostName || *pu.Implementation == PrepackMlflowName) { + allErrs = append(allErrs, field.Invalid(fldPath, pu.Name, "Prepackaged server does not handle tensorflow protocol "+string(*pu.Implementation))) + } + if c == nil || c.Image == "" { ServersConfigs, err := getPredictorServerConfigs() @@ -401,7 +333,7 @@ } for i := 0; i < len(pu.Children); i++ { - allErrs = checkPredictiveUnits(&pu.Children[i], p, fldPath.Index(i), allErrs) + allErrs = r.checkPredictiveUnits(&pu.Children[i], p, fldPath.Index(i), allErrs) } return allErrs @@ -476,7 +408,8 @@ func (r *SeldonDeploymentSpec) ValidateSeldonDeployment() error { allErrs = append(allErrs, field.Invalid(fldPath, p.Name, "Duplicate predictor name")) } predictorNames[p.Name] = true - allErrs = checkPredictiveUnits(&p.Graph, &p, field.NewPath("spec").Child("predictors").Index(i).Child("graph"), allErrs) + + allErrs = r.checkPredictiveUnits(&p.Graph, &p, field.NewPath("spec").Child("predictors").Index(i).Child("graph"), allErrs) } if len(transports) > 1 { diff --git a/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook_test.go b/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook_test.go index 2aa41fd30c..c96cf4bed0 100644 --- a/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook_test.go +++ b/operator/apis/machinelearning.seldon.io/v1/seldondeployment_webhook_test.go @@ -1,17 +1,104 @@ package v1 import ( - "testing" - + "context" . 
"github.com/onsi/gomega" "github.com/seldonio/seldon-core/operator/constants" + appsv1 "k8s.io/api/apps/v1" v1 "k8s.io/api/core/v1" + "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1beta1" "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" v12 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/runtime/serializer" + clientgoscheme "k8s.io/client-go/kubernetes/scheme" "sigs.k8s.io/controller-runtime/pkg/client/fake" + "testing" ) +func createScheme() *runtime.Scheme { + scheme := runtime.NewScheme() + _ = clientgoscheme.AddToScheme(scheme) + _ = appsv1.AddToScheme(scheme) + _ = v1.AddToScheme(scheme) + _ = v1beta1.AddToScheme(scheme) + _ = serializer.NewCodecFactory(scheme).UniversalDeserializer().Decode + return scheme +} + +func setupTestConfigMap() error { + scheme := createScheme() + C = fake.NewFakeClientWithScheme(scheme) + return C.Create(context.Background(), testConfigMap) +} + +var configs = map[string]string{ + "predictor_servers": `{ + "TENSORFLOW_SERVER": { + "tensorflow": true, + "tfImage": "tensorflow/serving:latest", + "rest": { + "image": "seldonio/tfserving-proxy_rest", + "defaultImageVersion": "0.7" + }, + "grpc": { + "image": "seldonio/tfserving-proxy_grpc", + "defaultImageVersion": "0.7" + } + }, + "SKLEARN_SERVER": { + "rest": { + "image": "seldonio/sklearnserver_rest", + "defaultImageVersion": "0.2" + }, + "grpc": { + "image": "seldonio/sklearnserver_grpc", + "defaultImageVersion": "0.2" + } + }, + "XGBOOST_SERVER": { + "rest": { + "image": "seldonio/xgboostserver_rest", + "defaultImageVersion": "0.2" + }, + "grpc": { + "image": "seldonio/xgboostserver_grpc", + "defaultImageVersion": "0.2" + } + }, + "MLFLOW_SERVER": { + "rest": { + "image": "seldonio/mlflowserver_rest", + "defaultImageVersion": "0.2" + }, + "grpc": { + "image": "seldonio/mlflowserver_grpc", + "defaultImageVersion": "0.2" + } + }, + "CUSTOM_SERVER": { + "rest": { + "image": "custom_rest", + "defaultImageVersion": "0.2" + }, + "grpc": { + "image": "custom_grpc", + "defaultImageVersion": "0.2" + } + } + }`, +} + +// Create configmap +var testConfigMap = &v1.ConfigMap{ + ObjectMeta: metav1.ObjectMeta{ + Name: ControllerConfigMapName, + Namespace: ControllerNamespace, + }, + Data: configs, +} + func TestValidateBadProtocol(t *testing.T) { g := NewGomegaWithT(t) spec := &SeldonDeploymentSpec{ @@ -468,8 +555,8 @@ func TestPortUseExisting(t *testing.T) { func TestMetricsPortAddedToPrepacked(t *testing.T) { g := NewGomegaWithT(t) - scheme := runtime.NewScheme() - C = fake.NewFakeClientWithScheme(scheme) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) impl := PredictiveUnitImplementation(constants.PrePackedServerTensorflow) spec := &SeldonDeploymentSpec{ Predictors: []PredictorSpec{ @@ -501,8 +588,8 @@ func TestMetricsPortAddedToPrepacked(t *testing.T) { func TestPredictorProtocolGrpc(t *testing.T) { g := NewGomegaWithT(t) - scheme := runtime.NewScheme() - C = fake.NewFakeClientWithScheme(scheme) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) impl := PredictiveUnitImplementation(constants.PrePackedServerTensorflow) spec := &SeldonDeploymentSpec{ Transport: TransportGrpc, @@ -532,8 +619,8 @@ func TestPredictorProtocolGrpc(t *testing.T) { func TestPrepackedWithExistingContainer(t *testing.T) { g := NewGomegaWithT(t) - scheme := runtime.NewScheme() - C = fake.NewFakeClientWithScheme(scheme) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) impl := 
PredictiveUnitImplementation(constants.PrePackedServerTensorflow) spec := &SeldonDeploymentSpec{ Transport: TransportGrpc, @@ -564,8 +651,91 @@ func TestPrepackedWithExistingContainer(t *testing.T) { g.Expect(metricPort).NotTo(BeNil()) g.Expect(metricPort.ContainerPort).To(Equal(constants.FirstMetricsPortNumber)) - // empty image name as no configmap - but is set - g.Expect(spec.Predictors[0].ComponentSpecs[0].Spec.Containers[0].Image).To(Equal(":")) + // image set from configMap + g.Expect(spec.Predictors[0].ComponentSpecs[0].Spec.Containers[0].Image).To(Equal("seldonio/tfserving-proxy_grpc:0.7")) + + //Graph + pu := GetPredictiveUnit(&spec.Predictors[0].Graph, "classifier") + g.Expect(pu).ToNot(BeNil()) + g.Expect(pu.Endpoint.ServicePort).To(Equal(constants.FirstPortNumber)) + g.Expect(pu.Endpoint.ServiceHost).To(Equal(constants.DNSLocalHost)) + g.Expect(pu.Endpoint.Type).To(Equal(GRPC)) +} + +func TestPrepackedWithCustom(t *testing.T) { + g := NewGomegaWithT(t) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) + impl := PredictiveUnitImplementation("CUSTOM_SERVER") + spec := &SeldonDeploymentSpec{ + Transport: TransportGrpc, + Predictors: []PredictorSpec{ + { + Name: "p1", + Graph: PredictiveUnit{ + Name: "classifier", + Implementation: &impl, + }, + }, + }, + } + + spec.DefaultSeldonDeployment("mydep", "default") + metricPort := GetPort(envPredictiveUnitMetricsPortName, spec.Predictors[0].ComponentSpecs[0].Spec.Containers[0].Ports) + g.Expect(metricPort).NotTo(BeNil()) + g.Expect(metricPort.ContainerPort).To(Equal(constants.FirstMetricsPortNumber)) + g.Expect(metricPort.Name).To(Equal(envPredictiveUnitMetricsPortName)) + + // image set from configMap + g.Expect(spec.Predictors[0].ComponentSpecs[0].Spec.Containers[0].Image).To(Equal("custom_grpc:0.2")) + + //Graph + pu := GetPredictiveUnit(&spec.Predictors[0].Graph, "classifier") + g.Expect(pu).ToNot(BeNil()) + g.Expect(pu.Endpoint.ServicePort).To(Equal(constants.FirstPortNumber)) + g.Expect(pu.Endpoint.ServiceHost).To(Equal(constants.DNSLocalHost)) + g.Expect(pu.Endpoint.Type).To(Equal(GRPC)) +} + +func TestPrepackedWithExistingContainerAndImage(t *testing.T) { + g := NewGomegaWithT(t) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) + impl := PredictiveUnitImplementation(constants.PrePackedServerTensorflow) + image := "myimage:0.1" + spec := &SeldonDeploymentSpec{ + Transport: TransportGrpc, + Predictors: []PredictorSpec{ + { + Name: "p1", + ComponentSpecs: []*SeldonPodSpec{ + { + Spec: v1.PodSpec{ + Containers: []v1.Container{ + { + Name: "classifier", + Image: image, + }, + }, + }, + }, + }, + Graph: PredictiveUnit{ + Name: "classifier", + Implementation: &impl, + }, + }, + }, + } + + spec.DefaultSeldonDeployment("mydep", "default") + metricPort := GetPort(envPredictiveUnitMetricsPortName, spec.Predictors[0].ComponentSpecs[0].Spec.Containers[0].Ports) + g.Expect(metricPort).NotTo(BeNil()) + g.Expect(metricPort.ContainerPort).To(Equal(constants.FirstMetricsPortNumber)) + g.Expect(metricPort.Name).To(Equal(envPredictiveUnitMetricsPortName)) + + // image set from configMap + g.Expect(spec.Predictors[0].ComponentSpecs[0].Spec.Containers[0].Image).To(Equal(image)) //Graph pu := GetPredictiveUnit(&spec.Predictors[0].Graph, "classifier") @@ -577,8 +747,8 @@ func TestPrepackedWithExistingContainer(t *testing.T) { func TestMetricsPortAddedToTwoPrepacked(t *testing.T) { g := NewGomegaWithT(t) - scheme := runtime.NewScheme() - C = fake.NewFakeClientWithScheme(scheme) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) 
impl := PredictiveUnitImplementation(constants.PrePackedServerTensorflow) spec := &SeldonDeploymentSpec{ Predictors: []PredictorSpec{ @@ -623,8 +793,8 @@ func TestMetricsPortAddedToTwoPrepacked(t *testing.T) { func TestDefaultPrepackagedServerType(t *testing.T) { g := NewGomegaWithT(t) - scheme := runtime.NewScheme() - C = fake.NewFakeClientWithScheme(scheme) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) impl := PredictiveUnitImplementation(constants.PrePackedServerTensorflow) spec := &SeldonDeploymentSpec{ Predictors: []PredictorSpec{ @@ -917,6 +1087,59 @@ func TestDefaultABTest(t *testing.T) { g.Expect(*graph.Children[1].Type).To(Equal(MODEL)) } +func TestValidateTensorflowProtocolNormalPrepackaged(t *testing.T) { + g := NewGomegaWithT(t) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) + impl := PredictiveUnitImplementation(constants.PrePackedServerSklearn) + spec := &SeldonDeploymentSpec{ + Protocol: ProtocolTensorflow, + Predictors: []PredictorSpec{ + { + Name: "p1", + Graph: PredictiveUnit{ + Name: "classifier", + Implementation: &impl, + ModelURI: "s3://mybucket/model", + }, + }, + }, + } + + spec.DefaultSeldonDeployment("mydep", "default") + err = spec.ValidateSeldonDeployment() + g.Expect(err).ToNot(BeNil()) + serr := err.(*errors.StatusError) + g.Expect(serr.Status().Code).To(Equal(int32(422))) + g.Expect(len(serr.Status().Details.Causes)).To(Equal(1)) + g.Expect(serr.Status().Details.Causes[0].Type).To(Equal(v12.CauseTypeFieldValueInvalid)) + g.Expect(serr.Status().Details.Causes[0].Field).To(Equal("spec.predictors[0].graph")) +} + +func TestValidateTensorflowProtocolNormal(t *testing.T) { + g := NewGomegaWithT(t) + err := setupTestConfigMap() + g.Expect(err).To(BeNil()) + impl := PredictiveUnitImplementation(constants.PrePackedServerTensorflow) + spec := &SeldonDeploymentSpec{ + Protocol: ProtocolTensorflow, + Predictors: []PredictorSpec{ + { + Name: "p1", + Graph: PredictiveUnit{ + Name: "classifier", + Implementation: &impl, + ModelURI: "s3://mybucket/model", + }, + }, + }, + } + + spec.DefaultSeldonDeployment("mydep", "default") + err = spec.ValidateSeldonDeployment() + g.Expect(err).To(BeNil()) +} + func TestPredictorNoGraph(t *testing.T) { g := NewGomegaWithT(t) scheme := runtime.NewScheme() diff --git a/operator/config/manager/configmap.yaml b/operator/config/manager/configmap.yaml index 199850077e..92454e1452 100644 --- a/operator/config/manager/configmap.yaml +++ b/operator/config/manager/configmap.yaml @@ -19,44 +19,44 @@ data: { "TENSORFLOW_SERVER": { "tensorflow": true, - "tfImage": "tensorflow/serving:latest", + "tfImage": "tensorflow/serving:2.1.0", "rest": { "image": "seldonio/tfserving-proxy_rest", - "defaultImageVersion": "0.7" + "defaultImageVersion": "1.1.1-rc" }, "grpc": { "image": "seldonio/tfserving-proxy_grpc", - "defaultImageVersion": "0.7" + "defaultImageVersion": "1.1.1-rc" } }, "SKLEARN_SERVER": { "rest": { "image": "seldonio/sklearnserver_rest", - "defaultImageVersion": "0.3" + "defaultImageVersion": "1.1.1-rc" }, "grpc": { "image": "seldonio/sklearnserver_grpc", - "defaultImageVersion": "0.3" + "defaultImageVersion": "1.1.1-rc" } }, "XGBOOST_SERVER": { "rest": { "image": "seldonio/xgboostserver_rest", - "defaultImageVersion": "0.4" + "defaultImageVersion": "1.1.1-rc" }, "grpc": { "image": "seldonio/xgboostserver_grpc", - "defaultImageVersion": "0.4" + "defaultImageVersion": "1.1.1-rc" } }, "MLFLOW_SERVER": { "rest": { "image": "seldonio/mlflowserver_rest", - "defaultImageVersion": "0.5" + "defaultImageVersion": "1.1.1-rc" }, 
"grpc": { "image": "seldonio/mlflowserver_grpc", - "defaultImageVersion": "0.5" + "defaultImageVersion": "1.1.1-rc" } } } @@ -68,3 +68,7 @@ data: "cpuRequest": "100m", "cpuLimit": "1" } + explainer: |- + { + "image" : "seldonio/alibiexplainer:1.1.1-SNAPSHOT" + } diff --git a/operator/config/manager/kustomization.yaml b/operator/config/manager/kustomization.yaml index bf748b8b43..cbd05a5e5a 100644 --- a/operator/config/manager/kustomization.yaml +++ b/operator/config/manager/kustomization.yaml @@ -7,4 +7,4 @@ kind: Kustomization images: - name: controller newName: seldonio/seldon-core-operator - newTag: 1.1.1-SNAPSHOT + newTag: 1.1.1-rc diff --git a/operator/config/manager/manager.yaml b/operator/config/manager/manager.yaml index 87cae2a48a..7d15087f12 100644 --- a/operator/config/manager/manager.yaml +++ b/operator/config/manager/manager.yaml @@ -39,6 +39,28 @@ spec: value: "" - name: RELATED_IMAGE_ENGINE value: "" + - name: RELATED_IMAGE_STORAGE_INITIALIZER + value: "" + - name: RELATED_IMAGE_SKLEARNSERVER_REST + value: "" + - name: RELATED_IMAGE_SKLEARNSERVER_GRPC + value: "" + - name: RELATED_IMAGE_XGBOOSTSERVER_REST + value: "" + - name: RELATED_IMAGE_XGBOOSTSERVER_GRPC + value: "" + - name: RELATED_IMAGE_MLFLOWSERVER_REST + value: "" + - name: RELATED_IMAGE_MLFLOWSERVER_GRPC + value: "" + - name: RELATED_IMAGE_TFPROXY_REST + value: "" + - name: RELATED_IMAGE_TFPROXY_GRPC + value: "" + - name: RELATED_IMAGE_TENSORFLOW + value: "" + - name: RELATED_IMAGE_EXPLAINER + value: "" - name: CREATE_RESOURCES value: "false" - name: POD_NAMESPACE @@ -52,7 +74,7 @@ spec: - name: AMBASSADOR_SINGLE_NAMESPACE value: "false" - name: ENGINE_CONTAINER_IMAGE_AND_VERSION - value: docker.io/seldonio/engine:1.1.1-SNAPSHOT + value: docker.io/seldonio/engine:1.1.1-rc - name: ENGINE_CONTAINER_IMAGE_PULL_POLICY value: IfNotPresent - name: ENGINE_CONTAINER_SERVICE_ACCOUNT_NAME @@ -80,7 +102,7 @@ spec: - name: USE_EXECUTOR value: "true" - name: EXECUTOR_CONTAINER_IMAGE_AND_VERSION - value: seldonio/seldon-core-executor:1.1.1-SNAPSHOT + value: seldonio/seldon-core-executor:1.1.1-rc - name: EXECUTOR_CONTAINER_IMAGE_PULL_POLICY value: IfNotPresent - name: EXECUTOR_PROMETHEUS_PATH diff --git a/operator/controllers/model_initializer_injector.go b/operator/controllers/model_initializer_injector.go index 3a148f1799..f1e9ea0f38 100644 --- a/operator/controllers/model_initializer_injector.go +++ b/operator/controllers/model_initializer_injector.go @@ -25,8 +25,7 @@ import ( "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/kubernetes" - ctrl "sigs.k8s.io/controller-runtime" - "sigs.k8s.io/controller-runtime/pkg/client" + "os" ) // TODO: change image to seldon? 
is at least configurable by configmap now (with fixed version there) @@ -41,13 +40,23 @@ const ( PvcSourceMountPath = "/mnt/pvc" ModelInitializerVolumeSuffix = "provision-location" ModelInitializerContainerSuffix = "model-initializer" + EnvStorageInitializerImageRelated = "RELATED_IMAGE_STORAGE_INITIALIZER" ) var ( - ControllerNamespace = GetEnv("POD_NAMESPACE", "seldon-system") - ControllerConfigMapName = "seldon-config" + ControllerNamespace = GetEnv("POD_NAMESPACE", "seldon-system") + ControllerConfigMapName = "seldon-config" + envStorageInitializerImage = os.Getenv(EnvStorageInitializerImageRelated) ) +type ModelInitialiser struct { + clientset kubernetes.Interface +} + +func NewModelInitializer(clientset kubernetes.Interface) *ModelInitialiser { + return &ModelInitialiser{clientset: clientset} +} + type StorageInitializerConfig struct { Image string `json:"image"` CpuRequest string `json:"cpuRequest"` @@ -56,22 +65,19 @@ type StorageInitializerConfig struct { MemoryLimit string `json:"memoryLimit"` } -func credentialsBuilder(Client client.Client) (credentialsBuilder *credentials.CredentialBuilder, err error) { - - clientset := kubernetes.NewForConfigOrDie(ctrl.GetConfigOrDie()) - configMap, err := clientset.CoreV1().ConfigMaps(ControllerNamespace).Get(ControllerConfigMapName, metav1.GetOptions{}) +func (mi *ModelInitialiser) credentialsBuilder() (credentialsBuilder *credentials.CredentialBuilder, err error) { + configMap, err := mi.clientset.CoreV1().ConfigMaps(ControllerNamespace).Get(ControllerConfigMapName, metav1.GetOptions{}) if err != nil { //log.Error(err, "Failed to find config map", "name", ControllerConfigMapName) return nil, err } - credentialBuilder := credentials.NewCredentialBulder(Client, configMap) + credentialBuilder := credentials.NewCredentialBulder(configMap, mi.clientset) return credentialBuilder, nil } -func getStorageInitializerConfigs(Client client.Client) (*StorageInitializerConfig, error) { - clientset := kubernetes.NewForConfigOrDie(ctrl.GetConfigOrDie()) - configMap, err := clientset.CoreV1().ConfigMaps(ControllerNamespace).Get(ControllerConfigMapName, metav1.GetOptions{}) +func (mi *ModelInitialiser) getStorageInitializerConfigs() (*StorageInitializerConfig, error) { + configMap, err := mi.clientset.CoreV1().ConfigMaps(ControllerNamespace).Get(ControllerConfigMapName, metav1.GetOptions{}) if err != nil { //log.Error(err, "Failed to find config map", "name", ControllerConfigMapName) return nil, err @@ -103,7 +109,7 @@ func getStorageInitializerConfigsFromMap(configMap *corev1.ConfigMap) (*StorageI } // InjectModelInitializer injects an init container to provision model data -func InjectModelInitializer(deployment *appsv1.Deployment, containerName string, srcURI string, serviceAccountName string, envSecretRefName string, Client client.Client) (deploy *appsv1.Deployment, err error) { +func (mi *ModelInitialiser) InjectModelInitializer(deployment *appsv1.Deployment, containerName string, srcURI string, serviceAccountName string, envSecretRefName string) (deploy *appsv1.Deployment, err error) { if srcURI == "" { return deployment, nil @@ -194,13 +200,15 @@ func InjectModelInitializer(deployment *appsv1.Deployment, containerName string, } modelInitializerMounts = append(modelInitializerMounts, sharedVolumeWriteMount) - config, err := getStorageInitializerConfigs(Client) + config, err := mi.getStorageInitializerConfigs() if err != nil { return nil, err } storageInitializerImage := ModelInitializerContainerImage + ":" + ModelInitializerContainerVersion - if config 
!= nil && config.Image != "" { + if envStorageInitializerImage != "" { + storageInitializerImage = envStorageInitializerImage + } else if config != nil && config.Image != "" { storageInitializerImage = config.Image } @@ -233,7 +241,7 @@ func InjectModelInitializer(deployment *appsv1.Deployment, containerName string, podSpec.Volumes = append(podSpec.Volumes, podVolumes...) // Inject credentials - credentialsBuilder, err := credentialsBuilder(Client) + credentialsBuilder, err := mi.credentialsBuilder() if err != nil { return nil, err } diff --git a/operator/controllers/model_initializer_injector_test.go b/operator/controllers/model_initializer_injector_test.go new file mode 100644 index 0000000000..83b9be0677 --- /dev/null +++ b/operator/controllers/model_initializer_injector_test.go @@ -0,0 +1,65 @@ +package controllers + +import ( + . "github.com/onsi/gomega" + appsv1 "k8s.io/api/apps/v1" + v1 "k8s.io/api/core/v1" + "k8s.io/client-go/kubernetes/fake" + "testing" +) + +func TestStorageInitalizerInjector(t *testing.T) { + g := NewGomegaWithT(t) + scheme = createScheme() + client := fake.NewSimpleClientset() + _, err := client.CoreV1().ConfigMaps(ControllerNamespace).Create(configMap) + g.Expect(err).To(BeNil()) + mi := NewModelInitializer(client) + containerName := "classifier" + d := appsv1.Deployment{ + Spec: appsv1.DeploymentSpec{ + Template: v1.PodTemplateSpec{ + Spec: v1.PodSpec{ + Containers: []v1.Container{ + { + Name: containerName, + }, + }, + }, + }, + }, + } + _, err = mi.InjectModelInitializer(&d, containerName, "gs://mybucket/mymodel", "", "") + g.Expect(err).To(BeNil()) + g.Expect(len(d.Spec.Template.Spec.InitContainers)).To(Equal(1)) + g.Expect(d.Spec.Template.Spec.InitContainers[0].Image).To(Equal("gcr.io/kfserving/storage-initializer:0.2.2")) +} + +func TestStorageInitalizerInjectorWithRelatedImage(t *testing.T) { + g := NewGomegaWithT(t) + scheme = createScheme() + client := fake.NewSimpleClientset() + _, err := client.CoreV1().ConfigMaps(ControllerNamespace).Create(configMap) + g.Expect(err).To(BeNil()) + mi := NewModelInitializer(client) + containerName := "classifier" + d := appsv1.Deployment{ + Spec: appsv1.DeploymentSpec{ + Template: v1.PodTemplateSpec{ + Spec: v1.PodSpec{ + Containers: []v1.Container{ + { + Name: containerName, + }, + }, + }, + }, + }, + } + envStorageInitializerImage = "abc:1.2" + _, err = mi.InjectModelInitializer(&d, containerName, "gs://mybucket/mymodel", "", "") + g.Expect(err).To(BeNil()) + g.Expect(len(d.Spec.Template.Spec.InitContainers)).To(Equal(1)) + g.Expect(d.Spec.Template.Spec.InitContainers[0].Image).To(Equal(envStorageInitializerImage)) + envStorageInitializerImage = "" +} diff --git a/operator/controllers/resources/credentials/service_account_credentials.go b/operator/controllers/resources/credentials/service_account_credentials.go index 2c77316aca..4ef289350f 100644 --- a/operator/controllers/resources/credentials/service_account_credentials.go +++ b/operator/controllers/resources/credentials/service_account_credentials.go @@ -25,8 +25,6 @@ import ( v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/kubernetes" - ctrl "sigs.k8s.io/controller-runtime" - "sigs.k8s.io/controller-runtime/pkg/client" logf "sigs.k8s.io/controller-runtime/pkg/runtime/log" ) @@ -40,13 +38,13 @@ type CredentialConfig struct { } type CredentialBuilder struct { - client client.Client - config CredentialConfig + clientset kubernetes.Interface + config CredentialConfig } var log = logf.Log.WithName("CredentialBulder") -func 
NewCredentialBulder(client client.Client, config *v1.ConfigMap) *CredentialBuilder { +func NewCredentialBulder(config *v1.ConfigMap, clientset kubernetes.Interface) *CredentialBuilder { credentialConfig := CredentialConfig{} if credential, ok := config.Data[CredentialConfigKeyName]; ok { err := json.Unmarshal([]byte(credential), &credentialConfig) @@ -55,8 +53,8 @@ func NewCredentialBulder(client client.Client, config *v1.ConfigMap) *Credential } } return &CredentialBuilder{ - client: client, - config: credentialConfig, + clientset: clientset, + config: credentialConfig, } } @@ -77,14 +75,13 @@ func (c *CredentialBuilder) CreateSecretVolumeAndEnv(namespace string, serviceAc gcsCredentialFileName = c.config.GCS.GCSCredentialFileName } - clientset := kubernetes.NewForConfigOrDie(ctrl.GetConfigOrDie()) - serviceAccount, err := clientset.CoreV1().ServiceAccounts(namespace).Get(serviceAccountName, metav1.GetOptions{}) + serviceAccount, err := c.clientset.CoreV1().ServiceAccounts(namespace).Get(serviceAccountName, metav1.GetOptions{}) if err != nil { log.Error(err, "Failed to find service account", "ServiceAccountName", serviceAccountName) return nil } for _, secretRef := range serviceAccount.Secrets { - secret, err := clientset.CoreV1().Secrets(namespace).Get(secretRef.Name, metav1.GetOptions{}) + secret, err := c.clientset.CoreV1().Secrets(namespace).Get(secretRef.Name, metav1.GetOptions{}) if err != nil { log.Error(err, "Failed to find secret", "SecretName", secretRef.Name) continue diff --git a/operator/controllers/seldondeployment_controller.go b/operator/controllers/seldondeployment_controller.go index 1b95fbcbef..8ef20a8bb2 100644 --- a/operator/controllers/seldondeployment_controller.go +++ b/operator/controllers/seldondeployment_controller.go @@ -20,6 +20,8 @@ import ( "bytes" "context" "fmt" + "k8s.io/client-go/kubernetes" + _ "k8s.io/client-go/plugin/pkg/client/auth" "net/url" "strconv" "strings" @@ -71,6 +73,7 @@ type SeldonDeploymentReconciler struct { Scheme *runtime.Scheme Namespace string Recorder record.EventRecorder + ClientSet kubernetes.Interface } //---------------- Old part @@ -496,7 +499,8 @@ func (r *SeldonDeploymentReconciler) createComponents(mlDep *machinelearningv1.S c.deployments = append(c.deployments, deploy) } - err = createStandaloneModelServers(r, mlDep, &p, &c, &p.Graph, securityContext) + pi := NewPrePackedInitializer(r.ClientSet) + err = pi.createStandaloneModelServers(mlDep, &p, &c, &p.Graph, securityContext) if err != nil { return nil, err } @@ -573,7 +577,8 @@ func (r *SeldonDeploymentReconciler) createComponents(mlDep *machinelearningv1.S externalPorts[i] = httpGrpcPorts{httpPort: httpPort, grpcPort: grpcPort} } - err = createExplainer(r, mlDep, &p, &c, pSvcName, securityContext, log) + ei := NewExplainerInitializer(r.ClientSet) + err = ei.createExplainer(mlDep, &p, &c, pSvcName, securityContext, log) if err != nil { return nil, err } diff --git a/operator/controllers/seldondeployment_explainers.go b/operator/controllers/seldondeployment_explainers.go index 44aa2fdb02..68498f971c 100644 --- a/operator/controllers/seldondeployment_explainers.go +++ b/operator/controllers/seldondeployment_explainers.go @@ -17,10 +17,13 @@ limitations under the License. 
package controllers import ( + "fmt" + "k8s.io/client-go/kubernetes" "sort" "strconv" "strings" + "encoding/json" "github.com/go-logr/logr" machinelearningv1 "github.com/seldonio/seldon-core/operator/apis/machinelearning.seldon.io/v1" "github.com/seldonio/seldon-core/operator/constants" @@ -30,9 +33,51 @@ import ( corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/intstr" + "os" ) -func createExplainer(r *SeldonDeploymentReconciler, mlDep *machinelearningv1.SeldonDeployment, p *machinelearningv1.PredictorSpec, c *components, pSvcName string, podSecurityContect *corev1.PodSecurityContext, log logr.Logger) error { +const ( + ExplainerConfigMapKeyName = "explainer" + EnvExplainerImageRelated = "RELATED_IMAGE_EXPLAINER" +) + +var ( + envExplainerImage = os.Getenv(EnvExplainerImageRelated) +) + +type ExplainerInitialiser struct { + clientset kubernetes.Interface +} + +func NewExplainerInitializer(clientset kubernetes.Interface) *ExplainerInitialiser { + return &ExplainerInitialiser{clientset: clientset} +} + +type ExplainerConfig struct { + Image string `json:"image"` +} + +func (ei *ExplainerInitialiser) getExplainerConfigs() (*ExplainerConfig, error) { + configMap, err := ei.clientset.CoreV1().ConfigMaps(ControllerNamespace).Get(ControllerConfigMapName, metav1.GetOptions{}) + if err != nil { + //log.Error(err, "Failed to find config map", "name", ControllerConfigMapName) + return nil, err + } + return getExplainerConfigsFromMap(configMap) +} + +func getExplainerConfigsFromMap(configMap *corev1.ConfigMap) (*ExplainerConfig, error) { + explainerConfig := &ExplainerConfig{} + if initializerConfig, ok := configMap.Data[ExplainerConfigMapKeyName]; ok { + err := json.Unmarshal([]byte(initializerConfig), &explainerConfig) + if err != nil { + panic(fmt.Errorf("Unable to unmarshall %v json string due to %v ", ExplainerConfigMapKeyName, err)) + } + } + return explainerConfig, nil +} + +func (ei *ExplainerInitialiser) createExplainer(mlDep *machinelearningv1.SeldonDeployment, p *machinelearningv1.PredictorSpec, c *components, pSvcName string, podSecurityContect *corev1.PodSecurityContext, log logr.Logger) error { if !isEmptyExplainer(p.Explainer) { @@ -54,9 +99,17 @@ func createExplainer(r *SeldonDeploymentReconciler, mlDep *machinelearningv1.Sel p.Graph.Endpoint = &machinelearningv1.Endpoint{Type: machinelearningv1.REST} } + // Image from configMap or Related Image env var if it's not set if explainerContainer.Image == "" { - // TODO: should use explainer type but this is the only one available currently - explainerContainer.Image = "seldonio/alibiexplainer:1.1.0" + if envExplainerImage != "" { + explainerContainer.Image = envExplainerImage + } else { + config, err := ei.getExplainerConfigs() + if err != nil { + return err + } + explainerContainer.Image = config.Image + } } // explainer can get port from spec or from containerSpec or fall back on default @@ -152,7 +205,9 @@ func createExplainer(r *SeldonDeploymentReconciler, mlDep *machinelearningv1.Sel if p.Explainer.ModelUri != "" { var err error - deploy, err = InjectModelInitializer(deploy, explainerContainer.Name, p.Explainer.ModelUri, p.Explainer.ServiceAccountName, p.Explainer.EnvSecretRefName, r.Client) + + mi := NewModelInitializer(ei.clientset) + deploy, err = mi.InjectModelInitializer(deploy, explainerContainer.Name, p.Explainer.ModelUri, p.Explainer.ServiceAccountName, p.Explainer.EnvSecretRefName) if err != nil { return err } diff --git a/operator/controllers/seldondeployment_explainers_test.go 
b/operator/controllers/seldondeployment_explainers_test.go index 6a5c0d502f..dce99902c1 100644 --- a/operator/controllers/seldondeployment_explainers_test.go +++ b/operator/controllers/seldondeployment_explainers_test.go @@ -28,10 +28,75 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/rand" + "k8s.io/client-go/kubernetes/fake" ctrl "sigs.k8s.io/controller-runtime" + "testing" "time" ) +func createTestSDepWithExplainer() *machinelearningv1.SeldonDeployment { + var modelType = machinelearningv1.MODEL + key := types.NamespacedName{ + Name: "dep", + Namespace: "default", + } + return &machinelearningv1.SeldonDeployment{ + ObjectMeta: metav1.ObjectMeta{ + Name: key.Name, + Namespace: key.Namespace, + }, + Spec: machinelearningv1.SeldonDeploymentSpec{ + Name: "mydep", + Predictors: []machinelearningv1.PredictorSpec{ + { + Name: "p1", + ComponentSpecs: []*machinelearningv1.SeldonPodSpec{ + { + Spec: v1.PodSpec{ + Containers: []v1.Container{ + { + Image: "seldonio/mock_classifier:1.0", + Name: "classifier", + }, + }, + }, + }, + }, + Graph: machinelearningv1.PredictiveUnit{ + Name: "classifier", + Type: &modelType, + }, + Explainer: &machinelearningv1.Explainer{ + Type: machinelearningv1.AlibiAnchorsTabularExplainer, + }, + }, + }, + }, + } +} + +func TestExplainerImageRelated(t *testing.T) { + g := NewGomegaWithT(t) + scheme = createScheme() + client := fake.NewSimpleClientset() + _, err := client.CoreV1().ConfigMaps(ControllerNamespace).Create(configMap) + g.Expect(err).To(BeNil()) + ei := NewExplainerInitializer(client) + sdep := createTestSDepWithExplainer() + svcName := "s" + c := components{ + serviceDetails: map[string]*machinelearningv1.ServiceStatus{ + svcName: &machinelearningv1.ServiceStatus{ + HttpEndpoint: "a.svc.local", + }, + }, + } + envExplainerImage = "explainer:123" + ei.createExplainer(sdep, &sdep.Spec.Predictors[0], &c, svcName, nil, ctrl.Log) + g.Expect(len(c.deployments)).To(Equal(1)) + g.Expect(c.deployments[0].Spec.Template.Spec.Containers[0].Image).To(Equal(envExplainerImage)) +} + var _ = Describe("createExplainer", func() { var r *SeldonDeploymentReconciler var mlDep *machinelearningv1.SeldonDeployment @@ -40,6 +105,7 @@ var _ = Describe("createExplainer", func() { var pSvcName string BeforeEach(func() { + p = &machinelearningv1.PredictorSpec{ Name: "main", } @@ -58,18 +124,24 @@ var _ = Describe("createExplainer", func() { pSvcName = machinelearningv1.GetPredictorKey(mlDep, p) r = &SeldonDeploymentReconciler{ - Client: k8sManager.GetClient(), - Log: ctrl.Log.WithName("controllers").WithName("SeldonDeployment"), - Scheme: k8sManager.GetScheme(), - Recorder: k8sManager.GetEventRecorderFor(constants.ControllerName), + Client: k8sManager.GetClient(), + ClientSet: clientset, + Log: ctrl.Log.WithName("controllers").WithName("SeldonDeployment"), + Scheme: k8sManager.GetScheme(), + Recorder: k8sManager.GetEventRecorderFor(constants.ControllerName), } }) DescribeTable( "Empty explainers should not create any component", func(explainer *machinelearningv1.Explainer) { + scheme = createScheme() + client := fake.NewSimpleClientset() + _, err := client.CoreV1().ConfigMaps(ControllerNamespace).Create(configMap) + Expect(err).To(BeNil()) p.Explainer = explainer - err := createExplainer(r, mlDep, p, c, pSvcName, nil, r.Log) + ei := NewExplainerInitializer(client) + err = ei.createExplainer(mlDep, p, c, pSvcName, nil, r.Log) Expect(err).ToNot(HaveOccurred()) Expect(c.deployments).To(BeEmpty()) @@ -136,7 +208,7 @@ 
var _ = Describe("Create a Seldon Deployment with explainer", func() { // Run Defaulter instance.Default() envUseExecutor = "true" - envExecutorUser = "2" + envDefaultUser = "2" Expect(k8sClient.Create(context.Background(), instance)).Should(Succeed()) //time.Sleep(time.Second * 5) @@ -174,6 +246,7 @@ var _ = Describe("Create a Seldon Deployment with explainer", func() { Expect(len(depFetched.Spec.Template.Spec.Containers)).Should(Equal(1)) Expect(*depFetched.Spec.Replicas).To(Equal(int32(1))) Expect(*depFetched.Spec.Template.Spec.SecurityContext.RunAsUser).To(Equal(int64(2))) + Expect(depFetched.Spec.Template.Spec.Containers[0].Image).To(Equal("seldonio/alibiexplainer:1.2.0")) //Check svc created svcKey := types.NamespacedName{ diff --git a/operator/controllers/seldondeployment_prepackaged_servers.go b/operator/controllers/seldondeployment_prepackaged_servers.go index f3bddd9314..95ef57ed3c 100644 --- a/operator/controllers/seldondeployment_prepackaged_servers.go +++ b/operator/controllers/seldondeployment_prepackaged_servers.go @@ -19,14 +19,14 @@ package controllers import ( "encoding/json" "fmt" - "strconv" - "strings" - machinelearningv1 "github.com/seldonio/seldon-core/operator/apis/machinelearning.seldon.io/v1" "github.com/seldonio/seldon-core/operator/constants" "github.com/seldonio/seldon-core/operator/utils" appsv1 "k8s.io/api/apps/v1" v1 "k8s.io/api/core/v1" + "k8s.io/client-go/kubernetes" + "strconv" + "strings" ) const ( @@ -37,6 +37,14 @@ var ( PredictiveUnitDefaultEnvSecretRefName = GetEnv(ENV_PREDICTIVE_UNIT_DEFAULT_ENV_SECRET_REF_NAME, "") ) +type PrePackedInitialiser struct { + clientset kubernetes.Interface +} + +func NewPrePackedInitializer(clientset kubernetes.Interface) *PrePackedInitialiser { + return &PrePackedInitialiser{clientset: clientset} +} + func extractEnvSecretRefName(pu *machinelearningv1.PredictiveUnit) string { envSecretRefName := "" if pu.EnvSecretRefName == "" { @@ -47,11 +55,10 @@ func extractEnvSecretRefName(pu *machinelearningv1.PredictiveUnit) string { return envSecretRefName } -func createTensorflowServingContainer(pu *machinelearningv1.PredictiveUnit, usePUPorts bool) *v1.Container { +func createTensorflowServingContainer(pu *machinelearningv1.PredictiveUnit, tensorflowProtocol bool) *v1.Container { ServerConfig := machinelearningv1.GetPrepackServerConfig(string(*pu.Implementation)) tfImage := "tensorflow/serving:latest" - if ServerConfig.TensorflowImage != "" { tfImage = ServerConfig.TensorflowImage } @@ -59,7 +66,7 @@ func createTensorflowServingContainer(pu *machinelearningv1.PredictiveUnit, useP grpcPort := int32(constants.TfServingGrpcPort) restPort := int32(constants.TfServingRestPort) name := constants.TFServingContainerName - if usePUPorts { + if tensorflowProtocol { if pu.Endpoint.Type == machinelearningv1.GRPC { grpcPort = pu.Endpoint.ServicePort } else { @@ -91,115 +98,109 @@ func createTensorflowServingContainer(pu *machinelearningv1.PredictiveUnit, useP } } -func addTFServerContainer(mlDep *machinelearningv1.SeldonDeployment, r *SeldonDeploymentReconciler, pu *machinelearningv1.PredictiveUnit, p *machinelearningv1.PredictorSpec, deploy *appsv1.Deployment, serverConfig machinelearningv1.PredictorServerConfig) error { - - if len(*pu.Implementation) > 0 && (serverConfig.Tensorflow || serverConfig.TensorflowImage != "") { - - ty := machinelearningv1.MODEL - pu.Type = &ty +func (pi *PrePackedInitialiser) addTFServerContainer(mlDep *machinelearningv1.SeldonDeployment, pu *machinelearningv1.PredictiveUnit, deploy *appsv1.Deployment, 
serverConfig *machinelearningv1.PredictorServerConfig) error { + ty := machinelearningv1.MODEL + pu.Type = &ty - c := utils.GetContainerForDeployment(deploy, pu.Name) + c := utils.GetContainerForDeployment(deploy, pu.Name) - var tfServingContainer *v1.Container - if mlDep.Spec.Protocol == machinelearningv1.ProtocolTensorflow { - tfServingContainer = createTensorflowServingContainer(pu, true) - containers := make([]v1.Container, len(deploy.Spec.Template.Spec.Containers)) - for i, ctmp := range deploy.Spec.Template.Spec.Containers { - if ctmp.Name == pu.Name { - containers[i] = *tfServingContainer - } else { - containers[i] = ctmp - } - } - deploy.Spec.Template.Spec.Containers = containers - - } else { - //Add missing fields - machinelearningv1.SetImageNameForPrepackContainer(pu, c) - SetUriParamsForTFServingProxyContainer(pu, c) - - tfServingContainer = utils.GetContainerForDeployment(deploy, constants.TFServingContainerName) - existing := tfServingContainer != nil - if !existing { - tfServingContainer = createTensorflowServingContainer(pu, false) - deploy.Spec.Template.Spec.Containers = append(deploy.Spec.Template.Spec.Containers, *tfServingContainer) - } + var tfServingContainer *v1.Container + if mlDep.Spec.Protocol == machinelearningv1.ProtocolTensorflow { + tfServingContainer = c + } else { + machinelearningv1.SetImageNameForPrepackContainer(pu, c, serverConfig) + SetUriParamsForTFServingProxyContainer(pu, c) + tfServingContainer = utils.GetContainerForDeployment(deploy, constants.TFServingContainerName) + } + existing := tfServingContainer != nil + if !existing { + tfServingContainer = createTensorflowServingContainer(pu, mlDep.Spec.Protocol == machinelearningv1.ProtocolTensorflow) + deploy.Spec.Template.Spec.Containers = append(deploy.Spec.Template.Spec.Containers, *tfServingContainer) + } else { + // Update any missing fields + protoType := createTensorflowServingContainer(pu, mlDep.Spec.Protocol == machinelearningv1.ProtocolTensorflow) + if tfServingContainer.Image == "" { + tfServingContainer.Image = protoType.Image } + if tfServingContainer.Args == nil || len(tfServingContainer.Args) == 0 { + tfServingContainer.Args = protoType.Args + } + if tfServingContainer.Ports == nil || len(tfServingContainer.Ports) == 0 { + tfServingContainer.Ports = protoType.Ports + } + } - envSecretRefName := extractEnvSecretRefName(pu) + envSecretRefName := extractEnvSecretRefName(pu) - _, err := InjectModelInitializer(deploy, tfServingContainer.Name, pu.ModelURI, pu.ServiceAccountName, envSecretRefName, r) - if err != nil { - return err - } + mi := NewModelInitializer(pi.clientset) + _, err := mi.InjectModelInitializer(deploy, tfServingContainer.Name, pu.ModelURI, pu.ServiceAccountName, envSecretRefName) + if err != nil { + return err } return nil } -func addModelDefaultServers(r *SeldonDeploymentReconciler, pu *machinelearningv1.PredictiveUnit, p *machinelearningv1.PredictorSpec, deploy *appsv1.Deployment, serverConfig machinelearningv1.PredictorServerConfig) error { - - if len(*pu.Implementation) > 0 && !serverConfig.Tensorflow && serverConfig.TensorflowImage == "" { +func (pi *PrePackedInitialiser) addModelDefaultServers(pu *machinelearningv1.PredictiveUnit, deploy *appsv1.Deployment, serverConfig *machinelearningv1.PredictorServerConfig) error { + ty := machinelearningv1.MODEL + pu.Type = &ty - ty := machinelearningv1.MODEL - pu.Type = &ty - - if pu.Endpoint == nil { - pu.Endpoint = &machinelearningv1.Endpoint{Type: machinelearningv1.REST} - } - c := utils.GetContainerForDeployment(deploy, 
pu.Name) - existing := c != nil - if !existing { - c = &v1.Container{ - Name: pu.Name, - VolumeMounts: []v1.VolumeMount{ - { - Name: machinelearningv1.PODINFO_VOLUME_NAME, - MountPath: machinelearningv1.PODINFO_VOLUME_PATH, - }, + if pu.Endpoint == nil { + pu.Endpoint = &machinelearningv1.Endpoint{Type: machinelearningv1.REST} + } + c := utils.GetContainerForDeployment(deploy, pu.Name) + existing := c != nil + if !existing { + c = &v1.Container{ + Name: pu.Name, + VolumeMounts: []v1.VolumeMount{ + { + Name: machinelearningv1.PODINFO_VOLUME_NAME, + MountPath: machinelearningv1.PODINFO_VOLUME_PATH, }, - } + }, } + } - machinelearningv1.SetImageNameForPrepackContainer(pu, c) - - // Add parameters envvar - point at mount path because initContainer will download - params := pu.Parameters - uriParam := machinelearningv1.Parameter{ - Name: "model_uri", - Type: "STRING", - Value: DefaultModelLocalMountPath, - } - params = append(params, uriParam) - paramStr, err := json.Marshal(params) - if err != nil { - return err - } + machinelearningv1.SetImageNameForPrepackContainer(pu, c, serverConfig) - if len(params) > 0 { - if !utils.HasEnvVar(c.Env, machinelearningv1.ENV_PREDICTIVE_UNIT_PARAMETERS) { - c.Env = append(c.Env, v1.EnvVar{Name: machinelearningv1.ENV_PREDICTIVE_UNIT_PARAMETERS, Value: string(paramStr)}) - } else { - c.Env = utils.SetEnvVar(c.Env, v1.EnvVar{Name: machinelearningv1.ENV_PREDICTIVE_UNIT_PARAMETERS, Value: string(paramStr)}) - } + // Add parameters envvar - point at mount path because initContainer will download + params := pu.Parameters + uriParam := machinelearningv1.Parameter{ + Name: "model_uri", + Type: "STRING", + Value: DefaultModelLocalMountPath, + } + params = append(params, uriParam) + paramStr, err := json.Marshal(params) + if err != nil { + return err + } + if len(params) > 0 { + if !utils.HasEnvVar(c.Env, machinelearningv1.ENV_PREDICTIVE_UNIT_PARAMETERS) { + c.Env = append(c.Env, v1.EnvVar{Name: machinelearningv1.ENV_PREDICTIVE_UNIT_PARAMETERS, Value: string(paramStr)}) + } else { + c.Env = utils.SetEnvVar(c.Env, v1.EnvVar{Name: machinelearningv1.ENV_PREDICTIVE_UNIT_PARAMETERS, Value: string(paramStr)}) } - // Add container to deployment - if !existing { - if len(deploy.Spec.Template.Spec.Containers) > 0 { - deploy.Spec.Template.Spec.Containers = append(deploy.Spec.Template.Spec.Containers, *c) - } else { - deploy.Spec.Template.Spec.Containers = []v1.Container{*c} - } + } + + // Add container to deployment + if !existing { + if len(deploy.Spec.Template.Spec.Containers) > 0 { + deploy.Spec.Template.Spec.Containers = append(deploy.Spec.Template.Spec.Containers, *c) + } else { + deploy.Spec.Template.Spec.Containers = []v1.Container{*c} } + } - envSecretRefName := extractEnvSecretRefName(pu) + envSecretRefName := extractEnvSecretRefName(pu) - _, err = InjectModelInitializer(deploy, c.Name, pu.ModelURI, pu.ServiceAccountName, envSecretRefName, r.Client) - if err != nil { - return err - } + mi := NewModelInitializer(pi.clientset) + _, err = mi.InjectModelInitializer(deploy, c.Name, pu.ModelURI, pu.ServiceAccountName, envSecretRefName) + if err != nil { + return err } return nil } @@ -257,7 +258,7 @@ func SetUriParamsForTFServingProxyContainer(pu *machinelearningv1.PredictiveUnit } } -func createStandaloneModelServers(r *SeldonDeploymentReconciler, mlDep *machinelearningv1.SeldonDeployment, p *machinelearningv1.PredictorSpec, c *components, pu *machinelearningv1.PredictiveUnit, podSecurityContext *v1.PodSecurityContext) error { +func (pi *PrePackedInitialiser) 
createStandaloneModelServers(mlDep *machinelearningv1.SeldonDeployment, p *machinelearningv1.PredictorSpec, c *components, pu *machinelearningv1.PredictiveUnit, podSecurityContext *v1.PodSecurityContext) error { if machinelearningv1.IsPrepack(pu) { sPodSpec, idx := utils.GetSeldonPodSpecForPredictiveUnit(p, pu.Name) @@ -284,13 +285,17 @@ func createStandaloneModelServers(r *SeldonDeploymentReconciler, mlDep *machinel deploy = createDeploymentWithoutEngine(depName, seldonId, sPodSpec, p, mlDep, podSecurityContext) } - ServerConfig := machinelearningv1.GetPrepackServerConfig(string(*pu.Implementation)) - - if err := addModelDefaultServers(r, pu, p, deploy, ServerConfig); err != nil { - return err - } - if err := addTFServerContainer(mlDep, r, pu, p, deploy, ServerConfig); err != nil { - return err + serverConfig := machinelearningv1.GetPrepackServerConfig(string(*pu.Implementation)) + if serverConfig != nil { + if *pu.Implementation != machinelearningv1.PrepackTensorflowName { + if err := pi.addModelDefaultServers(pu, deploy, serverConfig); err != nil { + return err + } + } else { + if err := pi.addTFServerContainer(mlDep, pu, deploy, serverConfig); err != nil { + return err + } + } } if !existing { @@ -313,7 +318,7 @@ func createStandaloneModelServers(r *SeldonDeploymentReconciler, mlDep *machinel } for i := 0; i < len(pu.Children); i++ { - if err := createStandaloneModelServers(r, mlDep, p, c, &pu.Children[i], podSecurityContext); err != nil { + if err := pi.createStandaloneModelServers(mlDep, p, c, &pu.Children[i], podSecurityContext); err != nil { return err } } diff --git a/operator/controllers/seldondeployment_prepackaged_servers_test.go b/operator/controllers/seldondeployment_prepackaged_servers_test.go index ba4d6b5dac..42643993d1 100644 --- a/operator/controllers/seldondeployment_prepackaged_servers_test.go +++ b/operator/controllers/seldondeployment_prepackaged_servers_test.go @@ -9,6 +9,7 @@ import ( "github.com/seldonio/seldon-core/operator/utils" appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "strconv" @@ -265,6 +266,112 @@ var _ = Describe("Create a prepacked tfserving server for tensorflow protocol an }) +var _ = Describe("Create a prepacked tfserving server for tensorflow protocol and REST with existing container", func() { + const interval = time.Second * 1 + const name = "pp3" + const sdepName = "prepack3b" + modelName := "classifier" + By("Creating a resource") + It("should create a resource with defaults", func() { + Expect(k8sClient).NotTo(BeNil()) + cpuRequest, err := resource.ParseQuantity("2") + Expect(err).To(BeNil()) + var modelType = machinelearningv1.MODEL + var impl = machinelearningv1.PredictiveUnitImplementation(constants.PrePackedServerTensorflow) + key := types.NamespacedName{ + Name: sdepName, + Namespace: "default", + } + instance := &machinelearningv1.SeldonDeployment{ + ObjectMeta: metav1.ObjectMeta{ + Name: key.Name, + Namespace: key.Namespace, + }, + Spec: machinelearningv1.SeldonDeploymentSpec{ + Name: name, + Protocol: machinelearningv1.ProtocolTensorflow, + Predictors: []machinelearningv1.PredictorSpec{ + { + Name: "p1", + ComponentSpecs: []*machinelearningv1.SeldonPodSpec{ + { + Spec: corev1.PodSpec{ + Containers: []corev1.Container{ + { + Name: modelName, + Resources: corev1.ResourceRequirements{ + Requests: corev1.ResourceList{corev1.ResourceCPU: cpuRequest}, + }, + }, + }, + }, + }, + }, + Graph: 
machinelearningv1.PredictiveUnit{ + Name: modelName, + Type: &modelType, + Implementation: &impl, + Endpoint: &machinelearningv1.Endpoint{Type: machinelearningv1.REST}, + }, + }, + }, + }, + } + + configMapName := types.NamespacedName{Name: "seldon-config", + Namespace: "seldon-system"} + + configResult := &corev1.ConfigMap{} + const timeout = time.Second * 300 + Eventually(func() error { return k8sClient.Get(context.TODO(), configMapName, configResult) }, timeout). + Should(Succeed()) + + // Run Defaulter + instance.Default() + + Expect(k8sClient.Create(context.Background(), instance)).Should(Succeed()) + //time.Sleep(time.Second * 5) + + fetched := &machinelearningv1.SeldonDeployment{} + Eventually(func() error { + err := k8sClient.Get(context.Background(), key, fetched) + return err + }, timeout, interval).Should(BeNil()) + Expect(fetched.Name).Should(Equal(sdepName)) + + sPodSpec, idx := utils.GetSeldonPodSpecForPredictiveUnit(&instance.Spec.Predictors[0], instance.Spec.Predictors[0].Graph.Name) + depName := machinelearningv1.GetDeploymentName(instance, instance.Spec.Predictors[0], sPodSpec, idx) + depKey := types.NamespacedName{ + Name: depName, + Namespace: "default", + } + depFetched := &appsv1.Deployment{} + Eventually(func() error { + err := k8sClient.Get(context.Background(), depKey, depFetched) + return err + }, timeout, interval).Should(BeNil()) + Expect(len(depFetched.Spec.Template.Spec.Containers)).Should(Equal(2)) + for _, c := range depFetched.Spec.Template.Spec.Containers { + if c.Name == modelName { + Expect(c.Image).ToNot(BeNil()) + Expect(c.Resources.Requests.Cpu()).ToNot(BeNil()) + Expect(*c.Resources.Requests.Cpu()).To(Equal(cpuRequest)) + for _, arg := range c.Args { + if strings.Index(arg, constants.TfServingArgPort) == 0 { + Expect(arg).To(Equal(constants.TfServingArgPort + strconv.Itoa(constants.TfServingGrpcPort))) + } + if strings.Index(arg, constants.TfServingArgRestPort) == 0 { + Expect(arg).To(Equal(constants.TfServingArgRestPort + strconv.Itoa(int(constants.FirstPortNumber)))) + } + } + } + } + + Expect(k8sClient.Delete(context.Background(), instance)).Should(Succeed()) + }) + +}) + var _ = Describe("Test override of environment variable", func() { const blankName = "" const secretName = "SECRET_NAME" @@ -396,7 +503,7 @@ var _ = Describe("Create a prepacked sklearn server", func() { const timeout = time.Second * 30 const interval = time.Second * 1 const name = "pp1" - const sdepName = "prepack1" + const sdepName = "prepack5" envExecutorUser = "2" By("Creating a resource") It("should create a resource with defaults and security context", func() { @@ -441,7 +548,7 @@ var _ = Describe("Create a prepacked sklearn server", func() { //set security user envUseExecutor = "true" - envExecutorUser = "2" + envDefaultUser = "2" Expect(k8sClient.Create(context.Background(), instance)).Should(Succeed()) //time.Sleep(time.Second * 5) diff --git a/operator/controllers/suite_test.go b/operator/controllers/suite_test.go index e4b5290b40..8a131fcd4a 100644 --- a/operator/controllers/suite_test.go +++ b/operator/controllers/suite_test.go @@ -19,10 +19,6 @@ package controllers import ( "context" "fmt" - "os" - "path/filepath" - "testing" - . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" machinelearningv1 "github.com/seldonio/seldon-core/operator/apis/machinelearning.seldon.io/v1" @@ -35,12 +31,15 @@ import ( "k8s.io/client-go/kubernetes" clientgoscheme "k8s.io/client-go/kubernetes/scheme" "k8s.io/client-go/rest" + "os" + "path/filepath" ctrl "sigs.k8s.io/controller-runtime" "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/envtest" "sigs.k8s.io/controller-runtime/pkg/envtest/printer" logf "sigs.k8s.io/controller-runtime/pkg/log" "sigs.k8s.io/controller-runtime/pkg/log/zap" + "testing" // +kubebuilder:scaffold:imports ) @@ -66,7 +65,7 @@ var configs = map[string]string{ "predictor_servers": `{ "TENSORFLOW_SERVER": { "tensorflow": true, - "tfImage": "tensorflow/serving:latest", + "tfImage": "tensorflow/serving:2.1", "rest": { "image": "seldonio/tfserving-proxy_rest", "defaultImageVersion": "0.7" @@ -107,6 +106,18 @@ var configs = map[string]string{ } } }`, + "storageInitializer": ` + { + "image" : "gcr.io/kfserving/storage-initializer:0.2.2", + "memoryRequest": "100Mi", + "memoryLimit": "1Gi", + "cpuRequest": "100m", + "cpuLimit": "1" + }`, + "explainer": ` + { + "image" : "seldonio/alibiexplainer:1.2.0" + }`, } // Create configmap @@ -124,6 +135,8 @@ var _ = JustBeforeEach(func() { envExecutorImageRelated = "b" envEngineImage = "c" envEngineImageRelated = "d" + envDefaultUser = "" + envExplainerImage = "" }) var _ = BeforeSuite(func(done Done) { @@ -178,10 +191,11 @@ var _ = BeforeSuite(func(done Done) { Expect(err).ToNot(HaveOccurred()) err = (&SeldonDeploymentReconciler{ - Client: k8sManager.GetClient(), - Log: ctrl.Log.WithName("controllers").WithName("SeldonDeployment"), - Scheme: k8sManager.GetScheme(), - Recorder: k8sManager.GetEventRecorderFor(constants.ControllerName), + Client: k8sManager.GetClient(), + ClientSet: clientset, + Log: ctrl.Log.WithName("controllers").WithName("SeldonDeployment"), + Scheme: k8sManager.GetScheme(), + Recorder: k8sManager.GetEventRecorderFor(constants.ControllerName), }).SetupWithManager(k8sManager, constants.ControllerName) Expect(err).ToNot(HaveOccurred()) diff --git a/operator/go.mod b/operator/go.mod index 82e491fc7c..fd6d0351e0 100644 --- a/operator/go.mod +++ b/operator/go.mod @@ -3,6 +3,8 @@ module github.com/seldonio/seldon-core/operator go 1.13 require ( + github.com/Azure/go-autorest v14.0.1+incompatible // indirect + github.com/Azure/go-autorest/autorest/adal v0.8.3 // indirect github.com/ghodss/yaml v1.0.0 github.com/go-logr/logr v0.1.0 github.com/gogo/protobuf v1.3.1 diff --git a/operator/go.sum b/operator/go.sum index c7ea77bcb8..17936f8742 100644 --- a/operator/go.sum +++ b/operator/go.sum @@ -5,6 +5,8 @@ cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSR github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= github.com/Azure/go-autorest v11.1.2+incompatible h1:viZ3tV5l4gE2Sw0xrasFHytCGtzYCrT+um/rrSQ1BfA= github.com/Azure/go-autorest v11.1.2+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest v14.0.1+incompatible h1:YhojO9jolWIvvTW7ORhz2ZSNF6Q1TbLqUunKd3jrtyw= +github.com/Azure/go-autorest v14.0.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest v0.9.2 
h1:6AWuh3uWrsZJcNoCHrCF/+g4aKPCU39kaMO6/qrnK/4= @@ -13,6 +15,8 @@ github.com/Azure/go-autorest/autorest/adal v0.5.0 h1:q2gDruN08/guU9vAjuPWff0+QIr github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/adal v0.7.0 h1:PUMxSVw3tEImG0JTRqbxjXLKCSoPk7DartDELqlOuiI= github.com/Azure/go-autorest/autorest/adal v0.7.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= +github.com/Azure/go-autorest/autorest/adal v0.8.3 h1:O1AGG9Xig71FxdX9HO5pGNyZ7TbSyHaVg+5eJO/jSGw= +github.com/Azure/go-autorest/autorest/adal v0.8.3/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= github.com/Azure/go-autorest/autorest/date v0.2.0 h1:yW+Zlqf26583pE43KhfnhFcdmSWlm5Ew6bxipnr/tbM= github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= @@ -349,6 +353,8 @@ golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392 h1:ACG4HJsFiNMf47Y4PeRoebLNy/2lXT9EtprMuTFWt1M= golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= +golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413 h1:ULYEB3JvPRE/IfO+9uO7vKV/xzVTO7XPAwm8xbf4w2g= +golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190312203227-4b39c73a6495/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= diff --git a/operator/helm/split_resources.py b/operator/helm/split_resources.py index 973aeb013e..fc6546dff3 100644 --- a/operator/helm/split_resources.py +++ b/operator/helm/split_resources.py @@ -174,6 +174,7 @@ def helm_release(value: str): res["data"]["storageInitializer"] = helm_value_json( "storageInitializer" ) + res["data"]["explainer"] = helm_value_json("explainer") if kind == "serviceaccount" and name == "seldon-manager": res["metadata"]["name"] = helm_value("serviceAccount.name") diff --git a/operator/main.go b/operator/main.go index 49ea9db646..66abe34af6 100644 --- a/operator/main.go +++ b/operator/main.go @@ -18,6 +18,7 @@ package main import ( "flag" + "k8s.io/client-go/kubernetes" "os" "github.com/seldonio/seldon-core/operator/constants" @@ -112,6 +113,7 @@ func main() { if err = (&controllers.SeldonDeploymentReconciler{ Client: mgr.GetClient(), + ClientSet: kubernetes.NewForConfigOrDie(config), Log: ctrl.Log.WithName("controllers").WithName("SeldonDeployment"), Scheme: mgr.GetScheme(), Namespace: namespace, diff --git a/operator/seldon-operator/Makefile b/operator/seldon-operator/Makefile index 62c2d592fe..6c5ebdcdd2 100644 --- a/operator/seldon-operator/Makefile +++ b/operator/seldon-operator/Makefile @@ -1,5 +1,6 @@ -PREV_VERSION=1.0.0 -VERSION=1.1.0 +SHELL := /bin/bash +VERSION ?= $(shell cat ../../version.txt) +PREV_VERSION=1.1.0 COMMUNITY_OPERATORS_BASE=/home/clive/work/seldon-core/redhat/community-operators .PHONY: recreate-generated @@ -28,7 +29,8 @@ deploy/olm-catalog/seldon-operator/${VERSION}/seldon-operator.v${VERSION}.cluste operator-sdk generate csv --csv-version 
${VERSION} --from-version ${PREV_VERSION} --update-crds update_csv: clean_csv_${VERSION} deploy/olm-catalog/seldon-operator/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml - + sed -i s/${PREV_VERSION}/${VERSION}/ deploy/olm-catalog/seldon-operator/seldon-operator.package.yaml + sed -i 's/containerImage:.*/containerImage: seldonio\/seldon-core-operator:'${VERSION}'/' deploy/olm-catalog/seldon-operator/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml # # TESTS # @@ -91,7 +93,7 @@ certified_validate_ui: certified_push_quay: - operator-courier push bundle/certified/ seldon seldon-operator-certified 1.1.0 "$$QUAY_TOKEN" + operator-courier push bundle/certified/ seldon seldon-operator-certified ${VERSION} "$$QUAY_TOKEN" certified_zip: diff --git a/operator/seldon-operator/README.md b/operator/seldon-operator/README.md index cb341315ea..9ec7162f98 100644 --- a/operator/seldon-operator/README.md +++ b/operator/seldon-operator/README.md @@ -16,10 +16,10 @@ Recreate the core yaml from these resources: make deploy/operator.yaml ``` -Create a new rule in the Makefile to generate the operator CSV from a previous version using the latest yaml. For 1.1.0 this is an initial rule based off a phony previous release 1.0.0. +Create a new CSV: ``` -make clean_1.1.0 deploy/olm-catalog/seldon-operator/1.1.0/seldon-operator.v1.1.0.clusterserviceversion.yaml +make update_csv ``` Check the OLM bundle. For this you need to have installed [operator-courier](https://github.com/operator-framework/operator-courier). @@ -152,7 +152,7 @@ export QUAY_TOKEN="basic f2VsAG9uNmZydXsd12I2R0hPUCpwc3Vy" Then run ``` -make operator-courier_push +make community-quay-push ``` You will need to delete any old application from quay before this will succeed. @@ -186,10 +186,10 @@ For community operators: For certified operators: - * Create UBI images for operator, executor and engine. - * Create a new release on https://connect.redhat.com/content/seldon-core with - * new ubi images - * new bundle that refrences then. TODO: create bundle from community bundle. + * Create new UBI images. Run `/marketplace/redhat/scan-images.py`. You will need the passwords from passpack. + * Create a new bundle + +* a new bundle that references them.
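As context for the `RELATED_IMAGE_*` values that the certified bundle script below injects, the controller change earlier in this diff resolves the explainer image by checking an environment variable first and only then falling back to the `explainer` entry of the seldon-config ConfigMap. The following is a minimal, self-contained Go sketch of that lookup order; the helper names and simplified types here are illustrative only and are not the operator's actual API.

```go
// Sketch only: illustrates env-var-first, ConfigMap-second image resolution.
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// explainerConfig mirrors the JSON stored under the "explainer" key,
// e.g. {"image": "seldonio/alibiexplainer:1.2.0"}.
type explainerConfig struct {
	Image string `json:"image"`
}

// resolveExplainerImage prefers the RELATED_IMAGE_EXPLAINER environment
// variable (set by the OLM bundle for certified installs) and falls back
// to the ConfigMap JSON entry.
func resolveExplainerImage(configMapData map[string]string) (string, error) {
	if img := os.Getenv("RELATED_IMAGE_EXPLAINER"); img != "" {
		return img, nil
	}
	raw, ok := configMapData["explainer"]
	if !ok {
		return "", fmt.Errorf("no explainer entry in seldon-config")
	}
	var cfg explainerConfig
	if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
		return "", fmt.Errorf("unable to unmarshal explainer config: %w", err)
	}
	return cfg.Image, nil
}

func main() {
	data := map[string]string{"explainer": `{"image": "seldonio/alibiexplainer:1.2.0"}`}
	img, err := resolveExplainerImage(data)
	fmt.Println(img, err)
}
```

The same env-then-ConfigMap pattern is what makes the `RELATED_IMAGE_*` entries declared in operator.yaml further down effective for offline and certified installs.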
For Marketplace: diff --git a/operator/seldon-operator/bundle/create_bundle.sh b/operator/seldon-operator/bundle/create_bundle.sh index 785d549624..c553a9478d 100755 --- a/operator/seldon-operator/bundle/create_bundle.sh +++ b/operator/seldon-operator/bundle/create_bundle.sh @@ -8,6 +8,17 @@ OPERATOR_IMAGE=registry.connect.redhat.com/seldonio/seldon-core-operator EXECUTOR_IMAGE=registry.connect.redhat.com/seldonio/seldon-core-executor ENGINE_IMAGE=registry.connect.redhat.com/seldonio/seldon-engine MOCK_IMAGE=registry.connect.redhat.com/seldonio/mock-classifier@sha256:482ee477c344badcaa80e850f4339db41957f9c2396ae24f9e398b67bd5c184e +STORAGE_INITIALIZER_IMAGE=registry.connect.redhat.com/seldonio/storage-initializer +SKLEARNSERVER_REST_IMAGE=registry.connect.redhat.com/seldonio/sklearnserver-rest +SKLEARNSERVER_GRPC_IMAGE=registry.connect.redhat.com/seldonio/sklearnserver-grpc +XGBOOSTSERVER_REST_IMAGE=registry.connect.redhat.com/seldonio/xgboostserver-rest +XGBOOSTSERVER_GRPC_IMAGE=registry.connect.redhat.com/seldonio/xgboostserver-grpc +MLFLOWSERVER_REST_IMAGE=registry.connect.redhat.com/seldonio/mlflowserver-rest +MLFLOWSERVER_GRPC_IMAGE=registry.connect.redhat.com/seldonio/mlflowserver-grpc +TFPROXY_REST_IMAGE=registry.connect.redhat.com/seldonio/tfproxy-rest +TFPROXY_GRPC_IMAGE=registry.connect.redhat.com/seldonio/tfproxy-grpc +TENSORFLOW_IMAGE=registry.connect.redhat.com/seldonio/tensorflow-serving +EXPLAINER_IMAGE=registry.connect.redhat.com/seldonio/alibiexplainer mkdir -p certified @@ -23,7 +34,18 @@ function update_images { sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_ENGINE$\)#\1\2\n\1 value: '${ENGINE_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml sed -i 's#\(^.*image: \)seldonio/seldon-core-operator:.*$#\1'${OPERATOR_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml sed -i 's#\(^.*containerImage: \)seldonio/seldon-core-operator:.*$#\1'${OPERATOR_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml - sed -i 's#seldonio/mock_classifier_rest:1.3#'${MOCK_IMAGE}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#seldonio/mock_classifier_rest:1.3#'${MOCK_IMAGE}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_STORAGE_INITIALIZER$\)#\1\2\n\1 value: '${STORAGE_INITIALIZER_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_SKLEARNSERVER_REST$\)#\1\2\n\1 value: '${SKLEARNSERVER_REST_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_SKLEARNSERVER_GRPC$\)#\1\2\n\1 value: '${SKLEARNSERVER_GRPC_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_XGBOOSTSERVER_REST$\)#\1\2\n\1 value: '${XGBOOSTSERVER_REST_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_XGBOOSTSERVER_GRPC$\)#\1\2\n\1 value: '${XGBOOSTSERVER_GRPC_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_MLFLOWSERVER_REST$\)#\1\2\n\1 value: '${MLFLOWSERVER_REST_IMAGE}:${VERSION}'#' 
certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_MLFLOWSERVER_GRPC$\)#\1\2\n\1 value: '${MLFLOWSERVER_GRPC_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_TFPROXY_REST$\)#\1\2\n\1 value: '${TFPROXY_REST_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_TFPROXY_GRPC$\)#\1\2\n\1 value: '${TFPROXY_GRPC_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_TENSORFLOW$\)#\1\2\n\1 value: '${TENSORFLOW_IMAGE}:2.1.0'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml + sed -i 's#\(^.*\)\(- name: RELATED_IMAGE_EXPLAINER$\)#\1\2\n\1 value: '${EXPLAINER_IMAGE}:${VERSION}'#' certified/${VERSION}/seldon-operator.v${VERSION}.clusterserviceversion.yaml } function update_package { diff --git a/operator/seldon-operator/deploy/operator.yaml b/operator/seldon-operator/deploy/operator.yaml index 515fe64e84..a21db7c489 100644 --- a/operator/seldon-operator/deploy/operator.yaml +++ b/operator/seldon-operator/deploy/operator.yaml @@ -43,6 +43,28 @@ spec: value: '' - name: RELATED_IMAGE_ENGINE value: '' + - name: RELATED_IMAGE_STORAGE_INITIALIZER + value: '' + - name: RELATED_IMAGE_SKLEARNSERVER_REST + value: '' + - name: RELATED_IMAGE_SKLEARNSERVER_GRPC + value: '' + - name: RELATED_IMAGE_XGBOOSTSERVER_REST + value: '' + - name: RELATED_IMAGE_XGBOOSTSERVER_GRPC + value: '' + - name: RELATED_IMAGE_MLFLOWSERVER_REST + value: '' + - name: RELATED_IMAGE_MLFLOWSERVER_GRPC + value: '' + - name: RELATED_IMAGE_TFPROXY_REST + value: '' + - name: RELATED_IMAGE_TFPROXY_GRPC + value: '' + - name: RELATED_IMAGE_TENSORFLOW + value: '' + - name: RELATED_IMAGE_EXPLAINER + value: '' - name: CREATE_RESOURCES value: 'true' - name: POD_NAMESPACE @@ -56,7 +78,7 @@ spec: - name: AMBASSADOR_SINGLE_NAMESPACE value: 'false' - name: ENGINE_CONTAINER_IMAGE_AND_VERSION - value: docker.io/seldonio/engine:1.1.0 + value: docker.io/seldonio/engine:1.1.1-rc - name: ENGINE_CONTAINER_IMAGE_PULL_POLICY value: IfNotPresent - name: ENGINE_CONTAINER_SERVICE_ACCOUNT_NAME @@ -84,7 +106,7 @@ spec: - name: USE_EXECUTOR value: 'true' - name: EXECUTOR_CONTAINER_IMAGE_AND_VERSION - value: seldonio/seldon-core-executor:1.1.0 + value: seldonio/seldon-core-executor:1.1.1-rc - name: EXECUTOR_CONTAINER_IMAGE_PULL_POLICY value: IfNotPresent - name: EXECUTOR_PROMETHEUS_PATH @@ -101,7 +123,7 @@ spec: value: http://default-broker. 
- name: DEFAULT_USER_ID value: '' - image: seldonio/seldon-core-operator:1.1.0 + image: seldonio/seldon-core-operator:1.1.1-rc name: manager ports: - containerPort: 8443 diff --git a/python/tests/helpers.py b/python/tests/helpers.py index 312a11b8d2..387da9ba49 100644 --- a/python/tests/helpers.py +++ b/python/tests/helpers.py @@ -6,12 +6,7 @@ from contextlib import contextmanager from subprocess import Popen -from tenacity import ( - retry, - wait_fixed, - stop_after_attempt, - retry_if_exception_type, -) +from tenacity import retry, wait_fixed, stop_after_attempt, retry_if_exception_type class MicroserviceWrapper: diff --git a/release.py b/release.py index 97b5bc516a..2c97f15084 100644 --- a/release.py +++ b/release.py @@ -9,8 +9,7 @@ import os import sys import argparse -import re -import shutil +import json def pp(o): @@ -59,6 +58,7 @@ def update_pom_file(fpath, seldon_core_version, debug=False): if debug: print("processing [{}]".format(fpath)) comp_dir_path = os.path.dirname(fpath) + cwd = os.getcwd() os.chdir(comp_dir_path) MAVEN_REPOSITORY_LOCATION = os.getenv('MAVEN_REPOSITORY_LOCATION') @@ -84,6 +84,7 @@ def update_pom_file(fpath, seldon_core_version, debug=False): else: print("error {fpath}".format(**locals())) print(err) + os.chdir(cwd) def update_chart_yaml_file(fpath, seldon_core_version, debug=False): @@ -103,7 +104,7 @@ def update_chart_yaml_file(fpath, seldon_core_version, debug=False): print("updated {fpath}".format(**locals())) -def update_operator_values_yaml_file(fpath, seldon_core_version, debug=False): +def update_operator_values_yaml_file_core_images(fpath, seldon_core_version, debug=False): fpath = os.path.realpath(fpath) if debug: print("processing [{}]".format(fpath)) @@ -119,12 +120,77 @@ def update_operator_values_yaml_file(fpath, seldon_core_version, debug=False): # pp(out) # pp(err) if err == None: - print("updated operator values yaml".format(**locals())) + print("updated operator values yaml for core images".format(**locals())) else: - print("error updating operator values yaml".format(**locals())) + print("error updating operator values yaml for core images".format(**locals())) print(err) +def update_operator_values_yaml_file_prepackaged_images(fpath, seldon_core_version, debug=False): + fpath = os.path.realpath(fpath) + if debug: + print("processing [{}]".format(fpath)) + args = [ + "sed", + "-i", + "s/defaultImageVersion: \(.*\)/defaultImageVersion: \"{seldon_core_version}\"/".format( + **locals() + ), + fpath, + ] + err, out = run_command(args, debug) + # pp(out) + # pp(err) + if err == None: + print("updated operator values yaml for prepackaged server images".format(**locals())) + else: + print("error updating operator values yaml for prepackaged server images".format(**locals())) + print(err) + +def update_operator_values_yaml_file_explainer_image(fpath, seldon_core_version, debug=False): + fpath = os.path.realpath(fpath) + if debug: + print("processing [{}]".format(fpath)) + args = [ + "sed", + "-i", + "s|seldonio/alibiexplainer:\(.*\)|seldonio/alibiexplainer:{seldon_core_version}|".format( + **locals() + ), + fpath, + ] + err, out = run_command(args, debug) + # pp(out) + # pp(err) + if err == None: + print("updated operator values yaml for prepackaged server images".format(**locals())) + else: + print("error updating operator values yaml for prepackaged server images".format(**locals())) + print(err) + + + +def update_operator_kustomize_prepackaged_images(fpath, seldon_core_version, debug=False): + fpath = os.path.realpath(fpath) + if debug: + 
print("processing [{}]".format(fpath)) + args = [ + "sed", + "-i", + "s/\"defaultImageVersion\": \(.*\)/\"defaultImageVersion\": \"{seldon_core_version}\"/".format( + **locals() + ), + fpath, + ] + err, out = run_command(args, debug) + # pp(out) + # pp(err) + if err == None: + print("updated operator kustomize yaml for prepackaged server images".format(**locals())) + else: + print("error updating operator kustomize yaml for prepackaged server images".format(**locals())) + print(err) + def update_versions_txt(seldon_core_version, debug=False): with open("version.txt", "w") as f: f.write("{seldon_core_version}\n".format(**locals())) @@ -185,13 +251,78 @@ def update_operator_version(seldon_core_version, debug=False): print(err) +def update_image_metadata_json(seldon_core_version, debug=False): + paths = [ + "examples/models/mean_classifier/image_metadata.json", + "integrations/tfserving/image_metadata.json", + "servers/sklearnserver/sklearnserver/image_metadata.json", + "servers/mlflowserver/mlflowserver/image_metadata.json", + "servers/xgboostserver/xgboostserver/image_metadata.json" + ] + for path in paths: + path = os.path.realpath(path) + if debug: + print("processing [{}]".format(path)) + with open(path) as json_file: + data = json.load(json_file) + for label in data["labels"]: + if "version" in label: + label["version"] = f"{seldon_core_version}" + with open(path, 'w') as outfile: + json.dump(data, outfile) + +def update_dockerfile_label_version(seldon_core_version, debug=False): + paths = [ + "operator/Dockerfile.redhat", + "engine/Dockerfile.redhat", + "executor/Dockerfile.redhat", + "servers/tfserving/Dockerfile.redhat", + "components/alibi-detect-server/Dockerfile", + "components/storage-initializer/Dockerfile", + "components/seldon-request-logger/Dockerfile", + "components/alibi-explain-server/Dockerfile" + ] + for path in paths: + if debug: + print("processing [{}]".format(path)) + args = [ + "sed", + "-i", + "s/version=\".*\" \\\\/version=\"{seldon_core_version}\" \\\\/".format(**locals()), + path, + ] + err, out = run_command(args, debug) + if err == None: + print("updated {path}".format(**locals())) + else: + print("error updating {path}".format(**locals())) + print(err) + +def update_python_wrapper_fixed_versions(seldon_core_version, debug=False): + + args = [ + "./hack/update_python_version.sh", + "{seldon_core_version}".format(**locals()), + ] + err, out = run_command(args, debug) + # pp(out) + # pp(err) + if err == None: + print("Updated python wrapper in matching files".format(**locals())) + else: + print("error updating python wrapper in matching files".format(**locals())) + print(err) + def set_version( seldon_core_version, pom_files, chart_yaml_files, operator_values_yaml_file, + operator_kustomize_yaml_file, debug=False, ): + update_python_wrapper_fixed_versions(seldon_core_version, debug) + # Normalize file paths pom_files_realpaths = [os.path.realpath(x) for x in pom_files] chart_yaml_file_realpaths = [os.path.realpath(x) for x in chart_yaml_files] @@ -200,20 +331,25 @@ def set_version( if operator_values_yaml_file != None else None ) + operator_kustomize_yaml_file_realpath = ( + os.path.realpath(operator_kustomize_yaml_file) + if operator_kustomize_yaml_file != None + else None + ) # Update kustomize update_kustomize_engine_version(seldon_core_version, debug) update_kustomize_executor_version(seldon_core_version, debug) - + # # Update operator version update_operator_version(seldon_core_version, debug) - + # # Update top level versions.txt 
update_versions_txt(seldon_core_version, debug) - + # # update the pom files for fpath in pom_files_realpaths: - update_pom_file(fpath, seldon_core_version, debug) + update_pom_file(fpath, seldon_core_version, debug) # update the helm chart files for chart_yaml_file_realpath in chart_yaml_file_realpaths: @@ -221,11 +357,30 @@ def set_version( # update the operator helm values file if operator_values_yaml_file != None: - update_operator_values_yaml_file( - operator_values_yaml_file_realpath, seldon_core_version, debug + update_operator_values_yaml_file_core_images( + operator_values_yaml_file_realpath, seldon_core_version, debug + ) + + if operator_values_yaml_file != None: + update_operator_values_yaml_file_prepackaged_images( + operator_values_yaml_file_realpath, seldon_core_version, debug + ) + update_operator_values_yaml_file_explainer_image( + operator_values_yaml_file_realpath, seldon_core_version, debug ) + if operator_kustomize_yaml_file != None: + update_operator_kustomize_prepackaged_images( + operator_kustomize_yaml_file_realpath, seldon_core_version, debug + ) + + # Update image version labels + update_image_metadata_json(seldon_core_version,debug) + update_dockerfile_label_version(seldon_core_version, debug) + + + def main(argv): POM_FILES = ["engine/pom.xml"] CHART_YAML_FILES = [ @@ -233,6 +388,7 @@ def main(argv): "helm-charts/seldon-core-analytics/Chart.yaml", ] OPERATOR_VALUES_YAML_FILE = "helm-charts/seldon-core-operator/values.yaml" + OPERATOR_KUSTOMIZE_CONFIGMAP = "operator/config/manager/configmap.yaml" opts = getOpts(argv[1:]) if opts.debug: @@ -242,6 +398,7 @@ def main(argv): POM_FILES, CHART_YAML_FILES, OPERATOR_VALUES_YAML_FILE, + OPERATOR_KUSTOMIZE_CONFIGMAP, opts.debug, ) print("done") diff --git a/servers/mlflowserver/Makefile b/servers/mlflowserver/Makefile index 5ba271ad39..92a8d3e640 100644 --- a/servers/mlflowserver/Makefile +++ b/servers/mlflowserver/Makefile @@ -1,19 +1,21 @@ -VERSION=0.5 -IMAGE_BASE=seldonio/mlflowserver -S2I_IMAGE_VERSION=1.1.1-SNAPSHOT +SHELL := /bin/bash +VERSION := $(shell cat ../../version.txt) +IMAGE_NAME_BASE=mlflowserver +IMAGE_BASE=seldonio/${IMAGE_NAME_BASE} +KIND_NAME ?= kind build_%: s2i build \ -E environment_$* \ ./mlflowserver \ - seldonio/seldon-core-s2i-python37:${S2I_IMAGE_VERSION} \ + seldonio/seldon-core-s2i-python37-ubi8:${VERSION} \ ${IMAGE_BASE}_$*:${VERSION} push_%: docker push ${IMAGE_BASE}_$*:${VERSION} kind_load_%: - kind load -v 3 docker-image ${IMAGE_BASE}_$*:${VERSION} + kind load -v 3 docker-image ${IMAGE_BASE}_$*:${VERSION} --name ${KIND_NAME} .PHONY: push_all push_all: push_rest push_grpc @@ -22,4 +24,20 @@ push_all: push_rest push_grpc build_all: build_rest build_grpc .PHONY: kind_load_all -kind_load_all: kind_load_rest kind_load_grpc +kind_load: build_all kind_load_rest kind_load_grpc + + +# https://connect.redhat.com/project/4121681/view +scan_rest=ospid-fe6af4a5-e0c6-414a-b0a6-00b7773b6336 +# https://connect.redhat.com/project/4122001/view +scan_grpc=ospid-fbd924dd-bc1e-403b-9f35-5f75b82b1cdf +redhat-image-scan-%: + docker pull ${IMAGE_BASE}_$*:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_mlflowserver_$*} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag ${IMAGE_BASE}_$*:${VERSION} scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + docker push scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + +.PHONY: redhat-image-scan +redhat-image-scan: redhat-image-scan-rest 
redhat-image-scan-grpc + diff --git a/servers/mlflowserver/mlflowserver/.s2i/bin/assemble b/servers/mlflowserver/mlflowserver/.s2i/bin/assemble new file mode 100644 index 0000000000..f3cac492de --- /dev/null +++ b/servers/mlflowserver/mlflowserver/.s2i/bin/assemble @@ -0,0 +1,15 @@ +#!/bin/bash +echo "Before assembling" + +/s2i/bin/assemble +rc=$? + +if [ $rc -eq 0 ]; then + echo "After successful assembling" +else + echo "After failed assembling" + exit $rc +fi + +mkdir -p /tmp/.s2i/ +cp image_metadata.json /tmp/.s2i/image_metadata.json diff --git a/servers/mlflowserver/mlflowserver/image_metadata.json b/servers/mlflowserver/mlflowserver/image_metadata.json new file mode 100644 index 0000000000..275b776405 --- /dev/null +++ b/servers/mlflowserver/mlflowserver/image_metadata.json @@ -0,0 +1 @@ +{"labels": [{"name": "Seldon MLFlow Server"}, {"vendor": "Seldon Technologies"}, {"version": "1.1.1-rc"}, {"release": "1"}, {"summary": "An MLFlow Model Server for Seldon Core"}, {"description": "The model server for MLFlow models"}]} \ No newline at end of file diff --git a/servers/sklearnserver/Makefile b/servers/sklearnserver/Makefile index 5b7256f8b0..7a8e4f264b 100644 --- a/servers/sklearnserver/Makefile +++ b/servers/sklearnserver/Makefile @@ -1,14 +1,14 @@ -VERSION=0.3 -IMAGE_BASE=seldonio/sklearnserver -S2I_IMAGE_VERSION=1.1.1-SNAPSHOT - +SHELL := /bin/bash +VERSION := $(shell cat ../../version.txt) +IMAGE_NAME_BASE=sklearnserver +IMAGE_BASE=seldonio/${IMAGE_NAME_BASE} KIND_NAME ?= kind build_%: s2i build \ -E environment_$* \ ./sklearnserver \ - seldonio/seldon-core-s2i-python37:${S2I_IMAGE_VERSION} \ + seldonio/seldon-core-s2i-python37-ubi8:${VERSION} \ ${IMAGE_BASE}_$*:${VERSION} push_%: @@ -24,4 +24,20 @@ push_all: push_rest push_grpc build_all: build_rest build_grpc .PHONY: kind_load_all -kind_load_all: kind_load_rest kind_load_grpc +kind_load: build_all kind_load_rest kind_load_grpc + + +# https://connect.redhat.com/project/4060551/view +scan_rest=ospid-5c2f945c-69fa-4de4-8574-e9a61f69d69a +# https://connect.redhat.com/project/4061631/view +scan_grpc=ospid-d75d8767-c689-4368-9859-4ec4f0062a3a +redhat-image-scan-%: + docker pull ${IMAGE_BASE}_$*:${VERSION} + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_sklearnserver_$*} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag ${IMAGE_BASE}_$*:${VERSION} scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + docker push scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + +.PHONY: redhat-image-scan +redhat-image-scan: redhat-image-scan-rest redhat-image-scan-grpc + diff --git a/servers/sklearnserver/samples/iris_grpc.yaml b/servers/sklearnserver/samples/iris_grpc.yaml new file mode 100644 index 0000000000..f213bbb84c --- /dev/null +++ b/servers/sklearnserver/samples/iris_grpc.yaml @@ -0,0 +1,19 @@ +apiVersion: machinelearning.seldon.io/v1alpha2 +kind: SeldonDeployment +metadata: + name: sklearn +spec: + name: iris + transport: grpc + predictors: + - graph: + children: [] + implementation: SKLEARN_SERVER + modelUri: gs://seldon-models/sklearn/iris + name: classifier + name: default + replicas: 1 + svcOrchSpec: + env: + - name: SELDON_LOG_LEVEL + value: DEBUG diff --git a/servers/sklearnserver/sklearnserver/.s2i/bin/assemble b/servers/sklearnserver/sklearnserver/.s2i/bin/assemble new file mode 100644 index 0000000000..f3cac492de --- /dev/null +++ b/servers/sklearnserver/sklearnserver/.s2i/bin/assemble @@ -0,0 +1,15 @@ +#!/bin/bash +echo 
"Before assembling" + +/s2i/bin/assemble +rc=$? + +if [ $rc -eq 0 ]; then + echo "After successful assembling" +else + echo "After failed assembling" + exit $rc +fi + +mkdir -p /tmp/.s2i/ +cp image_metadata.json /tmp/.s2i/image_metadata.json diff --git a/servers/sklearnserver/sklearnserver/image_metadata.json b/servers/sklearnserver/sklearnserver/image_metadata.json new file mode 100644 index 0000000000..5c33c34922 --- /dev/null +++ b/servers/sklearnserver/sklearnserver/image_metadata.json @@ -0,0 +1 @@ +{"labels": [{"name": "Seldon SKLearn Server"}, {"vendor": "Seldon Technologies"}, {"version": "1.1.1-rc"}, {"release": "1"}, {"summary": "A SKLearn Model Server for Seldon Core"}, {"description": "The model server for sklearn models saved as pickles."}]} \ No newline at end of file diff --git a/servers/sklearnserver/sklearnserver/requirements.txt b/servers/sklearnserver/sklearnserver/requirements.txt index a5f4a48ab8..d96f530234 100644 --- a/servers/sklearnserver/sklearnserver/requirements.txt +++ b/servers/sklearnserver/sklearnserver/requirements.txt @@ -1,3 +1,4 @@ +seldon_core scikit-learn == 0.20.3 numpy >= 1.8.2 joblib >= 0.13.0 diff --git a/servers/tfserving/Dockerfile.redhat b/servers/tfserving/Dockerfile.redhat new file mode 100644 index 0000000000..455d78f231 --- /dev/null +++ b/servers/tfserving/Dockerfile.redhat @@ -0,0 +1,46 @@ +ARG TF_SERVING_VERSION=latest +ARG TF_SERVING_BUILD_IMAGE=tensorflow/serving:${TF_SERVING_VERSION}-devel + +FROM ${TF_SERVING_BUILD_IMAGE} as build_image +FROM registry.access.redhat.com/ubi8/ubi-minimal +LABEL name="Seldon Tensorflow Serving Server" \ + vendor="Seldon Technologies" \ + version="1.1.1-rc" \ + release="1" \ + summary="Tensorflow serving for Seldon Core" \ + description="Tensorflow serving for Seldon Core to server Tensorflow models" + +ARG TF_SERVING_VERSION_GIT_BRANCH=master +ARG TF_SERVING_VERSION_GIT_COMMIT=head + +LABEL maintainer="cc@seldon.io" +LABEL tensorflow_serving_github_branchtag=${TF_SERVING_VERSION_GIT_BRANCH} +LABEL tensorflow_serving_github_commit=${TF_SERVING_VERSION_GIT_COMMIT} + +RUN microdnf install openssl ca-certificates wget \ + && microdnf update; microdnf clean all + +# Install TF Serving pkg +COPY --from=build_image /usr/local/bin/tensorflow_model_server /usr/bin/tensorflow_model_server + +# Expose ports +# gRPC +EXPOSE 8500 + +# REST +EXPOSE 8501 + +# Set where models should be stored in the container +ENV MODEL_BASE_PATH=/models +RUN mkdir -p ${MODEL_BASE_PATH} + +# The only required piece is the model name in order to differentiate endpoints +ENV MODEL_NAME=model + +COPY tf_serving_entrypoint.sh /usr/bin + +RUN ln -s /etc/ssl/certs/ca-bundle.crt /etc/ssl/certs/ca-certificates.crt + +RUN mkdir /licenses && wget -O /licenses/license.txt https://raw.githubusercontent.com/tensorflow/serving/master/LICENSE + +ENTRYPOINT ["/usr/bin/tf_serving_entrypoint.sh"] diff --git a/servers/tfserving/LICENSE b/servers/tfserving/LICENSE new file mode 100644 index 0000000000..afb00ed7ac --- /dev/null +++ b/servers/tfserving/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016, The TensorFlow Serving Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
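The UBI8-based TF Serving image defined in Dockerfile.redhat above exposes gRPC on 8500 and REST on 8501 and serves the model found under ${MODEL_BASE_PATH}/${MODEL_NAME}. A minimal local smoke test might look like the sketch below; the local ./half_plus_two SavedModel directory is an assumption for illustration, while the seldonio/tfserving-ubi8:2.1.0 tag comes from the Makefile that follows.

    # run the UBI8 TF Serving image against a local SavedModel (hypothetical path)
    docker run --rm -p 8501:8501 \
      -e MODEL_NAME=half_plus_two \
      -v "$(pwd)/half_plus_two:/models/half_plus_two" \
      seldonio/tfserving-ubi8:2.1.0
    # standard TF Serving REST predict call
    curl -s -X POST http://localhost:8501/v1/models/half_plus_two:predict \
      -d '{"instances": [1.0, 2.0, 5.0]}'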
diff --git a/servers/tfserving/Makefile b/servers/tfserving/Makefile new file mode 100644 index 0000000000..617ef1c154 --- /dev/null +++ b/servers/tfserving/Makefile @@ -0,0 +1,17 @@ +SHELL := /bin/bash +VERSION=2.1.0 +IMAGE_NAME_BASE=tfserving +IMG ?= seldonio/${IMAGE_NAME_BASE}:${VERSION} +IMG_VERSION_REDHAT ?= ${IMAGE_NAME_BASE}-ubi8:${VERSION} +IMG_REDHAT ?= seldonio/${IMG_VERSION_REDHAT} + + +docker-build-redhat: + docker build . -f Dockerfile.redhat --build-arg TF_SERVING_VERSION=${VERSION} -t ${IMG_REDHAT} + +# password can be found at: https://connect.redhat.com/project/4098071/view +redhat-image-scan: docker-build-redhat + source ~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_tfserver} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag ${IMG_REDHAT} scan.connect.redhat.com/ospid-8714259c-f9bd-4ee9-9b6e-06f54354746e/${IMG_VERSION_REDHAT} + docker push scan.connect.redhat.com/ospid-8714259c-f9bd-4ee9-9b6e-06f54354746e/${IMG_VERSION_REDHAT} diff --git a/servers/tfserving/samples/halfplustwo_rest_resources.yaml b/servers/tfserving/samples/halfplustwo_rest_resources.yaml new file mode 100644 index 0000000000..d5413e84e0 --- /dev/null +++ b/servers/tfserving/samples/halfplustwo_rest_resources.yaml @@ -0,0 +1,28 @@ +apiVersion: machinelearning.seldon.io/v1alpha2 +kind: SeldonDeployment +metadata: + name: hpt +spec: + name: hpt + protocol: tensorflow + transport: rest + predictors: + - componentSpecs: + - spec: + containers: + - name: halfplustwo + resources: + requests: + memory: 1Mi + graph: + children: [] + implementation: TENSORFLOW_SERVER + modelUri: gs://seldon-models/tfserving/half_plus_two + name: halfplustwo + parameters: + - name: model_name + type: STRING + value: halfplustwo + name: default + replicas: 1 + diff --git a/servers/tfserving/tf_serving_entrypoint.sh b/servers/tfserving/tf_serving_entrypoint.sh new file mode 100755 index 0000000000..f77b3780d0 --- /dev/null +++ b/servers/tfserving/tf_serving_entrypoint.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +tensorflow_model_server --port=8500 --rest_api_port=8501 --model_name=${MODEL_NAME} --model_base_path=${MODEL_BASE_PATH}/${MODEL_NAME} "$@" + diff --git a/servers/xgboostserver/Makefile b/servers/xgboostserver/Makefile index b38f8b09ad..6421f7399a 100644 --- a/servers/xgboostserver/Makefile +++ b/servers/xgboostserver/Makefile @@ -1,19 +1,21 @@ -VERSION=0.4 -IMAGE_BASE=seldonio/xgboostserver -S2I_IMAGE_VERSION=1.1.1-SNAPSHOT +SHELL := /bin/bash +VERSION := $(shell cat ../../version.txt) +IMAGE_NAME_BASE=xgboostserver +IMAGE_BASE=seldonio/${IMAGE_NAME_BASE} +KIND_NAME ?= kind build_%: s2i build \ -E environment_$* \ ./xgboostserver \ - seldonio/seldon-core-s2i-python37:${S2I_IMAGE_VERSION} \ + seldonio/seldon-core-s2i-python37-ubi8:${VERSION} \ ${IMAGE_BASE}_$*:${VERSION} push_%: docker push ${IMAGE_BASE}_$*:${VERSION} kind_load_%: - kind load -v 3 docker-image ${IMAGE_BASE}_$*:${VERSION} + kind load -v 3 docker-image ${IMAGE_BASE}_$*:${VERSION} --name ${KIND_NAME} .PHONY: push_all push_all: push_rest push_grpc @@ -22,4 +24,19 @@ push_all: push_rest push_grpc build_all: build_rest build_grpc .PHONY: kind_load_all -kind_load_all: kind_load_rest kind_load_grpc +kind_load: build_all kind_load_rest kind_load_grpc + + +# https://connect.redhat.com/project/4127491/view +scan_rest=ospid-096b89ca-b4a6-4cff-8c13-d65945a5adb9 +# +scan_grpc=ospid-ba6387aa-1144-4764-8b04-80e3d34fbcc4 +redhat-image-scan-%: + docker pull ${IMAGE_BASE}_$*:${VERSION} + source 
~/.config/seldon/seldon-core/redhat-image-passwords.sh && \ + echo $${rh_password_xgboostserver_$*} | docker login -u unused scan.connect.redhat.com --password-stdin + docker tag ${IMAGE_BASE}_$*:${VERSION} scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + docker push scan.connect.redhat.com/${scan_$*}/${IMAGE_NAME_BASE}_$*:${VERSION} + +.PHONY: redhat-image-scan +redhat-image-scan: redhat-image-scan-rest redhat-image-scan-grpc diff --git a/servers/xgboostserver/xgboostserver/.s2i/bin/assemble b/servers/xgboostserver/xgboostserver/.s2i/bin/assemble new file mode 100644 index 0000000000..f3cac492de --- /dev/null +++ b/servers/xgboostserver/xgboostserver/.s2i/bin/assemble @@ -0,0 +1,15 @@ +#!/bin/bash +echo "Before assembling" + +/s2i/bin/assemble +rc=$? + +if [ $rc -eq 0 ]; then + echo "After successful assembling" +else + echo "After failed assembling" + exit $rc +fi + +mkdir -p /tmp/.s2i/ +cp image_metadata.json /tmp/.s2i/image_metadata.json diff --git a/servers/xgboostserver/xgboostserver/image_metadata.json b/servers/xgboostserver/xgboostserver/image_metadata.json new file mode 100644 index 0000000000..83786ac2dc --- /dev/null +++ b/servers/xgboostserver/xgboostserver/image_metadata.json @@ -0,0 +1 @@ +{"labels": [{"name": "Seldon XGBoost Server"}, {"vendor": "Seldon Technologies"}, {"version": "1.1.1-rc"}, {"release": "1"}, {"summary": "An XGBoost Model Server for Seldon Core"}, {"description": "The model server for XGBoost models"}]} \ No newline at end of file diff --git a/testing/scripts/Makefile b/testing/scripts/Makefile index b8fdbac1d6..6a566c25cb 100644 --- a/testing/scripts/Makefile +++ b/testing/scripts/Makefile @@ -22,7 +22,21 @@ kind_build_executor: kind_build_fixed_models: cd ../docker/fixed-model && make kind_load_images -kind_build_images: build_protos kind_build_engine kind_build_operator kind_build_executor kind_build_fixed_models +kind_build_prepackaged: + cd ../../servers/sklearnserver && make kind_load + cd ../../servers/xgboostserver && make kind_load + cd ../../servers/mlflowserver && make kind_load + cd ../../integrations/tfserving && make kind_load + +kind_build_alibi: + cd ../../components/alibi-detect-server && make kind_load + cd ../../components/alibi-explain-server && make kind_load + +kind_build_misc: + cd ../../components/seldon-request-logger && make kind_load + cd ../../components/storage-initializer && make kind_load + +kind_build_images: build_protos kind_build_engine kind_build_operator kind_build_executor kind_build_fixed_models kind_build_prepackaged kind_build_alibi kind_build_misc helm_setup: helm repo add stable https://kubernetes-charts.storage.googleapis.com/ diff --git a/testing/scripts/kind_test_all.sh b/testing/scripts/kind_test_all.sh index 974f6bdb85..757e9ce13e 100755 --- a/testing/scripts/kind_test_all.sh +++ b/testing/scripts/kind_test_all.sh @@ -111,6 +111,14 @@ if [[ ${KIND_EXIT_VALUE} -eq 0 ]]; then return 1 fi + echo "Build prepackaged servers and alibi wrappers" + make kind_build_prepackaged kind_build_alibi kind_build_misc + KIND_BUILD_EXIT_VALUE=$? + if [[ $KIND_BUILD_EXIT_VALUE -gt 0 ]]; then + echo "Kind build has errors" + return 1 + fi + + # KIND CLUSTER SETUP make kind_setup SETUP_EXIT_VALUE=$?
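With the prepackaged server images loaded into kind by the new kind_build_* targets above, the halfplustwo sample manifest added earlier gives a quick end-to-end check of the tensorflow protocol path. A sketch, assuming the manifest is applied to the default namespace and the Seldon ingress has been port-forwarded to localhost:8003 (both assumptions for illustration):

    kubectl apply -f servers/tfserving/samples/halfplustwo_rest_resources.yaml
    kubectl get seldondeployment hpt -o jsonpath='{.status.state}'   # wait until Available
    # tensorflow-protocol REST request routed through the Seldon ingress
    curl -s -X POST http://localhost:8003/seldon/default/hpt/v1/models/halfplustwo:predict \
      -d '{"instances": [1.0, 2.0, 5.0]}'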
diff --git a/testing/scripts/seldon_e2e_utils.py b/testing/scripts/seldon_e2e_utils.py index 613ea98aa6..5a284c4e05 100644 --- a/testing/scripts/seldon_e2e_utils.py +++ b/testing/scripts/seldon_e2e_utils.py @@ -99,7 +99,7 @@ def get_deployment_names(sdep_name, namespace, attempts=20, sleep=5): def wait_for_rollout( - sdep_name, namespace, attempts=20, sleep=5, expected_deployments=1 + sdep_name, namespace, attempts=30, sleep=5, expected_deployments=1 ): deployment_names = [] for _ in range(attempts): diff --git a/version.txt b/version.txt index bc5594a19d..65ea59b894 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -1.1.1-SNAPSHOT +1.1.1-rc diff --git a/wrappers/s2i/python/Dockerfile.tmpl b/wrappers/s2i/python/Dockerfile similarity index 79% rename from wrappers/s2i/python/Dockerfile.tmpl rename to wrappers/s2i/python/Dockerfile index 4fe8d72dff..3ee34eb23f 100644 --- a/wrappers/s2i/python/Dockerfile.tmpl +++ b/wrappers/s2i/python/Dockerfile @@ -1,9 +1,12 @@ -FROM continuumio/miniconda3:%CONDA_VERSION% +ARG CONDA_VERSION +ARG BASE_IMAGE +FROM $BASE_IMAGE:$CONDA_VERSION LABEL io.openshift.s2i.scripts-url="image:///s2i/bin" # This is to install desired version of Python without updating conda version -RUN conda install --yes python=%PYTHON_VERSION% conda=%CONDA_VERSION% +ARG PYTHON_VERSION +RUN conda install --yes python=$PYTHON_VERSION conda=$CONDA_VERSION RUN apt-get update --yes && apt-get install --yes gcc make build-essential RUN mkdir microservice diff --git a/wrappers/s2i/python/Dockerfile.conda b/wrappers/s2i/python/Dockerfile.conda new file mode 100644 index 0000000000..2da81f0a9d --- /dev/null +++ b/wrappers/s2i/python/Dockerfile.conda @@ -0,0 +1,29 @@ +FROM registry.access.redhat.com/ubi8/ubi + +# $ docker build . -t continuumio/miniconda3:latest -t continuumio/miniconda3:4.5.11 +# $ docker run --rm -it continuumio/miniconda3:latest /bin/bash +# $ docker push continuumio/miniconda3:latest +# $ docker push continuumio/miniconda3:4.5.11 + +ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 +ENV PATH /opt/conda/bin:$PATH + +RUN dnf update -y && \ + dnf install -y wget bzip2 ca-certificates curl git + +ARG CONDA_VERSION +RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-$CONDA_VERSION-Linux-x86_64.sh -O ~/miniconda.sh && \ + /bin/bash ~/miniconda.sh -b -p /opt/conda && \ + rm ~/miniconda.sh && \ + /opt/conda/bin/conda clean -tipsy && \ + ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \ + echo ". 
/opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc && \ + echo "conda activate base" >> ~/.bashrc && \ + chgrp -R root /opt/conda && chmod -R g+rw /opt/conda + +ENV TINI_VERSION v0.16.1 +ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/bin/tini +RUN chmod +x /usr/bin/tini + +ENTRYPOINT [ "/usr/bin/tini", "--" ] +CMD [ "/bin/bash" ] diff --git a/wrappers/s2i/python/Dockerfile.gpu.tmpl b/wrappers/s2i/python/Dockerfile.gpu similarity index 100% rename from wrappers/s2i/python/Dockerfile.gpu.tmpl rename to wrappers/s2i/python/Dockerfile.gpu diff --git a/wrappers/s2i/python/Dockerfile.local.tmpl b/wrappers/s2i/python/Dockerfile.local similarity index 76% rename from wrappers/s2i/python/Dockerfile.local.tmpl rename to wrappers/s2i/python/Dockerfile.local index f6f5243fe9..d2bc429ef7 100644 --- a/wrappers/s2i/python/Dockerfile.local.tmpl +++ b/wrappers/s2i/python/Dockerfile.local @@ -1,9 +1,12 @@ -FROM continuumio/miniconda3:%CONDA_VERSION% +ARG CONDA_VERSION +ARG BASE_IMAGE +FROM $BASE_IMAGE:$CONDA_VERSION LABEL io.openshift.s2i.scripts-url="image:///s2i/bin" # This is to install desired version of Python without updating conda version -RUN conda install --yes python=%PYTHON_VERSION% conda=%CONDA_VERSION% +ARG PYTHON_VERSION +RUN conda install --yes python=$PYTHON_VERSION conda=$CONDA_VERSION RUN apt-get update --yes && apt-get install --yes gcc make build-essential RUN mkdir microservice diff --git a/wrappers/s2i/python/Dockerfile.redhat b/wrappers/s2i/python/Dockerfile.redhat new file mode 100644 index 0000000000..17c792e4bf --- /dev/null +++ b/wrappers/s2i/python/Dockerfile.redhat @@ -0,0 +1,24 @@ +ARG CONDA_VERSION +ARG BASE_IMAGE +FROM $BASE_IMAGE:$CONDA_VERSION + +LABEL io.openshift.s2i.scripts-url="image:///s2i/bin" + +# This is to install desired version of Python without updating conda version +ARG PYTHON_VERSION +RUN conda install --yes python=$PYTHON_VERSION conda=$CONDA_VERSION +RUN dnf install -y make automake gcc gcc-c++ + +RUN mkdir microservice +WORKDIR /microservice + +COPY ./s2i/bin/ /s2i/bin + +# keep install of seldon-core after the COPY to force re-build of layer +COPY _python /microservice +RUN cd /microservice/python && make install +COPY _python/python/licenses/license.txt . + +RUN mkdir -p /.conda && chmod a+rwx /.conda + +EXPOSE 5000 diff --git a/wrappers/s2i/python/Makefile b/wrappers/s2i/python/Makefile index 2cd5789c23..976c5f87b0 100644 --- a/wrappers/s2i/python/Makefile +++ b/wrappers/s2i/python/Makefile @@ -4,13 +4,17 @@ SHELL:=/bin/bash PYTHON_VERSION=3.7 CONDA_VERSION=4.7.12 IMAGE_PYTHON_VERSION=`echo -n $(PYTHON_VERSION) | sed 's/\.//g'` -BASE_IMAGE_PYTHON_VERSION=`echo -n $(PYTHON_VERSION) | cut -d. -f1` +DEFAULT_IMAGE_PYTHON_VERSION=`echo -n $(PYTHON_VERSION) | cut -d. -f1` IMAGE_NAME = docker.io/seldonio/seldon-core-s2i-python${IMAGE_PYTHON_VERSION} -BASE_IMAGE_NAME = docker.io/seldonio/seldon-core-s2i-python${BASE_IMAGE_PYTHON_VERSION} +DEFAULT_IMAGE_NAME = docker.io/seldonio/seldon-core-s2i-python${DEFAULT_IMAGE_PYTHON_VERSION} GPU_IMAGE_NAME = docker.io/seldonio/seldon-core-s2i-python3-tf-gpu SELDON_CORE_DIR=../../.. 
+# Base images to build the s2i builder images from +BASE_IMAGE=continuumio/miniconda3 +CONDA_BASE_IMAGE=docker.io/seldonio/conda-ubi8 + .PHONY: get_local_repo get_local_repo: mkdir -p _python @@ -19,42 +23,45 @@ get_local_repo: .PHONY: build build: get_local_repo - cat Dockerfile.tmpl | sed -e "s|%PYTHON_VERSION%|$(PYTHON_VERSION)|" | sed -e "s|%CONDA_VERSION%|$(CONDA_VERSION)|" > Dockerfile - set -x && docker build -t $(IMAGE_NAME):$(IMAGE_VERSION) . + set -x && docker build -f Dockerfile --build-arg PYTHON_VERSION=${PYTHON_VERSION} --build-arg CONDA_VERSION=${CONDA_VERSION} --build-arg BASE_IMAGE=${BASE_IMAGE} -t $(IMAGE_NAME):$(IMAGE_VERSION) . .PHONY: build_gpu build_gpu: get_local_repo - cat Dockerfile.gpu.tmpl > Dockerfile - docker build -t ${GPU_IMAGE_NAME}:${IMAGE_VERSION} . - + docker build -f Dockerfile.gpu -t ${GPU_IMAGE_NAME}:${IMAGE_VERSION} . .PHONY: build_local build_local: get_local_repo - cat Dockerfile.local.tmpl | sed -e "s|%PYTHON_VERSION%|$(PYTHON_VERSION)|" | sed -e "s|%CONDA_VERSION%|$(CONDA_VERSION)|" > Dockerfile - set -x && docker build -t $(IMAGE_NAME):$(IMAGE_VERSION) . + set -x && docker build -f Dockerfile.local --build-arg PYTHON_VERSION=${PYTHON_VERSION} --build-arg CONDA_VERSION=${CONDA_VERSION} --build-arg BASE_IMAGE=${BASE_IMAGE} -t $(IMAGE_NAME):$(IMAGE_VERSION) . + +.PHONY: build_redhat +build_redhat: get_local_repo + set -x && docker build -f Dockerfile.redhat --build-arg PYTHON_VERSION=${PYTHON_VERSION} --build-arg CONDA_VERSION=${CONDA_VERSION} --build-arg BASE_IMAGE=${CONDA_BASE_IMAGE} -t $(IMAGE_NAME)-ubi8:$(IMAGE_VERSION) . + + tag_base_python: - docker tag $(IMAGE_NAME):$(IMAGE_VERSION) $(BASE_IMAGE_NAME):$(IMAGE_VERSION) + docker tag $(IMAGE_NAME):$(IMAGE_VERSION) $(DEFAULT_IMAGE_NAME):$(IMAGE_VERSION) push_to_dockerhub_base_python: - docker push $(BASE_IMAGE_NAME):$(IMAGE_VERSION) + docker push $(DEFAULT_IMAGE_NAME):$(IMAGE_VERSION) push_to_dockerhub: docker push $(IMAGE_NAME):$(IMAGE_VERSION) +push_redhat_to_dockerhub: + docker push $(IMAGE_NAME)-ubi8:$(IMAGE_VERSION) + push_gpu_to_dockerhub: docker push ${GPU_IMAGE_NAME}:${IMAGE_VERSION} .PHONY: test test: - cat Dockerfile.tmpl | sed -e "s|%PYTHON_VERSION%|$(PYTHON_VERSION)|" | sed -e "s|%CONDA_VERSION%|$(CONDA_VERSION)|" > Dockerfile - docker build -t $(IMAGE_NAME)-candidate . + docker build --build-arg PYTHON_VERSION=${PYTHON_VERSION} --build-arg CONDA_VERSION=${CONDA_VERSION} --build-arg BASE_IMAGE=${BASE_IMAGE} -t $(IMAGE_NAME)-candidate . IMAGE_NAME=$(IMAGE_NAME)-candidate test/run .PHONY: test_local test_local: - cat Dockerfile.local.tmpl | sed -e "s|%PYTHON_VERSION%|$(PYTHON_VERSION)|" | sed -e "s|%CONDA_VERSION%|$(CONDA_VERSION)|" > Dockerfile - docker build -t $(IMAGE_NAME)-candidate . + docker build -f Dockerfile.local --build-arg PYTHON_VERSION=${PYTHON_VERSION} --build-arg CONDA_VERSION=${CONDA_VERSION} --build-arg BASE_IMAGE=${BASE_IMAGE} -t $(IMAGE_NAME)-candidate . IMAGE_NAME=$(IMAGE_NAME)-candidate test/run .PHONY: clean @@ -68,3 +75,10 @@ clean: after_build_image_seldon_core_check: docker run --rm -it docker.io/seldonio/seldon-core-s2i-python36:$(IMAGE_VERSION) python -c 'import seldon_core; print(seldon_core.version.__version__)' docker run --rm -it docker.io/seldonio/seldon-core-s2i-python37:$(IMAGE_VERSION) python -c 'import seldon_core; print(seldon_core.version.__version__)' + + + +build_conda_base: + docker build -f Dockerfile.conda --build-arg CONDA_VERSION=${CONDA_VERSION} -t ${CONDA_BASE_IMAGE}:${CONDA_VERSION} . 
+ + diff --git a/wrappers/s2i/python/build_scripts/build_all_local.sh b/wrappers/s2i/python/build_scripts/build_all_local.sh index 7717e34b5d..87d5093ddd 100755 --- a/wrappers/s2i/python/build_scripts/build_all_local.sh +++ b/wrappers/s2i/python/build_scripts/build_all_local.sh @@ -1,3 +1,4 @@ ./build_local_python3.6.sh ./build_local_python3.7.sh ./build_python_gpu.sh +./build_redhat.sh diff --git a/wrappers/s2i/python/build_scripts/build_local_python3.7.sh b/wrappers/s2i/python/build_scripts/build_local_python3.7.sh index 17db86ceff..2e4558bc95 100755 --- a/wrappers/s2i/python/build_scripts/build_local_python3.7.sh +++ b/wrappers/s2i/python/build_scripts/build_local_python3.7.sh @@ -1,4 +1,2 @@ -# NB: Tensorflow does not work with python 3.7 at present -# see https://github.com/tensorflow/tensorflow/issues/20444 make -C ../ build_local PYTHON_VERSION=3.7 make -C ../ tag_base_python PYTHON_VERSION=3.7 diff --git a/wrappers/s2i/python/build_scripts/build_redhat.sh b/wrappers/s2i/python/build_scripts/build_redhat.sh new file mode 100755 index 0000000000..00d4601e8f --- /dev/null +++ b/wrappers/s2i/python/build_scripts/build_redhat.sh @@ -0,0 +1,2 @@ +make -C ../ build_conda_base +make -C ../ build_redhat diff --git a/wrappers/s2i/python/requirements.txt b/wrappers/s2i/python/requirements.txt index b5adf4a540..b7c24161be 100644 --- a/wrappers/s2i/python/requirements.txt +++ b/wrappers/s2i/python/requirements.txt @@ -1 +1 @@ -seldon-core==0.5.1 +seldon-core diff --git a/wrappers/s2i/python/s2i/bin/assemble b/wrappers/s2i/python/s2i/bin/assemble index 367bf84816..cb3a45359d 100755 --- a/wrappers/s2i/python/s2i/bin/assemble +++ b/wrappers/s2i/python/s2i/bin/assemble @@ -69,3 +69,10 @@ elif [[ -f environment.yml ]]; then fi conda env create --name $CONDA_ENV_NAME --file environment.yml fi + + +# Add licences +pip install pip-licenses +mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \ + pip-licenses --from=mixed --format=plain-vertical --with-license-file --no-license-path --output-file=./licenses/license.txt +mv ./licenses /licenses diff --git a/wrappers/s2i/python/test/model-template-app/MyModel.py b/wrappers/s2i/python/test/model-template-app/MyModel.py index 34e1e48d16..3bf88e2bf6 100644 --- a/wrappers/s2i/python/test/model-template-app/MyModel.py +++ b/wrappers/s2i/python/test/model-template-app/MyModel.py @@ -22,7 +22,7 @@ def predict(self,X,features_names): print("Predict called - will run idenity function") return X - def send_feedback(self,features,feature_names,reward,truth): + def send_feedback(self,features,feature_names,reward,truth,routing): """ Handle feedback diff --git a/wrappers/s2i/python/test/router-template-app/MyRouter.py b/wrappers/s2i/python/test/router-template-app/MyRouter.py index bd49e7881d..be9f894675 100644 --- a/wrappers/s2i/python/test/router-template-app/MyRouter.py +++ b/wrappers/s2i/python/test/router-template-app/MyRouter.py @@ -22,7 +22,7 @@ def route(self,features,feature_names): """ return 0 - def send_feedback(self,features,feature_names,routing,reward,truth): + def send_feedback(self,features,feature_names,reward,truth,routing): """ Handle feedback for your routings. Optional. 
diff --git a/wrappers/s2i/python/update_python_version.sh b/wrappers/s2i/python/update_python_version.sh deleted file mode 100755 index 3bec187031..0000000000 --- a/wrappers/s2i/python/update_python_version.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -set -o nounset -set -o errexit -set -o pipefail - -STARTUP_DIR="$( cd "$( dirname "$0" )" && pwd )" - -if [ "$#" -ne 2 ]; then - echo "You must enter " - exit 1 -fi - -OLD_VERSION=$1 -NEW_VERSION=$2 - -declare -a paths=('./examples/*.ipynb' './doc/*.md' './docs/*.md' './example/*Makefile' './integrations/*Makefile') -declare -a versions=('3' '36' '37') - -cd ../../.. - -for i in "${paths[@]}" -do - for PYTHON_VERSION in "${versions[@]}" - do - echo "Updating python version ${PYTHON_VERSION} in $i from ${OLD_VERSION} to ${NEW_VERSION}" - find . -type f -path "$i" -exec grep -l seldon-core-s2i-python${PYTHON_VERSION}:${OLD_VERSION}-SNAPSHOT \{\} \; | xargs -n1 -r sed -i "s/seldon-core-s2i-python${PYTHON_VERSION}:${OLD_VERSION}-SNAPSHOT/seldon-core-s2i-python${PYTHON_VERSION}:${OLD_VERSION}/g" - find . -type f -path "$i" -exec grep -l seldon-core-s2i-python${PYTHON_VERSION}:${OLD_VERSION} \{\} \; | xargs -n1 -r sed -i "s/seldon-core-s2i-python${PYTHON_VERSION}:${OLD_VERSION}/seldon-core-s2i-python${PYTHON_VERSION}:${NEW_VERSION}/g" - done -done - - -
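The removed update_python_version.sh was the helper that bulk-rewrote seldon-core-s2i-python* image tags across examples, docs and integration Makefiles. If that rewrite is still needed after this removal, a one-off equivalent can be run by hand; the paths and the old and new tags below are placeholders for illustration:

    find ./examples ./doc ./docs -type f \( -name '*.ipynb' -o -name '*.md' -o -name 'Makefile' \) \
      -exec sed -i 's/seldon-core-s2i-python37:1.1.0/seldon-core-s2i-python37:1.1.1-rc/g' {} +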