Skip to content

Commit

Permalink
Merge pull request #57 from red-hat-data-services/release-2023a
Browse files Browse the repository at this point in the history
Sync commits from release-2023a branch to rhods-1.34
  • Loading branch information
harshad16 authored Oct 6, 2023
2 parents 3cc8907 + 21e8255 commit 856cb38
Show file tree
Hide file tree
Showing 47 changed files with 16,318 additions and 3,838 deletions.
73 changes: 65 additions & 8 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ define image
$(call push_image,$(1))
endef

####################################### Buildchain for Python 3.8 using ubi8 #######################################

# Build and push base-ubi8-python-3.8 image to the registry
.PHONY: base-ubi8-python-3.8
base-ubi8-python-3.8:
Expand Down Expand Up @@ -87,6 +89,21 @@ jupyter-pytorch-ubi8-python-3.8: cuda-jupyter-datascience-ubi8-python-3.8
jupyter-trustyai-ubi8-python-3.8: jupyter-datascience-ubi8-python-3.8
$(call image,$@,jupyter/trustyai/ubi8-python-3.8,$<)

# Build and push habana-jupyter-1.9.0-ubi8-python-3.8 image to the registry.
# Layered on top of jupyter-datascience-ubi8-python-3.8 ($<); the Containerfile
# context is the habana/1.9.0/ubi8-python-3.8 directory (see `define image` above).
.PHONY: habana-jupyter-1.9.0-ubi8-python-3.8
habana-jupyter-1.9.0-ubi8-python-3.8: jupyter-datascience-ubi8-python-3.8
$(call image,$@,habana/1.9.0/ubi8-python-3.8,$<)

# Build and push habana-jupyter-1.10.0-ubi8-python-3.8 image to the registry.
# Same buildchain as 1.9.0, pointing at the habana/1.10.0 context directory.
.PHONY: habana-jupyter-1.10.0-ubi8-python-3.8
habana-jupyter-1.10.0-ubi8-python-3.8: jupyter-datascience-ubi8-python-3.8
$(call image,$@,habana/1.10.0/ubi8-python-3.8,$<)

# Build and push habana-jupyter-1.11.0-ubi8-python-3.8 image to the registry.
# Same buildchain as 1.9.0, pointing at the habana/1.11.0 context directory.
.PHONY: habana-jupyter-1.11.0-ubi8-python-3.8
habana-jupyter-1.11.0-ubi8-python-3.8: jupyter-datascience-ubi8-python-3.8
$(call image,$@,habana/1.11.0/ubi8-python-3.8,$<)

# Build and push runtime-minimal-ubi8-python-3.8 image to the registry
.PHONY: runtime-minimal-ubi8-python-3.8
runtime-minimal-ubi8-python-3.8: base-ubi8-python-3.8
Expand Down Expand Up @@ -210,7 +227,7 @@ jupyter-datascience-anaconda-python-3.8: base-anaconda-python-3.8
$(call image,$@,jupyter/datascience/anaconda-python-3.8,$<)


####################################### Buildchain for Anaconda Python #######################################
####################################### Deployments #######################################

# Download kubectl binary
.PHONY: bin/kubectl
Expand Down Expand Up @@ -293,17 +310,57 @@ undeploy-c9s-%-c9s-python-3.9: bin/kubectl
$(info # Undeploying notebook from $(NOTEBOOK_DIR) directory...)
$(KUBECTL_BIN) delete -k $(NOTEBOOK_DIR)

# Verify the notebook's readiness by pinging the /api endpoint, then execute
# the corresponding test_notebook.ipynb file in accordance with the buildchain logic.
.PHONY: test
test-%: bin/kubectl
$(eval NOTEBOOK_NAME := $(subst .,-,$(subst cuda-,,$*)))
$(info # Running tests for $(NOTEBOOK_NAME) notebook...)
$(KUBECTL_BIN) wait --for=condition=ready pod -l app=$(NOTEBOOK_NAME) --timeout=600s
$(KUBECTL_BIN) port-forward svc/$(NOTEBOOK_NAME)-notebook 8888:8888 &
curl --retry 5 --retry-delay 5 --retry-connrefused \
http://localhost:8888/notebook/opendatahub/jovyan/api; EXIT_CODE=$$?; echo && \
pkill --full "^$(KUBECTL_BIN).*port-forward.*"; \
exit $${EXIT_CODE}
$(KUBECTL_BIN) wait --for=condition=ready pod -l app=$(NOTEBOOK_NAME) --timeout=300s
$(KUBECTL_BIN) port-forward svc/$(NOTEBOOK_NAME)-notebook 8888:8888 & curl --retry 5 --retry-delay 5 --retry-connrefused http://localhost:8888/notebook/opendatahub/jovyan/api ; EXIT_CODE=$$?; echo && pkill --full "^$(KUBECTL_BIN).*port-forward.*"; \
$(eval FULL_NOTEBOOK_NAME = $(shell ($(KUBECTL_BIN) get pods -l app=$(NOTEBOOK_NAME) -o custom-columns=":metadata.name" | tr -d '\n')))
echo "=> Checking $(FULL_NOTEBOOK_NAME) notebook execution..." ; \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "python3 -m pip install papermill" ; \
if echo "$(FULL_NOTEBOOK_NAME)" | grep -q "minimal-ubi9"; then \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/minimal/ubi9-python-3.9/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb minimal_ubi9_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "datascience-ubi9"; then \
$(MAKE) validate-ubi9-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "pytorch-ubi9"; then \
$(MAKE) validate-ubi9-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/pytorch/ubi9-python-3.9/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb pytorch_ubi9_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "tensorflow-ubi9"; then \
$(MAKE) validate-ubi9-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/tensorflow/ubi9-python-3.9/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb tensorflow_ubi9_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "trustyai-ubi9"; then \
$(MAKE) validate-ubi9-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/trustyai/ubi9-python-3.9/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb trustyai_ubi9_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "minimal-ubi8"; then \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/minimal/ubi8-python-3.8/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb minimal_ubi8_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "datascience-ubi8"; then \
$(MAKE) validate-ubi8-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "pytorch-ubi8"; then \
$(MAKE) validate-ubi8-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/pytorch/ubi8-python-3.8/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb pytorch_ubi8_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "tensorflow-ubi8"; then \
$(MAKE) validate-ubi8-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/tensorflow/ubi8-python-3.8/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb tensorflow_ubi8_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "trustyai-ubi8"; then \
$(MAKE) validate-ubi8-datascience -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME); \
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/trustyai/ubi8-python-3.8/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb trustyai_ubi8_output.ipynb --kernel python3 > /dev/null" ; \
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "anaconda"; then \
echo "There is no test notebook implemented yet for Anaconda Notebook...." ; \
else \
echo "No matching condition found for $(FULL_NOTEBOOK_NAME)." ; \
fi

# Run the minimal and datascience test notebooks inside the running ubi9
# workbench pod. FULL_NOTEBOOK_NAME is supplied by the caller (the test-%
# target invokes this via `$(MAKE) ... -e FULL_NOTEBOOK_NAME=...`).
# The two exec commands are chained with && (was `;`) so a failing notebook
# execution now fails the target; the stray trailing `; \` continuation on the
# last line has also been removed.
.PHONY: validate-ubi9-datascience
validate-ubi9-datascience:
	$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/minimal/ubi9-python-3.9/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb minimal_ubi9_output.ipynb --kernel python3 > /dev/null" && \
	$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/datascience/ubi9-python-3.9/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb datascience_ubi9_output.ipynb --kernel python3 > /dev/null"

# Run the minimal and datascience test notebooks inside the running ubi8
# workbench pod. FULL_NOTEBOOK_NAME is supplied by the caller (the test-%
# target invokes this via `$(MAKE) ... -e FULL_NOTEBOOK_NAME=...`).
# The two exec commands are chained with && (was `;`) so a failing notebook
# execution now fails the target; the stray trailing `; \` continuation on the
# last line has also been removed.
.PHONY: validate-ubi8-datascience
validate-ubi8-datascience:
	$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/minimal/ubi8-python-3.8/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb minimal_ubi8_output.ipynb --kernel python3 > /dev/null" && \
	$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget https://raw.githubusercontent.com/opendatahub-io/notebooks/main/jupyter/datascience/ubi8-python-3.8/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb datascience_ubi8_output.ipynb --kernel python3 > /dev/null"

# Validate that runtime image meets minimum criteria
# This validation is created from subset of https://github.com/elyra-ai/elyra/blob/9c417d2adc9d9f972de5f98fd37f6945e0357ab9/Makefile#L325
Expand Down
27 changes: 27 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# Notebook Images

[![GitHub Tag](https://img.shields.io/github/v/tag/opendatahub-io/notebooks?style=plastic)](https://github.com/opendatahub-io/notebooks/releases)
[![Docker Repository on Quay](https://quay.io/repository/opendatahub/workbench-images/status "Docker Repository on Quay")](https://quay.io/repository/opendatahub/workbench-images?tab=tags)

These images were created to be used with Open Data Hub (ODH) with the ODH Notebook Controller as the launcher.

## Container Image Layering
Expand Down Expand Up @@ -40,6 +43,18 @@ graph TB
cuda-jupyter-datascience-ubi8-python-3.8 --> cuda-jupyter-tensorflow-ubi8-python-3.8;
end
subgraph Habana
%% Nodes
habana-jupyter-1.9.0-ubi8-python-3.8("HabanaAI Data Science Notebook<br/>(habana-jupyter-1.9.0-ubi8-python-3.8)");
habana-jupyter-1.10.0-ubi8-python-3.8("HabanaAI Data Science Notebook<br/>(habana-jupyter-1.10.0-ubi8-python-3.8)");
habana-jupyter-1.11.0-ubi8-python-3.8("HabanaAI Data Science Notebook<br/>(habana-jupyter-1.11.0-ubi8-python-3.8)");
%% Edges
jupyter-datascience-ubi8-python-3.8 --> habana-jupyter-1.9.0-ubi8-python-3.8;
jupyter-datascience-ubi8-python-3.8 --> habana-jupyter-1.10.0-ubi8-python-3.8;
jupyter-datascience-ubi8-python-3.8 --> habana-jupyter-1.11.0-ubi8-python-3.8;
end
subgraph Runtimes
%% Nodes
runtimes-minimal-ubi8-python-3.8("Minimal Runtime<br/>(runtime-minimal-ubi8-python-3.8)");
Expand Down Expand Up @@ -129,6 +144,9 @@ The following workbench images are available:
- cuda-jupyter-minimal-ubi8-python-3.8
- cuda-jupyter-datascience-ubi8-python-3.8
- cuda-jupyter-tensorflow-ubi8-python-3.8
- habana-jupyter-1.9.0-ubi8-python-3.8
- habana-jupyter-1.10.0-ubi8-python-3.8
- habana-jupyter-1.11.0-ubi8-python-3.8
- runtime-minimal-ubi8-python-3.8
- runtime-datascience-ubi8-python-3.8
- runtime-pytorch-ubi8-python-3.8
Expand Down Expand Up @@ -166,6 +184,15 @@ You can overwrite `IMAGE_REGISTRY` and `RELEASE` variables to use a different re
make ${WORKBENCH_NAME} -e IMAGE_REGISTRY=quay.io/${YOUR_USER}/workbench-images -e RELEASE=2023x
```

## Local Execution

The notebook images can be run as containers on a local system.
Use podman or docker to run the workbench images as containers.

```shell
podman run -p 8888:8888 quay.io/opendatahub/workbench-images:jupyter-minimal-ubi8-python-3.8-20230808
```

## Testing Notebooks

Deploy the notebook images in your Kubernetes environment using deploy8-${NOTEBOOK_NAME} for ubi8 or deploy9-${NOTEBOOK_NAME} for ubi9:
Expand Down
163 changes: 163 additions & 0 deletions habana/1.10.0/ubi8-python-3.8/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
# Copyright (c) 2022 Habana Labs, Ltd.
#
# SPDX-License-Identifier: Apache-2.0
#
# HabanaLabs Dockerfile base installer layer for RedHat 8.6
# Reference: https://github.com/HabanaAI/Setup_and_Install/blob/1.10.0/dockerfiles/base/Dockerfile.rhel8.6
ARG BASE_IMAGE
FROM ${BASE_IMAGE}

# Habana artifact server and the SynapseAI/PyTorch versions installed below.
ARG ARTIFACTORY_URL="vault.habana.ai"
ARG VERSION="1.10.0"
ARG REVISION="494"
ARG PT_VERSION="2.0.1"

USER root

# EPEL provides jemalloc and other extras not in the ubi8 repos.
RUN dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
    dnf clean all && rm -rf /var/cache/yum

# CentOS Stream AppStream/BaseOS repos are added (gpgcheck disabled) because
# ubi8 repos alone do not carry all packages required by the Habana stack.
RUN echo "[appstream]" > /etc/yum.repos.d/CentOS-Linux-AppStream.repo && \
    echo "name=CentOS Linux 8 - AppStream" >> /etc/yum.repos.d/CentOS-Linux-AppStream.repo && \
    echo "mirrorlist=http://mirrorlist.centos.org/?release=\$releasever-stream&arch=\$basearch&repo=AppStream&infra=\$infra" >> /etc/yum.repos.d/CentOS-Linux-AppStream.repo && \
    echo "gpgcheck=0" >> /etc/yum.repos.d/CentOS-Linux-AppStream.repo

RUN echo "[BaseOS]" > /etc/yum.repos.d/CentOS-Linux-BaseOS.repo && \
    echo "name=CentOS Linux 8 - BaseOS" >> /etc/yum.repos.d/CentOS-Linux-BaseOS.repo && \
    echo "mirrorlist=http://mirrorlist.centos.org/?release=\$releasever-stream&arch=\$basearch&repo=BaseOS&infra=\$infra" >> /etc/yum.repos.d/CentOS-Linux-BaseOS.repo && \
    echo "gpgcheck=0" >> /etc/yum.repos.d/CentOS-Linux-BaseOS.repo

# Toolchain and runtime prerequisites for the Habana packages and for
# building Python wheels against python38.
RUN dnf install -y \
    clang \
    cmake3 \
    cpp \
    gcc \
    gcc-c++ \
    glibc \
    glibc-headers \
    glibc-devel \
    jemalloc \
    libarchive \
    libksba \
    unzip \
    llvm \
    lsof \
    python38-devel \
    openssh-clients \
    libjpeg-devel \
    openssh-server \
    redhat-lsb-core \
    wget \
    git \
    mesa-libGL \
    python3-dnf-plugin-versionlock && \
    # update pkgs (except OS version) to resolve potential CVEs
    dnf versionlock add redhat-release* && \
    dnf update -y && \
    dnf clean all && rm -rf /var/cache/yum

ENV LD_LIBRARY_PATH=/usr/lib/habanalabs:$LD_LIBRARY_PATH
ENV RDMAV_FORK_SAFE=1

# Habana vendor repo; removed again after the install below so the
# credentials/URL are not baked into later layers' yum config.
RUN echo "[habanalabs]" > /etc/yum.repos.d/habanalabs.repo && \
    echo "name=Habana RH8 Linux repo" >> /etc/yum.repos.d/habanalabs.repo && \
    echo "baseurl=https://${ARTIFACTORY_URL}/artifactory/rhel/8/8.6" >> /etc/yum.repos.d/habanalabs.repo && \
    echo "gpgkey=https://${ARTIFACTORY_URL}/artifactory/rhel/8/8.6/repodata/repomd.xml.key" >> /etc/yum.repos.d/habanalabs.repo

RUN echo "[powertools]" > /etc/yum.repos.d/powertools.repo && \
    echo "name=powertools" >> /etc/yum.repos.d/powertools.repo && \
    echo "baseurl=http://mirror.centos.org/centos/8-stream/PowerTools/x86_64/os/" >> /etc/yum.repos.d/powertools.repo && \
    echo "gpgcheck=0" >> /etc/yum.repos.d/powertools.repo

RUN dnf install -y habanalabs-rdma-core-"$VERSION"-"$REVISION".el8 \
    habanalabs-thunk-"$VERSION"-"$REVISION".el8 \
    habanalabs-firmware-tools-"$VERSION"-"$REVISION".el8 \
    habanalabs-graph-"$VERSION"-"$REVISION".el8 && \
    rm -f /etc/yum.repos.d/habanalabs.repo && rm -rf /tmp/* && \
    dnf clean all && rm -rf /var/cache/yum

# There is no need to store pip installation files inside docker image
ENV PIP_NO_CACHE_DIR=on
ENV PIP_DISABLE_PIP_VERSION_CHECK=1

# Install python packages
# RUN python3.8 -m pip install hpu_media_loader=="${VERSION}"."${REVISION}"
# Install Python packages and Jupyterlab extensions from Pipfile.lock
COPY Pipfile.lock ./

RUN echo "Installing softwares and packages" && micropipenv install && rm -f ./Pipfile.lock

# NOTE(review): this only affects root's login shell; the runtime user is 1001
# (set at the bottom), so LANG for the notebook comes from the ENV below.
RUN echo "export LANG=en_US.UTF-8" >> /root/.bashrc
# (A former `RUN export LANG=en_US.UTF-8` layer was removed: each RUN runs in
# its own shell, so the export was a no-op.)
ENV GC_KERNEL_PATH=/usr/lib/habanalabs/libtpc_kernels.so
ENV HABANA_LOGS=/var/log/habana_logs/
ENV HABANA_SCAL_BIN_PATH=/opt/habanalabs/engines_fw
ENV HABANA_PLUGINS_LIB_PATH=/opt/habanalabs/habana_plugins

## Install habana tensorflow
## Reference: https://github.com/HabanaAI/Setup_and_Install/blob/1.10.0/dockerfiles/tensorflow/Dockerfile.rhel8.6

# For AML/CentOS/RHEL OS'es TFIO_DATAPATH have to be specified to import tensorflow_io lib correctly
# NOTE(review): path differs from the site-packages dir fixed up at the bottom
# (/opt/app-root/lib/python3.8/site-packages) — confirm against the base image layout.
ENV TFIO_DATAPATH=/opt/app-root/src/python3.8/site-packages/

# For AML/CentOS/RHEL ca-cert file is expected exactly under /etc/ssl/certs/ca-certificates.crt
# otherwise curl will fail during access to S3 AWS storage
RUN ln -s /etc/ssl/certs/ca-bundle.crt /etc/ssl/certs/ca-certificates.crt

## Install habana pytorch
## Reference: https://github.com/HabanaAI/Setup_and_Install/blob/1.10.0/dockerfiles/pytorch/Dockerfile.rhel8.6
ENV LANG=en_US.UTF-8
ENV PYTHONPATH=/root:/usr/lib/habanalabs/

RUN dnf install -y \
    curl \
    cairo-devel \
    numactl-devel \
    iproute \
    which \
    zlib-devel \
    lapack-devel \
    openblas-devel \
    numactl \
    gperftools-devel && \
    dnf clean all && rm -rf /var/cache/yum

# Fetch and install the Habana-built PyTorch wheel bundle, pin tensorboard/
# protobuf to versions compatible with it, and swap pillow for pillow-simd
# as upstream's Dockerfile does.
RUN wget --no-verbose https://"${ARTIFACTORY_URL}"/artifactory/gaudi-pt-modules/"${VERSION}"/"${REVISION}"\
/pytorch/rhel86/pytorch_modules-v"${PT_VERSION}"_"${VERSION}"_"${REVISION}".tgz && \
    mkdir /root/habanalabs /root/habanalabs/pytorch_temp && \
    tar -xf pytorch_modules-v"${PT_VERSION}"_"${VERSION}"_"${REVISION}".tgz -C /root/habanalabs/pytorch_temp/. && \
    pip3 install /root/habanalabs/pytorch_temp/*.whl && \
    pip3 install $(grep "lightning" /root/habanalabs/pytorch_temp/requirements-pytorch.txt) && \
    pip3 install tensorboard~=2.12.2 protobuf==3.20.3 && \
    pip3 install -U habana-lightning-plugins=="${VERSION}"."${REVISION}" && \
    pip3 uninstall -y pillow pillow-simd && \
    pip3 install pillow-simd==7.0.0.post3 && \
    rm -rf /root/habanalabs/pytorch_temp/ && \
    rm -rf pytorch_modules-v"${PT_VERSION}"_"${VERSION}"_"${REVISION}".tgz && \
    echo "source /etc/profile.d/habanalabs.sh" >> ~/.bashrc

ENV LD_PRELOAD=/lib64/libtcmalloc.so.4
ENV TCMALLOC_LARGE_ALLOC_REPORT_THRESHOLD=7516192768

RUN dnf clean all && rm -rf /var/cache/dnf && rm -rf /tmp/*

## Label the image with details required by ODH
LABEL name="odh-notebook-habana-jupyter-1.10.0-ubi8-python-3.8" \
      summary="Jupyter HabanaAI 1.10.0 notebook image for ODH notebooks" \
      description="Jupyter HabanaAI 1.10.0 notebook image with base Python 3.8 builder image based on ubi8 for ODH notebooks" \
      io.k8s.display-name="Jupyter HabanaAI 1.10.0 notebook image for ODH notebooks" \
      io.k8s.description="Jupyter HabanaAI 1.10.0 notebook image with base Python 3.8 builder image based on ubi8 for ODH notebooks" \
      authoritative-source-url="https://github.com/opendatahub-io/notebooks" \
      io.openshift.build.commit.ref="main" \
      io.openshift.build.source-location="https://github.com/opendatahub-io/notebooks/tree/main/habana/1.10.0/ubi8-python-3.8" \
      io.openshift.build.image="quay.io/opendatahub/workbench-images:habana-jupyter-1.10.0-ubi8-python-3.8"

# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x
RUN sed -i -e "s/Python.*/$(python --version| cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \
    # Fix permissions to support pip in Openshift environments \
    chmod -R g+w /opt/app-root/lib/python3.8/site-packages && \
    fix-permissions /opt/app-root -P

USER 1001

WORKDIR /opt/app-root/src
Loading

0 comments on commit 856cb38

Please sign in to comment.