diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 30c0c28..f3eb606 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -38,7 +38,7 @@ jobs:
       - run: |
           source ./tool.sh
           build_image core latest docker_core/Dockerfile \
-            --build-arg "ARG_PROFILE_PYTHON=base,datascience,database,nlp,cv,chem,tf2,torch" \
+            --build-arg "ARG_PROFILE_PYTHON=base,datascience,mkl,database,nlp,cv,chem,tf2,torch" \
             --build-arg "ARG_PROFILE_R=base,datascience,rstudio,rshiny" \
             --build-arg "ARG_PROFILE_NODEJS=base" \
             --build-arg "ARG_PROFILE_JAVA=base,maven" \
@@ -51,7 +51,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - run: source ./tool.sh && build_image py-data latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,database" && push_image
+      - run: source ./tool.sh && build_image py-data latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,mkl,database" && push_image

   qpod_py-chem:
     name: qpod/py-chem
@@ -59,7 +59,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - run: source ./tool.sh && build_image py-chem latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,chem" && push_image
+      - run: source ./tool.sh && build_image py-chem latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,mkl,chem" && push_image

   qpod_py-std:
     name: qpod/py-std
@@ -67,7 +67,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - run: source ./tool.sh && build_image py-std latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,database,nlp,cv,chem" && push_image
+      - run: source ./tool.sh && build_image py-std latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,mkl,database,nlp,cv,chem" && push_image

   qpod_py-jdk:
     name: qpod/py-jdk
@@ -75,7 +75,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - run: source ./tool.sh && build_image py-jdk latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,database,nlp,cv,chem" --build-arg "ARG_PROFILE_JAVA=base" && push_image
+      - run: source ./tool.sh && build_image py-jdk latest docker_core/Dockerfile --build-arg "ARG_PROFILE_PYTHON=datascience,mkl,database,nlp,cv,chem" --build-arg "ARG_PROFILE_JAVA=base" && push_image

   qpod_r-base:
     name: qpod/r-base
@@ -263,7 +263,7 @@ jobs:
       - uses: actions/checkout@v2
       - run: |
           source ./tool.sh
-          build_image paddle-cuda116 latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_11.6" --build-arg "ARG_PROFILE_PYTHON=paddle"
+          build_image paddle-cuda116 latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_11.6" --build-arg "ARG_PROFILE_PYTHON=paddle,mkl"
           alias_image paddle-cuda116 latest paddle latest
           push_image

@@ -275,7 +275,7 @@ jobs:
       - uses: actions/checkout@v2
       - run: |
           source ./tool.sh
-          build_image paddle-cuda102 latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_10.2" --build-arg "ARG_PROFILE_PYTHON=paddle"
+          build_image paddle-cuda102 latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_10.2" --build-arg "ARG_PROFILE_PYTHON=paddle,mkl"
           push_image

@@ -287,7 +287,7 @@ jobs:
       - uses: actions/checkout@v2
       - run: |
           source ./tool.sh
-          build_image py-nlp latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_11.6" --build-arg "ARG_PROFILE_PYTHON=datascience,torch,nlp"
+          build_image py-nlp latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_11.6" --build-arg "ARG_PROFILE_PYTHON=datascience,mkl,torch,nlp"
           push_image

   qpod_py-cv:
@@ -298,7 +298,7 @@ jobs:
       - uses: actions/checkout@v2
       - run: |
          source ./tool.sh
-          build_image py-cv latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_11.6" --build-arg "ARG_PROFILE_PYTHON=datascience,torch,cv"
+          build_image py-cv latest docker_core/Dockerfile --build-arg "BASE_IMG=cuda_11.6" --build-arg "ARG_PROFILE_PYTHON=datascience,mkl,torch,cv"
           push_image

@@ -312,7 +312,7 @@ jobs:
           source ./tool.sh && free_diskspace
           build_image full-cuda-11.6 latest docker_core/Dockerfile \
             --build-arg "BASE_IMG=cuda_11.6" \
-            --build-arg "ARG_PROFILE_PYTHON=base,datascience,database,nlp,cv,chem,tf2,torch" \
+            --build-arg "ARG_PROFILE_PYTHON=base,datascience,mkl,database,nlp,cv,chem,tf2,torch" \
             --build-arg "ARG_PROFILE_R=base,datascience,rstudio,rshiny" \
             --build-arg "ARG_PROFILE_NODEJS=base" \
             --build-arg "ARG_PROFILE_JAVA=base,maven" \
diff --git a/docker_atom/work/script-setup.sh b/docker_atom/work/script-setup.sh
index 77f1327..546c4d8 100644
--- a/docker_atom/work/script-setup.sh
+++ b/docker_atom/work/script-setup.sh
@@ -16,6 +16,15 @@ setup_mamba() {

 setup_conda_postprocess() {
+    # If python exists, set the pypi source
+    if [ -f "$(which python)" ]; then
+        cat >/etc/pip.conf <<
diff --git a/docker_core/Dockerfile b/docker_core/Dockerfile
-...> /opt/utils/install_list.conda ) || echo "Skip MLK install." \
+# If on a x86_64 architecture: 1) add mkl (if specified in the profile) or nomkl to the conda install list; 2) install the listed conda packages.
+RUN $(grep -q "x86_64" <<< `arch`) && ( \
+        ( $(grep -q "mkl" <<< "${ARG_PROFILE_PYTHON}") && \
+            ( echo "mkl"   >> /opt/utils/install_list_core.conda && echo "Install mkl."   ) || \
+            ( echo "nomkl" >> /opt/utils/install_list_core.conda && echo "Install nomkl." ) \
+        ) \
+    ) \
  && source /opt/utils/script-utils.sh && install_conda /opt/utils/install_list_core.conda

 # If installing Python packages
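
For reference, the branch logic introduced by the new Dockerfile RUN instruction can be exercised outside an image build. The sketch below is illustrative only and not part of the diff: the ARG_PROFILE_PYTHON profile string and the /opt/utils/install_list_core.conda list file are taken from the diff, while the choose_mkl_package wrapper and the example arguments are hypothetical.

#!/usr/bin/env bash
# Illustrative sketch (not part of the diff): mirrors how the new RUN instruction
# decides between mkl and nomkl. The function name and example arguments are
# hypothetical; the profile string and list-file path come from the diff.
choose_mkl_package() {
    local profile="${1:-}"
    local list_file="${2:-/opt/utils/install_list_core.conda}"
    # MKL is only relevant on x86_64; other architectures are skipped, as in the Dockerfile.
    if [ "$(arch)" = "x86_64" ]; then
        if grep -q "mkl" <<< "${profile}"; then
            echo "mkl"   >> "${list_file}" && echo "Install mkl."
        else
            echo "nomkl" >> "${list_file}" && echo "Install nomkl."
        fi
    fi
}

# Example usage (hypothetical values):
#   choose_mkl_package "datascience,mkl,database" ./install_list_core.conda

On x86_64, conda pulls MKL-backed builds of numpy/scipy by default, which noticeably enlarges the image; appending nomkl keeps the default images smaller, while adding the mkl profile opts a build back into the MKL variants.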