diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 5c19dfa8d01a0c..cdcd9e761ec331 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -80,3 +80,11 @@ jobs: with: docker_distribute_image: ${{ needs.build-docker.outputs.docker_distribute_image }} clone-can-skip: ${{ needs.clone.outputs.can-skip }} + + formers: + name: Formers-test + uses: ./.github/workflows/_Formers.yml + needs: [clone, build-docker] + with: + docker_formers_image: ${{ needs.build-docker.outputs.docker_formers_image }} + clone-can-skip: ${{ needs.clone.outputs.can-skip }} diff --git a/.github/workflows/_Formers.yml b/.github/workflows/_Formers.yml new file mode 100644 index 00000000000000..fa848a24ef8b13 --- /dev/null +++ b/.github/workflows/_Formers.yml @@ -0,0 +1,385 @@ +name: formers-test + +on: + workflow_call: + inputs: + docker_formers_image: + type: string + required: true + clone-can-skip: + type: string + required: false + default: "false" + +env: + PR_ID: ${{ github.event.pull_request.number }} + COMMIT_ID: ${{ github.event.pull_request.head.sha }} + work_dir: /paddle + PADDLE_ROOT: /paddle + TASK: paddle-CI-${{ github.event.pull_request.number }}-formers + ci_scripts: /paddle/ci + BRANCH: ${{ github.event.pull_request.base.ref }} + CI_name: distribute + no_proxy: bcebos.com,apiin.im.baidu.com,gitee.com,aliyun.com,.baidu.com,.tuna.tsinghua.edu.cn,paddlepaddle.org.cn + docker_image: ${{ inputs.docker_formers_image }} + PYTEST_EXECUTE_FLAG_FILE: ${{ github.workspace }}/../../../PYTEST_EXECUTE_FLAG_FILE/${{ github.event.pull_request.number || '0' }}/${{ github.event.pull_request.head.sha || github.sha }}/pytest_execute.flag + +defaults: + run: + shell: bash + +jobs: + build: + name: Build + if: ${{ inputs.clone-can-skip != 'true' }} + outputs: + can-skip: ${{ steps.check-bypass.outputs.can-skip }} + runs-on: + group: GZ_BD-CPU + + steps: + - name: Check docker image and run container + env: + FLAGS_fraction_of_gpu_memory_to_use: 0.15 + 
CTEST_OUTPUT_ON_FAILURE: 1 + CTEST_PARALLEL_LEVEL: 4 + WITH_GPU: "ON" + WITH_AVX: "ON" + WITH_MKL: "OFF" + WITH_PYTHON: "ON" + WITH_DISTRIBUTE: "ON" + WITH_TESTING: "ON" + WITH_INFERENCE_API_TEST: "OFF" + WITH_FA_BUILD_WITH_CACHE: "ON" + COVERALLS_UPLOAD: "ON" + PADDLE_VERSION: 0.0.0 + CUDA_VISIBLE_DEVICES: 0,1 + GIT_PR_ID: ${{ github.event.pull_request.number }} + GPUBOX_DEMO_INSTALL_DIR: /root/.cache/build + INFERENCE_DEMO_INSTALL_DIR: /root/.cache/python35 + PY_VERSION: "3.10" + WITH_TENSORRT: "OFF" + GENERATOR: "Ninja" + WITH_SHARED_PHI: "ON" + CUDA_ARCH_NAME: Manual + CUDA_ARCH_BIN: "80 90" + WITH_CUDNN_FRONTEND: "ON" + FLAGS_enable_cudnn_frontend: 1 + CACHE_DIR: /root/.cache/build + CCACHE_DIR: /root/.ccache/gpubox + CCACHE_MAXSIZE: 150G + CCACHE_LIMIT_MULTIPLE: 0.8 + CCACHE_STATSLOG: /paddle/build/.stats.log + CCACHE_SLOPPINESS: clang_index_store,time_macros,include_file_mtime + run: | + container_name=${TASK}-build-$(date +%Y%m%d-%H%M%S) + echo "container_name=${container_name}" >> ${{ github.env }} + docker run -d -t --name ${container_name} \ + -v "/home/data/cfs:/home/data/cfs" \ + -v "/home/data/cfs/.cache/:/root/.cache" \ + -v "/home/data/cfs/.ccache:/root/.ccache" \ + -v "/dev/shm:/dev/shm" \ + -v ${{ github.workspace }}/../../..:${{ github.workspace }}/../../.. 
\ + -v ${{ github.workspace }}:/paddle \ + -e BRANCH \ + -e PR_ID \ + -e COMMIT_ID \ + -e work_dir \ + -e PADDLE_ROOT \ + -e ci_scripts \ + -e CI_name \ + -e WITH_SHARED_PHI \ + -e WITH_MKL \ + -e WITH_TESTING \ + -e COVERALLS_UPLOAD \ + -e GIT_PR_ID \ + -e PADDLE_VERSION \ + -e WITH_DISTRIBUTE \ + -e PY_VERSION \ + -e WITH_TENSORRT \ + -e WITH_FA_BUILD_WITH_CACHE \ + -e GENERATOR \ + -e CCACHE_MAXSIZE \ + -e CCACHE_LIMIT_MULTIPLE \ + -e WITH_AVX \ + -e WITH_PYTHON \ + -e CACHE_DIR \ + -e CCACHE_DIR \ + -e CCACHE_STATSLOG \ + -e CCACHE_SLOPPINESS \ + -e FLAGS_fraction_of_gpu_memory_to_use \ + -e CTEST_OUTPUT_ON_FAILURE \ + -e CTEST_PARALLEL_LEVEL \ + -e WITH_GPU \ + -e WITH_INFERENCE_API_TEST \ + -e CUDA_VISIBLE_DEVICES \ + -e GPUBOX_DEMO_INSTALL_DIR \ + -e INFERENCE_DEMO_INSTALL_DIR \ + -e CUDA_ARCH_NAME \ + -e CUDA_ARCH_BIN \ + -e WITH_CUDNN_FRONTEND \ + -e FLAGS_enable_cudnn_frontend \ + -e no_proxy \ + -w /paddle --network host ${docker_image} + + - name: Download paddle.tar.gz and merge target branch + env: + work_dir: ${{ github.workspace }} + run: | + docker exec -t ${{ env.container_name }} /bin/bash -c ' + mkdir -p /root/.cache/build + mkdir -p /root/.ccache/gpubox + rm -rf * .[^.]* + set -e + echo "Downloading Paddle.tar.gz" + wget -q --tries=5 --no-proxy https://paddle-github-action.bj.bcebos.com/PR/Paddle/${PR_ID}/${COMMIT_ID}/Paddle.tar.gz --no-check-certificate + echo "Extracting Paddle.tar.gz" + tar -xf Paddle.tar.gz --strip-components=1 + rm Paddle.tar.gz + git remote -v + set +e + git remote add upstream https://github.com/PaddlePaddle/Paddle.git + set -e + source ${{ github.workspace }}/../../../proxy + git checkout test + echo "Pull upstream develop" + bash ci/git_pull.sh $BRANCH + ' + + - name: Check bypass + id: check-bypass + uses: ./.github/actions/check-bypass + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + workflow-name: distribute + + - name: Download flashattn cache + if: steps.check-bypass.outputs.can-skip != 'true' + run: | + 
docker exec -t ${{ env.container_name }} /bin/bash -c '
+            set +e
+            flashattn_version=$(git submodule status | grep flashattn | awk "{print \$1}" | sed "s#-##g")
+            echo "flashattn_version=${flashattn_version}" >> ${{ github.env }}
+            wget -q --tries=5 --no-proxy https://paddle-github-action.bj.bcebos.com/PR/formers/flashattn_cache/flashattn_libs_${flashattn_version}.tar --no-check-certificate; FACODE=$?
+            if [ $FACODE -ne 0 ]; then
+              echo "flashattn_cached_package=true" >> ${{ github.env }}
+            fi
+          '
+
+      - name: Build
+        if: steps.check-bypass.outputs.can-skip != 'true'
+        run: |
+          docker exec -t ${{ env.container_name }} /bin/bash -c '
+            source ${{ github.workspace }}/../../../proxy
+            export CFLAGS="-mno-amx-tile -mno-amx-int8 -mno-amx-bf16"
+            export CXXFLAGS="-mno-amx-tile -mno-amx-int8 -mno-amx-bf16"
+            bash ${ci_scripts}/cmake-predownload.sh
+            bash ${ci_scripts}/run_setup.sh bdist_wheel
+          '
+
+      - name: Packaging of products
+        if: steps.check-bypass.outputs.can-skip != 'true'
+        run: |
+          docker exec -t ${{ env.container_name }} /bin/bash -c '
+            if [ "${{ env.flashattn_cached_package }}" == "true" ]; then
+              cd ${work_dir}/build/third_party/install/flashattn/lib
+              mkdir flashattn_libs_${{ env.flashattn_version }} && cd flashattn_libs_${{ env.flashattn_version }}
+              mkdir fa_libs && cp ../lib*.so fa_libs && tar -zcf fa_libs.tar ./fa_libs && rm -rf ./fa_libs
+              md5sum fa_libs.tar |awk "{print \$1}" >MD5.txt
+              cd .. && tar -zcf flashattn_libs_${{ env.flashattn_version }}.tar ./flashattn_libs_${{ env.flashattn_version }}
+            fi
+            bash ${ci_scripts}/compress_build.sh
+            cd ${work_dir}/..
+            tar --use-compress-program="pzstd -1" --warning=no-file-changed -cf Paddle.tar.gz paddle
+          '
+
+      - name: Upload product to bos
+        if: steps.check-bypass.outputs.can-skip != 'true'
+        env:
+          home_path: ${{ github.workspace }}/..
+ bos_file: ${{ github.workspace }}/../bos_retry/BosClient.py + paddle_whl: paddlepaddle_gpu-0.0.0-cp310-cp310-linux_x86_64.whl + run: | + docker exec -t ${{ env.container_name }} /bin/bash -c ' + set -x + export AK=paddle + export SK=paddle + source ${{ github.workspace }}/../../../proxy + echo "::group::Install bce-python-sdk" + python -m pip install bce-python-sdk==0.8.74 + echo "::endgroup::" + if [ ! -f "${{ env.bos_file }}" ]; then + wget -q --no-proxy -O ${{ env.home_path }}/bos_retry.tar.gz https://xly-devops.bj.bcebos.com/home/bos_retry.tar.gz --no-check-certificate + mkdir ${{ env.home_path }}/bos_retry + tar xf ${{ env.home_path }}/bos_retry.tar.gz -C ${{ env.home_path }}/bos_retry + fi + cd .. + source ${{ github.workspace }}/../../../unproxy + echo "Uploading Paddle.tar.gz to bos" + python ${{ env.bos_file }} Paddle.tar.gz paddle-github-action/PR/formers/${{ env.PR_ID }}/${{ env.COMMIT_ID }} + echo "Uploading whl to bos" + mv ${work_dir}/dist/${{ env.paddle_whl }} . + python ${{ env.bos_file }} ${{ env.paddle_whl }} paddle-github-action/PR/formers/${{ env.PR_ID }}/${{ env.COMMIT_ID }} + if [ "${{ env.flashattn_cached_package }}" == "true" ]; then + echo "Uploading flashattn_libs_${flashattn_version}.tar.gz to bos" + mv ${work_dir}/build/third_party/install/flashattn/lib/flashattn_libs_${{ env.flashattn_version }}.tar . 
+            python ${{ env.bos_file }} flashattn_libs_${{ env.flashattn_version }}.tar paddle-github-action/PR/formers/flashattn_cache
+          fi
+          rm -rf Paddle.tar.gz ${{ env.paddle_whl }} flashattn_libs_${flashattn_version}.tar
+          '
+
+      - name: Terminate and delete the container
+        if: ${{ steps.check-bypass.outputs.can-skip != 'true' && always() }}
+        run: |
+          set +e
+          # NOTE(review): removed stray debug 'sleep 3d' that parked the runner for three days before cleanup
+          docker exec -t ${{ env.container_name }} /bin/bash -c 'rm -rf * .[^.]*'
+          docker rm -f ${{ env.container_name }}
+
+  test:
+    name: Test
+    needs: build
+    if: ${{ needs.build.outputs.can-skip != 'true' }}
+    runs-on:
+      group: Distribute
+    steps:
+      - name: Check docker image and run container
+        env:
+          FLAGS_fraction_of_gpu_memory_to_use: 0.15
+          CTEST_OUTPUT_ON_FAILURE: 1
+          CTEST_PARALLEL_LEVEL: 4
+          WITH_GPU: "ON"
+          WITH_AVX: "ON"
+          WITH_DISTRIBUTE: "ON"
+          WITH_TESTING: "ON"
+          WITH_COVERAGE: "OFF"
+          CMAKE_BUILD_TYPE: Release
+          PADDLE_FRACTION_GPU_MEMORY_TO_USE: 0.15
+          PRECISION_TEST: "OFF"
+          WITH_UNITY_BUILD: "ON"
+          AGILE_COMPILE_BRANCH: ${{ github.event.pull_request.base.ref }}
+          AGILE_REVISION: ${{ github.event.pull_request.head.sha }}
+          WITH_INCREMENTAL_COVERAGE: "OFF"
+          WITH_ONNXRUNTIME: "OFF"
+          COVERALLS_UPLOAD: "ON"
+          PADDLE_VERSION: 0.0.0
+          GIT_PR_ID: ${{ github.event.pull_request.number }}
+          PY_VERSION: "3.10"
+          CUDA_ARCH_NAME: Auto
+          WITH_CUDNN_FRONTEND: "ON"
+          FLAGS_enable_cudnn_frontend: 1
+          CACHE_DIR: /root/.cache/build
+          CCACHE_DIR: /root/.ccache/formers
+          CFS_DIR: /home/data/cfs
+          paddle_whl: /paddle/dist/paddlepaddle_gpu-0.0.0-cp310-cp310-linux_x86_64.whl
+        run: |
+          export CUDA_SO="$(\ls -d /usr/lib64/libcuda* | xargs -I{} echo '-v {}:{}') $(\ls -d /usr/lib64/libnvidia* | xargs -I{} echo '-v {}:{}')"
+          export DEVICES="$(\ls -d /dev/nvidia* | xargs -I{} echo "-v {}:{}") $(\ls /dev/nvidia-caps/* | xargs -I{} echo "-v {}:{}")"
+          export SMI="-v /usr/bin/nvidia-smi:/usr/bin/nvidia-smi"
+          container_name=${TASK}-test-$(date +%Y%m%d-%H%M%S)
+          echo "container_name=${container_name}" >> ${{ github.env }}
+ docker run -d -t --name ${container_name} ${CUDA_SO} ${DEVICES} ${SMI} --runtime=nvidia --shm-size=32G \ + -v "/home/data/cfs:/home/data/cfs" \ + -v "/home/data/cfs/.cache/:/root/.cache" \ + -v "/home/data/cfs/.ccache:/root/.ccache" \ + -v "/ssd1/root:/root" \ + -v "/dev/shm:/dev/shm" \ + -v ${{ github.workspace }}/../../..:${{ github.workspace }}/../../.. \ + -v ${{ github.workspace }}:/paddle \ + -e BRANCH \ + -e PR_ID \ + -e COMMIT_ID \ + -e work_dir \ + -e PADDLE_ROOT \ + -e ci_scripts \ + -e CI_name \ + -e FLAGS_fraction_of_gpu_memory_to_use \ + -e CTEST_OUTPUT_ON_FAILURE \ + -e CTEST_PARALLEL_LEVEL \ + -e WITH_GPU \ + -e WITH_AVX \ + -e WITH_DISTRIBUTE \ + -e WITH_TESTING \ + -e WITH_COVERAGE \ + -e CMAKE_BUILD_TYPE \ + -e PADDLE_FRACTION_GPU_MEMORY_TO_USE \ + -e PRECISION_TEST \ + -e WITH_UNITY_BUILD \ + -e AGILE_COMPILE_BRANCH \ + -e AGILE_REVISION \ + -e WITH_INCREMENTAL_COVERAGE \ + -e WITH_ONNXRUNTIME \ + -e COVERALLS_UPLOAD \ + -e PADDLE_VERSION \ + -e GIT_PR_ID \ + -e PY_VERSION \ + -e CUDA_ARCH_NAME \ + -e WITH_CUDNN_FRONTEND \ + -e FLAGS_enable_cudnn_frontend \ + -e CACHE_DIR \ + -e CCACHE_DIR \ + -e CFS_DIR \ + -e paddle_whl \ + -e no_proxy \ + -w /paddle --network host ${docker_image} + + - name: Download paddle.tar.gz and merge target branch + run: | + docker exec -t ${{ env.container_name }} /bin/bash -c ' + rm -rf * .[^.]* + echo "Downloading Paddle.tar.gz" + wget -q --tries=5 --no-proxy https://paddle-github-action.bj.bcebos.com/PR/formers/${{ env.PR_ID }}/${{ env.COMMIT_ID }}/Paddle.tar.gz --no-check-certificate + echo "Extracting Paddle.tar.gz" + tar --use-compress-program="pzstd" -xf Paddle.tar.gz --strip-components=1 + rm Paddle.tar.gz + git checkout test + ' + + - name: Test + run: | + docker exec -t ${{ env.container_name }} /bin/bash -c ' + source ${{ github.workspace }}/../../../proxy + bash ${ci_scripts}/formers_test.sh + ' + + - name: Upload and display logs + if: always() + env: + home_path: ${{ github.workspace }}/.. 
+ bos_file: ${{ github.workspace }}/../bos_retry/BosClient.py + run: | + docker exec -t ${{ env.container_name }} /bin/bash -c ' + export AK=paddle + export SK=paddle + if [ ! -f "${{ env.bos_file }}" ]; then + wget -q --no-proxy -O ${{ env.home_path }}/bos_retry.tar.gz https://xly-devops.bj.bcebos.com/home/bos_retry.tar.gz --no-check-certificate + mkdir ${{ env.home_path }}/bos_retry + tar xf ${{ env.home_path }}/bos_retry.tar.gz -C ${{ env.home_path }}/bos_retry + fi + if [ -n "$PR_ID" ] && [ "$PR_ID" != "0" ]; then + bos_prefix="${PR_ID}/${COMMIT_ID}" + else + bos_prefix="schedule/$(date +%Y%m%d)" + fi + # api test logs + cd ${work_dir}/PaddleFormers/unittest_logs + for FILE in ${work_dir}/PaddleFormers/unittest_logs/*; do + file=$(basename "$FILE") + python ${{ env.bos_file }} $file paddle-github-action/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs + echo "$file: https://paddle-github-action.bj.bcebos.com/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs/$file" + done + # models test logs + cd ${work_dir}/PaddleFormers/model_unittest_logs + for FILE in ${work_dir}/PaddleFormers/model_unittest_logs/*; do + file=$(basename "$FILE") + python ${{ env.bos_file }} $file paddle-github-action/PR/PaddleFormers/model-unittest-gpu/${bos_prefix}/logs + echo "$file: https://paddle-github-action.bj.bcebos.com/PR/PaddleFormers/model-unittest-gpu/${bos_prefix}/logs/$file" + done + ' + + - name: Terminate and delete the container + if: always() + run: | + set +e + docker exec -t ${{ env.container_name }} /bin/bash -c 'rm -rf * .[^.]*' + docker rm -f ${{ env.container_name }} diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 7f5f9ba8ec6691..5041a124e485f4 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -46,6 +46,10 @@ on: description: "Generate images for all CI usage" value: ${{ jobs.build-docker-images.outputs.docker_doc_image }} + docker_formers_image: + description: "Generate images for all CI usage" + value: ${{ 
jobs.build-docker-images.outputs.docker_formers_image }} + jobs: build-docker-images: if: ${{ github.repository_owner == 'PaddlePaddle' }} @@ -61,6 +65,7 @@ jobs: docker_npu_image: ${{ steps.build-docker-images.outputs.docker_npu_image }} docker_dcu_image: ${{ steps.build-docker-images.outputs.docker_dcu_image }} docker_doc_image: ${{ steps.build-docker-images.outputs.docker_doc_image }} + docker_formers_image: ${{ steps.build-docker-images.outputs.docker_formers_image }} steps: - id: build-docker-images name: Build docker images @@ -71,6 +76,7 @@ jobs: docker_coverage_file: Dockerfile.cuda117_cudnn8_gcc82_ubuntu18_coverage docker_build_file: Dockerfile.cuda11.2_cudnn8_gcc82_trt8 docker_distribute_file: Dockerfile.cuda123_cudnn9_gcc122_ubuntu20 + docker_formers_file: Dockerfile.cuda126_cudnn9_gcc122_ubuntu24 docker_xpu_file: Dockerfile.develop.xre docker_npu_file: Dockerfile.develop.npu docker_dcu_file: Dockerfile.develop.dtk @@ -94,7 +100,7 @@ jobs: # docker build images if [ "${{ inputs.task }}" == "cpu" ]; then - declare -A docker_files=(["docker_cpu"]="$docker_cpu_file" ["docker_distribute"]="$docker_distribute_file" ["docker_xpu"]="$docker_xpu_file" ["docker_npu"]="$docker_npu_file" ["docker_dcu"]="$docker_dcu_file") + declare -A docker_files=(["docker_cpu"]="$docker_cpu_file" ["docker_formers"]="$docker_formers_file" ["docker_distribute"]="$docker_distribute_file" ["docker_xpu"]="$docker_xpu_file" ["docker_npu"]="$docker_npu_file" ["docker_dcu"]="$docker_dcu_file") elif [ "${{ inputs.task }}" == "build" ]; then declare -A docker_files=(["docker_build"]="$docker_build_file" ["docker_doc"]="$docker_doc_file") else diff --git a/ci/formers_test.sh b/ci/formers_test.sh new file mode 100644 index 00000000000000..39777546da5aae --- /dev/null +++ b/ci/formers_test.sh @@ -0,0 +1,69 @@ +# Copyright (c) 2025 PaddlePaddle Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+function formers_api() {
+    cd ${work_dir}/PaddleFormers && git config --global --add safe.directory $PWD
+    source $work_dir/../../../proxy
+    source $work_dir/../../../AISTUDIO_ACCESS_TOKEN
+    echo "Check whether the local model file exists:"
+    ls -l ./models
+    timeout 30m bash scripts/unit_test/ci_unittest.sh ${paddle_whl} false ${PYTEST_EXECUTE_FLAG_FILE} ${BRANCH}
+}
+
+function formers_models() {
+    rm -rf /root/.cache/aistudio/
+    cd ${work_dir}/PaddleFormers && git config --global --add safe.directory $PWD
+    echo "Check whether the local model file exists:"
+    ls -l ./models
+    timeout 30m bash scripts/regression/ci_model_unittest.sh ${paddle_whl} ${BRANCH}
+}
+
+function formers_test() {
+    python ${PADDLE_ROOT}/tools/get_pr_title.py skip_distribute_test && CINN_OR_BUAA_PR=1
+    if [[ "${CINN_OR_BUAA_PR}" = "1" ]];then
+        echo "PR's title with 'CINN' or 'BUAA', skip the formers ci test !"
+        exit 0
+    fi
+
+    echo "::group::Start formers api tests"
+    formers_api
+    echo "End api tests"
+    echo "::endgroup::"
+
+    echo "::group::Start formers models tests"
+    formers_models
+    echo "End models tests"
+    echo "::endgroup::"
+}
+
+export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/lib64/:/usr/local/lib/
+PATH=/usr/local/bin:${PATH}
+ln -sf $(which python3.10) /usr/local/bin/python
+ln -sf $(which pip3.10) /usr/local/bin/pip
+
+echo "Downloading PaddleFormers.tar..."
+wget -q https://paddle-qa.bj.bcebos.com/CodeSync/develop/PaddleFormers.tar +tar xf PaddleFormers.tar +echo "Extracting PaddleFormers.tar.gz..." +cd PaddleFormers +cp -r ${CFS_DIR}/models ./models + +echo "::group::Install paddle dependencies" +pip config set global.cache-dir "/root/.cache/pip" +pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple +echo "::endgroup::" +ldconfig + +formers_test diff --git a/tools/dockerfile/Dockerfile.ubuntu24 b/tools/dockerfile/Dockerfile.ubuntu24 index 8f45ea47270b69..4f5f18b29244fb 100644 --- a/tools/dockerfile/Dockerfile.ubuntu24 +++ b/tools/dockerfile/Dockerfile.ubuntu24 @@ -55,7 +55,8 @@ RUN apt-get update && \ python3.10 python3.10-dev python3.10-distutils \ python3.11 python3.11-dev python3.11-distutils \ python3.12 python3.12-dev \ - python3.13 python3.13-dev python3.13-nogil && \ + python3.13 python3.13-dev python3.13-nogil \ + python3.14 python3.14-dev python3.14-nogil && \ apt-get install python-is-python3 RUN rm /usr/bin/python && ln -s /usr/bin/python3.9 /usr/bin/python && \ rm /usr/bin/python3 && ln -s /usr/bin/python3.9 /usr/bin/python3 @@ -73,6 +74,10 @@ RUN python3.13t get-pip.py && \ mv /usr/local/bin/pip3.13 /usr/local/bin/pip3.13t && \ python3.13 get-pip.py +RUN python3.14t get-pip.py && \ + mv /usr/local/bin/pip3.14 /usr/local/bin/pip3.14t && \ + python3.14 get-pip.py + RUN python -m pip config set global.break-system-packages true RUN python3.9 -m pip install setuptools==50.3.2 && \ @@ -80,7 +85,9 @@ RUN python3.9 -m pip install setuptools==50.3.2 && \ python3.11 -m pip install setuptools==68.2.0 && \ python3.12 -m pip install --break-system-packages setuptools==68.2.0 && \ python3.13 -m pip install setuptools==69.5.0 && \ - python3.13t -m pip install setuptools==69.5.0 + python3.13t -m pip install setuptools==69.5.0 && \ + python3.14 -m pip install setuptools==69.5.0 && \ + python3.14t -m pip install setuptools==69.5.0 # binutils >= 2.27 RUN apt-get install -y binutils @@ -118,7 +125,9 @@ 
RUN python3.9 -m pip --no-cache-dir install ipython==5.3.0 && \ python3.13 -m pip --no-cache-dir install ipython==5.3.0 && \ python3.13 -m pip --no-cache-dir install ipykernel==4.6.0 wheel && \ python3.13t -m pip --no-cache-dir install ipython==5.3.0 && \ - python3.13t -m pip --no-cache-dir install ipykernel==4.6.0 wheel + python3.13t -m pip --no-cache-dir install ipykernel==4.6.0 wheel && \ + python3.14 -m pip --no-cache-dir install ipython==5.3.0 ipykernel==4.6.0 wheel && \ + python3.14t -m pip --no-cache-dir install ipython==5.3.0 ipykernel==4.6.0 wheel # For PaddleTest CE RUN python3.9 -m pip --no-cache-dir install pytest && \ @@ -126,7 +135,9 @@ RUN python3.9 -m pip --no-cache-dir install pytest && \ python3.11 -m pip --no-cache-dir install pytest && \ python3.12 -m pip --no-cache-dir install --break-system-packages pytest && \ python3.13 -m pip --no-cache-dir install pytest && \ - python3.13t -m pip --no-cache-dir install pytest + python3.13t -m pip --no-cache-dir install pytest && \ + python3.14 -m pip --no-cache-dir install pytest && \ + python3.14t -m pip --no-cache-dir install pytest RUN python3.9 -m pip --no-cache-dir install pre-commit==2.17.0 && \ python3.10 -m pip --no-cache-dir install pre-commit==2.17.0 && \ @@ -135,23 +146,28 @@ RUN python3.9 -m pip --no-cache-dir install pre-commit==2.17.0 && \ python3.11 -m pip --no-cache-dir install cpplint==1.6.0 clang-format==13.0.0 && \ python3.12 -m pip --no-cache-dir install --break-system-packages cpplint==1.6.0 clang-format==13.0.0 && \ python3.13 -m pip --no-cache-dir install cpplint==1.6.0 clang-format==13.0.0 && \ - python3.13t -m pip --no-cache-dir install cpplint==1.6.0 clang-format==13.0.0 + python3.13t -m pip --no-cache-dir install cpplint==1.6.0 clang-format==13.0.0 && \ + python3.14 -m pip --no-cache-dir install cpplint==1.6.0 clang-format==13.0.0 && \ + python3.14t -m pip --no-cache-dir install cpplint==1.6.0 clang-format==13.0.0 COPY ./python/requirements.txt /root/ COPY 
./python/unittest_py/requirements.txt /home/ RUN python3.9 -m pip --no-cache-dir install -r /root/requirements.txt && \ - python3.9 -m pip --no-cache-dir install -r /home/requirements.txt && \ + python3.9 -m pip --no-cache-dir install --break-system-packages --ignore-installed -r /home/requirements.txt && \ python3.10 -m pip --no-cache-dir install -r /root/requirements.txt && \ - python3.10 -m pip --no-cache-dir install -r /home/requirements.txt && \ + python3.10 -m pip --no-cache-dir install --break-system-packages --ignore-installed -r /home/requirements.txt && \ python3.11 -m pip --no-cache-dir install -r /root/requirements.txt && \ - python3.11 -m pip --no-cache-dir install -r /home/requirements.txt && \ + python3.11 -m pip --no-cache-dir install --break-system-packages --ignore-installed -r /home/requirements.txt && \ python3.12 -m pip --no-cache-dir install --break-system-packages -r /root/requirements.txt && \ - python3.12 -m pip --no-cache-dir install --break-system-packages -r /home/requirements.txt && \ + python3.12 -m pip --no-cache-dir install --break-system-packages --ignore-installed -r /home/requirements.txt && \ python3.13 -m pip --no-cache-dir install -r /root/requirements.txt && \ - python3.13 -m pip --no-cache-dir install -r /home/requirements.txt && \ - python3.13t -m pip --no-cache-dir install -r /root/requirements.txt - # python3.13t -m pip --no-cache-dir install -r /home/requirements.txt + python3.13 -m pip --no-cache-dir install --break-system-packages --ignore-installed -r /home/requirements.txt && \ + python3.13t -m pip --no-cache-dir install -r /root/requirements.txt && \ + python3.14 -m pip --no-cache-dir install -r /root/requirements.txt && \ + python3.14 -m pip --no-cache-dir install --break-system-packages --ignore-installed -r /home/requirements.txt && \ + python3.14t -m pip --no-cache-dir install -r /root/requirements.txt + # clang14 RUN apt-get update &&\ diff --git a/tools/dockerfile/ci_dockerfile.sh 
b/tools/dockerfile/ci_dockerfile.sh index d507c61ee0e0ce..836482320fda3d 100644 --- a/tools/dockerfile/ci_dockerfile.sh +++ b/tools/dockerfile/ci_dockerfile.sh @@ -132,11 +132,41 @@ function make_ubuntu20_cu123_dockerfile(){ cd /home \&\& rm -rf PaddleNLP" ${dockerfile_name} } +function make_ubuntu24_cu126_dockerfile(){ + dockerfile_name="Dockerfile.cuda126_cudnn9_gcc122_ubuntu24" + sed "s##nvidia/cuda:12.6.0-cudnn-devel-ubuntu24.04#g" ./Dockerfile.ubuntu24 >${dockerfile_name} + sed -i "s##ENV LD_LIBRARY_PATH=/usr/local/cuda-12.6/targets/x86_64-linux/lib:\$LD_LIBRARY_PATH #g" ${dockerfile_name} + sed -i 's###g' ${dockerfile_name} + sed -i "7i ENV TZ=Asia/Beijing" ${dockerfile_name} + sed -i "8i RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone" ${dockerfile_name} + sed -i "27i RUN apt-get update && apt-get install -y liblzma-dev openmpi-bin openmpi-doc libopenmpi-dev libsndfile1" ${dockerfile_name} + dockerfile_line=$(wc -l ${dockerfile_name}|awk '{print $1}') + sed -i "${dockerfile_line}i RUN wget --no-check-certificate -q https://paddle-edl.bj.bcebos.com/hadoop-2.7.7.tar.gz \&\& \ + tar -xzf hadoop-2.7.7.tar.gz && mv hadoop-2.7.7 /usr/local/" ${dockerfile_name} + sed -i "${dockerfile_line}i RUN apt remove git -y \&\& apt update \&\& apt install -y libcurl4-openssl-dev gettext pigz zstd ninja-build \&\& wget -q https://paddle-ci.gz.bcebos.com/git-2.17.1.tar.gz \&\& \ + tar -xvf git-2.17.1.tar.gz \&\& \ + cd git-2.17.1 \&\& \ + ./configure --with-openssl --with-curl --prefix=/usr/local \&\& \ + make -j8 \&\& make install " ${dockerfile_name} + sed -i "${dockerfile_line}i RUN pip install wheel \&\& pip3 install PyGithub wheel distro" ${dockerfile_name} + sed -i 's# && rm /etc/apt/sources.list.d/nvidia-ml.list##g' ${dockerfile_name} + sed -i 's#RUN bash /build_scripts/install_trt.sh#RUN bash /build_scripts/install_trt.sh trt8616#g' ${dockerfile_name} + + sed -i "${dockerfile_line}i WORKDIR /home \n \ + RUN git clone --depth=1 
https://github.com/PaddlePaddle/PaddleFormers \&\& cd PaddleFormers \&\& \ + pip3.10 install -r requirements.txt \&\& \ + pip3.10 install -r requirements-dev.txt \&\& \ + pip3.10 install -r tests/requirements.txt \&\& \ + pip3.10 install pytest-timeout \&\& \ + cd /home \&\& rm -rf PaddleFormers" ${dockerfile_name} +} + function main() { make_cpu_dockerfile make_ce_framework_dockerfile make_ubuntu20_cu12_dockerfile make_ubuntu20_cu123_dockerfile + make_ubuntu24_cu126_dockerfile } main "$@"