From 4be43970ea9c19b2c5acf95a2aa7d84dc1c6073d Mon Sep 17 00:00:00 2001 From: Philip Meier Date: Sat, 20 May 2023 23:17:04 +0200 Subject: [PATCH 1/5] kill CircleCI --- .circleci/.gitignore | 1 - .circleci/build_docs/commit_docs.sh | 35 -- .circleci/config.yml | 271 -------------- .circleci/config.yml.in | 251 ------------- .circleci/regenerate.py | 58 --- .circleci/smoke_test/docker/Dockerfile | 34 -- .../android/scripts/binary_android_build.sh | 27 -- .../android/scripts/binary_android_upload.sh | 34 -- .../android/scripts/install_gradle.sh | 19 - .../unittest/ios/scripts/binary_ios_build.sh | 47 --- .../unittest/ios/scripts/binary_ios_upload.sh | 42 --- .../unittest/linux/scripts/environment.yml | 16 - .circleci/unittest/linux/scripts/install.sh | 45 --- .../unittest/linux/scripts/post_process.sh | 6 - .../linux/scripts/run-clang-format.py | 331 ------------------ .circleci/unittest/linux/scripts/run_test.sh | 22 -- .circleci/unittest/linux/scripts/setup_env.sh | 47 --- .../unittest/windows/scripts/environment.yml | 19 - .circleci/unittest/windows/scripts/install.sh | 53 --- .../windows/scripts/install_conda.bat | 1 - .../unittest/windows/scripts/post_process.sh | 6 - .../unittest/windows/scripts/run_test.sh | 12 - .../unittest/windows/scripts/set_cuda_envs.sh | 48 --- .../unittest/windows/scripts/setup_env.sh | 45 --- .../windows/scripts/vc_env_helper.bat | 39 --- 25 files changed, 1509 deletions(-) delete mode 100644 .circleci/.gitignore delete mode 100755 .circleci/build_docs/commit_docs.sh delete mode 100644 .circleci/config.yml delete mode 100644 .circleci/config.yml.in delete mode 100755 .circleci/regenerate.py delete mode 100644 .circleci/smoke_test/docker/Dockerfile delete mode 100644 .circleci/unittest/android/scripts/binary_android_build.sh delete mode 100644 .circleci/unittest/android/scripts/binary_android_upload.sh delete mode 100755 .circleci/unittest/android/scripts/install_gradle.sh delete mode 100755 .circleci/unittest/ios/scripts/binary_ios_build.sh delete mode 100644 .circleci/unittest/ios/scripts/binary_ios_upload.sh delete mode 100644 .circleci/unittest/linux/scripts/environment.yml delete mode 100755 .circleci/unittest/linux/scripts/install.sh delete mode 100755 .circleci/unittest/linux/scripts/post_process.sh delete mode 100755 .circleci/unittest/linux/scripts/run-clang-format.py delete mode 100755 .circleci/unittest/linux/scripts/run_test.sh delete mode 100755 .circleci/unittest/linux/scripts/setup_env.sh delete mode 100644 .circleci/unittest/windows/scripts/environment.yml delete mode 100644 .circleci/unittest/windows/scripts/install.sh delete mode 100644 .circleci/unittest/windows/scripts/install_conda.bat delete mode 100644 .circleci/unittest/windows/scripts/post_process.sh delete mode 100644 .circleci/unittest/windows/scripts/run_test.sh delete mode 100644 .circleci/unittest/windows/scripts/set_cuda_envs.sh delete mode 100644 .circleci/unittest/windows/scripts/setup_env.sh delete mode 100644 .circleci/unittest/windows/scripts/vc_env_helper.bat diff --git a/.circleci/.gitignore b/.circleci/.gitignore deleted file mode 100644 index 485dee64bcf..00000000000 --- a/.circleci/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.idea diff --git a/.circleci/build_docs/commit_docs.sh b/.circleci/build_docs/commit_docs.sh deleted file mode 100755 index 04e3538fefc..00000000000 --- a/.circleci/build_docs/commit_docs.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - -set -ex - - -if [ "$2" == "" ]; then - echo call as "$0" "" "" - echo where src is the root of the built 
documentation git checkout and - echo branch should be "main" or "1.7" or so - exit 1 -fi - -src=$1 -target=$2 - -echo "committing docs from ${src} to ${target}" - -pushd "${src}" -git checkout gh-pages -mkdir -p ./"${target}" -rm -rf ./"${target}"/* -cp -r "${src}/docs/build/html/"* ./"$target" -if [ "${target}" == "main" ]; then - mkdir -p ./_static - rm -rf ./_static/* - cp -r "${src}/docs/build/html/_static/"* ./_static - git add --all ./_static || true -fi -git add --all ./"${target}" || true -git config user.email "soumith+bot@pytorch.org" -git config user.name "pytorchbot" -# If there aren't changes, don't make a commit; push is no-op -git commit -m "auto-generating sphinx docs" || true -git remote add https https://github.com/pytorch/vision.git -git push -u https gh-pages diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 0463007af46..00000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,271 +0,0 @@ -version: 2.1 - -# How to test the Linux jobs: -# - Install CircleCI local CLI: https://circleci.com/docs/2.0/local-cli/ -# - circleci config process .circleci/config.yml > gen.yml && circleci local execute -c gen.yml --job binary_win_wheel_py3.8 -# - Replace binary_win_wheel_py3.8 with the name of the job you want to test. -# Job names are 'name:' key. - -executors: - windows-cpu: - machine: - resource_class: windows.xlarge - image: windows-server-2019-vs2019:stable - shell: bash.exe - - windows-gpu: - machine: - resource_class: windows.gpu.nvidia.medium - image: windows-server-2019-nvidia:stable - shell: bash.exe - -commands: - checkout_merge: - description: "checkout merge branch" - steps: - - checkout -# - run: -# name: Checkout merge branch -# command: | -# set -ex -# BRANCH=$(git rev-parse --abbrev-ref HEAD) -# if [[ "$BRANCH" != "main" ]]; then -# git fetch --force origin ${CIRCLE_BRANCH}/merge:merged/${CIRCLE_BRANCH} -# git checkout "merged/$CIRCLE_BRANCH" -# fi - designate_upload_channel: - description: "inserts the correct upload channel into ${BASH_ENV}" - steps: - - run: - name: adding UPLOAD_CHANNEL to BASH_ENV - command: | - our_upload_channel=nightly - # On tags upload to test instead - if [[ -n "${CIRCLE_TAG}" ]]; then - our_upload_channel=test - fi - echo "export UPLOAD_CHANNEL=${our_upload_channel}" >> ${BASH_ENV} - - pip_install: - parameters: - args: - type: string - descr: - type: string - default: "" - user: - type: boolean - default: true - steps: - - run: - name: > - <<^ parameters.descr >> pip install << parameters.args >> <> - <<# parameters.descr >> << parameters.descr >> <> - command: > - pip install - <<# parameters.user >> --user <> - --progress-bar=off - << parameters.args >> - -binary_common: &binary_common - parameters: - # Edit these defaults to do a release - build_version: - description: "version number of release binary; by default, build a nightly" - type: string - default: "" - pytorch_version: - description: "PyTorch version to build against; by default, use a nightly" - type: string - default: "" - # Don't edit these - python_version: - description: "Python version to build against (e.g., 3.8)" - type: string - cu_version: - description: "CUDA version to build against, in CU format (e.g., cpu or cu100)" - type: string - default: "cpu" - unicode_abi: - description: "Python 2.7 wheel only: whether or not we are cp27mu (default: no)" - type: string - default: "" - wheel_docker_image: - description: "Wheel only: what docker image to use" - type: string - default: "" - conda_docker_image: - description: "Conda 
only: what docker image to use" - type: string - default: "pytorch/conda-builder:cpu" - environment: - PYTHON_VERSION: << parameters.python_version >> - PYTORCH_VERSION: << parameters.pytorch_version >> - UNICODE_ABI: << parameters.unicode_abi >> - CU_VERSION: << parameters.cu_version >> - MACOSX_DEPLOYMENT_TARGET: 10.9 - -smoke_test_common: &smoke_test_common - <<: *binary_common - docker: - - image: torchvision/smoke_test:latest - -jobs: - circleci_consistency: - docker: - - image: cimg/python:3.8 - steps: - - checkout - - pip_install: - args: jinja2 pyyaml - - run: - name: Check CircleCI config consistency - command: | - python .circleci/regenerate.py - git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! Run .circleci/regenerate.py to update config"; exit 1) - - smoke_test_docker_image_build: - machine: - image: ubuntu-2004:202104-01 - resource_class: large - environment: - image_name: torchvision/smoke_test - steps: - - checkout - - designate_upload_channel - - run: - name: Build and push Docker image - no_output_timeout: "1h" - command: | - set +x - echo "${DOCKER_HUB_TOKEN}" | docker login --username "${DOCKER_HUB_USERNAME}" --password-stdin - set -x - cd .circleci/smoke_test/docker && docker build . -t ${image_name}:${CIRCLE_WORKFLOW_ID} - docker tag ${image_name}:${CIRCLE_WORKFLOW_ID} ${image_name}:latest - docker push ${image_name}:${CIRCLE_WORKFLOW_ID} - docker push ${image_name}:latest - - cmake_linux_cpu: - <<: *binary_common - docker: - - image: "pytorch/manylinux-cpu" - resource_class: 2xlarge+ - steps: - - checkout_merge - - designate_upload_channel - - run: - name: Setup conda - command: .circleci/unittest/linux/scripts/setup_env.sh - - run: packaging/build_cmake.sh - - cmake_linux_gpu: - <<: *binary_common - machine: - image: ubuntu-2004-cuda-11.4:202110-01 - resource_class: gpu.nvidia.small - steps: - - checkout_merge - - designate_upload_channel - - run: - name: Setup conda - command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> .circleci/unittest/linux/scripts/setup_env.sh - - run: - name: Build torchvision C++ distribution and test - no_output_timeout: 30m - command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -e UPLOAD_CHANNEL -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> packaging/build_cmake.sh - - cmake_macos_cpu: - <<: *binary_common - macos: - xcode: "14.0" - steps: - - checkout_merge - - designate_upload_channel - - run: - command: | - curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - sh conda.sh -b - source $HOME/miniconda3/bin/activate - conda install -yq conda-build cmake python=<< parameters.python_version >> - packaging/build_cmake.sh - - cmake_windows_cpu: - <<: *binary_common - executor: - name: windows-cpu - steps: - - checkout_merge - - designate_upload_channel - - run: - command: | - set -ex - source packaging/windows/internal/vc_install_helper.sh - eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')" - conda activate base - conda create -yn python39 python=3.9 - conda activate python39 - packaging/build_cmake.sh - - cmake_windows_gpu: - <<: *binary_common - executor: - name: windows-gpu - steps: - - checkout_merge - - designate_upload_channel - - run: - name: Update CUDA driver - command: packaging/windows/internal/driver_update.bat - - run: - command: | - set -ex - source 
packaging/windows/internal/vc_install_helper.sh - packaging/windows/internal/cuda_install.bat - eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')" - conda activate - conda update -y conda - conda create -yn python39 python=3.9 - conda activate python39 - packaging/build_cmake.sh - - -workflows: - lint: - jobs: - - circleci_consistency - - cmake: - jobs: - - cmake_linux_cpu: - cu_version: cpu - name: cmake_linux_cpu - python_version: '3.8' - - cmake_linux_gpu: - cu_version: cu117 - name: cmake_linux_gpu - python_version: '3.8' - wheel_docker_image: pytorch/manylinux-cuda117 - - cmake_windows_cpu: - cu_version: cpu - name: cmake_windows_cpu - python_version: '3.8' - - cmake_windows_gpu: - cu_version: cu117 - name: cmake_windows_gpu - python_version: '3.8' - - cmake_macos_cpu: - cu_version: cpu - name: cmake_macos_cpu - python_version: '3.8' - - docker_build: - triggers: - - schedule: - cron: "0 10 * * 0" - filters: - branches: - only: - - main - jobs: - - smoke_test_docker_image_build: - context: org-member diff --git a/.circleci/config.yml.in b/.circleci/config.yml.in deleted file mode 100644 index f3a88f2d987..00000000000 --- a/.circleci/config.yml.in +++ /dev/null @@ -1,251 +0,0 @@ -version: 2.1 - -# How to test the Linux jobs: -# - Install CircleCI local CLI: https://circleci.com/docs/2.0/local-cli/ -# - circleci config process .circleci/config.yml > gen.yml && circleci local execute -c gen.yml --job binary_win_wheel_py3.8 -# - Replace binary_win_wheel_py3.8 with the name of the job you want to test. -# Job names are 'name:' key. - -executors: - windows-cpu: - machine: - resource_class: windows.xlarge - image: windows-server-2019-vs2019:stable - shell: bash.exe - - windows-gpu: - machine: - resource_class: windows.gpu.nvidia.medium - image: windows-server-2019-nvidia:stable - shell: bash.exe - -commands: - checkout_merge: - description: "checkout merge branch" - steps: - - checkout -# - run: -# name: Checkout merge branch -# command: | -# set -ex -# BRANCH=$(git rev-parse --abbrev-ref HEAD) -# if [[ "$BRANCH" != "main" ]]; then -# git fetch --force origin ${CIRCLE_BRANCH}/merge:merged/${CIRCLE_BRANCH} -# git checkout "merged/$CIRCLE_BRANCH" -# fi - designate_upload_channel: - description: "inserts the correct upload channel into ${BASH_ENV}" - steps: - - run: - name: adding UPLOAD_CHANNEL to BASH_ENV - command: | - our_upload_channel=nightly - # On tags upload to test instead - if [[ -n "${CIRCLE_TAG}" ]]; then - our_upload_channel=test - fi - echo "export UPLOAD_CHANNEL=${our_upload_channel}" >> ${BASH_ENV} - - pip_install: - parameters: - args: - type: string - descr: - type: string - default: "" - user: - type: boolean - default: true - steps: - - run: - name: > - <<^ parameters.descr >> pip install << parameters.args >> <> - <<# parameters.descr >> << parameters.descr >> <> - command: > - pip install - <<# parameters.user >> --user <> - --progress-bar=off - << parameters.args >> - -binary_common: &binary_common - parameters: - # Edit these defaults to do a release - build_version: - description: "version number of release binary; by default, build a nightly" - type: string - default: "" - pytorch_version: - description: "PyTorch version to build against; by default, use a nightly" - type: string - default: "" - # Don't edit these - python_version: - description: "Python version to build against (e.g., 3.8)" - type: string - cu_version: - description: "CUDA version to build against, in CU format (e.g., cpu or cu100)" - type: string - default: "cpu" - unicode_abi: - 
description: "Python 2.7 wheel only: whether or not we are cp27mu (default: no)" - type: string - default: "" - wheel_docker_image: - description: "Wheel only: what docker image to use" - type: string - default: "" - conda_docker_image: - description: "Conda only: what docker image to use" - type: string - default: "pytorch/conda-builder:cpu" - environment: - PYTHON_VERSION: << parameters.python_version >> - PYTORCH_VERSION: << parameters.pytorch_version >> - UNICODE_ABI: << parameters.unicode_abi >> - CU_VERSION: << parameters.cu_version >> - MACOSX_DEPLOYMENT_TARGET: 10.9 - -smoke_test_common: &smoke_test_common - <<: *binary_common - docker: - - image: torchvision/smoke_test:latest - -jobs: - circleci_consistency: - docker: - - image: cimg/python:3.8 - steps: - - checkout - - pip_install: - args: jinja2 pyyaml - - run: - name: Check CircleCI config consistency - command: | - python .circleci/regenerate.py - git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! Run .circleci/regenerate.py to update config"; exit 1) - - smoke_test_docker_image_build: - machine: - image: ubuntu-2004:202104-01 - resource_class: large - environment: - image_name: torchvision/smoke_test - steps: - - checkout - - designate_upload_channel - - run: - name: Build and push Docker image - no_output_timeout: "1h" - command: | - set +x - echo "${DOCKER_HUB_TOKEN}" | docker login --username "${DOCKER_HUB_USERNAME}" --password-stdin - set -x - cd .circleci/smoke_test/docker && docker build . -t ${image_name}:${CIRCLE_WORKFLOW_ID} - docker tag ${image_name}:${CIRCLE_WORKFLOW_ID} ${image_name}:latest - docker push ${image_name}:${CIRCLE_WORKFLOW_ID} - docker push ${image_name}:latest - - cmake_linux_cpu: - <<: *binary_common - docker: - - image: "pytorch/manylinux-cpu" - resource_class: 2xlarge+ - steps: - - checkout_merge - - designate_upload_channel - - run: - name: Setup conda - command: .circleci/unittest/linux/scripts/setup_env.sh - - run: packaging/build_cmake.sh - - cmake_linux_gpu: - <<: *binary_common - machine: - image: ubuntu-2004-cuda-11.4:202110-01 - resource_class: gpu.nvidia.small - steps: - - checkout_merge - - designate_upload_channel - - run: - name: Setup conda - command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> .circleci/unittest/linux/scripts/setup_env.sh - - run: - name: Build torchvision C++ distribution and test - no_output_timeout: 30m - command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -e UPLOAD_CHANNEL -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> packaging/build_cmake.sh - - cmake_macos_cpu: - <<: *binary_common - macos: - xcode: "14.0" - steps: - - checkout_merge - - designate_upload_channel - - run: - command: | - curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - sh conda.sh -b - source $HOME/miniconda3/bin/activate - conda install -yq conda-build cmake python=<< parameters.python_version >> - packaging/build_cmake.sh - - cmake_windows_cpu: - <<: *binary_common - executor: - name: windows-cpu - steps: - - checkout_merge - - designate_upload_channel - - run: - command: | - set -ex - source packaging/windows/internal/vc_install_helper.sh - eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')" - conda activate base - conda create -yn python39 python=3.9 - conda activate python39 - packaging/build_cmake.sh - - cmake_windows_gpu: - <<: 
*binary_common - executor: - name: windows-gpu - steps: - - checkout_merge - - designate_upload_channel - - run: - name: Update CUDA driver - command: packaging/windows/internal/driver_update.bat - - run: - command: | - set -ex - source packaging/windows/internal/vc_install_helper.sh - packaging/windows/internal/cuda_install.bat - eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')" - conda activate - conda update -y conda - conda create -yn python39 python=3.9 - conda activate python39 - packaging/build_cmake.sh - - -workflows: - lint: - jobs: - - circleci_consistency - - cmake: - jobs: - {{ cmake_workflows() }} - - docker_build: - triggers: - - schedule: - cron: "0 10 * * 0" - filters: - branches: - only: - - main - jobs: - - smoke_test_docker_image_build: - context: org-member diff --git a/.circleci/regenerate.py b/.circleci/regenerate.py deleted file mode 100755 index e7b8db3d56e..00000000000 --- a/.circleci/regenerate.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python3 - -""" -This script should use a very simple, functional programming style. -Avoid Jinja macros in favor of native Python functions. - -Don't go overboard on code generation; use Python only to generate -content that can't be easily declared statically using CircleCI's YAML API. - -Data declarations (e.g. the nested loops for defining the configuration matrix) -should be at the top of the file for easy updating. - -See this comment for design rationale: -https://github.com/pytorch/vision/pull/1321#issuecomment-531033978 -""" - -import os.path - -import jinja2 -import yaml -from jinja2 import select_autoescape - - -def indent(indentation, data_list): - return ("\n" + " " * indentation).join(yaml.dump(data_list, default_flow_style=False).splitlines()) - - -def cmake_workflows(indentation=6): - jobs = [] - python_version = "3.8" - for os_type in ["linux", "windows", "macos"]: - # Skip OSX CUDA - device_types = ["cpu", "gpu"] if os_type != "macos" else ["cpu"] - for device in device_types: - job = {"name": f"cmake_{os_type}_{device}", "python_version": python_version} - - job["cu_version"] = "cu117" if device == "gpu" else "cpu" - if device == "gpu" and os_type == "linux": - job["wheel_docker_image"] = "pytorch/manylinux-cuda117" - jobs.append({f"cmake_{os_type}_{device}": job}) - return indent(indentation, jobs) - - -if __name__ == "__main__": - d = os.path.dirname(__file__) - env = jinja2.Environment( - loader=jinja2.FileSystemLoader(d), - lstrip_blocks=True, - autoescape=select_autoescape(enabled_extensions=("html", "xml")), - keep_trailing_newline=True, - ) - - with open(os.path.join(d, "config.yml"), "w") as f: - f.write( - env.get_template("config.yml.in").render( - cmake_workflows=cmake_workflows, - ) - ) diff --git a/.circleci/smoke_test/docker/Dockerfile b/.circleci/smoke_test/docker/Dockerfile deleted file mode 100644 index 34bdcda1053..00000000000 --- a/.circleci/smoke_test/docker/Dockerfile +++ /dev/null @@ -1,34 +0,0 @@ -# this Dockerfile is for torchvision smoke test, it will be created periodically via CI system -# if you need to do it locally, follow below steps once you have Docker installed -# assuming you're within the directory where this Dockerfile located -# $ docker build . 
-t torchvision/smoketest - -# if you want to push to aws ecr, make sure you have the rights to write to ECR, then run -# $ eval $(aws ecr get-login --region us-east-1 --no-include-email) -# $ export MYTAG=localbuild ## you can choose whatever tag you like -# $ docker tag torchvision/smoketest 308535385114.dkr.ecr.us-east-1.amazonaws.com/torchvision/smoke_test:${MYTAG} -# $ docker push 308535385114.dkr.ecr.us-east-1.amazonaws.com/torchvision/smoke_test:${MYTAG} - -FROM ubuntu:latest - -RUN apt-get -qq update && apt-get -qq -y install curl bzip2 libsox-fmt-all \ - && curl -sSL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o /tmp/miniconda.sh \ - && bash /tmp/miniconda.sh -bfp /usr/local \ - && rm -rf /tmp/miniconda.sh \ - && conda install -y python=3 \ - && conda update conda \ - && apt-get -qq -y remove curl bzip2 \ - && apt-get -qq -y autoremove \ - && apt-get autoclean \ - && rm -rf /var/lib/apt/lists/* /var/log/dpkg.log \ - && conda clean --all --yes - -ENV PATH /opt/conda/bin:$PATH - -RUN conda create -y --name python3.7 python=3.7 -RUN conda create -y --name python3.8 python=3.8 -RUN conda create -y --name python3.9 python=3.9 -RUN conda create -y --name python3.10 python=3.10 -SHELL [ "/bin/bash", "-c" ] -RUN echo "source /usr/local/etc/profile.d/conda.sh" >> ~/.bashrc -CMD [ "/bin/bash"] diff --git a/.circleci/unittest/android/scripts/binary_android_build.sh b/.circleci/unittest/android/scripts/binary_android_build.sh deleted file mode 100644 index 0d8c0d47d8a..00000000000 --- a/.circleci/unittest/android/scripts/binary_android_build.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -set -ex -o pipefail - -echo "DIR: $(pwd)" -echo "ANDROID_HOME=${ANDROID_HOME}" -echo "ANDROID_NDK_HOME=${ANDROID_NDK_HOME}" -echo "JAVA_HOME=${JAVA_HOME}" - -WORKSPACE=/home/circleci/workspace -VISION_ANDROID=/home/circleci/project/android - -. /home/circleci/project/.circleci/unittest/android/scripts/install_gradle.sh - -GRADLE_LOCAL_PROPERTIES=${VISION_ANDROID}/local.properties -rm -f $GRADLE_LOCAL_PROPERTIES - -echo "sdk.dir=${ANDROID_HOME}" >> $GRADLE_LOCAL_PROPERTIES -echo "ndk.dir=${ANDROID_NDK_HOME}" >> $GRADLE_LOCAL_PROPERTIES - -echo "GRADLE_PATH $GRADLE_PATH" -echo "GRADLE_HOME $GRADLE_HOME" - -${GRADLE_PATH} --scan --stacktrace --debug --no-daemon -p ${VISION_ANDROID} assemble || true - -mkdir -p ~/workspace/artifacts -find . -type f -name *aar -print | xargs tar cfvz ~/workspace/artifacts/artifacts-aars.tgz -find . -type f -name *apk -print | xargs tar cfvz ~/workspace/artifacts/artifacts-apks.tgz diff --git a/.circleci/unittest/android/scripts/binary_android_upload.sh b/.circleci/unittest/android/scripts/binary_android_upload.sh deleted file mode 100644 index 1472a877d90..00000000000 --- a/.circleci/unittest/android/scripts/binary_android_upload.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -set -ex -o pipefail - -echo "DIR: $(pwd)" -echo "ANDROID_HOME=${ANDROID_HOME}" -echo "ANDROID_NDK_HOME=${ANDROID_NDK_HOME}" -echo "JAVA_HOME=${JAVA_HOME}" - -WORKSPACE=/home/circleci/workspace -VISION_ANDROID=/home/circleci/project/android - -. 
/home/circleci/project/.circleci/unittest/android/scripts/install_gradle.sh - -GRADLE_LOCAL_PROPERTIES=${VISION_ANDROID}/local.properties -rm -f $GRADLE_LOCAL_PROPERTIES -GRADLE_PROPERTIES=/home/circleci/project/android/gradle.properties - -echo "sdk.dir=${ANDROID_HOME}" >> $GRADLE_LOCAL_PROPERTIES -echo "ndk.dir=${ANDROID_NDK_HOME}" >> $GRADLE_LOCAL_PROPERTIES - -echo "SONATYPE_NEXUS_USERNAME=${SONATYPE_NEXUS_USERNAME}" >> $GRADLE_PROPERTIES -echo "mavenCentralRepositoryUsername=${SONATYPE_NEXUS_USERNAME}" >> $GRADLE_PROPERTIES -echo "SONATYPE_NEXUS_PASSWORD=${SONATYPE_NEXUS_PASSWORD}" >> $GRADLE_PROPERTIES -echo "mavenCentralRepositoryPassword=${SONATYPE_NEXUS_PASSWORD}" >> $GRADLE_PROPERTIES - -echo "signing.keyId=${ANDROID_SIGN_KEY}" >> $GRADLE_PROPERTIES -echo "signing.password=${ANDROID_SIGN_PASS}" >> $GRADLE_PROPERTIES - -cat /home/circleci/project/android/gradle.properties | grep VERSION - -${GRADLE_PATH} --scan --stacktrace --debug --no-daemon -p ${VISION_ANDROID} ops:uploadArchives - -mkdir -p ~/workspace/artifacts -find . -type f -name *aar -print | xargs tar cfvz ~/workspace/artifacts/artifacts-aars.tgz diff --git a/.circleci/unittest/android/scripts/install_gradle.sh b/.circleci/unittest/android/scripts/install_gradle.sh deleted file mode 100755 index ff90c657e65..00000000000 --- a/.circleci/unittest/android/scripts/install_gradle.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -set -ex - -_https_amazon_aws=https://downloads.gradle-dn.com/distributions -GRADLE_VERSION=6.8.3 - -_gradle_home=/opt/gradle -sudo rm -rf $gradle_home -sudo mkdir -p $_gradle_home - -curl --silent --output /tmp/gradle.zip --retry 3 $_https_amazon_aws/gradle-${GRADLE_VERSION}-bin.zip - -sudo unzip -q /tmp/gradle.zip -d $_gradle_home -rm /tmp/gradle.zip - -sudo chmod -R 777 $_gradle_home - -export GRADLE_HOME=$_gradle_home/gradle-$GRADLE_VERSION -export GRADLE_PATH=${GRADLE_HOME}/bin/gradle diff --git a/.circleci/unittest/ios/scripts/binary_ios_build.sh b/.circleci/unittest/ios/scripts/binary_ios_build.sh deleted file mode 100755 index 1f117481f2d..00000000000 --- a/.circleci/unittest/ios/scripts/binary_ios_build.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash -set -ex -o pipefail - -echo "" -echo "DIR: $(pwd)" -WORKSPACE=/Users/distiller/workspace -PROJ_ROOT_IOS=/Users/distiller/project/ios -PYTORCH_IOS_NIGHTLY_NAME=libtorch_ios_nightly_build.zip -export TCLLIBPATH="/usr/local/lib" - -# install conda -curl --retry 3 -o ~/conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -chmod +x ~/conda.sh -/bin/bash ~/conda.sh -b -p ~/anaconda -export PATH="~/anaconda/bin:${PATH}" -source ~/anaconda/bin/activate - -# install dependencies -conda install numpy ninja pyyaml mkl mkl-include setuptools cmake cffi requests wget --yes -conda install -c conda-forge valgrind --yes -export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"} - -# sync submodules -cd ${PROJ_ROOT_IOS} -git submodule sync -git submodule update --init --recursive - -# download pytorch-iOS nightly build and unzip it -mkdir -p ${PROJ_ROOT_IOS}/lib -mkdir -p ${PROJ_ROOT_IOS}/build -mkdir -p ${PROJ_ROOT_IOS}/pytorch -TORCH_ROOT="${PROJ_ROOT_IOS}/pytorch" - -cd ${TORCH_ROOT} -wget https://ossci-ios-build.s3.amazonaws.com/${PYTORCH_IOS_NIGHTLY_NAME} -mkdir -p ./build_ios -unzip -d ./build_ios ./${PYTORCH_IOS_NIGHTLY_NAME} - -LIBTORCH_HEADER_ROOT="${TORCH_ROOT}/build_ios/install/include" -cd ${PROJ_ROOT_IOS} -IOS_ARCH=${IOS_ARCH} LIBTORCH_HEADER_ROOT=${LIBTORCH_HEADER_ROOT} ./build_ios.sh -rm -rf ${TORCH_ROOT} - 
-# store the binary -DEST_DIR=${WORKSPACE}/ios/${IOS_ARCH} -mkdir -p ${DEST_DIR} -cp ${PROJ_ROOT_IOS}/lib/*.a ${DEST_DIR} diff --git a/.circleci/unittest/ios/scripts/binary_ios_upload.sh b/.circleci/unittest/ios/scripts/binary_ios_upload.sh deleted file mode 100644 index ce56388e5da..00000000000 --- a/.circleci/unittest/ios/scripts/binary_ios_upload.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash -set -ex -o pipefail - -echo "" -echo "DIR: $(pwd)" - -WORKSPACE=/Users/distiller/workspace -PROJ_ROOT=/Users/distiller/project -ARTIFACTS_DIR=${WORKSPACE}/ios -ls ${ARTIFACTS_DIR} -ZIP_DIR=${WORKSPACE}/zip -mkdir -p ${ZIP_DIR}/install/lib - -# build a FAT bianry -cd ${ZIP_DIR}/install/lib -libs=("${ARTIFACTS_DIR}/x86_64/libtorchvision_ops.a" "${ARTIFACTS_DIR}/arm64/libtorchvision_ops.a") -lipo -create "${libs[@]}" -o ${ZIP_DIR}/install/lib/libtorchvision_ops.a -lipo -i ${ZIP_DIR}/install/lib/*.a - -# copy the license -cp ${PROJ_ROOT}/LICENSE ${ZIP_DIR}/ -# zip the library -ZIPFILE=libtorchvision_ops_ios_nightly_build.zip -cd ${ZIP_DIR} -#for testing -touch version.txt -echo $(date +%s) > version.txt -zip -r ${ZIPFILE} install version.txt LICENSE - -# upload to aws -# Install conda then 'conda install' awscli -curl --retry 3 -o ~/conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -chmod +x ~/conda.sh -/bin/bash ~/conda.sh -b -p ~/anaconda -export PATH="~/anaconda/bin:${PATH}" -source ~/anaconda/bin/activate -conda install -c conda-forge awscli --yes -set +x -export AWS_ACCESS_KEY_ID=${AWS_S3_ACCESS_KEY_FOR_PYTORCH_BINARY_UPLOAD} -export AWS_SECRET_ACCESS_KEY=${AWS_S3_ACCESS_SECRET_FOR_PYTORCH_BINARY_UPLOAD} -set -x -aws s3 cp ${ZIPFILE} s3://ossci-ios-build/ --acl public-read diff --git a/.circleci/unittest/linux/scripts/environment.yml b/.circleci/unittest/linux/scripts/environment.yml deleted file mode 100644 index fae96c5f93c..00000000000 --- a/.circleci/unittest/linux/scripts/environment.yml +++ /dev/null @@ -1,16 +0,0 @@ -channels: - - pytorch - - defaults -dependencies: - - pytest - - pytest-cov - - pytest-mock - - pip - - libpng - - jpeg - - ca-certificates - - h5py - - pip: - - future - - scipy - - av < 10 diff --git a/.circleci/unittest/linux/scripts/install.sh b/.circleci/unittest/linux/scripts/install.sh deleted file mode 100755 index 6375e191072..00000000000 --- a/.circleci/unittest/linux/scripts/install.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env bash - -unset PYTORCH_VERSION -# For unittest, nightly PyTorch is used as the following section, -# so no need to set PYTORCH_VERSION. -# In fact, keeping PYTORCH_VERSION forces us to hardcode PyTorch version in config. 
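# Illustration, not part of the original install script: the logic below maps a
# CU_VERSION string onto the conda package spec it installs. A minimal sketch of
# that mapping; the sample values are assumptions for demonstration only.
cu_version_to_package() {
    local CU_VERSION="$1" version
    if [ "${CU_VERSION}" == cpu ]; then
        echo "cpuonly"
    elif [ ${#CU_VERSION} -eq 4 ]; then
        version="${CU_VERSION:2:1}.${CU_VERSION:3:1}"   # e.g. cu92  -> 9.2
        echo "pytorch-cuda=${version}"
    else
        version="${CU_VERSION:2:2}.${CU_VERSION:4:1}"   # e.g. cu117 -> 11.7
        echo "pytorch-cuda=${version}"
    fi
}
cu_version_to_package cpu     # prints: cpuonly
cu_version_to_package cu117   # prints: pytorch-cuda=11.7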
- -set -ex - -eval "$(./conda/bin/conda shell.bash hook)" -conda activate ./env - -if [ "${CU_VERSION:-}" == cpu ] ; then - cudatoolkit="cpuonly" - version="cpu" -else - if [[ ${#CU_VERSION} -eq 4 ]]; then - CUDA_VERSION="${CU_VERSION:2:1}.${CU_VERSION:3:1}" - elif [[ ${#CU_VERSION} -eq 5 ]]; then - CUDA_VERSION="${CU_VERSION:2:2}.${CU_VERSION:4:1}" - fi - echo "Using CUDA $CUDA_VERSION as determined by CU_VERSION: ${CU_VERSION} " - version="$(python -c "print('.'.join(\"${CUDA_VERSION}\".split('.')[:2]))")" - cudatoolkit="pytorch-cuda=${version}" - - # make sure local cuda is set to required cuda version and not CUDA version by default - rm -f /usr/local/cuda - ln -s /usr/local/cuda-${version} /usr/local/cuda -fi - -case "$(uname -s)" in - Darwin*) os=MacOSX;; - *) os=Linux -esac - -printf "Installing PyTorch with %s\n" "${cudatoolkit}" -if [ "${os}" == "MacOSX" ]; then - conda install -y -c "pytorch-${UPLOAD_CHANNEL}" "pytorch-${UPLOAD_CHANNEL}"::pytorch "${cudatoolkit}" -else - conda install -y -c "pytorch-${UPLOAD_CHANNEL}" -c nvidia "pytorch-${UPLOAD_CHANNEL}"::pytorch[build="*${version}*"] "${cudatoolkit}" -fi - - -printf "* Installing torchvision\n" -python setup.py develop diff --git a/.circleci/unittest/linux/scripts/post_process.sh b/.circleci/unittest/linux/scripts/post_process.sh deleted file mode 100755 index e97bf2a7b1b..00000000000 --- a/.circleci/unittest/linux/scripts/post_process.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash - -set -e - -eval "$(./conda/bin/conda shell.bash hook)" -conda activate ./env diff --git a/.circleci/unittest/linux/scripts/run-clang-format.py b/.circleci/unittest/linux/scripts/run-clang-format.py deleted file mode 100755 index 5c61b2519e0..00000000000 --- a/.circleci/unittest/linux/scripts/run-clang-format.py +++ /dev/null @@ -1,331 +0,0 @@ -#!/usr/bin/env python -""" -MIT License - -Copyright (c) 2017 Guillaume Papin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -A wrapper script around clang-format, suitable for linting multiple files -and to use for continuous integration. - -This is an alternative API for the clang-format command line. -It runs over multiple files and directories in parallel. -A diff output is produced and a sensible exit code is returned. 
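# Illustration, not part of the wrapper itself: one possible invocation of this
# clang-format wrapper and how its exit status maps to the ExitStatus values it
# defines (0 = clean, 1 = diff produced, 2 = trouble). The script path, target
# directory, and exclude pattern here are assumptions for demonstration.
python run-clang-format.py -r --exclude "*/third_party/*" torchvision/csrc
case $? in
    0) echo "clang-format: no changes needed" ;;
    1) echo "clang-format: reformatting diff was printed" ;;
    2) echo "clang-format: could not run (bad executable or I/O error)" ;;
esac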
- -""" - -import argparse -import difflib -import fnmatch -import multiprocessing -import os -import signal -import subprocess -import sys -import traceback -from functools import partial - -try: - from subprocess import DEVNULL # py3k -except ImportError: - DEVNULL = open(os.devnull, "wb") - - -DEFAULT_EXTENSIONS = "c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx,cu" - - -class ExitStatus: - SUCCESS = 0 - DIFF = 1 - TROUBLE = 2 - - -def list_files(files, recursive=False, extensions=None, exclude=None): - if extensions is None: - extensions = [] - if exclude is None: - exclude = [] - - out = [] - for file in files: - if recursive and os.path.isdir(file): - for dirpath, dnames, fnames in os.walk(file): - fpaths = [os.path.join(dirpath, fname) for fname in fnames] - for pattern in exclude: - # os.walk() supports trimming down the dnames list - # by modifying it in-place, - # to avoid unnecessary directory listings. - dnames[:] = [x for x in dnames if not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)] - fpaths = [x for x in fpaths if not fnmatch.fnmatch(x, pattern)] - for f in fpaths: - ext = os.path.splitext(f)[1][1:] - if ext in extensions: - out.append(f) - else: - out.append(file) - return out - - -def make_diff(file, original, reformatted): - return list( - difflib.unified_diff( - original, reformatted, fromfile=f"{file}\t(original)", tofile=f"{file}\t(reformatted)", n=3 - ) - ) - - -class DiffError(Exception): - def __init__(self, message, errs=None): - super().__init__(message) - self.errs = errs or [] - - -class UnexpectedError(Exception): - def __init__(self, message, exc=None): - super().__init__(message) - self.formatted_traceback = traceback.format_exc() - self.exc = exc - - -def run_clang_format_diff_wrapper(args, file): - try: - ret = run_clang_format_diff(args, file) - return ret - except DiffError: - raise - except Exception as e: - raise UnexpectedError(f"{file}: {e.__class__.__name__}: {e}", e) - - -def run_clang_format_diff(args, file): - try: - with open(file, encoding="utf-8") as f: - original = f.readlines() - except OSError as exc: - raise DiffError(str(exc)) - invocation = [args.clang_format_executable, file] - - # Use of utf-8 to decode the process output. - # - # Hopefully, this is the correct thing to do. - # - # It's done due to the following assumptions (which may be incorrect): - # - clang-format will returns the bytes read from the files as-is, - # without conversion, and it is already assumed that the files use utf-8. - # - if the diagnostics were internationalized, they would use utf-8: - # > Adding Translations to Clang - # > - # > Not possible yet! - # > Diagnostic strings should be written in UTF-8, - # > the client can translate to the relevant code page if needed. - # > Each translation completely replaces the format string - # > for the diagnostic. 
- # > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation - - try: - proc = subprocess.Popen( - invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, encoding="utf-8" - ) - except OSError as exc: - raise DiffError(f"Command '{subprocess.list2cmdline(invocation)}' failed to start: {exc}") - proc_stdout = proc.stdout - proc_stderr = proc.stderr - - # hopefully the stderr pipe won't get full and block the process - outs = list(proc_stdout.readlines()) - errs = list(proc_stderr.readlines()) - proc.wait() - if proc.returncode: - raise DiffError( - "Command '{}' returned non-zero exit status {}".format( - subprocess.list2cmdline(invocation), proc.returncode - ), - errs, - ) - return make_diff(file, original, outs), errs - - -def bold_red(s): - return "\x1b[1m\x1b[31m" + s + "\x1b[0m" - - -def colorize(diff_lines): - def bold(s): - return "\x1b[1m" + s + "\x1b[0m" - - def cyan(s): - return "\x1b[36m" + s + "\x1b[0m" - - def green(s): - return "\x1b[32m" + s + "\x1b[0m" - - def red(s): - return "\x1b[31m" + s + "\x1b[0m" - - for line in diff_lines: - if line[:4] in ["--- ", "+++ "]: - yield bold(line) - elif line.startswith("@@ "): - yield cyan(line) - elif line.startswith("+"): - yield green(line) - elif line.startswith("-"): - yield red(line) - else: - yield line - - -def print_diff(diff_lines, use_color): - if use_color: - diff_lines = colorize(diff_lines) - sys.stdout.writelines(diff_lines) - - -def print_trouble(prog, message, use_colors): - error_text = "error:" - if use_colors: - error_text = bold_red(error_text) - print(f"{prog}: {error_text} {message}", file=sys.stderr) - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--clang-format-executable", - metavar="EXECUTABLE", - help="path to the clang-format executable", - default="clang-format", - ) - parser.add_argument( - "--extensions", - help=f"comma separated list of file extensions (default: {DEFAULT_EXTENSIONS})", - default=DEFAULT_EXTENSIONS, - ) - parser.add_argument("-r", "--recursive", action="store_true", help="run recursively over directories") - parser.add_argument("files", metavar="file", nargs="+") - parser.add_argument("-q", "--quiet", action="store_true") - parser.add_argument( - "-j", - metavar="N", - type=int, - default=0, - help="run N clang-format jobs in parallel (default number of cpus + 1)", - ) - parser.add_argument( - "--color", default="auto", choices=["auto", "always", "never"], help="show colored diff (default: auto)" - ) - parser.add_argument( - "-e", - "--exclude", - metavar="PATTERN", - action="append", - default=[], - help="exclude paths matching the given glob-like pattern(s) from recursive search", - ) - - args = parser.parse_args() - - # use default signal handling, like diff return SIGINT value on ^C - # https://bugs.python.org/issue14229#msg156446 - signal.signal(signal.SIGINT, signal.SIG_DFL) - try: - signal.SIGPIPE - except AttributeError: - # compatibility, SIGPIPE does not exist on Windows - pass - else: - signal.signal(signal.SIGPIPE, signal.SIG_DFL) - - colored_stdout = False - colored_stderr = False - if args.color == "always": - colored_stdout = True - colored_stderr = True - elif args.color == "auto": - colored_stdout = sys.stdout.isatty() - colored_stderr = sys.stderr.isatty() - - version_invocation = [args.clang_format_executable, "--version"] - try: - subprocess.check_call(version_invocation, stdout=DEVNULL) - except subprocess.CalledProcessError as e: - print_trouble(parser.prog, str(e), 
use_colors=colored_stderr) - return ExitStatus.TROUBLE - except OSError as e: - print_trouble( - parser.prog, - f"Command '{subprocess.list2cmdline(version_invocation)}' failed to start: {e}", - use_colors=colored_stderr, - ) - return ExitStatus.TROUBLE - - retcode = ExitStatus.SUCCESS - files = list_files( - args.files, recursive=args.recursive, exclude=args.exclude, extensions=args.extensions.split(",") - ) - - if not files: - return - - njobs = args.j - if njobs == 0: - njobs = multiprocessing.cpu_count() + 1 - njobs = min(len(files), njobs) - - if njobs == 1: - # execute directly instead of in a pool, - # less overhead, simpler stacktraces - it = (run_clang_format_diff_wrapper(args, file) for file in files) - pool = None - else: - pool = multiprocessing.Pool(njobs) - it = pool.imap_unordered(partial(run_clang_format_diff_wrapper, args), files) - while True: - try: - outs, errs = next(it) - except StopIteration: - break - except DiffError as e: - print_trouble(parser.prog, str(e), use_colors=colored_stderr) - retcode = ExitStatus.TROUBLE - sys.stderr.writelines(e.errs) - except UnexpectedError as e: - print_trouble(parser.prog, str(e), use_colors=colored_stderr) - sys.stderr.write(e.formatted_traceback) - retcode = ExitStatus.TROUBLE - # stop at the first unexpected error, - # something could be very wrong, - # don't process all files unnecessarily - if pool: - pool.terminate() - break - else: - sys.stderr.writelines(errs) - if outs == []: - continue - if not args.quiet: - print_diff(outs, use_color=colored_stdout) - if retcode == ExitStatus.SUCCESS: - retcode = ExitStatus.DIFF - return retcode - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/.circleci/unittest/linux/scripts/run_test.sh b/.circleci/unittest/linux/scripts/run_test.sh deleted file mode 100755 index 5348baa71dd..00000000000 --- a/.circleci/unittest/linux/scripts/run_test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - -set -e - -eval "$(./conda/bin/conda shell.bash hook)" -conda activate ./env - -python -m torch.utils.collect_env - -case "$(uname -s)" in - Darwin*) - # The largest macOS runner is not able to handle the regular test suite plus the transforms v2 tests at the same - # time due to insufficient resources. Thus, we ignore the transforms v2 tests at first and run them in a separate - # step afterwards. - GLOB='test/test_transforms_v2*' - pytest --junitxml=test-results/junit.xml -v --durations 20 --ignore-glob="${GLOB}" - eval "pytest --junitxml=test-results/junit-transforms-v2.xml -v --durations 20 ${GLOB}" - ;; - *) - pytest --junitxml=test-results/junit.xml -v --durations 20 - ;; -esac diff --git a/.circleci/unittest/linux/scripts/setup_env.sh b/.circleci/unittest/linux/scripts/setup_env.sh deleted file mode 100755 index 8a8a78f1fb2..00000000000 --- a/.circleci/unittest/linux/scripts/setup_env.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash - -# This script is for setting up environment in which unit test is ran. -# To speed up the CI time, the resulting environment is cached. -# -# Do not install PyTorch and torchvision here, otherwise they also get cached. - -set -ex - -this_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -# Avoid error: "fatal: unsafe repository" -git config --global --add safe.directory '*' -root_dir="$(git rev-parse --show-toplevel)" -conda_dir="${root_dir}/conda" -env_dir="${root_dir}/env" - -cd "${root_dir}" - -case "$(uname -s)" in - Darwin*) os=MacOSX;; - *) os=Linux -esac - -# 1. Install conda at ./conda -if [ ! 
-d "${conda_dir}" ]; then - printf "* Installing conda\n" - wget -O miniconda.sh "http://repo.continuum.io/miniconda/Miniconda3-latest-${os}-x86_64.sh" - bash ./miniconda.sh -b -f -p "${conda_dir}" -fi -eval "$(${conda_dir}/bin/conda shell.bash hook)" - -# 2. Create test environment at ./env -if [ ! -d "${env_dir}" ]; then - printf "* Creating a test environment\n" - conda create --prefix "${env_dir}" -y python="$PYTHON_VERSION" -fi -conda activate "${env_dir}" - -# 3. Install Conda dependencies -printf "* Installing dependencies (except PyTorch)\n" -FFMPEG_PIN="=4.2" -if [[ "${PYTHON_VERSION}" == "3.9" ]]; then - FFMPEG_PIN=">=4.2" -fi - -conda install -y -c pytorch "ffmpeg${FFMPEG_PIN}" -conda env update --file "${this_dir}/environment.yml" --prune diff --git a/.circleci/unittest/windows/scripts/environment.yml b/.circleci/unittest/windows/scripts/environment.yml deleted file mode 100644 index d229aafb41a..00000000000 --- a/.circleci/unittest/windows/scripts/environment.yml +++ /dev/null @@ -1,19 +0,0 @@ -channels: - - pytorch - - defaults -dependencies: - - pytest - - pytest-cov - - pytest-mock - - pip - - libpng - - jpeg - - ca-certificates - - hdf5 - - setuptools - - pip: - - future - - scipy - - av !=9.1.1, <10 - - dataclasses - - h5py diff --git a/.circleci/unittest/windows/scripts/install.sh b/.circleci/unittest/windows/scripts/install.sh deleted file mode 100644 index 7c55c8144d9..00000000000 --- a/.circleci/unittest/windows/scripts/install.sh +++ /dev/null @@ -1,53 +0,0 @@ - -#!/usr/bin/env bash - -unset PYTORCH_VERSION -# For unittest, nightly PyTorch is used as the following section, -# so no need to set PYTORCH_VERSION. -# In fact, keeping PYTORCH_VERSION forces us to hardcode PyTorch version in config. - -set -ex - -this_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -eval "$(./conda/Scripts/conda.exe 'shell.bash' 'hook')" -conda activate ./env - -# TODO, refactor the below logic to make it easy to understand how to get correct cuda_version. -if [ "${CU_VERSION:-}" == cpu ] ; then - cudatoolkit="cpuonly" - version="cpu" -else - if [[ ${#CU_VERSION} -eq 4 ]]; then - CUDA_VERSION="${CU_VERSION:2:1}.${CU_VERSION:3:1}" - elif [[ ${#CU_VERSION} -eq 5 ]]; then - CUDA_VERSION="${CU_VERSION:2:2}.${CU_VERSION:4:1}" - fi - - cuda_toolkit_pckg="cudatoolkit" - if [[ $CUDA_VERSION == 11.6 || $CUDA_VERSION == 11.7 || $CUDA_VERSION == 11.8 || $CUDA_VERSION == 12.1 ]]; then - cuda_toolkit_pckg="pytorch-cuda" - fi - - echo "Using CUDA $CUDA_VERSION as determined by CU_VERSION" - version="$(python -c "print('.'.join(\"${CUDA_VERSION}\".split('.')[:2]))")" - cudatoolkit="${cuda_toolkit_pckg}=${version}" -fi - -printf "Installing PyTorch with %s\n" "${cudatoolkit}" -conda install -y -c "pytorch-${UPLOAD_CHANNEL}" -c nvidia "pytorch-${UPLOAD_CHANNEL}"::pytorch[build="*${version}*"] "${cudatoolkit}" - -torch_cuda=$(python -c "import torch; print(torch.cuda.is_available())") -echo torch.cuda.is_available is $torch_cuda - -if [ ! 
-z "${CUDA_VERSION:-}" ] ; then - if [ "$torch_cuda" == "False" ]; then - echo "torch with cuda installed but torch.cuda.is_available() is False" - exit 1 - fi -fi - -source "$this_dir/set_cuda_envs.sh" - -printf "* Installing torchvision\n" -"$this_dir/vc_env_helper.bat" python setup.py develop diff --git a/.circleci/unittest/windows/scripts/install_conda.bat b/.circleci/unittest/windows/scripts/install_conda.bat deleted file mode 100644 index 6052ad08b10..00000000000 --- a/.circleci/unittest/windows/scripts/install_conda.bat +++ /dev/null @@ -1 +0,0 @@ -start /wait "" "%miniconda_exe%" /S /InstallationType=JustMe /RegisterPython=0 /AddToPath=0 /D=%tmp_conda% diff --git a/.circleci/unittest/windows/scripts/post_process.sh b/.circleci/unittest/windows/scripts/post_process.sh deleted file mode 100644 index 5c5cbb758a9..00000000000 --- a/.circleci/unittest/windows/scripts/post_process.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash - -set -e - -eval "$(./conda/Scripts/conda.exe 'shell.bash' 'hook')" -conda activate ./env diff --git a/.circleci/unittest/windows/scripts/run_test.sh b/.circleci/unittest/windows/scripts/run_test.sh deleted file mode 100644 index 802ad37f511..00000000000 --- a/.circleci/unittest/windows/scripts/run_test.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -set -e - -eval "$(./conda/Scripts/conda.exe 'shell.bash' 'hook')" -conda activate ./env - -this_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -source "$this_dir/set_cuda_envs.sh" - -python -m torch.utils.collect_env -pytest --junitxml=test-results/junit.xml -v --durations 20 diff --git a/.circleci/unittest/windows/scripts/set_cuda_envs.sh b/.circleci/unittest/windows/scripts/set_cuda_envs.sh deleted file mode 100644 index 7db3137b594..00000000000 --- a/.circleci/unittest/windows/scripts/set_cuda_envs.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env bash -set -ex - -echo CU_VERSION is "${CU_VERSION}" -echo CUDA_VERSION is "${CUDA_VERSION}" - -# Currenly, CU_VERSION and CUDA_VERSION are not consistent. -# to understand this code, see https://github.com/pytorch/vision/issues/4443 -version="cpu" -if [[ ! -z "${CUDA_VERSION}" ]] ; then - version="$CUDA_VERSION" -else - if [[ ${#CU_VERSION} -eq 5 ]]; then - version="${CU_VERSION:2:2}.${CU_VERSION:4:1}" - fi -fi - -# Don't use if [[ "$version" == "cpu" ]]; then exit 0 fi. -# It would exit the shell. One result is cpu tests would not run if the shell exit. -# Unless there's an error, Don't exit. -if [[ "$version" != "cpu" ]]; then - # set cuda envs - export PATH="/c/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v${version}/bin:/c/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v${version}/libnvvp:$PATH" - export CUDA_PATH_V${version/./_}="C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v${version}" - export CUDA_PATH="C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v${version}" - - if [ ! -d "$CUDA_PATH" ]; then - echo "$CUDA_PATH" does not exist - exit 1 - fi - - if [ ! 
-f "${CUDA_PATH}\include\nvjpeg.h" ]; then - echo "nvjpeg does not exist" - exit 1 - fi - - # check cuda driver version - for path in '/c/Program Files/NVIDIA Corporation/NVSMI/nvidia-smi.exe' /c/Windows/System32/nvidia-smi.exe; do - if [[ -x "$path" ]]; then - "$path" || echo "true"; - break - fi - done - - which nvcc - nvcc --version - env | grep CUDA -fi diff --git a/.circleci/unittest/windows/scripts/setup_env.sh b/.circleci/unittest/windows/scripts/setup_env.sh deleted file mode 100644 index 84697875979..00000000000 --- a/.circleci/unittest/windows/scripts/setup_env.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env bash - -# This script is for setting up environment in which unit test is ran. -# To speed up the CI time, the resulting environment is cached. -# -# Do not install PyTorch and torchvision here, otherwise they also get cached. - -set -ex - -this_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -root_dir="$(git rev-parse --show-toplevel)" -conda_dir="${root_dir}/conda" -env_dir="${root_dir}/env" - -cd "${root_dir}" - -# 1. Install conda at ./conda -if [ ! -d "${conda_dir}" ]; then - printf "* Installing conda\n" - export tmp_conda="$(echo $conda_dir | tr '/' '\\')" - export miniconda_exe="$(echo $root_dir | tr '/' '\\')\\miniconda.exe" - curl --output miniconda.exe https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe -O - "$this_dir/install_conda.bat" - unset tmp_conda - unset miniconda_exe -fi - -eval "$(${conda_dir}/Scripts/conda.exe 'shell.bash' 'hook')" - -# 2. Create test environment at ./env -if [ ! -d "${env_dir}" ]; then - printf "* Creating a test environment\n" - conda create --prefix "${env_dir}" -y python="$PYTHON_VERSION" -fi -conda activate "${env_dir}" - -# 3. Install Conda dependencies -printf "* Installing dependencies (except PyTorch)\n" -conda env update --file "${this_dir}/environment.yml" --prune - -# 4. Downgrade setuptools on Python 3.7. -# See https://github.com/pytorch/vision/pull/5868 -if [[ "${PYTHON_VERSION}" == '3.7' ]]; then - pip install --upgrade setuptools==58.0.4 -fi diff --git a/.circleci/unittest/windows/scripts/vc_env_helper.bat b/.circleci/unittest/windows/scripts/vc_env_helper.bat deleted file mode 100644 index 9410135677a..00000000000 --- a/.circleci/unittest/windows/scripts/vc_env_helper.bat +++ /dev/null @@ -1,39 +0,0 @@ -@echo on - -set VC_VERSION_LOWER=16 -set VC_VERSION_UPPER=17 - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [%VC_VERSION_LOWER%^,%VC_VERSION_UPPER%^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VS15INSTALLDIR=%%i" - set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" - goto vswhere - ) -) - -:vswhere -if "%VSDEVCMD_ARGS%" == "" ( - call "%VS15VCVARSALL%" x64 || exit /b 1 -) else ( - call "%VS15VCVARSALL%" x64 %VSDEVCMD_ARGS% || exit /b 1 -) - -@echo on - -set DISTUTILS_USE_SDK=1 - -set args=%1 -shift -:start -if [%1] == [] goto done -set args=%args% %1 -shift -goto start - -:done -if "%args%" == "" ( - echo Usage: vc_env_helper.bat [command] [args] - echo e.g. 
vc_env_helper.bat cl /c test.cpp -) - -%args% || exit /b 1 From 77e28955370907410473a05c6ad531cca3297af3 Mon Sep 17 00:00:00 2001 From: Philip Meier Date: Sat, 20 May 2023 23:28:11 +0200 Subject: [PATCH 2/5] purge most CircleCI mentions --- .gitattributes | 3 - .github/scripts/run-clang-format.py | 331 ++++++++++++++++++++++++++++ .github/workflows/lint.yml | 2 +- 3 files changed, 332 insertions(+), 4 deletions(-) create mode 100755 .github/scripts/run-clang-format.py diff --git a/.gitattributes b/.gitattributes index f9d672d7fb5..22d0452f8d7 100644 --- a/.gitattributes +++ b/.gitattributes @@ -6,6 +6,3 @@ # To ignore it use below *.ipynb linguist-documentation - -# To exclude autogenerated files from code reviews -.circleci/config.yml linguist-generated=true diff --git a/.github/scripts/run-clang-format.py b/.github/scripts/run-clang-format.py new file mode 100755 index 00000000000..5c61b2519e0 --- /dev/null +++ b/.github/scripts/run-clang-format.py @@ -0,0 +1,331 @@ +#!/usr/bin/env python +""" +MIT License + +Copyright (c) 2017 Guillaume Papin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +A wrapper script around clang-format, suitable for linting multiple files +and to use for continuous integration. + +This is an alternative API for the clang-format command line. +It runs over multiple files and directories in parallel. +A diff output is produced and a sensible exit code is returned. + +""" + +import argparse +import difflib +import fnmatch +import multiprocessing +import os +import signal +import subprocess +import sys +import traceback +from functools import partial + +try: + from subprocess import DEVNULL # py3k +except ImportError: + DEVNULL = open(os.devnull, "wb") + + +DEFAULT_EXTENSIONS = "c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx,cu" + + +class ExitStatus: + SUCCESS = 0 + DIFF = 1 + TROUBLE = 2 + + +def list_files(files, recursive=False, extensions=None, exclude=None): + if extensions is None: + extensions = [] + if exclude is None: + exclude = [] + + out = [] + for file in files: + if recursive and os.path.isdir(file): + for dirpath, dnames, fnames in os.walk(file): + fpaths = [os.path.join(dirpath, fname) for fname in fnames] + for pattern in exclude: + # os.walk() supports trimming down the dnames list + # by modifying it in-place, + # to avoid unnecessary directory listings. 
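# Illustration, not part of the patch: with this move the wrapper lives under
# .github/scripts/ rather than .circleci/, and the one-line change to
# .github/workflows/lint.yml in the diffstat above presumably just updates that
# path. A local call mirroring the relocated, executable script (the target
# directory is an assumption):
./.github/scripts/run-clang-format.py -r torchvision/csrc \
    || echo "clang-format reported differences or failed to run"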
+ dnames[:] = [x for x in dnames if not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)] + fpaths = [x for x in fpaths if not fnmatch.fnmatch(x, pattern)] + for f in fpaths: + ext = os.path.splitext(f)[1][1:] + if ext in extensions: + out.append(f) + else: + out.append(file) + return out + + +def make_diff(file, original, reformatted): + return list( + difflib.unified_diff( + original, reformatted, fromfile=f"{file}\t(original)", tofile=f"{file}\t(reformatted)", n=3 + ) + ) + + +class DiffError(Exception): + def __init__(self, message, errs=None): + super().__init__(message) + self.errs = errs or [] + + +class UnexpectedError(Exception): + def __init__(self, message, exc=None): + super().__init__(message) + self.formatted_traceback = traceback.format_exc() + self.exc = exc + + +def run_clang_format_diff_wrapper(args, file): + try: + ret = run_clang_format_diff(args, file) + return ret + except DiffError: + raise + except Exception as e: + raise UnexpectedError(f"{file}: {e.__class__.__name__}: {e}", e) + + +def run_clang_format_diff(args, file): + try: + with open(file, encoding="utf-8") as f: + original = f.readlines() + except OSError as exc: + raise DiffError(str(exc)) + invocation = [args.clang_format_executable, file] + + # Use of utf-8 to decode the process output. + # + # Hopefully, this is the correct thing to do. + # + # It's done due to the following assumptions (which may be incorrect): + # - clang-format will returns the bytes read from the files as-is, + # without conversion, and it is already assumed that the files use utf-8. + # - if the diagnostics were internationalized, they would use utf-8: + # > Adding Translations to Clang + # > + # > Not possible yet! + # > Diagnostic strings should be written in UTF-8, + # > the client can translate to the relevant code page if needed. + # > Each translation completely replaces the format string + # > for the diagnostic. 
+ # > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation + + try: + proc = subprocess.Popen( + invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, encoding="utf-8" + ) + except OSError as exc: + raise DiffError(f"Command '{subprocess.list2cmdline(invocation)}' failed to start: {exc}") + proc_stdout = proc.stdout + proc_stderr = proc.stderr + + # hopefully the stderr pipe won't get full and block the process + outs = list(proc_stdout.readlines()) + errs = list(proc_stderr.readlines()) + proc.wait() + if proc.returncode: + raise DiffError( + "Command '{}' returned non-zero exit status {}".format( + subprocess.list2cmdline(invocation), proc.returncode + ), + errs, + ) + return make_diff(file, original, outs), errs + + +def bold_red(s): + return "\x1b[1m\x1b[31m" + s + "\x1b[0m" + + +def colorize(diff_lines): + def bold(s): + return "\x1b[1m" + s + "\x1b[0m" + + def cyan(s): + return "\x1b[36m" + s + "\x1b[0m" + + def green(s): + return "\x1b[32m" + s + "\x1b[0m" + + def red(s): + return "\x1b[31m" + s + "\x1b[0m" + + for line in diff_lines: + if line[:4] in ["--- ", "+++ "]: + yield bold(line) + elif line.startswith("@@ "): + yield cyan(line) + elif line.startswith("+"): + yield green(line) + elif line.startswith("-"): + yield red(line) + else: + yield line + + +def print_diff(diff_lines, use_color): + if use_color: + diff_lines = colorize(diff_lines) + sys.stdout.writelines(diff_lines) + + +def print_trouble(prog, message, use_colors): + error_text = "error:" + if use_colors: + error_text = bold_red(error_text) + print(f"{prog}: {error_text} {message}", file=sys.stderr) + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--clang-format-executable", + metavar="EXECUTABLE", + help="path to the clang-format executable", + default="clang-format", + ) + parser.add_argument( + "--extensions", + help=f"comma separated list of file extensions (default: {DEFAULT_EXTENSIONS})", + default=DEFAULT_EXTENSIONS, + ) + parser.add_argument("-r", "--recursive", action="store_true", help="run recursively over directories") + parser.add_argument("files", metavar="file", nargs="+") + parser.add_argument("-q", "--quiet", action="store_true") + parser.add_argument( + "-j", + metavar="N", + type=int, + default=0, + help="run N clang-format jobs in parallel (default number of cpus + 1)", + ) + parser.add_argument( + "--color", default="auto", choices=["auto", "always", "never"], help="show colored diff (default: auto)" + ) + parser.add_argument( + "-e", + "--exclude", + metavar="PATTERN", + action="append", + default=[], + help="exclude paths matching the given glob-like pattern(s) from recursive search", + ) + + args = parser.parse_args() + + # use default signal handling, like diff return SIGINT value on ^C + # https://bugs.python.org/issue14229#msg156446 + signal.signal(signal.SIGINT, signal.SIG_DFL) + try: + signal.SIGPIPE + except AttributeError: + # compatibility, SIGPIPE does not exist on Windows + pass + else: + signal.signal(signal.SIGPIPE, signal.SIG_DFL) + + colored_stdout = False + colored_stderr = False + if args.color == "always": + colored_stdout = True + colored_stderr = True + elif args.color == "auto": + colored_stdout = sys.stdout.isatty() + colored_stderr = sys.stderr.isatty() + + version_invocation = [args.clang_format_executable, "--version"] + try: + subprocess.check_call(version_invocation, stdout=DEVNULL) + except subprocess.CalledProcessError as e: + print_trouble(parser.prog, str(e), 
use_colors=colored_stderr) + return ExitStatus.TROUBLE + except OSError as e: + print_trouble( + parser.prog, + f"Command '{subprocess.list2cmdline(version_invocation)}' failed to start: {e}", + use_colors=colored_stderr, + ) + return ExitStatus.TROUBLE + + retcode = ExitStatus.SUCCESS + files = list_files( + args.files, recursive=args.recursive, exclude=args.exclude, extensions=args.extensions.split(",") + ) + + if not files: + return + + njobs = args.j + if njobs == 0: + njobs = multiprocessing.cpu_count() + 1 + njobs = min(len(files), njobs) + + if njobs == 1: + # execute directly instead of in a pool, + # less overhead, simpler stacktraces + it = (run_clang_format_diff_wrapper(args, file) for file in files) + pool = None + else: + pool = multiprocessing.Pool(njobs) + it = pool.imap_unordered(partial(run_clang_format_diff_wrapper, args), files) + while True: + try: + outs, errs = next(it) + except StopIteration: + break + except DiffError as e: + print_trouble(parser.prog, str(e), use_colors=colored_stderr) + retcode = ExitStatus.TROUBLE + sys.stderr.writelines(e.errs) + except UnexpectedError as e: + print_trouble(parser.prog, str(e), use_colors=colored_stderr) + sys.stderr.write(e.formatted_traceback) + retcode = ExitStatus.TROUBLE + # stop at the first unexpected error, + # something could be very wrong, + # don't process all files unnecessarily + if pool: + pool.terminate() + break + else: + sys.stderr.writelines(errs) + if outs == []: + continue + if not args.quiet: + print_diff(outs, use_color=colored_stdout) + if retcode == ExitStatus.SUCCESS: + retcode = ExitStatus.DIFF + return retcode + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index ec8d285c9f3..8203bb61e4f 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -61,7 +61,7 @@ jobs: echo '::group::Lint C source' set +e - ./.circleci/unittest/linux/scripts/run-clang-format.py -r torchvision/csrc --clang-format-executable ./clang-format + ./.github/scripts/run-clang-format.py -r torchvision/csrc --clang-format-executable ./clang-format if [ $? 
-ne 0 ]; then git --no-pager diff From dcba18896f81955876717252924bedb638c9758e Mon Sep 17 00:00:00 2001 From: Philip Meier Date: Sat, 20 May 2023 23:33:55 +0200 Subject: [PATCH 3/5] cleanup packaging dir --- packaging/README.md | 6 - packaging/build_cmake.sh | 129 ------------------ packaging/build_conda.sh | 16 --- packaging/build_wheel.sh | 60 -------- packaging/vs2017/activate.bat | 44 ------ packaging/vs2017/conda_build_config.yaml | 23 ---- packaging/vs2017/install_activate.bat | 29 ---- packaging/vs2017/install_runtime.bat | 49 ------- packaging/vs2017/meta.yaml | 24 ---- packaging/vs2019/activate.bat | 44 ------ packaging/vs2019/conda_build_config.yaml | 23 ---- packaging/vs2019/install_activate.bat | 29 ---- packaging/vs2019/install_runtime.bat | 49 ------- packaging/vs2019/meta.yaml | 24 ---- packaging/windows/internal/driver_update.bat | 25 ---- .../windows/internal/vc_install_helper.sh | 6 - packaging/windows/internal/vs2017_install.ps1 | 25 ---- packaging/windows/internal/vs2019_install.ps1 | 21 --- 18 files changed, 626 deletions(-) delete mode 100644 packaging/README.md delete mode 100755 packaging/build_cmake.sh delete mode 100755 packaging/build_conda.sh delete mode 100755 packaging/build_wheel.sh delete mode 100644 packaging/vs2017/activate.bat delete mode 100644 packaging/vs2017/conda_build_config.yaml delete mode 100644 packaging/vs2017/install_activate.bat delete mode 100644 packaging/vs2017/install_runtime.bat delete mode 100644 packaging/vs2017/meta.yaml delete mode 100644 packaging/vs2019/activate.bat delete mode 100644 packaging/vs2019/conda_build_config.yaml delete mode 100644 packaging/vs2019/install_activate.bat delete mode 100644 packaging/vs2019/install_runtime.bat delete mode 100644 packaging/vs2019/meta.yaml delete mode 100644 packaging/windows/internal/driver_update.bat delete mode 100644 packaging/windows/internal/vc_install_helper.sh delete mode 100644 packaging/windows/internal/vs2017_install.ps1 delete mode 100644 packaging/windows/internal/vs2019_install.ps1 diff --git a/packaging/README.md b/packaging/README.md deleted file mode 100644 index 3ceac53030e..00000000000 --- a/packaging/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Building torchvision packages for release - -TorchVision release packages are built by using `build_wheel.sh` and `build_conda.sh` for all permutations of -supported operating systems, compute platforms and python versions. - -OS/Python/Compute matrix is defined in https://github.com/pytorch/vision/blob/main/.circleci/regenerate.py diff --git a/packaging/build_cmake.sh b/packaging/build_cmake.sh deleted file mode 100755 index 99d98c67f1a..00000000000 --- a/packaging/build_cmake.sh +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/bash -set -ex - -PARALLELISM=8 -if [ -n "$MAX_JOBS" ]; then - PARALLELISM=$MAX_JOBS -fi - -if [[ "$(uname)" != Darwin && "$OSTYPE" != "msys" ]]; then - eval "$(./conda/bin/conda shell.bash hook)" - conda activate ./env -fi - -script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -. 
"$script_dir/pkg_helpers.bash" - -export BUILD_TYPE=conda -setup_env -export SOURCE_ROOT_DIR="$PWD" -setup_conda_pytorch_constraint -setup_conda_cudatoolkit_plain_constraint - -if [[ "$OSTYPE" == "msys" ]]; then - conda install -yq conda-build cmake future - pip install dataclasses -fi - -setup_visual_studio_constraint -setup_junit_results_folder - -if [[ "$(uname)" == Darwin ]]; then - # TODO: this can be removed as soon as mkl's CMake support works with clang - # see https://github.com/pytorch/vision/pull/4203 for details - MKL_CONSTRAINT='mkl==2021.2.0' -else - MKL_CONSTRAINT='' -fi - -if [[ $CONDA_BUILD_VARIANT == "cpu" ]]; then - PYTORCH_MUTEX_CONSTRAINT='pytorch-mutex=1.0=cpu' -else - PYTORCH_MUTEX_CONSTRAINT='' -fi - -conda install -yq \pytorch=$PYTORCH_VERSION $CONDA_CUDATOOLKIT_CONSTRAINT $PYTORCH_MUTEX_CONSTRAINT $MKL_CONSTRAINT numpy -c nvidia -c "pytorch-${UPLOAD_CHANNEL}" -TORCH_PATH=$(dirname $(python -c "import torch; print(torch.__file__)")) - -if [[ "$(uname)" == Darwin || "$OSTYPE" == "msys" ]]; then - conda install -yq libpng jpeg -else - yum install -y libpng-devel libjpeg-turbo-devel -fi - -if [[ "$OSTYPE" == "msys" ]]; then - source .circleci/unittest/windows/scripts/set_cuda_envs.sh -fi - -mkdir cpp_build -pushd cpp_build - -# Generate libtorchvision files -cmake .. -DTorch_DIR=$TORCH_PATH/share/cmake/Torch -DWITH_CUDA=$CMAKE_USE_CUDA - -# Compile and install libtorchvision -if [[ "$OSTYPE" == "msys" ]]; then - "$script_dir/windows/internal/vc_env_helper.bat" "$script_dir/windows/internal/build_cmake.bat" $PARALLELISM - CONDA_PATH=$(dirname $(which python)) - cp -r "C:/Program Files (x86)/torchvision/include/torchvision" $CONDA_PATH/include -else - make -j$PARALLELISM - make install - - if [[ "$(uname)" == Darwin ]]; then - CONDA_PATH=$(dirname $(dirname $(which python))) - cp -r /usr/local/include/torchvision $CONDA_PATH/include/ - export C_INCLUDE_PATH=/usr/local/include - export CPLUS_INCLUDE_PATH=/usr/local/include - fi -fi - -popd - -# Install torchvision locally -python setup.py develop - -# Trace, compile and run project that uses Faster-RCNN -pushd test/tracing/frcnn -mkdir build - -# Trace model -python trace_model.py -cp fasterrcnn_resnet50_fpn.pt build - -cd build -cmake .. -DTorch_DIR=$TORCH_PATH/share/cmake/Torch -DWITH_CUDA=$CMAKE_USE_CUDA -if [[ "$OSTYPE" == "msys" ]]; then - "$script_dir/windows/internal/vc_env_helper.bat" "$script_dir/windows/internal/build_frcnn.bat" $PARALLELISM - mv fasterrcnn_resnet50_fpn.pt Release - cd Release - export PATH=$(cygpath -w "C:/Program Files/NVIDIA Corporation/NvToolsExt/bin/x64"):$(cygpath -w "C:/Program Files (x86)/torchvision/bin"):$(cygpath -w $TORCH_PATH)/lib:$PATH -else - make -j$PARALLELISM -fi - -# Run traced program -./test_frcnn_tracing - -# Compile and run the CPP example -popd -cd examples/cpp/hello_world -mkdir build - -# Trace model -python trace_model.py -cp resnet18.pt build - -cd build -cmake .. -DTorch_DIR=$TORCH_PATH/share/cmake/Torch - -if [[ "$OSTYPE" == "msys" ]]; then - "$script_dir/windows/internal/vc_env_helper.bat" "$script_dir/windows/internal/build_cpp_example.bat" $PARALLELISM - mv resnet18.pt Release - cd Release -else - make -j$PARALLELISM -fi - -# Run CPP example -./hello-world diff --git a/packaging/build_conda.sh b/packaging/build_conda.sh deleted file mode 100755 index ec171f82729..00000000000 --- a/packaging/build_conda.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -set -ex - -script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -. 
"$script_dir/pkg_helpers.bash" - -export BUILD_TYPE=conda -setup_env -export SOURCE_ROOT_DIR="$PWD" -setup_conda_pytorch_constraint -setup_conda_cudatoolkit_constraint -setup_visual_studio_constraint -setup_junit_results_folder -export CUDATOOLKIT_CHANNEL="nvidia" - -conda build -c $CUDATOOLKIT_CHANNEL $CONDA_CHANNEL_FLAGS --no-anaconda-upload --no-test --python "$PYTHON_VERSION" packaging/torchvision diff --git a/packaging/build_wheel.sh b/packaging/build_wheel.sh deleted file mode 100755 index 3299d16ec92..00000000000 --- a/packaging/build_wheel.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -set -ex - -script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -. "$script_dir/pkg_helpers.bash" - -export BUILD_TYPE=wheel -setup_env -setup_wheel_python -pip_install numpy pyyaml future ninja -pip_install --upgrade setuptools -setup_pip_pytorch_version -python setup.py clean - -# Copy binaries to be included in the wheel distribution -if [[ "$(uname)" == Darwin || "$OSTYPE" == "msys" ]]; then - python_exec="$(which python)" - bin_path=$(dirname $python_exec) - env_path=$(dirname $bin_path) - if [[ "$(uname)" == Darwin ]]; then - # Install delocate to relocate the required binaries - pip_install "delocate>=0.9" - else - cp "$bin_path/Library/bin/libpng16.dll" torchvision - cp "$bin_path/Library/bin/libjpeg.dll" torchvision - fi -else - # Install auditwheel to get some inspection utilities - pip_install auditwheel - - # Point to custom libraries - export LD_LIBRARY_PATH=$(pwd)/ext_libraries/lib:$LD_LIBRARY_PATH - export TORCHVISION_INCLUDE=$(pwd)/ext_libraries/include - export TORCHVISION_LIBRARY=$(pwd)/ext_libraries/lib -fi - -download_copy_ffmpeg - -if [[ "$OSTYPE" == "msys" ]]; then - IS_WHEEL=1 "$script_dir/windows/internal/vc_env_helper.bat" python setup.py bdist_wheel -else - IS_WHEEL=1 python setup.py bdist_wheel -fi - - -if [[ "$(uname)" == Darwin ]]; then - pushd dist/ - python_exec="$(which python)" - bin_path=$(dirname $python_exec) - env_path=$(dirname $bin_path) - for whl in *.whl; do - DYLD_FALLBACK_LIBRARY_PATH="$env_path/lib/:$DYLD_FALLBACK_LIBRARY_PATH" delocate-wheel -v --ignore-missing-dependencies $whl - done -else - if [[ "$OSTYPE" == "msys" ]]; then - "$script_dir/windows/internal/vc_env_helper.bat" python $script_dir/wheel/relocate.py - else - LD_LIBRARY_PATH="/usr/local/lib:$CUDA_HOME/lib64:$LD_LIBRARY_PATH" python $script_dir/wheel/relocate.py - fi -fi diff --git a/packaging/vs2017/activate.bat b/packaging/vs2017/activate.bat deleted file mode 100644 index ccecfc25442..00000000000 --- a/packaging/vs2017/activate.bat +++ /dev/null @@ -1,44 +0,0 @@ -:: Set env vars that tell distutils to use the compiler that we put on path -SET DISTUTILS_USE_SDK=1 -SET MSSdk=1 - -SET "VS_VERSION=15.0" -SET "VS_MAJOR=15" -SET "VS_YEAR=2017" - -set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out" -set "MSYS2_ENV_CONV_EXCL=CL" - -:: For Python 3.5+, ensure that we link with the dynamic runtime. See -:: http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info -set "PY_VCRUNTIME_REDIST=%PREFIX%\\bin\\vcruntime140.dll" - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VSINSTALLDIR=%%i\" - goto :vswhere - ) -) - -:vswhere - -:: Shorten PATH to avoid the `input line too long` error. 
-SET MyPath=%PATH% - -setlocal EnableDelayedExpansion - -SET TempPath="%MyPath:;=";"%" -SET var= -FOR %%a IN (%TempPath%) DO ( - IF EXIST %%~sa ( - SET "var=!var!;%%~sa" - ) -) - -set "TempPath=!var:~1!" -endlocal & set "PATH=%TempPath%" - -:: Shorten current directory too -FOR %%A IN (.) DO CD "%%~sA" - -:: other things added by install_activate.bat at package build time diff --git a/packaging/vs2017/conda_build_config.yaml b/packaging/vs2017/conda_build_config.yaml deleted file mode 100644 index 781814fd00e..00000000000 --- a/packaging/vs2017/conda_build_config.yaml +++ /dev/null @@ -1,23 +0,0 @@ -blas_impl: - - mkl # [x86_64] -c_compiler: - - vs2017 # [win] -cxx_compiler: - - vs2017 # [win] -python: - - 3.8 -# This differs from target_platform in that it determines what subdir the compiler -# will target, not what subdir the compiler package will be itself. -# For example, we need a win-64 vs2008_win-32 package, so that we compile win-32 -# code on win-64 miniconda. -cross_compiler_target_platform: - - win-64 # [win] -target_platform: - - win-64 # [win] -vc: - - 14 -zip_keys: - - # [win] - - vc # [win] - - c_compiler # [win] - - cxx_compiler # [win] diff --git a/packaging/vs2017/install_activate.bat b/packaging/vs2017/install_activate.bat deleted file mode 100644 index 253d2f2c2c1..00000000000 --- a/packaging/vs2017/install_activate.bat +++ /dev/null @@ -1,29 +0,0 @@ -set YEAR=2017 -set VER=15 - -mkdir "%PREFIX%\etc\conda\activate.d" -COPY "%RECIPE_DIR%\activate.bat" "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - -IF "%cross_compiler_target_platform%" == "win-64" ( - set "target_platform=amd64" - echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR% Win64" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - IF "%VSDEVCMD_ARGS%" == "" ( - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) ELSE ( - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) else ( - set "target_platform=x86" - echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvars32.bat" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd - ) diff --git a/packaging/vs2017/install_runtime.bat b/packaging/vs2017/install_runtime.bat deleted file mode 100644 index 5163c16cf24..00000000000 --- a/packaging/vs2017/install_runtime.bat +++ /dev/null @@ -1,49 +0,0 @@ -set VC_PATH=x86 -if "%ARCH%"=="64" ( - set VC_PATH=x64 -) - -set MSC_VER=2017 - -rem :: This should always be present for VC installed with VS. 
Not sure about VC installed with Visual C++ Build Tools 2015 -rem FOR /F "usebackq tokens=3*" %%A IN (`REG QUERY "HKEY_LOCAL_MACHINE\Software\Microsoft\DevDiv\VC\Servicing\14.0\IDE.x64" /v UpdateVersion`) DO ( -rem set SP=%%A -rem ) - -rem if not "%SP%" == "%PKG_VERSION%" ( -rem echo "Version detected from registry: %SP%" -rem echo "does not match version of package being built (%PKG_VERSION%)" -rem echo "Do you have current updates for VS 2015 installed?" -rem exit 1 -rem ) - - -REM ========== REQUIRES Win 10 SDK be installed, or files otherwise copied to location below! -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%LIBRARY_BIN%" *.dll /E -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%PREFIX%" *.dll /E -if %ERRORLEVEL% GEQ 8 exit 1 - -REM ========== This one comes from visual studio 2017 -set "VC_VER=141" - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" - goto :eof - ) -) - -@setlocal -call "%VS15VARSALL%" x64 - -set "REDIST_ROOT=%VCToolsRedistDir%%VC_PATH%" - -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -@endlocal diff --git a/packaging/vs2017/meta.yaml b/packaging/vs2017/meta.yaml deleted file mode 100644 index 1f569525ee1..00000000000 --- a/packaging/vs2017/meta.yaml +++ /dev/null @@ -1,24 +0,0 @@ -{% set vcver="14.1" %} -{% set vcfeature="14" %} -{% set vsyear="2017" %} -{% set fullver="15.4.27004.2010" %} - -package: - name: vs{{ vsyear }} - version: {{ fullver }} - -build: - skip: True [not win] - script_env: - - VSDEVCMD_ARGS # [win] - -outputs: - - name: vs{{ vsyear }}_{{ cross_compiler_target_platform }} - script: install_activate.bat - track_features: - # VS 2017 is binary-compatible with VS 2015/vc14. Tools are "v141". - strong: - - vc{{ vcfeature }} - about: - summary: Activation and version verification of MSVC {{ vcver }} (VS {{ vsyear }}) compiler - license: BSD 3-clause diff --git a/packaging/vs2019/activate.bat b/packaging/vs2019/activate.bat deleted file mode 100644 index 6f607ba7518..00000000000 --- a/packaging/vs2019/activate.bat +++ /dev/null @@ -1,44 +0,0 @@ -:: Set env vars that tell distutils to use the compiler that we put on path -SET DISTUTILS_USE_SDK=1 -SET MSSdk=1 - -SET "VS_VERSION=16.0" -SET "VS_MAJOR=16" -SET "VS_YEAR=2019" - -set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out" -set "MSYS2_ENV_CONV_EXCL=CL" - -:: For Python 3.5+, ensure that we link with the dynamic runtime. See -:: http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info -set "PY_VCRUNTIME_REDIST=%PREFIX%\\bin\\vcruntime140.dll" - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [16^,17^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VSINSTALLDIR=%%i\" - goto :vswhere - ) -) - -:vswhere - -:: Shorten PATH to avoid the `input line too long` error. 
-SET MyPath=%PATH% - -setlocal EnableDelayedExpansion - -SET TempPath="%MyPath:;=";"%" -SET var= -FOR %%a IN (%TempPath%) DO ( - IF EXIST %%~sa ( - SET "var=!var!;%%~sa" - ) -) - -set "TempPath=!var:~1!" -endlocal & set "PATH=%TempPath%" - -:: Shorten current directory too -FOR %%A IN (.) DO CD "%%~sA" - -:: other things added by install_activate.bat at package build time diff --git a/packaging/vs2019/conda_build_config.yaml b/packaging/vs2019/conda_build_config.yaml deleted file mode 100644 index b4dc99341d0..00000000000 --- a/packaging/vs2019/conda_build_config.yaml +++ /dev/null @@ -1,23 +0,0 @@ -blas_impl: - - mkl # [x86_64] -c_compiler: - - vs2019 # [win] -cxx_compiler: - - vs2019 # [win] -python: - - 3.8 -# This differs from target_platform in that it determines what subdir the compiler -# will target, not what subdir the compiler package will be itself. -# For example, we need a win-64 vs2008_win-32 package, so that we compile win-32 -# code on win-64 miniconda. -cross_compiler_target_platform: - - win-64 # [win] -target_platform: - - win-64 # [win] -vc: - - 14 -zip_keys: - - # [win] - - vc # [win] - - c_compiler # [win] - - cxx_compiler # [win] diff --git a/packaging/vs2019/install_activate.bat b/packaging/vs2019/install_activate.bat deleted file mode 100644 index 9e60ccfd2dc..00000000000 --- a/packaging/vs2019/install_activate.bat +++ /dev/null @@ -1,29 +0,0 @@ -set YEAR=2019 -set VER=16 - -mkdir "%PREFIX%\etc\conda\activate.d" -COPY "%RECIPE_DIR%\activate.bat" "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - -IF "%cross_compiler_target_platform%" == "win-64" ( - set "target_platform=amd64" - echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR% Win64" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - IF "%VSDEVCMD_ARGS%" == "" ( - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) ELSE ( - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) else ( - set "target_platform=x86" - echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvars32.bat" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd - ) diff --git a/packaging/vs2019/install_runtime.bat b/packaging/vs2019/install_runtime.bat deleted file mode 100644 index e09a5ccfb0f..00000000000 --- a/packaging/vs2019/install_runtime.bat +++ /dev/null @@ -1,49 +0,0 @@ -set VC_PATH=x86 -if "%ARCH%"=="64" ( - set VC_PATH=x64 -) - -set MSC_VER=2019 - -rem :: This should always be present for VC installed with VS. 
Not sure about VC installed with Visual C++ Build Tools 2015 -rem FOR /F "usebackq tokens=3*" %%A IN (`REG QUERY "HKEY_LOCAL_MACHINE\Software\Microsoft\DevDiv\VC\Servicing\14.0\IDE.x64" /v UpdateVersion`) DO ( -rem set SP=%%A -rem ) - -rem if not "%SP%" == "%PKG_VERSION%" ( -rem echo "Version detected from registry: %SP%" -rem echo "does not match version of package being built (%PKG_VERSION%)" -rem echo "Do you have current updates for VS 2015 installed?" -rem exit 1 -rem ) - - -REM ========== REQUIRES Win 10 SDK be installed, or files otherwise copied to location below! -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%LIBRARY_BIN%" *.dll /E -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%PREFIX%" *.dll /E -if %ERRORLEVEL% GEQ 8 exit 1 - -REM ========== This one comes from visual studio 2019 -set "VC_VER=142" - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [16^,17^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" - goto :eof - ) -) - -@setlocal -call "%VS15VARSALL%" x64 - -set "REDIST_ROOT=%VCToolsRedistDir%%VC_PATH%" - -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -@endlocal diff --git a/packaging/vs2019/meta.yaml b/packaging/vs2019/meta.yaml deleted file mode 100644 index 94a0ed4db3e..00000000000 --- a/packaging/vs2019/meta.yaml +++ /dev/null @@ -1,24 +0,0 @@ -{% set vcver="14.2" %} -{% set vcfeature="14" %} -{% set vsyear="2019" %} -{% set fullver="15.4.27004.2010" %} - -package: - name: vs{{ vsyear }} - version: {{ fullver }} - -build: - skip: True [not win] - script_env: - - VSDEVCMD_ARGS # [win] - -outputs: - - name: vs{{ vsyear }}_{{ cross_compiler_target_platform }} - script: install_activate.bat - track_features: - # VS 2019 is binary-compatible with VS 2017/vc 14.1 and 2015/vc14. Tools are "v142". - strong: - - vc{{ vcfeature }} - about: - summary: Activation and version verification of MSVC {{ vcver }} (VS {{ vsyear }}) compiler - license: BSD 3-clause diff --git a/packaging/windows/internal/driver_update.bat b/packaging/windows/internal/driver_update.bat deleted file mode 100644 index 00b43affc01..00000000000 --- a/packaging/windows/internal/driver_update.bat +++ /dev/null @@ -1,25 +0,0 @@ -set "DRIVER_DOWNLOAD_LINK=https://ossci-windows.s3.amazonaws.com/461.09-data-center-tesla-desktop-winserver-2019-2016-international.exe" -curl --retry 3 -kL %DRIVER_DOWNLOAD_LINK% --output 461.09-data-center-tesla-desktop-winserver-2019-2016-international.exe -if errorlevel 1 exit /b 1 - -start /wait 461.09-data-center-tesla-desktop-winserver-2019-2016-international.exe -s -noreboot -if errorlevel 1 exit /b 1 - -del 461.09-data-center-tesla-desktop-winserver-2019-2016-international.exe || ver > NUL - -setlocal EnableDelayedExpansion -set NVIDIA_GPU_EXISTS=0 -for /F "delims=" %%i in ('wmic path win32_VideoController get name') do ( - set GPUS=%%i - if not "x!GPUS:NVIDIA=!" == "x!GPUS!" 
( - SET NVIDIA_GPU_EXISTS=1 - goto gpu_check_end - ) -) -:gpu_check_end -endlocal & set NVIDIA_GPU_EXISTS=%NVIDIA_GPU_EXISTS% - -if "%NVIDIA_GPU_EXISTS%" == "0" ( - echo "CUDA Driver installation Failed" - exit /b 1 -) diff --git a/packaging/windows/internal/vc_install_helper.sh b/packaging/windows/internal/vc_install_helper.sh deleted file mode 100644 index 251509ae194..00000000000 --- a/packaging/windows/internal/vc_install_helper.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -set -ex - -export VC_YEAR=2019 -export VSDEVCMD_ARGS="" diff --git a/packaging/windows/internal/vs2017_install.ps1 b/packaging/windows/internal/vs2017_install.ps1 deleted file mode 100644 index 3e953de1ab7..00000000000 --- a/packaging/windows/internal/vs2017_install.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -$VS_DOWNLOAD_LINK = "https://aka.ms/vs/15/release/vs_buildtools.exe" -$VS_INSTALL_ARGS = @("--nocache","--quiet","--wait", "--add Microsoft.VisualStudio.Workload.VCTools", - "--add Microsoft.VisualStudio.Component.VC.Tools.14.13", - "--add Microsoft.Component.MSBuild", - "--add Microsoft.VisualStudio.Component.Roslyn.Compiler", - "--add Microsoft.VisualStudio.Component.TextTemplating", - "--add Microsoft.VisualStudio.Component.VC.CoreIde", - "--add Microsoft.VisualStudio.Component.VC.Redist.14.Latest", - "--add Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core", - "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "--add Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Win81") - -curl.exe --retry 3 -kL $VS_DOWNLOAD_LINK --output vs_installer.exe -if ($LASTEXITCODE -ne 0) { - echo "Download of the VS 2017 installer failed" - exit 1 -} - -$process = Start-Process "${PWD}\vs_installer.exe" -ArgumentList $VS_INSTALL_ARGS -NoNewWindow -Wait -PassThru -Remove-Item -Path vs_installer.exe -Force -$exitCode = $process.ExitCode -if (($exitCode -ne 0) -and ($exitCode -ne 3010)) { - echo "VS 2017 installer exited with code $exitCode, which should be one of [0, 3010]." - exit 1 -} diff --git a/packaging/windows/internal/vs2019_install.ps1 b/packaging/windows/internal/vs2019_install.ps1 deleted file mode 100644 index e436051f0db..00000000000 --- a/packaging/windows/internal/vs2019_install.ps1 +++ /dev/null @@ -1,21 +0,0 @@ -$VS_DOWNLOAD_LINK = "https://aka.ms/vs/16/release/vs_buildtools.exe" -$VS_INSTALL_ARGS = @("--nocache","--quiet","--wait", "--add Microsoft.VisualStudio.Workload.VCTools", - "--add Microsoft.Component.MSBuild", - "--add Microsoft.VisualStudio.Component.Roslyn.Compiler", - "--add Microsoft.VisualStudio.Component.VC.CoreBuildTools", - "--add Microsoft.VisualStudio.Component.VC.Redist.14.Latest", - "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64") - -curl.exe --retry 3 -kL $VS_DOWNLOAD_LINK --output vs_installer.exe -if ($LASTEXITCODE -ne 0) { - echo "Download of the VS 2019 installer failed" - exit 1 -} - -$process = Start-Process "${PWD}\vs_installer.exe" -ArgumentList $VS_INSTALL_ARGS -NoNewWindow -Wait -PassThru -Remove-Item -Path vs_installer.exe -Force -$exitCode = $process.ExitCode -if (($exitCode -ne 0) -and ($exitCode -ne 3010)) { - echo "VS 2019 installer exited with code $exitCode, which should be one of [0, 3010]." 
- exit 1 -} From 7cd2c1229937d7bf937fb9aeff65e347d5589e5e Mon Sep 17 00:00:00 2001 From: Philip Meier Date: Sun, 21 May 2023 15:56:54 +0200 Subject: [PATCH 4/5] put back most of vs2019 folder --- packaging/vs2019/activate.bat | 44 ++++++++++++++++++++++++ packaging/vs2019/conda_build_config.yaml | 23 +++++++++++++ packaging/vs2019/install_activate.bat | 29 ++++++++++++++++ packaging/vs2019/meta.yaml | 24 +++++++++++++ 4 files changed, 120 insertions(+) create mode 100644 packaging/vs2019/activate.bat create mode 100644 packaging/vs2019/conda_build_config.yaml create mode 100644 packaging/vs2019/install_activate.bat create mode 100644 packaging/vs2019/meta.yaml diff --git a/packaging/vs2019/activate.bat b/packaging/vs2019/activate.bat new file mode 100644 index 00000000000..6f607ba7518 --- /dev/null +++ b/packaging/vs2019/activate.bat @@ -0,0 +1,44 @@ +:: Set env vars that tell distutils to use the compiler that we put on path +SET DISTUTILS_USE_SDK=1 +SET MSSdk=1 + +SET "VS_VERSION=16.0" +SET "VS_MAJOR=16" +SET "VS_YEAR=2019" + +set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out" +set "MSYS2_ENV_CONV_EXCL=CL" + +:: For Python 3.5+, ensure that we link with the dynamic runtime. See +:: http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info +set "PY_VCRUNTIME_REDIST=%PREFIX%\\bin\\vcruntime140.dll" + +for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [16^,17^) -property installationPath`) do ( + if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( + set "VSINSTALLDIR=%%i\" + goto :vswhere + ) +) + +:vswhere + +:: Shorten PATH to avoid the `input line too long` error. +SET MyPath=%PATH% + +setlocal EnableDelayedExpansion + +SET TempPath="%MyPath:;=";"%" +SET var= +FOR %%a IN (%TempPath%) DO ( + IF EXIST %%~sa ( + SET "var=!var!;%%~sa" + ) +) + +set "TempPath=!var:~1!" +endlocal & set "PATH=%TempPath%" + +:: Shorten current directory too +FOR %%A IN (.) DO CD "%%~sA" + +:: other things added by install_activate.bat at package build time diff --git a/packaging/vs2019/conda_build_config.yaml b/packaging/vs2019/conda_build_config.yaml new file mode 100644 index 00000000000..b4dc99341d0 --- /dev/null +++ b/packaging/vs2019/conda_build_config.yaml @@ -0,0 +1,23 @@ +blas_impl: + - mkl # [x86_64] +c_compiler: + - vs2019 # [win] +cxx_compiler: + - vs2019 # [win] +python: + - 3.8 +# This differs from target_platform in that it determines what subdir the compiler +# will target, not what subdir the compiler package will be itself. +# For example, we need a win-64 vs2008_win-32 package, so that we compile win-32 +# code on win-64 miniconda. 
+cross_compiler_target_platform: + - win-64 # [win] +target_platform: + - win-64 # [win] +vc: + - 14 +zip_keys: + - # [win] + - vc # [win] + - c_compiler # [win] + - cxx_compiler # [win] diff --git a/packaging/vs2019/install_activate.bat b/packaging/vs2019/install_activate.bat new file mode 100644 index 00000000000..9e60ccfd2dc --- /dev/null +++ b/packaging/vs2019/install_activate.bat @@ -0,0 +1,29 @@ +set YEAR=2019 +set VER=16 + +mkdir "%PREFIX%\etc\conda\activate.d" +COPY "%RECIPE_DIR%\activate.bat" "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + +IF "%cross_compiler_target_platform%" == "win-64" ( + set "target_platform=amd64" + echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR% Win64" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + IF "%VSDEVCMD_ARGS%" == "" ( + echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + ) ELSE ( + echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + ) + echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + ) else ( + set "target_platform=x86" + echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo CALL "VC\Auxiliary\Build\vcvars32.bat" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" + echo popd + ) diff --git a/packaging/vs2019/meta.yaml b/packaging/vs2019/meta.yaml new file mode 100644 index 00000000000..94a0ed4db3e --- /dev/null +++ b/packaging/vs2019/meta.yaml @@ -0,0 +1,24 @@ +{% set vcver="14.2" %} +{% set vcfeature="14" %} +{% set vsyear="2019" %} +{% set fullver="15.4.27004.2010" %} + +package: + name: vs{{ vsyear }} + version: {{ fullver }} + +build: + skip: True [not win] + script_env: + - VSDEVCMD_ARGS # [win] + +outputs: + - name: vs{{ vsyear }}_{{ cross_compiler_target_platform }} + script: install_activate.bat + track_features: + # VS 2019 is binary-compatible with VS 2017/vc 14.1 and 2015/vc14. Tools are "v142". 
+ strong: + - vc{{ vcfeature }} + about: + summary: Activation and version verification of MSVC {{ vcver }} (VS {{ vsyear }}) compiler + license: BSD 3-clause From a6b4126487284c8279ad0b2611a76d630eaea988 Mon Sep 17 00:00:00 2001 From: Philip Meier Date: Tue, 23 May 2023 15:44:42 +0200 Subject: [PATCH 5/5] put install helper back --- packaging/windows/internal/vc_install_helper.sh | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 packaging/windows/internal/vc_install_helper.sh diff --git a/packaging/windows/internal/vc_install_helper.sh b/packaging/windows/internal/vc_install_helper.sh new file mode 100644 index 00000000000..251509ae194 --- /dev/null +++ b/packaging/windows/internal/vc_install_helper.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -ex + +export VC_YEAR=2019 +export VSDEVCMD_ARGS=""
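
Note on verifying the relocated lint script: after this series, the C++ lint step depends only on .github/scripts/run-clang-format.py (the CircleCI copy is gone), invoked as shown in the .github/workflows/lint.yml hunk above. A minimal local sketch of that call follows; the CI job downloads its own pinned ./clang-format binary, so using a clang-format from PATH here is an assumption about the local setup, not what the workflow does:

    # sketch: run the relocated formatter check locally
    # assumes a clang-format binary is already on PATH (the CI job uses ./clang-format instead)
    ./.github/scripts/run-clang-format.py -r torchvision/csrc --clang-format-executable clang-format

The script exits 0 when no reformatting is needed, 1 when a diff is produced, and 2 on trouble, matching the ExitStatus values defined in the file added by patch 2/5.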