Subgroup load store cleanup #4897
name: Conda package
on:
  push:
    branches:
      - master
  pull_request:
permissions: read-all
env:
  PACKAGE_NAME: dpctl
  MODULE_NAME: dpctl
  TEST_ENV_NAME: test_dpctl
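  # VER_SCRIPT1 + VER_SCRIPT2 parse ver.json (written later by "conda search --json") and print the built package as "<version>=<build>"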
  VER_SCRIPT1: "import json; f = open('ver.json', 'r'); j = json.load(f); f.close(); "
  VER_SCRIPT2: "d = j['dpctl'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
  INTEL_CHANNEL: "https://software.repos.intel.com/python/conda/"
jobs:
  build_linux:
    runs-on: ubuntu-22.04
    timeout-minutes: 90
    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11', '3.12']
    steps:
      - uses: actions/checkout@v4.2.1
        with:
          fetch-depth: 0
      - name: Set pkgs_dirs
        run: |
          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 3 # Increase to reset cache
        with:
          path: ~/.conda/pkgs
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('**/meta.yaml') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Add conda to system path
        run: echo $CONDA/bin >> $GITHUB_PATH
      - name: Install conda-build
        run: conda install conda-build -c conda-forge --override-channels
      - name: Store conda paths as envs
        shell: bash -l {0}
        run: |
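          # Path (with trailing separator) where the recipe is expected to place built wheels; consumed by the "Upload wheels artifact" step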
echo "WHEELS_OUTPUT_FOLDER=$GITHUB_WORKSPACE${{ runner.os == 'Linux' && '/' || '\\' }}" >> $GITHUB_ENV | |
- name: Build conda package | |
run: | | |
# use bootstrap channel to pull NumPy linked with OpenBLAS | |
CHANNELS="-c conda-forge --override-channels" | |
VERSIONS="--python ${{ matrix.python }} --numpy 2.0" | |
TEST="--no-test" | |
conda build \ | |
$TEST \ | |
$VERSIONS \ | |
$CHANNELS \ | |
conda-recipe | |
- name: Upload artifact | |
uses: actions/upload-artifact@v4.4.3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
path: /usr/share/miniconda/conda-bld/linux-64/${{ env.PACKAGE_NAME }}-*.tar.bz2 | |
- name: Upload wheels artifact | |
uses: actions/upload-artifact@v4.4.3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }} | |
path: ${{ env.WHEELS_OUTPUT_FOLDER }}${{ env.PACKAGE_NAME }}-*.whl | |
build_windows: | |
runs-on: windows-2019 | |
timeout-minutes: 150 | |
strategy: | |
matrix: | |
python: ['3.9', '3.10', '3.11', '3.12'] | |
steps: | |
- uses: actions/checkout@v4.2.1 | |
with: | |
fetch-depth: 0 | |
- uses: conda-incubator/setup-miniconda@v3 | |
with: | |
miniforge-variant: Miniforge3 | |
miniforge-version: latest | |
activate-environment: build | |
channels: conda-forge | |
python-version: ${{ matrix.python }} | |
- name: Install conda build | |
run: | | |
conda activate | |
conda install -y conda-build | |
conda list -n base | |
- name: Cache conda packages | |
uses: actions/cache@v4 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: /home/runner/conda_pkgs_dir | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Store conda paths as envs | |
shell: bash -l {0} | |
run: | | |
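          # Normalize backslashes to forward slashes so the conda-bld path can be reused in later steps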
echo "CONDA_BLD=$CONDA/conda-bld/win-64/" | tr "\\\\" '/' >> $GITHUB_ENV | |
echo "WHEELS_OUTPUT_FOLDER=$GITHUB_WORKSPACE${{ runner.os == 'Linux' && '/' || '\\' }}" >> $GITHUB_ENV | |
- name: Build conda package | |
env: | |
          OVERRIDE_INTEL_IPO: 1 # IPO requires more resources than the GH Actions VM provides
        run: |
          conda activate
          conda build --no-test --python ${{ matrix.python }} --numpy 2 -c conda-forge --override-channels conda-recipe
      - name: Upload artifact
        uses: actions/upload-artifact@v4.4.3
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2
      - name: Upload wheels artifact
        uses: actions/upload-artifact@v4.4.3
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }}
          path: ${{ env.WHEELS_OUTPUT_FOLDER }}${{ env.PACKAGE_NAME }}-*.whl
  test_linux:
    needs: build_linux
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11', '3.12']
        experimental: [false]
        runner: [ubuntu-22.04]
    continue-on-error: ${{ matrix.experimental }}
    steps:
      - name: Construct channels line
        run: |
          echo "CHANNELS=-c ${{ env.INTEL_CHANNEL }} -c conda-forge --override-channels" >> $GITHUB_ENV
      - name: Display channels line
        run: |
          echo ${{ env.CHANNELS }}
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - name: Add conda to system path
        run: echo $CONDA/bin >> $GITHUB_PATH
      - name: Install conda-index
        # Needed to be able to run conda index
        run: conda install conda-index -c conda-forge --override-channels
      - name: Create conda channel
        run: |
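          # Index an empty channel first, move the built package into linux-64, then re-index and verify the package is searchable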
          mkdir -p $GITHUB_WORKSPACE/channel/linux-64
          conda index $GITHUB_WORKSPACE/channel || exit 1
          mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64 || exit 1
          conda index $GITHUB_WORKSPACE/channel || exit 1
          # Test channel
          conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels --info --json > $GITHUB_WORKSPACE/ver.json
          cat ver.json
      - name: Collect dependencies
        run: |
          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
          conda create -n ${{ env.TEST_ENV_NAME }} $PACKAGE_NAME=${PACKAGE_VERSION} python=${{ matrix.python }} $CHANNELS --only-deps --dry-run > lockfile
          cat lockfile
      - name: Set pkgs_dirs
        run: |
          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 3 # Increase to reset cache
        with:
          path: ~/.conda/pkgs
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Install dpctl
        run: |
          export CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
          export TEST_DEPENDENCIES="pytest pytest-cov cython setuptools"
          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
          conda create -n ${{ env.TEST_ENV_NAME }} $PACKAGE_NAME=${PACKAGE_VERSION} ${TEST_DEPENDENCIES} python=${{ matrix.python }} ${CHANNELS}
          # Test installed packages
          conda list -n ${{ env.TEST_ENV_NAME }}
      - name: Smoke test
        run: |
          . $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.TEST_ENV_NAME }}
          python -c "import dpctl; dpctl.lsplatform(verbosity=2)"
      - name: Install gdb
        run: |
          sudo apt-get update --fix-missing
          sudo apt-get install -y gdb
      - name: Run test_elementwise under gdb
        run: |
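          # Run a single test module under gdb so a crash produces a backtrace; "|| true" keeps this diagnostic step from failing the job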
          . $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.TEST_ENV_NAME }}
          gdb --batch -ex r -ex 'info sharedlibrary' -ex 'set print elements 1000' -ex bt --args ${CONDA_PREFIX}/bin/python -m pytest -q -ra --disable-warnings --pyargs dpctl.tests.elementwise.test_trigonometric::test_trig_order -vv || true
      - name: Create test temp dir
        # create temporary empty folder to run tests from
        # https://github.com/pytest-dev/pytest/issues/11904
        run: mkdir -p ${GITHUB_WORKSPACE}/test_tmp
      - name: Run tests
        working-directory: ${{ github.workspace }}/test_tmp
        env:
          SYCL_CACHE_PERSISTENT: 1
        run: |
          . $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.TEST_ENV_NAME }}
          python -m pytest -v --pyargs $MODULE_NAME
  test_windows:
    needs: build_windows
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 60
    defaults:
      run:
        shell: cmd /C CALL {0}
    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11', '3.12']
        experimental: [false]
        runner: [windows-2019]
    continue-on-error: ${{ matrix.experimental }}
    env:
      workdir: '${{ github.workspace }}'
    steps:
      - name: Construct channels line
        shell: pwsh
        run: |
          echo "CHANNELS=-c ${{ env.INTEL_CHANNEL }} -c conda-forge --override-channels" >> $env:GITHUB_ENV
      - name: Display channels line
        run: |
          echo ${{ env.CHANNELS }}
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-version: latest
          channels: conda-forge,nodefaults
          activate-environment: ${{ env.TEST_ENV_NAME }}
          python-version: ${{ matrix.python }}
      - name: Remove defaults channel
        run: conda config --remove channels defaults
      - name: Install conda-index
        run: |
          conda install -n base conda-index
      - name: Create conda channel with the artifact
        shell: cmd /C CALL {0}
        run: |
          @echo on
          echo ${{ env.workdir }}
          mkdir ${{ env.workdir }}\channel
          mkdir ${{ env.workdir }}\channel\win-64
          move ${{ env.PACKAGE_NAME }}-*.tar.bz2 ${{ env.workdir }}\channel\win-64
          dir ${{ env.workdir }}\channel\win-64\
      - name: Index the channel
        shell: cmd /C CALL {0}
        run: |
          @echo on
          conda index ${{ env.workdir }}\channel
      - name: List content of the channels
        shell: cmd /C CALL {0}
        run: |
          dir ${{ env.workdir }}\channel
          dir ${{ env.workdir }}\channel\win-64
      - name: Dump dpctl version info from created channel into ver.json
        shell: cmd /C CALL {0}
        run: |
          @echo on
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.workdir }}/channel --override-channels --info --json > ${{ env.workdir }}\ver.json
          dir ${{ env.workdir }}
      - name: Output content of produced ver.json
        shell: pwsh
        run: Get-Content -Path ${{ env.workdir }}\ver.json
      - name: Collect dependencies
        shell: cmd /C CALL {0}
        run: |
          IF NOT EXIST ver.json (
            copy /Y ${{ env.workdir }}\ver.json .
          )
          SET "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%"
FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( | |
SET PACKAGE_VERSION=%%F | |
) | |
conda install -n ${{ env.TEST_ENV_NAME }} ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% python=${{ matrix.python }} -c ${{ env.workdir }}/channel ${{ env.CHANNELS }} --only-deps --dry-run > lockfile | |
- name: Display lockfile content | |
shell: pwsh | |
run: Get-Content -Path .\lockfile | |
- name: Cache conda packages | |
uses: actions/cache@v4 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: /home/runner/conda_pkgs_dir | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Install opencl_rt | |
shell: cmd /C CALL {0} | |
run: conda install -n ${{ env.TEST_ENV_NAME }} opencl_rt -c ${{ env.INTEL_CHANNEL }} --override-channels | |
- name: Install dpctl | |
shell: cmd /C CALL {0} | |
run: | | |
@ECHO ON | |
IF NOT EXIST ver.json ( | |
copy /Y ${{ env.workdir }}\ver.json . | |
) | |
set "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%" | |
FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( | |
SET PACKAGE_VERSION=%%F | |
) | |
SET TEST_DEPENDENCIES=pytest"<8" pytest-cov cython setuptools | |
conda install -n ${{ env.TEST_ENV_NAME }} ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% %TEST_DEPENDENCIES% python=${{ matrix.python }} -c ${{ env.workdir }}/channel ${{ env.CHANNELS }} | |
- name: Report content of test environment | |
shell: cmd /C CALL {0} | |
run: | | |
echo "Value of CONDA enviroment variable was: " %CONDA% | |
echo "Value of CONDA_PREFIX enviroment variable was: " %CONDA_PREFIX% | |
          conda info && conda list -n ${{ env.TEST_ENV_NAME }}
      - name: Configure Intel OpenCL CPU RT
        shell: pwsh
        run: |
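          # The helper script registers the Intel OpenCL CPU ICD so the CPU device is discoverable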
          $script_path="$env:CONDA_PREFIX\Scripts\set-intel-ocl-icd-registry.ps1"
          &$script_path
          # Check cl.cfg, which points the OpenCL CPU driver at the TBB DLLs since they are not in the location it expects by default
          $cl_cfg="$env:CONDA_PREFIX\Library\lib\cl.cfg"
          Get-Content -Tail 5 -Path $cl_cfg
      - name: Smoke test, step 1
        shell: cmd /C CALL {0}
        run: >-
          conda activate ${{ env.TEST_ENV_NAME }} && python -c "import sys; print(sys.executable)"
      - name: Smoke test, step 2
        shell: cmd /C CALL {0}
        run: >-
          conda activate ${{ env.TEST_ENV_NAME }} && python -m dpctl -f
      - name: Create empty temporary directory to run tests from
        shell: cmd /C CALL {0}
        # create temporary empty folder to run tests from
        # https://github.com/pytest-dev/pytest/issues/11904
        run: >-
          mkdir "${{ env.workdir }}\test_tmp"
      - name: List content of workdir folder
        shell: cmd /C CALL {0}
        run: dir "${{ env.workdir }}"
      - name: Run tests
        shell: cmd /C CALL {0}
        env:
          SYCL_CACHE_PERSISTENT: 1
        working-directory: ${{ env.workdir }}\test_tmp
        run: >-
          conda activate ${{ env.TEST_ENV_NAME }} && python -m pytest -v -s --pyargs ${{ env.MODULE_NAME }}
  upload_linux:
    needs: test_linux
    if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}}
    runs-on: ubuntu-22.04
    timeout-minutes: 20
    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11', '3.12']
    steps:
      - name: Download conda artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - name: Download wheel artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }}
      - name: Install anaconda-client
        run: conda install anaconda-client -c conda-forge --override-channels
      - name: Add conda to system path
        run: echo $CONDA/bin >> $GITHUB_PATH
      - name: Package version
        run: echo "PACKAGE_VERSION=$(basename ${{ env.PACKAGE_NAME }}-*.tar.bz2 | sed 's/^${{ env.PACKAGE_NAME }}-\([^-]*\).*/\1/')" >> $GITHUB_ENV
      - name: Upload
        env:
          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
        run: |
          anaconda --token $ANACONDA_TOKEN upload --user dppy --label dev ${PACKAGE_NAME}-*.tar.bz2
      - name: Upload Wheels
        env:
          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
        run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.whl --version ${{ env.PACKAGE_VERSION }}
  upload_windows:
    needs: test_windows
    if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}}
    runs-on: windows-2019
    timeout-minutes: 20
    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11', '3.12']
    steps:
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - name: Download wheel artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }}
      - uses: conda-incubator/setup-miniconda@v3
        with:
          auto-activate-base: true
          activate-environment: ""
      - name: Install anaconda-client
        run: conda install anaconda-client -c conda-forge --override-channels
      - name: Package version
        shell: bash -el {0}
        run: echo "PACKAGE_VERSION=$(basename ${{ env.PACKAGE_NAME }}-*.tar.bz2 | sed 's/^${{ env.PACKAGE_NAME }}-\([^-]*\).*/\1/')" >> $GITHUB_ENV
      - name: Upload
        env:
          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
        run: |
          anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
      - name: Upload Wheels
        env:
          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
        run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.whl --version ${{ env.PACKAGE_VERSION }}
  test_examples_linux:
    needs: build_linux
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        python: ['3.11']
        experimental: [false]
        runner: [ubuntu-22.04]
    continue-on-error: ${{ matrix.experimental }}
    timeout-minutes: 60
    env:
      EXAMPLES_ENV_NAME: examples
      BUILD_ENV_NAME: build_env
    steps:
      - name: Construct channels line
        run: |
          echo "CHANNELS=-c ${{ env.INTEL_CHANNEL }} -c conda-forge --override-channels" >> $GITHUB_ENV
      - name: Display channels line
        run: |
          echo ${{ env.CHANNELS }}
      - name: Install conda-index
        # Needed to be able to run conda index
        run: conda install conda-index -c conda-forge --override-channels
      - name: Checkout dpctl repo
        uses: actions/checkout@v4.2.1
        with:
          fetch-depth: 0
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - name: Add conda to system path
        run: echo $CONDA/bin >> $GITHUB_PATH
      - name: Create conda channel
        run: |
          mkdir -p $GITHUB_WORKSPACE/channel/linux-64
          conda index $GITHUB_WORKSPACE/channel || exit 1
          mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64 || exit 1
          conda index $GITHUB_WORKSPACE/channel || exit 1
          # Test channel
          conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels --info --json > $GITHUB_WORKSPACE/ver.json
          cat ver.json
      - name: Collect dependencies
        run: |
          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
          conda create -n ${{ env.EXAMPLES_ENV_NAME }} $PACKAGE_NAME=${PACKAGE_VERSION} python=${{ matrix.python }} $CHANNELS --only-deps --dry-run > lockfile
          cat lockfile
      - name: Set pkgs_dirs
        run: |
          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 3 # Increase to reset cache
        with:
          path: ~/.conda/pkgs
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Install example requirements
        shell: bash -l {0}
        env:
          DPCPP_CMPLR: dpcpp_linux-64">=2024.2"
        run: |
          CHANNELS="${{ env.CHANNELS }}"
          . $CONDA/etc/profile.d/conda.sh
          conda create -n ${{ env.EXAMPLES_ENV_NAME }} -y pytest python=${{ matrix.python }} setuptools"<72.2.0" $CHANNELS
          conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y cmake $CHANNELS || exit 1
          conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y ninja $CHANNELS || exit 1
          conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y pybind11 cython scikit-build $CHANNELS || exit 1
          conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y mkl-dpcpp mkl-devel-dpcpp dpcpp_cpp_rt $CHANNELS || exit 1
          conda create -y -n ${{ env.BUILD_ENV_NAME }} $CHANNELS gcc_linux-64 gxx_linux-64 ${{ env.DPCPP_CMPLR }} sysroot_linux-64">=2.28"
      - name: Install dpctl
        shell: bash -l {0}
        run: |
          source $CONDA/etc/profile.d/conda.sh
          CHANNELS="-c $GITHUB_WORKSPACE/channel -c dppy/label/dev -c ${{ env.INTEL_CHANNEL }} -c conda-forge --override-channels"
          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
          conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y ${CHANNELS} dpctl=${PACKAGE_VERSION} dpnp || exit 1
      - name: Build and run examples of pybind11 extensions
        shell: bash -l {0}
        run: |
          source $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.EXAMPLES_ENV_NAME }}
          conda list
          cd examples/pybind11
          for d in $(find . -maxdepth 1 -type d -not -path ".")
          do
            pushd $d
            export MKLROOT=${CONDA_PREFIX}
            export TBBROOT=${CONDA_PREFIX}
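            # Stack the compiler environment on top and build with icx/icpx, pointing CMake at the MKL/TBB from the examples env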
            conda activate --stack build_env
            CC=icx CXX=icpx python setup.py build_ext --inplace -G Ninja -- \
              -DTBB_LIBRARY_DIR=${TBBROOT}/lib \
              -DMKL_LIBRARY_DIR=${MKLROOT}/lib \
              -DMKL_INCLUDE_DIR=${MKLROOT}/include \
              -DTBB_INCLUDE_DIR=${TBBROOT}/include || exit 1
            conda deactivate
            if [ -e tests ]
            then
              LD_LIBRARY_PATH=${CONDA_PREFIX}/lib python -m pytest tests || exit 1
            else
              LD_LIBRARY_PATH=${CONDA_PREFIX}/lib python example.py || exit 1
            fi
            popd
          done
      - name: Build and run examples of Cython extensions
        shell: bash -l {0}
        run: |
          source $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.EXAMPLES_ENV_NAME }}
          conda list
          cd examples/cython
          for d in $(find . -maxdepth 1 -type d -not -path ".")
          do
            pushd $d
            conda activate --stack ${{ env.BUILD_ENV_NAME }}
            python setup.py build_ext --inplace || exit 1
            conda deactivate
            python -m pytest tests || exit 1
            popd
          done
      - name: Build and run examples of C-extensions
        shell: bash -l {0}
        run: |
          source $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.EXAMPLES_ENV_NAME }}
          conda list
          cd examples/c
          for d in $(find . -maxdepth 1 -type d -not -path ".")
          do
            pushd $d
            conda activate --stack ${{ env.BUILD_ENV_NAME }}
            python setup.py build_ext --inplace || exit 1
            conda deactivate
            python -m pytest tests || exit 1
            popd
          done
      - name: Run Python examples
        shell: bash -l {0}
        run: |
          cd examples/python
          source $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.EXAMPLES_ENV_NAME }}
          for script in $(find . \( -not -name "_*" -and -name "*.py" \))
          do
            echo "Executing ${script}"
            python ${script} || exit 1
          done
  array-api-conformity:
    needs: build_linux
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 90
    permissions:
      pull-requests: write
    strategy:
      matrix:
        python: ['3.10']
        experimental: [false]
        runner: [ubuntu-22.04]
    continue-on-error: ${{ matrix.experimental }}
    steps:
      - name: Construct channels line
        run: |
          echo "CHANNELS=-c ${{ env.INTEL_CHANNEL }} -c conda-forge --override-channels" >> $GITHUB_ENV
      - name: Display channels line
        run: |
          echo ${{ env.CHANNELS }}
      - name: Checkout dpctl repo
        uses: actions/checkout@v4.2.1
        with:
          fetch-depth: 0
      - name: Cache array API tests
        id: cache-array-api-tests
        uses: actions/cache@v4
        env:
          ARRAY_CACHE: 3
        with:
          path: |
            /home/runner/work/array-api-tests/
          key: ${{ runner.os }}-array-api-${{ env.cache-name }}-${{ env.ARRAY_CACHE }}-${{ hashFiles('/home/runner/work/array-api-tests/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-
      - name: Clone array API tests repo
        if: steps.cache-array-api-tests.outputs.cache-hit != 'true'
        shell: bash -l {0}
        run: |
          cd /home/runner/work
          git clone --recurse-submodules https://github.com/data-apis/array-api-tests array-api-tests
          cd array-api-tests
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - name: Add conda to system path
        run: echo $CONDA/bin >> $GITHUB_PATH
      - name: Install conda-index
        # Needed to be able to run conda index
        run: conda install conda-index -c conda-forge --override-channels
      - name: Create conda channel
        run: |
          mkdir -p $GITHUB_WORKSPACE/channel/linux-64
          conda index $GITHUB_WORKSPACE/channel || exit 1
          mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64 || exit 1
          conda index $GITHUB_WORKSPACE/channel || exit 1
          # Test channel
          conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels --info --json > $GITHUB_WORKSPACE/ver.json
          cat ver.json
      - name: Collect dependencies
        run: |
          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
          conda create -n ${{ env.TEST_ENV_NAME }} $PACKAGE_NAME=${PACKAGE_VERSION} python=${{ matrix.python }} $CHANNELS --only-deps --dry-run > lockfile
          cat lockfile
      - name: Set pkgs_dirs
        run: |
          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 3 # Increase to reset cache
        with:
          path: ~/.conda/pkgs
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Install dpctl
        run: |
          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
          conda create -n ${{ env.TEST_ENV_NAME }} $PACKAGE_NAME=${PACKAGE_VERSION} pytest python=${{ matrix.python }} $CHANNELS
          # Test installed packages
          conda list
      - name: Install array API test dependencies
        shell: bash -l {0}
        run: |
          . $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.TEST_ENV_NAME }}
          cd /home/runner/work/array-api-tests
          pip install -r requirements.txt
      - name: Install jq
        shell: bash -l {0}
        run: |
          sudo apt-get install jq
      - name: Run array API conformance tests
        id: run-array-api-tests
        shell: bash -l {0}
        env:
          SYCL_CACHE_PERSISTENT: 1
        run: |
          FILE=/home/runner/work/.report.json
          . $CONDA/etc/profile.d/conda.sh
          conda activate ${{ env.TEST_ENV_NAME }}
          cd /home/runner/work/array-api-tests
          ${CONDA_PREFIX}/bin/python -c "import dpctl; dpctl.lsplatform()"
          export ARRAY_API_TESTS_MODULE=dpctl.tensor
          ${CONDA_PREFIX}/bin/python -m pytest --json-report --json-report-file=$FILE --disable-deadline --skips-file ${GITHUB_WORKSPACE}/.github/workflows/array-api-skips.txt array_api_tests/ || true
      - name: Set Github environment variables
        shell: bash -l {0}
        run: |
          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
          FILE=/home/runner/work/.report.json
          if test -f "$FILE"; then
            PASSED_TESTS=$(jq '.summary | .passed // 0' $FILE)
            FAILED_TESTS=$(jq '.summary | .failed // 0' $FILE)
            SKIPPED_TESTS=$(jq '.summary | .skipped // 0' $FILE)
            MESSAGE="Array API standard conformance tests for dpctl=$PACKAGE_VERSION ran successfully.
            Passed: $PASSED_TESTS
            Failed: $FAILED_TESTS
            Skipped: $SKIPPED_TESTS"
            echo "MESSAGE<<EOF" >> $GITHUB_ENV
            echo "$MESSAGE" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV
          else
            echo "Array API standard conformance tests failed to run for dpctl=$PACKAGE_VERSION."
            exit 1
          fi
      - name: Output API summary
        shell: bash -l {0}
        run: echo "::notice ${{ env.MESSAGE }}"
      - name: Post result to PR
        if: ${{ github.event.pull_request && !github.event.pull_request.head.repo.fork }}
        uses: mshick/add-pr-comment@v2
        with:
          message: |
            ${{ env.MESSAGE }}
          allow-repeats: true
          repo-token: ${{ secrets.GITHUB_TOKEN }}
  cleanup_packages:
    name: Clean up anaconda packages
    needs: [upload_linux, upload_windows]
    runs-on: 'ubuntu-latest'
    timeout-minutes: 30
    defaults:
      run:
        shell: bash -el {0}
    steps:
      - uses: conda-incubator/setup-miniconda@v3
        with:
          run-post: false
          channel-priority: "disabled"
          channels: conda-forge
          python-version: '3.11'
      - name: Install anaconda-client
        run: conda install anaconda-client -c conda-forge --override-channels
      - name: Checkout repo
        uses: actions/checkout@v4.2.1
        with:
          repository: IntelPython/devops-tools
          fetch-depth: 0
      - name: Cleanup old packages
        run: |
          python scripts/cleanup-old-packages.py \
            --verbose --force --token ${{ secrets.ANACONDA_TOKEN }} \
            --package dppy/${{ env.PACKAGE_NAME }} --label dev