49 changes: 18 additions & 31 deletions .github/workflows/conda-package.yml
@@ -20,7 +20,7 @@ jobs:

strategy:
matrix:
python: ["3.8", "3.9"]
python: ["3.8", "3.9", "3.10"]

steps:
- uses: actions/checkout@v3
@@ -46,18 +46,12 @@ jobs:

strategy:
matrix:
python: ["3.8", "3.9"]
integration_channels: [""]
python: ["3.8", "3.9", "3.10"]
experimental: [false]
artifact_name: [""]
include:
- python: "3.8"
integration_channels: -c dppy/label/dev
artifact_name: -c dppy_label_dev
experimental: true
continue-on-error: ${{ matrix.experimental }}
env:
CHANNELS: ${{ matrix.integration_channels }} -c intel -c defaults -c numba -c numba/label/dev -c dppy/label/dev --override-channels
CHANNELS: -c dppy/label/dev -c intel -c defaults -c numba -c numba/label/dev --override-channels
conda-bld: C:\Miniconda\conda-bld\win-64\

steps:
@@ -96,7 +90,7 @@ jobs:

strategy:
matrix:
python: ["3.8", "3.9"]
python: ["3.8", "3.9", "3.10"]
numba: ["0.56"]
dpnp: ["0.11"]

@@ -152,7 +146,7 @@ jobs:
conda activate numba_dpex_env
# echo "libintelocl.so" | tee /etc/OpenCL/vendors/intel-cpu.icd
export OCL_ICD_FILENAMES=libintelocl.so
for script in $(find . \( -not -name "_*" -not -name "vectorize.py" -not -name "scan.py" -and -name "*.py" \))
for script in $(find . \( -not -name "_*" -not -name "vector_sum2D.py" -not -name "vectorize.py" -not -name "scan.py" -and -name "*.py" \))
do
echo "Executing ${script}"
python ${script} || exit 1
@@ -164,21 +158,13 @@ jobs:

strategy:
matrix:
python: ["3.8", "3.9"]
python: ["3.8", "3.9", "3.10"]
integration_channels: [""]
experimental: [true] # packages are not available on -c intel yet
artifact_name: [""]
dependencies: [""]
include:
- python: "3.8"
integration_channels: -c dppy/label/dev
artifact_name: -c dppy_label_dev
experimental: true # current stable
dependencies: ""
experimental: [false] # packages are not available on -c intel yet
continue-on-error: ${{ matrix.experimental }}
env:
# conda-forge: llvm-spirv 11 not on intel channel yet
CHANNELS: ${{ matrix.integration_channels }} -c dppy/label/dev -c intel -c defaults -c numba -c numba/label/dev -c conda-forge --override-channels
CHANNELS: -c dppy/label/dev -c intel -c defaults -c numba -c numba/label/dev --override-channels

steps:
- name: Create dir for numba-dpex repo
@@ -189,15 +175,16 @@ jobs:
path: dpex-repo
fetch-depth: 0
- name: Download artifact
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} ${{ matrix.artifact_name }}
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
- uses: conda-incubator/setup-miniconda@v2
with:
auto-activate-base: true
activate-environment: ""
- name: Install conda-build
run: conda install conda-build
use-only-tar-bz2: true
auto-update-conda: true
conda-build-version: '*'
miniconda-version: 'latest'
activate-environment: "numba_dpex_wintest"
- name: Create conda channel
run: |
mkdir -p $env:GITHUB_WORKSPACE/channel/win-64
@@ -210,7 +197,7 @@ jobs:
- name: Cache conda packages
uses: actions/cache@v2
env:
CACHE_NUMBER: 0 # Increase to reset cache
CACHE_NUMBER: 1 # Increase to reset cache
with:
path: /home/runner/conda_pkgs_dir
key:
@@ -273,7 +260,7 @@ jobs:
runs-on: ubuntu-20.04
strategy:
matrix:
python: ["3.8", "3.9"]
python: ["3.8", "3.9", "3.10"]
steps:
- name: Download artifact
uses: actions/download-artifact@v2
@@ -301,7 +288,7 @@ jobs:
runs-on: windows-latest
strategy:
matrix:
python: ["3.8", "3.9"]
python: ["3.8", "3.9", "3.10"]
steps:
- name: Download artifact
uses: actions/download-artifact@v2
3 changes: 3 additions & 0 deletions numba_dpex/core/__init__.py
@@ -2,5 +2,8 @@
#
# SPDX-License-Identifier: Apache-2.0


from .datamodel import *
from .dpnp_ndarray import dpnp_empty
from .types import *
from .typing import *
7 changes: 6 additions & 1 deletion numba_dpex/core/datamodel/models.py
@@ -3,10 +3,11 @@
# SPDX-License-Identifier: Apache-2.0

from numba.core import datamodel, types
from numba.core.datamodel.models import ArrayModel as DpnpNdArrayModel
from numba.core.datamodel.models import PrimitiveModel, StructModel
from numba.core.extending import register_model

from numba_dpex.core.types import Array, USMNdArray
from numba_dpex.core.types import Array, DpnpNdArray, USMNdArray
from numba_dpex.utils import address_space


@@ -64,3 +65,7 @@ def _init_data_model_manager():
# Register the USMNdArray type with the dpex ArrayModel
register_model(USMNdArray)(ArrayModel)
dpex_data_model_manager.register(USMNdArray, ArrayModel)

# Register the DpnpNdArray type with the dpex ArrayModel
register_model(DpnpNdArray)(DpnpNdArrayModel)
dpex_data_model_manager.register(DpnpNdArray, DpnpNdArrayModel)
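For orientation (not part of the diff), a minimal sketch of what this registration provides. The constructor keywords are borrowed from the `dpnp.empty` typer added later in this PR, and running it assumes a numba-dpex build that already contains this change:

```python
# Illustrative sketch: look up the data model that the registration above
# associates with the new DpnpNdArray type.
from numba.core import types

from numba_dpex.core.datamodel.models import dpex_data_model_manager
from numba_dpex.core.types import DpnpNdArray

# Keyword arguments mirror the dpnp.empty typer added in this PR.
arr_ty = DpnpNdArray(dtype=types.float32, ndim=1, layout="C", usm_type="device")

# The dpex data model manager now resolves DpnpNdArray to Numba's ArrayModel
# (imported above under the alias DpnpNdArrayModel).
model = dpex_data_model_manager.lookup(arr_ty)
print(type(model).__name__)  # expected: ArrayModel
```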
@@ -1,5 +1,3 @@
# SPDX-FileCopyrightText: 2020 - 2022 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0

from . import *
@@ -13,7 +13,7 @@
type_callable,
)

from .types import dpnp_ndarray_Type
from numba_dpex.core.types import DpnpNdArray


@type_callable(dpnp.empty)
@@ -34,7 +34,7 @@ def typer(shape, dtype=None, usm_type=None, sycl_queue=None):
usm_type = parse_usm_type(usm_type)

if nb_dtype is not None and ndim is not None and usm_type is not None:
return dpnp_ndarray_Type(
return DpnpNdArray(
dtype=nb_dtype, ndim=ndim, layout="C", usm_type=usm_type
)

@@ -177,7 +177,7 @@ def _call_allocator(arrtype, size, usm_type, sycl_queue):
return arrtype._allocate(size, usm_type, sycl_queue)


@overload_classmethod(dpnp_ndarray_Type, "_allocate")
@overload_classmethod(DpnpNdArray, "_allocate")
def _ol_dpnp_array_allocate(cls, size, usm_type, sycl_queue):
def impl(cls, size, usm_type, sycl_queue):
return intrin_alloc(size, usm_type, sycl_queue)
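To make the intent of the two registrations in this file concrete, here is a hedged usage sketch: the `type_callable(dpnp.empty)` typer lets Numba infer a `DpnpNdArray` for a `dpnp.empty` call site, and the `_allocate` overload gives the lowering a way to obtain the backing USM allocation. Whether plain `numba.njit` already compiles this with only the changes in this PR is an assumption; read it as the target usage rather than a tested example:

```python
# Target usage sketch (assumptions: a SYCL device is available and the
# lowering path for dpnp.empty is complete in this build).
import dpnp
from numba import njit


@njit
def make_zeros(n):
    # Typing: the @type_callable(dpnp.empty) typer above infers DpnpNdArray.
    # Lowering: DpnpNdArray._allocate (overloaded above) performs the allocation.
    a = dpnp.empty(n, dtype=dpnp.float32, usm_type="device")
    for i in range(n):
        a[i] = 0
    return a


out = make_zeros(8)
```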
2 changes: 2 additions & 0 deletions numba_dpex/core/types/__init__.py
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: Apache-2.0

from .array_type import Array
from .dpnp_ndarray_type import DpnpNdArray
from .numba_types_short_names import (
b1,
bool_,
@@ -30,6 +31,7 @@

__all__ = [
"Array",
"DpnpNdArray",
"USMNdArray",
"none",
"boolean",
15 changes: 15 additions & 0 deletions numba_dpex/core/types/dpnp_ndarray_type.py
@@ -0,0 +1,15 @@
# SPDX-FileCopyrightText: 2020 - 2022 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0


from .usm_ndarray_type import USMNdArray


class DpnpNdArray(USMNdArray):
"""
The Numba type to represent a dpnp.ndarray. The type has the same
structure as USMNdArray, which is used to represent dpctl.tensor.usm_ndarray.
"""

pass
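A small sanity-check sketch (not part of the diff) of the relationship the docstring describes; the constructor keywords are the ones used by the `dpnp.empty` typer elsewhere in this PR:

```python
from numba.core import types

from numba_dpex.core.types import DpnpNdArray, USMNdArray

# DpnpNdArray is structurally identical to USMNdArray; only the Python class
# differs, which is what lets typeof and the data model manager tell a
# dpnp.ndarray apart from a dpctl.tensor.usm_ndarray.
ty = DpnpNdArray(dtype=types.int64, ndim=2, layout="C", usm_type="device")
assert isinstance(ty, USMNdArray)
print(ty)
```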
59 changes: 42 additions & 17 deletions numba_dpex/core/typing/typeof.py
@@ -3,29 +3,17 @@
# SPDX-License-Identifier: Apache-2.0

from dpctl.tensor import usm_ndarray
from dpnp import ndarray
from numba.extending import typeof_impl
from numba.np import numpy_support

from numba_dpex.core.types.dpnp_ndarray_type import DpnpNdArray
from numba_dpex.core.types.usm_ndarray_type import USMNdArray
from numba_dpex.utils import address_space


@typeof_impl.register(usm_ndarray)
def typeof_usm_ndarray(val, c):
"""Registers the type inference implementation function for
dpctl.tensor.usm_ndarray

Args:
val : A Python object that should be an instance of a
dpctl.tensor.usm_ndarray
c : Unused argument used to be consistent with Numba API.

Raises:
ValueError: If an unsupported dtype encountered or val has
no ``usm_type`` or sycl_device attribute.

Returns: The Numba type corresponding to dpctl.tensor.usm_ndarray
"""
def _typeof_helper(val, array_class_type):
"""Creates a Numba type of the specified ``array_class_type`` for ``val``."""
try:
dtype = numpy_support.from_dtype(val.dtype)
except NotImplementedError:
@@ -55,7 +43,7 @@ def typeof_usm_ndarray(val, c):
except AttributeError:
raise ValueError("The device for the usm_ndarray could not be inferred")

return USMNdArray(
return array_class_type(
dtype=dtype,
ndim=val.ndim,
layout=layout,
@@ -65,3 +53,40 @@
queue=val.sycl_queue,
addrspace=address_space.GLOBAL,
)


@typeof_impl.register(usm_ndarray)
def typeof_usm_ndarray(val, c):
"""Registers the type inference implementation function for
dpctl.tensor.usm_ndarray

Args:
val : A Python object that should be an instance of a
dpctl.tensor.usm_ndarray
c : Unused argument used to be consistent with Numba API.

Raises:
ValueError: If an unsupported dtype is encountered or val has
no ``usm_type`` or ``sycl_device`` attribute.

Returns: The Numba type corresponding to dpctl.tensor.usm_ndarray
"""
return _typeof_helper(val, USMNdArray)


@typeof_impl.register(ndarray)
def typeof_dpnp_ndarray(val, c):
"""Registers the type inference implementation function for dpnp.ndarray.

Args:
val : A Python object that should be an instance of a
dpnp.ndarray
c : Unused argument used to be consistent with Numba API.

Raises:
ValueError: If an unsupported dtype is encountered or val has
no ``usm_type`` or ``sycl_device`` attribute.

Returns: The Numba type corresponding to dpnp.ndarray
"""
return _typeof_helper(val, DpnpNdArray)
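A brief usage sketch (assumes dpnp, dpctl, and a SYCL device are available at runtime): with this registration in place, `numba.typeof` maps dpnp arrays to the new `DpnpNdArray` type, mirroring the existing mapping from `dpctl.tensor.usm_ndarray` to `USMNdArray`:

```python
import dpnp
from numba import typeof

from numba_dpex.core.types import DpnpNdArray

# typeof dispatches through the @typeof_impl.register(ndarray) hook above.
a = dpnp.empty((4, 4), dtype=dpnp.float32, usm_type="device")
print(typeof(a))  # e.g. a DpnpNdArray instance describing a 2D C-contiguous float32 array
assert isinstance(typeof(a), DpnpNdArray)
```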
7 changes: 0 additions & 7 deletions numba_dpex/dpnp_ndarray/__init__.py

This file was deleted.

15 changes: 0 additions & 15 deletions numba_dpex/dpnp_ndarray/models.py

This file was deleted.

35 changes: 0 additions & 35 deletions numba_dpex/dpnp_ndarray/typeof.py

This file was deleted.
