16 changes: 8 additions & 8 deletions onnxscript/backend/onnx_export.py
@@ -4,7 +4,7 @@

 from typing import Any, Optional, Sequence
 
-import numpy
+import numpy as np
 import onnx
 from onnx import FunctionProto, GraphProto, ModelProto, TensorProto, ValueInfoProto
 
@@ -384,17 +384,17 @@ def _translate_attributes(self, node):
             if isinstance(value, str):
                 attributes.append((at.name, f"{value!r}"))
                 continue
-            if isinstance(value, numpy.ndarray):
+            if isinstance(value, np.ndarray):
                 onnx_dtype = at.t.data_type
                 if len(value.shape) == 0:
                     text = (
                         f'make_tensor("value", {onnx_dtype}, dims=[], '
-                        f"vals=[{value.tolist()!r}])"
+                        f"vals=[{repr(value.tolist()).replace('nan', 'np.nan').replace('inf', 'np.inf')}])"
                     )
                 else:
                     text = (
                         f'make_tensor("value", {onnx_dtype}, dims={list(value.shape)!r}, '
-                        f"vals={value.ravel().tolist()!r})"
+                        f"vals={repr(value.ravel().tolist()).replace('nan', 'np.nan').replace('inf', 'np.inf')})"
                     )
                 attributes.append((at.name, text))
                 continue
@@ -738,7 +738,7 @@ def generate_rand(name: str, value: TensorProto) -> str:
                 raise NotImplementedError(
                     f"Unable to generate random initializer for data type {value.data_type}."
                 )
-            return f"{__}{name} = numpy.random.rand({shape}).astype(numpy.float32)"
+            return f"{__}{name} = np.random.rand({shape}).astype(np.float32)"
 
         random_initializer_values = "\n".join(
             generate_rand(key, value) for key, value in self.skipped_initializers.items()
@@ -793,7 +793,7 @@ def add(line: str) -> None:
             result.append(line)
 
         # Generic imports.
-        add("import numpy")
+        add("import numpy as np")
         add("from onnx import TensorProto")
         add("from onnx.helper import make_tensor")
         add("from onnxscript import script, external_tensor")
@@ -873,11 +873,11 @@ def export2python(
     .. runpython::
         :showcode:
         :process:
-        import numpy
+        import numpy as np
         from sklearn.cluster import KMeans
         from mlprodict.onnx_conv import to_onnx
         from mlprodict.onnx_tools.onnx_export import export2python
-        X = numpy.arange(20).reshape(10, 2).astype(numpy.float32)
+        X = np.arange(20).reshape(10, 2).astype(np.float32)
         tr = KMeans(n_clusters=2)
         tr.fit(X)
         onx = to_onnx(tr, X, target_opset=14)
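A note on the vals= change in _translate_attributes: Python's repr() prints non-finite floats as bare nan and inf, which are not defined names, so generated source containing such tensor attributes failed to execute. Substituting np.nan and np.inf (together with the switch to "import numpy as np" in the generated header) keeps the emitted make_tensor(...) call evaluable. A minimal standalone sketch of the idea, not the exporter code itself:

import numpy as np

value = np.array([[1.0, float("nan")], [float("inf"), 2.0]], dtype=np.float32)

plain = repr(value.ravel().tolist())  # '[1.0, nan, inf, 2.0]': bare nan/inf are undefined names
fixed = plain.replace("nan", "np.nan").replace("inf", "np.inf")

# eval(plain) would raise NameError: name 'nan' is not defined
print(fixed)        # [1.0, np.nan, np.inf, 2.0]
print(eval(fixed))  # evaluates cleanly because np is imported, printing [1.0, nan, inf, 2.0]

The same substitution is applied in both branches above, for scalar (dims=[]) and ravelled multi-dimensional tensors.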
13 changes: 4 additions & 9 deletions onnxscript/backend/onnx_export_test.py
@@ -45,14 +45,8 @@ def skip(pattern: str | Pattern, reason: str, *, condition: bool = True):


 SKIP_TESTS = (
-    skip(
-        r"^test_ai_onnx_ml_array_feature_extractor",
-        "ImportError: cannot import name 'opset' from 'onnxscript.onnx_opset'",
-    ),
-    skip(
-        r"^test_ai_onnx_ml_binarizer",
-        "ImportError: cannot import name 'opset' from 'onnxscript.onnx_opset'",
-    ),
+    skip(r"^test_ai_onnx_ml_array_feature_extractor", "ORT doesn't support this op"),
+    skip(r"^test_ai_onnx_ml_binarizer", "ORT doesn't support this op"),
     skip(r"^test_center_crop_pad_crop_negative_axes_hwc", "fixme: ORT segfaults"),
     skip(r"_scan_", "Operator Scan is not supported by onnxscript"),
     skip(r"^test_scan", "Operator Scan is not supported by onnxscript"),
@@ -89,6 +83,7 @@ def skip(pattern: str | Pattern, reason: str, *, condition: bool = True):
"Change when the converter supports support something like 'while i < n and cond:'",
),
skip(r"^test_ai_onnx_ml_label_encoder", "ONNX Runtime does not support Opset 21 at 1.17"),
skip(r"^test_ai_onnx_ml_tree_ensemble", "Opset 23 is not supported"),
)

if sys.platform == "win32":
@@ -160,7 +155,7 @@ class TestOnnxBackEnd(unittest.TestCase):
     test_folder = root_folder / "tests" / "onnx_backend_test_code"
     temp_folder = root_folder / "tests" / "export"
 
-    def _proto_to_os_and_back(self, proto: onnxscript.FunctionProto, **export_options):
+    def _proto_to_os_and_back(self, proto: onnx.FunctionProto, **export_options):
         """Convert a proto to onnxscript code and convert it back to a proto."""
         code = onnx_export.export2python(proto, **export_options)
         map = extract_functions(proto.name, code, TestOnnxBackEnd.temp_folder)
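For a quick manual check of the new exporter output, along the lines of what _proto_to_os_and_back round-trips, one could build a tiny model whose Constant tensor holds non-finite values and inspect the generated source. This is a hypothetical sketch, not part of the test suite; it assumes export2python accepts a ModelProto directly, as the docstring example above suggests, and the graph and tensor names are made up.

from onnx import TensorProto, helper

from onnxscript.backend.onnx_export import export2python

# Tiny graph: add an input to a Constant whose tensor attribute contains nan/inf.
x = helper.make_tensor_value_info("x", TensorProto.FLOAT, [3])
y = helper.make_tensor_value_info("y", TensorProto.FLOAT, [3])
const = helper.make_node(
    "Constant",
    inputs=[],
    outputs=["c"],
    value=helper.make_tensor("value", TensorProto.FLOAT, [3], [1.0, float("nan"), float("inf")]),
)
add = helper.make_node("Add", inputs=["x", "c"], outputs=["y"])
graph = helper.make_graph([const, add], "nan_inf_constant", [x], [y])
model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 18)])

code = export2python(model)
assert "import numpy as np" in code  # header emitted by the exporter after this change
print(code)  # the Constant's vals=[...] should now read np.nan / np.inf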