From 6f20e926d6c02c893cb0d72459f58636f48ec25b Mon Sep 17 00:00:00 2001
From: wangshuai09 <391746016@qq.com>
Date: Mon, 1 Apr 2024 17:00:50 +0800
Subject: [PATCH] add npu support

---
 python-package/README.md                          | 4 ++--
 python-package/insightface/model_zoo/model_zoo.py | 2 +-
 web-demos/src_recognition/arcface_onnx.py         | 2 +-
 web-demos/src_recognition/scrfd.py                | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/python-package/README.md b/python-package/README.md
index 69c199b56..e7913cf43 100644
--- a/python-package/README.md
+++ b/python-package/README.md
@@ -14,7 +14,7 @@ For ``insightface<=0.1.5``, we use MXNet as inference backend.
 
 Starting from insightface>=0.2, we use onnxruntime as inference backend.
 
-You have to install ``onnxruntime-gpu`` manually to enable GPU inference, or install ``onnxruntime`` to use CPU only inference.
+You have to install ``onnxruntime-gpu`` manually to enable GPU inference, or install ``onnxruntime-cann`` manually to enable NPU inference, or install ``onnxruntime`` to use CPU only inference.
 
 ## Change Log
 
@@ -54,7 +54,7 @@ import insightface
 from insightface.app import FaceAnalysis
 from insightface.data import get_image as ins_get_image
 
-app = FaceAnalysis(providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
+app = FaceAnalysis(providers=['CUDAExecutionProvider', 'CANNExecutionProvider', 'CPUExecutionProvider'])
 app.prepare(ctx_id=0, det_size=(640, 640))
 img = ins_get_image('t1')
 faces = app.get(img)
diff --git a/python-package/insightface/model_zoo/model_zoo.py b/python-package/insightface/model_zoo/model_zoo.py
index fc6283114..e94ebbb4b 100644
--- a/python-package/insightface/model_zoo/model_zoo.py
+++ b/python-package/insightface/model_zoo/model_zoo.py
@@ -68,7 +68,7 @@ def find_onnx_file(dir_path):
     return paths[-1]
 
 def get_default_providers():
-    return ['CUDAExecutionProvider', 'CPUExecutionProvider']
+    return ['CUDAExecutionProvider', 'CANNExecutionProvider', 'CPUExecutionProvider']
 
 def get_default_provider_options():
     return None
diff --git a/web-demos/src_recognition/arcface_onnx.py b/web-demos/src_recognition/arcface_onnx.py
index 870e7d641..c513cbe07 100644
--- a/web-demos/src_recognition/arcface_onnx.py
+++ b/web-demos/src_recognition/arcface_onnx.py
@@ -42,7 +42,7 @@ def __init__(self, model_file=None, session=None):
         self.input_std = input_std
         #print('input mean and std:', self.input_mean, self.input_std)
         if self.session is None:
-            self.session = onnxruntime.InferenceSession(self.model_file, providers=['CUDAExecutionProvider'])
+            self.session = onnxruntime.InferenceSession(self.model_file, providers=['CUDAExecutionProvider', 'CANNExecutionProvider'])
         input_cfg = self.session.get_inputs()[0]
         input_shape = input_cfg.shape
         input_name = input_cfg.name
diff --git a/web-demos/src_recognition/scrfd.py b/web-demos/src_recognition/scrfd.py
index cc04996d2..c4135e401 100644
--- a/web-demos/src_recognition/scrfd.py
+++ b/web-demos/src_recognition/scrfd.py
@@ -74,7 +74,7 @@ def __init__(self, model_file=None, session=None):
         if self.session is None:
             assert self.model_file is not None
             assert osp.exists(self.model_file)
-            self.session = onnxruntime.InferenceSession(self.model_file, providers=['CUDAExecutionProvider'])
+            self.session = onnxruntime.InferenceSession(self.model_file, providers=['CUDAExecutionProvider', 'CANNExecutionProvider'])
         self.center_cache = {}
         self.nms_thresh = 0.4
         self.det_thresh = 0.5
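
Note (not part of the patch): a minimal sketch for trying the change locally, assuming
``onnxruntime-cann`` is installed on an Ascend NPU host. It prints the providers the
installed onnxruntime build exposes (CANNExecutionProvider should be listed on such a
host) and then runs the README example with the provider list this patch introduces.

    # Sanity-check sketch, assuming onnxruntime-cann is installed on an Ascend NPU host.
    import onnxruntime
    from insightface.app import FaceAnalysis
    from insightface.data import get_image as ins_get_image

    # Confirm which execution providers this onnxruntime build actually offers;
    # 'CANNExecutionProvider' should appear here when the CANN build is installed.
    print(onnxruntime.get_available_providers())

    # Same provider list the patch adds to the README and to get_default_providers().
    app = FaceAnalysis(providers=['CUDAExecutionProvider', 'CANNExecutionProvider', 'CPUExecutionProvider'])
    app.prepare(ctx_id=0, det_size=(640, 640))

    # 't1' is the bundled sample image used in the README quick-start example.
    faces = app.get(ins_get_image('t1'))
    print('detected faces:', len(faces))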