Rename onnx2ncnn to mmdeploy_onnx2ncnn #694

Merged 6 commits on Jul 8, 2022. Changes from 5 commits.
9 changes: 6 additions & 3 deletions .github/scripts/test_onnx2ncnn.py
@@ -33,7 +33,8 @@
def parse_args():
parser = argparse.ArgumentParser(
description='MMDeploy onnx2ncnn test tool.')
parser.add_argument('--run', type=bool, help='Execute onnx2ncnn bin.')
parser.add_argument(
'--run', type=bool, help='Execute mmdeploy_onnx2ncnn bin.')
parser.add_argument(
'--repo-dir', type=str, default='~/', help='mmcls directory.')
parser.add_argument(
@@ -77,14 +78,16 @@ def run(args):
# show processbar
os.system(' '.join(download_cmd))

convert_cmd = ['./onnx2ncnn', filename, 'onnx.param', 'onnx.bin']
convert_cmd = [
'./mmdeploy_onnx2ncnn', filename, 'onnx.param', 'onnx.bin'
]
subprocess.run(convert_cmd, capture_output=True, check=True)


def main():
"""test `onnx2ncnn.cpp`

First generate onnx model then convert it with `onnx2ncnn`.
First generate onnx model then convert it with `mmdeploy_onnx2ncnn`.
"""
args = parse_args()
if args.generate_onnx:
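For context, the conversion step of this test script reduces to invoking the renamed binary on an ONNX file. Below is a minimal standalone sketch of that call, assuming the `mmdeploy_onnx2ncnn` binary has been built and is either on `PATH` or in the working directory; the input filename is hypothetical.

```python
import shutil
import subprocess


def convert(onnx_file: str) -> None:
    # Prefer a binary found on PATH, otherwise fall back to a local build.
    binary = shutil.which('mmdeploy_onnx2ncnn') or './mmdeploy_onnx2ncnn'
    # The converter writes the ncnn model as a .param/.bin pair.
    cmd = [binary, onnx_file, 'onnx.param', 'onnx.bin']
    # check=True surfaces conversion failures as CalledProcessError.
    subprocess.run(cmd, capture_output=True, check=True)


if __name__ == '__main__':
    convert('resnet18.onnx')  # hypothetical input file
```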
4 changes: 2 additions & 2 deletions .github/workflows/backend-ncnn.yml
@@ -59,10 +59,10 @@ jobs:
mkdir -p build && pushd build
export LD_LIBRARY_PATH=/home/runner/work/mmdeploy/mmdeploy/ncnn-20220420/install/lib/:$LD_LIBRARY_PATH
cmake -DMMDEPLOY_TARGET_BACKENDS=ncnn -Dncnn_DIR=/home/runner/work/mmdeploy/mmdeploy/ncnn-20220420/install/lib/cmake/ncnn/ ..
make onnx2ncnn -j2
make mmdeploy_onnx2ncnn -j2
popd
- name: Test onnx2ncnn
run: |
echo $(pwd)
ln -s build/bin/onnx2ncnn ./
ln -s build/bin/mmdeploy_onnx2ncnn ./
python3 .github/scripts/test_onnx2ncnn.py --run 1
1 change: 1 addition & 0 deletions .gitignore
@@ -6,6 +6,7 @@ __pycache__/
# C extensions
*.so
onnx2ncnn
mmdeploy_onnx2ncnn

# Java classes
*.class
11 changes: 7 additions & 4 deletions csrc/mmdeploy/backend_ops/ncnn/onnx2ncnn/CMakeLists.txt
@@ -5,15 +5,18 @@ project(onnx2ncnn)
find_package(Protobuf)

if (PROTOBUF_FOUND)
if (${Protobuf_PROTOC_EXECUTABLE} STREQUAL "")
message(FATAL_ERROR "protoc not found, try `-DProtobuf_PROTOC_EXECUTABLE=/path/to/protoc`")
endif ()
protobuf_generate_cpp(ONNX_PROTO_SRCS ONNX_PROTO_HDRS
${CMAKE_CURRENT_SOURCE_DIR}/onnx.proto)
add_executable(onnx2ncnn onnx2ncnn.cpp fuse_pass.cpp shape_inference.cpp ${ONNX_PROTO_SRCS} ${ONNX_PROTO_HDRS})
target_include_directories(onnx2ncnn PRIVATE ${PROTOBUF_INCLUDE_DIR}
add_executable(mmdeploy_onnx2ncnn onnx2ncnn.cpp fuse_pass.cpp shape_inference.cpp ${ONNX_PROTO_SRCS} ${ONNX_PROTO_HDRS})
target_include_directories(mmdeploy_onnx2ncnn PRIVATE ${PROTOBUF_INCLUDE_DIR}
${CMAKE_CURRENT_BINARY_DIR})
target_link_libraries(onnx2ncnn PRIVATE ${PROTOBUF_LIBRARIES})
target_link_libraries(mmdeploy_onnx2ncnn PRIVATE ${PROTOBUF_LIBRARIES})

set(_NCNN_CONVERTER_DIR ${CMAKE_SOURCE_DIR}/mmdeploy/backend/ncnn)
install(TARGETS onnx2ncnn DESTINATION ${_NCNN_CONVERTER_DIR})
install(TARGETS mmdeploy_onnx2ncnn DESTINATION ${_NCNN_CONVERTER_DIR})
else ()
message(
FATAL_ERROR "Protobuf not found, onnx model convert tool won't be built")
2 changes: 1 addition & 1 deletion docs/en/06-developer-guide/partition_model.md
@@ -86,4 +86,4 @@ https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e

After running the script above, we will have the partitioned onnx file `yolov3.onnx` in the `work-dir`. You can use the visualization tool [netron](https://netron.app/) to check the model structure.

With the partitioned onnx file, you could refer to [useful_tools.md](../useful_tools.md) to do the following procedures such as `onnx2ncnn`, `onnx2tensorrt`.
With the partitioned onnx file, you can refer to [useful_tools.md](../useful_tools.md) for follow-up procedures such as `mmdeploy_onnx2ncnn` and `onnx2tensorrt`.
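As a rough illustration of such a follow-up step, `tools/onnx2ncnn.py` ultimately calls `from_onnx(onnx_path, output_prefix)`. A minimal sketch of converting the partitioned model, assuming `from_onnx` is importable from `mmdeploy.apis.ncnn` (the exact import path may differ between versions; paths are illustrative):

```python
# Sketch only: the import path of `from_onnx` may differ by mmdeploy version.
from mmdeploy.apis.ncnn import from_onnx

# Convert the partitioned ONNX file produced above into an ncnn .param/.bin pair.
from_onnx('work-dir/yolov3.onnx', 'work-dir/yolov3')
```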
2 changes: 1 addition & 1 deletion docs/zh_cn/04-developer-guide/partition_model.md
@@ -82,4 +82,4 @@ https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e
--work-dir ./work-dirs/mmdet/yolov3/ort/partition
```

Once we have the partitioned onnx model, we can use other tools provided by mmdeploy, such as `onnx2ncnn` and `onnx2tensorrt`, to carry out the subsequent model deployment work.
Once we have the partitioned onnx model, we can use other tools provided by mmdeploy, such as `mmdeploy_onnx2ncnn` and `onnx2tensorrt`, to carry out the subsequent model deployment work.
2 changes: 1 addition & 1 deletion docs/zh_cn/04-developer-guide/support_new_backend.md
@@ -142,7 +142,7 @@ Backends in MMDeploy must support ONNX, so a backend can directly load ".onnx"
backend_files = []
for onnx_path in onnx_files:
create_process(
f'onnx2ncnn with {onnx_path}',
f'mmdeploy_onnx2ncnn with {onnx_path}',
target=onnx2ncnn,
args=(onnx_path, args.work_dir),
kwargs=dict(),
19 changes: 15 additions & 4 deletions mmdeploy/backend/ncnn/init_plugins.py
@@ -18,13 +18,24 @@ def get_ops_path() -> str:


def get_onnx2ncnn_path() -> str:
"""Get onnx2ncnn path.
"""Get mmdeploy_onnx2ncnn path.

Returns:
str: A path of onnx2ncnn tool.
str: A path of mmdeploy_onnx2ncnn tool.
"""
candidates = ['./onnx2ncnn', './onnx2ncnn.exe']
return get_file_path(os.path.dirname(__file__), candidates)
candidates = ['./mmdeploy_onnx2ncnn', './mmdeploy_onnx2ncnn.exe']
onnx2ncnn_path = get_file_path(os.path.dirname(__file__), candidates)

if onnx2ncnn_path is None or not os.path.exists(onnx2ncnn_path):
onnx2ncnn_path = get_file_path('', candidates)

if onnx2ncnn_path is None or not os.path.exists(onnx2ncnn_path):
onnx2ncnn_path = shutil.which('mmdeploy_onnx2ncnn.exe')

if onnx2ncnn_path is None or not os.path.exists(onnx2ncnn_path):
onnx2ncnn_path = shutil.which('mmdeploy_onnx2ncnn')

return onnx2ncnn_path


def get_ncnn2int8_path() -> str:
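With the extra fallbacks above, callers no longer need to know exactly where the binary landed. A hedged usage sketch follows (the real `from_onnx` wrapper is more involved; this only shows how the resolved path would feed a subprocess call):

```python
import subprocess

from mmdeploy.backend.ncnn.init_plugins import get_onnx2ncnn_path


def convert(onnx_path: str, output_prefix: str) -> None:
    # Resolve the binary from the package dir, the working dir, or PATH.
    onnx2ncnn = get_onnx2ncnn_path()
    if onnx2ncnn is None:
        raise RuntimeError('mmdeploy_onnx2ncnn not found; build it first.')
    subprocess.run([
        onnx2ncnn, onnx_path, output_prefix + '.param', output_prefix + '.bin'
    ], check=True)
```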
4 changes: 2 additions & 2 deletions tools/deploy.py
@@ -221,8 +221,8 @@ def main():

if not is_available_ncnn():
logger.error('ncnn support is not available, please make sure \
1) `onnx2ncnn` existed in `PATH` 2) python import ncnn success'
)
1) `mmdeploy_onnx2ncnn` existed in `PATH` \
2) python import ncnn success')
exit(1)

import mmdeploy.apis.ncnn as ncnn_api
4 changes: 2 additions & 2 deletions tools/onnx2ncnn.py
@@ -27,9 +27,9 @@ def main():
onnx_path = args.onnx_path
output_prefix = args.output_prefix

logger.info(f'onnx2ncnn: \n\tonnx_path: {onnx_path} ')
logger.info(f'mmdeploy_onnx2ncnn: \n\tonnx_path: {onnx_path} ')
from_onnx(onnx_path, output_prefix)
logger.info('onnx2ncnn success.')
logger.info('mmdeploy_onnx2ncnn success.')


if __name__ == '__main__':
2 changes: 2 additions & 0 deletions tools/package_tools/mmdeploy_builder.py
@@ -126,6 +126,8 @@ def _remove_in_mmdeploy(path):
# remove onnx2ncnn and ncnn ext
_remove_in_mmdeploy('mmdeploy/backend/ncnn/onnx2ncnn')
_remove_in_mmdeploy('mmdeploy/backend/ncnn/onnx2ncnn.exe')
_remove_in_mmdeploy('mmdeploy/backend/ncnn/mmdeploy_onnx2ncnn')
_remove_in_mmdeploy('mmdeploy/backend/ncnn/mmdeploy_onnx2ncnn.exe')
ncnn_ext_paths = glob(
osp.join(mmdeploy_dir, 'mmdeploy/backend/ncnn/ncnn_ext.*'))
for ncnn_ext_path in ncnn_ext_paths:
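`_remove_in_mmdeploy` itself is outside this diff; a purely hypothetical sketch of what such a helper does is to delete the given path relative to the packaged mmdeploy directory if it exists:

```python
import os
import os.path as osp


def _remove_in_mmdeploy(path, mmdeploy_dir='.'):
    # Hypothetical sketch: drop a stale converter artifact from the package tree.
    remove_path = osp.join(mmdeploy_dir, path)
    if osp.exists(remove_path):
        os.remove(remove_path)
```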