From 1684b05b07b104165c2c6916ee45b6289c29740c Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 13 Jul 2022 13:55:44 +0000 Subject: [PATCH 01/51] first commit for yolov7 --- fastdeploy/vision.h | 1 + fastdeploy/vision/wongkinyiu/__init__.py | 116 +++++++++ .../vision/wongkinyiu/wongkinyiu_pybind.cc | 41 ++++ fastdeploy/vision/wongkinyiu/yolov7.cc | 230 ++++++++++++++++++ fastdeploy/vision/wongkinyiu/yolov7.h | 87 +++++++ model_zoo/vision/yolov7/cpp/CMakeLists.txt | 18 ++ model_zoo/vision/yolov7/cpp/README.md | 30 +++ model_zoo/vision/yolov7/cpp/yolov7.cc | 40 +++ model_zoo/vision/yolov7/yolov7.py | 23 ++ 9 files changed, 586 insertions(+) create mode 100644 fastdeploy/vision/wongkinyiu/__init__.py create mode 100644 fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc create mode 100644 fastdeploy/vision/wongkinyiu/yolov7.cc create mode 100644 fastdeploy/vision/wongkinyiu/yolov7.h create mode 100644 model_zoo/vision/yolov7/cpp/CMakeLists.txt create mode 100644 model_zoo/vision/yolov7/cpp/README.md create mode 100644 model_zoo/vision/yolov7/cpp/yolov7.cc create mode 100644 model_zoo/vision/yolov7/yolov7.py diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index ca2b9a618a..821f3689e5 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -17,6 +17,7 @@ #ifdef ENABLE_VISION #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/ultralytics/yolov5.h" +#include "fastdeploy/vision/wongkinyiu/yolov7.h" #endif #include "fastdeploy/vision/visualize/visualize.h" diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py new file mode 100644 index 0000000000..e3ed7730e6 --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -0,0 +1,116 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import logging +from ... import FastDeployModel, Frontend +from ... import fastdeploy_main as C + + +class YOLOv7(FastDeployModel): + def __init__(self, + model_file, + params_file="", + runtime_option=None, + model_format=Frontend.ONNX): + # 调用基函数进行backend_option的初始化 + # 初始化后的option保存在self._runtime_option + super(YOLOv7, self).__init__(runtime_option) + + self._model = C.vision.yongkinyiu.YOLOv7( + model_file, params_file, self._runtime_option, model_format) + # 通过self.initialized判断整个模型的初始化是否成功 + assert self.initialized, "YOLOv7 initialize failed." 
+
+    def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5):
+        return self._model.predict(input_image, conf_threshold,
+                                   nms_iou_threshold)
+
+    # 一些跟YOLOv7模型有关的属性封装
+    # 多数是预处理相关,可通过修改如model.size = [1280, 1280]改变预处理时resize的大小(前提是模型支持)
+    @property
+    def size(self):
+        return self._model.size
+
+    @property
+    def padding_value(self):
+        return self._model.padding_value
+
+    @property
+    def is_no_pad(self):
+        return self._model.is_no_pad
+
+    @property
+    def is_mini_pad(self):
+        return self._model.is_mini_pad
+
+    @property
+    def is_scale_up(self):
+        return self._model.is_scale_up
+
+    @property
+    def stride(self):
+        return self._model.stride
+
+    @property
+    def max_wh(self):
+        return self._model.max_wh
+
+    @size.setter
+    def size(self, wh):
+        assert isinstance(wh, (list, tuple)),\
+            "The value to set `size` must be of type tuple or list."
+        assert len(wh) == 2,\
+            "The value to set `size` must contain 2 elements meaning [width, height], but now it contains {} elements.".format(
+                len(wh))
+        self._model.size = wh
+
+    @padding_value.setter
+    def padding_value(self, value):
+        assert isinstance(
+            value,
+            list), "The value to set `padding_value` must be of type list."
+        self._model.padding_value = value
+
+    @is_no_pad.setter
+    def is_no_pad(self, value):
+        assert isinstance(
+            value, bool), "The value to set `is_no_pad` must be of type bool."
+        self._model.is_no_pad = value
+
+    @is_mini_pad.setter
+    def is_mini_pad(self, value):
+        assert isinstance(
+            value,
+            bool), "The value to set `is_mini_pad` must be of type bool."
+        self._model.is_mini_pad = value
+
+    @is_scale_up.setter
+    def is_scale_up(self, value):
+        assert isinstance(
+            value,
+            bool), "The value to set `is_scale_up` must be of type bool."
+        self._model.is_scale_up = value
+
+    @stride.setter
+    def stride(self, value):
+        assert isinstance(
+            value, int), "The value to set `stride` must be of type int."
+        self._model.stride = value
+
+    @max_wh.setter
+    def max_wh(self, value):
+        assert isinstance(
+            value, float), "The value to set `max_wh` must be of type float."
+        self._model.max_wh = value
diff --git a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
new file mode 100644
index 0000000000..99f0aab628
--- /dev/null
+++ b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
@@ -0,0 +1,41 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "fastdeploy/pybind/main.h"
+
+namespace fastdeploy {
+void BindWongkinyiu(pybind11::module& m) {
+  auto yongkinyiu_module =
+      m.def_submodule("WongKinYiu", "https://github.com/WongKinYiu/yolov7");
+  pybind11::class_<vision::yongkinyiu::YOLOv7, FastDeployModel>(
+      yongkinyiu_module, "YOLOv7")
+      .def(pybind11::init<std::string, std::string, RuntimeOption, Frontend>())
+      .def("predict",
+           [](vision::yongkinyiu::YOLOv7& self, pybind11::array& data,
+              float conf_threshold, float nms_iou_threshold) {
+             auto mat = PyArrayToCvMat(data);
+             vision::DetectionResult res;
+             self.Predict(&mat, &res, conf_threshold, nms_iou_threshold);
+             return res;
+           })
+      .def_readwrite("size", &vision::yongkinyiu::YOLOv7::size)
+      .def_readwrite("padding_value",
+                     &vision::yongkinyiu::YOLOv7::padding_value)
+      .def_readwrite("is_mini_pad", &vision::yongkinyiu::YOLOv7::is_mini_pad)
+      .def_readwrite("is_no_pad", &vision::yongkinyiu::YOLOv7::is_no_pad)
+      .def_readwrite("is_scale_up", &vision::yongkinyiu::YOLOv7::is_scale_up)
+      .def_readwrite("stride", &vision::yongkinyiu::YOLOv7::stride)
+      .def_readwrite("max_wh", &vision::yongkinyiu::YOLOv7::max_wh);
+}
+}  // namespace fastdeploy
diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc
new file mode 100644
index 0000000000..09004b5c3c
--- /dev/null
+++ b/fastdeploy/vision/wongkinyiu/yolov7.cc
@@ -0,0 +1,230 @@
+#include "fastdeploy/vision/WongKinYiu/yolov7.h"
+#include "fastdeploy/utils/perf.h"
+#include "fastdeploy/vision/utils/utils.h"
+
+namespace fastdeploy {
+namespace vision {
+namespace wongkinyiu {
+
+void LetterBox(Mat* mat, std::vector<int> size, std::vector<float> color,
+               bool _auto, bool scale_fill = false, bool scale_up = true,
+               int stride = 32) {
+  float scale =
+      std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width());
+  if (!scale_up) {
+    scale = std::min(scale, 1.0f);
+  }
+
+  int resize_h = int(round(mat->Height() * scale));
+  int resize_w = int(round(mat->Width() * scale));
+
+  int pad_w = size[0] - resize_w;
+  int pad_h = size[1] - resize_h;
+  if (_auto) {
+    pad_h = pad_h % stride;
+    pad_w = pad_w % stride;
+  } else if (scale_fill) {
+    pad_h = 0;
+    pad_w = 0;
+    resize_h = size[1];
+    resize_w = size[0];
+  }
+  Resize::Run(mat, resize_w, resize_h);
+  if (pad_h > 0 || pad_w > 0) {
+    float half_h = pad_h * 1.0 / 2;
+    int top = int(round(half_h - 0.1));
+    int bottom = int(round(half_h + 0.1));
+    float half_w = pad_w * 1.0 / 2;
+    int left = int(round(half_w - 0.1));
+    int right = int(round(half_w + 0.1));
+    Pad::Run(mat, top, bottom, left, right, color);
+  }
+}
+
+YOLOv7::YOLOv7(const std::string& model_file, const std::string& params_file,
+               const RuntimeOption& custom_option,
+               const Frontend& model_format) {
+  if (model_format == Frontend::ONNX) {
+    valid_cpu_backends = {Backend::ORT};  // 指定可用的CPU后端
+    valid_gpu_backends = {Backend::ORT, Backend::TRT};  // 指定可用的GPU后端
+  } else {
+    valid_cpu_backends = {Backend::PDINFER, Backend::ORT};
+    valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT};
+  }
+  runtime_option = custom_option;
+  runtime_option.model_format = model_format;
+  runtime_option.model_file = model_file;
+  runtime_option.params_file = params_file;
+  initialized = Initialize();
+}
+
+bool YOLOv7::Initialize() {
+  // parameters for preprocess
+  size = {640, 640};
+  padding_value = {114.0, 114.0, 114.0};
+  is_mini_pad = false;
+  is_no_pad = false;
+  is_scale_up = false;
+  stride = 32;
+  max_wh = 7680.0;
+
+  if (!InitRuntime()) {
+    FDERROR << "Failed to initialize fastdeploy backend."
+            << std::endl;
+    return false;
+  }
+  return true;
+}
+
+bool YOLOv7::Preprocess(Mat* mat, FDTensor* output,
+                        std::map<std::string, std::array<float, 2>>* im_info) {
+  // process after image load
+  double ratio = (size[0] * 1.0) / std::max(static_cast<double>(mat->Height()),
+                                            static_cast<double>(mat->Width()));
+  if (ratio != 1.0) {
+    int interp = cv::INTER_AREA;
+    if (ratio > 1.0) {
+      interp = cv::INTER_LINEAR;
+    }
+    int resize_h = int(mat->Height() * ratio);
+    int resize_w = int(mat->Width() * ratio);
+    Resize::Run(mat, resize_w, resize_h, -1, -1, interp);
+  }
+  // yolov7's preprocess steps
+  // 1. letterbox
+  // 2. BGR->RGB
+  // 3. HWC->CHW
+  LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up,
+            stride);
+  BGR2RGB::Run(mat);
+  Normalize::Run(mat, std::vector<float>(mat->Channels(), 0.0),
+                 std::vector<float>(mat->Channels(), 1.0));
+
+  // Record output shape of preprocessed image
+  (*im_info)["output_shape"] = {static_cast<float>(mat->Height()),
+                                static_cast<float>(mat->Width())};
+
+  HWC2CHW::Run(mat);
+  Cast::Run(mat, "float");
+  mat->ShareWithTensor(output);
+  output->shape.insert(output->shape.begin(), 1);  // reshape to n, c, h, w
+  return true;
+}
+
+bool YOLOv7::Postprocess(
+    FDTensor& infer_result, DetectionResult* result,
+    const std::map<std::string, std::array<float, 2>>& im_info,
+    float conf_threshold, float nms_iou_threshold) {
+  FDASSERT(infer_result.shape[0] == 1, "Only support batch = 1 now.");
+  result->Clear();
+  result->Reserve(infer_result.shape[1]);
+  if (infer_result.dtype != FDDataType::FP32) {
+    FDERROR << "Only support post process with float32 data." << std::endl;
+    return false;
+  }
+  float* data = static_cast<float*>(infer_result.Data());
+  for (size_t i = 0; i < infer_result.shape[1]; ++i) {
+    int s = i * infer_result.shape[2];
+    float confidence = data[s + 4];
+    float* max_class_score =
+        std::max_element(data + s + 5, data + s + infer_result.shape[2]);
+    confidence *= (*max_class_score);
+    // filter boxes by conf_threshold
+    if (confidence <= conf_threshold) {
+      continue;
+    }
+    int32_t label_id = std::distance(data + s + 5, max_class_score);
+    // convert from [x, y, w, h] to [x1, y1, x2, y2]
+    result->boxes.emplace_back(std::array<float, 4>{
+        data[s] - data[s + 2] / 2.0f + label_id * max_wh,
+        data[s + 1] - data[s + 3] / 2.0f + label_id * max_wh,
+        data[s + 0] + data[s + 2] / 2.0f + label_id * max_wh,
+        data[s + 1] + data[s + 3] / 2.0f + label_id * max_wh});
+    result->label_ids.push_back(label_id);
+    result->scores.push_back(confidence);
+  }
+  utils::NMS(result, nms_iou_threshold);
+
+  // scale the boxes to the original image shape
+  auto iter_out = im_info.find("output_shape");
+  auto iter_ipt = im_info.find("input_shape");
+  FDASSERT(iter_out != im_info.end() && iter_ipt != im_info.end(),
+           "Cannot find input_shape or output_shape from im_info.");
+  float out_h = iter_out->second[0];
+  float out_w = iter_out->second[1];
+  float ipt_h = iter_ipt->second[0];
+  float ipt_w = iter_ipt->second[1];
+  float scale = std::min(out_h / ipt_h, out_w / ipt_w);
+  for (size_t i = 0; i < result->boxes.size(); ++i) {
+    float pad_h = (out_h - ipt_h * scale) / 2;
+    float pad_w = (out_w - ipt_w * scale) / 2;
+    int32_t label_id = (result->label_ids)[i];
+    // clip box
+    result->boxes[i][0] = result->boxes[i][0] - max_wh * label_id;
+    result->boxes[i][1] = result->boxes[i][1] - max_wh * label_id;
+    result->boxes[i][2] = result->boxes[i][2] - max_wh * label_id;
+    result->boxes[i][3] = result->boxes[i][3] - max_wh * label_id;
+    result->boxes[i][0] = std::max((result->boxes[i][0] - pad_w) / scale, 0.0f);
+    result->boxes[i][1] = std::max((result->boxes[i][1] - pad_h) / scale, 0.0f);
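+    // The "- max_wh * label_id" subtractions above undo the per-class offset
+    // that was added to every box before NMS: shifting boxes apart by
+    // label_id * max_wh lets one class-agnostic NMS pass behave like
+    // per-class NMS. The surrounding std::max/std::min lines then remove the
+    // letterbox padding, rescale the box to the original image size and clip
+    // it to the image borders.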
+    result->boxes[i][2] = std::max((result->boxes[i][2] - pad_w) / scale, 0.0f);
+    result->boxes[i][3] = std::max((result->boxes[i][3] - pad_h) / scale, 0.0f);
+    result->boxes[i][0] = std::min(result->boxes[i][0], ipt_w);
+    result->boxes[i][1] = std::min(result->boxes[i][1], ipt_h);
+    result->boxes[i][2] = std::min(result->boxes[i][2], ipt_w);
+    result->boxes[i][3] = std::min(result->boxes[i][3], ipt_h);
+  }
+  return true;
+}
+
+bool YOLOv7::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
+                     float nms_iou_threshold) {
+#ifdef FASTDEPLOY_DEBUG
+  TIMERECORD_START(0)
+#endif
+
+  Mat mat(*im);
+  std::vector<FDTensor> input_tensors(1);
+
+  std::map<std::string, std::array<float, 2>> im_info;
+
+  // Record the shape of image and the shape of preprocessed image
+  im_info["input_shape"] = {static_cast<float>(mat.Height()),
+                            static_cast<float>(mat.Width())};
+  im_info["output_shape"] = {static_cast<float>(mat.Height()),
+                             static_cast<float>(mat.Width())};
+
+  if (!Preprocess(&mat, &input_tensors[0], &im_info)) {
+    FDERROR << "Failed to preprocess input image." << std::endl;
+    return false;
+  }
+
+#ifdef FASTDEPLOY_DEBUG
+  TIMERECORD_END(0, "Preprocess")
+  TIMERECORD_START(1)
+#endif
+
+  input_tensors[0].name = InputInfoOfRuntime(0).name;
+  std::vector<FDTensor> output_tensors;
+  if (!Infer(input_tensors, &output_tensors)) {
+    FDERROR << "Failed to run inference." << std::endl;
+    return false;
+  }
+#ifdef FASTDEPLOY_DEBUG
+  TIMERECORD_END(1, "Inference")
+  TIMERECORD_START(2)
+#endif
+
+  if (!Postprocess(output_tensors[0], result, im_info, conf_threshold,
+                   nms_iou_threshold)) {
+    FDERROR << "Failed to post process." << std::endl;
+    return false;
+  }
+
+#ifdef FASTDEPLOY_DEBUG
+  TIMERECORD_END(2, "Postprocess")
+#endif
+  return true;
+}
+
+}  // namespace wongkinyiu
+}  // namespace vision
+}  // namespace fastdeploy
diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h
new file mode 100644
index 0000000000..b21c04936a
--- /dev/null
+++ b/fastdeploy/vision/wongkinyiu/yolov7.h
@@ -0,0 +1,87 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+#include "fastdeploy/fastdeploy_model.h"
+#include "fastdeploy/vision/common/processors/transform.h"
+#include "fastdeploy/vision/common/result.h"
+
+namespace fastdeploy {
+namespace vision {
+namespace wongkinyiu {
+
+class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel {
+ public:
+  // 当model_format为ONNX时,无需指定params_file
+  // 当model_format为Paddle时,则需同时指定model_file & params_file
+  YOLOv7(const std::string& model_file, const std::string& params_file = "",
+         const RuntimeOption& custom_option = RuntimeOption(),
+         const Frontend& model_format = Frontend::ONNX);
+
+  // 定义模型的名称
+  virtual std::string ModelName() const { return "WongKinYiu/yolov7"; }
+
+  // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作
+  virtual bool Initialize();
+
+  // 输入图像预处理操作
+  // Mat为FastDeploy定义的数据结构
+  // FDTensor为预处理后的Tensor数据,传给后端进行推理
+  // im_info为预处理过程保存的数据,在后处理中需要用到
+  virtual bool Preprocess(Mat* mat, FDTensor* outputs,
+                          std::map<std::string, std::array<float, 2>>* im_info);
+
+  // 后端推理结果后处理,输出给用户
+  // infer_result 为后端推理后的输出Tensor
+  // result 为模型预测的结果
+  // im_info 为预处理记录的信息,后处理用于还原box
+  // conf_threshold 后处理时过滤box的置信度阈值
+  // nms_iou_threshold 后处理时NMS设定的iou阈值
+  virtual bool Postprocess(
+      FDTensor& infer_result, DetectionResult* result,
+      const std::map<std::string, std::array<float, 2>>& im_info,
+      float conf_threshold, float nms_iou_threshold);
+
+  // 模型预测接口,即用户调用的接口
+  // im 为用户的输入数据,目前对于CV均定义为cv::Mat
+  // result 为模型预测的输出结构体
+  // conf_threshold 为后处理的参数
+  // nms_iou_threshold 为后处理的参数
+  virtual bool Predict(cv::Mat* im, DetectionResult* result,
+                       float conf_threshold = 0.25,
+                       float nms_iou_threshold = 0.5);
+
+  // 以下为模型在预测时的一些参数,基本是前后处理所需
+  // 用户在创建模型后,可根据模型的要求,以及自己的需求
+  // 对参数进行修改
+  // tuple of (width, height)
+  std::vector<int> size;
+  // padding value, size should be same with Channels
+  std::vector<float> padding_value;
+  // only pad to the minimum rectangle whose height and width are multiples of stride
+  bool is_mini_pad;
+  // when is_mini_pad = false and is_no_pad = true, the image is resized to
+  // the set size
+  bool is_no_pad;
+  // if is_scale_up is false, the input image can only be scaled down; the
+  // maximum resize scale cannot exceed 1.0
+  bool is_scale_up;
+  // padding stride, for is_mini_pad
+  int stride;
+  // for offsetting the boxes by class when using NMS
+  float max_wh;
+};
+}  // namespace wongkinyiu
+}  // namespace vision
+}  // namespace fastdeploy
diff --git a/model_zoo/vision/yolov7/cpp/CMakeLists.txt b/model_zoo/vision/yolov7/cpp/CMakeLists.txt
new file mode 100644
index 0000000000..b3b790698c
--- /dev/null
+++ b/model_zoo/vision/yolov7/cpp/CMakeLists.txt
@@ -0,0 +1,18 @@
+PROJECT(yolov7_demo C CXX)
+CMAKE_MINIMUM_REQUIRED (VERSION 3.16)
+
+# 在低版本ABI环境中,通过如下代码进行兼容性编译
+# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0)
+
+# 指定下载解压后的fastdeploy库路径
+set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3/)
+
+include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
+
+# 添加FastDeploy依赖头文件
+include_directories(${FASTDEPLOY_INCS})
+
+add_executable(yolov7_demo ${PROJECT_SOURCE_DIR}/yolov7.cc)
+# 添加FastDeploy库依赖
+target_link_libraries(yolov7_demo ${FASTDEPLOY_LIBS})
+
diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md
new file mode 100644
index 0000000000..dd740ff58a
--- /dev/null
+++ b/model_zoo/vision/yolov7/cpp/README.md
@@ -0,0 +1,30 @@
+# 编译YOLOv5示例
+
+
+```
+# 下载和解压预测库
+wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz
+tar xvf fastdeploy-linux-x64-0.0.3.tgz
+
+# 编译示例代码
+mkdir build && cd build
+cmake ..
+make -j + +# 下载模型和图片 +wget https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx +wget https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg + +# 执行 +./yolov5_demo +``` + +执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 +668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 +50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 +23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 +0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 +``` diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc new file mode 100644 index 0000000000..4b89972859 --- /dev/null +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -0,0 +1,40 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + auto model = vis::wongkinyiu::YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx"); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + cv::Mat im = cv::imread("bus.jpg"); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." 
<< std::endl; + return -1; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite("vis_result.jpg", vis_im); + return 0; +} diff --git a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py new file mode 100644 index 0000000000..c502c66366 --- /dev/null +++ b/model_zoo/vision/yolov7/yolov7.py @@ -0,0 +1,23 @@ +import fastdeploy as fd +import cv2 + +# 下载模型和测试图片 +model_url = "https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx" +test_jpg_url = "https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg" +fd.download(model_url, ".", show_progress=True) +fd.download(test_jpg_url, ".", show_progress=True) + +# 加载模型 +model = fd.vision.ultralytics.YOLOv5("yolov5s.onnx") + +# 预测图片 +im = cv2.imread("bus.jpg") +result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5) + +# 可视化结果 +fd.vision.visualize.vis_detection(im, result) +cv2.imwrite("vis_result.jpg", im) + +# 输出预测结果 +print(result) +print(model.runtime_option) From 71c00d94e12c6a52afc1342de893bec2f7850ae2 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 14 Jul 2022 07:02:55 +0000 Subject: [PATCH 02/51] pybind for yolov7 --- fastdeploy/vision/__init__.py | 1 + fastdeploy/vision/vision_pybind.cc | 2 ++ fastdeploy/vision/wongkinyiu/__init__.py | 2 +- .../vision/wongkinyiu/wongkinyiu_pybind.cc | 24 +++++++++---------- model_zoo/vision/yolov7/yolov7.py | 8 +++---- 5 files changed, 20 insertions(+), 17 deletions(-) diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 810b23cd3d..1ea30c35ae 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -17,3 +17,4 @@ from . import ppcls from . import ultralytics from . import visualize +from . import wongkinyiu diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index f3c3f0052d..5d79ffb2a6 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -17,6 +17,7 @@ namespace fastdeploy { void BindPpClsModel(pybind11::module& m); +void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); #ifdef ENABLE_VISION_VISUALIZE void BindVisualize(pybind11::module& m); @@ -40,6 +41,7 @@ void BindVision(pybind11::module& m) { BindPpClsModel(m); BindUltralytics(m); + BindWongkinyiu(m); BindVisualize(m); } } // namespace fastdeploy diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py index e3ed7730e6..0ce06209fc 100644 --- a/fastdeploy/vision/wongkinyiu/__init__.py +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -28,7 +28,7 @@ def __init__(self, # 初始化后的option保存在self._runtime_option super(YOLOv7, self).__init__(runtime_option) - self._model = C.vision.yongkinyiu.YOLOv7( + self._model = C.vision.wongkinyiu.YOLOv7( model_file, params_file, self._runtime_option, model_format) # 通过self.initialized判断整个模型的初始化是否成功 assert self.initialized, "YOLOv7 initialize failed." 
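
With the submodule renamed to `wongkinyiu` on both the C++ and Python sides, the wrapper above resolves `C.vision.wongkinyiu.YOLOv7`, and the preprocessing parameters bound via `def_readwrite` become plain Python attributes. A minimal sketch of the resulting Python API, assuming a locally exported `yolov7.onnx` and a local test image (both file names are placeholders, not files shipped with this patch):

```
import cv2
import fastdeploy as fd

# ONNX frontend: only model_file is required (no params_file).
model = fd.vision.wongkinyiu.YOLOv7("yolov7.onnx")

# Preprocessing knobs exposed by the binding; size is [width, height].
model.size = [640, 640]

# predict() takes an image as read by cv2.imread, with the same default
# thresholds as the C++ Predict().
im = cv2.imread("horses.jpg")
result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5)

# Visualize and save, mirroring model_zoo/vision/yolov7/yolov7.py.
fd.vision.visualize.vis_detection(im, result)
cv2.imwrite("vis_result.jpg", im)
print(result)
```
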
diff --git a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
index 99f0aab628..4a10f47a76 100644
--- a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
+++ b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
@@ -16,26 +16,26 @@
 
 namespace fastdeploy {
 void BindWongkinyiu(pybind11::module& m) {
-  auto yongkinyiu_module =
-      m.def_submodule("WongKinYiu", "https://github.com/WongKinYiu/yolov7");
-  pybind11::class_<vision::yongkinyiu::YOLOv7, FastDeployModel>(
-      yongkinyiu_module, "YOLOv7")
+  auto wongkinyiu_module =
+      m.def_submodule("wongkinyiu", "https://github.com/WongKinYiu/yolov7");
+  pybind11::class_<vision::wongkinyiu::YOLOv7, FastDeployModel>(
+      wongkinyiu_module, "YOLOv7")
       .def(pybind11::init<std::string, std::string, RuntimeOption, Frontend>())
       .def("predict",
-           [](vision::yongkinyiu::YOLOv7& self, pybind11::array& data,
+           [](vision::wongkinyiu::YOLOv7& self, pybind11::array& data,
               float conf_threshold, float nms_iou_threshold) {
             auto mat = PyArrayToCvMat(data);
             vision::DetectionResult res;
             self.Predict(&mat, &res, conf_threshold, nms_iou_threshold);
             return res;
           })
-      .def_readwrite("size", &vision::yongkinyiu::YOLOv7::size)
-      .def_readwrite("padding_value",
-                     &vision::yongkinyiu::YOLOv7::padding_value)
-      .def_readwrite("is_mini_pad", &vision::yongkinyiu::YOLOv7::is_mini_pad)
-      .def_readwrite("is_no_pad", &vision::yongkinyiu::YOLOv7::is_no_pad)
-      .def_readwrite("is_scale_up", &vision::yongkinyiu::YOLOv7::is_scale_up)
-      .def_readwrite("stride", &vision::yongkinyiu::YOLOv7::stride)
-      .def_readwrite("max_wh", &vision::yongkinyiu::YOLOv7::max_wh);
+      .def_readwrite("size", &vision::wongkinyiu::YOLOv7::size)
+      .def_readwrite("padding_value",
+                     &vision::wongkinyiu::YOLOv7::padding_value)
+      .def_readwrite("is_mini_pad", &vision::wongkinyiu::YOLOv7::is_mini_pad)
+      .def_readwrite("is_no_pad", &vision::wongkinyiu::YOLOv7::is_no_pad)
+      .def_readwrite("is_scale_up", &vision::wongkinyiu::YOLOv7::is_scale_up)
+      .def_readwrite("stride", &vision::wongkinyiu::YOLOv7::stride)
+      .def_readwrite("max_wh", &vision::wongkinyiu::YOLOv7::max_wh);
 }
 }  // namespace fastdeploy
diff --git a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py
index c502c66366..81c529b15b 100644
--- a/model_zoo/vision/yolov7/yolov7.py
+++ b/model_zoo/vision/yolov7/yolov7.py
@@ -2,16 +2,16 @@
 import cv2
 
 # 下载模型和测试图片
-model_url = "https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx"
-test_jpg_url = "https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg"
+model_url = "TODO "
+test_jpg_url = "https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg"
 fd.download(model_url, ".", show_progress=True)
 fd.download(test_jpg_url, ".", show_progress=True)
 
 # 加载模型
-model = fd.vision.ultralytics.YOLOv5("yolov5s.onnx")
+model = fd.vision.wongkinyiu.YOLOv7("yolov7.onnx")
 
 # 预测图片
-im = cv2.imread("bus.jpg")
+im = cv2.imread("horses.jpg")
 result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5)
 
 # 可视化结果
From 21ab2f939c8e1469f320826808c5d430234e25fd Mon Sep 17 00:00:00 2001
From: ziqi-jin
Date: Thu, 14 Jul 2022 07:14:03 +0000
Subject: [PATCH 03/51] CPP README.md

---
 model_zoo/vision/yolov7/cpp/README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md
index dd740ff58a..f19c0625d1 100644
--- a/model_zoo/vision/yolov7/cpp/README.md
+++ b/model_zoo/vision/yolov7/cpp/README.md
@@ -12,11 +12,11 @@ cmake ..
make -j # 下载模型和图片 -wget https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx -wget https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg +wget "TODO" +wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg # 执行 -./yolov5_demo +./yolov7_demo ``` 执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 From d63e862f919d0ce9025f78271a03e9a122d2ccdd Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 14 Jul 2022 07:14:30 +0000 Subject: [PATCH 04/51] CPP README.md --- model_zoo/vision/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index f19c0625d1..b43d4381e5 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,4 +1,4 @@ -# 编译YOLOv5示例 +# 编译YOLOv7示例 ``` From 7b3b0e271072987f11fb8ffabdc8d276cf878fa0 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 14 Jul 2022 09:54:30 +0000 Subject: [PATCH 05/51] modified yolov7.cc --- fastdeploy/vision/wongkinyiu/yolov7.cc | 2 +- model_zoo/vision/yolov7/cpp/CMakeLists.txt | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc index 09004b5c3c..6baf4c336b 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -1,4 +1,4 @@ -#include "fastdeploy/vision/WongKinYiu/yolov7.h" +#include "fastdeploy/vision/wongkinyiu/yolov7.h" #include "fastdeploy/utils/perf.h" #include "fastdeploy/vision/utils/utils.h" diff --git a/model_zoo/vision/yolov7/cpp/CMakeLists.txt b/model_zoo/vision/yolov7/cpp/CMakeLists.txt index b3b790698c..09f07b1748 100644 --- a/model_zoo/vision/yolov7/cpp/CMakeLists.txt +++ b/model_zoo/vision/yolov7/cpp/CMakeLists.txt @@ -5,7 +5,7 @@ CMAKE_MINIMUM_REQUIRED (VERSION 3.16) # add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) # 指定下载解压后的fastdeploy库路径 -set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3/) +set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3) include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) @@ -14,5 +14,4 @@ include_directories(${FASTDEPLOY_INCS}) add_executable(yolov7_demo ${PROJECT_SOURCE_DIR}/yolov7.cc) # 添加FastDeploy库依赖 -target_link_libraries(yolov7_demo ${FASTDEPLOY_LIBS}) - +target_link_libraries(yolov7_demo ${FASTDEPLOY_LIBS}) \ No newline at end of file From d039e800190e484c583509c3b0e97eb2222f32e9 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 15 Jul 2022 05:11:01 +0000 Subject: [PATCH 06/51] README.md --- model_zoo/vision/yolov7/README.md | 80 +++++++++++++++++++++++++++++++ model_zoo/vision/yolov7/api.md | 71 +++++++++++++++++++++++++++ 2 files changed, 151 insertions(+) create mode 100644 model_zoo/vision/yolov7/README.md create mode 100644 model_zoo/vision/yolov7/api.md diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md new file mode 100644 index 0000000000..80f9aa0fac --- /dev/null +++ b/model_zoo/vision/yolov7/README.md @@ -0,0 +1,80 @@ +# 编译YOLOv7示例 + +本文档说明如何进行[YOLOv7](https://github.com/WongKinYiu/yolov7)的快速部署推理。本目录结构如下 + +``` +. 
├── cpp
+│   ├── CMakeLists.txt
+│   ├── README.md
+│   └── yolov7.cc
+├── README.md
+└── yolov7.py
+```
+
+## 生成ONNX文件
+
+- 手动获取
+
+  访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。
+
+
+  ```
+  # 下载yolov7模型文件
+  wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt
+
+  # 导出onnx格式文件
+  python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt
+  ```
+
+
+- 从PaddlePaddle获取
+
+## Python部署
+
+### 安装FastDeploy
+
+使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu`
+
+```
+# 安装fastdeploy-python工具
+pip install fastdeploy-python
+
+# 安装vision-cpu模块
+fastdeploy install vision-cpu
+```
+
+### 运行demo
+
+```
+python yolov7.py
+```
+
+
+## C++部署
+
+### 编译demo文件
+
+```
+# 切换到./cpp/ 目录下
+cd cpp/
+
+# 下载和解压预测库
+wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz
+tar xvf fastdeploy-linux-x64-0.0.3.tgz
+
+# 编译示例代码
+mkdir build && cd build
+cmake ..
+make -j
+```
+
+
+
+
diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md
new file mode 100644
index 0000000000..898a3f585f
--- /dev/null
+++ b/model_zoo/vision/yolov7/api.md
@@ -0,0 +1,71 @@
+# YOLOv7 API说明
+
+## Python API
+
+### YOLOv7类
+```
+fastdeploy.vision.wongkinyiu.YOLOv7(model_file, params_file="", runtime_option=None, model_format=fd.Frontend.ONNX)
+```
+YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只需提供model_file,如`yolov7.onnx`;当model_format为`fd.Frontend.PADDLE`时,则需同时提供model_file和params_file。
+
+**参数**
+
+> * **model_file**(str): 模型文件路径
+> * **params_file**(str): 参数文件路径
+> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置
+> * **model_format**(Frontend): 模型格式
+
+#### predict函数
+> ```
+> YOLOv7.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5)
+> ```
+> 模型预测接口,输入图像直接输出检测结果。
+>
+> **参数**
+>
+> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,RGB格式
+> > * **conf_threshold**(float): 检测框置信度过滤阈值
+> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值
+
+示例代码参考[yolov7.py](./yolov7.py)
+
+
+## C++ API
+
+### YOLOv7类
+```
+fastdeploy::vision::wongkinyiu::YOLOv7(
+        const std::string& model_file,
+        const std::string& params_file = "",
+        const RuntimeOption& runtime_option = RuntimeOption(),
+        const Frontend& model_format = Frontend::ONNX)
+```
+YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需提供model_file,如`yolov7.onnx`;当model_format为`Frontend::PADDLE`时,则需同时提供model_file和params_file。
+
+**参数**
+
+> * **model_file**(str): 模型文件路径
+> * **params_file**(str): 参数文件路径
+> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置
+> * **model_format**(Frontend): 模型格式
+
+#### Predict函数
+> ```
+> YOLOv7::Predict(cv::Mat* im, DetectionResult* result,
+>                 float conf_threshold = 0.25,
+>                 float nms_iou_threshold = 0.5)
+> ```
+> 模型预测接口,输入图像直接输出检测结果。
+>
+> **参数**
+>
+> > * **im**: 输入图像,注意需为HWC,RGB格式
+> > * **result**: 检测结果,包括检测框,各个框的置信度
+> > * **conf_threshold**: 检测框置信度过滤阈值
+> > * **nms_iou_threshold**: NMS处理过程中iou阈值
+
+示例代码参考[cpp/yolov7.cc](cpp/yolov7.cc)
+
+## 其它API使用
+
+- [模型部署RuntimeOption配置](../../../docs/api/runtime_option.md)
From a34a815de844834bfcacc8154ab206587b9a7b0b Mon Sep 17 00:00:00 2001
From: ziqi-jin
Date: Mon, 18 Jul 2022 03:14:38 +0000
Subject: [PATCH 07/51] python file modify

---
 fastdeploy/LICENSE                       | 201 +++++++
 fastdeploy/ThirdPartyNotices.txt         | 734 +++++++++++++++++++++++
 fastdeploy/vision/wongkinyiu/__init__.py |  28 +-
 model_zoo/vision/yolov7/yolov7.py        |   8 +-
 4 files changed, 953 insertions(+), 18 deletions(-)
 create mode 100644 fastdeploy/LICENSE
 create mode 100644 
fastdeploy/ThirdPartyNotices.txt diff --git a/fastdeploy/LICENSE b/fastdeploy/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/fastdeploy/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/fastdeploy/ThirdPartyNotices.txt b/fastdeploy/ThirdPartyNotices.txt new file mode 100644 index 0000000000..5842b9a717 --- /dev/null +++ b/fastdeploy/ThirdPartyNotices.txt @@ -0,0 +1,734 @@ +This project depends on some open source projects, list as below + +-------- +1. https://github.com/protocolbuffers/protobuf + +Copyright 2008 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. + +-------- +2. https://github.com/onnx/onnx + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------- +3. 
https://github.com/microsoft/onnxruntime + +MIT License + +Copyright (c) Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------- +4. https://github.com/pybind/pybind11 + +Copyright (c) 2016 Wenzel Jakob , All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Please also refer to the file .github/CONTRIBUTING.md, which clarifies licensing of +external contributions to this project including patches, pull requests, etc. + +-------- +4. https://github.com/onnx/onnx-tensorrt + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 NVIDIA Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------- +5. 
https://github.com/opencv/opencv + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------- +6. https://github.com/jbeder/yaml-cpp + +Copyright (c) 2008-2015 Jesse Beder. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py index 0ce06209fc..542389e208 100644 --- a/fastdeploy/vision/wongkinyiu/__init__.py +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -41,31 +41,31 @@ def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5): # 多数是预处理相关,可通过修改如model.size = [1280, 1280]改变预处理时resize的大小(前提是模型支持) @property def size(self): - return self.model.size + return self._model.size @property def padding_value(self): - return self.model.padding_value + return self._model.padding_value @property def is_no_pad(self): - return self.model.is_no_pad + return self._model.is_no_pad @property def is_mini_pad(self): - return self.model.is_mini_pad + return self._model.is_mini_pad @property def is_scale_up(self): - return self.model.is_scale_up + return self._model.is_scale_up @property def stride(self): - return self.model.stride + return self._model.stride @property def max_wh(self): - return self.model.max_wh + return self._model.max_wh @size.setter def size(self, wh): @@ -74,43 +74,43 @@ def size(self, wh): assert len(wh) == 2,\ "The value to set `size` must contatins 2 elements means [width, height], but now it contains {} elements.".format( len(wh)) - self.model.size = wh + self._model.size = wh @padding_value.setter def padding_value(self, value): assert isinstance( value, list), "The value to set `padding_value` must be type of list." - self.model.padding_value = value + self._model.padding_value = value @is_no_pad.setter def is_no_pad(self, value): assert isinstance( value, bool), "The value to set `is_no_pad` must be type of bool." - self.model.is_no_pad = value + self._model.is_no_pad = value @is_mini_pad.setter def is_mini_pad(self, value): assert isinstance( value, bool), "The value to set `is_mini_pad` must be type of bool." 
- self.model.is_mini_pad = value + self._model.is_mini_pad = value @is_scale_up.setter def is_scale_up(self, value): assert isinstance( value, bool), "The value to set `is_scale_up` must be type of bool." - self.model.is_scale_up = value + self._model.is_scale_up = value @stride.setter def stride(self, value): assert isinstance( value, int), "The value to set `stride` must be type of int." - self.model.stride = value + self._model.stride = value @max_wh.setter def max_wh(self, value): assert isinstance( value, float), "The value to set `max_wh` must be type of float." - self.model.max_wh = value + self._model.max_wh = value diff --git a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py index 81c529b15b..ca8aeeaf88 100644 --- a/model_zoo/vision/yolov7/yolov7.py +++ b/model_zoo/vision/yolov7/yolov7.py @@ -2,13 +2,13 @@ import cv2 # 下载模型和测试图片 -model_url = "TODO " -test_jpg_url = "https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg" -fd.download(model_url, ".", show_progress=True) +# model_url = "TODO " +test_jpg_url = "https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg" +# fd.download(model_url, ".", show_progress=True) fd.download(test_jpg_url, ".", show_progress=True) # 加载模型 -model = fd.vision.wongkinyiu.YOLOv7("yolov7.onnx") +model = fd.vision.wongkinyiu.YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx") # 预测图片 im = cv2.imread("horses.jpg") From 39f64f2f5c0c0c479fa7219b1b436f61d625a61f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:03:08 +0000 Subject: [PATCH 08/51] delete license in fastdeploy/ --- fastdeploy/LICENSE | 201 --------- fastdeploy/ThirdPartyNotices.txt | 734 ------------------------------- 2 files changed, 935 deletions(-) delete mode 100644 fastdeploy/LICENSE delete mode 100644 fastdeploy/ThirdPartyNotices.txt diff --git a/fastdeploy/LICENSE b/fastdeploy/LICENSE deleted file mode 100644 index 261eeb9e9f..0000000000 --- a/fastdeploy/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/fastdeploy/ThirdPartyNotices.txt b/fastdeploy/ThirdPartyNotices.txt deleted file mode 100644 index 5842b9a717..0000000000 --- a/fastdeploy/ThirdPartyNotices.txt +++ /dev/null @@ -1,734 +0,0 @@ -This project depends on some open source projects, list as below - --------- -1. https://github.com/protocolbuffers/protobuf - -Copyright 2008 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. 
nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Code generated by the Protocol Buffer compiler is owned by the owner -of the input file used when generating it. This code is not -standalone and requires a support library to be linked with it. This -support library is itself covered by the above license. - --------- -2. https://github.com/onnx/onnx - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --------- -3. https://github.com/microsoft/onnxruntime - -MIT License - -Copyright (c) Microsoft Corporation - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - --------- -4. https://github.com/pybind/pybind11 - -Copyright (c) 2016 Wenzel Jakob , All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Please also refer to the file .github/CONTRIBUTING.md, which clarifies licensing of -external contributions to this project including patches, pull requests, etc. - --------- -4. https://github.com/onnx/onnx-tensorrt - - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2021 NVIDIA Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --------- -5. https://github.com/opencv/opencv - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --------- -6. https://github.com/jbeder/yaml-cpp - -Copyright (c) 2008-2015 Jesse Beder. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. From d071b3702c39386dc3cc9a19af7e0ee56b36cdca Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:15:17 +0000 Subject: [PATCH 09/51] repush the conflict part --- fastdeploy/vision.h | 3 --- fastdeploy/vision/vision_pybind.cc | 3 --- 2 files changed, 6 deletions(-) diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index 4e83d2681c..5f948092d7 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -17,11 +17,8 @@ #ifdef ENABLE_VISION #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/ultralytics/yolov5.h" -<<<<<<< HEAD #include "fastdeploy/vision/wongkinyiu/yolov7.h" -======= #include "fastdeploy/vision/meituan/yolov6.h" ->>>>>>> PaddlePaddle-develop #endif #include "fastdeploy/vision/visualize/visualize.h" diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index 256fb1e114..bc54e0d674 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -42,12 +42,9 @@ void BindVision(pybind11::module& m) { BindPpClsModel(m); BindUltralytics(m); -<<<<<<< HEAD BindWongkinyiu(m); -======= BindMeituan(m); #ifdef ENABLE_VISION_VISUALIZE ->>>>>>> PaddlePaddle-develop BindVisualize(m); #endif } From d5026ca1e47612b7ab85fb27a2730ea350dfc211 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:44:54 +0000 Subject: [PATCH 10/51] README.md modified --- model_zoo/vision/yolov7/README.md | 36 +++++++++------------- model_zoo/vision/yolov7/api.md | 8 ++--- model_zoo/vision/yolov7/cpp/CMakeLists.txt | 2 +- model_zoo/vision/yolov7/cpp/README.md | 3 +- model_zoo/vision/yolov7/cpp/yolov7.cc | 2 +- model_zoo/vision/yolov7/yolov7.py | 4 +-- 6 files changed, 24 insertions(+), 31 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 80f9aa0fac..93a6f81188 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -32,9 +32,9 @@ - 从PaddlePaddle获取 -## Python部署 -### 安装FastDeploy + +## 安装FastDeploy 使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` @@ -45,33 +45,27 @@ pip install fastdeploy-python # 安装vision-cpu模块 fastdeploy install vision-cpu ``` +## Python部署 -### 运行demo - +执行如下代码即会自动下载测试图片 ``` python yolov7.py ``` - - -## C++部署 - -### 编译demo文件 - +执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 ``` -# 切换到./cpp/ 目录下 -cd cpp/ - -# 下载和解压预测库 -wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz -tar xvf fastdeploy-linux-x64-0.0.3.tgz - -# 编译示例代码 -mkdir build & cd build 
-cmake .. -make -j +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 +668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 +50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 +23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 +0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 ``` +## 其它文档 + +- [C++部署](./cpp/README.md) +- [YOLOv7 API文档](./api.md) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 898a3f585f..7c5fc30163 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -4,9 +4,9 @@ ### YOLOv7类 ``` -fastdeploy.vision.ultralytics.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=fd.Frontend.ONNX) +fastdeploy.vision.wongkinyiu.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=fd.Frontend.ONNX) ``` -YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只需提供model_file,如`yolov7s.onnx`;当model_format为`fd.Frontend.PADDLE`时,则需同时提供model_file和params_file。 +YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只需提供model_file,如`yolov7.onnx`;当model_format为`fd.Frontend.PADDLE`时,则需同时提供model_file和params_file。 **参数** @@ -34,13 +34,13 @@ YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只 ### YOLOv7类 ``` -fastdeploy::vision::ultralytics::YOLOv7( +fastdeploy::vision::wongkinyiu::YOLOv7( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), const Frontend& model_format = Frontend::ONNX) ``` -YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需提供model_file,如`yolov7s.onnx`;当model_format为`Frontend::PADDLE`时,则需同时提供model_file和params_file。 +YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需提供model_file,如`yolov7.onnx`;当model_format为`Frontend::PADDLE`时,则需同时提供model_file和params_file。 **参数** diff --git a/model_zoo/vision/yolov7/cpp/CMakeLists.txt b/model_zoo/vision/yolov7/cpp/CMakeLists.txt index 09f07b1748..ec7c86d026 100644 --- a/model_zoo/vision/yolov7/cpp/CMakeLists.txt +++ b/model_zoo/vision/yolov7/cpp/CMakeLists.txt @@ -5,7 +5,7 @@ CMAKE_MINIMUM_REQUIRED (VERSION 3.16) # add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) # 指定下载解压后的fastdeploy库路径 -set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3) +set(FASTDEPLOY_INSTALL_DIR ${PROJECT_SOURCE_DIR}/fastdeploy-linux-x64-0.3.0/) include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index b43d4381e5..fd46e210f8 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -26,5 +26,6 @@ DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] 668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 -0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 +0.737200,552.281006, 78.617218, 890.945007, 0.36341 ``` + diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 4b89972859..1607b2be09 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -16,7 +16,7 @@ int main() { namespace vis = fastdeploy::vision; - auto model = vis::wongkinyiu::YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx"); + auto model = vis::wongkinyiu::YOLOv7("yolov7.onnx"); if (!model.Initialized()) { std::cerr << "Init Failed." 
<< std::endl; return -1; diff --git a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py index ca8aeeaf88..cef467622d 100644 --- a/model_zoo/vision/yolov7/yolov7.py +++ b/model_zoo/vision/yolov7/yolov7.py @@ -2,13 +2,11 @@ import cv2 # 下载模型和测试图片 -# model_url = "TODO " test_jpg_url = "https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg" -# fd.download(model_url, ".", show_progress=True) fd.download(test_jpg_url, ".", show_progress=True) # 加载模型 -model = fd.vision.wongkinyiu.YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx") +model = fd.vision.wongkinyiu.YOLOv7("yolov7.onnx") # 预测图片 im = cv2.imread("horses.jpg") From fb376adf9616b9b3aa3d515c739655567161722b Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:46:19 +0000 Subject: [PATCH 11/51] README.md modified --- model_zoo/vision/yolov7/README.md | 10 +++++----- model_zoo/vision/yolov7/cpp/README.md | 10 +++++----- model_zoo/vision/yolov7/cpp/yolov7.cc | 2 +- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 93a6f81188..77e7a654d1 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -55,11 +55,11 @@ python yolov7.py 执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 ``` DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 -668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 -50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 -23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 -0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 +0.056616,191.221619, 314.871063, 409.948914, 0.955449, 17 +432.547852,211.914841, 594.904297, 346.708618, 0.942706, 17 +0.000000,185.456207, 153.967789, 286.157562, 0.860487, 17 +224.049210,195.147003, 419.658234, 364.004852, 0.798262, 17 +369.316986,209.055725, 456.373840, 321.627625, 0.687066, 17 ``` ## 其它文档 diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index fd46e210f8..012d4c765b 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -22,10 +22,10 @@ wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg 执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 ``` DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 -668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 -50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 -23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 -0.737200,552.281006, 78.617218, 890.945007, 0.36341 +0.056616,191.221619, 314.871063, 409.948914, 0.955449, 17 +432.547852,211.914841, 594.904297, 346.708618, 0.942706, 17 +0.000000,185.456207, 153.967789, 286.157562, 0.860487, 17 +224.049210,195.147003, 419.658234, 364.004852, 0.798262, 17 +369.316986,209.055725, 456.373840, 321.627625, 0.687066, 17 ``` diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 1607b2be09..8b41c0288b 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -21,7 +21,7 @@ int main() { std::cerr << "Init Failed." 
<< std::endl; return -1; } - cv::Mat im = cv::imread("bus.jpg"); + cv::Mat im = cv::imread("horses.jpg"); cv::Mat vis_im = im.clone(); vis::DetectionResult res; From 4b8737c9c0577c1a6ba0132ad76b6e72aa9e8e20 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:54:11 +0000 Subject: [PATCH 12/51] file path modified --- model_zoo/vision/yolov7/README.md | 3 +++ model_zoo/vision/yolov7/cpp/yolov7.cc | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 77e7a654d1..70841fa61e 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -26,6 +26,9 @@ # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + + # 移动onnx文件到demo目录 + cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 8b41c0288b..6d2a80a85c 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -16,12 +16,12 @@ int main() { namespace vis = fastdeploy::vision; - auto model = vis::wongkinyiu::YOLOv7("yolov7.onnx"); + auto model = vis::wongkinyiu::YOLOv7("../yolov7.onnx"); if (!model.Initialized()) { std::cerr << "Init Failed." << std::endl; return -1; } - cv::Mat im = cv::imread("horses.jpg"); + cv::Mat im = cv::imread("../horses.jpg"); cv::Mat vis_im = im.clone(); vis::DetectionResult res; From ce922a0326c8dc14964476be7501a896d9e39302 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:57:49 +0000 Subject: [PATCH 13/51] file path modified --- model_zoo/vision/yolov7/cpp/README.md | 27 +++++++++++++++++++++++++-- model_zoo/vision/yolov7/cpp/yolov7.cc | 4 ++-- 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 012d4c765b..bef869f881 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,5 +1,29 @@ # 编译YOLOv7示例 +## 生成ONNX文件 + +- 手动获取 + + 访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + + + + ``` + #下载yolov7模型文件 + wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt + + # 导出onnx格式文件 + python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + + # 移动onnx文件到demo目录 + cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ + ``` + + + +- 从PaddlePaddle获取 + + ``` # 下载和解压预测库 @@ -11,8 +35,7 @@ mkdir build & cd build cmake .. make -j -# 下载模型和图片 -wget "TODO" +# 下载图片 wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg # 执行 diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 6d2a80a85c..8b41c0288b 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -16,12 +16,12 @@ int main() { namespace vis = fastdeploy::vision; - auto model = vis::wongkinyiu::YOLOv7("../yolov7.onnx"); + auto model = vis::wongkinyiu::YOLOv7("yolov7.onnx"); if (!model.Initialized()) { std::cerr << "Init Failed." 
<< std::endl; return -1; } - cv::Mat im = cv::imread("../horses.jpg"); + cv::Mat im = cv::imread("horses.jpg"); cv::Mat vis_im = im.clone(); vis::DetectionResult res; From 6e00b82b40e3e8d19944408379ed11fb77a90073 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:59:58 +0000 Subject: [PATCH 14/51] file path modified --- model_zoo/vision/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index bef869f881..1b577a7a3a 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -23,7 +23,7 @@ - 从PaddlePaddle获取 - +## 运行demo ``` # 下载和解压预测库 From 8c359fb9defa42ccd404890d26bc55b8f063c176 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:02:31 +0000 Subject: [PATCH 15/51] file path modified --- model_zoo/vision/yolov7/cpp/README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 1b577a7a3a..918625eea7 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -15,8 +15,6 @@ # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt - # 移动onnx文件到demo目录 - cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` @@ -35,6 +33,9 @@ mkdir build & cd build cmake .. make -j +# 移动onnx文件到demo目录 +cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ + # 下载图片 wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg From 906c730255d7e4e1198784f45918984dcfe9820f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:03:37 +0000 Subject: [PATCH 16/51] file path modified --- model_zoo/vision/yolov7/README.md | 2 +- model_zoo/vision/yolov7/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 70841fa61e..7246a4a7b7 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -12,7 +12,7 @@ └── yolov7.py ``` -## 生成ONNX文件 +## 获取ONNX文件 - 手动获取 diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 918625eea7..ce6337962d 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,6 +1,6 @@ # 编译YOLOv7示例 -## 生成ONNX文件 +## 获取ONNX文件 - 手动获取 From 80c12230f5447966d363f34f57a15abeda1951ae Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:36:14 +0000 Subject: [PATCH 17/51] README modified --- model_zoo/vision/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index ce6337962d..0fcaf8ae11 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -37,7 +37,7 @@ make -j cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ # 下载图片 -wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg +wget hhttps://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg # 执行 ./yolov7_demo From 6072757fe8af3a7f2a666b638a379865d26e9e59 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:36:46 +0000 Subject: [PATCH 18/51] README modified --- model_zoo/vision/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md 
b/model_zoo/vision/yolov7/cpp/README.md index 0fcaf8ae11..a1d146053a 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -37,7 +37,7 @@ make -j cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ # 下载图片 -wget hhttps://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg +wget https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg # 执行 ./yolov7_demo From 2c6e6a4836b6c20c4a3ebc562d9cf3722c414423 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 08:25:58 +0000 Subject: [PATCH 19/51] move some helpers to private --- fastdeploy/vision/wongkinyiu/yolov7.h | 43 ++++++++++++++------------- model_zoo/vision/yolov7/api.md | 2 +- 2 files changed, 23 insertions(+), 22 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index b21c04936a..29dffaf2f4 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -32,27 +32,6 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // 定义模型的名称 virtual std::string ModelName() const { return "WongKinYiu/yolov7"; } - // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 - virtual bool Initialize(); - - // 输入图像预处理操作 - // Mat为FastDeploy定义的数据结构 - // FDTensor为预处理后的Tensor数据,传给后端进行推理 - // im_info为预处理过程保存的数据,在后处理中需要用到 - virtual bool Preprocess(Mat* mat, FDTensor* outputs, - std::map>* im_info); - - // 后端推理结果后处理,输出给用户 - // infer_result 为后端推理后的输出Tensor - // result 为模型预测的结果 - // im_info 为预处理记录的信息,后处理用于还原box - // conf_threshold 后处理时过滤box的置信度阈值 - // nms_iou_threshold 后处理时NMS设定的iou阈值 - virtual bool Postprocess( - FDTensor& infer_result, DetectionResult* result, - const std::map>& im_info, - float conf_threshold, float nms_iou_threshold); - // 模型预测接口,即用户调用的接口 // im 为用户的输入数据,目前对于CV均定义为cv::Mat // result 为模型预测的输出结构体 @@ -81,6 +60,28 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { int stride; // for offseting the boxes by classes when using NMS float max_wh; + + private: + // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 + virtual bool Initialize(); + + // 输入图像预处理操作 + // Mat为FastDeploy定义的数据结构 + // FDTensor为预处理后的Tensor数据,传给后端进行推理 + // im_info为预处理过程保存的数据,在后处理中需要用到 + virtual bool Preprocess(Mat* mat, FDTensor* outputs, + std::map>* im_info); + + // 后端推理结果后处理,输出给用户 + // infer_result 为后端推理后的输出Tensor + // result 为模型预测的结果 + // im_info 为预处理记录的信息,后处理用于还原box + // conf_threshold 后处理时过滤box的置信度阈值 + // nms_iou_threshold 后处理时NMS设定的iou阈值 + virtual bool Postprocess( + FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold); }; } // namespace wongkinyiu } // namespace vision diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 7c5fc30163..1f40ba645a 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -23,7 +23,7 @@ YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只 > > **参数** > -> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,RGB格式 +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 > > * **conf_threshold**(float): 检测框置信度过滤阈值 > > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 From 48136f0d152af4a1a658af71ddaacfe4498b9f2e Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 08:46:49 +0000 Subject: [PATCH 20/51] add examples for yolov7 --- examples/CMakeLists.txt | 1 + examples/vision/wongkinyiu_yolov7.cc | 52 ++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 examples/vision/wongkinyiu_yolov7.cc diff --git a/examples/CMakeLists.txt 
b/examples/CMakeLists.txt index 4228a3e01f..31cd1723b1 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -17,6 +17,7 @@ endfunction() if (WTIH_VISION_EXAMPLES) add_fastdeploy_executable(vision ultralytics yolov5) add_fastdeploy_executable(vision meituan yolov6) + add_fastdeploy_executable(vision wongkinyiu yolov7) endif() # other examples ... \ No newline at end of file diff --git a/examples/vision/wongkinyiu_yolov7.cc b/examples/vision/wongkinyiu_yolov7.cc new file mode 100644 index 0000000000..7de033cae8 --- /dev/null +++ b/examples/vision/wongkinyiu_yolov7.cc @@ -0,0 +1,52 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "../resources/models/yolov7.onnx"; + std::string img_path = "../resources/images/horses.jpg"; + std::string vis_path = "../resources/outputs/wongkinyiu_yolov7_vis_result.jpg"; + + auto model = vis::wongkinyiu::YOLOv7(model_file); + if (!model.Initialized()) { + std::cerr << "Init Failed! Model: " << model_file << std::endl; + return -1; + } else { + std::cout << "Init Done! Model:" << model_file << std::endl; + } + model.EnableDebug(); + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! 
Saved: " << vis_path << std::endl; + return 0; +} From 6feca9233a0503c3e2644b9fa2d1dd76ce5bdbb5 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:07:47 +0000 Subject: [PATCH 21/51] api.md modified --- model_zoo/vision/yolov7/api.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 1f40ba645a..92b16c4755 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -51,7 +51,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 #### predict函数 > ``` -> YOLOv7::predict(cv::Mat* im, DetectionResult* result, +> YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -59,7 +59,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > > **参数** > -> > * **im**: 输入图像,注意需为HWC,RGB格式 +> > * **im**: 输入图像,注意需为HWC,BGR格式 > > * **result**: 检测结果,包括检测框,各个框的置信度 > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 From ae70d4f50ec9981e97dd7b79f3e3265c2105ed0c Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:11:01 +0000 Subject: [PATCH 22/51] api.md modified --- model_zoo/vision/yolov7/api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 92b16c4755..abd2abdcec 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -49,7 +49,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 -#### predict函数 +#### Predict函数 > ``` > YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, From f591b8567b08afbd1e3894100becaa2ce511424b Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:31:25 +0000 Subject: [PATCH 23/51] api.md modified --- model_zoo/vision/yolov7/api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index abd2abdcec..02cf78121c 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -49,7 +49,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 -#### Predict函数 +#### redict函数 > ``` > YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, From f0def41c8b5e5e2b1d627ada84b2c4b17c84aeac Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:41:47 +0000 Subject: [PATCH 24/51] YOLOv7 --- model_zoo/vision/yolov7/api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 02cf78121c..abd2abdcec 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -49,7 +49,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 -#### redict函数 +#### Predict函数 > ``` > YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, From 15b91609aae1f81e3d5789d40c18f0aa16e37e86 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 10:50:08 +0000 Subject: [PATCH 25/51] yolov7 release link --- model_zoo/vision/yolov7/README.md | 2 +- model_zoo/vision/yolov7/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 7246a4a7b7..c81c75d8d2 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -16,7 +16,7 @@ - 手动获取 - 访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index a1d146053a..c3d4e8bcb2 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -4,7 +4,7 @@ - 手动获取 - 访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 From 4706e8ca754735d318650c3f7a90b3e00f6ef16a Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 11:01:53 +0000 Subject: [PATCH 26/51] yolov7 release link --- model_zoo/vision/yolov7/README.md | 2 ++ model_zoo/vision/yolov7/cpp/README.md | 2 ++ 2 files changed, 4 insertions(+) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index c81c75d8d2..e330a3055b 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -1,5 +1,7 @@ # 编译YOLOv7示例 +当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) + 本文档说明如何进行[YOLOv7](https://github.com/WongKinYiu/yolov7)的快速部署推理。本目录结构如下 ``` diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index c3d4e8bcb2..2e9570f224 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,5 +1,7 @@ # 编译YOLOv7示例 +当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) + ## 获取ONNX文件 - 手动获取 From dc8358461f384cc7ee0fcc592a68e5a917925bf6 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 11:05:01 +0000 Subject: [PATCH 27/51] yolov7 release link --- model_zoo/vision/yolov7/README.md | 2 +- model_zoo/vision/yolov7/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index e330a3055b..7eed2c0c43 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -1,6 +1,6 @@ # 编译YOLOv7示例 -当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) +当前支持模型版本为:[YOLOv7 v0.1](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) 本文档说明如何进行[YOLOv7](https://github.com/WongKinYiu/yolov7)的快速部署推理。本目录结构如下 diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 2e9570f224..13a5e8343e 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,6 +1,6 @@ # 编译YOLOv7示例 -当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) +当前支持模型版本为:[YOLOv7 v0.1](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) ## 获取ONNX文件 From 086debd8d3e040d37b0b8cbc006277d91e246baa Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 11:10:43 +0000 Subject: [PATCH 28/51] copyright --- fastdeploy/vision/wongkinyiu/yolov7.cc | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc index 6baf4c336b..db470d327e 100644 --- 
a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -1,3 +1,17 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + #include "fastdeploy/vision/wongkinyiu/yolov7.h" #include "fastdeploy/utils/perf.h" #include "fastdeploy/vision/utils/utils.h" From 4f980b9ce8e2573d76385ca4f0b98febf66f57a4 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 12:09:04 +0000 Subject: [PATCH 29/51] change some helpers to private --- fastdeploy/vision/wongkinyiu/yolov7.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index 29dffaf2f4..75cab34ded 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -63,13 +63,13 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { private: // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 - virtual bool Initialize(); + bool Initialize(); // 输入图像预处理操作 // Mat为FastDeploy定义的数据结构 // FDTensor为预处理后的Tensor数据,传给后端进行推理 // im_info为预处理过程保存的数据,在后处理中需要用到 - virtual bool Preprocess(Mat* mat, FDTensor* outputs, + bool Preprocess(Mat* mat, FDTensor* outputs, std::map>* im_info); // 后端推理结果后处理,输出给用户 @@ -78,7 +78,7 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // im_info 为预处理记录的信息,后处理用于还原box // conf_threshold 后处理时过滤box的置信度阈值 // nms_iou_threshold 后处理时NMS设定的iou阈值 - virtual bool Postprocess( + bool Postprocess( FDTensor& infer_result, DetectionResult* result, const std::map>& im_info, float conf_threshold, float nms_iou_threshold); From 80beadfa3ce7ebb7cc2d345d4154cd42f6dec785 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 2022 02:57:08 +0000 Subject: [PATCH 30/51] change variables to const and fix documents. 
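Passing `size` and `color` by const reference keeps `LetterBox` from copying a
`std::vector` on every call and makes explicit that the helper only reads them.
A minimal sketch of the pattern (illustrative only, not part of the diff below;
`Sum` is a hypothetical name):

```
#include <vector>

// Read-only access: no copy is made and the callee cannot mutate `v`.
static float Sum(const std::vector<float>& v) {
  float s = 0.0f;
  for (float x : v) s += x;
  return s;
}
```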
--- fastdeploy/vision/wongkinyiu/yolov7.cc | 6 +++--- model_zoo/vision/yolov7/README.md | 16 ++-------------- model_zoo/vision/yolov7/cpp/README.md | 8 +------- 3 files changed, 6 insertions(+), 24 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc index db470d327e..248718a69a 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -20,9 +20,9 @@ namespace fastdeploy { namespace vision { namespace wongkinyiu { -void LetterBox(Mat* mat, std::vector size, std::vector color, - bool _auto, bool scale_fill = false, bool scale_up = true, - int stride = 32) { +void LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill = false, bool scale_up = true, int stride = 32) { float scale = std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width()); if (!scale_up) { diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 7eed2c0c43..2bb13ce459 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -20,12 +20,12 @@ 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 - + ``` #下载yolov7模型文件 wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt - + # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt @@ -33,12 +33,6 @@ cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` - - -- 从PaddlePaddle获取 - - - ## 安装FastDeploy 使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` @@ -71,9 +65,3 @@ DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] - [C++部署](./cpp/README.md) - [YOLOv7 API文档](./api.md) - - - - - - diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 13a5e8343e..f216c1aecf 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -8,20 +8,15 @@ 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 - - ``` #下载yolov7模型文件 wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt - + # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt ``` - - -- 从PaddlePaddle获取 ## 运行demo @@ -54,4 +49,3 @@ DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] 224.049210,195.147003, 419.658234, 364.004852, 0.798262, 17 369.316986,209.055725, 456.373840, 321.627625, 0.687066, 17 ``` - From f5f7a863e09490213c5ea51fd83c584ff10752df Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 2022 05:16:07 +0000 Subject: [PATCH 31/51] gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 39783b8839..51f2f2ed80 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,4 @@ fastdeploy.egg-info .setuptools-cmake-build fastdeploy/version.py fastdeploy/LICENSE* -fastdeploy/ThirdPartyNotices* \ No newline at end of file +fastdeploy/ThirdPartyNotices* From e6cec25cace95e029adc08412aa359486446ec6d Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 2022 08:05:01 +0000 Subject: [PATCH 32/51] Transfer some funtions to private member of class --- fastdeploy/vision/wongkinyiu/yolov7.cc | 10 +++++----- fastdeploy/vision/wongkinyiu/yolov7.h | 17 ++++++++++++----- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc 
b/fastdeploy/vision/wongkinyiu/yolov7.cc index 248718a69a..532f552947 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -20,9 +20,9 @@ namespace fastdeploy { namespace vision { namespace wongkinyiu { -void LetterBox(Mat* mat, const std::vector& size, - const std::vector& color, bool _auto, - bool scale_fill = false, bool scale_up = true, int stride = 32) { +void YOLOv7::LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill, bool scale_up, int stride) { float scale = std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width()); if (!scale_up) { @@ -107,8 +107,8 @@ bool YOLOv7::Preprocess(Mat* mat, FDTensor* output, // 1. letterbox // 2. BGR->RGB // 3. HWC->CHW - LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, - stride); + YOLOv7::LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, + is_scale_up, stride); BGR2RGB::Run(mat); Normalize::Run(mat, std::vector(mat->Channels(), 0.0), std::vector(mat->Channels(), 1.0)); diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index 75cab34ded..90be9ea463 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -70,7 +70,7 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // FDTensor为预处理后的Tensor数据,传给后端进行推理 // im_info为预处理过程保存的数据,在后处理中需要用到 bool Preprocess(Mat* mat, FDTensor* outputs, - std::map>* im_info); + std::map>* im_info); // 后端推理结果后处理,输出给用户 // infer_result 为后端推理后的输出Tensor @@ -78,10 +78,17 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // im_info 为预处理记录的信息,后处理用于还原box // conf_threshold 后处理时过滤box的置信度阈值 // nms_iou_threshold 后处理时NMS设定的iou阈值 - bool Postprocess( - FDTensor& infer_result, DetectionResult* result, - const std::map>& im_info, - float conf_threshold, float nms_iou_threshold); + bool Postprocess(FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold); + + // 对图片进行LetterBox处理 + // mat 为输入图片 + // size 为输入图片的size + void LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill = false, bool scale_up = true, + int stride = 32); }; } // namespace wongkinyiu } // namespace vision From e25e4f2a5c18ffe45bd3b8574dbe7c612a528e72 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 2022 08:07:49 +0000 Subject: [PATCH 33/51] Transfer some funtions to private member of class --- fastdeploy/vision/wongkinyiu/yolov7.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index 90be9ea463..c494754f0e 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -83,8 +83,8 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { float conf_threshold, float nms_iou_threshold); // 对图片进行LetterBox处理 - // mat 为输入图片 - // size 为输入图片的size + // mat 为读取到的原图 + // size 为输入模型的图像尺寸 void LetterBox(Mat* mat, const std::vector& size, const std::vector& color, bool _auto, bool scale_fill = false, bool scale_up = true, From e8a8439dd97e0a6d52f299bff2958290637687c8 Mon Sep 17 00:00:00 2001 From: ziqi-jin <67993288+ziqi-jin@users.noreply.github.com> Date: Wed, 20 Jul 2022 15:25:57 +0800 Subject: [PATCH 34/51] Merge from develop (#9) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE 
model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add version notes for yolov5/yolov6/yolox * add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 (#29) * Add multi-label function for yolov5 * Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> --- examples/CMakeLists.txt | 26 +-- examples/vision/ppdet_ppyoloe.cc | 51 ++++++ fastdeploy/__init__.py | 2 +- fastdeploy/download.py | 2 +- fastdeploy/utils/utils.h | 4 + fastdeploy/vision.h | 1 + fastdeploy/vision/__init__.py | 1 + .../vision/common/processors/convert.cc | 62 +++++++ fastdeploy/vision/common/processors/convert.h | 42 +++++ .../vision/common/processors/transform.h | 1 + fastdeploy/vision/meituan/yolov6.cc | 28 +-- fastdeploy/vision/ppcls/model.cc | 19 +- fastdeploy/vision/ppcls/model.h | 16 +- fastdeploy/vision/ppcls/ppcls_pybind.cc | 2 +- fastdeploy/vision/ppdet/__init__.py | 39 ++++ fastdeploy/vision/ppdet/ppdet_pybind.cc | 32 ++++ fastdeploy/vision/ppdet/ppyoloe.cc | 170 ++++++++++++++++++ fastdeploy/vision/ppdet/ppyoloe.h | 44 +++++ fastdeploy/vision/ultralytics/yolov5.cc | 19 +- fastdeploy/vision/utils/sort_det_res.cc | 6 +- fastdeploy/vision/vision_pybind.cc | 10 +- fastdeploy/vision/visualize/detection.cc | 4 +- model_zoo/vision/ppyoloe/README.md | 52 ++++++ model_zoo/vision/ppyoloe/api.md | 74 ++++++++ model_zoo/vision/ppyoloe/cpp/CMakeLists.txt | 17 ++ model_zoo/vision/ppyoloe/cpp/README.md | 39 ++++ model_zoo/vision/ppyoloe/cpp/ppyoloe.cc | 51 ++++++ model_zoo/vision/ppyoloe/ppyoloe.py | 24 +++ setup.py | 30 +++- 29 files changed, 818 insertions(+), 50 deletions(-) create mode 100644 examples/vision/ppdet_ppyoloe.cc create mode 100644 fastdeploy/vision/common/processors/convert.cc create mode 100644 fastdeploy/vision/common/processors/convert.h create mode 100644 fastdeploy/vision/ppdet/__init__.py create mode 100644 fastdeploy/vision/ppdet/ppdet_pybind.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.h create mode 100644 model_zoo/vision/ppyoloe/README.md create mode 100644 model_zoo/vision/ppyoloe/api.md create mode 100644 model_zoo/vision/ppyoloe/cpp/CMakeLists.txt create mode 100644 model_zoo/vision/ppyoloe/cpp/README.md create mode 100644 model_zoo/vision/ppyoloe/cpp/ppyoloe.cc create mode 100644 model_zoo/vision/ppyoloe/ppyoloe.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 1e2dc43bd4..112193c86a 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt 
@@ -1,24 +1,26 @@ -function(add_fastdeploy_executable field url model) +function(add_fastdeploy_executable FIELD CC_FILE) # temp target name/file var in function scope - set(TEMP_TARGET_FILE ${PROJECT_SOURCE_DIR}/examples/${field}/${url}_${model}.cc) - set(TEMP_TARGET_NAME ${field}_${url}_${model}) + set(TEMP_TARGET_FILE ${CC_FILE}) + string(REGEX MATCHALL "[0-9A-Za-z_]*.cc" FILE_NAME ${CC_FILE}) + string(REGEX REPLACE ".cc" "" FILE_PREFIX ${FILE_NAME}) + set(TEMP_TARGET_NAME ${FIELD}_${FILE_PREFIX}) if (EXISTS ${TEMP_TARGET_FILE} AND TARGET fastdeploy) add_executable(${TEMP_TARGET_NAME} ${TEMP_TARGET_FILE}) target_link_libraries(${TEMP_TARGET_NAME} PUBLIC fastdeploy) - message(STATUS "Found source file: [${field}/${url}_${model}.cc], ADD!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") - else () - message(WARNING "Can not found source file: [${field}/${url}_${model}.cc], SKIP!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") + message(STATUS " Added FastDeploy Executable : ${TEMP_TARGET_NAME}") endif() unset(TEMP_TARGET_FILE) unset(TEMP_TARGET_NAME) endfunction() # vision examples -if (WITH_VISION_EXAMPLES) - add_fastdeploy_executable(vision ultralytics yolov5) - add_fastdeploy_executable(vision meituan yolov6) - add_fastdeploy_executable(vision wongkinyiu yolov7) - add_fastdeploy_executable(vision megvii yolox) +if(WITH_VISION_EXAMPLES AND EXISTS ${PROJECT_SOURCE_DIR}/examples/vision) + message(STATUS "") + message(STATUS "*************FastDeploy Examples Summary**********") + file(GLOB ALL_VISION_EXAMPLE_SRCS ${PROJECT_SOURCE_DIR}/examples/vision/*.cc) + foreach(_CC_FILE ${ALL_VISION_EXAMPLE_SRCS}) + add_fastdeploy_executable(vision ${_CC_FILE}) + endforeach() endif() -# other examples ... \ No newline at end of file +# other examples ... diff --git a/examples/vision/ppdet_ppyoloe.cc b/examples/vision/ppdet_ppyoloe.cc new file mode 100644 index 0000000000..b234021c92 --- /dev/null +++ b/examples/vision/ppdet_ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "test.jpeg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" 
              << std::endl;
+  }
+
+  // 输出预测框结果
+  std::cout << res.Str() << std::endl;
+
+  // 可视化预测结果
+  vis::Visualize::VisDetection(&vis_im, res);
+  cv::imwrite(vis_path, vis_im);
+  std::cout << "Detect Done! Saved: " << vis_path << std::endl;
+  return 0;
+}
diff --git a/fastdeploy/__init__.py b/fastdeploy/__init__.py
index 500e7cc42a..68006c1bed 100644
--- a/fastdeploy/__init__.py
+++ b/fastdeploy/__init__.py
@@ -17,7 +17,7 @@
 from .fastdeploy_runtime import *
 from . import fastdeploy_main as C
 from . import vision
-from .download import download
+from .download import download, download_and_decompress


 def TensorInfoStr(tensor_info):
diff --git a/fastdeploy/download.py b/fastdeploy/download.py
index e00af098df..67f21d8e76 100644
--- a/fastdeploy/download.py
+++ b/fastdeploy/download.py
@@ -156,7 +156,7 @@ def decompress(fname):

 def url2dir(url, path, rename=None):
     full_name = download(url, path, rename, show_progress=True)
-    print("SDK is donwloaded, now extracting...")
+    print("File is downloaded, now extracting...")
     if url.count(".tgz") > 0 or url.count(".tar") > 0 or url.count("zip") > 0:
         return decompress(full_name)
diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h
index 1b9f625b5e..9312084265 100644
--- a/fastdeploy/utils/utils.h
+++ b/fastdeploy/utils/utils.h
@@ -64,6 +64,10 @@ class FASTDEPLOY_DECL FDLogger {
   bool verbose_ = true;
 };

+#ifndef __REL_FILE__
+#define __REL_FILE__ __FILE__
+#endif
+
 #define FDERROR \
   FDLogger(true, "[ERROR]") \
       << __REL_FILE__ << "(" << __LINE__ << ")::" << __FUNCTION__ << "\t"
diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h
index ac3f006c0a..cafe310c70 100644
--- a/fastdeploy/vision.h
+++ b/fastdeploy/vision.h
@@ -16,6 +16,7 @@
 #include "fastdeploy/core/config.h"
 #ifdef ENABLE_VISION
 #include "fastdeploy/vision/ppcls/model.h"
+#include "fastdeploy/vision/ppdet/ppyoloe.h"
 #include "fastdeploy/vision/ultralytics/yolov5.h"
 #include "fastdeploy/vision/wongkinyiu/yolov7.h"
 #include "fastdeploy/vision/meituan/yolov6.h"
diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py
index 7122bede0b..6acbf0c376 100644
--- a/fastdeploy/vision/__init__.py
+++ b/fastdeploy/vision/__init__.py
@@ -15,6 +15,7 @@

 from . import evaluation
 from . import ppcls
+from . import ppdet
 from . import ultralytics
 from . import meituan
 from . import megvii
diff --git a/fastdeploy/vision/common/processors/convert.cc b/fastdeploy/vision/common/processors/convert.cc
new file mode 100644
index 0000000000..a7ca6de07a
--- /dev/null
+++ b/fastdeploy/vision/common/processors/convert.cc
@@ -0,0 +1,62 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
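For orientation before the implementation below: `Convert` computes a fused per-channel affine transform, `result = mat * alpha + beta`. A minimal NumPy sketch of the same semantics (illustrative only; the function and variable names here are ours, not part of the FastDeploy API):

```python
import numpy as np

def convert(mat, alpha, beta):
    # Per-channel affine transform in float32, one pass over the image:
    # result[:, :, c] = mat[:, :, c] * alpha[c] + beta[c]
    assert len(alpha) == len(beta) == mat.shape[2]
    out = mat.astype(np.float32)  # astype copies, so the input is untouched
    for c in range(mat.shape[2]):
        out[:, :, c] = out[:, :, c] * alpha[c] + beta[c]
    return out

# With alpha = 1/255 and beta = 0 this reproduces, in a single pass, the
# Normalize(0, 1) call that this patch replaces in yolov5.cc / yolov6.cc.
im = np.random.randint(0, 256, (640, 640, 3), dtype=np.uint8)
normalized = convert(im, [1 / 255.0] * 3, [0.0] * 3)
```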
+ +#include "fastdeploy/vision/common/processors/convert.h" + +namespace fastdeploy { + +namespace vision { + +Convert::Convert(const std::vector& alpha, + const std::vector& beta) { + FDASSERT(alpha.size() == beta.size(), + "Convert: requires the size of alpha equal to the size of beta."); + FDASSERT(alpha.size() != 0, + "Convert: requires the size of alpha and beta > 0."); + alpha_.assign(alpha.begin(), alpha.end()); + beta_.assign(beta.begin(), beta.end()); +} + +bool Convert::CpuRun(Mat* mat) { + cv::Mat* im = mat->GetCpuMat(); + std::vector split_im; + cv::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::merge(split_im, *im); + return true; +} + +#ifdef ENABLE_OPENCV_CUDA +bool Convert::GpuRun(Mat* mat) { + cv::cuda::GpuMat* im = mat->GetGpuMat(); + std::vector split_im; + cv::cuda::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::cuda::merge(split_im, *im); + return true; +} +#endif + +bool Convert::Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, ProcLib lib) { + auto c = Convert(alpha, beta); + return c(mat, lib); +} + +} // namespace vision +} // namespace fastdeploy \ No newline at end of file diff --git a/fastdeploy/vision/common/processors/convert.h b/fastdeploy/vision/common/processors/convert.h new file mode 100644 index 0000000000..5d5a5276f5 --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.h @@ -0,0 +1,42 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "fastdeploy/vision/common/processors/base.h" + +namespace fastdeploy { +namespace vision { +class Convert : public Processor { + public: + Convert(const std::vector& alpha, const std::vector& beta); + + bool CpuRun(Mat* mat); +#ifdef ENABLE_OPENCV_CUDA + bool GpuRun(Mat* mat); +#endif + std::string Name() { return "Convert"; } + + // Compute `result = mat * alpha + beta` directly by channel. + // The default behavior is the same as OpenCV's convertTo method. 
+ static bool Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, + ProcLib lib = ProcLib::OPENCV_CPU); + + private: + std::vector alpha_; + std::vector beta_; +}; +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/common/processors/transform.h b/fastdeploy/vision/common/processors/transform.h index 12eec8d72d..08073b4e42 100644 --- a/fastdeploy/vision/common/processors/transform.h +++ b/fastdeploy/vision/common/processors/transform.h @@ -17,6 +17,7 @@ #include "fastdeploy/vision/common/processors/cast.h" #include "fastdeploy/vision/common/processors/center_crop.h" #include "fastdeploy/vision/common/processors/color_space_convert.h" +#include "fastdeploy/vision/common/processors/convert.h" #include "fastdeploy/vision/common/processors/hwc2chw.h" #include "fastdeploy/vision/common/processors/normalize.h" #include "fastdeploy/vision/common/processors/pad.h" diff --git a/fastdeploy/vision/meituan/yolov6.cc b/fastdeploy/vision/meituan/yolov6.cc index 8f37bf89c6..8ac7377194 100644 --- a/fastdeploy/vision/meituan/yolov6.cc +++ b/fastdeploy/vision/meituan/yolov6.cc @@ -25,14 +25,14 @@ namespace meituan { void LetterBox(Mat* mat, std::vector size, std::vector color, bool _auto, bool scale_fill = false, bool scale_up = true, int stride = 32) { - float scale = std::min(size[1] * 1.0f / static_cast(mat->Height()), - size[0] * 1.0f / static_cast(mat->Width())); + float scale = std::min(size[1] * 1.0f / static_cast(mat->Height()), + size[0] * 1.0f / static_cast(mat->Width())); if (!scale_up) { scale = std::min(scale, 1.0f); } int resize_h = int(round(static_cast(mat->Height()) * scale)); - int resize_w = int(round(static_cast(mat->Width()) * scale)); + int resize_w = int(round(static_cast(mat->Width()) * scale)); int pad_w = size[0] - resize_w; int pad_h = size[1] - resize_h; @@ -85,13 +85,13 @@ bool YOLOv6::Initialize() { is_scale_up = false; stride = 32; max_wh = 4096.0f; - + if (!InitRuntime()) { FDERROR << "Failed to initialize fastdeploy backend." << std::endl; return false; } - // Check if the input shape is dynamic after Runtime already initialized, - // Note that, We need to force is_mini_pad 'false' to keep static + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. 
is_dynamic_input_ = false; auto shape = InputInfoOfRuntime(0).shape; @@ -102,7 +102,7 @@ bool YOLOv6::Initialize() { break; } } - if (!is_dynamic_input_) { + if (!is_dynamic_input_) { is_mini_pad = false; } return true; @@ -111,15 +111,15 @@ bool YOLOv6::Initialize() { bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, std::map>* im_info) { // process after image load - float ratio = std::min(size[1] * 1.0f / static_cast(mat->Height()), - size[0] * 1.0f / static_cast(mat->Width())); + float ratio = std::min(size[1] * 1.0f / static_cast(mat->Height()), + size[0] * 1.0f / static_cast(mat->Width())); if (ratio != 1.0) { int interp = cv::INTER_AREA; if (ratio > 1.0) { interp = cv::INTER_LINEAR; } int resize_h = int(round(static_cast(mat->Height()) * ratio)); - int resize_w = int(round(static_cast(mat->Width()) * ratio)); + int resize_w = int(round(static_cast(mat->Width()) * ratio)); Resize::Run(mat, resize_w, resize_h, -1, -1, interp); } // yolov6's preprocess steps @@ -129,8 +129,12 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc index 915cb97512..c4e5b767c7 100644 --- a/fastdeploy/vision/ppcls/model.cc +++ b/fastdeploy/vision/ppcls/model.cc @@ -1,3 +1,16 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/utils/utils.h" @@ -135,6 +148,6 @@ bool Model::Predict(cv::Mat* im, ClassifyResult* result, int topk) { return true; } -} // namespace ppcls -} // namespace vision -} // namespace fastdeploy +} // namespace ppcls +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h index 36841d74c6..265f92d32b 100644 --- a/fastdeploy/vision/ppcls/model.h +++ b/fastdeploy/vision/ppcls/model.h @@ -1,7 +1,21 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + #pragma once #include "fastdeploy/fastdeploy_model.h" -#include "fastdeploy/vision/common/result.h" #include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" namespace fastdeploy { namespace vision { diff --git a/fastdeploy/vision/ppcls/ppcls_pybind.cc b/fastdeploy/vision/ppcls/ppcls_pybind.cc index ef3fffee8e..1abc0b2b7c 100644 --- a/fastdeploy/vision/ppcls/ppcls_pybind.cc +++ b/fastdeploy/vision/ppcls/ppcls_pybind.cc @@ -14,7 +14,7 @@ #include "fastdeploy/pybind/main.h" namespace fastdeploy { -void BindPpClsModel(pybind11::module& m) { +void BindPPCls(pybind11::module& m) { auto ppcls_module = m.def_submodule("ppcls", "Module to deploy PaddleClas."); pybind11::class_(ppcls_module, "Model") .def(pybind11::init(ppdet_module, + "PPYOLOE") + .def(pybind11::init()) + .def("predict", [](vision::ppdet::PPYOLOE& self, pybind11::array& data, + float conf_threshold, float nms_iou_threshold) { + auto mat = PyArrayToCvMat(data); + vision::DetectionResult res; + self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); + return res; + }); +} +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.cc b/fastdeploy/vision/ppdet/ppyoloe.cc new file mode 100644 index 0000000000..c215ecb0ca --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.cc @@ -0,0 +1,170 @@ +#include "fastdeploy/vision/ppdet/ppyoloe.h" +#include "fastdeploy/vision/utils/utils.h" +#include "yaml-cpp/yaml.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +PPYOLOE::PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option, + const Frontend& model_format) { + config_file_ = config_file; + valid_cpu_backends = {Backend::ORT, Backend::PDINFER}; + valid_gpu_backends = {Backend::ORT, Backend::PDINFER}; + runtime_option = custom_option; + runtime_option.model_format = model_format; + runtime_option.model_file = model_file; + runtime_option.params_file = params_file; + initialized = Initialize(); +} + +bool PPYOLOE::Initialize() { + if (!BuildPreprocessPipelineFromConfig()) { + std::cout << "Failed to build preprocess pipeline from configuration file." + << std::endl; + return false; + } + if (!InitRuntime()) { + std::cout << "Failed to initialize fastdeploy backend." << std::endl; + return false; + } + return true; +} + +bool PPYOLOE::BuildPreprocessPipelineFromConfig() { + processors_.clear(); + YAML::Node cfg; + try { + cfg = YAML::LoadFile(config_file_); + } catch (YAML::BadFile& e) { + std::cout << "Failed to load yaml file " << config_file_ + << ", maybe you should check this file." << std::endl; + return false; + } + + if (cfg["arch"].as() != "YOLO") { + std::cout << "Require the arch of model is YOLO, but arch defined in " + "config file is " + << cfg["arch"].as() << "." 
              << std::endl;
+    return false;
+  }
+  processors_.push_back(std::make_shared<BGR2RGB>());
+
+  for (const auto& op : cfg["Preprocess"]) {
+    std::string op_name = op["type"].as<std::string>();
+    if (op_name == "NormalizeImage") {
+      auto mean = op["mean"].as<std::vector<float>>();
+      auto std = op["std"].as<std::vector<float>>();
+      bool is_scale = op["is_scale"].as<bool>();
+      processors_.push_back(std::make_shared<Normalize>(mean, std, is_scale));
+    } else if (op_name == "Resize") {
+      bool keep_ratio = op["keep_ratio"].as<bool>();
+      auto target_size = op["target_size"].as<std::vector<int>>();
+      int interp = op["interp"].as<int>();
+      FDASSERT(target_size.size() == 2,
+               "Require size of target_size be 2, but now it's " +
+                   std::to_string(target_size.size()) + ".");
+      FDASSERT(!keep_ratio,
+               "Only support keep_ratio == false while deploying "
+               "PaddleDetection models.");
+      int width = target_size[1];
+      int height = target_size[0];
+      processors_.push_back(
+          std::make_shared<Resize>(width, height, -1.0, -1.0, interp, false));
+    } else if (op_name == "Permute") {
+      processors_.push_back(std::make_shared<HWC2CHW>());
+    } else {
+      std::cout << "Unexpected preprocess operator: " << op_name << "."
+                << std::endl;
+      return false;
+    }
+  }
+  return true;
+}
+
+bool PPYOLOE::Preprocess(Mat* mat, std::vector<FDTensor>* outputs) {
+  int origin_w = mat->Width();
+  int origin_h = mat->Height();
+  for (size_t i = 0; i < processors_.size(); ++i) {
+    if (!(*(processors_[i].get()))(mat)) {
+      std::cout << "Failed to process image data in " << processors_[i]->Name()
+                << "." << std::endl;
+      return false;
+    }
+  }
+
+  outputs->resize(2);
+  (*outputs)[0].name = InputInfoOfRuntime(0).name;
+  mat->ShareWithTensor(&((*outputs)[0]));
+
+  // reshape to [1, c, h, w]
+  (*outputs)[0].shape.insert((*outputs)[0].shape.begin(), 1);
+
+  (*outputs)[1].Allocate({1, 2}, FDDataType::FP32, InputInfoOfRuntime(1).name);
+  float* ptr = static_cast<float*>((*outputs)[1].MutableData());
+  // scale_factor: ratio of the model input size to the original image size
+  ptr[0] = mat->Height() * 1.0 / origin_h;
+  ptr[1] = mat->Width() * 1.0 / origin_w;
+  return true;
+}
+
+bool PPYOLOE::Postprocess(std::vector<FDTensor>& infer_result,
+                          DetectionResult* result, float conf_threshold,
+                          float nms_threshold) {
+  FDASSERT(infer_result[1].shape[0] == 1,
+           "Only support batch = 1 in FastDeploy now.");
+  int box_num = 0;
+  if (infer_result[1].dtype == FDDataType::INT32) {
+    box_num = *(static_cast<int32_t*>(infer_result[1].Data()));
+  } else if (infer_result[1].dtype == FDDataType::INT64) {
+    box_num = *(static_cast<int64_t*>(infer_result[1].Data()));
+  } else {
+    FDASSERT(
+        false,
+        "The output box_num of PPYOLOE model should be type of int32/int64.");
+  }
+  result->Reserve(box_num);
+  float* box_data = static_cast<float*>(infer_result[0].Data());
+  for (int i = 0; i < box_num; ++i) {
+    if (box_data[i * 6 + 1] < conf_threshold) {
+      continue;
+    }
+    result->label_ids.push_back(box_data[i * 6]);
+    result->scores.push_back(box_data[i * 6 + 1]);
+    result->boxes.emplace_back(
+        std::array<float, 4>{box_data[i * 6 + 2], box_data[i * 6 + 3],
+                             box_data[i * 6 + 4] - box_data[i * 6 + 2],
+                             box_data[i * 6 + 5] - box_data[i * 6 + 3]});
+  }
+  return true;
+}
+
+bool PPYOLOE::Predict(cv::Mat* im, DetectionResult* result,
+                      float conf_threshold, float iou_threshold) {
+  Mat mat(*im);
+  std::vector<FDTensor> processed_data;
+  if (!Preprocess(&mat, &processed_data)) {
+    FDERROR << "Failed to preprocess input data while using model:"
+            << ModelName() << "." << std::endl;
+    return false;
+  }
+
+  std::vector<FDTensor> infer_result;
+  if (!Infer(processed_data, &infer_result)) {
+    FDERROR << "Failed to inference while using model:" << ModelName() << "."
+ << std::endl; + return false; + } + + if (!Postprocess(infer_result, result, conf_threshold, iou_threshold)) { + FDERROR << "Failed to postprocess while using model:" << ModelName() << "." + << std::endl; + return false; + } + return true; +} + +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.h b/fastdeploy/vision/ppdet/ppyoloe.h new file mode 100644 index 0000000000..a3db268ca4 --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.h @@ -0,0 +1,44 @@ +#pragma once +#include "fastdeploy/fastdeploy_model.h" +#include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" + +#include "fastdeploy/vision/utils/utils.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +class FASTDEPLOY_DECL PPYOLOE : public FastDeployModel { + public: + PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option = RuntimeOption(), + const Frontend& model_format = Frontend::PADDLE); + + std::string ModelName() const { return "PaddleDetection/PPYOLOE"; } + + virtual bool Initialize(); + + virtual bool BuildPreprocessPipelineFromConfig(); + + virtual bool Preprocess(Mat* mat, std::vector* outputs); + + virtual bool Postprocess(std::vector& infer_result, + DetectionResult* result, float conf_threshold, + float nms_threshold); + + virtual bool Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold = 0.5, float nms_threshold = 0.7); + + private: + std::vector> processors_; + std::string config_file_; + // PaddleDetection can export model without nms + // This flag will help us to handle the different + // situation + bool has_nms_; +}; +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc index 193cfe9794..0b7e50e735 100644 --- a/fastdeploy/vision/ultralytics/yolov5.cc +++ b/fastdeploy/vision/ultralytics/yolov5.cc @@ -87,8 +87,8 @@ bool YOLOv5::Initialize() { FDERROR << "Failed to initialize fastdeploy backend." << std::endl; return false; } - // Check if the input shape is dynamic after Runtime already initialized, - // Note that, We need to force is_mini_pad 'false' to keep static + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. 
is_dynamic_input_ = false; auto shape = InputInfoOfRuntime(0).shape; @@ -99,7 +99,7 @@ bool YOLOv5::Initialize() { break; } } - if (!is_dynamic_input_) { + if (!is_dynamic_input_) { is_mini_pad = false; } return true; @@ -126,8 +126,12 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), @@ -198,6 +202,11 @@ bool YOLOv5::Postprocess( result->scores.push_back(confidence); } } + + if (result->boxes.size() == 0) { + return true; + } + utils::NMS(result, nms_iou_threshold); // scale the boxes to the origin image shape diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc index e4a0db9761..93dbb69694 100644 --- a/fastdeploy/vision/utils/sort_det_res.cc +++ b/fastdeploy/vision/utils/sort_det_res.cc @@ -68,7 +68,11 @@ void MergeSort(DetectionResult* result, size_t low, size_t high) { void SortDetectionResult(DetectionResult* result) { size_t low = 0; - size_t high = result->scores.size() - 1; + size_t high = result->scores.size(); + if (high == 0) { + return; + } + high = high - 1; MergeSort(result, low, high); } diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index 41ada5541a..0334303ce6 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -16,7 +16,8 @@ namespace fastdeploy { -void BindPpClsModel(pybind11::module& m); +void BindPPCls(pybind11::module& m); +void BindPPDet(pybind11::module& m); void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); void BindMeituan(pybind11::module& m); @@ -41,13 +42,14 @@ void BindVision(pybind11::module& m) { .def("__repr__", &vision::DetectionResult::Str) .def("__str__", &vision::DetectionResult::Str); - BindPpClsModel(m); + BindPPCls(m); + BindPPDet(m); BindUltralytics(m); BindWongkinyiu(m); BindMeituan(m); BindMegvii(m); #ifdef ENABLE_VISION_VISUALIZE BindVisualize(m); -#endif +#endif } -} // namespace fastdeploy +} // namespace fastdeploy diff --git a/fastdeploy/vision/visualize/detection.cc b/fastdeploy/vision/visualize/detection.cc index d0c4116148..5b5538bff7 100644 --- a/fastdeploy/vision/visualize/detection.cc +++ b/fastdeploy/vision/visualize/detection.cc @@ -43,7 +43,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, } std::string text = id + "," + score; int font = cv::FONT_HERSHEY_SIMPLEX; - cv::Size text_size = cv::getTextSize(text, font, font_size, 0.5, nullptr); + cv::Size text_size = cv::getTextSize(text, font, font_size, 1, nullptr); cv::Point origin; origin.x = rect.x; origin.y = rect.y; @@ -52,7 +52,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, text_size.width, text_size.height); cv::rectangle(*im, rect, rect_color, line_size); cv::putText(*im, text, origin, font, font_size, cv::Scalar(255, 255, 255), - 0.5); + 1); } } diff --git a/model_zoo/vision/ppyoloe/README.md b/model_zoo/vision/ppyoloe/README.md new file mode 100644 index 
0000000000..42d18104ad
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/README.md
@@ -0,0 +1,52 @@
+# PaddleDetection/PPYOLOE部署示例
+
+- 当前支持PaddleDetection版本为[release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4)
+
+本文档说明如何进行[PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe)的快速部署推理。本目录结构如下
+```
+.
+├── cpp                 # C++ 代码目录
+│   ├── CMakeLists.txt  # C++ 代码编译CMakeLists文件
+│   ├── README.md       # C++ 代码编译部署文档
+│   └── ppyoloe.cc      # C++ 示例代码
+├── README.md           # PPYOLOE 部署文档
+└── ppyoloe.py          # Python示例代码
+```
+
+## 安装FastDeploy
+
+使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu`
+```
+# 安装fastdeploy-python工具
+pip install fastdeploy-python
+```
+
+## Python部署
+
+执行如下代码即会自动下载PPYOLOE模型和测试图片
+```
+python ppyoloe.py
+```
+
+执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下
+```
+DetectionResult: [xmin, ymin, xmax, ymax, score, label_id]
+162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33
+414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0
+163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0
+267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0
+581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0
+104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0
+348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0
+364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0
+75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56
+328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0
+504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0
+379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0
+25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0
+```
+
+## 其它文档
+
+- [C++部署](./cpp/README.md)
+- [PPYOLOE API文档](./api.md)
diff --git a/model_zoo/vision/ppyoloe/api.md b/model_zoo/vision/ppyoloe/api.md
new file mode 100644
index 0000000000..1c5cbcaadb
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/api.md
@@ -0,0 +1,74 @@
+# PPYOLOE API说明
+
+## Python API
+
+### PPYOLOE类
+```
+fastdeploy.vision.ppdet.PPYOLOE(model_file, params_file, config_file, runtime_option=None, model_format=fd.Frontend.PADDLE)
+```
+PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式
+
+**参数**
+
+> * **model_file**(str): 模型文件路径
+> * **params_file**(str): 参数文件路径
+> * **config_file**(str): 模型推理配置文件
+> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置
+> * **model_format**(Frontend): 模型格式
+
+#### predict函数
+> ```
+> PPYOLOE.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5)
+> ```
+> 模型预测接口,输入图像直接输出检测结果。
+>
+> **参数**
+>
+> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式
+> > * **conf_threshold**(float): 检测框置信度过滤阈值
+> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效)
+
+示例代码参考[ppyoloe.py](./ppyoloe.py)
+
+
+## C++ API
+
+### PPYOLOE类
+```
+fastdeploy::vision::ppdet::PPYOLOE(
+        const string& model_file,
+        const string& params_file,
+        const string& config_file,
+        const RuntimeOption& runtime_option = RuntimeOption(),
+        const Frontend& model_format = Frontend::PADDLE)
+```
+PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式
+
+**参数**
+
+> * **model_file**(str): 模型文件路径
+> * **params_file**(str): 参数文件路径
+> * **config_file**(str): 模型推理配置文件
+> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置
+> * **model_format**(Frontend): 模型格式
+
+#### Predict函数
+> ```
+> PPYOLOE::Predict(cv::Mat* im, DetectionResult* result,
+>                  float conf_threshold = 0.25,
+>                  float nms_iou_threshold = 0.5)
+> ```
+> 模型预测接口,输入图像直接输出检测结果。
+>
+> **参数**
+>
+> > * **im**: 输入图像,注意需为HWC,BGR格式
+> > * **result**: 检测结果,包括检测框,各个框的置信度
+> > * **conf_threshold**: 检测框置信度过滤阈值
+> > * **nms_iou_threshold**: NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效)
+
+示例代码参考[cpp/ppyoloe.cc](cpp/ppyoloe.cc)
+
+## 其它API使用
+
+- [模型部署RuntimeOption配置](../../../docs/api/runtime_option.md)
diff --git a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt
new file mode 100644
index 0000000000..e681566517
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt
@@ -0,0 +1,17 @@
+PROJECT(ppyoloe_demo C CXX)
+CMAKE_MINIMUM_REQUIRED (VERSION 3.16)
+
+# 在低版本ABI环境中,通过如下代码进行兼容性编译
+# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0)
+
+# 指定下载解压后的fastdeploy库路径
+set(FASTDEPLOY_INSTALL_DIR ${PROJECT_SOURCE_DIR}/fastdeploy-linux-x64-0.0.3/)
+
+include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
+
+# 添加FastDeploy依赖头文件
+include_directories(${FASTDEPLOY_INCS})
+
+add_executable(ppyoloe_demo ${PROJECT_SOURCE_DIR}/ppyoloe.cc)
+# 添加FastDeploy库依赖
+target_link_libraries(ppyoloe_demo ${FASTDEPLOY_LIBS})
diff --git a/model_zoo/vision/ppyoloe/cpp/README.md b/model_zoo/vision/ppyoloe/cpp/README.md
new file mode 100644
index 0000000000..1027c2eeb2
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/cpp/README.md
@@ -0,0 +1,39 @@
+# 编译PPYOLOE示例
+
+
+```
+# 下载和解压预测库
+wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz
+tar xvf fastdeploy-linux-x64-0.0.3.tgz
+
+# 编译示例代码
+mkdir build && cd build
+cmake ..
+make -j
+
+# 下载模型和图片
+wget https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz
+tar xvf ppyoloe_crn_l_300e_coco.tgz
+wget https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg
+
+# 执行
+./ppyoloe_demo
+```
+
+执行完后可视化的结果保存在本地`vis.jpeg`,同时会将检测框输出在终端,如下所示
+```
+DetectionResult: [xmin, ymin, xmax, ymax, score, label_id]
+162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33
+414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0
+163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0
+267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0
+581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0
+104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0
+348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0
+364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0
+75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56
+328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0
+504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0
+379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0
+25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0
+```
diff --git a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc
new file mode 100644
index 0000000000..e63f29e62a
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc
@@ -0,0 +1,51 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
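As context for the demo that follows: `PPYOLOE::Predict` feeds the two preprocessed tensors to the runtime and then filters the NMS'ed output in `Postprocess` (see `ppyoloe.cc` above). A rough NumPy sketch of that decode step, with the tensor layout inferred from the C++ code (batch = 1; each row of the box tensor is `[label_id, score, x1, y1, x2, y2]`; names here are illustrative):

```python
import numpy as np

def decode_outputs(box_tensor, box_num_tensor, conf_threshold=0.5):
    # box_tensor: float32 [N, 6]; box_num_tensor: int32/int64 count of valid rows.
    num = int(np.asarray(box_num_tensor).reshape(-1)[0])
    detections = []
    for label, score, x1, y1, x2, y2 in box_tensor[:num]:
        if score < conf_threshold:  # same confidence filter as Postprocess
            continue
        detections.append((int(label), float(score), (x1, y1, x2, y2)))
    return detections
```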
+ +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "000000014439_640x640.jpg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! Saved: " << vis_path << std::endl; + return 0; +} diff --git a/model_zoo/vision/ppyoloe/ppyoloe.py b/model_zoo/vision/ppyoloe/ppyoloe.py new file mode 100644 index 0000000000..7d79dfd8cf --- /dev/null +++ b/model_zoo/vision/ppyoloe/ppyoloe.py @@ -0,0 +1,24 @@ +import fastdeploy as fd +import cv2 + +# 下载模型和测试图片 +model_url = "https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz" +test_jpg_url = "https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg" +fd.download_and_decompress(model_url, ".") +fd.download(test_jpg_url, ".", show_progress=True) + +# 加载模型 +model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel", + "ppyoloe_crn_l_300e_coco/model.pdiparams", + "ppyoloe_crn_l_300e_coco/infer_cfg.yml") + +# 预测图片 +im = cv2.imread("000000014439_640x640.jpg") +result = model.predict(im, conf_threshold=0.5) + +# 可视化结果 +fd.vision.visualize.vis_detection(im, result) +cv2.imwrite("vis_result.jpg", im) + +# 输出预测结果 +print(result) diff --git a/setup.py b/setup.py index f0ff3f16de..e76f057b1c 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,8 @@ setup_configs["ENABLE_TRT_BACKEND"] = os.getenv("ENABLE_TRT_BACKEND", "OFF") setup_configs["WITH_GPU"] = os.getenv("WITH_GPU", "OFF") setup_configs["TRT_DIRECTORY"] = os.getenv("TRT_DIRECTORY", "UNDEFINED") -setup_configs["CUDA_DIRECTORY"] = os.getenv("CUDA_DIRECTORY", "/usr/local/cuda") +setup_configs["CUDA_DIRECTORY"] = os.getenv("CUDA_DIRECTORY", + "/usr/local/cuda") TOP_DIR = os.path.realpath(os.path.dirname(__file__)) SRC_DIR = os.path.join(TOP_DIR, "fastdeploy") @@ -325,17 +326,32 @@ def run(self): shutil.copy("LICENSE", "fastdeploy") depend_libs = list() - # modify the search path of libraries - command = "patchelf --set-rpath '$ORIGIN/libs/' .setuptools-cmake-build/fastdeploy_main.cpython-36m-x86_64-linux-gnu.so" - # The sw_64 not suppot patchelf, so we just disable that. 
-    if platform.machine() != 'sw_64' and platform.machine() != 'mips64':
-        assert os.system(command) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format(command)
+    if platform.system().lower() == "linux":
+        for f in os.listdir(".setuptools-cmake-build"):
+            full_name = os.path.join(".setuptools-cmake-build", f)
+            if not os.path.isfile(full_name):
+                continue
+            if not full_name.count("fastdeploy_main.cpython-"):
+                continue
+            if not full_name.endswith(".so"):
+                continue
+            # modify the search path of libraries
+            command = "patchelf --set-rpath '$ORIGIN/libs/' {}".format(
+                full_name)
+            # The sw_64 does not support patchelf, so we just disable that.
+            if platform.machine() != 'sw_64' and platform.machine(
+            ) != 'mips64':
+                assert os.system(
+                    command
+                ) == 0, "patch {} failed, the command: {}".format(
+                    full_name, command)

     for f in os.listdir(".setuptools-cmake-build"):
         if not os.path.isfile(os.path.join(".setuptools-cmake-build", f)):
             continue
         if f.count("libfastdeploy") > 0:
-            shutil.copy(os.path.join(".setuptools-cmake-build", f), "fastdeploy/libs")
+            shutil.copy(
+                os.path.join(".setuptools-cmake-build", f), "fastdeploy/libs")
     for dirname in os.listdir(".setuptools-cmake-build/third_libs/install"):
         for lib in os.listdir(
                 os.path.join(".setuptools-cmake-build/third_libs/install",

From a182893d9232c3ff0ecda5d07ec6517ddca8f449 Mon Sep 17 00:00:00 2001
From: ziqi-jin
Date: Wed, 20 Jul 2022 07:38:15 +0000
Subject: [PATCH 35/51] first commit for yolor

---
 examples/CMakeLists.txt                       |  25 +-
 .../{ppdet_ppyoloe.cc => wongkinyiu_yolor.cc} |  15 +-
 fastdeploy/__init__.py                        |   2 +-
 fastdeploy/download.py                        |   2 +-
 fastdeploy/utils/utils.h                      |  27 +-
 fastdeploy/vision.h                           |   6 +-
 fastdeploy/vision/__init__.py                 |   1 -
 .../vision/common/processors/convert.cc      |  62 -----
 fastdeploy/vision/common/processors/convert.h |  42 ---
 .../vision/common/processors/transform.h     |   1 -
 fastdeploy/vision/meituan/yolov6.cc           |   8 +-
 fastdeploy/vision/ppcls/model.cc              |  19 +-
 fastdeploy/vision/ppcls/model.h               |  20 +-
 fastdeploy/vision/ppcls/ppcls_pybind.cc       |   4 +-
 fastdeploy/vision/ppdet/__init__.py           |  39 ---
 fastdeploy/vision/ppdet/ppdet_pybind.cc       |  32 ---
 fastdeploy/vision/ppdet/ppyoloe.cc            | 170 ------------
 fastdeploy/vision/ppdet/ppyoloe.h             |  44 ----
 fastdeploy/vision/ultralytics/yolov5.cc       |  13 +-
 fastdeploy/vision/utils/sort_det_res.cc       |   6 +-
 fastdeploy/vision/vision_pybind.cc            |   6 +-
 fastdeploy/vision/visualize/detection.cc      |   8 +-
 fastdeploy/vision/wongkinyiu/__init__.py      |  98 +++++++
 .../vision/wongkinyiu/wongkinyiu_pybind.cc   |  21 +-
 fastdeploy/vision/wongkinyiu/yolor.cc         | 243 ++++++++++++++++++
 fastdeploy/vision/wongkinyiu/yolor.h          |  95 +++++++
 model_zoo/vision/ppyoloe/README.md            |  52 ----
 model_zoo/vision/ppyoloe/cpp/README.md        |  39 ---
 model_zoo/vision/ppyoloe/ppyoloe.py           |  24 --
 model_zoo/vision/yolor/README.md              |  67 +++++
 model_zoo/vision/{ppyoloe => yolor}/api.md    |  31 +--
 .../{ppyoloe => yolor}/cpp/CMakeLists.txt     |   6 +-
 model_zoo/vision/yolor/cpp/README.md          |  51 ++++
 .../cpp/ppyoloe.cc => yolor/cpp/yolor.cc}     |  17 +-
 model_zoo/vision/yolor/yolor.py               |  21 ++
 setup.py                                      |  27 +-
 36 files changed, 679 insertions(+), 665 deletions(-)
 rename examples/vision/{ppdet_ppyoloe.cc => wongkinyiu_yolor.cc} (75%)
 delete mode 100644 fastdeploy/vision/common/processors/convert.cc
 delete mode 100644 fastdeploy/vision/common/processors/convert.h
 delete mode 100644 fastdeploy/vision/ppdet/__init__.py
 delete mode 100644 fastdeploy/vision/ppdet/ppdet_pybind.cc
 delete mode 100644
fastdeploy/vision/ppdet/ppyoloe.cc delete mode 100644 fastdeploy/vision/ppdet/ppyoloe.h create mode 100644 fastdeploy/vision/wongkinyiu/yolor.cc create mode 100644 fastdeploy/vision/wongkinyiu/yolor.h delete mode 100644 model_zoo/vision/ppyoloe/README.md delete mode 100644 model_zoo/vision/ppyoloe/cpp/README.md delete mode 100644 model_zoo/vision/ppyoloe/ppyoloe.py create mode 100644 model_zoo/vision/yolor/README.md rename model_zoo/vision/{ppyoloe => yolor}/api.md (56%) rename model_zoo/vision/{ppyoloe => yolor}/cpp/CMakeLists.txt (75%) create mode 100644 model_zoo/vision/yolor/cpp/README.md rename model_zoo/vision/{ppyoloe/cpp/ppyoloe.cc => yolor/cpp/yolor.cc} (66%) create mode 100644 model_zoo/vision/yolor/yolor.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 112193c86a..67361223c6 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,26 +1,25 @@ -function(add_fastdeploy_executable FIELD CC_FILE) +function(add_fastdeploy_executable field url model) # temp target name/file var in function scope - set(TEMP_TARGET_FILE ${CC_FILE}) - string(REGEX MATCHALL "[0-9A-Za-z_]*.cc" FILE_NAME ${CC_FILE}) - string(REGEX REPLACE ".cc" "" FILE_PREFIX ${FILE_NAME}) - set(TEMP_TARGET_NAME ${FIELD}_${FILE_PREFIX}) + set(TEMP_TARGET_FILE ${PROJECT_SOURCE_DIR}/examples/${field}/${url}_${model}.cc) + set(TEMP_TARGET_NAME ${field}_${url}_${model}) if (EXISTS ${TEMP_TARGET_FILE} AND TARGET fastdeploy) add_executable(${TEMP_TARGET_NAME} ${TEMP_TARGET_FILE}) target_link_libraries(${TEMP_TARGET_NAME} PUBLIC fastdeploy) - message(STATUS " Added FastDeploy Executable : ${TEMP_TARGET_NAME}") + message(STATUS "Found source file: [${field}/${url}_${model}.cc], ADD!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") + else () + message(WARNING "Can not found source file: [${field}/${url}_${model}.cc], SKIP!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") endif() unset(TEMP_TARGET_FILE) unset(TEMP_TARGET_NAME) endfunction() # vision examples -if(WITH_VISION_EXAMPLES AND EXISTS ${PROJECT_SOURCE_DIR}/examples/vision) - message(STATUS "") - message(STATUS "*************FastDeploy Examples Summary**********") - file(GLOB ALL_VISION_EXAMPLE_SRCS ${PROJECT_SOURCE_DIR}/examples/vision/*.cc) - foreach(_CC_FILE ${ALL_VISION_EXAMPLE_SRCS}) - add_fastdeploy_executable(vision ${_CC_FILE}) - endforeach() +if (WITH_VISION_EXAMPLES) + add_fastdeploy_executable(vision ultralytics yolov5) + add_fastdeploy_executable(vision meituan yolov6) + add_fastdeploy_executable(vision wongkinyiu yolov7) + add_fastdeploy_executable(vision megvii yolox) + add_fastdeploy_executable(vision wongkinyiu yolor) endif() # other examples ... 
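The diffstat above also lists a new Python demo, `model_zoo/vision/yolor/yolor.py`. A sketch of how the new Python API would be used, mirroring the C++ demo below (the module path `fd.vision.wongkinyiu.YOLOR` and the `predict` signature are assumptions based on the existing YOLOv7 bindings; the file paths are taken from `wongkinyiu_yolor.cc`):

```python
import cv2
import fastdeploy as fd

# Load the exported YOLOR ONNX model through the new wongkinyiu module.
model = fd.vision.wongkinyiu.YOLOR("../resources/models/yolor.onnx")

im = cv2.imread("../resources/images/horses.jpg")
result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5)

# Draw the boxes and save the visualization next to the C++ demo's output.
fd.vision.visualize.vis_detection(im, result)
cv2.imwrite("../resources/outputs/wongkinyiu_yolor_vis_result.jpg", im)
print(result)
```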
diff --git a/examples/vision/ppdet_ppyoloe.cc b/examples/vision/wongkinyiu_yolor.cc similarity index 75% rename from examples/vision/ppdet_ppyoloe.cc rename to examples/vision/wongkinyiu_yolor.cc index b234021c92..abdca2b7ff 100644 --- a/examples/vision/ppdet_ppyoloe.cc +++ b/examples/vision/wongkinyiu_yolor.cc @@ -17,17 +17,18 @@ int main() { namespace vis = fastdeploy::vision; - std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; - std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; - std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; - std::string img_path = "test.jpeg"; - std::string vis_path = "vis.jpeg"; + std::string model_file = "../resources/models/yolor.onnx"; + std::string img_path = "../resources/images/horses.jpg"; + std::string vis_path = "../resources/outputs/wongkinyiu_yolor_vis_result.jpg"; - auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + auto model = vis::wongkinyiu::YOLOR(model_file); if (!model.Initialized()) { - std::cerr << "Init Failed." << std::endl; + std::cerr << "Init Failed! Model: " << model_file << std::endl; return -1; + } else { + std::cout << "Init Done! Model:" << model_file << std::endl; } + model.EnableDebug(); cv::Mat im = cv::imread(img_path); cv::Mat vis_im = im.clone(); diff --git a/fastdeploy/__init__.py b/fastdeploy/__init__.py index 68006c1bed..500e7cc42a 100644 --- a/fastdeploy/__init__.py +++ b/fastdeploy/__init__.py @@ -17,7 +17,7 @@ from .fastdeploy_runtime import * from . import fastdeploy_main as C from . import vision -from .download import download, download_and_decompress +from .download import download def TensorInfoStr(tensor_info): diff --git a/fastdeploy/download.py b/fastdeploy/download.py index 67f21d8e76..e00af098df 100644 --- a/fastdeploy/download.py +++ b/fastdeploy/download.py @@ -156,7 +156,7 @@ def decompress(fname): def url2dir(url, path, rename=None): full_name = download(url, path, rename, show_progress=True) - print("File is donwloaded, now extracting...") + print("SDK is donwloaded, now extracting...") if url.count(".tgz") > 0 or url.count(".tar") > 0 or url.count("zip") > 0: return decompress(full_name) diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h index 9312084265..23ca6ee51a 100644 --- a/fastdeploy/utils/utils.h +++ b/fastdeploy/utils/utils.h @@ -26,10 +26,10 @@ #define FASTDEPLOY_DECL __declspec(dllexport) #else #define FASTDEPLOY_DECL __declspec(dllimport) -#endif // FASTDEPLOY_LIB +#endif // FASTDEPLOY_LIB #else #define FASTDEPLOY_DECL __attribute__((visibility("default"))) -#endif // _WIN32 +#endif // _WIN32 namespace fastdeploy { @@ -42,7 +42,8 @@ class FASTDEPLOY_DECL FDLogger { } explicit FDLogger(bool verbose, const std::string& prefix = "[FastDeploy]"); - template FDLogger& operator<<(const T& val) { + template + FDLogger& operator<<(const T& val) { if (!verbose_) { return *this; } @@ -64,18 +65,14 @@ class FASTDEPLOY_DECL FDLogger { bool verbose_ = true; }; -#ifndef __REL_FILE__ -#define __REL_FILE__ __FILE__ -#endif +#define FDERROR \ + FDLogger(true, "[ERROR]") << __REL_FILE__ << "(" << __LINE__ \ + << ")::" << __FUNCTION__ << "\t" -#define FDERROR \ - FDLogger(true, "[ERROR]") \ - << __REL_FILE__ << "(" << __LINE__ << ")::" << __FUNCTION__ << "\t" - -#define FDASSERT(condition, message) \ - if (!(condition)) { \ - FDERROR << message << std::endl; \ - std::abort(); \ +#define FDASSERT(condition, message) \ + if (!(condition)) { \ + FDERROR << message << std::endl; \ + std::abort(); \ } -} // namespace fastdeploy +} 
// namespace fastdeploy diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index cafe310c70..4398463251 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -15,12 +15,12 @@ #include "fastdeploy/core/config.h" #ifdef ENABLE_VISION +#include "fastdeploy/vision/megvii/yolox.h" +#include "fastdeploy/vision/meituan/yolov6.h" #include "fastdeploy/vision/ppcls/model.h" -#include "fastdeploy/vision/ppdet/ppyoloe.h" #include "fastdeploy/vision/ultralytics/yolov5.h" +#include "fastdeploy/vision/wongkinyiu/yolor.h" #include "fastdeploy/vision/wongkinyiu/yolov7.h" -#include "fastdeploy/vision/meituan/yolov6.h" -#include "fastdeploy/vision/megvii/yolox.h" #endif #include "fastdeploy/vision/visualize/visualize.h" diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 6acbf0c376..7122bede0b 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -15,7 +15,6 @@ from . import evaluation from . import ppcls -from . import ppdet from . import ultralytics from . import meituan from . import megvii diff --git a/fastdeploy/vision/common/processors/convert.cc b/fastdeploy/vision/common/processors/convert.cc deleted file mode 100644 index a7ca6de07a..0000000000 --- a/fastdeploy/vision/common/processors/convert.cc +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "fastdeploy/vision/common/processors/convert.h" - -namespace fastdeploy { - -namespace vision { - -Convert::Convert(const std::vector& alpha, - const std::vector& beta) { - FDASSERT(alpha.size() == beta.size(), - "Convert: requires the size of alpha equal to the size of beta."); - FDASSERT(alpha.size() != 0, - "Convert: requires the size of alpha and beta > 0."); - alpha_.assign(alpha.begin(), alpha.end()); - beta_.assign(beta.begin(), beta.end()); -} - -bool Convert::CpuRun(Mat* mat) { - cv::Mat* im = mat->GetCpuMat(); - std::vector split_im; - cv::split(*im, split_im); - for (int c = 0; c < im->channels(); c++) { - split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); - } - cv::merge(split_im, *im); - return true; -} - -#ifdef ENABLE_OPENCV_CUDA -bool Convert::GpuRun(Mat* mat) { - cv::cuda::GpuMat* im = mat->GetGpuMat(); - std::vector split_im; - cv::cuda::split(*im, split_im); - for (int c = 0; c < im->channels(); c++) { - split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); - } - cv::cuda::merge(split_im, *im); - return true; -} -#endif - -bool Convert::Run(Mat* mat, const std::vector& alpha, - const std::vector& beta, ProcLib lib) { - auto c = Convert(alpha, beta); - return c(mat, lib); -} - -} // namespace vision -} // namespace fastdeploy \ No newline at end of file diff --git a/fastdeploy/vision/common/processors/convert.h b/fastdeploy/vision/common/processors/convert.h deleted file mode 100644 index 5d5a5276f5..0000000000 --- a/fastdeploy/vision/common/processors/convert.h +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#pragma once - -#include "fastdeploy/vision/common/processors/base.h" - -namespace fastdeploy { -namespace vision { -class Convert : public Processor { - public: - Convert(const std::vector& alpha, const std::vector& beta); - - bool CpuRun(Mat* mat); -#ifdef ENABLE_OPENCV_CUDA - bool GpuRun(Mat* mat); -#endif - std::string Name() { return "Convert"; } - - // Compute `result = mat * alpha + beta` directly by channel. - // The default behavior is the same as OpenCV's convertTo method. 
- static bool Run(Mat* mat, const std::vector& alpha, - const std::vector& beta, - ProcLib lib = ProcLib::OPENCV_CPU); - - private: - std::vector alpha_; - std::vector beta_; -}; -} // namespace vision -} // namespace fastdeploy diff --git a/fastdeploy/vision/common/processors/transform.h b/fastdeploy/vision/common/processors/transform.h index 08073b4e42..12eec8d72d 100644 --- a/fastdeploy/vision/common/processors/transform.h +++ b/fastdeploy/vision/common/processors/transform.h @@ -17,7 +17,6 @@ #include "fastdeploy/vision/common/processors/cast.h" #include "fastdeploy/vision/common/processors/center_crop.h" #include "fastdeploy/vision/common/processors/color_space_convert.h" -#include "fastdeploy/vision/common/processors/convert.h" #include "fastdeploy/vision/common/processors/hwc2chw.h" #include "fastdeploy/vision/common/processors/normalize.h" #include "fastdeploy/vision/common/processors/pad.h" diff --git a/fastdeploy/vision/meituan/yolov6.cc b/fastdeploy/vision/meituan/yolov6.cc index 8ac7377194..b75f2016ee 100644 --- a/fastdeploy/vision/meituan/yolov6.cc +++ b/fastdeploy/vision/meituan/yolov6.cc @@ -129,12 +129,8 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - // std::vector(mat->Channels(), 1.0)); - // Compute `result = mat * alpha + beta` directly by channel - std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; - std::vector beta = {0.0f, 0.0f, 0.0f}; - Convert::Run(mat, alpha, beta); + Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + std::vector(mat->Channels(), 1.0)); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc index c4e5b767c7..915cb97512 100644 --- a/fastdeploy/vision/ppcls/model.cc +++ b/fastdeploy/vision/ppcls/model.cc @@ -1,16 +1,3 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/utils/utils.h" @@ -148,6 +135,6 @@ bool Model::Predict(cv::Mat* im, ClassifyResult* result, int topk) { return true; } -} // namespace ppcls -} // namespace vision -} // namespace fastdeploy +} // namespace ppcls +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h index 265f92d32b..fae99d4f3c 100644 --- a/fastdeploy/vision/ppcls/model.h +++ b/fastdeploy/vision/ppcls/model.h @@ -1,17 +1,3 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - #pragma once #include "fastdeploy/fastdeploy_model.h" #include "fastdeploy/vision/common/processors/transform.h" @@ -46,6 +32,6 @@ class FASTDEPLOY_DECL Model : public FastDeployModel { std::vector> processors_; std::string config_file_; }; -} // namespace ppcls -} // namespace vision -} // namespace fastdeploy +} // namespace ppcls +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppcls/ppcls_pybind.cc b/fastdeploy/vision/ppcls/ppcls_pybind.cc index 1abc0b2b7c..828bef3c7a 100644 --- a/fastdeploy/vision/ppcls/ppcls_pybind.cc +++ b/fastdeploy/vision/ppcls/ppcls_pybind.cc @@ -14,7 +14,7 @@ #include "fastdeploy/pybind/main.h" namespace fastdeploy { -void BindPPCls(pybind11::module& m) { +void BindPpClsModel(pybind11::module& m) { auto ppcls_module = m.def_submodule("ppcls", "Module to deploy PaddleClas."); pybind11::class_(ppcls_module, "Model") .def(pybind11::init(ppdet_module, - "PPYOLOE") - .def(pybind11::init()) - .def("predict", [](vision::ppdet::PPYOLOE& self, pybind11::array& data, - float conf_threshold, float nms_iou_threshold) { - auto mat = PyArrayToCvMat(data); - vision::DetectionResult res; - self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); - return res; - }); -} -} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.cc b/fastdeploy/vision/ppdet/ppyoloe.cc deleted file mode 100644 index c215ecb0ca..0000000000 --- a/fastdeploy/vision/ppdet/ppyoloe.cc +++ /dev/null @@ -1,170 +0,0 @@ -#include "fastdeploy/vision/ppdet/ppyoloe.h" -#include "fastdeploy/vision/utils/utils.h" -#include "yaml-cpp/yaml.h" - -namespace fastdeploy { -namespace vision { -namespace ppdet { - -PPYOLOE::PPYOLOE(const std::string& model_file, const std::string& params_file, - const std::string& config_file, - const RuntimeOption& custom_option, - const Frontend& model_format) { - config_file_ = config_file; - valid_cpu_backends = {Backend::ORT, Backend::PDINFER}; - valid_gpu_backends = {Backend::ORT, Backend::PDINFER}; - runtime_option = custom_option; - runtime_option.model_format = model_format; - runtime_option.model_file = model_file; - runtime_option.params_file = params_file; - initialized = Initialize(); -} - -bool PPYOLOE::Initialize() { - if (!BuildPreprocessPipelineFromConfig()) { - std::cout << "Failed to build preprocess pipeline from configuration file." - << std::endl; - return false; - } - if (!InitRuntime()) { - std::cout << "Failed to initialize fastdeploy backend." << std::endl; - return false; - } - return true; -} - -bool PPYOLOE::BuildPreprocessPipelineFromConfig() { - processors_.clear(); - YAML::Node cfg; - try { - cfg = YAML::LoadFile(config_file_); - } catch (YAML::BadFile& e) { - std::cout << "Failed to load yaml file " << config_file_ - << ", maybe you should check this file." << std::endl; - return false; - } - - if (cfg["arch"].as() != "YOLO") { - std::cout << "Require the arch of model is YOLO, but arch defined in " - "config file is " - << cfg["arch"].as() << "." 
<< std::endl; - return false; - } - processors_.push_back(std::make_shared()); - - for (const auto& op : cfg["Preprocess"]) { - std::string op_name = op["type"].as(); - if (op_name == "NormalizeImage") { - auto mean = op["mean"].as>(); - auto std = op["std"].as>(); - bool is_scale = op["is_scale"].as(); - processors_.push_back(std::make_shared(mean, std, is_scale)); - } else if (op_name == "Resize") { - bool keep_ratio = op["keep_ratio"].as(); - auto target_size = op["target_size"].as>(); - int interp = op["interp"].as(); - FDASSERT(target_size.size(), - "Require size of target_size be 2, but now it's " + - std::to_string(target_size.size()) + "."); - FDASSERT(!keep_ratio, - "Only support keep_ratio is false while deploy " - "PaddleDetection model."); - int width = target_size[1]; - int height = target_size[0]; - processors_.push_back( - std::make_shared(width, height, -1.0, -1.0, interp, false)); - } else if (op_name == "Permute") { - processors_.push_back(std::make_shared()); - } else { - std::cout << "Unexcepted preprocess operator: " << op_name << "." - << std::endl; - return false; - } - } - return true; -} - -bool PPYOLOE::Preprocess(Mat* mat, std::vector* outputs) { - int origin_w = mat->Width(); - int origin_h = mat->Height(); - for (size_t i = 0; i < processors_.size(); ++i) { - if (!(*(processors_[i].get()))(mat)) { - std::cout << "Failed to process image data in " << processors_[i]->Name() - << "." << std::endl; - return false; - } - } - - outputs->resize(2); - (*outputs)[0].name = InputInfoOfRuntime(0).name; - mat->ShareWithTensor(&((*outputs)[0])); - - // reshape to [1, c, h, w] - (*outputs)[0].shape.insert((*outputs)[0].shape.begin(), 1); - - (*outputs)[1].Allocate({1, 2}, FDDataType::FP32, InputInfoOfRuntime(1).name); - float* ptr = static_cast((*outputs)[1].MutableData()); - ptr[0] = mat->Height() * 1.0 / mat->Height(); - ptr[1] = mat->Width() * 1.0 / mat->Width(); - return true; -} - -bool PPYOLOE::Postprocess(std::vector& infer_result, - DetectionResult* result, float conf_threshold, - float nms_threshold) { - FDASSERT(infer_result[1].shape[0] == 1, - "Only support batch = 1 in FastDeploy now."); - int box_num = 0; - if (infer_result[1].dtype == FDDataType::INT32) { - box_num = *(static_cast(infer_result[1].Data())); - } else if (infer_result[1].dtype == FDDataType::INT64) { - box_num = *(static_cast(infer_result[1].Data())); - } else { - FDASSERT( - false, - "The output box_num of PPYOLOE model should be type of int32/int64."); - } - result->Reserve(box_num); - float* box_data = static_cast(infer_result[0].Data()); - for (size_t i = 0; i < box_num; ++i) { - if (box_data[i * 6 + 1] < conf_threshold) { - continue; - } - result->label_ids.push_back(box_data[i * 6]); - result->scores.push_back(box_data[i * 6 + 1]); - result->boxes.emplace_back( - std::array{box_data[i * 6 + 2], box_data[i * 6 + 3], - box_data[i * 6 + 4] - box_data[i * 6 + 2], - box_data[i * 6 + 5] - box_data[i * 6 + 3]}); - } - return true; -} - -bool PPYOLOE::Predict(cv::Mat* im, DetectionResult* result, - float conf_threshold, float iou_threshold) { - Mat mat(*im); - std::vector processed_data; - if (!Preprocess(&mat, &processed_data)) { - FDERROR << "Failed to preprocess input data while using model:" - << ModelName() << "." << std::endl; - return false; - } - - std::vector infer_result; - if (!Infer(processed_data, &infer_result)) { - FDERROR << "Failed to inference while using model:" << ModelName() << "." 
- << std::endl; - return false; - } - - if (!Postprocess(infer_result, result, conf_threshold, iou_threshold)) { - FDERROR << "Failed to postprocess while using model:" << ModelName() << "." - << std::endl; - return false; - } - return true; -} - -} // namespace ppdet -} // namespace vision -} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.h b/fastdeploy/vision/ppdet/ppyoloe.h deleted file mode 100644 index a3db268ca4..0000000000 --- a/fastdeploy/vision/ppdet/ppyoloe.h +++ /dev/null @@ -1,44 +0,0 @@ -#pragma once -#include "fastdeploy/fastdeploy_model.h" -#include "fastdeploy/vision/common/processors/transform.h" -#include "fastdeploy/vision/common/result.h" - -#include "fastdeploy/vision/utils/utils.h" - -namespace fastdeploy { -namespace vision { -namespace ppdet { - -class FASTDEPLOY_DECL PPYOLOE : public FastDeployModel { - public: - PPYOLOE(const std::string& model_file, const std::string& params_file, - const std::string& config_file, - const RuntimeOption& custom_option = RuntimeOption(), - const Frontend& model_format = Frontend::PADDLE); - - std::string ModelName() const { return "PaddleDetection/PPYOLOE"; } - - virtual bool Initialize(); - - virtual bool BuildPreprocessPipelineFromConfig(); - - virtual bool Preprocess(Mat* mat, std::vector* outputs); - - virtual bool Postprocess(std::vector& infer_result, - DetectionResult* result, float conf_threshold, - float nms_threshold); - - virtual bool Predict(cv::Mat* im, DetectionResult* result, - float conf_threshold = 0.5, float nms_threshold = 0.7); - - private: - std::vector> processors_; - std::string config_file_; - // PaddleDetection can export model without nms - // This flag will help us to handle the different - // situation - bool has_nms_; -}; -} // namespace ppdet -} // namespace vision -} // namespace fastdeploy diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc index 0b7e50e735..c8c6e06a94 100644 --- a/fastdeploy/vision/ultralytics/yolov5.cc +++ b/fastdeploy/vision/ultralytics/yolov5.cc @@ -126,12 +126,8 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - // std::vector(mat->Channels(), 1.0)); - // Compute `result = mat * alpha + beta` directly by channel - std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; - std::vector beta = {0.0f, 0.0f, 0.0f}; - Convert::Run(mat, alpha, beta); + Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + std::vector(mat->Channels(), 1.0)); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), @@ -202,11 +198,6 @@ bool YOLOv5::Postprocess( result->scores.push_back(confidence); } } - - if (result->boxes.size() == 0) { - return true; - } - utils::NMS(result, nms_iou_threshold); // scale the boxes to the origin image shape diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc index 93dbb69694..e4a0db9761 100644 --- a/fastdeploy/vision/utils/sort_det_res.cc +++ b/fastdeploy/vision/utils/sort_det_res.cc @@ -68,11 +68,7 @@ void MergeSort(DetectionResult* result, size_t low, size_t high) { void SortDetectionResult(DetectionResult* result) { size_t low = 0; - size_t high = result->scores.size(); - if (high == 0) { - return; - } - high = high - 1; + size_t high = result->scores.size() - 1; MergeSort(result, low, high); } diff --git 
a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc
index 0334303ce6..e4ba05b893 100644
--- a/fastdeploy/vision/vision_pybind.cc
+++ b/fastdeploy/vision/vision_pybind.cc
@@ -16,8 +16,7 @@

 namespace fastdeploy {

-void BindPPCls(pybind11::module& m);
-void BindPPDet(pybind11::module& m);
+void BindPpClsModel(pybind11::module& m);
 void BindWongkinyiu(pybind11::module& m);
 void BindUltralytics(pybind11::module& m);
 void BindMeituan(pybind11::module& m);
@@ -42,8 +41,7 @@ void BindVision(pybind11::module& m) {
       .def("__repr__", &vision::DetectionResult::Str)
       .def("__str__", &vision::DetectionResult::Str);

-  BindPPCls(m);
-  BindPPDet(m);
+  BindPpClsModel(m);
   BindUltralytics(m);
   BindWongkinyiu(m);
   BindMeituan(m);
diff --git a/fastdeploy/vision/visualize/detection.cc b/fastdeploy/vision/visualize/detection.cc
index 5b5538bff7..e5f01bdd35 100644
--- a/fastdeploy/vision/visualize/detection.cc
+++ b/fastdeploy/vision/visualize/detection.cc
@@ -43,7 +43,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result,
     }
     std::string text = id + "," + score;
     int font = cv::FONT_HERSHEY_SIMPLEX;
-    cv::Size text_size = cv::getTextSize(text, font, font_size, 1, nullptr);
+    cv::Size text_size = cv::getTextSize(text, font, font_size, 0.5, nullptr);
     cv::Point origin;
     origin.x = rect.x;
     origin.y = rect.y;
@@ -52,10 +52,10 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result,
                        text_size.width, text_size.height);
     cv::rectangle(*im, rect, rect_color, line_size);
     cv::putText(*im, text, origin, font, font_size, cv::Scalar(255, 255, 255),
-                1);
+                0.5);
   }
 }
-}  // namespace vision
-}  // namespace fastdeploy
+}  // namespace vision
+}  // namespace fastdeploy
 #endif
diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py
index 542389e208..026d10062f 100644
--- a/fastdeploy/vision/wongkinyiu/__init__.py
+++ b/fastdeploy/vision/wongkinyiu/__init__.py
@@ -114,3 +114,101 @@ def max_wh(self, value):
         assert isinstance(
             value, float), "The value to set `max_wh` must be type of float."
         self._model.max_wh = value
+
+
+class YOLOR(FastDeployModel):
+    def __init__(self,
+                 model_file,
+                 params_file="",
+                 runtime_option=None,
+                 model_format=Frontend.ONNX):
+        # Initialize backend_option through the base class; the resulting
+        # option is stored in self._runtime_option
+        super(YOLOR, self).__init__(runtime_option)
+
+        self._model = C.vision.wongkinyiu.YOLOR(
+            model_file, params_file, self._runtime_option, model_format)
+        # self.initialized reports whether the whole model initialized successfully
+        assert self.initialized, "YOLOR initialize failed."
+
+    def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5):
+        return self._model.predict(input_image, conf_threshold,
+                                   nms_iou_threshold)
+
+    # Property wrappers for the YOLOR model, mostly preprocessing related.
+    # For example, model.size = [1280, 1280] changes the resize shape used
+    # during preprocessing (provided the model supports it).
+    @property
+    def size(self):
+        return self._model.size
+
+    @property
+    def padding_value(self):
+        return self._model.padding_value
+
+    @property
+    def is_no_pad(self):
+        return self._model.is_no_pad
+
+    @property
+    def is_mini_pad(self):
+        return self._model.is_mini_pad
+
+    @property
+    def is_scale_up(self):
+        return self._model.is_scale_up
+
+    @property
+    def stride(self):
+        return self._model.stride
+
+    @property
+    def max_wh(self):
+        return self._model.max_wh
+
+    @size.setter
+    def size(self, wh):
+        assert isinstance(wh, (list, tuple)),\
+            "The value to set `size` must be type of tuple or list."
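+        # isinstance() expects a type or a tuple of types as its second
+        # argument; a list such as [list, tuple] would raise TypeError at
+        # runtime, so a tuple is used above.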
+        assert len(wh) == 2,\
+            "The value to set `size` must contain 2 elements meaning [width, height], but now it contains {} elements.".format(
+                len(wh))
+        self._model.size = wh
+
+    @padding_value.setter
+    def padding_value(self, value):
+        assert isinstance(
+            value,
+            list), "The value to set `padding_value` must be type of list."
+        self._model.padding_value = value
+
+    @is_no_pad.setter
+    def is_no_pad(self, value):
+        assert isinstance(
+            value, bool), "The value to set `is_no_pad` must be type of bool."
+        self._model.is_no_pad = value
+
+    @is_mini_pad.setter
+    def is_mini_pad(self, value):
+        assert isinstance(
+            value,
+            bool), "The value to set `is_mini_pad` must be type of bool."
+        self._model.is_mini_pad = value
+
+    @is_scale_up.setter
+    def is_scale_up(self, value):
+        assert isinstance(
+            value,
+            bool), "The value to set `is_scale_up` must be type of bool."
+        self._model.is_scale_up = value
+
+    @stride.setter
+    def stride(self, value):
+        assert isinstance(
+            value, int), "The value to set `stride` must be type of int."
+        self._model.stride = value
+
+    @max_wh.setter
+    def max_wh(self, value):
+        assert isinstance(
+            value, float), "The value to set `max_wh` must be type of float."
+        self._model.max_wh = value
diff --git a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
index 4a10f47a76..6bde2a1841 100644
--- a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
+++ b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc
@@ -17,7 +17,7 @@
 namespace fastdeploy {
 void BindWongkinyiu(pybind11::module& m) {
   auto wongkinyiu_module =
-      m.def_submodule("wongkinyiu", "https://github.com/WongKinYiu/yolov7");
+      m.def_submodule("wongkinyiu", "https://github.com/WongKinYiu");
   pybind11::class_<vision::wongkinyiu::YOLOv7, FastDeployModel>(
       wongkinyiu_module, "YOLOv7")
       .def(pybind11::init<std::string, std::string, RuntimeOption, Frontend>())
       .def("predict",
@@ -37,5 +37,24 @@ void BindWongkinyiu(pybind11::module& m) {
       .def_readwrite("is_scale_up", &vision::wongkinyiu::YOLOv7::is_scale_up)
       .def_readwrite("stride", &vision::wongkinyiu::YOLOv7::stride)
       .def_readwrite("max_wh", &vision::wongkinyiu::YOLOv7::max_wh);
+
+  pybind11::class_<vision::wongkinyiu::YOLOR, FastDeployModel>(
+      wongkinyiu_module, "YOLOR")
+      .def(pybind11::init<std::string, std::string, RuntimeOption, Frontend>())
+      .def("predict",
+           [](vision::wongkinyiu::YOLOR& self, pybind11::array& data,
+              float conf_threshold, float nms_iou_threshold) {
+             auto mat = PyArrayToCvMat(data);
+             vision::DetectionResult res;
+             self.Predict(&mat, &res, conf_threshold, nms_iou_threshold);
+             return res;
+           })
+      .def_readwrite("size", &vision::wongkinyiu::YOLOR::size)
+      .def_readwrite("padding_value", &vision::wongkinyiu::YOLOR::padding_value)
+      .def_readwrite("is_mini_pad", &vision::wongkinyiu::YOLOR::is_mini_pad)
+      .def_readwrite("is_no_pad", &vision::wongkinyiu::YOLOR::is_no_pad)
+      .def_readwrite("is_scale_up", &vision::wongkinyiu::YOLOR::is_scale_up)
+      .def_readwrite("stride", &vision::wongkinyiu::YOLOR::stride)
+      .def_readwrite("max_wh", &vision::wongkinyiu::YOLOR::max_wh);
 }
 }  // namespace fastdeploy
diff --git a/fastdeploy/vision/wongkinyiu/yolor.cc b/fastdeploy/vision/wongkinyiu/yolor.cc
new file mode 100644
index 0000000000..5cf9d6cb83
--- /dev/null
+++ b/fastdeploy/vision/wongkinyiu/yolor.cc
@@ -0,0 +1,243 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision/wongkinyiu/yolor.h" +#include "fastdeploy/utils/perf.h" +#include "fastdeploy/vision/utils/utils.h" + +namespace fastdeploy { +namespace vision { +namespace wongkinyiu { + +void YOLOR::LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill, bool scale_up, int stride) { + float scale = + std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width()); + if (!scale_up) { + scale = std::min(scale, 1.0f); + } + + int resize_h = int(round(mat->Height() * scale)); + int resize_w = int(round(mat->Width() * scale)); + + int pad_w = size[0] - resize_w; + int pad_h = size[1] - resize_h; + if (_auto) { + pad_h = pad_h % stride; + pad_w = pad_w % stride; + } else if (scale_fill) { + pad_h = 0; + pad_w = 0; + resize_h = size[1]; + resize_w = size[0]; + } + Resize::Run(mat, resize_w, resize_h); + if (pad_h > 0 || pad_w > 0) { + float half_h = pad_h * 1.0 / 2; + int top = int(round(half_h - 0.1)); + int bottom = int(round(half_h + 0.1)); + float half_w = pad_w * 1.0 / 2; + int left = int(round(half_w - 0.1)); + int right = int(round(half_w + 0.1)); + Pad::Run(mat, top, bottom, left, right, color); + } +} + +YOLOR::YOLOR(const std::string& model_file, const std::string& params_file, + const RuntimeOption& custom_option, const Frontend& model_format) { + if (model_format == Frontend::ONNX) { + valid_cpu_backends = {Backend::ORT}; // 指定可用的CPU后端 + valid_gpu_backends = {Backend::ORT, Backend::TRT}; // 指定可用的GPU后端 + } else { + valid_cpu_backends = {Backend::PDINFER, Backend::ORT}; + valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT}; + } + runtime_option = custom_option; + runtime_option.model_format = model_format; + runtime_option.model_file = model_file; + runtime_option.params_file = params_file; + initialized = Initialize(); +} + +bool YOLOR::Initialize() { + // parameters for preprocess + size = {640, 640}; + padding_value = {114.0, 114.0, 114.0}; + is_mini_pad = false; + is_no_pad = false; + is_scale_up = false; + stride = 32; + max_wh = 7680.0; + + if (!InitRuntime()) { + FDERROR << "Failed to initialize fastdeploy backend." << std::endl; + return false; + } + return true; +} + +bool YOLOR::Preprocess(Mat* mat, FDTensor* output, + std::map>* im_info) { + // process after image load + double ratio = (size[0] * 1.0) / std::max(static_cast(mat->Height()), + static_cast(mat->Width())); + if (ratio != 1.0) { + int interp = cv::INTER_AREA; + if (ratio > 1.0) { + interp = cv::INTER_LINEAR; + } + int resize_h = int(mat->Height() * ratio); + int resize_w = int(mat->Width() * ratio); + Resize::Run(mat, resize_w, resize_h, -1, -1, interp); + } + // yolor's preprocess steps + // 1. letterbox + // 2. BGR->RGB + // 3. 
HWC->CHW + YOLOR::LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, + is_scale_up, stride); + BGR2RGB::Run(mat); + Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + std::vector(mat->Channels(), 1.0)); + + // Record output shape of preprocessed image + (*im_info)["output_shape"] = {static_cast(mat->Height()), + static_cast(mat->Width())}; + + HWC2CHW::Run(mat); + Cast::Run(mat, "float"); + mat->ShareWithTensor(output); + output->shape.insert(output->shape.begin(), 1); // reshape to n, h, w, c + return true; +} + +bool YOLOR::Postprocess( + FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold) { + FDASSERT(infer_result.shape[0] == 1, "Only support batch =1 now."); + result->Clear(); + result->Reserve(infer_result.shape[1]); + if (infer_result.dtype != FDDataType::FP32) { + FDERROR << "Only support post process with float32 data." << std::endl; + return false; + } + float* data = static_cast(infer_result.Data()); + for (size_t i = 0; i < infer_result.shape[1]; ++i) { + int s = i * infer_result.shape[2]; + float confidence = data[s + 4]; + float* max_class_score = + std::max_element(data + s + 5, data + s + infer_result.shape[2]); + confidence *= (*max_class_score); + // filter boxes by conf_threshold + if (confidence <= conf_threshold) { + continue; + } + int32_t label_id = std::distance(data + s + 5, max_class_score); + // convert from [x, y, w, h] to [x1, y1, x2, y2] + result->boxes.emplace_back(std::array{ + data[s] - data[s + 2] / 2.0f + label_id * max_wh, + data[s + 1] - data[s + 3] / 2.0f + label_id * max_wh, + data[s + 0] + data[s + 2] / 2.0f + label_id * max_wh, + data[s + 1] + data[s + 3] / 2.0f + label_id * max_wh}); + result->label_ids.push_back(label_id); + result->scores.push_back(confidence); + } + utils::NMS(result, nms_iou_threshold); + + // scale the boxes to the origin image shape + auto iter_out = im_info.find("output_shape"); + auto iter_ipt = im_info.find("input_shape"); + FDASSERT(iter_out != im_info.end() && iter_ipt != im_info.end(), + "Cannot find input_shape or output_shape from im_info."); + float out_h = iter_out->second[0]; + float out_w = iter_out->second[1]; + float ipt_h = iter_ipt->second[0]; + float ipt_w = iter_ipt->second[1]; + float scale = std::min(out_h / ipt_h, out_w / ipt_w); + for (size_t i = 0; i < result->boxes.size(); ++i) { + float pad_h = (out_h - ipt_h * scale) / 2; + float pad_w = (out_w - ipt_w * scale) / 2; + int32_t label_id = (result->label_ids)[i]; + // clip box + result->boxes[i][0] = result->boxes[i][0] - max_wh * label_id; + result->boxes[i][1] = result->boxes[i][1] - max_wh * label_id; + result->boxes[i][2] = result->boxes[i][2] - max_wh * label_id; + result->boxes[i][3] = result->boxes[i][3] - max_wh * label_id; + result->boxes[i][0] = std::max((result->boxes[i][0] - pad_w) / scale, 0.0f); + result->boxes[i][1] = std::max((result->boxes[i][1] - pad_h) / scale, 0.0f); + result->boxes[i][2] = std::max((result->boxes[i][2] - pad_w) / scale, 0.0f); + result->boxes[i][3] = std::max((result->boxes[i][3] - pad_h) / scale, 0.0f); + result->boxes[i][0] = std::min(result->boxes[i][0], ipt_w); + result->boxes[i][1] = std::min(result->boxes[i][1], ipt_h); + result->boxes[i][2] = std::min(result->boxes[i][2], ipt_w); + result->boxes[i][3] = std::min(result->boxes[i][3], ipt_h); + } + return true; +} + +bool YOLOR::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold, + float nms_iou_threshold) { +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_START(0) 
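+  // TIMERECORD_START/TIMERECORD_END come from fastdeploy/utils/perf.h
+  // (included above); pair 0 brackets the preprocessing stage and is closed
+  // by TIMERECORD_END(0, "Preprocess") below, so debug builds can time each
+  // stage of Predict separately.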
+#endif + + Mat mat(*im); + std::vector input_tensors(1); + + std::map> im_info; + + // Record the shape of image and the shape of preprocessed image + im_info["input_shape"] = {static_cast(mat.Height()), + static_cast(mat.Width())}; + im_info["output_shape"] = {static_cast(mat.Height()), + static_cast(mat.Width())}; + + if (!Preprocess(&mat, &input_tensors[0], &im_info)) { + FDERROR << "Failed to preprocess input image." << std::endl; + return false; + } + +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(0, "Preprocess") + TIMERECORD_START(1) +#endif + + input_tensors[0].name = InputInfoOfRuntime(0).name; + std::vector output_tensors; + if (!Infer(input_tensors, &output_tensors)) { + FDERROR << "Failed to inference." << std::endl; + return false; + } +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(1, "Inference") + TIMERECORD_START(2) +#endif + + if (!Postprocess(output_tensors[0], result, im_info, conf_threshold, + nms_iou_threshold)) { + FDERROR << "Failed to post process." << std::endl; + return false; + } + +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(2, "Postprocess") +#endif + return true; +} + +} // namespace wongkinyiu +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/wongkinyiu/yolor.h b/fastdeploy/vision/wongkinyiu/yolor.h new file mode 100644 index 0000000000..69f5ea8760 --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/yolor.h @@ -0,0 +1,95 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
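+//
+// yolor.h declares the YOLOR detector wrapper: construction from an ONNX or
+// Paddle model, the Predict() entry point, and the LetterBox/Preprocess/
+// Postprocess helpers documented below.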
+ +#pragma once +#include "fastdeploy/fastdeploy_model.h" +#include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" + +namespace fastdeploy { +namespace vision { +namespace wongkinyiu { + +class FASTDEPLOY_DECL YOLOR : public FastDeployModel { + public: + // 当model_format为ONNX时,无需指定params_file + // 当model_format为Paddle时,则需同时指定model_file & params_file + YOLOR(const std::string& model_file, const std::string& params_file = "", + const RuntimeOption& custom_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX); + + // 定义模型的名称 + virtual std::string ModelName() const { return "WongKinYiu/yolor"; } + + // 模型预测接口,即用户调用的接口 + // im 为用户的输入数据,目前对于CV均定义为cv::Mat + // result 为模型预测的输出结构体 + // conf_threshold 为后处理的参数 + // nms_iou_threshold 为后处理的参数 + virtual bool Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold = 0.25, + float nms_iou_threshold = 0.5); + + // 以下为模型在预测时的一些参数,基本是前后处理所需 + // 用户在创建模型后,可根据模型的要求,以及自己的需求 + // 对参数进行修改 + // tuple of (width, height) + std::vector size; + // padding value, size should be same with Channels + std::vector padding_value; + // only pad to the minimum rectange which height and width is times of stride + bool is_mini_pad; + // while is_mini_pad = false and is_no_pad = true, will resize the image to + // the set size + bool is_no_pad; + // if is_scale_up is false, the input image only can be zoom out, the maximum + // resize scale cannot exceed 1.0 + bool is_scale_up; + // padding stride, for is_mini_pad + int stride; + // for offseting the boxes by classes when using NMS + float max_wh; + + private: + // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 + bool Initialize(); + + // 输入图像预处理操作 + // Mat为FastDeploy定义的数据结构 + // FDTensor为预处理后的Tensor数据,传给后端进行推理 + // im_info为预处理过程保存的数据,在后处理中需要用到 + bool Preprocess(Mat* mat, FDTensor* outputs, + std::map>* im_info); + + // 后端推理结果后处理,输出给用户 + // infer_result 为后端推理后的输出Tensor + // result 为模型预测的结果 + // im_info 为预处理记录的信息,后处理用于还原box + // conf_threshold 后处理时过滤box的置信度阈值 + // nms_iou_threshold 后处理时NMS设定的iou阈值 + bool Postprocess(FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold); + + // 对图片进行LetterBox处理 + // mat 为读取到的原图 + // size 为输入模型的图像尺寸 + void LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill = false, bool scale_up = true, + int stride = 32); +}; +} // namespace wongkinyiu +} // namespace vision +} // namespace fastdeploy diff --git a/model_zoo/vision/ppyoloe/README.md b/model_zoo/vision/ppyoloe/README.md deleted file mode 100644 index 42d18104ad..0000000000 --- a/model_zoo/vision/ppyoloe/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# PaddleDetection/PPYOLOE部署示例 - -- 当前支持PaddleDetection版本为[release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) - -本文档说明如何进行[PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe)的快速部署推理。本目录结构如下 -``` -. 
-├── cpp # C++ 代码目录 -│   ├── CMakeLists.txt # C++ 代码编译CMakeLists文件 -│   ├── README.md # C++ 代码编译部署文档 -│   └── ppyoloe.cc # C++ 示例代码 -├── README.md # PPYOLOE 部署文档 -└── ppyoloe.py # Python示例代码 -``` - -## 安装FastDeploy - -使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` -``` -# 安装fastdeploy-python工具 -pip install fastdeploy-python -``` - -## Python部署 - -执行如下代码即会自动下载PPYOLOE模型和测试图片 -``` -python ppyoloe.py -``` - -执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 -``` -DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 -414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 -163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 -267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 -581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 -104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 -348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 -364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 -75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 -328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 -504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 -379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 -25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 -``` - -## 其它文档 - -- [C++部署](./cpp/README.md) -- [PPYOLOE API文档](./api.md) diff --git a/model_zoo/vision/ppyoloe/cpp/README.md b/model_zoo/vision/ppyoloe/cpp/README.md deleted file mode 100644 index 1027c2eeb2..0000000000 --- a/model_zoo/vision/ppyoloe/cpp/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# 编译PPYOLOE示例 - - -``` -# 下载和解压预测库 -wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz -tar xvf fastdeploy-linux-x64-0.0.3.tgz - -# 编译示例代码 -mkdir build & cd build -cmake .. 
-make -j - -# 下载模型和图片 -wget https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz -tar xvf ppyoloe_crn_l_300e_coco.tgz -wget https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg - -# 执行 -./ppyoloe_demo -``` - -执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 -``` -DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 -414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 -163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 -267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 -581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 -104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 -348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 -364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 -75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 -328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 -504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 -379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 -25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 -``` diff --git a/model_zoo/vision/ppyoloe/ppyoloe.py b/model_zoo/vision/ppyoloe/ppyoloe.py deleted file mode 100644 index 7d79dfd8cf..0000000000 --- a/model_zoo/vision/ppyoloe/ppyoloe.py +++ /dev/null @@ -1,24 +0,0 @@ -import fastdeploy as fd -import cv2 - -# 下载模型和测试图片 -model_url = "https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz" -test_jpg_url = "https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg" -fd.download_and_decompress(model_url, ".") -fd.download(test_jpg_url, ".", show_progress=True) - -# 加载模型 -model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel", - "ppyoloe_crn_l_300e_coco/model.pdiparams", - "ppyoloe_crn_l_300e_coco/infer_cfg.yml") - -# 预测图片 -im = cv2.imread("000000014439_640x640.jpg") -result = model.predict(im, conf_threshold=0.5) - -# 可视化结果 -fd.vision.visualize.vis_detection(im, result) -cv2.imwrite("vis_result.jpg", im) - -# 输出预测结果 -print(result) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md new file mode 100644 index 0000000000..467023f169 --- /dev/null +++ b/model_zoo/vision/yolor/README.md @@ -0,0 +1,67 @@ +# 编译YOLOR示例 + +当前支持模型版本为:[YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights) + +本文档说明如何进行[YOLOR](https://github.com/WongKinYiu/yolor)的快速部署推理。本目录结构如下 + +``` +. 
+├── cpp
+│   ├── CMakeLists.txt
+│   ├── README.md
+│   └── yolor.cc
+├── README.md
+└── yolor.py
+```
+
+## Get the ONNX file
+
+- Export it manually
+
+  Visit the official [YOLOR](https://github.com/WongKinYiu/yolor) GitHub repository, follow its instructions to install it, download the `yolor.pt` weights, and use `models/export.py` to produce the `onnx` file.
+
+  ```
+  # download the yolor weights
+  wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt
+
+  # export the onnx file
+  python models/export.py --grid --dynamic --weights PATH/TO/yolor.pt
+
+  # move the onnx file into the demo directory
+  cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/
+  ```
+
+## Install FastDeploy
+
+Install FastDeploy with the commands below. Note that `vision-cpu` is installed here; `vision-gpu` can be installed instead if needed.
+
+```
+# install the fastdeploy-python tool
+pip install fastdeploy-python
+
+# install the vision-cpu module
+fastdeploy install vision-cpu
+```
+## Python deployment
+
+Running the following code automatically downloads the test image
+```
+python yolor.py
+```
+
+After it finishes, the visualized result is saved locally as `vis_result.jpg` and the detection result is printed as follows
+```
+DetectionResult: [xmin, ymin, xmax, ymax, score, label_id]
+0.000000,185.201431, 315.673126, 410.071594, 0.959289, 17
+433.802826,211.603455, 595.489319, 346.425537, 0.952615, 17
+230.446854,195.618805, 418.365479, 362.712128, 0.884253, 17
+336.545624,208.555618, 457.704315, 323.543152, 0.788450, 17
+0.896423,183.936996, 154.788727, 304.916412, 0.672804, 17
+```
+
+## Other documents
+
+- [C++ deployment](./cpp/README.md)
+- [YOLOR API reference](./api.md)
diff --git a/model_zoo/vision/ppyoloe/api.md b/model_zoo/vision/yolor/api.md
similarity index 56%
rename from model_zoo/vision/ppyoloe/api.md
rename to model_zoo/vision/yolor/api.md
index 1c5cbcaadb..b1e5be889b 100644
--- a/model_zoo/vision/ppyoloe/api.md
+++ b/model_zoo/vision/yolor/api.md
@@ -1,24 +1,23 @@
-# PPYOLOE API说明
+# YOLOR API reference

 ## Python API

-### PPYOLOE类
+### YOLOR class
 ```
-fastdeploy.vision.ultralytics.PPYOLOE(model_file, params_file, config_file, runtime_option=None, model_format=fd.Frontend.PADDLE)
+fastdeploy.vision.wongkinyiu.YOLOR(model_file, params_file=None, runtime_option=None, model_format=fd.Frontend.ONNX)
 ```
-PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式
+Loads and initializes a YOLOR model. When model_format is `fd.Frontend.ONNX`, only model_file (e.g. `yolor.onnx`) is required; when model_format is `fd.Frontend.PADDLE`, both model_file and params_file must be provided.

 **Parameters**

> * **model_file**(str): path to the model file
> * **params_file**(str): path to the parameters file
-> * **config_file**(str): 模型推理配置文件
> * **runtime_option**(RuntimeOption): backend inference options; None means the default configuration
> * **model_format**(Frontend): model format

 #### predict function
> ```
-> PPYOLOE.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5)
+> YOLOR.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5)
> ```
> Prediction interface: takes an image and returns the detection result directly.
>
@@ -26,35 +25,33 @@ PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当
> **Parameters**
>
> > * **image_data**(np.ndarray): input data, in HWC, BGR format
> > * **conf_threshold**(float): confidence threshold for filtering detection boxes
-> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效)
+> > * **nms_iou_threshold**(float): IoU threshold used during NMS

-示例代码参考[ppyoloe.py](./ppyoloe.py)
+See [yolor.py](./yolor.py) for example code

 ## C++ API

-### PPYOLOE类
+### YOLOR class
 ```
-fastdeploy::vision::ultralytics::PPYOLOE(
+fastdeploy::vision::wongkinyiu::YOLOR(
         const string& model_file,
-        const string& params_file,
-        const string& config_file,
+        const string& params_file = "",
         const RuntimeOption& runtime_option = RuntimeOption(),
         const Frontend& model_format = Frontend::ONNX)
 ```
-PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式
+Loads and initializes a YOLOR model. When model_format is `Frontend::ONNX`, only model_file (e.g. `yolor.onnx`) is required; when model_format is `Frontend::PADDLE`, both model_file and params_file must be provided.

 **Parameters**

> * **model_file**(str): path to the model file
> * **params_file**(str): path to the parameters file
-> * **config_file**(str):
模型推理配置文件
> * **runtime_option**(RuntimeOption): backend inference options; None means the default configuration
> * **model_format**(Frontend): model format

 #### Predict function
> ```
-> YOLOv5::Predict(cv::Mat* im, DetectionResult* result,
+> YOLOR::Predict(cv::Mat* im, DetectionResult* result,
>                 float conf_threshold = 0.25,
>                 float nms_iou_threshold = 0.5)
> ```
> Prediction interface: takes an image and returns the detection result directly.
>
> **Parameters**
>
@@ -65,9 +62,9 @@
> > * **im**: input image, in HWC, BGR format
> > * **result**: detection result, including the boxes and each box's confidence
> > * **conf_threshold**: confidence threshold for filtering detection boxes
-> > * **nms_iou_threshold**: NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效)
+> > * **nms_iou_threshold**: IoU threshold used during NMS

-示例代码参考[cpp/yolov5.cc](cpp/yolov5.cc)
+See [cpp/yolor.cc](cpp/yolor.cc) for example code

 ## Other API usage

diff --git a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt b/model_zoo/vision/yolor/cpp/CMakeLists.txt
similarity index 75%
rename from model_zoo/vision/ppyoloe/cpp/CMakeLists.txt
rename to model_zoo/vision/yolor/cpp/CMakeLists.txt
index e681566517..18248b8452 100644
--- a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt
+++ b/model_zoo/vision/yolor/cpp/CMakeLists.txt
@@ -1,4 +1,4 @@
-PROJECT(ppyoloe_demo C CXX)
+PROJECT(yolor_demo C CXX)
 CMAKE_MINIMUM_REQUIRED (VERSION 3.16)

 # compatibility build for environments with an older ABI
@@ -12,6 +12,6 @@ include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
 # add the FastDeploy header dependencies
 include_directories(${FASTDEPLOY_INCS})

-add_executable(ppyoloe_demo ${PROJECT_SOURCE_DIR}/ppyoloe.cc)
+add_executable(yolor_demo ${PROJECT_SOURCE_DIR}/yolor.cc)
 # link the FastDeploy library
-target_link_libraries(ppyoloe_demo ${FASTDEPLOY_LIBS})
+target_link_libraries(yolor_demo ${FASTDEPLOY_LIBS})
diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md
new file mode 100644
index 0000000000..eddf5bc51b
--- /dev/null
+++ b/model_zoo/vision/yolor/cpp/README.md
@@ -0,0 +1,51 @@
+# Building the YOLOR example
+
+Currently supported model version: [YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights)
+
+## Get the ONNX file
+
+- Export it manually
+
+  Visit the official [YOLOR](https://github.com/WongKinYiu/yolor/releases/tag/weights) GitHub repository, follow its instructions to install it, download the `yolor.pt` weights, and use `models/export.py` to produce the `onnx` file.
+
+  ```
+  # download the yolor weights
+  wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt
+
+  # export the onnx file
+  python models/export.py --grid --dynamic --weights PATH/TO/yolor.pt
+
+  ```
+
+
+## Run the demo
+
+```
+# download and unpack the prebuilt library
+wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz
+tar xvf fastdeploy-linux-x64-0.0.3.tgz
+
+# build the example code
+mkdir build && cd build
+cmake ..
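+# (hypothetical) if CMake cannot locate the unpacked SDK automatically, its
+# path can be passed explicitly, e.g.:
+#   cmake .. -DFASTDEPLOY_INSTALL_DIR=/path/to/fastdeploy-linux-x64-0.0.3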
+make -j
+
+# move the onnx file into the build directory
+cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/cpp/build/
+
+# download the test image
+wget https://raw.githubusercontent.com/WongKinYiu/yolor/paper/inference/images/horses.jpg
+
+# run
+./yolor_demo
+```
+
+After it finishes, the visualized result is saved locally as `vis_result.jpg` and the detection boxes are printed to the terminal, as shown below
+```
+DetectionResult: [xmin, ymin, xmax, ymax, score, label_id]
+0.000000,185.201431, 315.673126, 410.071594, 0.959289, 17
+433.802826,211.603455, 595.489319, 346.425537, 0.952615, 17
+230.446854,195.618805, 418.365479, 362.712128, 0.884253, 17
+336.545624,208.555618, 457.704315, 323.543152, 0.788450, 17
+0.896423,183.936996, 154.788727, 304.916412, 0.672804, 17
+```
diff --git a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc b/model_zoo/vision/yolor/cpp/yolor.cc
similarity index 66%
rename from model_zoo/vision/ppyoloe/cpp/ppyoloe.cc
rename to model_zoo/vision/yolor/cpp/yolor.cc
index e63f29e62a..db194583fc 100644
--- a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc
+++ b/model_zoo/vision/yolor/cpp/yolor.cc
@@ -16,28 +16,18 @@

 int main() {
   namespace vis = fastdeploy::vision;
-
-  std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel";
-  std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams";
-  std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml";
-  std::string img_path = "000000014439_640x640.jpg";
-  std::string vis_path = "vis.jpeg";
-
-  auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file);
+  auto model = vis::wongkinyiu::YOLOR("yolor.onnx");
   if (!model.Initialized()) {
     std::cerr << "Init Failed." << std::endl;
     return -1;
   }
-
-  cv::Mat im = cv::imread(img_path);
+  cv::Mat im = cv::imread("horses.jpg");
   cv::Mat vis_im = im.clone();

   vis::DetectionResult res;
   if (!model.Predict(&im, &res)) {
     std::cerr << "Prediction Failed." << std::endl;
     return -1;
-  } else {
-    std::cout << "Prediction Done!" << std::endl;
   }

   // print the detection boxes
@@ -45,7 +35,6 @@ int main() {

   // visualize the prediction
   vis::Visualize::VisDetection(&vis_im, res);
-  cv::imwrite(vis_path, vis_im);
-  std::cout << "Detect Done! Saved: " << vis_path << std::endl;
+  cv::imwrite("vis_result.jpg", vis_im);
   return 0;
 }
diff --git a/model_zoo/vision/yolor/yolor.py b/model_zoo/vision/yolor/yolor.py
new file mode 100644
index 0000000000..56d3f9689e
--- /dev/null
+++ b/model_zoo/vision/yolor/yolor.py
@@ -0,0 +1,21 @@
+import fastdeploy as fd
+import cv2
+
+# download the test image
+test_jpg_url = "https://raw.githubusercontent.com/WongKinYiu/yolor/paper/inference/images/horses.jpg"
+fd.download(test_jpg_url, ".", show_progress=True)
+
+# load the model
+model = fd.vision.wongkinyiu.YOLOR("yolor.onnx")
+
+# run prediction on the image
+im = cv2.imread("horses.jpg")
+result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5)
+
+# visualize the result
+fd.vision.visualize.vis_detection(im, result)
+cv2.imwrite("vis_result.jpg", im)
+
+# print the prediction result
+print(result)
+print(model.runtime_option)
diff --git a/setup.py b/setup.py
index e76f057b1c..5147025b4e 100644
--- a/setup.py
+++ b/setup.py
@@ -326,25 +326,14 @@ def run(self):
         shutil.copy("LICENSE", "fastdeploy")

         depend_libs = list()
-        if platform.system().lower() == "linux":
-            for f in os.listdir(".setuptools-cmake-build"):
-                full_name = os.path.join(".setuptools-cmake-build", f)
-                if not os.path.isfile(full_name):
-                    continue
-                if not full_name.count("fastdeploy_main.cpython-"):
-                    continue
-                if not full_name.endswith(".so"):
-                    continue
-                # modify the search path of libraries
-                command = "patchelf --set-rpath '$ORIGIN/libs/' {}".format(
-                    full_name)
-                # The sw_64 not suppot patchelf, so we just disable that.
- if platform.machine() != 'sw_64' and platform.machine( - ) != 'mips64': - assert os.system( - command - ) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format( - command) + # modify the search path of libraries + command = "patchelf --set-rpath '$ORIGIN/libs/' .setuptools-cmake-build/fastdeploy_main.cpython-37m-x86_64-linux-gnu.so" + # The sw_64 not suppot patchelf, so we just disable that. + if platform.machine() != 'sw_64' and platform.machine() != 'mips64': + assert os.system( + command + ) == 0, "patch fastdeploy_main.cpython-37m-x86_64-linux-gnu.so failed, the command: {}".format( + command) for f in os.listdir(".setuptools-cmake-build"): if not os.path.isfile(os.path.join(".setuptools-cmake-build", f)): From 3aa015fd722877e7c449a25a9ad0eedbc6fc099a Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 07:58:07 +0000 Subject: [PATCH 36/51] for merge --- examples/CMakeLists.txt | 25 +-- examples/vision/ppdet_ppyoloe.cc | 51 ++++++ fastdeploy/__init__.py | 2 +- fastdeploy/download.py | 2 +- fastdeploy/utils/utils.h | 4 + fastdeploy/vision.h | 1 + fastdeploy/vision/__init__.py | 1 + .../vision/common/processors/convert.cc | 62 +++++++ fastdeploy/vision/common/processors/convert.h | 42 +++++ .../vision/common/processors/transform.h | 1 + fastdeploy/vision/meituan/yolov6.cc | 8 +- fastdeploy/vision/ppcls/model.cc | 13 ++ fastdeploy/vision/ppcls/model.h | 14 ++ fastdeploy/vision/ppcls/ppcls_pybind.cc | 2 +- fastdeploy/vision/ppdet/__init__.py | 39 ++++ fastdeploy/vision/ppdet/ppdet_pybind.cc | 32 ++++ fastdeploy/vision/ppdet/ppyoloe.cc | 170 ++++++++++++++++++ fastdeploy/vision/ppdet/ppyoloe.h | 44 +++++ fastdeploy/vision/ultralytics/yolov5.cc | 13 +- fastdeploy/vision/utils/sort_det_res.cc | 6 +- fastdeploy/vision/vision_pybind.cc | 6 +- fastdeploy/vision/visualize/detection.cc | 4 +- model_zoo/vision/ppyoloe/README.md | 52 ++++++ model_zoo/vision/ppyoloe/api.md | 74 ++++++++ model_zoo/vision/ppyoloe/cpp/CMakeLists.txt | 17 ++ model_zoo/vision/ppyoloe/cpp/README.md | 39 ++++ model_zoo/vision/ppyoloe/cpp/ppyoloe.cc | 51 ++++++ model_zoo/vision/ppyoloe/ppyoloe.py | 24 +++ setup.py | 27 ++- 29 files changed, 794 insertions(+), 32 deletions(-) create mode 100644 examples/vision/ppdet_ppyoloe.cc create mode 100644 fastdeploy/vision/common/processors/convert.cc create mode 100644 fastdeploy/vision/common/processors/convert.h create mode 100644 fastdeploy/vision/ppdet/__init__.py create mode 100644 fastdeploy/vision/ppdet/ppdet_pybind.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.h create mode 100644 model_zoo/vision/ppyoloe/README.md create mode 100644 model_zoo/vision/ppyoloe/api.md create mode 100644 model_zoo/vision/ppyoloe/cpp/CMakeLists.txt create mode 100644 model_zoo/vision/ppyoloe/cpp/README.md create mode 100644 model_zoo/vision/ppyoloe/cpp/ppyoloe.cc create mode 100644 model_zoo/vision/ppyoloe/ppyoloe.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 67361223c6..112193c86a 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,25 +1,26 @@ -function(add_fastdeploy_executable field url model) +function(add_fastdeploy_executable FIELD CC_FILE) # temp target name/file var in function scope - set(TEMP_TARGET_FILE ${PROJECT_SOURCE_DIR}/examples/${field}/${url}_${model}.cc) - set(TEMP_TARGET_NAME ${field}_${url}_${model}) + set(TEMP_TARGET_FILE ${CC_FILE}) + string(REGEX MATCHALL "[0-9A-Za-z_]*.cc" FILE_NAME ${CC_FILE}) + string(REGEX REPLACE 
".cc" "" FILE_PREFIX ${FILE_NAME}) + set(TEMP_TARGET_NAME ${FIELD}_${FILE_PREFIX}) if (EXISTS ${TEMP_TARGET_FILE} AND TARGET fastdeploy) add_executable(${TEMP_TARGET_NAME} ${TEMP_TARGET_FILE}) target_link_libraries(${TEMP_TARGET_NAME} PUBLIC fastdeploy) - message(STATUS "Found source file: [${field}/${url}_${model}.cc], ADD!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") - else () - message(WARNING "Can not found source file: [${field}/${url}_${model}.cc], SKIP!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") + message(STATUS " Added FastDeploy Executable : ${TEMP_TARGET_NAME}") endif() unset(TEMP_TARGET_FILE) unset(TEMP_TARGET_NAME) endfunction() # vision examples -if (WITH_VISION_EXAMPLES) - add_fastdeploy_executable(vision ultralytics yolov5) - add_fastdeploy_executable(vision meituan yolov6) - add_fastdeploy_executable(vision wongkinyiu yolov7) - add_fastdeploy_executable(vision megvii yolox) - add_fastdeploy_executable(vision wongkinyiu yolor) +if(WITH_VISION_EXAMPLES AND EXISTS ${PROJECT_SOURCE_DIR}/examples/vision) + message(STATUS "") + message(STATUS "*************FastDeploy Examples Summary**********") + file(GLOB ALL_VISION_EXAMPLE_SRCS ${PROJECT_SOURCE_DIR}/examples/vision/*.cc) + foreach(_CC_FILE ${ALL_VISION_EXAMPLE_SRCS}) + add_fastdeploy_executable(vision ${_CC_FILE}) + endforeach() endif() # other examples ... diff --git a/examples/vision/ppdet_ppyoloe.cc b/examples/vision/ppdet_ppyoloe.cc new file mode 100644 index 0000000000..b234021c92 --- /dev/null +++ b/examples/vision/ppdet_ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "test.jpeg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! Saved: " << vis_path << std::endl; + return 0; +} diff --git a/fastdeploy/__init__.py b/fastdeploy/__init__.py index 500e7cc42a..68006c1bed 100644 --- a/fastdeploy/__init__.py +++ b/fastdeploy/__init__.py @@ -17,7 +17,7 @@ from .fastdeploy_runtime import * from . import fastdeploy_main as C from . 
import vision -from .download import download +from .download import download, download_and_decompress def TensorInfoStr(tensor_info): diff --git a/fastdeploy/download.py b/fastdeploy/download.py index e00af098df..67f21d8e76 100644 --- a/fastdeploy/download.py +++ b/fastdeploy/download.py @@ -156,7 +156,7 @@ def decompress(fname): def url2dir(url, path, rename=None): full_name = download(url, path, rename, show_progress=True) - print("SDK is donwloaded, now extracting...") + print("File is donwloaded, now extracting...") if url.count(".tgz") > 0 or url.count(".tar") > 0 or url.count("zip") > 0: return decompress(full_name) diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h index 23ca6ee51a..e605ee5a75 100644 --- a/fastdeploy/utils/utils.h +++ b/fastdeploy/utils/utils.h @@ -65,6 +65,10 @@ class FASTDEPLOY_DECL FDLogger { bool verbose_ = true; }; +#ifndef __REL_FILE__ +#define __REL_FILE__ __FILE__ +#endif + #define FDERROR \ FDLogger(true, "[ERROR]") << __REL_FILE__ << "(" << __LINE__ \ << ")::" << __FUNCTION__ << "\t" diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index 4398463251..68c0881cac 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -18,6 +18,7 @@ #include "fastdeploy/vision/megvii/yolox.h" #include "fastdeploy/vision/meituan/yolov6.h" #include "fastdeploy/vision/ppcls/model.h" +#include "fastdeploy/vision/ppdet/ppyoloe.h" #include "fastdeploy/vision/ultralytics/yolov5.h" #include "fastdeploy/vision/wongkinyiu/yolor.h" #include "fastdeploy/vision/wongkinyiu/yolov7.h" diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 7122bede0b..6acbf0c376 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -15,6 +15,7 @@ from . import evaluation from . import ppcls +from . import ppdet from . import ultralytics from . import meituan from . import megvii diff --git a/fastdeploy/vision/common/processors/convert.cc b/fastdeploy/vision/common/processors/convert.cc new file mode 100644 index 0000000000..a7ca6de07a --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.cc @@ -0,0 +1,62 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision/common/processors/convert.h" + +namespace fastdeploy { + +namespace vision { + +Convert::Convert(const std::vector& alpha, + const std::vector& beta) { + FDASSERT(alpha.size() == beta.size(), + "Convert: requires the size of alpha equal to the size of beta."); + FDASSERT(alpha.size() != 0, + "Convert: requires the size of alpha and beta > 0."); + alpha_.assign(alpha.begin(), alpha.end()); + beta_.assign(beta.begin(), beta.end()); +} + +bool Convert::CpuRun(Mat* mat) { + cv::Mat* im = mat->GetCpuMat(); + std::vector split_im; + cv::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::merge(split_im, *im); + return true; +} + +#ifdef ENABLE_OPENCV_CUDA +bool Convert::GpuRun(Mat* mat) { + cv::cuda::GpuMat* im = mat->GetGpuMat(); + std::vector split_im; + cv::cuda::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::cuda::merge(split_im, *im); + return true; +} +#endif + +bool Convert::Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, ProcLib lib) { + auto c = Convert(alpha, beta); + return c(mat, lib); +} + +} // namespace vision +} // namespace fastdeploy \ No newline at end of file diff --git a/fastdeploy/vision/common/processors/convert.h b/fastdeploy/vision/common/processors/convert.h new file mode 100644 index 0000000000..5d5a5276f5 --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.h @@ -0,0 +1,42 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "fastdeploy/vision/common/processors/base.h" + +namespace fastdeploy { +namespace vision { +class Convert : public Processor { + public: + Convert(const std::vector& alpha, const std::vector& beta); + + bool CpuRun(Mat* mat); +#ifdef ENABLE_OPENCV_CUDA + bool GpuRun(Mat* mat); +#endif + std::string Name() { return "Convert"; } + + // Compute `result = mat * alpha + beta` directly by channel. + // The default behavior is the same as OpenCV's convertTo method. 
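+  //
+  // A minimal usage sketch (assuming a 3-channel image whose values should
+  // be scaled to [0, 1], as the YOLO preprocessors in this patch do):
+  //   std::vector<float> alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f};
+  //   std::vector<float> beta = {0.0f, 0.0f, 0.0f};
+  //   Convert::Run(mat, alpha, beta);  // each pixel value v becomes v / 255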
+ static bool Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, + ProcLib lib = ProcLib::OPENCV_CPU); + + private: + std::vector alpha_; + std::vector beta_; +}; +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/common/processors/transform.h b/fastdeploy/vision/common/processors/transform.h index 12eec8d72d..08073b4e42 100644 --- a/fastdeploy/vision/common/processors/transform.h +++ b/fastdeploy/vision/common/processors/transform.h @@ -17,6 +17,7 @@ #include "fastdeploy/vision/common/processors/cast.h" #include "fastdeploy/vision/common/processors/center_crop.h" #include "fastdeploy/vision/common/processors/color_space_convert.h" +#include "fastdeploy/vision/common/processors/convert.h" #include "fastdeploy/vision/common/processors/hwc2chw.h" #include "fastdeploy/vision/common/processors/normalize.h" #include "fastdeploy/vision/common/processors/pad.h" diff --git a/fastdeploy/vision/meituan/yolov6.cc b/fastdeploy/vision/meituan/yolov6.cc index b75f2016ee..8ac7377194 100644 --- a/fastdeploy/vision/meituan/yolov6.cc +++ b/fastdeploy/vision/meituan/yolov6.cc @@ -129,8 +129,12 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc index 915cb97512..a89a1e4731 100644 --- a/fastdeploy/vision/ppcls/model.cc +++ b/fastdeploy/vision/ppcls/model.cc @@ -1,3 +1,16 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/utils/utils.h" diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h index fae99d4f3c..71800a7d76 100644 --- a/fastdeploy/vision/ppcls/model.h +++ b/fastdeploy/vision/ppcls/model.h @@ -1,3 +1,17 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc
index 915cb97512..a89a1e4731 100644
--- a/fastdeploy/vision/ppcls/model.cc
+++ b/fastdeploy/vision/ppcls/model.cc
@@ -1,3 +1,16 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
 #include "fastdeploy/vision/ppcls/model.h"
 
 #include "fastdeploy/vision/utils/utils.h"
diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h
index fae99d4f3c..71800a7d76 100644
--- a/fastdeploy/vision/ppcls/model.h
+++ b/fastdeploy/vision/ppcls/model.h
@@ -1,3 +1,17 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 #pragma once
 #include "fastdeploy/fastdeploy_model.h"
 #include "fastdeploy/vision/common/processors/transform.h"
diff --git a/fastdeploy/vision/ppcls/ppcls_pybind.cc b/fastdeploy/vision/ppcls/ppcls_pybind.cc
index 828bef3c7a..10ff5ee109 100644
--- a/fastdeploy/vision/ppcls/ppcls_pybind.cc
+++ b/fastdeploy/vision/ppcls/ppcls_pybind.cc
@@ -14,7 +14,7 @@
 #include "fastdeploy/pybind/main.h"
 
 namespace fastdeploy {
-void BindPpClsModel(pybind11::module& m) {
+void BindPPCls(pybind11::module& m) {
   auto ppcls_module = m.def_submodule("ppcls", "Module to deploy PaddleClas.");
   pybind11::class_<vision::ppcls::Model>(ppcls_module, "Model")
       .def(pybind11::init<std::string, std::string, std::string, RuntimeOption,
                           Frontend>())
diff --git a/fastdeploy/vision/ppdet/ppdet_pybind.cc b/fastdeploy/vision/ppdet/ppdet_pybind.cc
new file mode 100644
--- /dev/null
+++ b/fastdeploy/vision/ppdet/ppdet_pybind.cc
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "fastdeploy/pybind/main.h"
+
+namespace fastdeploy {
+void BindPPDet(pybind11::module& m) {
+  auto ppdet_module =
+      m.def_submodule("ppdet", "Module to deploy PaddleDetection.");
+  pybind11::class_<vision::ppdet::PPYOLOE>(ppdet_module,
+                                           "PPYOLOE")
+      .def(pybind11::init<std::string, std::string, std::string, RuntimeOption,
+                          Frontend>())
+      .def("predict", [](vision::ppdet::PPYOLOE& self, pybind11::array& data,
+                         float conf_threshold, float nms_iou_threshold) {
+        auto mat = PyArrayToCvMat(data);
+        vision::DetectionResult res;
+        self.Predict(&mat, &res, conf_threshold, nms_iou_threshold);
+        return res;
+      });
+}
+}  // namespace fastdeploy
diff --git a/fastdeploy/vision/ppdet/ppyoloe.cc b/fastdeploy/vision/ppdet/ppyoloe.cc
new file mode 100644
index 0000000000..c215ecb0ca
--- /dev/null
+++ b/fastdeploy/vision/ppdet/ppyoloe.cc
@@ -0,0 +1,170 @@
+#include "fastdeploy/vision/ppdet/ppyoloe.h"
+#include "fastdeploy/vision/utils/utils.h"
+#include "yaml-cpp/yaml.h"
+
+namespace fastdeploy {
+namespace vision {
+namespace ppdet {
+
+PPYOLOE::PPYOLOE(const std::string& model_file, const std::string& params_file,
+                 const std::string& config_file,
+                 const RuntimeOption& custom_option,
+                 const Frontend& model_format) {
+  config_file_ = config_file;
+  valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
+  valid_gpu_backends = {Backend::ORT, Backend::PDINFER};
+  runtime_option = custom_option;
+  runtime_option.model_format = model_format;
+  runtime_option.model_file = model_file;
+  runtime_option.params_file = params_file;
+  initialized = Initialize();
+}
+
+bool PPYOLOE::Initialize() {
+  if (!BuildPreprocessPipelineFromConfig()) {
+    std::cout << "Failed to build preprocess pipeline from configuration file."
+              << std::endl;
+    return false;
+  }
+  if (!InitRuntime()) {
+    std::cout << "Failed to initialize fastdeploy backend." << std::endl;
+    return false;
+  }
+  return true;
+}
+
+bool PPYOLOE::BuildPreprocessPipelineFromConfig() {
+  processors_.clear();
+  YAML::Node cfg;
+  try {
+    cfg = YAML::LoadFile(config_file_);
+  } catch (YAML::BadFile& e) {
+    std::cout << "Failed to load yaml file " << config_file_
+              << ", maybe you should check this file." << std::endl;
+    return false;
+  }
+
+  if (cfg["arch"].as<std::string>() != "YOLO") {
+    std::cout << "Require the arch of model is YOLO, but arch defined in "
+                 "config file is "
+              << cfg["arch"].as<std::string>() << "." << std::endl;
+    return false;
+  }
+  processors_.push_back(std::make_shared<BGR2RGB>());
+
+  for (const auto& op : cfg["Preprocess"]) {
+    std::string op_name = op["type"].as<std::string>();
+    if (op_name == "NormalizeImage") {
+      auto mean = op["mean"].as<std::vector<float>>();
+      auto std = op["std"].as<std::vector<float>>();
+      bool is_scale = op["is_scale"].as<bool>();
+      processors_.push_back(std::make_shared<Normalize>(mean, std, is_scale));
+    } else if (op_name == "Resize") {
+      bool keep_ratio = op["keep_ratio"].as<bool>();
+      auto target_size = op["target_size"].as<std::vector<int>>();
+      int interp = op["interp"].as<int>();
+      FDASSERT(target_size.size() == 2,
+               "Require size of target_size be 2, but now it's " +
+                   std::to_string(target_size.size()) + ".");
+      FDASSERT(!keep_ratio,
+               "Only support keep_ratio is false while deploy "
+               "PaddleDetection model.");
+      int width = target_size[1];
+      int height = target_size[0];
+      processors_.push_back(
+          std::make_shared<Resize>(width, height, -1.0, -1.0, interp, false));
+    } else if (op_name == "Permute") {
+      processors_.push_back(std::make_shared<HWC2CHW>());
+    } else {
+      std::cout << "Unexpected preprocess operator: " << op_name << "."
+                << std::endl;
+      return false;
+    }
+  }
+  return true;
+}
+
+bool PPYOLOE::Preprocess(Mat* mat, std::vector<FDTensor>* outputs) {
+  int origin_w = mat->Width();
+  int origin_h = mat->Height();
+  for (size_t i = 0; i < processors_.size(); ++i) {
+    if (!(*(processors_[i].get()))(mat)) {
+      std::cout << "Failed to process image data in " << processors_[i]->Name()
+                << "." << std::endl;
+      return false;
+    }
+  }
+
+  outputs->resize(2);
+  (*outputs)[0].name = InputInfoOfRuntime(0).name;
+  mat->ShareWithTensor(&((*outputs)[0]));
+
+  // reshape to [1, c, h, w]
+  (*outputs)[0].shape.insert((*outputs)[0].shape.begin(), 1);
+
+  // scale_factor = resized shape / origin shape, height first
+  (*outputs)[1].Allocate({1, 2}, FDDataType::FP32, InputInfoOfRuntime(1).name);
+  float* ptr = static_cast<float*>((*outputs)[1].MutableData());
+  ptr[0] = mat->Height() * 1.0 / origin_h;
+  ptr[1] = mat->Width() * 1.0 / origin_w;
+  return true;
+}
+
+bool PPYOLOE::Postprocess(std::vector<FDTensor>& infer_result,
+                          DetectionResult* result, float conf_threshold,
+                          float nms_threshold) {
+  FDASSERT(infer_result[1].shape[0] == 1,
+           "Only support batch = 1 in FastDeploy now.");
+  int box_num = 0;
+  if (infer_result[1].dtype == FDDataType::INT32) {
+    box_num = *(static_cast<int32_t*>(infer_result[1].Data()));
+  } else if (infer_result[1].dtype == FDDataType::INT64) {
+    box_num = *(static_cast<int64_t*>(infer_result[1].Data()));
+  } else {
+    FDASSERT(
+        false,
+        "The output box_num of PPYOLOE model should be type of int32/int64.");
+  }
+  result->Reserve(box_num);
+  float* box_data = static_cast<float*>(infer_result[0].Data());
+  for (int i = 0; i < box_num; ++i) {
+    if (box_data[i * 6 + 1] < conf_threshold) {
+      continue;
+    }
+    result->label_ids.push_back(box_data[i * 6]);
+    result->scores.push_back(box_data[i * 6 + 1]);
+    result->boxes.emplace_back(
+        std::array<float, 4>{box_data[i * 6 + 2], box_data[i * 6 + 3],
+                             box_data[i * 6 + 4] - box_data[i * 6 + 2],
+                             box_data[i * 6 + 5] - box_data[i * 6 + 3]});
+  }
+  return true;
+}
+
+bool PPYOLOE::Predict(cv::Mat* im, DetectionResult* result,
+                      float conf_threshold, float iou_threshold) {
+  Mat mat(*im);
+  std::vector<FDTensor> processed_data;
+  if (!Preprocess(&mat, &processed_data)) {
+    FDERROR << "Failed to preprocess input data while using model:"
+            << ModelName() << "." << std::endl;
+    return false;
+  }
+
+  std::vector<FDTensor> infer_result;
+  if (!Infer(processed_data, &infer_result)) {
+    FDERROR << "Failed to inference while using model:" << ModelName() << "."
+            << std::endl;
+    return false;
+  }
+
+  if (!Postprocess(infer_result, result, conf_threshold, iou_threshold)) {
+    FDERROR << "Failed to postprocess while using model:" << ModelName() << "."
+            << std::endl;
+    return false;
+  }
+  return true;
+}
+
+}  // namespace ppdet
+}  // namespace vision
+}  // namespace fastdeploy
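Since BuildPreprocessPipelineFromConfig above drives everything from the exported infer_cfg.yml, the same parsing is easy to prototype in Python. A minimal sketch assuming PyYAML; only the YAML keys the C++ code consumes (`arch`, `Preprocess`, `type`, `mean`, `std`, `is_scale`, `keep_ratio`, `target_size`, `interp`) come from the code above, everything else is illustrative:

```python
# Minimal Python sketch of BuildPreprocessPipelineFromConfig: read the
# PaddleDetection infer_cfg.yml and turn its Preprocess list into op specs.
import yaml

def build_pipeline(config_file):
    cfg = yaml.safe_load(open(config_file))
    assert cfg["arch"] == "YOLO", "only YOLO-arch configs are handled here"
    ops = []
    for op in cfg["Preprocess"]:
        if op["type"] == "NormalizeImage":
            ops.append(("normalize", op["mean"], op["std"], op["is_scale"]))
        elif op["type"] == "Resize":
            assert not op["keep_ratio"], "keep_ratio is not supported"
            h, w = op["target_size"]     # config stores [height, width]
            ops.append(("resize", w, h, op["interp"]))
        elif op["type"] == "Permute":
            ops.append(("hwc2chw",))
        else:
            raise ValueError("unexpected preprocess operator: " + op["type"])
    return ops

print(build_pipeline("ppyoloe_crn_l_300e_coco/infer_cfg.yml"))
```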
diff --git a/fastdeploy/vision/ppdet/ppyoloe.h b/fastdeploy/vision/ppdet/ppyoloe.h
new file mode 100644
index 0000000000..a3db268ca4
--- /dev/null
+++ b/fastdeploy/vision/ppdet/ppyoloe.h
@@ -0,0 +1,44 @@
+#pragma once
+#include "fastdeploy/fastdeploy_model.h"
+#include "fastdeploy/vision/common/processors/transform.h"
+#include "fastdeploy/vision/common/result.h"
+
+#include "fastdeploy/vision/utils/utils.h"
+
+namespace fastdeploy {
+namespace vision {
+namespace ppdet {
+
+class FASTDEPLOY_DECL PPYOLOE : public FastDeployModel {
+ public:
+  PPYOLOE(const std::string& model_file, const std::string& params_file,
+          const std::string& config_file,
+          const RuntimeOption& custom_option = RuntimeOption(),
+          const Frontend& model_format = Frontend::PADDLE);
+
+  std::string ModelName() const { return "PaddleDetection/PPYOLOE"; }
+
+  virtual bool Initialize();
+
+  virtual bool BuildPreprocessPipelineFromConfig();
+
+  virtual bool Preprocess(Mat* mat, std::vector<FDTensor>* outputs);
+
+  virtual bool Postprocess(std::vector<FDTensor>& infer_result,
+                           DetectionResult* result, float conf_threshold,
+                           float nms_threshold);
+
+  virtual bool Predict(cv::Mat* im, DetectionResult* result,
+                       float conf_threshold = 0.5, float nms_threshold = 0.7);
+
+ private:
+  std::vector<std::shared_ptr<Processor>> processors_;
+  std::string config_file_;
+  // PaddleDetection can export model without nms
+  // This flag will help us to handle the different
+  // situation
+  bool has_nms_;
+};
+}  // namespace ppdet
+}  // namespace vision
+}  // namespace fastdeploy
diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc
index c8c6e06a94..b2e6009b1c 100644
--- a/fastdeploy/vision/ultralytics/yolov5.cc
+++ b/fastdeploy/vision/ultralytics/yolov5.cc
@@ -126,8 +126,12 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output,
   LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up,
             stride);
   BGR2RGB::Run(mat);
-  Normalize::Run(mat, std::vector<float>(mat->Channels(), 0.0),
-                 std::vector<float>(mat->Channels(), 1.0));
+  // Normalize::Run(mat, std::vector<float>(mat->Channels(), 0.0),
+  //                std::vector<float>(mat->Channels(), 1.0));
+  // Compute `result = mat * alpha + beta` directly by channel
+  std::vector<float> alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f};
+  std::vector<float> beta = {0.0f, 0.0f, 0.0f};
+  Convert::Run(mat, alpha, beta);
 
   // Record output shape of preprocessed image
   (*im_info)["output_shape"] = {static_cast<float>(mat->Height()),
@@ -198,6 +202,11 @@ bool YOLOv5::Postprocess(
       result->scores.push_back(confidence);
     }
   }
+
+  if (result->boxes.size() == 0) {
+      return true;
+  }
+
   utils::NMS(result, nms_iou_threshold);
 
   // scale the boxes to the origin image shape
diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc
index e4a0db9761..790126a6ac 100644
--- a/fastdeploy/vision/utils/sort_det_res.cc
+++ b/fastdeploy/vision/utils/sort_det_res.cc
@@ -68,7 +68,11 @@ void MergeSort(DetectionResult* result, size_t low, size_t high) {
 
 void SortDetectionResult(DetectionResult* result) {
   size_t low = 0;
-  size_t high = result->scores.size() - 1;
+  size_t high = result->scores.size();
+  if (high == 0) {
+      return;
+  }
+  high = high - 1;
   MergeSort(result, low, high);
 }
 
diff --git a/fastdeploy/vision/vision_pybind.cc
b/fastdeploy/vision/vision_pybind.cc index e4ba05b893..0334303ce6 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -16,7 +16,8 @@ namespace fastdeploy { -void BindPpClsModel(pybind11::module& m); +void BindPPCls(pybind11::module& m); +void BindPPDet(pybind11::module& m); void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); void BindMeituan(pybind11::module& m); @@ -41,7 +42,8 @@ void BindVision(pybind11::module& m) { .def("__repr__", &vision::DetectionResult::Str) .def("__str__", &vision::DetectionResult::Str); - BindPpClsModel(m); + BindPPCls(m); + BindPPDet(m); BindUltralytics(m); BindWongkinyiu(m); BindMeituan(m); diff --git a/fastdeploy/vision/visualize/detection.cc b/fastdeploy/vision/visualize/detection.cc index e5f01bdd35..6d60072447 100644 --- a/fastdeploy/vision/visualize/detection.cc +++ b/fastdeploy/vision/visualize/detection.cc @@ -43,7 +43,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, } std::string text = id + "," + score; int font = cv::FONT_HERSHEY_SIMPLEX; - cv::Size text_size = cv::getTextSize(text, font, font_size, 0.5, nullptr); + cv::Size text_size = cv::getTextSize(text, font, font_size, 1, nullptr); cv::Point origin; origin.x = rect.x; origin.y = rect.y; @@ -52,7 +52,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, text_size.width, text_size.height); cv::rectangle(*im, rect, rect_color, line_size); cv::putText(*im, text, origin, font, font_size, cv::Scalar(255, 255, 255), - 0.5); + 1); } } diff --git a/model_zoo/vision/ppyoloe/README.md b/model_zoo/vision/ppyoloe/README.md new file mode 100644 index 0000000000..42d18104ad --- /dev/null +++ b/model_zoo/vision/ppyoloe/README.md @@ -0,0 +1,52 @@ +# PaddleDetection/PPYOLOE部署示例 + +- 当前支持PaddleDetection版本为[release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) + +本文档说明如何进行[PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe)的快速部署推理。本目录结构如下 +``` +. 
+├── cpp                     # C++ 代码目录
+│   ├── CMakeLists.txt      # C++ 代码编译CMakeLists文件
+│   ├── README.md           # C++ 代码编译部署文档
+│   └── ppyoloe.cc          # C++ 示例代码
+├── README.md               # PPYOLOE 部署文档
+└── ppyoloe.py              # Python示例代码
+```
+
+## 安装FastDeploy
+
+使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu`
+```
+# 安装fastdeploy-python工具
+pip install fastdeploy-python
+
+# 安装vision-cpu模块
+fastdeploy install vision-cpu
+```
+
+## Python部署
+
+执行如下代码即会自动下载PPYOLOE模型和测试图片
+```
+python ppyoloe.py
+```
+
+执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下
+```
+DetectionResult: [xmin, ymin, xmax, ymax, score, label_id]
+162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33
+414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0
+163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0
+267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0
+581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0
+104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0
+348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0
+364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0
+75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56
+328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0
+504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0
+379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0
+25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0
+```
+
+## 其它文档
+
+- [C++部署](./cpp/README.md)
+- [PPYOLOE API文档](./api.md)
diff --git a/model_zoo/vision/ppyoloe/api.md b/model_zoo/vision/ppyoloe/api.md
new file mode 100644
index 0000000000..1c5cbcaadb
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/api.md
@@ -0,0 +1,74 @@
+# PPYOLOE API说明
+
+## Python API
+
+### PPYOLOE类
+```
+fastdeploy.vision.ppdet.PPYOLOE(model_file, params_file, config_file, runtime_option=None, model_format=fd.Frontend.PADDLE)
+```
+PPYOLOE模型加载和初始化,需同时提供model_file和params_file,当前仅支持model_format为Paddle格式
+
+**参数**
+
+> * **model_file**(str): 模型文件路径
+> * **params_file**(str): 参数文件路径
+> * **config_file**(str): 模型推理配置文件
+> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置
+> * **model_format**(Frontend): 模型格式
+
+#### predict函数
+> ```
+> PPYOLOE.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5)
+> ```
+> 模型预测接口,输入图像直接输出检测结果。
+>
+> **参数**
+>
+> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式
+> > * **conf_threshold**(float): 检测框置信度过滤阈值
+> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效)
+
+示例代码参考[ppyoloe.py](./ppyoloe.py)
+
+
+## C++ API
+
+### PPYOLOE类
+```
+fastdeploy::vision::ppdet::PPYOLOE(
+        const std::string& model_file,
+        const std::string& params_file,
+        const std::string& config_file,
+        const RuntimeOption& runtime_option = RuntimeOption(),
+        const Frontend& model_format = Frontend::PADDLE)
+```
+PPYOLOE模型加载和初始化,需同时提供model_file和params_file,当前仅支持model_format为Paddle格式
+
+**参数**
+
+> * **model_file**(str): 模型文件路径
+> * **params_file**(str): 参数文件路径
+> * **config_file**(str): 模型推理配置文件
+> * **runtime_option**(RuntimeOption): 后端推理配置,默认为RuntimeOption(),即采用默认配置
+> * **model_format**(Frontend): 模型格式
+
+#### Predict函数
+> ```
+> PPYOLOE::Predict(cv::Mat* im, DetectionResult* result,
+>                  float conf_threshold = 0.5,
+>                  float nms_threshold = 0.7)
+> ```
+> 模型预测接口,输入图像直接输出检测结果。
+>
+> **参数**
+>
+> > * **im**: 输入图像,注意需为HWC,BGR格式
+> > * **result**: 检测结果,包括检测框,各个框的置信度
+> > * **conf_threshold**: 检测框置信度过滤阈值
+> > * **nms_threshold**: NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效)
+
+示例代码参考[cpp/ppyoloe.cc](cpp/ppyoloe.cc)
+
+## 其它API使用
+
+- [模型部署RuntimeOption配置](../../../docs/api/runtime_option.md)
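To make the predict() documentation above concrete, here is a short end-to-end call of the Python API. Reading `boxes`/`scores`/`label_ids` attributes mirrors the C++ DetectionResult fields filled in Postprocess; whether the Python binding exposes them under exactly these names is an assumption of this sketch:

```python
# Illustrative use of the documented Python predict() API.
import cv2
import fastdeploy as fd

model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel",
                                "ppyoloe_crn_l_300e_coco/model.pdiparams",
                                "ppyoloe_crn_l_300e_coco/infer_cfg.yml")
im = cv2.imread("000000014439_640x640.jpg")
result = model.predict(im, conf_threshold=0.5, nms_iou_threshold=0.7)

# Attribute names assumed to match the C++ DetectionResult fields.
for box, score, label in zip(result.boxes, result.scores, result.label_ids):
    x, y, w, h = box  # Postprocess stores [xmin, ymin, width, height]
    print(f"label={label} score={score:.3f} "
          f"box=({x:.1f}, {y:.1f}, {w:.1f}, {h:.1f})")
```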
diff --git a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt
new file mode 100644
index 0000000000..e681566517
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt
@@ -0,0 +1,17 @@
+PROJECT(ppyoloe_demo C CXX)
+CMAKE_MINIMUM_REQUIRED (VERSION 3.16)
+
+# 在低版本ABI环境中,通过如下代码进行兼容性编译
+# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0)
+
+# 指定下载解压后的fastdeploy库路径
+set(FASTDEPLOY_INSTALL_DIR ${PROJECT_SOURCE_DIR}/fastdeploy-linux-x64-0.0.3/)
+
+include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
+
+# 添加FastDeploy依赖头文件
+include_directories(${FASTDEPLOY_INCS})
+
+add_executable(ppyoloe_demo ${PROJECT_SOURCE_DIR}/ppyoloe.cc)
+# 添加FastDeploy库依赖
+target_link_libraries(ppyoloe_demo ${FASTDEPLOY_LIBS})
diff --git a/model_zoo/vision/ppyoloe/cpp/README.md b/model_zoo/vision/ppyoloe/cpp/README.md
new file mode 100644
index 0000000000..1027c2eeb2
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/cpp/README.md
@@ -0,0 +1,39 @@
+# 编译PPYOLOE示例
+
+
+```
+# 下载和解压预测库
+wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz
+tar xvf fastdeploy-linux-x64-0.0.3.tgz
+
+# 编译示例代码
+mkdir build && cd build
+cmake ..
+make -j
+
+# 下载模型和图片
+wget https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz
+tar xvf ppyoloe_crn_l_300e_coco.tgz
+wget https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg
+
+# 执行
+./ppyoloe_demo
+```
+
+执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示
+```
+DetectionResult: [xmin, ymin, xmax, ymax, score, label_id]
+162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33
+414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0
+163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0
+267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0
+581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0
+104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0
+348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0
+364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0
+75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56
+328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0
+504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0
+379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0
+25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0
+```
diff --git a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc
new file mode 100644
index 0000000000..e63f29e62a
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc
@@ -0,0 +1,51 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "fastdeploy/vision.h"
+
+int main() {
+  namespace vis = fastdeploy::vision;
+
+  std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel";
+  std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams";
+  std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml";
+  std::string img_path = "000000014439_640x640.jpg";
+  std::string vis_path = "vis_result.jpg";
+
+  auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file);
+  if (!model.Initialized()) {
+    std::cerr << "Init Failed." << std::endl;
+    return -1;
+  }
+
+  cv::Mat im = cv::imread(img_path);
+  cv::Mat vis_im = im.clone();
+
+  vis::DetectionResult res;
+  if (!model.Predict(&im, &res)) {
+    std::cerr << "Prediction Failed." << std::endl;
+    return -1;
+  } else {
+    std::cout << "Prediction Done!" << std::endl;
+  }
+
+  // 输出预测框结果
+  std::cout << res.Str() << std::endl;
+
+  // 可视化预测结果
+  vis::Visualize::VisDetection(&vis_im, res);
+  cv::imwrite(vis_path, vis_im);
+  std::cout << "Detect Done! Saved: " << vis_path << std::endl;
+  return 0;
+}
diff --git a/model_zoo/vision/ppyoloe/ppyoloe.py b/model_zoo/vision/ppyoloe/ppyoloe.py
new file mode 100644
index 0000000000..7d79dfd8cf
--- /dev/null
+++ b/model_zoo/vision/ppyoloe/ppyoloe.py
@@ -0,0 +1,24 @@
+import fastdeploy as fd
+import cv2
+
+# 下载模型和测试图片
+model_url = "https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz"
+test_jpg_url = "https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg"
+fd.download_and_decompress(model_url, ".")
+fd.download(test_jpg_url, ".", show_progress=True)
+
+# 加载模型
+model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel",
+                                "ppyoloe_crn_l_300e_coco/model.pdiparams",
+                                "ppyoloe_crn_l_300e_coco/infer_cfg.yml")
+
+# 预测图片
+im = cv2.imread("000000014439_640x640.jpg")
+result = model.predict(im, conf_threshold=0.5)
+
+# 可视化结果
+fd.vision.visualize.vis_detection(im, result)
+cv2.imwrite("vis_result.jpg", im)
+
+# 输出预测结果
+print(result)
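The setup.py hunk below replaces a hard-coded extension name with a scan of the build directory, so the rpath patch works across Python versions. The core operation is just an rpath rewrite on the built pybind .so; a minimal standalone sketch of that idea (using subprocess instead of os.system, which is this sketch's choice; the build dir name and patchelf command follow the diff):

```python
# Minimal sketch of the rpath patching generalized by the setup.py change
# below: find the built pybind extension and point its rpath at libs/.
import os
import subprocess

build_dir = ".setuptools-cmake-build"
for f in os.listdir(build_dir):
    full_name = os.path.join(build_dir, f)
    if "fastdeploy_main.cpython-" in f and f.endswith(".so"):
        # $ORIGIN resolves to the .so's own directory at load time
        subprocess.check_call(
            ["patchelf", "--set-rpath", "$ORIGIN/libs/", full_name])
```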
diff --git a/setup.py b/setup.py
index 5147025b4e..e76f057b1c 100644
--- a/setup.py
+++ b/setup.py
@@ -326,14 +326,25 @@ def run(self):
         shutil.copy("LICENSE", "fastdeploy")
         depend_libs = list()
-        # modify the search path of libraries
-        command = "patchelf --set-rpath '$ORIGIN/libs/' .setuptools-cmake-build/fastdeploy_main.cpython-37m-x86_64-linux-gnu.so"
-        # The sw_64 not suppot patchelf, so we just disable that.
-        if platform.machine() != 'sw_64' and platform.machine() != 'mips64':
-            assert os.system(
-                command
-            ) == 0, "patch fastdeploy_main.cpython-37m-x86_64-linux-gnu.so failed, the command: {}".format(
-                command)
+        if platform.system().lower() == "linux":
+            for f in os.listdir(".setuptools-cmake-build"):
+                full_name = os.path.join(".setuptools-cmake-build", f)
+                if not os.path.isfile(full_name):
+                    continue
+                if not full_name.count("fastdeploy_main.cpython-"):
+                    continue
+                if not full_name.endswith(".so"):
+                    continue
+                # modify the search path of libraries
+                command = "patchelf --set-rpath '$ORIGIN/libs/' {}".format(
+                    full_name)
+                # The sw_64 does not support patchelf, so we just disable that.
+                if platform.machine() != 'sw_64' and platform.machine(
+                ) != 'mips64':
+                    assert os.system(
+                        command
+                    ) == 0, "patch {} failed, the command: {}".format(
+                        full_name, command)
 
         for f in os.listdir(".setuptools-cmake-build"):
             if not os.path.isfile(os.path.join(".setuptools-cmake-build", f)):

From ec91abbbc526c25fe2860b6dbfe6e97245f4912d Mon Sep 17 00:00:00 2001
From: ziqi-jin <67993288+ziqi-jin@users.noreply.github.com>
Date: Wed, 20 Jul 2022 16:13:17 +0800
Subject: [PATCH 37/51] Develop (#11) (#12)

* Fix compile problem in different python version (#26)

* fix some usage problem in linux

* Fix compile problem

Co-authored-by: root

* Add PaddleDetetion/PPYOLOE model support (#22)

* add ppdet/ppyoloe

* Add demo code and documents

* add convert processor to vision (#27)

* update .gitignore

* Added checking for cmake include dir

* fixed missing trt_backend option bug when init from trt

* remove un-need data layout and add pre-check for dtype

* changed RGB2BRG to BGR2RGB in ppcls model

* add model_zoo yolov6 c++/python demo

* fixed CMakeLists.txt typos

* update yolov6 cpp/README.md

* add yolox c++/pybind and model_zoo demo

* move some helpers to private

* fixed CMakeLists.txt typos

* add normalize with alpha and beta

* add version notes for yolov5/yolov6/yolox

* add copyright to yolov5.cc

* revert normalize

* fixed some bugs in yolox

* fixed examples/CMakeLists.txt to avoid conflicts

* add convert processor to vision

* format examples/CMakeLists summary

* Fix bug while the inference result is empty with YOLOv5 (#29)

* Add multi-label function for yolov5

* Update README.md

Update doc

* Update fastdeploy_runtime.cc

fix variable option.trt_max_shape wrong name

* Update runtime_option.md

Update resnet model dynamic shape setting name from images to x

* Fix bug when inference result boxes are empty

* Delete detection.py

Co-authored-by: Jason
Co-authored-by: root
Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com>
Co-authored-by: huangjianhui <852142024@qq.com>

Co-authored-by: Jason
Co-authored-by: root
Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com>
Co-authored-by: huangjianhui <852142024@qq.com>
---
 fastdeploy/utils/utils.h                | 4 ++++
 fastdeploy/vision/ppcls/model.cc        | 6 +++---
 fastdeploy/vision/ultralytics/yolov5.cc | 4 ++--
 fastdeploy/vision/utils/sort_det_res.cc | 2 +-
 4 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h
index e605ee5a75..b57a27f80c 100644
--- a/fastdeploy/utils/utils.h
+++ b/fastdeploy/utils/utils.h
@@ -69,6 +69,10 @@ class FASTDEPLOY_DECL FDLogger {
 #define __REL_FILE__ __FILE__
 #endif
 
+#define FDERROR                                                  \
+  FDLogger(true, "[ERROR]")                                      \
+      << __REL_FILE__ << "(" << __LINE__ << ")::" << __FUNCTION__ << "\t"
+
 #define FDERROR \
   FDLogger(true, "[ERROR]") << __REL_FILE__ << "(" << __LINE__ \
                             << ")::" << __FUNCTION__ << "\t"
diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc
index a89a1e4731..c4e5b767c7 100644
--- a/fastdeploy/vision/ppcls/model.cc
+++ b/fastdeploy/vision/ppcls/model.cc
@@ -148,6 +148,6 @@ bool Model::Predict(cv::Mat* im, ClassifyResult* result, int topk) {
   return true;
 }
 
-} // namespace ppcls
-} // namespace vision
-} // namespace fastdeploy
+}  // namespace ppcls
+}  // namespace vision
+}  // namespace fastdeploy
diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc
index b2e6009b1c..0b7e50e735 100644
--- a/fastdeploy/vision/ultralytics/yolov5.cc
+++
b/fastdeploy/vision/ultralytics/yolov5.cc @@ -204,9 +204,9 @@ bool YOLOv5::Postprocess( } if (result->boxes.size() == 0) { - return true; + return true; } - + utils::NMS(result, nms_iou_threshold); // scale the boxes to the origin image shape diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc index 790126a6ac..93dbb69694 100644 --- a/fastdeploy/vision/utils/sort_det_res.cc +++ b/fastdeploy/vision/utils/sort_det_res.cc @@ -70,7 +70,7 @@ void SortDetectionResult(DetectionResult* result) { size_t low = 0; size_t high = result->scores.size(); if (high == 0) { - return; + return; } high = high - 1; MergeSort(result, low, high); From 6ae19c839f66760b8231936f8128d23a3e36d088 Mon Sep 17 00:00:00 2001 From: ziqi-jin <67993288+ziqi-jin@users.noreply.github.com> Date: Wed, 20 Jul 2022 16:46:54 +0800 Subject: [PATCH 38/51] Develop (#13) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add version notes for yolov5/yolov6/yolox * add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 (#29) * Add multi-label function for yolov5 * Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> From 083cdb9ad954537fe933921917f37fabd3800812 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:07:35 +0000 Subject: [PATCH 39/51] documents --- model_zoo/vision/yolor/README.md | 13 +++++++------ model_zoo/vision/yolor/cpp/README.md | 14 +++++++++----- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 467023f169..12db50dc92 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -1,6 +1,7 @@ # 编译YOLOR示例 -当前支持模型版本为:[YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights) +当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) +tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支. 
本文档说明如何进行[YOLOR](https://github.com/WongKinYiu/yolor)的快速部署推理。本目录结构如下 @@ -20,17 +21,17 @@ 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 - - ``` #下载yolor模型文件 - wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt + wget https://github.com/WongKinYiu/yolor/releases/download/weights/yolor-d6-paper-570.pt # 导出onnx格式文件 - python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 + + # 如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 # 移动onnx文件到demo目录 - cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolor/ + cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/ ``` ## 安装FastDeploy diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index eddf5bc51b..7827ca268d 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -1,20 +1,24 @@ # 编译YOLOR示例 -当前支持模型版本为:[YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights) +当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) ## 获取ONNX文件 - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor/releases/tag/weights)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 ``` #下载yolor模型文件 - wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt + wget https://github.com/WongKinYiu/yolor/releases/download/weights/yolor-d6-paper-570.pt # 导出onnx格式文件 - python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 + # 如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 + + # 移动onnx文件到demo目录 + cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/ ``` @@ -31,7 +35,7 @@ cmake .. 
make -j # 移动onnx文件到demo目录 -cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolor/cpp/build/ +cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/cpp/build/ # 下载图片 wget https://raw.githubusercontent.com/WongKinYiu/yolor/paper/inference/images/horses.jpg From 0db57304ddc9142ad41e00e12c092597ab5208b0 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:09:41 +0000 Subject: [PATCH 40/51] documents --- model_zoo/vision/yolor/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index 7827ca268d..f2fb220487 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -15,7 +15,7 @@ # 导出onnx格式文件 python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 - # 如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 + # 如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考 [DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md) 的解决办法 # 移动onnx文件到demo目录 cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/ From ee3db6b1d987bbde4d60e90c2815678e3c97276d Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:10:20 +0000 Subject: [PATCH 41/51] documents --- model_zoo/vision/yolor/cpp/README.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index f2fb220487..03fdafe751 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -6,7 +6,7 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 ``` #下载yolor模型文件 @@ -15,8 +15,6 @@ # 导出onnx格式文件 python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 - # 如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考 [DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md) 的解决办法 - # 移动onnx文件到demo目录 cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/ ``` From c19be13414a641f7d44975ee3f3be05831ed1caa Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:10:45 +0000 Subject: [PATCH 42/51] documents --- model_zoo/vision/yolor/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index 03fdafe751..f6a75b9737 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -6,7 +6,7 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 ``` #下载yolor模型文件 From fd8dfea40d4c484ee13c2de155f2ead83ef478f0 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:12:50 +0000 Subject: [PATCH 43/51] documents --- 
model_zoo/vision/yolor/README.md | 2 +- model_zoo/vision/yolor/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 12db50dc92..2c54f97e2f 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -1,7 +1,7 @@ # 编译YOLOR示例 当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) -tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支. +(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). 本文档说明如何进行[YOLOR](https://github.com/WongKinYiu/yolor)的快速部署推理。本目录结构如下 diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index f6a75b9737..587adc4f24 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -1,7 +1,7 @@ # 编译YOLOR示例 当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) - +(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). ## 获取ONNX文件 - 手动获取 From 3199d2f5ee82c69a900f25b9407ae2d47c929d23 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:14:31 +0000 Subject: [PATCH 44/51] documents --- model_zoo/vision/yolor/README.md | 2 +- model_zoo/vision/yolor/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 2c54f97e2f..04f871bd8c 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -1,7 +1,7 @@ # 编译YOLOR示例 当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) -(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). +(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换`checkout`到 `paper` 分支). 本文档说明如何进行[YOLOR](https://github.com/WongKinYiu/yolor)的快速部署推理。本目录结构如下 diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index 587adc4f24..02c52091a6 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -1,7 +1,7 @@ # 编译YOLOR示例 当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) -(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). +(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换`checkout`到 `paper` 分支). ## 获取ONNX文件 - 手动获取 From 807045fcfbecfb9394f5c769ae4eb0233b3a5299 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:15:47 +0000 Subject: [PATCH 45/51] documents --- model_zoo/vision/yolor/README.md | 2 +- model_zoo/vision/yolor/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 04f871bd8c..2c54f97e2f 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -1,7 +1,7 @@ # 编译YOLOR示例 当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) -(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换`checkout`到 `paper` 分支). +(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). 本文档说明如何进行[YOLOR](https://github.com/WongKinYiu/yolor)的快速部署推理。本目录结构如下 diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index 02c52091a6..587adc4f24 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -1,7 +1,7 @@ # 编译YOLOR示例 当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) -(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换`checkout`到 `paper` 分支). 
+(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). ## 获取ONNX文件 - 手动获取 From 041782637f6381a9f0b12f04922dbd6479118b29 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:18:27 +0000 Subject: [PATCH 46/51] documents --- model_zoo/vision/yolor/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 2c54f97e2f..ec44e82823 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -19,7 +19,7 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 ``` #下载yolor模型文件 From 52e6974b700313e742c4d8d9d7d079376061d41b Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:19:12 +0000 Subject: [PATCH 47/51] documents --- model_zoo/vision/yolor/README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index ec44e82823..78cc6d796a 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -28,8 +28,6 @@ # 导出onnx格式文件 python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 - # 如果您在导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 - # 移动onnx文件到demo目录 cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/ ``` From 042c388f14528bc5b27cd0b9f32a79946d37fbe0 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:32:45 +0000 Subject: [PATCH 48/51] documents --- fastdeploy/vision/wongkinyiu/__init__.py | 2 +- model_zoo/vision/yolov7/README.md | 4 ++-- model_zoo/vision/yolov7/cpp/README.md | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py index 026d10062f..3c77e85896 100644 --- a/fastdeploy/vision/wongkinyiu/__init__.py +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -135,7 +135,7 @@ def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5): return self._model.predict(input_image, conf_threshold, nms_iou_threshold) - # 一些跟YOLOv7模型有关的属性封装 + # 一些跟YOLOR模型有关的属性封装 # 多数是预处理相关,可通过修改如model.size = [1280, 1280]改变预处理时resize的大小(前提是模型支持) @property def size(self): diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 2bb13ce459..8b2f06d761 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -27,10 +27,10 @@ wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt # 导出onnx格式文件 - python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt # 移动onnx文件到demo目录 - cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ + cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` ## 安装FastDeploy diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index f216c1aecf..655e98678c 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -13,7 +13,7 @@ wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt # 导出onnx格式文件 - python models/export.py --grid --dynamic --weights 
PATH/TO/yolo7.pt + python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt ``` @@ -31,7 +31,7 @@ cmake .. make -j # 移动onnx文件到demo目录 -cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ +cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ # 下载图片 wget https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg From db3bfa2de15e40712562a267750eabe9e62fd4b7 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 09:40:11 +0000 Subject: [PATCH 49/51] documents --- model_zoo/vision/yolor/README.md | 2 +- model_zoo/vision/yolor/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 78cc6d796a..6212d0a279 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -19,7 +19,7 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 ``` #下载yolor模型文件 diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index 587adc4f24..b5c62646ee 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -6,7 +6,7 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/DefTruth/lite.ai.toolkit/blob/main/docs/ort/ort_yolor.zh.md)的解决办法 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 ``` #下载yolor模型文件 From f8058abfa4d719148fb9c2299a6ccc82745b2ece Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 11:17:35 +0000 Subject: [PATCH 50/51] documents --- model_zoo/vision/yolor/README.md | 2 +- model_zoo/vision/yolor/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 6212d0a279..358e62bbe1 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -19,7 +19,7 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[yolor#32](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 ``` #下载yolor模型文件 diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index b5c62646ee..d06bbe3005 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -6,7 +6,7 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[@DefTruth](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 + 
访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[yolor#32](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 ``` #下载yolor模型文件 From 9121299d6dd8ab8a207f6137f44b0b0e678f1908 Mon Sep 17 00:00:00 2001 From: Jason <928090362@qq.com> Date: Thu, 21 Jul 2022 10:13:28 +0800 Subject: [PATCH 51/51] Develop (#14) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add version notes for yolov5/yolov6/yolox * add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 (#29) * Add multi-label function for yolov5 * Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com>
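Both export walkthroughs above end with an ONNX file that is copied into the demo directory. Before building the C++ or Python demo, it can be worth a quick structural check of the export; a small sketch assuming the `onnx` package is installed, with the file name taken from the README's export step:

```python
# Optional sanity check for the exported model from the README steps above.
import onnx

model = onnx.load("yolor.onnx")
onnx.checker.check_model(model)  # raises if the graph is malformed
# Probe the declared input shape, e.g. [1, 3, 640, 640] for --img-size 640.
print([d.dim_value for d in
       model.graph.input[0].type.tensor_type.shape.dim])
```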