[Inference] Fix the ORT backend multiple-input bug (#43621)
* fix ort backend multiple inputs bug

* fix ort backend multiple inputs bug

* fix ort backend multiple inputs bug

* fix ort backend multiple inputs bug

* code format

* code format
heliqi authored Jun 21, 2022
1 parent 75144f1 commit 61591af
Showing 9 changed files with 304 additions and 151 deletions.
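
The bug this commit fixes only surfaces when a model feeds the ONNX Runtime backend more than one input: every named input must be bound to its own tensor before Run(). A minimal sketch of that pattern against the paddle_infer C++ API used by the demo below — the EnableONNXRuntime()/EnableORTOptimization() switches, the header name, and the all-ones dummy data are illustrative assumptions, not part of this commit:

```cpp
#include <cstdint>
#include <functional>
#include <numeric>
#include <string>
#include <vector>

#include "paddle_inference_api.h"  // header name assumed from demo_ci conventions

void RunMultiInput(const std::string& model_dir) {
  paddle_infer::Config config;
  config.SetModel(model_dir);
  config.EnableONNXRuntime();      // route execution through the ORT backend
  config.EnableORTOptimization();  // optional ORT graph optimization
  auto predictor = paddle_infer::CreatePredictor(config);

  // For illustration, feed every input the same shape and dummy data;
  // a real model would bind per-input shapes and values here.
  std::vector<int> shape = {1, 3, 224, 224};
  std::vector<float> dummy(1 * 3 * 224 * 224, 1.0f);

  // One tensor per named input — the loop that triggered the ORT
  // multiple-input bug when it bound more than one input.
  for (const auto& name : predictor->GetInputNames()) {
    auto input = predictor->GetInputHandle(name);
    input->Reshape(shape);
    input->CopyFromCpu(dummy.data());
  }
  predictor->Run();

  auto output = predictor->GetOutputHandle(predictor->GetOutputNames()[0]);
  auto out_shape = output->shape();
  int64_t numel = std::accumulate(out_shape.begin(), out_shape.end(),
                                  int64_t{1}, std::multiplies<int64_t>());
  std::vector<float> out_data(numel);
  output->CopyToCpu(out_data.data());
}
```
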
5 changes: 3 additions & 2 deletions cmake/external/onnxruntime.cmake
@@ -52,8 +52,9 @@ else()
)
endif()

-include_directories(${ONNXRUNTIME_INC_DIR}
-)# For ONNXRUNTIME code to include internal headers.
+# For ONNXRUNTIME code to include internal headers.
+include_directories(${ONNXRUNTIME_INC_DIR})

if(WIN32)
set(ONNXRUNTIME_SOURCE_LIB
"${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.dll"
16 changes: 3 additions & 13 deletions cmake/external/paddle2onnx.cmake
@@ -34,35 +34,25 @@ set(PADDLE2ONNX_INC_DIR
set(PADDLE2ONNX_LIB_DIR
"${PADDLE2ONNX_INSTALL_DIR}/lib"
CACHE PATH "onnxruntime lib directory." FORCE)
-set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}"
-                      "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}")
+set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${PADDLE2ONNX_LIB_DIR}")

-include_directories(${PADDLE2ONNX_INC_DIR}
-)# For PADDLE2ONNX code to include internal headers.
+# For PADDLE2ONNX code to include internal headers.
+include_directories(${PADDLE2ONNX_INC_DIR})
if(WIN32)
-  set(PADDLE2ONNX_SOURCE_LIB
-      "${PADDLE2ONNX_SOURCE_DIR}/lib/libpaddle2onnx.dylib"
-      CACHE FILEPATH "Paddle2ONNX source library." FORCE)
set(PADDLE2ONNX_LIB
"${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.dll"
CACHE FILEPATH "paddle2onnx library." FORCE)
set(PADDLE2ONNX_COMPILE_LIB
"${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.lib"
CACHE FILEPATH "paddle2onnx compile library." FORCE)
elseif(APPLE)
-  set(PADDLE2ONNX_SOURCE_LIB
-      "${PADDLE2ONNX_SOURCE_DIR}/lib/libpaddle2onnx.dylib"
-      CACHE FILEPATH "Paddle2ONNX source library." FORCE)
set(PADDLE2ONNX_LIB
"${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.dylib"
CACHE FILEPATH "PADDLE2ONNX library." FORCE)
set(PADDLE2ONNX_COMPILE_LIB
"${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.dylib"
CACHE FILEPATH "paddle2onnx compile library." FORCE)
else()
-  set(PADDLE2ONNX_SOURCE_LIB
-      "${PADDLE2ONNX_SOURCE_DIR}/lib/libpaddle2onnx.so"
-      CACHE FILEPATH "Paddle2ONNX source library." FORCE)
set(PADDLE2ONNX_LIB
"${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.so"
CACHE FILEPATH "PADDLE2ONNX library." FORCE)
35 changes: 31 additions & 4 deletions paddle/fluid/inference/api/demo_ci/onnxruntime_mobilenet_demo.cc
@@ -13,17 +13,19 @@ See the License for the specific language governing permissions and
limitations under the License. */

/*
- * This file contains demo of mobilenet for tensorrt.
+ * This file contains demo of mobilenet for onnxruntime backend.
*/

#include <glog/logging.h> // use glog instead of CHECK to avoid importing other paddle header files.

#include <algorithm>
+#include <numeric>
#include <vector>

#include "gflags/gflags.h"
#include "utils.h" // NOLINT

DEFINE_string(modeldir, "", "Directory of the inference model.");
+DEFINE_string(data, "", "path of data");

namespace paddle {
namespace demo {
@@ -39,8 +41,21 @@ void Main() {
auto predictor = paddle_infer::CreatePredictor(config);

// Inference.
LOG(INFO) << "--- prepare input data ----";
std::vector<int> input_shape = {1, 3, 224, 224};
-  std::vector<float> input_data(1 * 3 * 224 * 224, 1.0);
+  std::vector<float> input_data;
+  std::string line;
+  std::ifstream file(FLAGS_data);
+  std::getline(file, line);
+  file.close();
+  std::vector<std::string> data_strs;
+  split(line, ' ', &data_strs);
+  int input_num = 0;
+  for (auto& d : data_strs) {
+    input_num += 1;
+    input_data.push_back(std::stof(d));
+  }
+
std::vector<float> out_data;
out_data.resize(1000);
auto input_names = predictor->GetInputNames();
@@ -53,7 +68,19 @@
predictor->Run();
output_tensor->CopyToCpu(out_data.data());

-  VLOG(3) << "output.size " << out_data.size();
+  std::vector<int> out_index(out_data.size());
+  std::iota(out_index.begin(), out_index.end(), 0);
+  std::sort(
+      out_index.begin(), out_index.end(), [&out_data](int index1, int index2) {
+        return out_data[index1] > out_data[index2];
+      });
+  LOG(INFO) << "output.size " << out_data.size()
+            << " max_index:" << out_index[0];
+  CHECK_EQ(out_data.size(), 1000);
+  int max_index = out_index[0];
+  CHECK_EQ(max_index, 13);
+  float max_score = out_data[max_index];
+  CHECK_LE(fabs(max_score - 0.99981), 1e-4);
}

} // namespace demo
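
The new input path leans on the split helper that demo_ci's utils.h already provides; the diff only shows the call split(line, ' ', &data_strs). A tokenizer consistent with that call — the actual utils.h implementation is not part of this diff, so the body below is an assumption about its behavior:

```cpp
#include <string>
#include <vector>

// Split `str` on `sep`, appending the non-empty pieces to `*pieces`,
// matching the call split(line, ' ', &data_strs) in the demo above.
void split(const std::string& str, char sep, std::vector<std::string>* pieces) {
  if (str.empty()) return;
  size_t pos = 0;
  size_t next = str.find(sep, pos);
  while (next != std::string::npos) {
    if (next > pos) pieces->push_back(str.substr(pos, next - pos));
    pos = next + 1;
    next = str.find(sep, pos);
  }
  if (pos < str.size()) pieces->push_back(str.substr(pos));
}
```
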
11 changes: 7 additions & 4 deletions paddle/fluid/inference/api/demo_ci/run.sh
@@ -52,15 +52,17 @@ if [ $7 == ON ]; then
mkdir -p MobileNetV2
cd MobileNetV2
if [[ -e "MobileNetV2.inference.model.tar.gz" ]]; then
echo "MobileNetV2.inference.model.tar.gz has been downloaded."
else
rm -rf MobileNetV2.inference.model.tar.gz
fi
# echo "MobileNetV2.inference.model.tar.gz has been downloaded."
# else
if [ $WIN_DETECT != "" ]; then
wget -q -Y off http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
else
wget -q --no-proxy http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
fi
tar xzf *.tar.gz
-    fi
+    # fi
cd ..
fi

@@ -265,7 +267,8 @@ for WITH_STATIC_LIB in ON OFF; do
-DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME
make -j$(nproc)
./onnxruntime_mobilenet_demo \
-      --modeldir=$DATA_DIR/MobileNetV2/MobileNetV2
+      --modeldir=$DATA_DIR/MobileNetV2/MobileNetV2 \
+      --data=$DATA_DIR/MobileNetV2/MobileNetV2/data.txt
if [ $? -ne 0 ]; then
echo "onnxruntime_mobilenet_demo runs failed " >> ${current_dir}/test_summary.txt
EXIT_CODE=1
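
run.sh now passes --data=$DATA_DIR/MobileNetV2/MobileNetV2/data.txt, and the demo reads one line of space-separated floats from it: 1 * 3 * 224 * 224 = 150528 values for MobileNetV2. The checks above (max_index == 13, max score ≈ 0.99981) imply the CI data file holds a specific preprocessed image; the sketch below only illustrates the expected file format, with constant values standing in for real image data:

```cpp
#include <fstream>

// Write a data.txt in the format the demo reads: a single line of
// 1 * 3 * 224 * 224 space-separated float values.
int main() {
  std::ofstream out("data.txt");
  const int numel = 1 * 3 * 224 * 224;
  for (int i = 0; i < numel; ++i) {
    out << (i ? " " : "") << 1.0f;  // constant placeholder values
  }
  out << "\n";
  return 0;
}
```
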
(diffs for the remaining 5 changed files not shown)
