[OpenVINO-EP] Adding windows samples (#49)
* Added cpp samples for Windows

* Folder structure change and code cleanup

* Updated README

* Alignment correction

* Moving Readme.md to the right location

Signed-off-by: Aravind <[email protected]>

* Updated the usage command with the Windows executable

* Added sample executable name in usage command

Co-authored-by: Aravind Gunda <[email protected]>
MaajidKhan and gundaarx authored Nov 2, 2021
1 parent d699c1b commit da3c43f
Showing 10 changed files with 1,630 additions and 0 deletions.
37 changes: 37 additions & 0 deletions c_cxx/OpenVINO_EP/Windows/CMakeLists.txt
@@ -0,0 +1,37 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.17) # foreach(... IN ZIP_LISTS ...) in squeezenet_classification requires CMake 3.17+

# Project
project(onnxruntime_samples C CXX)
string(APPEND CMAKE_CXX_FLAGS " /W4")

#onnxruntime-openvino provider
option(onnxruntime_USE_OPENVINO "Build with OpenVINO support" OFF)
# these hold paths, so declare them as cache PATH entries rather than boolean option()s
set(OPENCV_ROOTDIR "" CACHE PATH "OpenCV root dir")
set(ONNXRUNTIME_ROOTDIR "" CACHE PATH "onnxruntime root dir")

if(NOT ONNXRUNTIME_ROOTDIR)
set(ONNXRUNTIME_ROOTDIR "C:/Program Files (x86)/onnxruntime")
endif()
include_directories("${ONNXRUNTIME_ROOTDIR}/include" "${ONNXRUNTIME_ROOTDIR}/include/onnxruntime/core/session")
link_directories("${ONNXRUNTIME_ROOTDIR}/lib")

if(OPENCV_ROOTDIR)
set(OPENCV_FOUND true)
set(OPENCV_INCLUDE_DIRS "${OPENCV_ROOTDIR}/include")
set(OPENCV_LIBDIR "${OPENCV_ROOTDIR}/lib")
file(GLOB OPENCV_DEBUG_LIBRARIES ${OPENCV_LIBDIR}/opencv_imgcodecs*d.lib ${OPENCV_LIBDIR}/opencv_dnn*d.lib ${OPENCV_LIBDIR}/opencv_core*d.lib ${OPENCV_LIBDIR}/opencv_imgproc*d.lib)
file(GLOB OPENCV_RELEASE_LIBRARIES ${OPENCV_LIBDIR}/opencv_imgcodecs*.lib ${OPENCV_LIBDIR}/opencv_dnn*.lib ${OPENCV_LIBDIR}/opencv_core*.lib ${OPENCV_LIBDIR}/opencv_imgproc*.lib)
list(FILTER OPENCV_RELEASE_LIBRARIES EXCLUDE REGEX ".*d\\.lib")
endif()

if(onnxruntime_USE_OPENVINO)
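  # defines USE_OPENVINO for the samples (e.g. the #ifdef USE_OPENVINO block in model-explorer.cpp)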
add_definitions(-DUSE_OPENVINO)
endif()

if(OPENCV_FOUND)
add_subdirectory(squeezenet_classification)
endif()
add_subdirectory(model-explorer)
40 changes: 40 additions & 0 deletions c_cxx/OpenVINO_EP/Windows/README.md
@@ -0,0 +1,40 @@
# Windows C++ samples with OVEP (OpenVINO Execution Provider)

1. model-explorer
2. Squeezenet classification

## How to build

#### Build ONNX Runtime
Open the x64 Native Tools Command Prompt for VS 2019 and build ONNX Runtime with the OpenVINO Execution Provider enabled:
```
build.bat --config RelWithDebInfo --use_openvino CPU_FP32 --build_shared_lib --parallel --cmake_extra_defines CMAKE_INSTALL_PREFIX=c:\dev\ort_install
```

By default, the Windows build output goes to the build\Windows\<config> folder; for the command above, that is build\Windows\RelWithDebInfo. Run the following commands to install the headers and libraries into the CMAKE_INSTALL_PREFIX location (c:\dev\ort_install):

```
cd build\Windows\RelWithDebInfo
msbuild INSTALL.vcxproj /p:Configuration=RelWithDebInfo
```

#### Build the samples

Open the x64 Native Tools Command Prompt for VS 2019 and clone the samples repo:
```
git clone https://github.com/microsoft/onnxruntime-inference-examples.git
```
Change your current directory to c_cxx\OpenVINO_EP\Windows, then run
```bat
mkdir build && cd build
cmake .. -A x64 -T host=x64 -Donnxruntime_USE_OPENVINO=ON -DONNXRUNTIME_ROOTDIR=c:\dev\ort_install -DOPENCV_ROOTDIR="C:\Program Files (x86)\Intel\openvino_2021.4.689\opencv"
```
Point -DOPENCV_ROOTDIR at your OpenCV installation; skip the flag if you don't want to build the squeezenet sample, as in the sketch below.
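For example, a minimal configure without OpenCV looks like this (the squeezenet sample is then skipped):
```bat
cmake .. -A x64 -T host=x64 -Donnxruntime_USE_OPENVINO=ON -DONNXRUNTIME_ROOTDIR=c:\dev\ort_install
```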
Build the samples with msbuild, choosing either the Debug or Release configuration (Release shown here):

```bat
msbuild onnxruntime_samples.sln /p:Configuration=Release
```

Before running the samples, initialize the OpenVINO environment by running setupvars.bat.
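For example, to initialize the environment and run the model-explorer sample (a sketch: the OpenVINO install path and the model file name are assumptions that depend on your setup):
```bat
:: example OpenVINO 2021.4 install path; adjust to your installation
"C:\Program Files (x86)\Intel\openvino_2021.4.689\bin\setupvars.bat"
:: with the Visual Studio generator, the Release binary lands under build\model-explorer\Release
cd build\model-explorer\Release
model-explorer.exe squeezenet1.1-7.onnx
```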
17 changes: 17 additions & 0 deletions c_cxx/OpenVINO_EP/Windows/model-explorer/CMakeLists.txt
@@ -0,0 +1,17 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

add_executable(model-explorer model-explorer.cpp)
target_link_libraries(model-explorer PRIVATE onnxruntime)

# In the default onnxruntime install path, the required DLLs are in the lib and bin folders
set(DLL_DIRS "${ONNXRUNTIME_ROOTDIR}/lib;${ONNXRUNTIME_ROOTDIR}/bin")
foreach(DLL_DIR IN LISTS DLL_DIRS)
file(GLOB ALL_DLLS ${DLL_DIR}/*.dll)
foreach(CurrentDll IN LISTS ALL_DLLS)
add_custom_command(TARGET model-explorer POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different
"${CurrentDll}"
$<TARGET_FILE_DIR:model-explorer>)
endforeach()
endforeach()
122 changes: 122 additions & 0 deletions c_cxx/OpenVINO_EP/Windows/model-explorer/model-explorer.cpp
@@ -0,0 +1,122 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

/**
* This sample application demonstrates how to use components of the experimental C++ API
* to query for model inputs/outputs and how to run inference on a model.
*
* This example is best run with one of the ResNet models (e.g. ResNet18) from the onnx model zoo at
* https://github.com/onnx/models
*
* Assumptions made in this example:
* 1) The onnx model has 1 input node and 1 output node
 *
* In this example, we do the following:
* 1) read in an onnx model
* 2) print out some metadata information about inputs and outputs that the model expects
* 3) generate random data for an input tensor
* 4) pass tensor through the model and check the resulting tensor
*
*/

#include <algorithm> // std::generate
#include <assert.h>
#include <iostream>
#include <sstream>
#include <vector>
#include <experimental_onnxruntime_cxx_api.h>

// pretty prints a shape dimension vector
std::string print_shape(const std::vector<int64_t>& v) {
std::stringstream ss("");
for (size_t i = 0; i < v.size() - 1; i++)
ss << v[i] << "x";
ss << v[v.size() - 1];
return ss.str();
}

int calculate_product(const std::vector<int64_t>& v) {
int total = 1;
for (auto& i : v) total *= i;
return total;
}

using namespace std;

int main(int argc, char** argv) {
if (argc != 2) {
cout << "Usage: model-explorer.exe <onnx_model.onnx>" << endl;
return -1;
}

#ifdef _WIN32
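  // ORTCHAR_T is wchar_t on Windows; this simple widening is only correct for ASCII paths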
std::string str = argv[1];
std::wstring wide_string = std::wstring(str.begin(), str.end());
std::basic_string<ORTCHAR_T> model_file = std::basic_string<ORTCHAR_T>(wide_string);
#else
std::string model_file = argv[1];
#endif

// onnxruntime setup
Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "example-model-explorer");
Ort::SessionOptions session_options;
// Append the OpenVINO Execution Provider
#ifdef USE_OPENVINO
// Using OPENVINO backend
OrtOpenVINOProviderOptions options;
options.device_type = "CPU_FP32"; //Other options are: GPU_FP32, GPU_FP16, MYRIAD_FP16
std::cout << "OpenVINO device type is set to: " << options.device_type << std::endl;
session_options.AppendExecutionProvider_OpenVINO(options);
#endif
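  // without USE_OPENVINO, the session uses onnxruntime's default CPU execution provider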
Ort::Experimental::Session session = Ort::Experimental::Session(env, model_file, session_options); // access experimental components via the Experimental namespace

// print name/shape of inputs
std::vector<std::string> input_names = session.GetInputNames();
std::vector<std::vector<int64_t> > input_shapes = session.GetInputShapes();
cout << "Input Node Name/Shape (" << input_names.size() << "):" << endl;
for (size_t i = 0; i < input_names.size(); i++) {
cout << "\t" << input_names[i] << " : " << print_shape(input_shapes[i]) << endl;
}

// print name/shape of outputs
std::vector<std::string> output_names = session.GetOutputNames();
std::vector<std::vector<int64_t> > output_shapes = session.GetOutputShapes();
cout << "Output Node Name/Shape (" << output_names.size() << "):" << endl;
for (size_t i = 0; i < output_names.size(); i++) {
cout << "\t" << output_names[i] << " : " << print_shape(output_shapes[i]) << endl;
}

// Assume model has 1 input node and 1 output node.
assert(input_names.size() == 1 && output_names.size() == 1);

// Create a single Ort tensor of random numbers
auto input_shape = input_shapes[0];
int total_number_elements = calculate_product(input_shape);
std::vector<float> input_tensor_values(total_number_elements);
std::generate(input_tensor_values.begin(), input_tensor_values.end(), [&] { return rand() % 255; }); // generate random numbers in the range [0, 254]
std::vector<Ort::Value> input_tensors;
input_tensors.push_back(Ort::Experimental::Value::CreateTensor<float>(input_tensor_values.data(), input_tensor_values.size(), input_shape));

// double-check the dimensions of the input tensor
assert(input_tensors[0].IsTensor() &&
input_tensors[0].GetTensorTypeAndShapeInfo().GetShape() == input_shape);
cout << "\ninput_tensor shape: " << print_shape(input_tensors[0].GetTensorTypeAndShapeInfo().GetShape()) << endl;

// pass data through model
cout << "Running model...";
try {
auto output_tensors = session.Run(session.GetInputNames(), input_tensors, session.GetOutputNames());
cout << "done" << endl;

// double-check the dimensions of the output tensors
// NOTE: the number of output tensors is equal to the number of output nodes specified in the Run() call
assert(output_tensors.size() == session.GetOutputNames().size() &&
output_tensors[0].IsTensor());
cout << "output_tensor_shape: " << print_shape(output_tensors[0].GetTensorTypeAndShapeInfo().GetShape()) << endl;

} catch (const Ort::Exception& exception) {
cout << "ERROR running model inference: " << exception.what() << endl;
exit(-1);
}
}
25 changes: 25 additions & 0 deletions c_cxx/OpenVINO_EP/Windows/squeezenet_classification/CMakeLists.txt
@@ -0,0 +1,25 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

add_executable(run_squeezenet "squeezenet_cpp_app.cpp")
target_include_directories(run_squeezenet PRIVATE ${OPENCV_INCLUDE_DIRS})
target_link_libraries(run_squeezenet PRIVATE onnxruntime)

if(OPENCV_LIBDIR)
target_link_directories(run_squeezenet PRIVATE ${OPENCV_LIBDIR})
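  # link the release lib in optimized builds and its debug counterpart in debug builds (ZIP_LISTS requires CMake 3.17+)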
foreach(RelLib DebLib IN ZIP_LISTS OPENCV_RELEASE_LIBRARIES OPENCV_DEBUG_LIBRARIES)
target_link_libraries(run_squeezenet PRIVATE optimized ${RelLib} debug ${DebLib})
endforeach()
endif()

#In the default onnxruntime install path, the required DLLs are in the lib and bin folders
set(DLL_DIRS "${ONNXRUNTIME_ROOTDIR}/lib;${ONNXRUNTIME_ROOTDIR}/bin")
foreach(DLL_DIR IN LISTS DLL_DIRS)
file(GLOB ALL_DLLS ${DLL_DIR}/*.dll)
foreach(ORTDll IN LISTS ALL_DLLS)
add_custom_command(TARGET run_squeezenet POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different
"${ORTDll}"
$<TARGET_FILE_DIR:run_squeezenet>)
endforeach()
endforeach()