[Refactor][tools] Add prebuild tools. #347

Merged: 32 commits, May 23, 2022

Commits (the diff below shows changes from 12 of the 32 commits)
da87a80  move to lib (grimoire, Apr 11, 2022)
2a03ca7  optional import pytorch rewriter (grimoire, Mar 25, 2022)
8047c1b  reduce torch dependancy of tensorrt export (grimoire, Mar 24, 2022)
93f2611  remove more mmcv support (grimoire, Mar 24, 2022)
d90cd65  fix pytest (grimoire, Apr 15, 2022)
4ec8e88  remove mmcv logge (PeterH0323, Apr 20, 2022)
3c8a85f  Add `mmdeploy.utils.logging` (PeterH0323, Apr 20, 2022)
6f28158  Improve the common of the `get_logger` (PeterH0323, Apr 20, 2022)
3702378  Fix lint (PeterH0323, Apr 20, 2022)
b782773  onnxruntim add try catch to import wrapper if pytorch is available (PeterH0323, Apr 20, 2022)
bb83390  Using `mmcv.utils.logging` in all files under `mmdeploy/codebase` (PeterH0323, Apr 22, 2022)
d70b803  add __init__ (grimoire, May 2, 2022)
f00fe81  add prebuild tools (grimoire, May 5, 2022)
71521eb  support windows (grimoire, May 6, 2022)
8f2e874  for comment (grimoire, May 9, 2022)
2da21a5  Merge branch 'dev-0.5.0' into fix-for-prebuilt (grimoire, May 9, 2022)
4d53c1e  exit if failed (grimoire, May 13, 2022)
34e49ac  add exist (grimoire, May 13, 2022)
6efe4fe  decouple (grimoire, May 13, 2022)
fee4c41  add tags (grimoire, May 16, 2022)
fa92dae  remove .mmdeploy_python (grimoire, May 17, 2022)
d616014  read python version from system (grimoire, May 18, 2022)
31c9b10  update windows config (grimoire, May 19, 2022)
ace3b7d  update linux config (grimoire, May 19, 2022)
4143279  remote many (grimoire, May 21, 2022)
f2e7b00  better build name (grimoire, May 21, 2022)
8eec447  rename python tag (grimoire, May 21, 2022)
5a59ab1  fix pyhon-tag (grimoire, May 21, 2022)
a55b214  update window config (grimoire, May 23, 2022)
6e74153  add env search (grimoire, May 23, 2022)
35d27bd  update tag (grimoire, May 23, 2022)
2edc101  fix build without CUDA_TOOLKIT_ROOT_DIR (grimoire, May 23, 2022)
1 change: 1 addition & 0 deletions .gitignore
@@ -5,6 +5,7 @@ __pycache__/

# C extensions
*.so
onnx2ncnn

# Distribution / packaging
.Python
6 changes: 6 additions & 0 deletions MANIFEST.in
@@ -1 +1,7 @@
include requirements/*.txt
include mmdeploy/backend/ncnn/*.so
include mmdeploy/backend/ncnn/*.dll
include mmdeploy/backend/ncnn/*.pyd
include mmdeploy/lib/*.so
include mmdeploy/lib/*.dll
include mmdeploy/lib/*.pyd
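MANIFEST.in controls what goes into the source distribution; for the same binaries to land in wheels, they are typically also declared as package data (or picked up via include_package_data). A minimal sketch of what that declaration could look like, assuming a setuptools-based setup.py; the module paths below mirror the globs above and are illustrative, not mmdeploy's actual packaging code:

# Illustrative sketch only: package_data mirrors the MANIFEST.in globs above
# so that wheels, not just sdists, bundle the compiled ops libraries and the
# onnx2ncnn binary. mmdeploy's real setup.py may declare this differently.
from setuptools import find_packages, setup

setup(
    name='mmdeploy',
    packages=find_packages(),
    include_package_data=True,
    package_data={
        'mmdeploy.backend.ncnn': ['*.so', '*.dll', '*.pyd', 'onnx2ncnn*'],
        'mmdeploy.lib': ['*.so', '*.dll', '*.pyd'],
    },
)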
2 changes: 2 additions & 0 deletions csrc/backend_ops/ncnn/onnx2ncnn/CMakeLists.txt
@@ -12,6 +12,8 @@ if (PROTOBUF_FOUND)
${CMAKE_CURRENT_BINARY_DIR})
target_link_libraries(onnx2ncnn PRIVATE ${PROTOBUF_LIBRARIES})

set(_NCNN_CONVERTER_DIR ${CMAKE_SOURCE_DIR}/mmdeploy/backend/ncnn)
install(TARGETS onnx2ncnn DESTINATION ${_NCNN_CONVERTER_DIR})
else ()
message(
FATAL_ERROR "Protobuf not found, onnx model convert tool won't be built")
3 changes: 3 additions & 0 deletions csrc/backend_ops/ncnn/ops/CMakeLists.txt
@@ -23,3 +23,6 @@ target_include_directories(${PROJECT_NAME}
PUBLIC ${_COMMON_INCLUDE_DIRS})

add_library(mmdeploy::ncnn_ops ALIAS ${PROJECT_NAME})

set(_NCNN_OPS_DIR ${CMAKE_SOURCE_DIR}/mmdeploy/lib)
install(TARGETS ${PROJECT_NAME} DESTINATION ${_NCNN_OPS_DIR})
3 changes: 3 additions & 0 deletions csrc/backend_ops/onnxruntime/CMakeLists.txt
@@ -22,3 +22,6 @@ target_link_libraries(${PROJECT_NAME}_obj PUBLIC onnxruntime)
mmdeploy_add_library(${PROJECT_NAME} SHARED EXCLUDE "")
target_link_libraries(${PROJECT_NAME} PUBLIC ${PROJECT_NAME}_obj)
add_library(mmdeploy::onnxruntime::ops ALIAS ${PROJECT_NAME})

set(_ORT_OPS_DIR ${CMAKE_SOURCE_DIR}/mmdeploy/lib)
install(TARGETS ${PROJECT_NAME} DESTINATION ${_ORT_OPS_DIR})
3 changes: 3 additions & 0 deletions csrc/backend_ops/tensorrt/CMakeLists.txt
@@ -35,3 +35,6 @@ mmdeploy_export(${PROJECT_NAME}_obj)
mmdeploy_add_module(${PROJECT_NAME} MODULE EXCLUDE "")
target_link_libraries(${PROJECT_NAME} PRIVATE ${PROJECT_NAME}_obj)
add_library(mmdeploy::tensorrt_ops ALIAS ${PROJECT_NAME})

set(_TRT_OPS_DIR ${CMAKE_SOURCE_DIR}/mmdeploy/lib)
install(TARGETS ${PROJECT_NAME} DESTINATION ${_TRT_OPS_DIR})
6 changes: 5 additions & 1 deletion mmdeploy/__init__.py
@@ -4,7 +4,11 @@
from mmdeploy.utils import get_root_logger
from .version import __version__ # noqa F401

importlib.import_module('mmdeploy.pytorch')
if importlib.util.find_spec('torch'):
importlib.import_module('mmdeploy.pytorch')
else:
logger = get_root_logger()
logger.debug('torch is not installed.')

if importlib.util.find_spec('mmcv'):
importlib.import_module('mmdeploy.mmcv')
41 changes: 22 additions & 19 deletions mmdeploy/backend/__init__.py
@@ -1,27 +1,30 @@
# Copyright (c) OpenMMLab. All rights reserved.
import importlib

from mmdeploy.backend.ncnn import is_available as ncnn_available
from mmdeploy.backend.onnxruntime import is_available as ort_available
from mmdeploy.backend.openvino import is_available as openvino_available
from mmdeploy.backend.pplnn import is_available as pplnn_available
from mmdeploy.backend.sdk import is_available as sdk_available
from mmdeploy.backend.tensorrt import is_available as trt_available

__all__ = []
if ncnn_available():
from .ncnn import NCNNWrapper # noqa: F401,F403
__all__.append('NCNNWrapper')
if ort_available():
from .onnxruntime import ORTWrapper # noqa: F401,F403
__all__.append('ORTWrapper')
if trt_available():
from .tensorrt import TRTWrapper # noqa: F401,F403
__all__.append('TRTWrapper')
if pplnn_available():
from .pplnn import PPLNNWrapper # noqa: F401,F403
__all__.append('PPLNNWrapper')
if openvino_available():
from .openvino import OpenVINOWrapper # noqa: F401,F403
__all__.append('OpenVINOWrapper')
if sdk_available():
from .sdk import SDKWrapper # noqa: F401,F403
__all__.append('SDKWrapper')
if importlib.util.find_spec('torch') is not None:
__all__ = []
if ncnn_available():
from .ncnn import NCNNWrapper # noqa: F401,F403
__all__.append('NCNNWrapper')
if ort_available():
from .onnxruntime import ORTWrapper # noqa: F401,F403
__all__.append('ORTWrapper')
if trt_available():
from .tensorrt import TRTWrapper # noqa: F401,F403
__all__.append('TRTWrapper')
if pplnn_available():
from .pplnn import PPLNNWrapper # noqa: F401,F403
__all__.append('PPLNNWrapper')
if openvino_available():
from .openvino import OpenVINOWrapper # noqa: F401,F403
__all__.append('OpenVINOWrapper')
if sdk_available():
from .sdk import SDKWrapper # noqa: F401,F403
__all__.append('SDKWrapper')
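For readers unfamiliar with the gate added above: importlib.util.find_spec locates a package without importing it, returning None when it is absent, which makes it a cheap way to skip torch-dependent imports. A minimal self-contained sketch:

# Minimal sketch of the optional-dependency gate used above. find_spec only
# searches for the module spec; it does not execute the package, so the probe
# has no import side effects.
import importlib.util

if importlib.util.find_spec('torch') is not None:
    import torch  # safe: the package is installed
else:
    torch = None  # callers must handle the torch-free case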
7 changes: 5 additions & 2 deletions mmdeploy/backend/ncnn/__init__.py
@@ -32,6 +32,9 @@ def is_plugin_available():


if is_available():
from .wrapper import NCNNWrapper
try:
from .wrapper import NCNNWrapper

__all__ = ['NCNNWrapper']
__all__ = ['NCNNWrapper']
except Exception:
pass
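The try/except here (and the analogous ones in the onnxruntime, sdk, and tensorrt packages below) guards a second failure mode: ncnn itself may be importable while the wrapper import still fails because torch is missing. A minimal sketch of the two-level pattern in isolation, with hypothetical names:

# Hypothetical sketch of the two-level guard used in these backend packages:
# the availability check gates the block, and the try/except absorbs a
# wrapper import that additionally needs torch.
__all__ = []
if is_available():  # backend runtime present?
    try:
        from .wrapper import NCNNWrapper  # may still require torch
        __all__.append('NCNNWrapper')
    except Exception:
        pass  # torch (or another wrapper dependency) is absent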
6 changes: 2 additions & 4 deletions mmdeploy/backend/ncnn/init_plugins.py
@@ -11,7 +11,7 @@ def get_ops_path() -> str:
str: The library path of NCNN custom ops.
"""
candidates = [
'../../../build/lib/libmmdeploy_ncnn_ops.so',
'../../lib/libmmdeploy_ncnn_ops.so',
'../../../build/bin/*/mmdeploy_ncnn_ops.dll'
]
return get_file_path(os.path.dirname(__file__), candidates)
@@ -23,7 +23,5 @@ def get_onnx2ncnn_path() -> str:
Returns:
str: A path of onnx2ncnn tool.
"""
candidates = [
'../../../build/bin/onnx2ncnn', '../../../build/bin/*/onnx2ncnn.exe'
]
candidates = ['./onnx2ncnn', './onnx2ncnn.exe']
return get_file_path(os.path.dirname(__file__), candidates)
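get_file_path is an existing mmdeploy helper; for orientation, here is a hypothetical reimplementation inferred from its call sites (the real one lives in mmdeploy's utils and may differ):

# Hypothetical sketch of get_file_path, inferred from how it is called above;
# the actual helper may behave differently. Candidates are resolved against
# base_dir in order, glob patterns are expanded, and the first match wins.
import glob
import os.path as osp
from typing import Sequence


def get_file_path(base_dir: str, candidates: Sequence[str]) -> str:
    for candidate in candidates:
        matches = glob.glob(osp.join(base_dir, candidate))
        if matches:
            return matches[0]
    return ''  # empty path signals "not found"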
12 changes: 9 additions & 3 deletions mmdeploy/backend/ncnn/onnx2ncnn.py
@@ -1,13 +1,19 @@
# Copyright (c) OpenMMLab. All rights reserved.
import os
import os.path as osp
from subprocess import call
from typing import List

import mmcv

from .init_plugins import get_onnx2ncnn_path


def mkdir_or_exist(dir_name, mode=0o777):
if dir_name == '':
return
dir_name = osp.expanduser(dir_name)
os.makedirs(dir_name, mode=mode, exist_ok=True)


def get_output_model_file(onnx_path: str, work_dir: str) -> List[str]:
"""Returns the path to the .param, .bin file with export result.

@@ -19,7 +25,7 @@ def get_output_model_file(onnx_path: str, work_dir: str) -> List[str]:
List[str]: The path to the files where the export result will be
located.
"""
mmcv.mkdir_or_exist(osp.abspath(work_dir))
mkdir_or_exist(osp.abspath(work_dir))
file_name = osp.splitext(osp.split(onnx_path)[1])[0]
save_param = osp.join(work_dir, file_name + '.param')
save_bin = osp.join(work_dir, file_name + '.bin')
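The local mkdir_or_exist added above removes onnx2ncnn.py's last mmcv import; assuming it mirrors mmcv's helper of the same name, usage looks like:

# Assumed behavior, mirroring mmcv's mkdir_or_exist: expand '~', create any
# missing parents, and succeed silently when the directory already exists.
mkdir_or_exist('work_dirs/ncnn_export')  # nested dirs created as needed
mkdir_or_exist('')                       # explicit no-op for empty paths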
8 changes: 6 additions & 2 deletions mmdeploy/backend/onnxruntime/__init__.py
@@ -26,5 +26,9 @@ def is_plugin_available():


if is_available():
from .wrapper import ORTWrapper
__all__ = ['ORTWrapper']
try:
# import wrapper if pytorch is available
from .wrapper import ORTWrapper
__all__ = ['ORTWrapper']
except Exception:
pass
2 changes: 1 addition & 1 deletion mmdeploy/backend/onnxruntime/init_plugins.py
@@ -11,7 +11,7 @@ def get_ops_path() -> str:
str: The library path to onnxruntime custom ops.
"""
candidates = [
'../../../build/lib/libmmdeploy_onnxruntime_ops.so',
'../../lib/libmmdeploy_onnxruntime_ops.so',
'../../../build/bin/*/mmdeploy_onnxruntime_ops.dll',
]
return get_file_path(os.path.dirname(__file__), candidates)
11 changes: 7 additions & 4 deletions mmdeploy/backend/sdk/__init__.py
@@ -20,10 +20,13 @@
lib_dir = os.path.dirname(lib_path)
sys.path.insert(0, lib_dir)

if importlib.util.find_spec(module_name) is not None:
from .wrapper import SDKWrapper
__all__ = ['SDKWrapper']
_is_available = True
try:
if importlib.util.find_spec(module_name) is not None:
from .wrapper import SDKWrapper
__all__ = ['SDKWrapper']
_is_available = True
except Exception:
pass


def is_available() -> bool:
15 changes: 9 additions & 6 deletions mmdeploy/backend/tensorrt/__init__.py
@@ -3,8 +3,6 @@
import importlib
import os.path as osp

import torch

from .init_plugins import get_ops_path, load_tensorrt_plugin


@@ -15,8 +13,7 @@ def is_available():
bool: True if TensorRT package is installed and cuda is available.
"""

return importlib.util.find_spec('tensorrt') is not None and \
torch.cuda.is_available()
return importlib.util.find_spec('tensorrt') is not None


def is_plugin_available():
@@ -31,9 +28,15 @@

if is_available():
from .utils import create_trt_engine, load_trt_engine, save_trt_engine
from .wrapper import TRTWrapper

__all__ = [
'create_trt_engine', 'save_trt_engine', 'load_trt_engine',
'TRTWrapper', 'load_tensorrt_plugin'
'load_tensorrt_plugin'
]

try:
# import wrapper if pytorch is available
from .wrapper import TRTWrapper
__all__ += ['TRTWrapper']
except Exception:
pass
25 changes: 12 additions & 13 deletions mmdeploy/backend/tensorrt/calib_utils.py
@@ -3,8 +3,9 @@

import h5py
import numpy as np
import pycuda.autoinit # noqa:F401
import pycuda.driver as cuda
import tensorrt as trt
import torch

DEFAULT_CALIBRATION_ALGORITHM = trt.CalibrationAlgoType.ENTROPY_CALIBRATION_2

@@ -67,30 +68,28 @@ def get_batch(self, names: Sequence[str], **kwargs) -> list:
ret = []
for name in names:
input_group = self.calib_data[name]
data_np = input_group[str(self.count)][...]
data_torch = torch.from_numpy(data_np)
data_np = input_group[str(self.count)][...].astype(np.float32)

# tile the tensor so we can keep the same distribute
opt_shape = self.input_shapes[name]['opt_shape']
data_shape = data_torch.shape
data_shape = data_np.shape

reps = [
int(np.ceil(opt_s / data_s))
for opt_s, data_s in zip(opt_shape, data_shape)
]

data_torch = data_torch.tile(reps)
data_np = np.tile(data_np, reps)

for dim, opt_s in enumerate(opt_shape):
if data_torch.shape[dim] != opt_s:
data_torch = data_torch.narrow(dim, 0, opt_s)
slice_list = tuple(slice(0, end) for end in opt_shape)
data_np = data_np[slice_list]

if name not in self.buffers:
self.buffers[name] = data_torch.cuda(self.device_id)
else:
self.buffers[name].copy_(data_torch.cuda(self.device_id))
data_np_cuda_ptr = cuda.mem_alloc(data_np.nbytes)
cuda.memcpy_htod(data_np_cuda_ptr,
np.ascontiguousarray(data_np))
self.buffers[name] = data_np_cuda_ptr

ret.append(int(self.buffers[name].data_ptr()))
ret.append(self.buffers[name])
self.count += 1
return ret
else:
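With this change the int8 calibrator hands TensorRT raw device pointers allocated through pycuda instead of torch CUDA tensors, which removes torch from the calibration path entirely. A minimal sketch of the transfer pattern, with a made-up input shape:

# Minimal sketch of the pycuda host-to-device pattern used above. Importing
# pycuda.autoinit creates a CUDA context as a side effect, which is what lets
# mem_alloc work without explicit device setup.
import numpy as np
import pycuda.autoinit  # noqa: F401
import pycuda.driver as cuda

host = np.zeros((1, 3, 224, 224), dtype=np.float32)    # made-up calib batch
dev_ptr = cuda.mem_alloc(host.nbytes)                  # raw device buffer
cuda.memcpy_htod(dev_ptr, np.ascontiguousarray(host))  # host -> device copy
bindings = [int(dev_ptr)]  # get_batch returns plain int device pointers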
2 changes: 1 addition & 1 deletion mmdeploy/backend/tensorrt/init_plugins.py
@@ -12,7 +12,7 @@ def get_ops_path() -> str:
str: A path of the TensorRT plugin library.
"""
candidates = [
'../../../build/lib/libmmdeploy_tensorrt_ops.so',
'../../lib/libmmdeploy_tensorrt_ops.so',
'../../../build/bin/*/mmdeploy_tensorrt_ops.dll'
]
return get_file_path(os.path.dirname(__file__), candidates)