Add ONNX export metadata (#7353)
glenn-jocher authored Apr 9, 2022
1 parent 698a5d7 commit 3bb233a
Showing 2 changed files with 10 additions and 1 deletion.
export.py (8 changes: 7 additions & 1 deletion)
@@ -140,7 +140,13 @@ def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorst
        # Checks
        model_onnx = onnx.load(f)  # load onnx model
        onnx.checker.check_model(model_onnx)  # check onnx model
-       # LOGGER.info(onnx.helper.printable_graph(model_onnx.graph))  # print
+
+       # Metadata
+       d = {'stride': int(max(model.stride)), 'names': model.names}
+       for k, v in d.items():
+           meta = model_onnx.metadata_props.add()
+           meta.key, meta.value = k, str(v)
+       onnx.save(model_onnx, f)

        # Simplify
        if simplify:
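The metadata is written as standard ONNX model properties, so it can be checked directly with the onnx package after export. A minimal sketch (not part of the commit), assuming an exported file named yolov5s.onnx in the working directory:

import onnx

model_onnx = onnx.load('yolov5s.onnx')  # model produced by export.py
for prop in model_onnx.metadata_props:  # repeated key/value entries written above
    print(prop.key, '=', prop.value)    # e.g. stride = 32 and the str() of model.names

Because the values are stored as strings, consumers need to convert them back (int() for stride, eval() for names), which is what the models/common.py change below does.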
models/common.py (3 changes: 3 additions & 0 deletions)
@@ -328,6 +328,9 @@ def __init__(self, weights='yolov5s.pt', device=torch.device('cpu'), dnn=False,
            import onnxruntime
            providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] if cuda else ['CPUExecutionProvider']
            session = onnxruntime.InferenceSession(w, providers=providers)
+           meta = session.get_modelmeta().custom_metadata_map  # metadata
+           if 'stride' in meta:
+               stride, names = int(meta['stride']), eval(meta['names'])
        elif xml:  # OpenVINO
            LOGGER.info(f'Loading {w} for OpenVINO inference...')
            check_requirements(('openvino-dev',))  # requires openvino-dev: https://pypi.org/project/openvino-dev/
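Outside of DetectMultiBackend, the same metadata can be read back through ONNX Runtime's model-metadata API. A minimal usage sketch, assuming a local yolov5s.onnx exported with the change above; ast.literal_eval is used here in place of the commit's eval() purely for illustration:

import ast
import onnxruntime

session = onnxruntime.InferenceSession('yolov5s.onnx', providers=['CPUExecutionProvider'])
meta = session.get_modelmeta().custom_metadata_map  # dict of str -> str custom properties
stride, names = 32, None                            # fallbacks if the model carries no metadata
if 'stride' in meta:
    stride = int(meta['stride'])                    # values round-trip as strings
    names = ast.literal_eval(meta['names'])         # recover the original class-name container
print(stride, names)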
