OpenVINO metadata fix #7952

Merged 4 commits on May 24, 2022
export.py (2 changes: 1 addition & 1 deletion)
@@ -180,7 +180,7 @@ def export_openvino(model, file, half, prefix=colorstr('OpenVINO:')):

         cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f} --data_type {'FP16' if half else 'FP32'}"
         subprocess.check_output(cmd.split())  # export
-        with open(Path(f) / 'meta.yaml', 'w') as g:
+        with open(Path(f) / file.with_suffix('.yaml'), 'w') as g:
             yaml.dump({'stride': int(max(model.stride)), 'names': model.names}, g)  # add metadata.yaml

         LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
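With this change the exporter writes the metadata file under the model's own stem (e.g. yolov5s.yaml) rather than a fixed meta.yaml. A minimal sketch of what ends up in that file, assuming a hypothetical export with stride 32 and two class names:

```python
import yaml

# Hypothetical values standing in for int(max(model.stride)) and model.names
meta = {'stride': 32, 'names': ['person', 'bicycle']}
with open('yolov5s.yaml', 'w') as g:  # previously a fixed 'meta.yaml'
    yaml.dump(meta, g)
# yolov5s.yaml then contains:
#   names:
#   - person
#   - bicycle
#   stride: 32
```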
models/common.py (27 changes: 14 additions & 13 deletions)
@@ -323,9 +323,12 @@ def __init__(self, weights='yolov5s.pt', device=torch.device('cpu'), dnn=False,
         super().__init__()
         w = str(weights[0] if isinstance(weights, list) else weights)
         pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs = self.model_type(w)  # get backend
-        stride, names = 32, [f'class{i}' for i in range(1000)]  # assign defaults
         w = attempt_download(w)  # download if not local
         fp16 &= (pt or jit or onnx or engine) and device.type != 'cpu'  # FP16
+        stride, names = 32, [f'class{i}' for i in range(1000)]  # assign defaults
+        if data:  # assign class names (optional)
+            with open(data, errors='ignore') as f:
+                names = yaml.safe_load(f)['names']

         if pt:  # PyTorch
             model = attempt_load(weights if isinstance(weights, list) else w, map_location=device)
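Moving the stride/names defaults next to the optional --data lookup means every backend now starts from the same fallback before any backend-specific metadata is read. A standalone sketch of that lookup, assuming a dataset YAML with a 'names' list (the path and helper name below are hypothetical):

```python
import yaml

def resolve_names(data=None, n=1000):
    names = [f'class{i}' for i in range(n)]  # generic fallback names, as in the defaults above
    if data:  # e.g. a dataset YAML such as data/coco128.yaml with a 'names' entry
        with open(data, errors='ignore') as f:
            names = yaml.safe_load(f)['names']
    return names
```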
@@ -365,7 +368,9 @@ def __init__(self, weights='yolov5s.pt', device=torch.device('cpu'), dnn=False,
             network = ie.read_model(model=w, weights=Path(w).with_suffix('.bin'))
             executable_network = ie.compile_model(model=network, device_name="CPU")
             output_layer = next(iter(executable_network.outputs))
-            self._load_metadata(w.parent / 'meta.yaml')  # load metadata
+            meta = w.with_suffix('.yaml')
+            if meta.exists():
+                stride, names = self._load_metadata(meta)  # load metadata
         elif engine:  # TensorRT
             LOGGER.info(f'Loading {w} for TensorRT inference...')
             import tensorrt as trt  # https://developer.nvidia.com/nvidia-tensorrt-download
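On the inference side, the OpenVINO branch now derives the metadata path from the .xml itself and only overrides the defaults when that file exists, instead of unconditionally reading a meta.yaml from the parent directory. A minimal sketch of the same lookup outside the class, assuming a hypothetical yolov5s_openvino_model/ export directory:

```python
from pathlib import Path
import yaml

w = Path('yolov5s_openvino_model/yolov5s.xml')          # hypothetical model path
stride, names = 32, [f'class{i}' for i in range(1000)]  # defaults
meta = w.with_suffix('.yaml')                           # yolov5s.yaml next to the .xml
if meta.exists():
    d = yaml.safe_load(meta.read_text(errors='ignore'))
    stride, names = d['stride'], d['names']
```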
@@ -431,11 +436,7 @@ def wrap_frozen_graph(gd, inputs, outputs):
             output_details = interpreter.get_output_details()  # outputs
         elif tfjs:
             raise Exception('ERROR: YOLOv5 TF.js inference is not supported')

         self.__dict__.update(locals())  # assign all variables to self
-        if not hasattr(self, 'names') and data:  # assign class names (optional)
-            with open(data, errors='ignore') as f:
-                names = yaml.safe_load(f)['names']

     def forward(self, im, augment=False, visualize=False, val=False):
         # YOLOv5 MultiBackend inference
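The plain locals stride and names only become visible to forward() because of the self.__dict__.update(locals()) line above, which turns every local in __init__ into an instance attribute; that is why the new code can simply reassign the locals instead of calling setattr. A toy illustration of the idiom (class and values are hypothetical):

```python
class Toy:
    def __init__(self):
        stride, names = 32, ['person']
        self.__dict__.update(locals())  # every local, including self, becomes an attribute

t = Toy()
print(t.stride, t.names)  # 32 ['person']
```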
@@ -495,13 +496,6 @@ def forward(self, im, augment=False, visualize=False, val=False):
             y = torch.tensor(y, device=self.device)
         return (y, []) if val else y

-    def _load_metadata(self, f='path/to/meta.yaml'):
-        # Load metadata from meta.yaml if it exists
-        if Path(f).is_file():
-            with open(f, errors='ignore') as f:
-                for k, v in yaml.safe_load(f).items():
-                    setattr(self, k, v)  # assign stride, names

     def warmup(self, imgsz=(1, 3, 640, 640)):
         # Warmup model by running inference once
         warmup_types = self.pt, self.jit, self.onnx, self.engine, self.saved_model, self.pb
@@ -522,6 +516,13 @@ def model_type(p='path/to/model.pt'):
         tflite &= not edgetpu  # *.tflite
         return pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs

+    @staticmethod
+    def _load_metadata(f='path/to/meta.yaml'):
+        # Load metadata from meta.yaml if it exists
+        with open(f, errors='ignore') as f:
+            d = yaml.safe_load(f)
+        return d['stride'], d['names']  # assign stride, names


class AutoShape(nn.Module):
# YOLOv5 input-robust model wrapper for passing cv2/np/PIL/torch inputs. Includes preprocessing, inference and NMS
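_load_metadata is now a @staticmethod that returns (stride, names) for the caller to assign, rather than the removed instance method that wrote whatever keys the YAML held onto self via setattr. A short usage sketch, assuming the enclosing class is YOLOv5's DetectMultiBackend and a metadata file like the one written by export_openvino exists:

```python
from pathlib import Path

from models.common import DetectMultiBackend  # assumes the YOLOv5 repo layout

meta = Path('yolov5s_openvino_model/yolov5s.yaml')  # hypothetical metadata path
if meta.exists():
    stride, names = DetectMultiBackend._load_metadata(meta)
```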