From ac5546261784daa81bcec4a2bc86a499555c94cf Mon Sep 17 00:00:00 2001
From: Ashish
Date: Tue, 23 Aug 2022 21:33:29 -0700
Subject: [PATCH] ⚙️Enable generic exporting of a trained model to ONNX or OpenVINO IR (#509)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Moving export and convert functionality to a more generic implementation where the user can get just the ONNX model or the ONNX model plus the OpenVINO IR format
* Updating CLI to support generic exporting to ONNX or OpenVINO
* Changed logging statement
* Updated config files to use new export mode format
* Test case for exporting model to ONNX and OpenVINO IR format
* Updated benchmarking convert
* Fixed import
* Fixed test cases for exporting model
* Updated export_convert API to use new export mode
* Added a more informative message to the user on failed test cases
* Changed default in config to null and added a comment listing the options users can select
* Reduced code duplication
* Reduced code duplication in CLI
* Single path for export_convert
* Fixed Windows paths
---
 anomalib/deploy/optimize.py                   | 29 +++++-----
 anomalib/models/cflow/config.yaml             |  3 +
 anomalib/models/dfkde/config.yaml             |  2 +
 anomalib/models/dfm/config.yaml               |  2 +
 anomalib/models/draem/config.yaml             |  4 +-
 anomalib/models/fastflow/config.yaml          |  3 +
 anomalib/models/ganomaly/config.yaml          |  3 +-
 anomalib/models/padim/config.yaml             |  3 +-
 anomalib/models/patchcore/config.yaml         |  3 +
 .../models/reverse_distillation/config.yaml   |  3 +-
 anomalib/models/stfpm/config.yaml             |  4 +-
 anomalib/utils/callbacks/__init__.py          | 14 +++--
 .../callbacks/{openvino.py => export.py}      | 10 ++--
 anomalib/utils/cli/cli.py                     | 21 ++---
 tests/pre_merge/deploy/test_inferencer.py     |  4 +-
 .../__init__.py                               |  0
 .../dummy_config.yml                          |  3 +-
 .../dummy_lightning_model.py                  |  0
 .../callbacks/export_callback/test_export.py  | 55 +++++++++++++++++++
 .../openvino_callback/test_openvino.py        | 40 --------------
 tools/benchmarking/utils/convert.py           |  3 +-
 21 files changed, 119 insertions(+), 90 deletions(-)
 rename anomalib/utils/callbacks/{openvino.py => export.py} (87%)
 rename tests/pre_merge/utils/callbacks/{openvino_callback => export_callback}/__init__.py (100%)
 rename tests/pre_merge/utils/callbacks/{openvino_callback => export_callback}/dummy_config.yml (92%)
 rename tests/pre_merge/utils/callbacks/{openvino_callback => export_callback}/dummy_lightning_model.py (100%)
 create mode 100644 tests/pre_merge/utils/callbacks/export_callback/test_export.py
 delete mode 100644 tests/pre_merge/utils/callbacks/openvino_callback/test_openvino.py

diff --git a/anomalib/deploy/optimize.py b/anomalib/deploy/optimize.py
index a239d5b176..a91fcc08ca 100644
--- a/anomalib/deploy/optimize.py
+++ b/anomalib/deploy/optimize.py
@@ -6,7 +6,7 @@
 import json
 import os
 from pathlib import Path
-from typing import Dict, List, Tuple, Union
+from typing import Dict, List, Optional, Tuple, Union
 
 import numpy as np
 import torch
@@ -44,18 +44,19 @@ def get_model_metadata(model: AnomalyModule) -> Dict[str, Tensor]:
 def export_convert(
     model: AnomalyModule,
     input_size: Union[List[int], Tuple[int, int]],
-    onnx_path: Union[str, Path],
-    export_path: Union[str, Path],
+    export_mode: str,
+    export_path: Optional[Union[str, Path]] = None,
 ):
     """Export the model to onnx format and convert to OpenVINO IR.
 
     Args:
         model (AnomalyModule): Model to convert.
         input_size (Union[List[int], Tuple[int, int]]): Image size used as the input for onnx converter.
- onnx_path (Union[str, Path]): Path to output onnx model. export_path (Union[str, Path]): Path to exported OpenVINO IR. + export_mode (str): Mode to export onnx or openvino """ height, width = input_size + onnx_path = os.path.join(str(export_path), "model.onnx") torch.onnx.export( model.model, torch.zeros((1, 3, height, width)).to(model.device), @@ -64,12 +65,14 @@ def export_convert( input_names=["input"], output_names=["output"], ) - optimize_command = "mo --input_model " + str(onnx_path) + " --output_dir " + str(export_path) - os.system(optimize_command) - with open(Path(export_path) / "meta_data.json", "w", encoding="utf-8") as metadata_file: - meta_data = get_model_metadata(model) - # Convert metadata from torch - for key, value in meta_data.items(): - if isinstance(value, Tensor): - meta_data[key] = value.numpy().tolist() - json.dump(meta_data, metadata_file, ensure_ascii=False, indent=4) + if export_mode == "openvino": + export_path = os.path.join(str(export_path), "openvino") + optimize_command = "mo --input_model " + str(onnx_path) + " --output_dir " + str(export_path) + os.system(optimize_command) + with open(Path(export_path) / "meta_data.json", "w", encoding="utf-8") as metadata_file: + meta_data = get_model_metadata(model) + # Convert metadata from torch + for key, value in meta_data.items(): + if isinstance(value, Tensor): + meta_data[key] = value.numpy().tolist() + json.dump(meta_data, metadata_file, ensure_ascii=False, indent=4) diff --git a/anomalib/models/cflow/config.yaml b/anomalib/models/cflow/config.yaml index a5d471e3cf..257ed11915 100644 --- a/anomalib/models/cflow/config.yaml +++ b/anomalib/models/cflow/config.yaml @@ -62,6 +62,9 @@ logging: logger: [] # options: [tensorboard, wandb, csv] or combinations. log_graph: false # Logs the model graph to respective logger. +optimization: + export_mode: null #options: onnx, openvino + # PL Trainer Args. Don't add extra parameter here. trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/anomalib/models/dfkde/config.yaml b/anomalib/models/dfkde/config.yaml index 5c384e8bb1..68e5d80bd7 100644 --- a/anomalib/models/dfkde/config.yaml +++ b/anomalib/models/dfkde/config.yaml @@ -50,6 +50,8 @@ logging: logger: [] # options: [tensorboard, wandb, csv] or combinations. log_graph: false # Logs the model graph to respective logger. +optimization: + export_mode: null #options: onnx, openvino # PL Trainer Args. Don't add extra parameter here. trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/anomalib/models/dfm/config.yaml b/anomalib/models/dfm/config.yaml index 3cfb0d8345..0d3dbfc2a0 100755 --- a/anomalib/models/dfm/config.yaml +++ b/anomalib/models/dfm/config.yaml @@ -50,6 +50,8 @@ logging: logger: [] # options: [tensorboard, wandb, csv] or combinations. log_graph: false # Logs the model graph to respective logger. +optimization: + export_mode: null #options: onnx, openvino # PL Trainer Args. Don't add extra parameter here. trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/anomalib/models/draem/config.yaml b/anomalib/models/draem/config.yaml index 96be77daae..4dffb12bc9 100644 --- a/anomalib/models/draem/config.yaml +++ b/anomalib/models/draem/config.yaml @@ -60,9 +60,7 @@ logging: log_graph: false # Logs the model graph to respective logger. optimization: - openvino: - apply: false - + export_mode: null #options: onnx, openvino # PL Trainer Args. Don't add extra parameter here. 
trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/anomalib/models/fastflow/config.yaml b/anomalib/models/fastflow/config.yaml index 5be7755bb5..6d64da88b6 100644 --- a/anomalib/models/fastflow/config.yaml +++ b/anomalib/models/fastflow/config.yaml @@ -62,6 +62,9 @@ logging: logger: [] # options: [tensorboard, wandb, csv] or combinations. log_graph: false # Logs the model graph to respective logger. +optimization: + export_mode: null #options: onnx, openvino + # PL Trainer Args. Don't add extra parameter here. trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/anomalib/models/ganomaly/config.yaml b/anomalib/models/ganomaly/config.yaml index a9712dd5d6..1fd3d8019c 100644 --- a/anomalib/models/ganomaly/config.yaml +++ b/anomalib/models/ganomaly/config.yaml @@ -63,8 +63,7 @@ logging: log_graph: false # Logs the model graph to respective logger. optimization: - openvino: - apply: false + export_mode: "" # PL Trainer Args. Don't add extra parameter here. trainer: diff --git a/anomalib/models/padim/config.yaml b/anomalib/models/padim/config.yaml index 75db505ab0..83e2762237 100644 --- a/anomalib/models/padim/config.yaml +++ b/anomalib/models/padim/config.yaml @@ -58,8 +58,7 @@ logging: log_graph: false # Logs the model graph to respective logger. optimization: - openvino: - apply: false + export_mode: null #options: onnx, openvino # PL Trainer Args. Don't add extra parameter here. trainer: diff --git a/anomalib/models/patchcore/config.yaml b/anomalib/models/patchcore/config.yaml index 26eec0ea9f..f7d37e24f7 100644 --- a/anomalib/models/patchcore/config.yaml +++ b/anomalib/models/patchcore/config.yaml @@ -58,6 +58,9 @@ logging: logger: [] # options: [tensorboard, wandb, csv] or combinations. log_graph: false # Logs the model graph to respective logger. +optimization: + export_mode: null # options: onnx, openvino + # PL Trainer Args. Don't add extra parameter here. trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/anomalib/models/reverse_distillation/config.yaml b/anomalib/models/reverse_distillation/config.yaml index 4badb15d14..f7b741c3da 100644 --- a/anomalib/models/reverse_distillation/config.yaml +++ b/anomalib/models/reverse_distillation/config.yaml @@ -67,8 +67,7 @@ logging: log_graph: false # Logs the model graph to respective logger. optimization: - openvino: - apply: false + export_mode: null #options: onnx, openvino # PL Trainer Args. Don't add extra parameter here. trainer: diff --git a/anomalib/models/stfpm/config.yaml b/anomalib/models/stfpm/config.yaml index 3bdf915188..48ed3acb75 100644 --- a/anomalib/models/stfpm/config.yaml +++ b/anomalib/models/stfpm/config.yaml @@ -65,9 +65,7 @@ logging: log_graph: false # Logs the model graph to respective logger. optimization: - openvino: - apply: false - + export_mode: null #options: onnx, openvino # PL Trainer Args. Don't add extra parameter here. 
trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/anomalib/utils/callbacks/__init__.py b/anomalib/utils/callbacks/__init__.py index 1a7b156f3c..790862e512 100644 --- a/anomalib/utils/callbacks/__init__.py +++ b/anomalib/utils/callbacks/__init__.py @@ -114,18 +114,22 @@ def get_callbacks(config: Union[ListConfig, DictConfig]) -> List[Callback]: export_dir=os.path.join(config.project.path, "compressed"), ) ) - if "openvino" in config.optimization and config.optimization.openvino.apply: - from .openvino import ( # pylint: disable=import-outside-toplevel - OpenVINOCallback, + if config.optimization.export_mode is not None: + from .export import ( # pylint: disable=import-outside-toplevel + ExportCallback, ) + logger.info("Setting model export to %s", config.optimization.export_mode) callbacks.append( - OpenVINOCallback( + ExportCallback( input_size=config.model.input_size, - dirpath=os.path.join(config.project.path, "openvino"), + dirpath=config.project.path, filename="model", + export_mode=config.optimization.export_mode, ) ) + else: + warnings.warn(f"Export option: {config.optimization.export_mode} not found. Defaulting to no model export") # Add callback to log graph to loggers if config.logging.log_graph not in [None, False]: diff --git a/anomalib/utils/callbacks/openvino.py b/anomalib/utils/callbacks/export.py similarity index 87% rename from anomalib/utils/callbacks/openvino.py rename to anomalib/utils/callbacks/export.py index 8b2ff29338..e859d50f59 100644 --- a/anomalib/utils/callbacks/openvino.py +++ b/anomalib/utils/callbacks/export.py @@ -17,7 +17,7 @@ @CALLBACK_REGISTRY -class OpenVINOCallback(Callback): +class ExportCallback(Callback): """Callback to compresses a trained model. Model is first exported to ``.onnx`` format, and then converted to OpenVINO IR. @@ -28,10 +28,11 @@ class OpenVINOCallback(Callback): filename (str): Name of output model """ - def __init__(self, input_size: Tuple[int, int], dirpath: str, filename: str): + def __init__(self, input_size: Tuple[int, int], dirpath: str, filename: str, export_mode: str): self.input_size = input_size self.dirpath = dirpath self.filename = filename + self.export_mode = export_mode def on_train_end(self, trainer, pl_module: AnomalyModule) -> None: # pylint: disable=W0613 """Call when the train ends. @@ -39,12 +40,11 @@ def on_train_end(self, trainer, pl_module: AnomalyModule) -> None: # pylint: di Converts the model to ``onnx`` format and then calls OpenVINO's model optimizer to get the ``.xml`` and ``.bin`` IR files. 
""" - logger.info("Exporting the model to OpenVINO") + logger.info("Exporting the model") os.makedirs(self.dirpath, exist_ok=True) - onnx_path = os.path.join(self.dirpath, self.filename + ".onnx") export_convert( model=pl_module, input_size=self.input_size, - onnx_path=onnx_path, export_path=self.dirpath, + export_mode=self.export_mode, ) diff --git a/anomalib/utils/cli/cli.py b/anomalib/utils/cli/cli.py index f2f24bee5a..2a498d9af0 100644 --- a/anomalib/utils/cli/cli.py +++ b/anomalib/utils/cli/cli.py @@ -5,6 +5,7 @@ import logging import os +import warnings from datetime import datetime from importlib import import_module from pathlib import Path @@ -91,7 +92,9 @@ def add_arguments_to_parser(self, parser: LightningArgumentParser) -> None: """ # TODO: https://github.com/openvinotoolkit/anomalib/issues/19 # TODO: https://github.com/openvinotoolkit/anomalib/issues/20 - parser.add_argument("--openvino", type=bool, default=False, help="Export to ONNX and OpenVINO IR format.") + parser.add_argument( + "--export_mode", type=str, default="", help="Select export mode to ONNX or OpenVINO IR format." + ) parser.add_argument("--nncf", type=str, help="Path to NNCF config to enable quantized training.") # ADD CUSTOM CALLBACKS TO CONFIG @@ -213,23 +216,23 @@ def __set_callbacks(self) -> None: add_visualizer_callback(callbacks, config) self.config[subcommand].visualization = config.visualization - # TODO: https://github.com/openvinotoolkit/anomalib/issues/19 - if config.openvino and config.nncf: - raise ValueError("OpenVINO and NNCF cannot be set simultaneously.") - # Export to OpenVINO - if config.openvino: - from anomalib.utils.callbacks.openvino import ( # pylint: disable=import-outside-toplevel - OpenVINOCallback, + if config.export_mode is not None: + from anomalib.utils.callbacks.export import ( # pylint: disable=import-outside-toplevel + ExportCallback, ) + logger.info("Setting model export to %s", config.export_mode) callbacks.append( - OpenVINOCallback( + ExportCallback( input_size=config.data.init_args.image_size, dirpath=os.path.join(config.trainer.default_root_dir, "compressed"), filename="model", + export_mode=config.export_mode, ) ) + else: + warnings.warn(f"Export option: {config.export_mode} not found. 
Defaulting to no model export") if config.nncf: if os.path.isfile(config.nncf) and config.nncf.endswith(".yaml"): nncf_module = import_module("anomalib.core.callbacks.nncf_callback") diff --git a/tests/pre_merge/deploy/test_inferencer.py b/tests/pre_merge/deploy/test_inferencer.py index 6f51f2a821..fbde1211c3 100644 --- a/tests/pre_merge/deploy/test_inferencer.py +++ b/tests/pre_merge/deploy/test_inferencer.py @@ -105,13 +105,13 @@ def test_openvino_inference(self, model_name: str, category: str = "shapes", pat export_convert( model=model, input_size=model_config.dataset.image_size, - onnx_path=export_path / "model.onnx", export_path=export_path, + export_mode="openvino", ) # Test OpenVINO inferencer openvino_inferencer = OpenVINOInferencer( - model_config, export_path / "model.xml", export_path / "meta_data.json" + model_config, export_path / "openvino/model.xml", export_path / "openvino/meta_data.json" ) openvino_dataloader = MockImageLoader(model_config.dataset.image_size, total_count=1) for image in openvino_dataloader(): diff --git a/tests/pre_merge/utils/callbacks/openvino_callback/__init__.py b/tests/pre_merge/utils/callbacks/export_callback/__init__.py similarity index 100% rename from tests/pre_merge/utils/callbacks/openvino_callback/__init__.py rename to tests/pre_merge/utils/callbacks/export_callback/__init__.py diff --git a/tests/pre_merge/utils/callbacks/openvino_callback/dummy_config.yml b/tests/pre_merge/utils/callbacks/export_callback/dummy_config.yml similarity index 92% rename from tests/pre_merge/utils/callbacks/openvino_callback/dummy_config.yml rename to tests/pre_merge/utils/callbacks/export_callback/dummy_config.yml index 91cf90ea2d..54af912c64 100644 --- a/tests/pre_merge/utils/callbacks/openvino_callback/dummy_config.yml +++ b/tests/pre_merge/utils/callbacks/export_callback/dummy_config.yml @@ -18,8 +18,7 @@ project: path: ./results optimization: - openvino: - apply: true + export_mode: "openvino" trainer: accelerator: auto # <"cpu", "gpu", "tpu", "ipu", "hpu", "auto"> diff --git a/tests/pre_merge/utils/callbacks/openvino_callback/dummy_lightning_model.py b/tests/pre_merge/utils/callbacks/export_callback/dummy_lightning_model.py similarity index 100% rename from tests/pre_merge/utils/callbacks/openvino_callback/dummy_lightning_model.py rename to tests/pre_merge/utils/callbacks/export_callback/dummy_lightning_model.py diff --git a/tests/pre_merge/utils/callbacks/export_callback/test_export.py b/tests/pre_merge/utils/callbacks/export_callback/test_export.py new file mode 100644 index 0000000000..3e8efa3e5d --- /dev/null +++ b/tests/pre_merge/utils/callbacks/export_callback/test_export.py @@ -0,0 +1,55 @@ +import os +import tempfile + +import pytest +import pytorch_lightning as pl +from pytorch_lightning.callbacks.early_stopping import EarlyStopping + +from anomalib.utils.callbacks.export import ExportCallback +from tests.helpers.config import get_test_configurable_parameters +from tests.pre_merge.utils.callbacks.export_callback.dummy_lightning_model import ( + DummyLightningModule, + FakeDataModule, +) + + +@pytest.mark.parametrize( + "export_mode", + ["openvino", "onnx"], +) +def test_export_model_callback(export_mode): + """Tests if an optimized model is created.""" + + config = get_test_configurable_parameters( + config_path="tests/pre_merge/utils/callbacks/export_callback/dummy_config.yml" + ) + + with tempfile.TemporaryDirectory() as tmp_dir: + config.project.path = tmp_dir + model = DummyLightningModule(hparams=config) + model.callbacks = [ + ExportCallback( + 
input_size=config.model.input_size, + dirpath=os.path.join(tmp_dir), + filename="model", + export_mode=export_mode, + ), + EarlyStopping(monitor=config.model.metric), + ] + datamodule = FakeDataModule() + trainer = pl.Trainer( + gpus=1, + callbacks=model.callbacks, + logger=False, + checkpoint_callback=False, + max_epochs=1, + val_check_interval=3, + ) + trainer.fit(model, datamodule=datamodule) + + if "openvino" in export_mode: + assert os.path.exists(os.path.join(tmp_dir, "openvino/model.bin")), "Failed to generate OpenVINO model" + elif "onnx" in export_mode: + assert os.path.exists(os.path.join(tmp_dir, "model.onnx")), "Failed to generate ONNX model" + else: + raise ValueError(f"Unknown export_mode {export_mode}. Supported modes: onnx or openvino.") diff --git a/tests/pre_merge/utils/callbacks/openvino_callback/test_openvino.py b/tests/pre_merge/utils/callbacks/openvino_callback/test_openvino.py deleted file mode 100644 index f15ccc8773..0000000000 --- a/tests/pre_merge/utils/callbacks/openvino_callback/test_openvino.py +++ /dev/null @@ -1,40 +0,0 @@ -import os -import tempfile - -import pytorch_lightning as pl -from pytorch_lightning.callbacks.early_stopping import EarlyStopping - -from anomalib.utils.callbacks.openvino import OpenVINOCallback -from tests.helpers.config import get_test_configurable_parameters -from tests.pre_merge.utils.callbacks.openvino_callback.dummy_lightning_model import ( - DummyLightningModule, - FakeDataModule, -) - - -def test_openvino_model_callback(): - """Tests if an optimized model is created.""" - - config = get_test_configurable_parameters( - config_path="tests/pre_merge/utils/callbacks/openvino_callback/dummy_config.yml" - ) - - with tempfile.TemporaryDirectory() as tmp_dir: - config.project.path = tmp_dir - model = DummyLightningModule(hparams=config) - model.callbacks = [ - OpenVINOCallback(input_size=config.model.input_size, dirpath=os.path.join(tmp_dir), filename="model"), - EarlyStopping(monitor=config.model.metric), - ] - datamodule = FakeDataModule() - trainer = pl.Trainer( - gpus=1, - callbacks=model.callbacks, - logger=False, - checkpoint_callback=False, - max_epochs=1, - val_check_interval=3, - ) - trainer.fit(model, datamodule=datamodule) - - assert os.path.exists(os.path.join(tmp_dir, "model.bin")), "Failed to generate OpenVINO model" diff --git a/tools/benchmarking/utils/convert.py b/tools/benchmarking/utils/convert.py index 032e87dc89..37040eefe7 100644 --- a/tools/benchmarking/utils/convert.py +++ b/tools/benchmarking/utils/convert.py @@ -13,5 +13,4 @@ def convert_to_openvino(model: AnomalyModule, export_path: Union[Path, str], input_size: List[int]): """Convert the trained model to OpenVINO.""" export_path = export_path if isinstance(export_path, Path) else Path(export_path) - onnx_path = export_path / "model.onnx" - export_convert(model, input_size, onnx_path, export_path) + export_convert(model, input_size, export_path=export_path, export_mode="openvino")
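
Usage sketch (illustrative, not part of the patch): after this change a single export_mode setting drives model export. Config-based runs set optimization.export_mode to onnx or openvino (the Lightning CLI exposes the same choice via --export_mode), which attaches ExportCallback so the export runs at the end of training; the openvino mode additionally shells out to OpenVINO's mo tool, so the OpenVINO development tools must be available on PATH. The snippet below sketches the programmatic path through export_convert; model is assumed to be an already trained AnomalyModule, config its loaded configuration, and the output directory is a placeholder.

    from pathlib import Path

    from anomalib.deploy.optimize import export_convert

    export_root = Path("results/export")  # placeholder output directory
    export_root.mkdir(parents=True, exist_ok=True)

    # export_mode="onnx" writes <export_root>/model.onnx and nothing else.
    export_convert(
        model=model,  # trained AnomalyModule (assumed to exist)
        input_size=config.model.input_size,  # e.g. (256, 256)
        export_mode="onnx",
        export_path=export_root,
    )

    # export_mode="openvino" exports the same ONNX model and then runs the
    # model optimizer, writing model.xml, model.bin and meta_data.json under
    # <export_root>/openvino/.
    export_convert(
        model=model,
        input_size=config.model.input_size,
        export_mode="openvino",
        export_path=export_root,
    )

Note that model.onnx always lands directly under export_path, while the IR files and metadata go to the openvino/ subdirectory, which matches the updated paths in test_inferencer.py and test_export.py.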