Create meta_data.json with ONNX export as well as OpenVINO export (#636)
* When exporting, create meta_data.json for both ONNX and OpenVINO export (used in EII deployment)

This change was developed and tested with @pmudgal-Intel

* Updated export to include meta_data.json in both ONNX and OpenVINO export modes.

Changes developed and tested by @pmudgal-Intel and @calebmm

* code formatting

* code formatting

* Modified export_mode while exporting meta_data.json

Co-authored-by: pmudgal-Intel <priyanka.mudgal@intel.com>
calebmm and pmudgal-Intel committed Oct 19, 2022
1 parent dacf3f4 commit 84a8e06
Showing 1 changed file with 9 additions and 9 deletions.
18 changes: 9 additions & 9 deletions anomalib/deploy/optimize.py
@@ -47,7 +47,7 @@ def export_convert(
     export_mode: str,
     export_path: Optional[Union[str, Path]] = None,
 ):
-    """Export the model to onnx format and convert to OpenVINO IR.
+    """Export the model to onnx format and convert to OpenVINO IR. Metadata.json is generated regardless of export mode.
 
     Args:
         model (AnomalyModule): Model to convert.
@@ -65,14 +65,14 @@ def export_convert(
         input_names=["input"],
         output_names=["output"],
     )
+    export_path = os.path.join(str(export_path), export_mode)
     if export_mode == "openvino":
-        export_path = os.path.join(str(export_path), "openvino")
         optimize_command = "mo --input_model " + str(onnx_path) + " --output_dir " + str(export_path)
         assert os.system(optimize_command) == 0, "OpenVINO conversion failed"
-        with open(Path(export_path) / "meta_data.json", "w", encoding="utf-8") as metadata_file:
-            meta_data = get_model_metadata(model)
-            # Convert metadata from torch
-            for key, value in meta_data.items():
-                if isinstance(value, Tensor):
-                    meta_data[key] = value.numpy().tolist()
-            json.dump(meta_data, metadata_file, ensure_ascii=False, indent=4)
+    with open(Path(export_path) / "meta_data.json", "w", encoding="utf-8") as metadata_file:
+        meta_data = get_model_metadata(model)
+        # Convert metadata from torch
+        for key, value in meta_data.items():
+            if isinstance(value, Tensor):
+                meta_data[key] = value.numpy().tolist()
+        json.dump(meta_data, metadata_file, ensure_ascii=False, indent=4)
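
For context, the practical effect of this change is that a downstream consumer (for example an EII deployment script) finds the same meta_data.json under <export_path>/<export_mode>/ whether the model was exported to ONNX or to OpenVINO IR. The snippet below is a minimal, hypothetical sketch of reading that file back; the concrete path and the conversion of list values back to tensors are illustrative assumptions, not part of this commit.

import json
from pathlib import Path

import torch

# Hypothetical location: <export_path>/<export_mode>/meta_data.json as written by export_convert.
metadata_path = Path("results/export/onnx/meta_data.json")

with metadata_path.open(encoding="utf-8") as metadata_file:
    meta_data = json.load(metadata_file)

# Tensor-valued entries (the output of get_model_metadata) were serialised as plain lists;
# convert them back to tensors where the deployment code expects tensors.
meta_data = {
    key: torch.tensor(value) if isinstance(value, list) else value
    for key, value in meta_data.items()
}
print(sorted(meta_data.keys()))

Writing the file in every export mode keeps the ONNX and OpenVINO deployment paths symmetric: both can read the values returned by get_model_metadata(model) from the same JSON layout, instead of only the OpenVINO path having access to them.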
