Add --half support for OpenVINO exports #7615

Merged 3 commits on Apr 28, 2022. Changes shown below are from 1 commit.
export.py: 10 changes (5 additions & 5 deletions)
@@ -168,7 +168,7 @@ def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorstr('ONNX:')):
LOGGER.info(f'{prefix} export failure: {e}')


-def export_openvino(model, im, file, prefix=colorstr('OpenVINO:')):
+def export_openvino(model, im, file, half, prefix=colorstr('OpenVINO:')):
# YOLOv5 OpenVINO export
try:
check_requirements(('openvino-dev',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/
@@ -177,7 +177,7 @@ def export_openvino(model, im, file, prefix=colorstr('OpenVINO:')):
LOGGER.info(f'\n{prefix} starting export with openvino {ie.__version__}...')
f = str(file).replace('.pt', '_openvino_model' + os.sep)

cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f}"
cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f} {'--data_type FP16' if half else ''}"
subprocess.check_output(cmd, shell=True)

LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
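
Note: with the new half argument threaded through, the Model Optimizer command built above gains --data_type FP16. A minimal sketch of what export_openvino() now runs, assuming hypothetical yolov5s weights and an ONNX file that has already been exported:

    from pathlib import Path
    import subprocess

    file = Path('yolov5s.pt')                          # assumed weights path
    half = True                                        # user passed --half
    f = str(file).replace('.pt', '_openvino_model/')   # output dir: yolov5s_openvino_model/
    cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f} {'--data_type FP16' if half else ''}"
    # resolves to: mo --input_model yolov5s.onnx --output_dir yolov5s_openvino_model/ --data_type FP16
    subprocess.check_output(cmd, shell=True)           # requires openvino-dev (mo) on PATH
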
@@ -477,7 +477,7 @@ def run(
# Load PyTorch model
device = select_device(device)
if half:
-assert device.type != 'cpu' or coreml, '--half only compatible with GPU export, i.e. use --device 0'
+assert device.type != 'cpu' or coreml or xml, '--half only compatible with GPU and OpenVINO export, i.e. use --device 0'
model = attempt_load(weights, map_location=device, inplace=True, fuse=True) # load FP32 model
nc, names = model.nc, model.names # number of classes, class names

@@ -491,7 +491,7 @@ def run(
im = torch.zeros(batch_size, 3, *imgsz).to(device) # image size(1,3,320,192) BCHW iDetection

# Update model
-if half and not coreml:
+if half and not (coreml or xml):
im, model = im.half(), model.half() # to FP16
model.train() if train else model.eval() # training mode = no Detect() layer grid construction
for k, m in model.named_modules():
@@ -515,7 +515,7 @@ def run(
if onnx or xml: # OpenVINO requires ONNX
f[2] = export_onnx(model, im, file, opset, train, dynamic, simplify)
if xml: # OpenVINO
-f[3] = export_openvino(model, im, file)
+f[3] = export_openvino(model, im, file, half)
if coreml:
_, f[4] = export_coreml(model, im, file, int8, half)

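With this change, an FP16 OpenVINO IR can be requested directly from the exporter. A usage sketch (the weights name is illustrative; --weights, --include and --half are existing export.py arguments):

    python export.py --weights yolov5s.pt --include openvino --half

Note that the PyTorch model and the ONNX intermediate remain FP32, since the updated "if half and not (coreml or xml)" branch skips the .half() conversion for OpenVINO; the FP16 conversion is performed by the OpenVINO Model Optimizer via --data_type FP16.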