diff --git a/export.py b/export.py
index 3447fc6ed1ab..c56a0a99a635 100644
--- a/export.py
+++ b/export.py
@@ -17,7 +17,7 @@
 TensorFlow.js | `tfjs` | yolov5s_web_model/
 
 Usage:
-    $ python path/to/export.py --weights yolov5s.pt --include torchscript onnx coreml openvino saved_model tflite tfjs
+    $ python path/to/export.py --weights yolov5s.pt --include torchscript onnx openvino engine coreml tflite ...
 
 Inference:
     $ python path/to/detect.py --weights yolov5s.pt  # PyTorch
@@ -179,7 +179,7 @@ def export_engine(model, im, file, train, half, simplify, workspace=4, verbose=F
             export_onnx(model, im, file, 12, train, False, simplify)  # opset 12
             model.model[-1].anchor_grid = grid
         else:  # TensorRT >= 8
-            check_version(trt.__version__, '7.0.0', hard=True)  # require tensorrt>=8.0.0
+            check_version(trt.__version__, '8.0.0', hard=True)  # require tensorrt>=8.0.0
             export_onnx(model, im, file, 13, train, False, simplify)  # opset 13
         onnx = file.with_suffix('.onnx')
         assert onnx.exists(), f'failed to export ONNX file: {onnx}'
@@ -308,7 +308,7 @@ def export_tflite(keras_model, im, file, int8, data, ncalib, prefix=colorstr('Te
 def export_edgetpu(keras_model, im, file, prefix=colorstr('Edge TPU:')):
     # YOLOv5 Edge TPU export https://coral.ai/docs/edgetpu/models-intro/
     try:
-        cmd = 'edgetpu_compiler --version'
+        cmd = 'edgetpu_compiler --version'  # install https://coral.ai/docs/edgetpu/compiler/
         out = subprocess.run(cmd, shell=True, capture_output=True, check=True)
         ver = out.stdout.decode().split()[-1]
         LOGGER.info(f'\n{prefix} starting export with Edge TPU compiler {ver}...')
diff --git a/models/common.py b/models/common.py
index d8d5423a16e0..b055cb68a439 100644
--- a/models/common.py
+++ b/models/common.py
@@ -376,8 +376,8 @@ def wrap_frozen_graph(gd, inputs, outputs):
         elif tflite:  # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python
             if 'edgetpu' in w.lower():  # Edge TPU
                 LOGGER.info(f'Loading {w} for TensorFlow Lite Edge TPU inference...')
-                import tflite_runtime.interpreter as tfli
-                delegate = {'Linux': 'libedgetpu.so.1',  # install https://coral.ai/software/#edgetpu-runtime
+                import tflite_runtime.interpreter as tfli  # install https://coral.ai/software/#edgetpu-runtime
+                delegate = {'Linux': 'libedgetpu.so.1',
                             'Darwin': 'libedgetpu.1.dylib',
                             'Windows': 'edgetpu.dll'}[platform.system()]
                 interpreter = tfli.Interpreter(model_path=w, experimental_delegates=[tfli.load_delegate(delegate)])
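
Note on the TensorRT gate corrected above: `check_version` (YOLOv5's `utils.general` helper) compares the installed version against a minimum and, with `hard=True`, raises if the requirement is not met, so the `else` branch now genuinely enforces TensorRT >= 8 before exporting with ONNX opset 13. Below is a minimal standalone sketch of that gating pattern; the helper name `require_min_version`, the `packaging` dependency, and the sample version string are illustrative assumptions, not part of this patch or the YOLOv5 API.

# Standalone sketch of the version gate, assuming the `packaging` package is available.
from packaging import version


def require_min_version(current, minimum, name='tensorrt', hard=True):
    # True if current >= minimum; with hard=True an unmet requirement raises instead.
    ok = version.parse(current) >= version.parse(minimum)
    if hard:
        assert ok, f'{name}>={minimum} required, found {name}=={current}'
    return ok


trt_version = '8.2.3'  # illustrative value; export.py reads trt.__version__
if trt_version.startswith('7'):  # TensorRT 7: export ONNX with opset 12
    opset = 12
else:  # TensorRT >= 8: enforce the minimum, then use opset 13
    require_min_version(trt_version, '8.0.0')
    opset = 13
print(f'TensorRT {trt_version} -> ONNX opset {opset}')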