From 77de6150d50649b6adec34086ada2cb683113d22 Mon Sep 17 00:00:00 2001
From: Nam Vu
Date: Thu, 4 Nov 2021 05:33:25 -0500
Subject: [PATCH] Handle edgetpu model inference (#5372)

* Handle edgetpu model inference

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Cleanup

Rename `tflite_runtime.interpreter as tflite` to `tflite_runtime.interpreter as tflri` to avoid conflict with existing `tflite` boolean

Co-authored-by: Nam Vu
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Glenn Jocher
---
 detect.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/detect.py b/detect.py
index a8d7e0b6a8c9..eb29022fe80b 100644
--- a/detect.py
+++ b/detect.py
@@ -8,6 +8,7 @@
 
 import argparse
 import os
+import platform
 import sys
 from pathlib import Path
 
@@ -107,7 +108,14 @@ def wrap_frozen_graph(gd, inputs, outputs):
         elif saved_model:
             model = tf.keras.models.load_model(w)
         elif tflite:
-            interpreter = tf.lite.Interpreter(model_path=w)  # load TFLite model
+            if "edgetpu" in w:  # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python
+                import tflite_runtime.interpreter as tflri
+                delegate = {'Linux': 'libedgetpu.so.1',  # install libedgetpu https://coral.ai/software/#edgetpu-runtime
+                            'Darwin': 'libedgetpu.1.dylib',
+                            'Windows': 'edgetpu.dll'}[platform.system()]
+                interpreter = tflri.Interpreter(model_path=w, experimental_delegates=[tflri.load_delegate(delegate)])
+            else:
+                interpreter = tf.lite.Interpreter(model_path=w)  # load TFLite model
             interpreter.allocate_tensors()  # allocate
             input_details = interpreter.get_input_details()  # inputs
             output_details = interpreter.get_output_details()  # outputs