From a1825df3114e9e9ff9861576afe200f2644a0a5a Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Tue, 30 Aug 2022 16:18:01 +0200
Subject: [PATCH] Add ClassificationModel TF export assert (#9226)

* Add ClassificationModel TF export assert

Export to TF not yet supported, warning alerts users.

Signed-off-by: Glenn Jocher

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

Signed-off-by: Glenn Jocher
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 export.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/export.py b/export.py
index 1ddc3515b2fc..74c5a35a6300 100644
--- a/export.py
+++ b/export.py
@@ -65,7 +65,7 @@
 ROOT = Path(os.path.relpath(ROOT, Path.cwd()))  # relative

 from models.experimental import attempt_load
-from models.yolo import Detect
+from models.yolo import ClassificationModel, Detect
 from utils.dataloaders import LoadImages
 from utils.general import (LOGGER, Profile, check_dataset, check_img_size, check_requirements, check_version,
                            check_yaml, colorstr, file_size, get_default_args, print_args, url2file)
@@ -643,6 +643,7 @@ def run(
         if int8 or edgetpu:  # TFLite --int8 bug https://github.com/ultralytics/yolov5/issues/5707
             check_requirements(('flatbuffers==1.12',))  # required before `import tensorflow`
         assert not tflite or not tfjs, 'TFLite and TF.js models must be exported separately, please pass only one type.'
+        assert not isinstance(model, ClassificationModel), 'ClassificationModel export to TF formats not yet supported.'
         f[5], model = export_saved_model(model.cpu(),
                                          im,
                                          file,
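
Note (not part of the patch): a minimal sketch of the behavior the new assert introduces. The import and assertion message are taken verbatim from the diff above; the helper name assert_tf_exportable and the example weights file are illustrative assumptions, not code from the repository.

    # Sketch only: mirrors the guard added in export.py run() before the TF export branch.
    from models.yolo import ClassificationModel

    def assert_tf_exportable(model):
        # Hypothetical helper: classification checkpoints cannot yet be converted to
        # SavedModel / TFLite / TF.js, so fail fast with a clear message instead of
        # attempting a conversion that would produce a broken export.
        assert not isinstance(model, ClassificationModel), \
            'ClassificationModel export to TF formats not yet supported.'

In practice this means an invocation such as `python export.py --weights yolov5s-cls.pt --include saved_model` (flags as assumed from the existing export.py CLI) should now stop early with the assertion message rather than proceeding into the TensorFlow export path.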