From 9b4e05439c89079eb7a72a77586f91dba1bafba5 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Mon, 23 May 2022 15:57:16 +0200
Subject: [PATCH] Fix AMP check tolerance (#7937)

Adjust tolerance to 5%; fixes the failing Colab AMP check on V100 (1.5% difference) with a 200% safety margin.
---
 utils/general.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/utils/general.py b/utils/general.py
index 6227977a14c6..bb1aa667d8a0 100755
--- a/utils/general.py
+++ b/utils/general.py
@@ -520,10 +520,10 @@ def check_amp(model):
         LOGGER.warning(emojis(f'{prefix}checks skipped ⚠️, not online.'))
         return True
     m = AutoShape(model, verbose=False)  # model
-    a = m(im).xyxy[0]  # FP32 inference
+    a = m(im).xywhn[0]  # FP32 inference
     m.amp = True
-    b = m(im).xyxy[0]  # AMP inference
-    if (a.shape == b.shape) and torch.allclose(a, b, atol=1.0):  # close to 1.0 pixel bounding box
+    b = m(im).xywhn[0]  # AMP inference
+    if (a.shape == b.shape) and torch.allclose(a, b, atol=0.05):  # close to 5% absolute tolerance
         LOGGER.info(emojis(f'{prefix}checks passed ✅'))
         return True
     else:
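
Note: below is a minimal standalone sketch (not part of the commit) of the comparison the patched check performs. The tensor values are made up for illustration; in the real check they come from m(im).xywhn[0] on an actual image.

    import torch

    # Hypothetical FP32 and AMP detections in normalized xywh format
    # (columns: x, y, w, h, confidence, class); coordinates lie in [0, 1].
    a = torch.tensor([[0.500, 0.400, 0.200, 0.300, 0.90, 0.0]])  # FP32 inference
    b = torch.tensor([[0.512, 0.395, 0.205, 0.298, 0.89, 0.0]])  # AMP inference

    # The patched check: same shape and every value within 5% absolute
    # tolerance. Because .xywhn is normalized (unlike the old pixel-space
    # .xyxy with atol=1.0), atol=0.05 means "within 5% of the image size"
    # at any resolution, comfortably above the ~1.5% V100 discrepancy.
    amp_ok = (a.shape == b.shape) and torch.allclose(a, b, atol=0.05)
    print(amp_ok)  # True: the largest difference here is 1.2%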