From 29d79a6360d8c7da8875284246847db3312e270a Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Sat, 2 Jul 2022 18:35:45 +0200
Subject: [PATCH] Do not prefer Apple MPS (#8446)

Require explicit request for MPS, i.e.
```bash
python detect.py --device mps
```

Reverts https://github.com/ultralytics/yolov5/pull/8210 for preferring MPS if available. Note that torch MPS is experiencing ongoing compatibility issues in https://github.com/pytorch/pytorch/issues/77886
---
 utils/torch_utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/utils/torch_utils.py b/utils/torch_utils.py
index b1b107ee4f1b..c21dc6658c1e 100644
--- a/utils/torch_utils.py
+++ b/utils/torch_utils.py
@@ -62,7 +62,7 @@ def select_device(device='', batch_size=0, newline=True):
         assert torch.cuda.is_available() and torch.cuda.device_count() >= len(device.replace(',', '')), \
             f"Invalid CUDA '--device {device}' requested, use '--device cpu' or pass valid CUDA device(s)"
 
-    if not cpu and torch.cuda.is_available():  # prefer GPU if available
+    if not (cpu or mps) and torch.cuda.is_available():  # prefer GPU if available
         devices = device.split(',') if device else '0'  # range(torch.cuda.device_count())  # i.e. 0,1,6,7
         n = len(devices)  # device count
         if n > 1 and batch_size > 0:  # check batch_size is divisible by device_count
@@ -72,7 +72,7 @@ def select_device(device='', batch_size=0, newline=True):
             p = torch.cuda.get_device_properties(i)
             s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / (1 << 20):.0f}MiB)\n"  # bytes to MB
         arg = 'cuda:0'
-    elif not cpu and getattr(torch, 'has_mps', False) and torch.backends.mps.is_available():  # prefer MPS if available
+    elif mps and getattr(torch, 'has_mps', False) and torch.backends.mps.is_available():  # prefer MPS if available
         s += 'MPS\n'
         arg = 'mps'
     else:  # revert to CPU
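
To illustrate the behaviour this patch produces, here is a minimal standalone sketch of the device-selection logic after the change. Note this is not the repository's actual `select_device` function; `pick_device` and its simplified argument handling are assumptions made for illustration only.

```python
# Minimal sketch of the post-patch behaviour: CUDA is preferred only when
# neither 'cpu' nor 'mps' was requested, and MPS requires an explicit request.
# pick_device is a simplified stand-in, not YOLOv5's select_device.
import torch


def pick_device(device=''):
    device = str(device).strip().lower()
    cpu = device == 'cpu'
    mps = device == 'mps'
    # prefer CUDA only when neither 'cpu' nor 'mps' was explicitly requested
    if not (cpu or mps) and torch.cuda.is_available():
        return torch.device(f'cuda:{device or 0}')
    # use MPS only on explicit request, and only if the backend is usable
    if mps and getattr(torch, 'has_mps', False) and torch.backends.mps.is_available():
        return torch.device('mps')
    # otherwise fall back to CPU
    return torch.device('cpu')


if __name__ == '__main__':
    print(pick_device())       # cuda:0 on a CUDA machine, cpu otherwise -- never mps implicitly
    print(pick_device('mps'))  # mps only when requested and available, else cpu
```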