From 84efa62b2d0a619309a7437aa82cebdfc4de1bed Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Fri, 11 Mar 2022 16:18:40 +0100
Subject: [PATCH] Fix PyTorch Hub export inference shapes (#6949)

May resolve https://github.com/ultralytics/yolov5/issues/6947
---
 models/common.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/models/common.py b/models/common.py
index 70ee7105abfc..ac3af20d533e 100644
--- a/models/common.py
+++ b/models/common.py
@@ -544,10 +544,9 @@ def forward(self, imgs, size=640, augment=False, profile=False):
             g = (size / max(s))  # gain
             shape1.append([y * g for y in s])
             imgs[i] = im if im.data.contiguous else np.ascontiguousarray(im)  # update
-        shape1 = [make_divisible(x, self.stride) for x in np.stack(shape1, 0).max(0)]  # inference shape
-        x = [letterbox(im, new_shape=shape1 if self.pt else size, auto=False)[0] for im in imgs]  # pad
-        x = np.stack(x, 0) if n > 1 else x[0][None]  # stack
-        x = np.ascontiguousarray(x.transpose((0, 3, 1, 2)))  # BHWC to BCHW
+        shape1 = [make_divisible(x, self.stride) if self.pt else size for x in np.array(shape1).max(0)]  # inf shape
+        x = [letterbox(im, new_shape=shape1, auto=False)[0] for im in imgs]  # pad
+        x = np.ascontiguousarray(np.array(x).transpose((0, 3, 1, 2)))  # stack and BHWC to BCHW
         x = torch.from_numpy(x).to(p.device).type_as(p) / 255  # uint8 to fp16/32
         t.append(time_sync())
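
A minimal standalone sketch of what the new `shape1` line computes, not taken from the patch itself: `inference_shape` is a hypothetical helper, `make_divisible` mirrors the YOLOv5 utility of the same name, and `pt` stands in for the `self.pt` backend flag (True for native PyTorch weights, False for exported formats such as ONNX or TensorRT).

    import math
    import numpy as np

    def make_divisible(x, divisor):
        # Round x up to the nearest multiple of divisor (the model stride)
        return math.ceil(x / divisor) * divisor

    def inference_shape(image_shapes, size=640, stride=32, pt=True):
        # image_shapes: list of (h, w) tuples for the input batch
        shape1 = []
        for s in image_shapes:
            g = size / max(s)                  # gain to scale the longest side to `size`
            shape1.append([y * g for y in s])  # scaled (h, w) per image
        # Native PyTorch weights accept any stride-multiple shape, so round the
        # batch maximum up to the stride; for exported models the patched line
        # returns a square `size` x `size` letterbox target instead.
        return [make_divisible(x, stride) if pt else size for x in np.array(shape1).max(0)]

    print(inference_shape([(720, 1280), (480, 640)], pt=True))   # [480, 640]
    print(inference_shape([(720, 1280), (480, 640)], pt=False))  # [640, 640]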