Remove .autoshape() method (ultralytics#5694)
glenn-jocher committed Nov 20, 2021
1 parent 4d615c3 commit c9ac992
Showing 3 changed files with 6 additions and 14 deletions.
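For downstream code that called the removed method directly, migration is a one-line change: construct the AutoShape wrapper around the raw model instead of calling model.autoshape(). A minimal sketch, assuming a repository checkout at this commit; the weights path 'yolov5s.pt' is illustrative:

from models.common import AutoShape
from models.experimental import attempt_load

model = attempt_load('yolov5s.pt', map_location='cpu')  # raw Model, no pre/post-processing
# Before this commit: model = model.autoshape()
model = AutoShape(model)  # wraps for file/URI/PIL/cv2/np inputs and NMS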
hubconf.py (2 additions, 1 deletion)
@@ -27,6 +27,7 @@ def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbo
     """
     from pathlib import Path
 
+    from models.common import AutoShape
     from models.experimental import attempt_load
     from models.yolo import Model
     from utils.downloads import attempt_download
@@ -55,7 +56,7 @@ def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbo
                 if len(ckpt['model'].names) == classes:
                     model.names = ckpt['model'].names  # set class names attribute
         if autoshape:
-            model = model.autoshape()  # for file/URI/PIL/cv2/np inputs and NMS
+            model = AutoShape(model)  # for file/URI/PIL/cv2/np inputs and NMS
         return model.to(device)
 
     except Exception as e:
models/common.py (3 additions, 5 deletions)
@@ -23,7 +23,7 @@
 from utils.general import (LOGGER, check_requirements, check_suffix, colorstr, increment_path, make_divisible,
                            non_max_suppression, scale_coords, xywh2xyxy, xyxy2xywh)
 from utils.plots import Annotator, colors, save_one_box
-from utils.torch_utils import time_sync
+from utils.torch_utils import copy_attr, time_sync
 
 
 def autopad(k, p=None):  # kernel, padding
@@ -405,12 +405,10 @@ class AutoShape(nn.Module):
 
     def __init__(self, model):
         super().__init__()
+        LOGGER.info('Adding AutoShape... ')
+        copy_attr(self, model, include=('yaml', 'nc', 'hyp', 'names', 'stride', 'abc'), exclude=())  # copy attributes
         self.model = model.eval()
 
-    def autoshape(self):
-        LOGGER.info('AutoShape already enabled, skipping... ')  # model already converted to model.autoshape()
-        return self
-
     def _apply(self, fn):
         # Apply to(), cpu(), cuda(), half() to model tensors that are not parameters or registered buffers
         self = super()._apply(fn)
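The copy_attr call added to AutoShape.__init__ above takes over the attribute copying that Model.autoshape() previously performed, so the wrapper still exposes names, stride, and the other listed attributes. A rough sketch of that helper's behavior, approximating (not quoting) the implementation in utils/torch_utils.py:

def copy_attr(a, b, include=(), exclude=()):
    # Copy attributes from b to a; if `include` is non-empty, copy only those keys,
    # and always skip private attributes and anything listed in `exclude`
    for k, v in b.__dict__.items():
        if (len(include) and k not in include) or k.startswith('_') or k in exclude:
            continue
        setattr(a, k, v)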
models/yolo.py (1 addition, 8 deletions)
@@ -22,8 +22,7 @@
 from utils.autoanchor import check_anchor_order
 from utils.general import LOGGER, check_version, check_yaml, make_divisible, print_args
 from utils.plots import feature_visualization
-from utils.torch_utils import (copy_attr, fuse_conv_and_bn, initialize_weights, model_info, scale_img, select_device,
-                               time_sync)
+from utils.torch_utils import fuse_conv_and_bn, initialize_weights, model_info, scale_img, select_device, time_sync
 
 try:
     import thop  # for FLOPs computation
@@ -226,12 +225,6 @@ def fuse(self):  # fuse model Conv2d() + BatchNorm2d() layers
         self.info()
         return self
 
-    def autoshape(self):  # add AutoShape module
-        LOGGER.info('Adding AutoShape... ')
-        m = AutoShape(self)  # wrap model
-        copy_attr(m, self, include=('yaml', 'nc', 'hyp', 'names', 'stride'), exclude=())  # copy attributes
-        return m
-
     def info(self, verbose=False, img_size=640):  # print model information
         model_info(self, verbose, img_size)
 
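Users loading models through torch.hub should see no behavioral change, since hubconf.py now performs the AutoShape wrapping itself whenever autoshape=True (the default). An illustrative call, assuming network access to the ultralytics/yolov5 hub entry; the image URL is only an example:

import torch

model = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # returns an AutoShape-wrapped model
results = model('https://ultralytics.com/images/zidane.jpg')  # file/URI/PIL/cv2/np inputs accepted
results.print()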
