From 71e6d81c890b82e8f2fec661fafbcfdd60668b5e Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Tue, 12 Apr 2022 10:58:40 +0200
Subject: [PATCH 1/2] Dynamic normalization layer selection

Based on actual available layers. Torch 1.7 compatible, resolves
https://github.com/ultralytics/yolov5/issues/7381
---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index 80bff18fd653..b97431b56cdd 100644
--- a/train.py
+++ b/train.py
@@ -151,7 +151,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
         LOGGER.info(f"Scaled weight_decay = {hyp['weight_decay']}")
 
     g = [], [], []  # optimizer parameter groups
-    bn = nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d, nn.LayerNorm
+    bn = tuple(v for (k, v) in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
     for v in model.modules():
         if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias
             g[2].append(v.bias)

From 2c4afbbbc9ccb5e3a27e38e314985df1b27f4c50 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Tue, 12 Apr 2022 10:59:31 +0200
Subject: [PATCH 2/2] Update train.py

---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index b97431b56cdd..806e2cebe561 100644
--- a/train.py
+++ b/train.py
@@ -151,7 +151,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
         LOGGER.info(f"Scaled weight_decay = {hyp['weight_decay']}")
 
     g = [], [], []  # optimizer parameter groups
-    bn = tuple(v for (k, v) in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
+    bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
     for v in model.modules():
         if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias
             g[2].append(v.bias)
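
Note: a minimal standalone sketch of what the new dynamic selection does. Only the
bn = tuple(...) line and the bias handling come from the diff above; the toy model and
the g[0]/g[1] grouping are illustrative assumptions about the surrounding train.py code,
not part of this patch.

    import torch.nn as nn

    # Collect every normalization class exposed by the installed torch version,
    # e.g. BatchNorm2d, GroupNorm, LayerNorm (Lazy* variants only where available),
    # so the tuple never references a class missing from older torch releases.
    bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)

    # Toy model for illustration (assumption, not from the patch)
    model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.ReLU())

    g = [], [], []  # optimizer parameter groups
    for v in model.modules():
        if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias
            g[2].append(v.bias)
        if isinstance(v, bn):  # norm weight, kept out of weight decay (assumed grouping)
            g[1].append(v.weight)
        elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # conv/linear weight (assumed grouping)
            g[0].append(v.weight)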