From 65953884ab84f4316557a0c8c4736a19e7028c04 Mon Sep 17 00:00:00 2001
From: Vardan Agarwal <35430842+vardanagarwal@users.noreply.github.com>
Date: Mon, 11 Apr 2022 16:36:44 +0530
Subject: [PATCH 1/3] Add support for different normalization layers.

---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index e023a3418454..2a4dbd62fb14 100644
--- a/train.py
+++ b/train.py
@@ -154,7 +154,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
     for v in model.modules():
         if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias
             g[2].append(v.bias)
-        if isinstance(v, nn.BatchNorm2d):  # weight (no decay)
+        if isinstance(v, (nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d, nn.LayerNorm)):  # weight (no decay)
             g[1].append(v.weight)
         elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # weight (with decay)
             g[0].append(v.weight)

From 155e60b7e2fa86bdec0e18e6a9319d4f5482e3de Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 11 Apr 2022 11:12:01 +0000
Subject: [PATCH 2/3] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 train.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/train.py b/train.py
index 2a4dbd62fb14..4e1bf3e0436e 100644
--- a/train.py
+++ b/train.py
@@ -154,7 +154,8 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
     for v in model.modules():
         if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias
             g[2].append(v.bias)
-        if isinstance(v, (nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d, nn.LayerNorm)):  # weight (no decay)
+        if isinstance(v, (nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d,
+                          nn.LayerNorm)):  # weight (no decay)
             g[1].append(v.weight)
         elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # weight (with decay)
             g[0].append(v.weight)

From d7b7a70bcdcd372663150f3ded4e019883c55fe7 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Mon, 11 Apr 2022 14:01:17 +0200
Subject: [PATCH 3/3] Cleanup

---
 train.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/train.py b/train.py
index 4e1bf3e0436e..80bff18fd653 100644
--- a/train.py
+++ b/train.py
@@ -151,11 +151,11 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
     LOGGER.info(f"Scaled weight_decay = {hyp['weight_decay']}")
 
     g = [], [], []  # optimizer parameter groups
+    bn = nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d, nn.LayerNorm
     for v in model.modules():
         if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias
             g[2].append(v.bias)
-        if isinstance(v, (nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d,
-                          nn.LayerNorm)):  # weight (no decay)
+        if isinstance(v, bn):  # weight (no decay)
             g[1].append(v.weight)
         elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # weight (with decay)
             g[0].append(v.weight)
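Note (not part of the patch series): below is a minimal, self-contained sketch of what the final grouping loop from PATCH 3/3 does. Any layer matching the bn tuple contributes its weight to the no-decay group g[1], biases go to g[2], and remaining weights (conv/linear) go to the decay group g[0]. The toy model and the SGD hyperparameter values are illustrative assumptions, not values from this patch; the add_param_group construction mirrors the pattern train.py uses after this loop.

# Standalone sketch; grouping loop copied from PATCH 3/3, everything else is
# an illustrative assumption.
import torch
import torch.nn as nn

model = nn.Sequential(
    nn.Conv2d(3, 8, 3),  # conv weight -> g[0] (decay), bias -> g[2]
    nn.BatchNorm2d(8),   # norm weight -> g[1] (no decay), bias -> g[2]
    nn.Conv2d(8, 8, 3),
    nn.GroupNorm(2, 8))  # now also matched, thanks to the wider isinstance check

g = [], [], []  # optimizer parameter groups
bn = nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d, nn.LayerNorm
for v in model.modules():
    if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias
        g[2].append(v.bias)
    if isinstance(v, bn):  # weight (no decay)
        g[1].append(v.weight)
    elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # weight (with decay)
        g[0].append(v.weight)

optimizer = torch.optim.SGD(g[2], lr=0.01, momentum=0.937, nesterov=True)  # biases
optimizer.add_param_group({'params': g[0], 'weight_decay': 0.0005})  # weights: decayed
optimizer.add_param_group({'params': g[1]})  # norm weights: weight_decay defaults to 0
print(len(g[0]), len(g[1]), len(g[2]))  # -> 2 2 4

Factoring the normalization types into the bn tuple keeps the isinstance call on one line (avoiding the line-length wrap that pre-commit forced in PATCH 2/3) and makes the set of no-decay layer types easy to extend.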