From e42c89d4efc99bfbd8c5c208ffe67c11632da84a Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Sun, 16 Oct 2022 20:51:32 +0200
Subject: [PATCH] `smart_optimizer()` revert to weight with decay (#9817)

If a parameter does not fall into any other category, assign it to the
default weight-with-decay group `g[0]`.

Signed-off-by: Glenn Jocher
---
 utils/torch_utils.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/utils/torch_utils.py b/utils/torch_utils.py
index 9f257d06ac60..04a3873854ee 100644
--- a/utils/torch_utils.py
+++ b/utils/torch_utils.py
@@ -319,12 +319,13 @@ def smart_optimizer(model, name='Adam', lr=0.001, momentum=0.9, decay=1e-5):
     g = [], [], []  # optimizer parameter groups
     bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
     for v in model.modules():
-        if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias (no decay)
-            g[2].append(v.bias)
-        if isinstance(v, bn):  # weight (no decay)
-            g[1].append(v.weight)
-        elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # weight (with decay)
-            g[0].append(v.weight)
+        for p_name, p in v.named_parameters(recurse=0):
+            if p_name == 'bias':  # bias (no decay)
+                g[2].append(p)
+            elif p_name == 'weight' and isinstance(v, bn):  # weight (no decay)
+                g[1].append(p)
+            else:
+                g[0].append(p)  # weight (with decay)
 
     if name == 'Adam':
         optimizer = torch.optim.Adam(g[2], lr=lr, betas=(momentum, 0.999))  # adjust beta1 to momentum
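
For reference, a minimal standalone sketch of the new grouping logic (an
illustration, not part of the patch): it applies the same
`named_parameters(recurse=0)` loop to a toy model and then wires the three
groups into an optimizer. The toy model and the SGD/`add_param_group` wiring
below are assumptions for demonstration only; the hunk above does not show how
`smart_optimizer()` constructs the final optimizer for every `name`.

    import torch
    import torch.nn as nn

    model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.Flatten(), nn.Linear(8, 4))

    g = [], [], []  # g[0] weights with decay, g[1] norm weights (no decay), g[2] biases (no decay)
    bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # all nn normalization layer classes
    for v in model.modules():
        for p_name, p in v.named_parameters(recurse=0):  # direct parameters of this module only
            if p_name == 'bias':  # bias (no decay)
                g[2].append(p)
            elif p_name == 'weight' and isinstance(v, bn):  # norm weight (no decay)
                g[1].append(p)
            else:  # anything else, e.g. conv/linear weights, defaults to decay
                g[0].append(p)

    print(len(g[0]), len(g[1]), len(g[2]))  # -> 2 1 3 for the toy model above

    # Assumed wiring for demonstration: seed the optimizer with the bias group,
    # then attach the remaining groups with their own weight_decay settings.
    optimizer = torch.optim.SGD(g[2], lr=0.01, momentum=0.9, nesterov=True)
    optimizer.add_param_group({'params': g[0], 'weight_decay': 1e-5})  # weights, with decay
    optimizer.add_param_group({'params': g[1], 'weight_decay': 0.0})  # norm weights, no decay

Note the behavioral change: with the old attribute-based checks, a parameter
whose name was neither `bias` nor `weight` was never added to any group and so
was silently excluded from the optimizer; the new `else` branch routes such
parameters into the decayed group `g[0]`.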