From c18a7b689e8d15f94f5ec24aeba7dc46bed188e4 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Wed, 17 Aug 2022 22:57:55 +0200
Subject: [PATCH] Add weight `decay` to argparser (#9006)

Signed-off-by: Glenn Jocher
Signed-off-by: Glenn Jocher
---
 classify/train.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/classify/train.py b/classify/train.py
index b85f14236039..d55dc066d7a3 100644
--- a/classify/train.py
+++ b/classify/train.py
@@ -136,7 +136,7 @@ def train(opt, device):
         logger.log_graph(model, imgsz)  # log model
 
     # Optimizer
-    optimizer = smart_optimizer(model, opt.optimizer, opt.lr0, momentum=0.9, decay=5e-5)
+    optimizer = smart_optimizer(model, opt.optimizer, opt.lr0, momentum=0.9, decay=opt.decay)
 
     # Scheduler
     lrf = 0.01  # final lr (fraction of lr0)
@@ -280,6 +280,7 @@ def parse_opt(known=False):
     parser.add_argument('--pretrained', nargs='?', const=True, default=True, help='start from i.e. --pretrained False')
     parser.add_argument('--optimizer', choices=['SGD', 'Adam', 'AdamW', 'RMSProp'], default='Adam', help='optimizer')
     parser.add_argument('--lr0', type=float, default=0.001, help='initial learning rate')
+    parser.add_argument('--decay', type=float, default=5e-5, help='weight decay')
     parser.add_argument('--label-smoothing', type=float, default=0.1, help='Label smoothing epsilon')
     parser.add_argument('--cutoff', type=int, default=None, help='Model layer cutoff index for Classify() head')
     parser.add_argument('--dropout', type=float, default=None, help='Dropout (fraction)')