From a88a81469a54838abfbba0885e1c47c9e87ce3e2 Mon Sep 17 00:00:00 2001
From: Nick Martin
Date: Wed, 6 Apr 2022 09:35:33 -0700
Subject: [PATCH] Copy wandb param dict before training to avoid overwrites
 (#7317)

* Copy wandb param dict before training to avoid overwrites.

Copy the hyperparameter dict retrieved from the wandb configuration before
passing it to `train()`. Training overwrites parameters in the dictionary
(e.g. scaling the obj/box/cls gains), which causes the values reported in
wandb to no longer match the input values. This is confusing because it
makes a run hard to reproduce, and it also throws off wandb's Bayesian
sweep algorithm.

* Cleanup

Co-authored-by: Glenn Jocher
---
 utils/loggers/wandb/sweep.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/utils/loggers/wandb/sweep.py b/utils/loggers/wandb/sweep.py
index 206059bc30bf..d49ea6f2778b 100644
--- a/utils/loggers/wandb/sweep.py
+++ b/utils/loggers/wandb/sweep.py
@@ -16,8 +16,8 @@
 
 def sweep():
     wandb.init()
-    # Get hyp dict from sweep agent
-    hyp_dict = vars(wandb.config).get("_items")
+    # Get hyp dict from sweep agent. Copy it because train() modifies parameters, which confuses wandb.
+    hyp_dict = vars(wandb.config).get("_items").copy()
 
     # Workaround: get necessary opt args
     opt = parse_opt(known=True)
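
To make the aliasing problem concrete, below is a minimal sketch. `mock_train`
is a hypothetical stand-in for `train()`, which mutates the hyp dict it
receives (the real function scales loss gains such as box/cls/obj in place);
the dict literal stands in for the `_items` dict pulled from `wandb.config`.

    # mock_train is a hypothetical stand-in for train(): it mutates the
    # hyp dict it receives, e.g. by scaling a loss gain in place.
    def mock_train(hyp):
        hyp["box"] *= 2.0

    # Stand-in for the hyperparameters chosen by the wandb sweep agent.
    sweep_config = {"box": 0.05, "lr0": 0.01}

    # Without a copy, training mutates the sweep agent's record of the run:
    mock_train(sweep_config)
    print(sweep_config["box"])  # 0.1 -- no longer the value the sweep chose

    # With .copy(), the values wandb reports stay the values that went in:
    sweep_config = {"box": 0.05, "lr0": 0.01}
    mock_train(sweep_config.copy())
    print(sweep_config["box"])  # 0.05 -- matches the reported hyperparameters

A shallow `dict.copy()` suffices here because the hyperparameter values are
scalars; nested structures would need `copy.deepcopy()` instead.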