diff --git a/train.py b/train.py
index f403a3c65b05..83ae7fd92ace 100644
--- a/train.py
+++ b/train.py
@@ -479,7 +479,7 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):
         assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist'
         apriori = opt.global_rank, opt.local_rank
         with open(Path(ckpt).parent.parent / 'opt.yaml') as f:
-            opt = argparse.Namespace(**yaml.load(f, Loader=yaml.FullLoader))  # replace
+            opt = argparse.Namespace(**yaml.load(f, Loader=yaml.SafeLoader))  # replace
         opt.cfg, opt.weights, opt.resume, opt.batch_size, opt.global_rank, opt.local_rank = '', ckpt, True, opt.total_batch_size, *apriori  # reinstate
         logger.info('Resuming training from %s' % ckpt)
     else:
@@ -503,7 +503,7 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):

     # Hyperparameters
     with open(opt.hyp) as f:
-        hyp = yaml.load(f, Loader=yaml.FullLoader)  # load hyps
+        hyp = yaml.load(f, Loader=yaml.SafeLoader)  # load hyps

     # Train
     logger.info(opt)
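
For context, a short standalone sketch of what the loader swap changes (illustrative keys and values only, not taken from the repo's YAML files): yaml.load(..., Loader=yaml.SafeLoader) behaves like yaml.safe_load(...), parsing the plain scalars, sequences and mappings these config files contain exactly as FullLoader did, while refusing documents that use PyYAML's Python-specific tags instead of constructing objects from them.

import yaml

# Plain key/value hyperparameters parse the same way under SafeLoader as under
# FullLoader (keys here are illustrative):
doc = "lr0: 0.01\nmomentum: 0.937\nanchors: 3\n"
print(yaml.load(doc, Loader=yaml.SafeLoader))
# -> {'lr0': 0.01, 'momentum': 0.937, 'anchors': 3}

# SafeLoader is restricted to standard YAML tags, so a document carrying a
# Python-specific constructor tag is rejected rather than instantiated:
untrusted = "!!python/object/apply:os.system ['echo pwned']"
try:
    yaml.load(untrusted, Loader=yaml.SafeLoader)
except yaml.YAMLError as err:
    print('rejected:', err)

Since opt.yaml and the hyp files only ever hold basic types, the stricter loader is a drop-in replacement here with no behavioural change for well-formed configs.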