From 99d324bc0eb38e85e3aede4eebd79ea2e0a97966 Mon Sep 17 00:00:00 2001
From: Abhiram V <61599526+Anon-Artist@users.noreply.github.com>
Date: Tue, 19 Jan 2021 00:19:08 +0530
Subject: [PATCH] Update train.py with yaml.SafeLoader (#1972)

---
 train.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/train.py b/train.py
index f403a3c65b05..83ae7fd92ace 100644
--- a/train.py
+++ b/train.py
@@ -479,7 +479,7 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):
         assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist'
         apriori = opt.global_rank, opt.local_rank
         with open(Path(ckpt).parent.parent / 'opt.yaml') as f:
-            opt = argparse.Namespace(**yaml.load(f, Loader=yaml.FullLoader))  # replace
+            opt = argparse.Namespace(**yaml.load(f, Loader=yaml.SafeLoader))  # replace
         opt.cfg, opt.weights, opt.resume, opt.batch_size, opt.global_rank, opt.local_rank = '', ckpt, True, opt.total_batch_size, *apriori  # reinstate
         logger.info('Resuming training from %s' % ckpt)
     else:
@@ -503,7 +503,7 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):
 
     # Hyperparameters
     with open(opt.hyp) as f:
-        hyp = yaml.load(f, Loader=yaml.FullLoader)  # load hyps
+        hyp = yaml.load(f, Loader=yaml.SafeLoader)  # load hyps
 
     # Train
     logger.info(opt)
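
Why SafeLoader: yaml.SafeLoader resolves only standard YAML tags and constructs plain Python types (dict, list, str, int, float, bool, None), whereas FullLoader resolves the full YAML language and has had reported bypasses that let an untrusted file construct arbitrary Python objects. The plain key/value opt.yaml and hyp.yaml files used here need nothing beyond the safe subset. A minimal standalone sketch of the pattern the patch switches to, assuming example paths 'opt.yaml' and 'hyp.yaml':

    import argparse
    import yaml

    # SafeLoader builds only basic Python types, which is all these config files contain.
    with open('hyp.yaml') as f:
        hyp = yaml.load(f, Loader=yaml.SafeLoader)  # dict of hyperparameters

    # The same loader also rebuilds the saved argparse options on resume.
    with open('opt.yaml') as f:
        opt = argparse.Namespace(**yaml.load(f, Loader=yaml.SafeLoader))

yaml.safe_load(f) is shorthand for yaml.load(f, Loader=yaml.SafeLoader) and could be used interchangeably here.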