diff --git a/train.py b/train.py
index 403428ca60f6..207fcb5b0730 100644
--- a/train.py
+++ b/train.py
@@ -33,7 +33,7 @@ from utils.loss import ComputeLoss
 from utils.plots import plot_images, plot_labels, plot_results, plot_evolution
 from utils.torch_utils import ModelEMA, select_device, intersect_dicts, torch_distributed_zero_first, is_parallel
-from utils.wandb_logging.wandb_utils import WandbLogger, resume_and_get_id
+from utils.wandb_logging.wandb_utils import WandbLogger, check_wandb_resume
@@ -564,7 +564,7 @@ def train_ray_tune(config):
     check_requirements()
 
     # Resume
-    wandb_run = resume_and_get_id(opt)
+    wandb_run = check_wandb_resume(opt)
     if opt.resume and not wandb_run:  # resume an interrupted run
         ckpt = opt.resume if isinstance(opt.resume, str) else get_latest_run()  # specified or most recent path
         assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist'
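For context, below is a minimal, self-contained sketch of the resume branch the second hunk touches, not the repository's implementation. `WANDB_PREFIX`, the `_sketch` helpers, and the artifact-URI convention are assumptions made for illustration; only `check_wandb_resume(opt)` and `get_latest_run()` appear in the diff itself. The idea it illustrates: a W&B resume short-circuits the local-checkpoint lookup, otherwise `--resume` falls back to an explicit path or the most recent `last*.pt`.

```python
# Hedged sketch of the resume decision; helper names and the URI prefix are assumptions.
import glob
import os
from argparse import Namespace

WANDB_PREFIX = 'wandb-artifact://'  # assumed artifact URI prefix


def check_wandb_resume_sketch(opt):
    """Assumed behaviour: True when --resume points at a W&B artifact rather than a local file."""
    return isinstance(opt.resume, str) and opt.resume.startswith(WANDB_PREFIX)


def get_latest_run_sketch(search_dir='runs/train'):
    """Assumed stand-in for get_latest_run(): newest last*.pt checkpoint under search_dir."""
    ckpts = glob.glob(f'{search_dir}/**/last*.pt', recursive=True)
    return max(ckpts, key=os.path.getmtime) if ckpts else ''


def resolve_resume(opt):
    """Mirror the branch in the second hunk: a W&B resume bypasses the local checkpoint lookup."""
    if check_wandb_resume_sketch(opt):
        return opt.resume  # handled by the W&B logger; no local file needed
    if opt.resume:  # resume an interrupted run from a local checkpoint
        ckpt = opt.resume if isinstance(opt.resume, str) else get_latest_run_sketch()
        assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist'
        return ckpt
    return ''


if __name__ == '__main__':
    # Resuming from a W&B artifact short-circuits before any filesystem check.
    print(resolve_resume(Namespace(resume='wandb-artifact://entity/project/run_id')))
```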