Logger Cleanup (#9828)
glenn-jocher authored Oct 17, 2022
1 parent e3ff780 · commit acff977
Showing 3 changed files with 2 additions and 16 deletions.
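In short: this cleanup strips the last Weights & Biases-specific hooks from the training scripts. The commented-out Loggers wiring in segment/train.py is deleted outright, train.py stops importing and calling check_wandb_resume and no longer writes a 'wandb_id' entry into saved checkpoints, and strip_optimizer() in utils/general.py drops 'wandb_id' from the list of keys it blanks.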
segment/train.py (12 changes: 0 additions & 12 deletions)
@@ -91,17 +91,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictionary
     data_dict = None
     if RANK in {-1, 0}:
         logger = GenericLogger(opt=opt, console_logger=LOGGER)
-        # loggers = Loggers(save_dir, weights, opt, hyp, LOGGER)  # loggers instance
-        # if loggers.clearml:
-        #     data_dict = loggers.clearml.data_dict  # None if no ClearML dataset or filled in by ClearML
-        # if loggers.wandb:
-        #     data_dict = loggers.wandb.data_dict
-        # if resume:
-        #     weights, epochs, hyp, batch_size = opt.weights, opt.epochs, opt.hyp, opt.batch_size
-        #
-        # # Register actions
-        # for k in methods(loggers):
-        #     callbacks.register_action(k, callback=getattr(loggers, k))

     # Config
     plots = not evolve and not opt.noplots  # create plots
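With the dead Loggers block removed, rank-0 logging in the segmentation trainer goes through GenericLogger alone. A minimal sketch of the surviving setup, assuming the import paths segment/train.py already uses and an argparse-style opt namespace:

    from utils.general import LOGGER  # YOLOv5's shared console logger
    from utils.loggers import GenericLogger

    # Rank-0 only: one lightweight logger instead of the full Loggers stack
    logger = GenericLogger(opt=opt, console_logger=LOGGER)  # opt assumed to come from parse_opt()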
@@ -400,7 +389,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictionary
                 'ema': deepcopy(ema.ema).half(),
                 'updates': ema.updates,
                 'optimizer': optimizer.state_dict(),
-                # 'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
                 'opt': vars(opt),
                 'date': datetime.now().isoformat()}

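With the commented-out 'wandb_id' entry gone for good, a checkpoint written by segment/train.py carries only the keys visible in the hunk above, plus 'epoch' and 'best_fitness' (both referenced by strip_optimizer() below). A hedged loading sketch; the filename is illustrative:

    import torch

    ckpt = torch.load('last.pt', map_location='cpu')  # any checkpoint from segment/train.py
    model = ckpt.get('ema') or ckpt['model']  # prefer the EMA weights when present
    print(ckpt['epoch'], ckpt['date'])  # no 'wandb_id' key to worry about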
train.py (4 changes: 1 addition & 3 deletions)
@@ -53,7 +53,6 @@
                            one_cycle, print_args, print_mutation, strip_optimizer, yaml_save)
 from utils.loggers import Loggers
 from utils.loggers.comet.comet_utils import check_comet_resume
-from utils.loggers.wandb.wandb_utils import check_wandb_resume
 from utils.loss import ComputeLoss
 from utils.metrics import fitness
 from utils.plots import plot_evolve
@@ -375,7 +374,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictionary
                 'ema': deepcopy(ema.ema).half(),
                 'updates': ema.updates,
                 'optimizer': optimizer.state_dict(),
-                'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
                 'opt': vars(opt),
                 'date': datetime.now().isoformat()}

@@ -483,7 +481,7 @@ def main(opt, callbacks=Callbacks()):
     check_requirements()

     # Resume (from specified or most recent last.pt)
-    if opt.resume and not check_wandb_resume(opt) and not check_comet_resume(opt) and not opt.evolve:
+    if opt.resume and not check_comet_resume(opt) and not opt.evolve:
         last = Path(check_file(opt.resume) if isinstance(opt.resume, str) else get_latest_run())
         opt_yaml = last.parent.parent / 'opt.yaml'  # train options yaml
         opt_data = opt.data  # original dataset
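With check_wandb_resume gone, Comet is the only logger that can still intercept a resumed run; a plain "python train.py --resume" continues to resolve the newest last.pt through get_latest_run(). A hedged sketch of that helper's behavior, assumed from its use in the hunk above:

    from utils.general import get_latest_run

    last = get_latest_run()  # path to the most recent 'last*.pt' under runs/, or '' if none
    print(last or 'no previous run found')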
utils/general.py (2 changes: 1 addition & 1 deletion)
@@ -956,7 +956,7 @@ def strip_optimizer(f='best.pt', s=''):  # from utils.general import *; strip_optimizer()
     x = torch.load(f, map_location=torch.device('cpu'))
     if x.get('ema'):
         x['model'] = x['ema']  # replace model with ema
-    for k in 'optimizer', 'best_fitness', 'wandb_id', 'ema', 'updates':  # keys
+    for k in 'optimizer', 'best_fitness', 'ema', 'updates':  # keys
         x[k] = None
     x['epoch'] = -1
     x['model'].half()  # to FP16
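A usage sketch lifted from the function's own inline comment; after this commit strip_optimizer() no longer needs to blank a 'wandb_id' key, because checkpoints stop carrying one:

    from utils.general import strip_optimizer

    # Shrinks a training checkpoint to inference-only weights: EMA promoted to
    # 'model', optimizer state dropped, tensors cast to FP16. With s='' (the
    # default) the file f is overwritten in place.
    strip_optimizer('best.pt')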
