From b88333432210546585d0b972bed4ec1f0451b0f8 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Fri, 4 Jun 2021 12:37:41 +0200
Subject: [PATCH] Suppress jit trace warning + graph once (#3454)

* Suppress jit trace warning + graph once

Suppress harmless jit trace warning on TensorBoard add_graph call. Also fix
multiple add_graph() calls bug, now only on batch 0.

* Update train.py
---
 train.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/train.py b/train.py
index 1041ec30c257..093a6197ff06 100644
--- a/train.py
+++ b/train.py
@@ -4,6 +4,7 @@
 import os
 import random
 import time
+import warnings
 from copy import deepcopy
 from pathlib import Path
 from threading import Thread
@@ -323,18 +324,19 @@ def train(hyp, opt, device, tb_writer=None):
                 mloss = (mloss * i + loss_items) / (i + 1)  # update mean losses
                 mem = '%.3gG' % (torch.cuda.memory_reserved() / 1E9 if torch.cuda.is_available() else 0)  # (GB)
                 s = ('%10s' * 2 + '%10.4g' * 6) % (
-                    '%g/%g' % (epoch, epochs - 1), mem, *mloss, targets.shape[0], imgs.shape[-1])
+                    f'{epoch}/{epochs - 1}', mem, *mloss, targets.shape[0], imgs.shape[-1])
                 pbar.set_description(s)

                 # Plot
                 if plots and ni < 3:
                     f = save_dir / f'train_batch{ni}.jpg'  # filename
                     Thread(target=plot_images, args=(imgs, targets, paths, f), daemon=True).start()
-                    if tb_writer:
-                        tb_writer.add_graph(torch.jit.trace(de_parallel(model), imgs, strict=False), [])  # model graph
-                        # tb_writer.add_image(f, result, dataformats='HWC', global_step=epoch)
+                    if tb_writer and ni == 0:
+                        with warnings.catch_warnings():
+                            warnings.simplefilter('ignore')  # suppress jit trace warning
+                            tb_writer.add_graph(torch.jit.trace(de_parallel(model), imgs, strict=False), [])  # graph
                 elif plots and ni == 10 and wandb_logger.wandb:
-                    wandb_logger.log({"Mosaics": [wandb_logger.wandb.Image(str(x), caption=x.name) for x in
+                    wandb_logger.log({'Mosaics': [wandb_logger.wandb.Image(str(x), caption=x.name) for x in
                                                   save_dir.glob('train*.jpg') if x.exists()]})

             # end batch ------------------------------------------------------------------------------------------------
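
Note: for readers outside the YOLOv5 codebase, the pattern applied above can be reproduced in a minimal standalone sketch: wrap torch.jit.trace in warnings.catch_warnings to silence the harmless TracerWarning noise, and guard add_graph with ni == 0 so the graph is written only on the first batch. The toy model, fake batch loop, and SummaryWriter path below are illustrative assumptions, not YOLOv5 code.

    # Minimal sketch of the pattern from this patch (illustrative, not YOLOv5 code).
    import warnings

    import torch
    import torch.nn as nn
    from torch.utils.tensorboard import SummaryWriter

    model = nn.Sequential(nn.Conv2d(3, 8, 3, padding=1), nn.ReLU())  # stand-in for de_parallel(model)
    tb_writer = SummaryWriter('runs/example')

    for ni, imgs in enumerate(torch.zeros(3, 2, 3, 64, 64)):  # fake batches
        if tb_writer and ni == 0:  # add the graph only once, on the first batch
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')  # suppress jit trace warning
                tb_writer.add_graph(torch.jit.trace(model, imgs, strict=False), [])  # graph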