From dcd422797f740fb897a7483c5e9a0ea83fe66816 Mon Sep 17 00:00:00 2001
From: ManoleAlexandru99
Date: Sun, 9 Apr 2023 17:50:21 +0300
Subject: [PATCH] CudnnBatchNormBackward potential fix #0012

---
 train.py      | 4 ++--
 utils/loss.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/train.py b/train.py
index a9df3cc1f8f0..760686d9ae75 100644
--- a/train.py
+++ b/train.py
@@ -268,7 +268,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                     f'Using {train_loader.num_workers * WORLD_SIZE} dataloader workers\n'
                     f"Logging results to {colorstr('bold', save_dir)}\n"
                     f'Starting training for {epochs} epochs...')
-    torch.autograd.set_detect_anomaly(True)
+
     for epoch in range(start_epoch, epochs):  # epoch ------------------------------------------------------------------
         callbacks.run('on_train_epoch_start')
         model.train()
@@ -532,7 +532,7 @@ def parse_opt(known=False):


 def main(opt, callbacks=Callbacks()):
-    print('\n---------- VERSION:', '#0011', '----------\n')
+    print('\n---------- VERSION:', '#0012', '----------\n')
     # Checks
     if RANK in {-1, 0}:
         print_args(vars(opt))
diff --git a/utils/loss.py b/utils/loss.py
index 8020cbeefea5..0c6570d7bbfd 100644
--- a/utils/loss.py
+++ b/utils/loss.py
@@ -87,7 +87,7 @@ def forward(self, pred, true):
         else:  # 'none'
             return loss

-def weighted_bce(y_pred, y_true, BETA=20):
+def weighted_bce(y_pred, y_true, BETA=2):
    weights = (y_true * (BETA - 1)) + 1
    bce = nn.BCEWithLogitsLoss(reduction='none')(y_pred, y_true)
    wbce = torch.mean(bce * weights)