diff --git a/train.py b/train.py
index a9df3cc1f8f0..760686d9ae75 100644
--- a/train.py
+++ b/train.py
@@ -268,7 +268,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                 f'Using {train_loader.num_workers * WORLD_SIZE} dataloader workers\n'
                 f"Logging results to {colorstr('bold', save_dir)}\n"
                 f'Starting training for {epochs} epochs...')
-    torch.autograd.set_detect_anomaly(True)
+
     for epoch in range(start_epoch, epochs):  # epoch ------------------------------------------------------------------
         callbacks.run('on_train_epoch_start')
         model.train()
@@ -532,7 +532,7 @@ def parse_opt(known=False):
 
 
 def main(opt, callbacks=Callbacks()):
-    print('\n---------- VERSION:', '#0011', '----------\n')
+    print('\n---------- VERSION:', '#0012', '----------\n')
     # Checks
     if RANK in {-1, 0}:
         print_args(vars(opt))
diff --git a/utils/loss.py b/utils/loss.py
index 8020cbeefea5..0c6570d7bbfd 100644
--- a/utils/loss.py
+++ b/utils/loss.py
@@ -87,7 +87,7 @@ def forward(self, pred, true):
         else:  # 'none'
             return loss
 
-def weighted_bce(y_pred, y_true, BETA=20):
+def weighted_bce(y_pred, y_true, BETA=2):
     weights = (y_true * (BETA - 1)) + 1
     bce = nn.BCEWithLogitsLoss(reduction='none')(y_pred, y_true)
     wbce = torch.mean(bce * weights)