diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 0f76c072291f4..10a6ad10d17cc 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -1059,7 +1059,7 @@ def run_pretrain_routine(self, model: LightningModule): # run tiny validation (if validation defined) # to make sure program won't crash during val - if not self.disable_validation and self.num_sanity_val_steps > 0: + if not self.disable_validation and self.num_sanity_val_steps: self.reset_val_dataloader(ref_model) # hook and callback @@ -1068,6 +1068,7 @@ def run_pretrain_routine(self, model: LightningModule): num_loaders = len(self.val_dataloaders) max_batches = [self.num_sanity_val_steps] * num_loaders + max_batches = [float('inf') if m == -1 else m for m in max_batches] eval_results = self._evaluate(model, self.val_dataloaders, max_batches,