From 8b50ecf1acfadbf2409562cb299b70bf1dda4c73 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Thu, 18 Jun 2020 23:50:23 +0200
Subject: [PATCH] support sanity_val_step=-1

---
 pytorch_lightning/trainer/trainer.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 0f76c072291f4..10a6ad10d17cc 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -1059,7 +1059,7 @@ def run_pretrain_routine(self, model: LightningModule):
 
         # run tiny validation (if validation defined)
         # to make sure program won't crash during val
-        if not self.disable_validation and self.num_sanity_val_steps > 0:
+        if not self.disable_validation and self.num_sanity_val_steps:
             self.reset_val_dataloader(ref_model)
 
             # hook and callback
@@ -1068,6 +1068,7 @@ def run_pretrain_routine(self, model: LightningModule):
 
             num_loaders = len(self.val_dataloaders)
             max_batches = [self.num_sanity_val_steps] * num_loaders
+            max_batches = [float('inf') if m == -1 else m for m in max_batches]
             eval_results = self._evaluate(model, self.val_dataloaders, max_batches,
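
A minimal sketch (not part of the patch) of how the added mapping behaves; the
values of num_sanity_val_steps and num_loaders below are hypothetical:

    # hypothetical inputs, chosen for illustration
    num_sanity_val_steps = -1   # -1 now means "run the full validation set"
    num_loaders = 2

    max_batches = [num_sanity_val_steps] * num_loaders
    # replace -1 with an unlimited batch count, keep explicit limits unchanged
    max_batches = [float('inf') if m == -1 else m for m in max_batches]

    print(max_batches)  # [inf, inf]; with num_sanity_val_steps=2 it would be [2, 2]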