Commit
enabled early stopping/checkpoint even without val step
williamFalcon authored and Borda committed Mar 4, 2020
1 parent b91fc01 commit 76c7872
Showing 1 changed file with 11 additions and 11 deletions.
22 changes: 11 additions & 11 deletions pytorch_lightning/trainer/training_loop.py
@@ -456,17 +456,6 @@ def run_training_epoch(self):
             if self.fast_dev_run or should_check_val:
                 self.run_evaluation(test_mode=self.testing)

-            # ---------------
-            # CHECKPOINTING, EARLY STOPPING
-            # ---------------
-            # save checkpoint even when no test or val step are defined
-            train_step_only = not self.is_overriden('validation_step')
-            if self.fast_dev_run or should_check_val or train_step_only:
-                self.call_checkpoint_callback()
-
-            if self.enable_early_stop:
-                self.early_stop_callback.check_metrics(self.callback_metrics)
-
             # when logs should be saved
             should_save_log = (batch_idx + 1) % self.log_save_interval == 0 or early_stop_epoch
             if should_save_log or self.fast_dev_run:
@@ -484,6 +473,17 @@ def run_training_epoch(self):
             self.global_step += 1
             self.total_batch_idx += 1

+            # ---------------
+            # CHECKPOINTING, EARLY STOPPING
+            # ---------------
+            # save checkpoint even when no test or val step are defined
+            train_step_only = not self.is_overriden('validation_step')
+            if self.fast_dev_run or should_check_val or train_step_only:
+                self.call_checkpoint_callback()
+
+            if self.enable_early_stop:
+                self.early_stop_callback.check_metrics(self.callback_metrics)
+
             # max steps reached, end training
             if self.max_steps is not None and self.max_steps == self.global_step:
                 break
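
The net effect of moving this block after the batch loop's bookkeeping is that the checkpoint callback, and early stopping when enabled, run at the end of each training epoch even for a LightningModule that defines no validation_step. Below is a minimal sketch of such a train-only setup, written against the 0.7-era pytorch-lightning API; the model, the metric name 'train_loss', and the specific Trainer arguments are illustrative assumptions, not part of this commit.

# Minimal sketch (assumed 0.7-era pytorch-lightning API): a model that only
# overrides training_step. With this commit, checkpointing and early stopping
# still run, driven by metrics that training_step exposes via its 'log' dict.
import torch
from torch.utils.data import DataLoader, TensorDataset

import pytorch_lightning as pl
from pytorch_lightning.callbacks import EarlyStopping


class TrainOnlyModel(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        x, y = batch
        loss = torch.nn.functional.mse_loss(self(x), y)
        # entries under 'log' end up in callback_metrics, which
        # early_stop_callback.check_metrics() inspects in the moved block above
        return {'loss': loss, 'log': {'train_loss': loss}}

    def train_dataloader(self):
        x, y = torch.randn(64, 4), torch.randn(64, 1)
        return DataLoader(TensorDataset(x, y), batch_size=8)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.01)


# No validation_step is overridden, so train_step_only is True in the diff
# above and call_checkpoint_callback() runs after every training epoch.
trainer = pl.Trainer(
    max_epochs=5,
    early_stop_callback=EarlyStopping(monitor='train_loss'),
)
trainer.fit(TrainOnlyModel())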
