Remove NaNs from loss in LRFinder (#1862)
* Remove NaNs from loss in LRFinder

* np.isfinite

* chlog

* add test

* chlog

Co-authored-by: Jirka <jirka@pytorchlightning.ai>
rohitgr7 and Borda committed May 19, 2020
1 parent a153fe4 commit ac76dfc
Showing 3 changed files with 28 additions and 4 deletions.
CHANGELOG.md (2 additions, 0 deletions)

@@ -14,6 +14,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Allow user to select individual TPU core to train on ([#1729](https://github.com/PyTorchLightning/pytorch-lightning/pull/1729))
 
+- Removed non-finite values from loss in `LRFinder` ([#1862](https://github.com/PyTorchLightning/pytorch-lightning/pull/1862))
+
 ### Deprecated
 
 ### Removed
pytorch_lightning/trainer/lr_finder.py (3 additions, 2 deletions)

@@ -321,8 +321,9 @@ def suggestion(self, skip_begin: int = 10, skip_end: int = 1):
         """
         try:
-            loss = self.results["loss"][skip_begin:-skip_end]
-            min_grad = (np.gradient(np.array(loss))).argmin()
+            loss = np.array(self.results["loss"][skip_begin:-skip_end])
+            loss = loss[np.isfinite(loss)]
+            min_grad = np.gradient(loss).argmin()
             self._optimal_idx = min_grad + skip_begin
             return self.results["lr"][self._optimal_idx]
         except Exception:
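To see why the change helps, here is a minimal standalone sketch of the filtering step (not the library code itself): dropping non-finite entries with np.isfinite before np.gradient keeps the argmin()-based suggestion stable even when the sweep diverged. The loss values below are made up for illustration.

    import numpy as np

    # Hypothetical loss curve from an LR sweep; the last step diverged to NaN.
    loss = np.array([1.0, 0.9, 0.7, 0.4, 0.35, np.nan])

    # Without filtering, the NaN propagates through np.gradient and makes
    # argmin() unreliable. Keeping only finite entries mirrors the fix above.
    finite_loss = loss[np.isfinite(loss)]
    min_grad = np.gradient(finite_loss).argmin()  # index of steepest descent

    print(min_grad)  # prints 2 for this made-up curve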
tests/trainer/test_lr_finder.py (23 additions, 2 deletions)

@@ -124,7 +124,7 @@ def test_call_to_trainer_method(tmpdir):
     # logger file to get meta
     trainer = Trainer(
         default_save_path=tmpdir,
-        max_epochs=5,
+        max_epochs=5
     )
 
     lrfinder = trainer.lr_find(model, mode='linear')
@@ -170,7 +170,7 @@ def test_suggestion_parameters_work(tmpdir):
     # logger file to get meta
     trainer = Trainer(
         default_save_path=tmpdir,
-        max_epochs=10,
+        max_epochs=10
     )
 
     lrfinder = trainer.lr_find(model)
@@ -179,3 +179,24 @@
 
     assert lr1 != lr2, \
         'Skipping parameter did not influence learning rate'
+
+
+def test_suggestion_with_non_finite_values(tmpdir):
+    """ Test that non-finite values do not alter results """
+
+    hparams = EvalModelTemplate.get_default_hparams()
+    model = EvalModelTemplate(hparams)
+
+    # logger file to get meta
+    trainer = Trainer(
+        default_save_path=tmpdir,
+        max_epochs=10
+    )
+
+    lrfinder = trainer.lr_find(model)
+    before_lr = lrfinder.suggestion()
+    lrfinder.results['loss'][-1] = float('nan')
+    after_lr = lrfinder.suggestion()
+
+    assert before_lr == after_lr, \
+        'Learning rate was altered because of non-finite loss values'