[WIP] Add structured result output #1989

Closed. Wants to merge 177 commits.
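Based on the example template added in this diff, the proposed API looks roughly like the sketch below. This is an illustration inferred from the WIP code; log_metric and pbar_metric are the names used in this branch and may change before merge.

import pytorch_lightning as pl
import torch.nn.functional as F

def training_step(self, batch, batch_idx):
    x, y = batch
    y_hat = self(x)
    loss = F.cross_entropy(y_hat, y)

    # Structured return: declare what Lightning should minimize,
    # checkpoint on, and early-stop on, instead of packing everything
    # into a free-form dict.
    result = pl.Result(minimize=loss, checkpoint_on=loss, early_stop_on=loss)
    result.log_metric('train_loss', loss)   # sent to the logger
    result.pbar_metric('pbar_loss', loss)   # shown in the progress bar
    return result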
177 commits, all by williamFalcon:

added result (May 28, 2020): 5193004
added train step return (May 28, 2020): c84d9db, 76892ff, 7d90199, 8ca8874, 7f4e308, 2e8d9c4, a1809aa
added basic template with new syntax and result object (May 28, 2020): b4277ad, 5bb86f9, 9008386, 2be8ad9, bb180a3, c943787, 725477b, a59fe36, f8eaceb, 20c1814, eacee5d, c8b5d21, b27150e, 10b8860, 91d3d15, 186881f, df5f2c0, bd97c9d, c7b3abc, a40201c, ff0f582, a90439f, f4e94e5, 70fc2f5, 60f572b, 6e66866
removed args thing (May 28, 2020): 3c66051, 500be23, 665000f, cf1de29, c5ab427, 8efd923
process results (May 28, 2020): 3c0cef1
docs (May 28, 2020): 458afb4, ebbe040, 9b56d44, 3559d58
docs (May 29, 2020): 30eaa2c, a362b3b, 476837c, 9fde8ef, 5248b5f, 68a0e0e, b3cc286, 928b842, b9ba7df, 4833e9a, f8d0b91, 06980a2, 399b202, b3a91c8, fbe35de, 9e3af3a, 18d5c23, fd20e1a, 858b5ac, bc7b165, ec633ac, 00954c3, 8293740, 11ac6fd, 6baa0a5, 6bfb2b4, 7e434db, a348e5a, 5e99904, cccee53
docs (May 30, 2020): 8e6c879, ca5254a, 60e265c, e31e088, 99a3457, 16d82a0, 772b40f, 91fc9d4, 5acd39e, 4a1acd1, eb6a36b, 8500551, bcad312, 5cb9026, b07bb94, 7028cc6, f3913ec, 3d38cec, da5fdaa, b8a575b, 7179a0a, 2290201, 4c74043, befe165, 2bb95df, fd61a0a, 90c9ee7, 87500fe, 828c519, 0c68825, 61e82ef, ce666a8, 48404da, c743ff4, 26d6694, 0dfd265, db47ada, ff37d97, 8826b03, 4f32e04, 7a948c4, 61b6bf0, 89c0bfb, 1847612, f644a2c, feabb64, 5379bdb, b18dc0b, 569236d, 4ae3943, 76494d7, e0fcc1d, 34c282e, 6d00501, 6b96497, 1da5abd, 8690f34
tests (May 30, 2020): a9f2967, 02dbd20, b0c65e1, 6452e49, 1ee2c1f, 4038313
tests (May 31, 2020): 4404faa, ddeb0b2, b7ed457, 77cafa4, 39c742b
added hparams test (Jun 6, 2020): f177dcd, 265ee64, 507eaeb
docs (Jun 7, 2020): b2e4664, b1a53e5, a344f98, d4f0e2f, 7221f23, e6da6b0, 9f94f4f, f17d40c
eval step (Jun 7, 2020): 09f38ad, 8558165, 12877bc, d4c56f7, 6a52e8d, 681041e, 34ace25, 5855753, 136cfa0, b98b29b, 057d256, d639742, 7065e8a, 869b6e8
training batch clean up (Jun 7, 2020): f327c7a, fe02cbe
training batch clean up (Jun 8, 2020): 0fc6e9c, 3df9b53, 332c720, ef3d835, 491b3f5, 098a731, 5b0a805
147 changes: 147 additions & 0 deletions pl_examples/models/simple_template.py
@@ -0,0 +1,147 @@
"""
Example template for defining a system.
"""
import os
from argparse import ArgumentParser

import torch
from torch import Tensor
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
import torchvision.transforms as transforms
from torchvision.datasets import MNIST
from torch.utils.data import DataLoader
import pytorch_lightning as pl


class SuperLitModel(pl.LightningModule):
"""
Sample model to show how to define a template.

Example:
"""

def __init__(self,
drop_prob: float = 0.2,
batch_size: int = 2,
in_features: int = 28 * 28,
learning_rate: float = 0.001 * 8,
optimizer_name: str = 'adam',
out_features: int = 10,
hidden_dim: int = 1000,
**kwargs
):
# init superclass
super().__init__()
self.drop_prob = drop_prob
self.batch_size = batch_size
self.in_features = in_features
self.learning_rate = learning_rate
self.optimizer_name = optimizer_name
self.out_features = out_features
self.hidden_dim = hidden_dim

self.c_d1 = nn.Linear(in_features=self.in_features,
out_features=self.hidden_dim)
self.c_d1_bn = nn.BatchNorm1d(self.hidden_dim)
self.c_d1_drop = nn.Dropout(self.drop_prob)

self.c_d2 = nn.Linear(in_features=self.hidden_dim,
out_features=self.out_features)

def forward(self, x):
"""
No special modification required for Lightning, define it as you normally would
in the `nn.Module` in vanilla PyTorch.
"""
x = self.c_d1(x.view(x.size(0), -1))
x = torch.tanh(x)
x = self.c_d1_bn(x)
x = self.c_d1_drop(x)
x = self.c_d2(x)
return x

def training_step(self, batch: Tensor, batch_idx: int):
"""
Lightning calls this inside the training loop with the data from the training dataloader
passed in as `batch`.
"""
# forward pass
x, y = batch
y_hat = self(x)
loss = F.cross_entropy(y_hat, y)

# structure the return from the training loop
step_result = pl.Result(
minimize=loss,
checkpoint_on=loss,
early_stop_on=loss,
)

step_result.log_metric('train_loss', loss)
step_result.pbar_metric('pbar_loss', loss)

# WIP: alternative returns under consideration
# return loss
# return step_result
return {'loss': loss, 'log': {'something': 1}, 'random': 'af'}

def validation_step(self, batch: Tensor, batch_idx: int):
# forward pass
x, y = batch
y_hat = self(x)
val_loss = F.cross_entropy(y_hat, y)

# WIP: structured Result built here; the legacy dict is still returned below
result = pl.Result()
result.log_metric('val_loss', val_loss)
result.pbar_metric('pbar_loss', val_loss)

return {'val_loss': val_loss, 'log': {'aa': val_loss}, 'progress_bar': {'aa': val_loss}}

def configure_optimizers(self):
"""
Return whatever optimizers and learning rate schedulers you want here.
At least one optimizer is required.
"""
return optim.Adam(self.parameters(), lr=self.learning_rate)

@staticmethod
def add_model_specific_args(parent_parser): # pragma: no-cover
"""
Define parameters that only apply to this model
"""
parser = ArgumentParser(parents=[parent_parser], add_help=False)
parser.add_argument('--in_features', default=28 * 28, type=int)
Review comment (Member): this can be parsed automatically from Models units arguments...

parser.add_argument('--out_features', default=10, type=int)
parser.add_argument('--hidden_dim', default=5000, type=int)
parser.add_argument('--drop_prob', default=0.2, type=float)
parser.add_argument('--learning_rate', default=0.001, type=float)
parser.add_argument('--data_dir', default='.', type=str)
parser.add_argument('--batch_size', default=64, type=int)
return parser


if __name__ == '__main__':

# add trainer args
parser = ArgumentParser()
parser = pl.Trainer.add_argparse_args(parser)

# add model args
parser = SuperLitModel.add_model_specific_args(parser)
args = parser.parse_args()

# init data, model
mnist_train = MNIST(args.data_dir, train=True, download=True, transform=transforms.ToTensor())
mnist_train = DataLoader(mnist_train, batch_size=args.batch_size, num_workers=0)
model = SuperLitModel(**vars(args))

# init trainer
trainer = pl.Trainer.from_argparse_args(args)

trainer.fit(
model,
train_dataloader=mnist_train,
val_dataloaders=mnist_train
)
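The review comment above suggests deriving these argparse options automatically from the model's arguments. A hypothetical sketch of that idea using inspect.signature (add_args_from_init is an invented helper name; nothing like it is part of this PR):

import inspect
from argparse import ArgumentParser

def add_args_from_init(cls, parent_parser):
    # Derive one --flag per defaulted __init__ keyword argument, using the
    # default value's type for parsing.
    parser = ArgumentParser(parents=[parent_parser], add_help=False)
    for name, param in inspect.signature(cls.__init__).parameters.items():
        if name == 'self' or param.default is inspect.Parameter.empty:
            continue  # skip self, *args/**kwargs, and non-defaulted params
        parser.add_argument(f'--{name}', default=param.default,
                            type=type(param.default))
    return parser

Applied to SuperLitModel, this would register --drop_prob, --batch_size, --in_features, and so on, matching add_model_specific_args without the hand-written boilerplate.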
2 changes: 2 additions & 0 deletions pytorch_lightning/__init__.py
@@ -55,13 +55,15 @@
from pytorch_lightning.trainer.seed import seed_everything
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.core import data_loader
+from pytorch_lightning.core.step_result import Result

__all__ = [
'Trainer',
'LightningModule',
'Callback',
'data_loader',
'seed_everything',
+    'Result',
]

# necessary for regular bolts imports. Skip exception since bolts is not always installed
13 changes: 9 additions & 4 deletions pytorch_lightning/callbacks/early_stopping.py
@@ -110,15 +110,20 @@ def on_train_start(self, trainer, pl_module):
self.best = torch_inf if self.monitor_op == torch.lt else -torch_inf

def on_validation_end(self, trainer, pl_module):
-        self._run_early_stopping_check(trainer, pl_module)
+        return self._run_early_stopping_check(trainer, pl_module)

    def _run_early_stopping_check(self, trainer, pl_module):
        logs = trainer.callback_metrics
        stop_training = False
-        if not self._validate_condition_metric(logs):
-            return stop_training
-
-        current = logs.get(self.monitor)
+        used_structured_result = 'early_stop_on' in logs
+        if not used_structured_result:
+            if not self._validate_condition_metric(logs):
+                return stop_training
+            current = logs.get(self.monitor)
+        else:
+            current = logs['early_stop_on']

if not isinstance(current, torch.Tensor):
current = torch.tensor(current)

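The effect of this change: when a structured Result has populated trainer.callback_metrics with an 'early_stop_on' entry, EarlyStopping uses that value directly and skips the monitor-key validation; otherwise it falls back to the legacy lookup. A minimal standalone illustration of that dispatch (a sketch, not library code):

import torch

def pick_early_stop_value(logs, monitor='val_loss'):
    # Lookup order introduced by this diff: a structured Result's
    # 'early_stop_on' entry wins over the legacy monitor key.
    if 'early_stop_on' in logs:
        return logs['early_stop_on']
    return logs.get(monitor)

print(pick_early_stop_value({'early_stop_on': torch.tensor(0.3)}))  # tensor(0.3000)
print(pick_early_stop_value({'val_loss': torch.tensor(0.5)}))       # tensor(0.5000)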
3 changes: 2 additions & 1 deletion pytorch_lightning/core/__init__.py
@@ -334,6 +334,7 @@ def training_step(self, batch, batch_idx):

from pytorch_lightning.core.decorators import data_loader
from pytorch_lightning.core.lightning import LightningModule
+from pytorch_lightning.core.step_result import Result

-__all__ = ['LightningModule', 'data_loader']
+__all__ = ['LightningModule', 'data_loader', 'Result']
# __call__ = __all__
30 changes: 11 additions & 19 deletions pytorch_lightning/core/hooks.py
@@ -115,6 +115,15 @@ def on_after_backward(self):
global_step=self.trainer.global_step)

"""
+    def amp_scale_loss(self, unscaled_loss, optimizer, optimizer_idx):
+        if self.trainer.use_native_amp:
+            scaled_loss = self.trainer.scaler.scale(unscaled_loss)
+
+        else:
+            # TODO: remove in v0.8.0
+            scaled_loss = amp.scale_loss(unscaled_loss, optimizer)
+
+        return scaled_loss

def backward(self, trainer, loss: Tensor, optimizer: Optimizer, optimizer_idx: int) -> None:
"""
@@ -134,27 +143,10 @@ def backward(self, trainer, loss: Tensor, optimizer: Optimizer, optimizer_idx: i
Example::

            def backward(self, use_amp, loss, optimizer):
-                if use_amp:
-                    with amp.scale_loss(loss, optimizer) as scaled_loss:
-                        scaled_loss.backward()
-                else:
-                    loss.backward()
+                loss.backward()

"""
-        if trainer.precision == 16:
-            # .backward is not special on 16-bit with TPUs
-            if trainer.on_tpu:
-                return
-
-            if self.trainer.use_native_amp:
-                self.trainer.scaler.scale(loss).backward()
-
-            # TODO: remove in v0.8.0
-            else:
-                with amp.scale_loss(loss, optimizer) as scaled_loss:
-                    scaled_loss.backward()
-        else:
-            loss.backward()
+        loss.backward()

def transfer_batch_to_device(self, batch: Any, device: torch.device) -> Any:
"""
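Taken together, the hooks change factors loss scaling out into amp_scale_loss and reduces backward() to a plain loss.backward(). A runnable sketch of the resulting call pattern under native AMP (hypothetical trainer-side wiring; only the hook bodies above are part of this diff):

import torch

model = torch.nn.Linear(4, 1)
scaler = torch.cuda.amp.GradScaler(enabled=torch.cuda.is_available())

loss = model(torch.randn(2, 4)).mean()
scaled_loss = scaler.scale(loss)   # what amp_scale_loss does on the native-AMP path
scaled_loss.backward()             # what the simplified backward() now does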