Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Regression metrics #2221

Merged
merged 3 commits into from
Jun 17, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions docs/source/metrics.rst
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,18 @@ ROC
.. autoclass:: pytorch_lightning.metrics.classification.ROC
:noindex:

MAE
^^^

.. autoclass:: pytorch_lightning.metrics.regression.MAE
:noindex:

MSE
^^^

.. autoclass:: pytorch_lightning.metrics.regression.MSE
:noindex:

MulticlassROC
^^^^^^^^^^^^^

Expand All @@ -192,6 +204,18 @@ MulticlassPrecisionRecall
.. autoclass:: pytorch_lightning.metrics.classification.MulticlassPrecisionRecall
:noindex:

RMSE
^^^^

.. autoclass:: pytorch_lightning.metrics.regression.RMSE
:noindex:

RMSLE
^^^^^

.. autoclass:: pytorch_lightning.metrics.regression.RMSLE
:noindex:

--------------

Functional Metrics
Expand Down
189 changes: 189 additions & 0 deletions pytorch_lightning/metrics/regression.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
import torch.nn.functional as F
import torch
from pytorch_lightning.metrics.metric import Metric

__all__ = ['MSE', 'RMSE', 'MAE', 'RMSLE']


class MSE(Metric):
    """
    Computes the mean squared loss.
    """

    def __init__(
            self,
            reduction: str = 'elementwise_mean',
    ):
        """
        Args:
            reduction: a method for reducing mse over labels (default: takes the mean)
                Available reduction methods:
                - elementwise_mean: takes the mean
                - none: pass array
                - sum: add elements

        Example:

            >>> pred = torch.tensor([0., 1, 2, 3])
            >>> target = torch.tensor([0., 1, 2, 2])
            >>> metric = MSE()
            >>> metric(pred, target)
            tensor(0.2500)

        """
        super().__init__(name='mse')
        # torch.nn.functional spells the default reduction 'mean'; translate
        # the public 'elementwise_mean' name once here.
        if reduction == 'elementwise_mean':
            reduction = 'mean'
        self.reduction = reduction

    def forward(self, pred: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
        """
        Actual metric computation

        Args:
            pred: predicted labels
            target: ground truth labels

        Return:
            A Tensor with the mse loss.
        """
        # ``reduction`` must be passed by keyword: the third positional
        # parameter of ``F.mse_loss`` is the deprecated ``size_average``, so a
        # positional string would silently force 'mean' reduction.
        return F.mse_loss(pred, target, reduction=self.reduction)


class RMSE(Metric):
    """
    Computes the root mean squared loss.
    """

    def __init__(
            self,
            reduction: str = 'elementwise_mean',
    ):
        """
        Args:
            reduction: a method for reducing rmse over labels (default: takes the mean)
                Available reduction methods:
                - elementwise_mean: takes the mean
                - none: pass array
                - sum: add elements

        Example:

            >>> pred = torch.tensor([0., 1, 2, 3])
            >>> target = torch.tensor([0., 1, 2, 2])
            >>> metric = RMSE()
            >>> metric(pred, target)
            tensor(0.5000)

        """
        super().__init__(name='rmse')
        # torch.nn.functional spells the default reduction 'mean'; translate
        # the public 'elementwise_mean' name once here.
        if reduction == 'elementwise_mean':
            reduction = 'mean'
        self.reduction = reduction

    def forward(self, pred: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
        """
        Actual metric computation

        Args:
            pred: predicted labels
            target: ground truth labels

        Return:
            A Tensor with the rmse loss.
        """
        # ``reduction`` must be passed by keyword: the third positional
        # parameter of ``F.mse_loss`` is the deprecated ``size_average``, so a
        # positional string would silently force 'mean' reduction.
        return torch.sqrt(F.mse_loss(pred, target, reduction=self.reduction))


class MAE(Metric):
    """
    Computes the mean absolute loss or L1-loss.
    """

    def __init__(
            self,
            reduction: str = 'elementwise_mean',
    ):
        """
        Args:
            reduction: a method for reducing mae over labels (default: takes the mean)
                Available reduction methods:
                - elementwise_mean: takes the mean
                - none: pass array
                - sum: add elements

        Example:

            >>> pred = torch.tensor([0., 1, 2, 3])
            >>> target = torch.tensor([0., 1, 2, 2])
            >>> metric = MAE()
            >>> metric(pred, target)
            tensor(0.2500)

        """
        super().__init__(name='mae')
        # torch.nn.functional spells the default reduction 'mean'; translate
        # the public 'elementwise_mean' name once here.
        if reduction == 'elementwise_mean':
            reduction = 'mean'
        self.reduction = reduction

    def forward(self, pred: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
        """
        Actual metric computation

        Args:
            pred: predicted labels
            target: ground truth labels

        Return:
            A Tensor with the mae loss.
        """
        # ``reduction`` must be passed by keyword: the third positional
        # parameter of ``F.l1_loss`` is the deprecated ``size_average``, so a
        # positional string would silently force 'mean' reduction.
        return F.l1_loss(pred, target, reduction=self.reduction)


class RMSLE(Metric):
    """
    Computes the root mean squared log loss.
    """

    def __init__(
            self,
            reduction: str = 'elementwise_mean',
    ):
        """
        Args:
            reduction: a method for reducing rmsle over labels (default: takes the mean)
                Available reduction methods:
                - elementwise_mean: takes the mean
                - none: pass array
                - sum: add elements

        Example:

            >>> pred = torch.tensor([0., 1, 2, 3])
            >>> target = torch.tensor([0., 1, 2, 2])
            >>> metric = RMSLE()
            >>> metric(pred, target)
            tensor(0.0207)

        """
        super().__init__(name='rmsle')
        # torch.nn.functional spells the default reduction 'mean'; translate
        # the public 'elementwise_mean' name once here.
        if reduction == 'elementwise_mean':
            reduction = 'mean'
        self.reduction = reduction

    def forward(self, pred: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
        """
        Actual metric computation

        Args:
            pred: predicted labels
            target: ground truth labels

        Return:
            A Tensor with the rmsle loss.
        """
        # ``reduction`` must be passed by keyword: the third positional
        # parameter of ``F.mse_loss`` is the deprecated ``size_average``, so a
        # positional string would silently force 'mean' reduction.
        return F.mse_loss(torch.log(pred + 1), torch.log(target + 1),
                          reduction=self.reduction)
66 changes: 66 additions & 0 deletions tests/metrics/test_regression.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import pytest
import torch

from pytorch_lightning.metrics.regression import (
MAE, MSE, RMSE, RMSLE
)


@pytest.mark.parametrize(['pred', 'target', 'exp'], [
    pytest.param([0., 1., 2., 3.], [0., 1., 2., 2.], .25),
    pytest.param([4., 3., 2., 1.], [1., 4., 3., 2.], 3.)
])
def test_mse(pred, target, exp):
    """MSE metric exposes its name and returns the exact mean squared error."""
    metric = MSE()
    assert metric.name == 'mse'

    result = metric(pred=torch.tensor(pred),
                    target=torch.tensor(target))

    assert isinstance(result, torch.Tensor)
    assert result.item() == exp


@pytest.mark.parametrize(['pred', 'target', 'exp'], [
    pytest.param([0., 1., 2., 3.], [0., 1., 2., 2.], .5),
    pytest.param([4., 3., 2., 1.], [1., 4., 3., 2.], 1.7321)
])
def test_rmse(pred, target, exp):
    """RMSE metric exposes its name and matches the expected value to 1e-3."""
    metric = RMSE()
    assert metric.name == 'rmse'

    result = metric(pred=torch.tensor(pred),
                    target=torch.tensor(target))

    assert isinstance(result, torch.Tensor)
    # sqrt produces an irrational value for the second case; compare loosely
    assert pytest.approx(result.item(), rel=1e-3) == exp


@pytest.mark.parametrize(['pred', 'target', 'exp'], [
    pytest.param([0., 1., 2., 3.], [0., 1., 2., 2.], .25),
    pytest.param([4., 3., 2., 1.], [1., 4., 3., 2.], 1.5)
])
def test_mae(pred, target, exp):
    """MAE metric exposes its name and returns the exact mean absolute error."""
    metric = MAE()
    assert metric.name == 'mae'

    result = metric(pred=torch.tensor(pred),
                    target=torch.tensor(target))

    assert isinstance(result, torch.Tensor)
    assert result.item() == exp


@pytest.mark.parametrize(['pred', 'target', 'exp'], [
    pytest.param([0., 1., 2., 3.], [0., 1., 2., 2.], .0207),
    pytest.param([4., 3., 2., 1.], [1., 4., 3., 2.], .2841)
])
def test_rmsle(pred, target, exp):
    """RMSLE metric exposes its name and matches the expected value to 1e-3."""
    metric = RMSLE()
    assert metric.name == 'rmsle'

    result = metric(pred=torch.tensor(pred),
                    target=torch.tensor(target))

    assert isinstance(result, torch.Tensor)
    # log-space values are irrational; compare with relative tolerance
    assert pytest.approx(result.item(), rel=1e-3) == exp