From 16ce8814218195324407fb24e3602ac59373fc09 Mon Sep 17 00:00:00 2001
From: Nima Sarang
Date: Tue, 9 Jun 2020 13:38:03 +0430
Subject: [PATCH] add some tests

---
 tests/base/model_train_steps.py      | 15 ++++++++++++---
 tests/base/model_valid_epoch_ends.py |  6 +++++-
 2 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/tests/base/model_train_steps.py b/tests/base/model_train_steps.py
index fcd020d852126f..59fde06835c09e 100644
--- a/tests/base/model_train_steps.py
+++ b/tests/base/model_train_steps.py
@@ -21,13 +21,22 @@ def training_step(self, batch, batch_idx, optimizer_idx=None):
 
         # calculate loss
         loss_val = self.loss(y, y_hat)
+        loss_scalar = loss_val.item()
 
         # alternate possible outputs to test
+        if batch_idx % 2 == 0:
+            return OrderedDict({
+                'loss': loss_val,
+                'progress_bar': {'some_val': loss_val * loss_val},
+                'log': {'train_some_val': loss_val * loss_val},
+            })
+
+        # return scalars for "log" and "progress_bar"
         output = OrderedDict({
             'loss': loss_val,
-            'progress_bar': {'some_val': loss_val * loss_val},
-            'log': {'train_some_val': loss_val * loss_val},
-        })
+            'progress_bar': {'some_val': loss_scalar * loss_scalar},
+            'log': {'train_some_val': loss_scalar * loss_scalar},
+        })
         return output
 
     def training_step__inf_loss(self, batch, batch_idx, optimizer_idx=None):
diff --git a/tests/base/model_valid_epoch_ends.py b/tests/base/model_valid_epoch_ends.py
index 6c4844d3e5c9e8..7b03d524d279b4 100644
--- a/tests/base/model_valid_epoch_ends.py
+++ b/tests/base/model_valid_epoch_ends.py
@@ -23,7 +23,11 @@ def _mean(res, key):
 
         val_loss_mean = _mean(outputs, 'val_loss')
         val_acc_mean = _mean(outputs, 'val_acc')
-        metrics_dict = {'val_loss': val_loss_mean.item(), 'val_acc': val_acc_mean.item()}
+        # alternate between tensor and scalar
+        if self.current_epoch % 2:
+            metrics_dict = {'val_loss': val_loss_mean.item(), 'val_acc': val_acc_mean.item()}
+        else:
+            metrics_dict = {'val_loss': val_loss_mean, 'val_acc': val_acc_mean}
         results = {'progress_bar': metrics_dict, 'log': metrics_dict}
 
         return results