diff --git a/pytorch_lightning/trainer/training_io.py b/pytorch_lightning/trainer/training_io.py
index 09a65af0485de..0e9d00c6c4019 100644
--- a/pytorch_lightning/trainer/training_io.py
+++ b/pytorch_lightning/trainer/training_io.py
@@ -322,7 +322,7 @@ def dump_checkpoint(self):
 
         # restore native amp scaling
         if self.use_amp and self.use_native_amp and 'native_amp_scaling_state' in checkpoint:
-            checkpoint['native_amp_scaling_state'] = self.scaler.state_dict
+            checkpoint['native_amp_scaling_state'] = self.scaler.state_dict()
 
         if hasattr(model, "hparams"):
             is_namespace = isinstance(model.hparams, Namespace)