From 593837e1da24ff6c942b24ed803fc1496a304609 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Mon, 29 Jun 2020 06:46:19 -0400
Subject: [PATCH] fix amp wrong call

---
 pytorch_lightning/trainer/training_loop.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/training_loop.py b/pytorch_lightning/trainer/training_loop.py
index be0735701850a..1f6a36eb56c89 100644
--- a/pytorch_lightning/trainer/training_loop.py
+++ b/pytorch_lightning/trainer/training_loop.py
@@ -743,7 +743,8 @@ def call_optimizer_step(self, optimizer, opt_idx, batch_idx, split_batch):
             # when using 16-bit
             else:
                 native_amp = self.use_amp and NATIVE_AMP_AVALAIBLE
-                model.optimizer_step(self.current_epoch, batch_idx, optimizer, opt_idx, lambda_closure, native_amp)
+                model.optimizer_step(self.current_epoch, batch_idx, optimizer, opt_idx, lambda_closure,
+                                     using_native_amp=native_amp)
 
             # in native 16-bit we need to update scaler after optimizer step
             if self.use_amp and NATIVE_AMP_AVALAIBLE: