FP16 EMA bug fix
glenn-jocher committed Jul 13, 2020
1 parent 2b18924 commit 2377e5f
Showing 1 changed file with 2 additions and 2 deletions.
utils/torch_utils.py (2 additions, 2 deletions)
@@ -195,8 +195,8 @@ class ModelEMA:
     def __init__(self, model, decay=0.9999, updates=0):
         # Create EMA
         self.ema = deepcopy(model.module if is_parallel(model) else model).eval()  # FP32 EMA
-        if next(model.parameters()).device.type != 'cpu':
-            self.ema.half()  # FP16 EMA
+        # if next(model.parameters()).device.type != 'cpu':
+        #     self.ema.half()  # FP16 EMA
         self.updates = updates  # number of EMA updates
         self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # decay exponential ramp (to help early epochs)
         for p in self.ema.parameters():
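For context, here is a minimal sketch (not from the repository; the weight values are illustrative only) of the failure this commit addresses. With decay near 0.9999, each EMA update moves a weight by roughly 1e-4 of its distance to the model weight, but around 1.0 the FP16 grid spacing is about 1e-3, so an FP16 accumulator rounds the tiny increment away on every step and the EMA never tracks the model:

import torch

decay = 0.9999                       # same default decay as ModelEMA
w = torch.tensor(1.01)               # stand-in model weight (illustrative)
ema_fp32 = torch.tensor(1.0)         # FP32 EMA accumulator
ema_fp16 = torch.tensor(1.0).half()  # FP16 EMA accumulator

for _ in range(1000):
    ema_fp32 = decay * ema_fp32 + (1 - decay) * w
    # round-trip through FP16 storage each step, as an FP16 EMA would
    ema_fp16 = (decay * ema_fp16.float() + (1 - decay) * w).half()

print(ema_fp32.item())  # ~1.00095: drifting toward 1.01 as expected
print(ema_fp16.item())  # 1.0 exactly: every increment rounds away in FP16

Keeping the accumulator in FP32, as the commit does by commenting out the .half() cast, avoids this stall at negligible memory cost, since only a single EMA copy of the weights is kept.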
