Commit 41ff83a9 authored by Glenn Jocher

update torch_utils.py to FP16 EMA

Parent 8b38e6f4
@@ -195,6 +195,8 @@ class ModelEMA:
     def __init__(self, model, decay=0.9999, updates=0):
         # Create EMA
         self.ema = deepcopy(model.module if is_parallel(model) else model).eval()  # FP32 EMA
+        if next(model.parameters()).device.type != 'cpu':
+            self.ema.half()  # FP16 EMA
         self.updates = updates  # number of EMA updates
         self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # decay exponential ramp (to help early epochs)
         for p in self.ema.parameters():
...
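For context, the change keeps the EMA shadow weights in half precision whenever the model lives on GPU, roughly halving the EMA's memory footprint, while the training model itself stays in FP32. Below is a minimal, self-contained sketch of how such an FP16 EMA is typically built and updated from an FP32 model. It is not the repository's exact code; the class name SketchEMA and the standalone update() shown here are illustrative assumptions based on the usual EMA pattern.

# Sketch only: illustrates keeping the EMA copy in FP16 on GPU and blending
# FP32 training weights into it at each update step.
import math
from copy import deepcopy

import torch
import torch.nn as nn


class SketchEMA:  # hypothetical name, for illustration only
    def __init__(self, model, decay=0.9999, updates=0):
        self.ema = deepcopy(model).eval()  # shadow copy of the model
        if next(model.parameters()).device.type != 'cpu':
            self.ema.half()  # store the shadow weights in FP16 on GPU
        self.updates = updates  # number of EMA updates so far
        self.decay = lambda x: decay * (1 - math.exp(-x / 2000))  # ramp decay up early in training
        for p in self.ema.parameters():
            p.requires_grad_(False)  # the EMA copy is never trained directly

    def update(self, model):
        # Blend the current FP32 model weights into the (possibly FP16) EMA copy.
        with torch.no_grad():
            self.updates += 1
            d = self.decay(self.updates)
            msd = model.state_dict()
            for k, v in self.ema.state_dict().items():
                if v.dtype.is_floating_point:  # skip integer buffers, e.g. BatchNorm counters
                    v *= d
                    v += (1.0 - d) * msd[k].detach().to(v.dtype)


if __name__ == '__main__':
    model = nn.Linear(4, 2)  # tiny stand-in model; on CPU the EMA stays FP32
    ema = SketchEMA(model)
    for _ in range(3):
        ema.update(model)

If full-precision weights are needed at evaluation or checkpoint time, the FP16 EMA copy can be cast back with .float().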