lukovnikov 2019-03-18 15:13:35 +01:00
parent 2283dcca5e
commit 19cc2c084e


@@ -26,7 +26,8 @@ logger = logging.getLogger(__name__)
 def warmup_cosine(x, warmup=0.002):
     if x < warmup:
         return x/warmup
-    return 0.5 * (1.0 + torch.cos(math.pi * x))
+    x_ = (x - warmup) / (1 - warmup)  # progress after warmup
+    return 0.5 * (1. + math.cos(math.pi * x_))
 def warmup_constant(x, warmup=0.002):
     """ Linearly increases learning rate over `warmup`*`t_total` (as provided to OpenAIAdam) training steps.