mirror of https://github.com/huggingface/transformers.git
same
This commit is contained in:
parent 2283dcca5e
commit 19cc2c084e
@@ -26,7 +26,8 @@ logger = logging.getLogger(__name__)
 def warmup_cosine(x, warmup=0.002):
     if x < warmup:
         return x/warmup
-    return 0.5 * (1.0 + torch.cos(math.pi * x))
+    x_ = (x - warmup) / (1 - warmup) # progress after warmup
+    return 0.5 * (1. + math.cos(math.pi * x_))
 
 def warmup_constant(x, warmup=0.002):
     """ Linearly increases learning rate over `warmup`*`t_total` (as provided to OpenAIAdam) training steps.
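For context, a minimal sketch of how the corrected schedule behaves, assuming the returned value is used as a multiplier on the base learning rate (as the OpenAIAdam schedule functions are); the base_lr value here is illustrative and not taken from the commit:

import math

def warmup_cosine(x, warmup=0.002):
    # x is training progress in [0, 1]: ramp up linearly during warmup,
    # then decay with a half cosine over the remaining progress.
    if x < warmup:
        return x / warmup
    x_ = (x - warmup) / (1 - warmup)  # progress after warmup
    return 0.5 * (1. + math.cos(math.pi * x_))

# Quick sanity check of the multiplier at a few points in training.
base_lr = 6.25e-5  # illustrative base learning rate, not from the commit
for progress in (0.0, 0.001, 0.002, 0.5, 1.0):
    print(f"progress={progress:.3f} lr={base_lr * warmup_cosine(progress):.3e}")

With the fix, the cosine decay starts at the full learning rate right where the linear warmup ends (multiplier 1.0 at x == warmup) and reaches 0 at the end of training, instead of applying torch.cos to the raw progress including the warmup phase.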