Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-14 01:58:22 +06:00)

* Replace swish with silu
* revert nn.silu to nn.swish due to older version
* simplify optimized silu conditional and fix format
* Update activations.py
* Update activations_tf.py
* Update modeling_flax_utils.py
* Update modeling_openai.py
* add swish testcase
* add pytorch swish testcase
* Add more robust python version check
* more formatting fixes

Co-authored-by: TFUsers <TFUsers@gmail.com>
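The substance of the change: both the "swish" and "silu" keys resolve to SiLU, silu(x) = x * sigmoid(x), with a pure-Python fallback for PyTorch builds that predate the native op. A minimal sketch of that fallback pattern, not the actual diff; the _silu_python name and the 1.7 version cutoff are assumptions:

import torch
from packaging import version


def _silu_python(x):
    # Pure-PyTorch fallback: silu(x) = x * sigmoid(x), a.k.a. swish
    return x * torch.sigmoid(x)


# Assumed cutoff: only newer torch ships torch.nn.functional.silu;
# older builds fall back to the Python definition above.
if version.parse(torch.__version__) < version.parse("1.7"):
    silu = _silu_python
else:
    silu = torch.nn.functional.silu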
32 lines · 985 B · Python
import unittest

from transformers import is_torch_available
from transformers.testing_utils import require_torch


if is_torch_available():
    import torch

    from transformers.activations import _gelu_python, gelu_new, get_activation


@require_torch
class TestActivations(unittest.TestCase):
    def test_gelu_versions(self):
        x = torch.Tensor([-100, -1, -0.1, 0, 0.1, 1.0, 100])
        torch_builtin = get_activation("gelu")
        self.assertTrue(torch.eq(_gelu_python(x), torch_builtin(x)).all().item())
        self.assertFalse(torch.eq(_gelu_python(x), gelu_new(x)).all().item())

    def test_get_activation(self):
        get_activation("swish")
        get_activation("silu")
        get_activation("relu")
        get_activation("tanh")
        get_activation("gelu_new")
        get_activation("gelu_fast")
        with self.assertRaises(KeyError):
            get_activation("bogus")
        with self.assertRaises(KeyError):
            get_activation(None)
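test_gelu_versions hinges on the erf-based GELU and the tanh approximation behind gelu_new agreeing closely but not bitwise. A standalone sketch of the two standard formulas, assuming the textbook definitions rather than quoting transformers' implementations:

import math

import torch


def gelu_exact(x):
    # Exact GELU: 0.5 * x * (1 + erf(x / sqrt(2)))
    return 0.5 * x * (1.0 + torch.erf(x / math.sqrt(2.0)))


def gelu_tanh(x):
    # Tanh approximation ("gelu_new"-style):
    # 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
    return 0.5 * x * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3.0))))


x = torch.tensor([-1.0, -0.1, 0.0, 0.1, 1.0])
print(torch.allclose(gelu_exact(x), gelu_tanh(x), atol=1e-3))  # True: the curves nearly coincide
print(torch.equal(gelu_exact(x), gelu_tanh(x)))                # typically False: not bitwise equal

This is why the test asserts elementwise equality against the built-in gelu but asserts the negation against gelu_new.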