mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-03 21:00:08 +06:00
tests/roformer: fix a couple of roformer tests on GPUs (#38570)
Some checks are pending
Self-hosted runner (benchmark) / Benchmark (aws-g5-4xlarge-cache) (push) Waiting to run
Build documentation / build (push) Waiting to run
New model PR merged notification / Notify new model (push) Waiting to run
Slow tests on important models (on Push - A10) / Get all modified files (push) Waiting to run
Slow tests on important models (on Push - A10) / Slow & FA2 tests (push) Blocked by required conditions
Self-hosted runner (push-caller) / Check if setup was changed (push) Waiting to run
Self-hosted runner (push-caller) / build-docker-containers (push) Blocked by required conditions
Self-hosted runner (push-caller) / Trigger Push CI (push) Blocked by required conditions
Secret Leaks / trufflehog (push) Waiting to run
Update Transformers metadata / build_and_package (push) Waiting to run
Some checks are pending
Self-hosted runner (benchmark) / Benchmark (aws-g5-4xlarge-cache) (push) Waiting to run
Build documentation / build (push) Waiting to run
New model PR merged notification / Notify new model (push) Waiting to run
Slow tests on important models (on Push - A10) / Get all modified files (push) Waiting to run
Slow tests on important models (on Push - A10) / Slow & FA2 tests (push) Blocked by required conditions
Self-hosted runner (push-caller) / Check if setup was changed (push) Waiting to run
Self-hosted runner (push-caller) / build-docker-containers (push) Blocked by required conditions
Self-hosted runner (push-caller) / Trigger Push CI (push) Blocked by required conditions
Secret Leaks / trufflehog (push) Waiting to run
Update Transformers metadata / build_and_package (push) Waiting to run
Fix the "RuntimeError: Expected all tensors to be on the same device, but found at least two devices, cuda:0 and cpu" error that occurs when running the following roformer tests on GPUs (CUDA or XPU): ``` tests/models/roformer/test_modeling_roformer.py::RoFormerSinusoidalPositionalEmbeddingTest::test_basic tests/models/roformer/test_modeling_roformer.py::RoFormerSelfAttentionRotaryPositionEmbeddingTest::test_apply_rotary_position_embeddings ``` Signed-off-by: Dmitry Rogozhkin <dmitry.v.rogozhkin@intel.com>
This commit is contained in:
parent
b9c17c5dc0
commit
8046aff520
@ -513,8 +513,9 @@ class RoFormerSinusoidalPositionalEmbeddingTest(unittest.TestCase):
|
|||||||
|
|
||||||
def test_basic(self):
|
def test_basic(self):
|
||||||
input_ids = torch.tensor([[4, 10]], dtype=torch.long, device=torch_device)
|
input_ids = torch.tensor([[4, 10]], dtype=torch.long, device=torch_device)
|
||||||
emb1 = RoFormerSinusoidalPositionalEmbedding(num_positions=6, embedding_dim=6).to(torch_device)
|
emb1 = RoFormerSinusoidalPositionalEmbedding(num_positions=6, embedding_dim=6)
|
||||||
emb1._init_weight()
|
emb1._init_weight()
|
||||||
|
emb1 = emb1.to(torch_device)
|
||||||
emb = emb1(input_ids.shape)
|
emb = emb1(input_ids.shape)
|
||||||
desired_weights = torch.tensor(
|
desired_weights = torch.tensor(
|
||||||
[[0.0000, 0.0000, 0.0000, 1.0000, 1.0000, 1.0000], [0.8415, 0.0464, 0.0022, 0.5403, 0.9989, 1.0000]]
|
[[0.0000, 0.0000, 0.0000, 1.0000, 1.0000, 1.0000], [0.8415, 0.0464, 0.0022, 0.5403, 0.9989, 1.0000]]
|
||||||
@ -554,8 +555,9 @@ class RoFormerSelfAttentionRotaryPositionEmbeddingTest(unittest.TestCase):
|
|||||||
key_layer = (
|
key_layer = (
|
||||||
-torch.arange(2 * 12 * 16 * 64, dtype=torch.float, device=torch_device).reshape(2, 12, 16, 64) / 100
|
-torch.arange(2 * 12 * 16 * 64, dtype=torch.float, device=torch_device).reshape(2, 12, 16, 64) / 100
|
||||||
).to(torch_device)
|
).to(torch_device)
|
||||||
embed_positions = RoFormerSinusoidalPositionalEmbedding(num_positions=32, embedding_dim=64).to(torch_device)
|
embed_positions = RoFormerSinusoidalPositionalEmbedding(num_positions=32, embedding_dim=64)
|
||||||
embed_positions._init_weight()
|
embed_positions._init_weight()
|
||||||
|
embed_positions = embed_positions.to(torch_device)
|
||||||
sinusoidal_pos = embed_positions([2, 16, 768])[None, None, :, :]
|
sinusoidal_pos = embed_positions([2, 16, 768])[None, None, :, :]
|
||||||
|
|
||||||
query_layer, key_layer = RoFormerSelfAttention.apply_rotary_position_embeddings(
|
query_layer, key_layer = RoFormerSelfAttention.apply_rotary_position_embeddings(
|
||||||
|
Loading…
Reference in New Issue
Block a user