Skip M4T test_retain_grad_hidden_states_attentions (#28060)

* skip test from SpeechInput
* refine description of skip
parent d269c4b2d7
commit deb72cb6d9
@@ -20,7 +20,7 @@ import tempfile
 import unittest
 
 from transformers import SeamlessM4TConfig, is_speech_available, is_torch_available
-from transformers.testing_utils import is_flaky, require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, slow, torch_device
 from transformers.trainer_utils import set_seed
 from transformers.utils import cached_property
 
@@ -610,9 +610,11 @@ class SeamlessM4TModelWithSpeechInputTest(ModelTesterMixin, unittest.TestCase):
                 [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length],
             )
 
-    @is_flaky()
+    @unittest.skip(
+        reason="In training model, the first speech encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
+    )
     def test_retain_grad_hidden_states_attentions(self):
-        super().test_retain_grad_hidden_states_attentions()
+        pass
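For context, a minimal sketch of the skip pattern this commit adopts, using only the standard library's unittest; ExampleTest is a hypothetical stand-in for the real SeamlessM4T test class, not code from the repository:

import unittest


class ExampleTest(unittest.TestCase):
    # Previously the test carried @is_flaky() from transformers.testing_utils,
    # a decorator that retries a test that fails intermittently. This commit
    # replaces the retry with an outright skip, since training is unsupported.
    @unittest.skip(
        reason="In training mode, the first speech encoder layer is sometimes skipped."
    )
    def test_retain_grad_hidden_states_attentions(self):
        pass  # never executed; the reason string is shown in the test report


if __name__ == "__main__":
    unittest.main()  # the skipped test is reported as skipped, not failed

Because unittest evaluates skip decorators before calling the method, the body can safely be reduced to pass, which is exactly what the diff above does.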