Skip M4T test_retain_grad_hidden_states_attentions (#28060)

* skip test from SpeechInput

* refine description of skip
Yoach Lacombe 2023-12-15 13:39:16 +00:00 committed by GitHub
parent d269c4b2d7
commit deb72cb6d9


@@ -20,7 +20,7 @@ import tempfile
 import unittest
 from transformers import SeamlessM4TConfig, is_speech_available, is_torch_available
-from transformers.testing_utils import is_flaky, require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, slow, torch_device
 from transformers.trainer_utils import set_seed
 from transformers.utils import cached_property
@@ -610,9 +610,11 @@ class SeamlessM4TModelWithSpeechInputTest(ModelTesterMixin, unittest.TestCase):
                 [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length],
             )
 
-    @is_flaky()
+    @unittest.skip(
+        reason="In training model, the first speech encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
+    )
     def test_retain_grad_hidden_states_attentions(self):
-        super().test_retain_grad_hidden_states_attentions()
+        pass
 
     @require_torch
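
For reference, the change relies on the standard unittest.skip decorator: a method decorated with unittest.skip(reason=...) is reported as skipped and its body never runs, which is why the overridden body can be reduced to pass. Below is a minimal, self-contained sketch of that pattern; the class name and the second test method are hypothetical and not part of the transformers test suite.

import unittest


class ExampleSkipPattern(unittest.TestCase):
    """Hypothetical test case illustrating the skip pattern used in this commit."""

    @unittest.skip(reason="Training is not supported yet, so the test is ignored.")
    def test_retain_grad_hidden_states_attentions(self):
        # Never executed: the runner records the test as skipped with the reason above.
        pass

    def test_runs_normally(self):
        # Ordinary test, runs as usual.
        self.assertEqual(1 + 1, 2)


if __name__ == "__main__":
    # verbosity=2 prints each test name and shows the skip reason next to the skipped one.
    unittest.main(verbosity=2)

When the suite is run through pytest instead, the same method shows up as skipped in the short test summary; for example, pytest -rs -k test_retain_grad_hidden_states_attentions prints the skip reason alongside the test name.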