Set seed for M4T retain grad test (#31419)

This commit is contained in:
Yoach Lacombe 2024-06-14 14:48:04 +02:00 committed by GitHub
parent 43ee58588b
commit d9daeff297
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -612,11 +612,11 @@ class SeamlessM4TModelWithSpeechInputTest(ModelTesterMixin, unittest.TestCase):
[self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length],
)
def test_retain_grad_hidden_states_attentions(self):
    """Run the shared retain-grad test with a fixed RNG seed.

    When the model is put in training mode, the first speech encoder
    layer is sometimes randomly skipped (stochastic layer drop), which
    makes the gradient/hidden-state checks flaky. Pinning the seed
    guarantees the first layer is always executed, so the inherited
    test can run deterministically instead of being skipped outright.
    """
    # Seed 0 is known to keep the first speech encoder layer active —
    # without this the inherited test fails intermittently.
    set_seed(0)
    super().test_retain_grad_hidden_states_attentions()
@require_torch