mirror of https://github.com/huggingface/transformers.git
Set seed for M4T retain grad test (#31419)
commit d9daeff297 (parent 43ee58588b)
@@ -612,11 +612,11 @@ class SeamlessM4TModelWithSpeechInputTest(ModelTesterMixin, unittest.TestCase):
                 [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length],
             )
 
-    @unittest.skip(
-        reason="In training model, the first speech encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
-    )
     def test_retain_grad_hidden_states_attentions(self):
-        pass
+        # When training the model, the first speech encoder layer is sometimes skipped.
+        # Setting the seed to always have the first layer.
+        set_seed(0)
+        super().test_retain_grad_hidden_states_attentions()
 
 
 @require_torch
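For context, the flakiness this commit fixes comes from LayerDrop-style stochastic depth: during training, each speech encoder layer is skipped with some probability, so a skipped first layer never populates the gradients the retain-grad test checks. Below is a minimal sketch of that mechanism, not the actual transformers implementation; the layer_is_skipped helper and the layerdrop_prob value are illustrative assumptions, while transformers.set_seed is the real helper the commit uses.

# Minimal sketch of why seeding makes the test deterministic.
# layer_is_skipped and layerdrop_prob are hypothetical stand-ins for the
# per-layer draw a LayerDrop encoder makes during training.
import torch

from transformers import set_seed  # seeds Python, NumPy and torch RNGs in one call


def layer_is_skipped(layerdrop_prob: float = 0.1) -> bool:
    # The layer is skipped when the random draw falls below the drop
    # probability, so its gradients are never populated on that pass.
    return torch.rand(1).item() < layerdrop_prob


set_seed(0)
# With the RNG seeded, the per-layer draws are reproducible, so the test
# always exercises the same layers across runs.
print(layer_is_skipped())

Seeding rather than disabling LayerDrop keeps the model config untouched; the test simply pins the random draws so the first encoder layer is always executed.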