mirror of https://github.com/huggingface/transformers.git (synced 2025-08-03 03:31:05 +06:00)
disable test_retain_grad_hidden_states_attentions on SeamlessM4TModelWithTextInputTest (#28169)
disable retain_grad_hidden_states_attentions on SeamlessM4TModelWithTextInputTest
This commit is contained in:
parent
1d77735947
commit
e268d7e5dc
@@ -751,6 +751,12 @@ class SeamlessM4TModelWithTextInputTest(
     def test_training_gradient_checkpointing_use_reentrant_false(self):
         pass
 
+    @unittest.skip(
+        reason="In training model, the first encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
+    )
+    def test_retain_grad_hidden_states_attentions(self):
+        pass
+
 
 @require_torch
 class SeamlessM4TGenerationTest(unittest.TestCase):
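For reference, below is a minimal standalone sketch of the @unittest.skip pattern this commit applies. The class and method names are hypothetical, not part of the commit; only the decorator usage mirrors the change above.

import unittest


class ExampleTest(unittest.TestCase):
    # unittest.skip marks the test as skipped at collection time;
    # the body is never executed and the runner reports it as "skipped"
    # rather than as a failure.
    @unittest.skip(reason="Feature under test is not supported yet.")
    def test_unsupported_feature(self):
        self.fail("never reached")


if __name__ == "__main__":
    unittest.main()  # summary line reads: OK (skipped=1)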