disable test_retain_grad_hidden_states_attentions on SeamlessM4TModelWithTextInputTest (#28169)

Author: Dean Wyatte, 2023-12-21 00:39:44 -07:00 (committed by GitHub)
Parent: 1d77735947
Commit: e268d7e5dc

@@ -751,6 +751,12 @@ class SeamlessM4TModelWithTextInputTest(
     def test_training_gradient_checkpointing_use_reentrant_false(self):
         pass
 
+    @unittest.skip(
+        reason="In training mode, the first encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
+    )
+    def test_retain_grad_hidden_states_attentions(self):
+        pass
+
 
 @require_torch
 class SeamlessM4TGenerationTest(unittest.TestCase):
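
For context, the skipped test relies on torch.Tensor.retain_grad(): it retains gradients on intermediate hidden states and asserts they are populated after a backward pass. The following is a minimal, illustrative PyTorch sketch (not the repository's test code) of why that assertion can fail when an encoder layer is randomly skipped in training mode, as the skip reason describes:

import torch

# Illustrative stand-in for an encoder hidden state; not SeamlessM4T code.
x = torch.randn(2, 2, requires_grad=True)
hidden = x * 2        # intermediate "hidden state" produced by a layer
hidden.retain_grad()  # keep .grad on this non-leaf tensor after backward

layer_skipped = True  # stand-in for the layer being skipped during training
out = x * 3 if layer_skipped else hidden * 3

out.sum().backward()
print(hidden.grad)    # None when the layer was skipped, a tensor otherwise

A retained gradient is only populated if the tensor actually participates in the backward graph; a skipped layer's hidden state does not, so the test's "grad is not None" assertion fails intermittently, and the commit skips the test until training is supported.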