* skip failing tests

* [no-filter]

* [no-filter]

* fix wording mismatch in FA2 test

* [no-filter]

* trigger normal CI without filtering
Pablo Montalvo 2024-08-13 10:46:21 +02:00 committed by GitHub
parent 29c3a0fa01
commit a5a8291ad1
2 changed files with 7 additions and 4 deletions


@@ -2831,7 +2831,11 @@ class ModelTesterMixin:
         model_forward_args = inspect.signature(model.forward).parameters
         if "inputs_embeds" not in model_forward_args:
             self.skipTest(reason="This model doesn't use `inputs_embeds`")
+        has_inputs_embeds_forwarding = "inputs_embeds" in set(
+            inspect.signature(model.prepare_inputs_for_generation).parameters.keys()
+        )
+        if not has_inputs_embeds_forwarding:
+            self.skipTest(reason="This model doesn't support `inputs_embeds` passed to `generate`.")
         inputs = copy.deepcopy(self._prepare_for_class(inputs_dict, model_class))
         pad_token_id = config.pad_token_id if config.pad_token_id is not None else 1

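The skip added above relies on standard-library signature introspection: if `inputs_embeds` is not among the parameters of the model's `prepare_inputs_for_generation`, the test bails out before ever calling `generate`. A minimal runnable sketch of that check, with a hypothetical `DummyModel` standing in for a real transformers model:

import inspect


class DummyModel:
    # Hypothetical stand-in for a transformers model; only the introspection
    # below mirrors the diff.
    def prepare_inputs_for_generation(self, input_ids, past_key_values=None, inputs_embeds=None):
        # Models that can generate from embeddings expose `inputs_embeds` here.
        return {"input_ids": input_ids, "inputs_embeds": inputs_embeds}


model = DummyModel()
# Same check as in the test: `inspect.signature` on a bound method drops `self`,
# leaving only the parameters callers can actually pass.
has_inputs_embeds_forwarding = "inputs_embeds" in set(
    inspect.signature(model.prepare_inputs_for_generation).parameters.keys()
)
print(has_inputs_embeds_forwarding)  # True -> the test would not be skipped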

@@ -2427,8 +2427,7 @@ class TestAttentionImplementation(unittest.TestCase):
             _ = AutoModel.from_pretrained(
                 "hf-internal-testing/tiny-random-GPTBigCodeModel", attn_implementation="flash_attention_2"
             )
-        self.assertTrue("the package flash_attn seems not to be installed" in str(cm.exception))
+        self.assertTrue("the package flash_attn seems to be not installed" in str(cm.exception))
 
     def test_not_available_flash_with_config(self):
         if is_flash_attn_2_available():
@@ -2443,7 +2442,7 @@ class TestAttentionImplementation(unittest.TestCase):
                 attn_implementation="flash_attention_2",
             )
-        self.assertTrue("the package flash_attn seems not to be installed" in str(cm.exception))
+        self.assertTrue("the package flash_attn seems to be not installed" in str(cm.exception))
 
     def test_not_available_sdpa(self):
         if is_torch_sdpa_available():
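
Both assertion fixes above update the expected substring to the wording the test now expects, "seems to be not installed". The pattern in play is `assertRaises` used as a context manager, which exposes the raised exception as `cm.exception` for a substring match on its message. A self-contained sketch, with a hypothetical loader standing in for `AutoModel.from_pretrained` on a machine without flash_attn:

import unittest


def load_with_flash_attention():
    # Hypothetical stand-in for AutoModel.from_pretrained(...,
    # attn_implementation="flash_attention_2") when flash_attn is absent.
    raise ImportError("the package flash_attn seems to be not installed")


class TestErrorWording(unittest.TestCase):
    def test_flash_attn_import_error_message(self):
        with self.assertRaises(ImportError) as cm:
            load_with_flash_attention()
        # Substring match against the captured exception, as in the fixed tests.
        self.assertTrue("the package flash_attn seems to be not installed" in str(cm.exception))


if __name__ == "__main__":
    unittest.main()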