Mirror of https://github.com/huggingface/transformers.git (synced 2025-08-01 02:31:11 +06:00)
Fix tests (#32649)

* skip failing tests
* [no-filter]
* [no-filter]
* fix wording catch in FA2 test
* [no-filter]
* trigger normal CI without filtering

parent 29c3a0fa01
commit a5a8291ad1
@@ -2831,7 +2831,11 @@ class ModelTesterMixin:
             model_forward_args = inspect.signature(model.forward).parameters
             if "inputs_embeds" not in model_forward_args:
                 self.skipTest(reason="This model doesn't use `inputs_embeds`")
 
+            has_inputs_embeds_forwarding = "inputs_embeds" in set(
+                inspect.signature(model.prepare_inputs_for_generation).parameters.keys()
+            )
+            if not has_inputs_embeds_forwarding:
+                self.skipTest(reason="This model doesn't support `inputs_embeds` passed to `generate`.")
             inputs = copy.deepcopy(self._prepare_for_class(inputs_dict, model_class))
             pad_token_id = config.pad_token_id if config.pad_token_id is not None else 1
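The added skip logic probes a model's signature before exercising it: the test only proceeds if both `forward` and `prepare_inputs_for_generation` accept `inputs_embeds`. A minimal, self-contained sketch of that `inspect.signature` pattern, using a hypothetical `DummyModel` rather than anything from the test suite:

import inspect

class DummyModel:
    def forward(self, input_ids=None, attention_mask=None):
        ...

    def prepare_inputs_for_generation(self, input_ids, inputs_embeds=None):
        ...

model = DummyModel()

# The first check in the hunk: does forward() accept `inputs_embeds` at all?
forward_params = inspect.signature(model.forward).parameters
print("inputs_embeds" in forward_params)  # False -> skipTest would fire here

# The newly added check: does prepare_inputs_for_generation() forward it?
generation_params = inspect.signature(model.prepare_inputs_for_generation).parameters
print("inputs_embeds" in generation_params)  # True -> generate() can consume embeddings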
@@ -2427,8 +2427,7 @@ class TestAttentionImplementation(unittest.TestCase):
             _ = AutoModel.from_pretrained(
                 "hf-internal-testing/tiny-random-GPTBigCodeModel", attn_implementation="flash_attention_2"
             )
 
-        self.assertTrue("the package flash_attn seems not to be installed" in str(cm.exception))
+        self.assertTrue("the package flash_attn seems to be not installed" in str(cm.exception))
 
     def test_not_available_flash_with_config(self):
         if is_flash_attn_2_available():
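In these assertions, `cm` is the context manager returned by `unittest`'s `assertRaises`, and `cm.exception` holds the exception instance that was raised, so the expected substring has to track the library's wording exactly. A runnable sketch of the pattern, with a stand-in `load_model` in place of `AutoModel.from_pretrained`:

import unittest

class ExceptionWordingTest(unittest.TestCase):
    def test_error_message(self):
        def load_model():
            # Stand-in for the ImportError transformers raises when flash_attn
            # is requested but missing; wording copied from the assertions above.
            raise ImportError("the package flash_attn seems to be not installed.")

        with self.assertRaises(ImportError) as cm:
            load_model()

        # Exactly the check the tests perform: a substring match on the message.
        self.assertTrue("the package flash_attn seems to be not installed" in str(cm.exception))

if __name__ == "__main__":
    unittest.main()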
@@ -2443,7 +2442,7 @@ class TestAttentionImplementation(unittest.TestCase):
                 attn_implementation="flash_attention_2",
             )
 
-        self.assertTrue("the package flash_attn seems not to be installed" in str(cm.exception))
+        self.assertTrue("the package flash_attn seems to be not installed" in str(cm.exception))
 
     def test_not_available_sdpa(self):
         if is_torch_sdpa_available():
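The context lines also show how each test is gated on backend availability: a "not available" test can only run when the backend is genuinely absent, so it skips itself otherwise. A short sketch of that guard, assuming the `transformers.utils` availability helpers visible in the hunks:

import unittest

from transformers.utils import is_flash_attn_2_available, is_torch_sdpa_available

class AvailabilityGatedTest(unittest.TestCase):
    def test_not_available_flash(self):
        # Mirrors the guard in the diff: if flash_attn is installed, the
        # failure path under test cannot be reached, so skip.
        if is_flash_attn_2_available():
            self.skipTest("flash_attn is installed; cannot exercise the unavailable path")
        # ... assertions on the unavailable-backend error would follow here

    def test_not_available_sdpa(self):
        if is_torch_sdpa_available():
            self.skipTest("SDPA is available; cannot exercise the unavailable path")
        # ... assertions on the unavailable-backend error would follow here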