FIX: Faulty PEFT tests (#37757)

Two PEFT tests are actually failing:

tests/peft_integration/test_peft_integration.py::PeftIntegrationTester::test_delete_adapter
tests/peft_integration/test_peft_integration.py::PeftIntegrationTester::test_peft_pipeline_no_warning

This must have been going on for some time but was apparently never
noticed. The cause is that the tests themselves are faulty; the PEFT
integration itself is correct in these cases.

test_delete_adapter

The first faulty test was introduced by #34650. AFAICT, it should never
have passed in the first place, as the PEFT integration logic has not
been changed in the meantime. At this point, the CI logs for that PR are
gone, so I can't tell whether the test passed back then or not.
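
For context on why the old assertion could not pass: once the last adapter is
deleted, the integration drops the peft_config attribute entirely (which is
exactly what the fixed assertions below check), so the old assertNotIn lookup
on model.peft_config blows up with an AttributeError instead of passing. A
minimal sketch of that failure mode, using a hypothetical ToyPeftModel
stand-in rather than the real PeftAdapterMixin:

```python
# Hypothetical stand-in for illustration only; ToyPeftModel is not the real
# transformers PeftAdapterMixin, it just mimics the behavior the diff asserts.
class ToyPeftModel:
    def __init__(self):
        self.peft_config = {"adapter_2": "config"}
        self._hf_peft_config_loaded = True

    def delete_adapter(self, adapter_name):
        del self.peft_config[adapter_name]
        if not self.peft_config:
            # With no adapters left, the attribute is removed altogether.
            del self.peft_config
            self._hf_peft_config_loaded = False


model = ToyPeftModel()
model.delete_adapter("adapter_2")
assert not hasattr(model, "peft_config")   # what the fixed test now asserts
# "adapter_2" in model.peft_config         # old check: AttributeError, not a clean pass
```

The fixed test therefore checks hasattr(model, "peft_config") instead of
membership in a dict that no longer exists.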

test_peft_pipeline_no_warning

This test was introduced in #36783 and should also never have passed: the
self.assertNoLogs context manager returns None (its "as" target is never a
log-capture object), so the follow-up assertion on cl.out could never have
worked (mea culpa for suggesting this code snippet). Here too, the CI logs
have been deleted by now, so I can't check whether the test already failed
back then.
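
To make the failure mode concrete, here is a small sketch, assuming stock
unittest behavior (assertNoLogs exists since Python 3.10 and its __enter__
returns None): the context manager does the checking itself and fails the
test as soon as a matching record is logged, so binding it with "as" and
asserting on the result afterwards can never work.

```python
import logging
import unittest


class AssertNoLogsDemo(unittest.TestCase):
    def test_as_target_is_none(self):
        logger = logging.getLogger("demo")
        # __enter__ of assertNoLogs returns None, so `cl` is not a capture object.
        with self.assertNoLogs(logger, logging.ERROR) as cl:
            logger.info("below ERROR, so nothing is captured")
        self.assertIsNone(cl)  # any access like cl.out would raise AttributeError

    def test_context_manager_does_the_checking(self):
        logger = logging.getLogger("demo")
        # assertNoLogs itself fails the test when a matching record is emitted;
        # no manual assertNotIn on captured output is needed afterwards.
        with self.assertRaises(AssertionError):
            with self.assertNoLogs(logger, logging.ERROR):
                logger.error("this alone makes the surrounding test fail")


if __name__ == "__main__":
    unittest.main()
```

This is why the fix simply drops the "as cl" binding and the trailing
assertNotIn: the assertNoLogs block already fails the test if the pipeline
emits the unsupported-model error.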
This commit is contained in:
Benjamin Bossan, 2025-04-28 15:10:46 +02:00, committed by GitHub
commit 1a9188a54e (parent b262680af4)

@@ -13,6 +13,7 @@
 # limitations under the License.
 import importlib
 import os
+import re
 import tempfile
 import unittest
@@ -385,7 +386,7 @@ class PeftIntegrationTester(unittest.TestCase, PeftTesterMixin):
         # Delete remaining adapter
         model.delete_adapter("adapter_2")
-        self.assertNotIn("adapter_2", model.peft_config)
+        self.assertFalse(hasattr(model, "peft_config"))
         self.assertFalse(model._hf_peft_config_loaded)

         # Re-add adapters for edge case tests
@@ -394,11 +395,16 @@ class PeftIntegrationTester(unittest.TestCase, PeftTesterMixin):
         # Attempt to delete multiple adapters at once
         model.delete_adapter(["adapter_1", "adapter_2"])
-        self.assertNotIn("adapter_1", model.peft_config)
-        self.assertNotIn("adapter_2", model.peft_config)
+        self.assertFalse(hasattr(model, "peft_config"))
         self.assertFalse(model._hf_peft_config_loaded)

         # Test edge cases
+        msg = re.escape("No adapter loaded. Please load an adapter first.")
+        with self.assertRaisesRegex(ValueError, msg):
+            model.delete_adapter("nonexistent_adapter")
+
+        model.add_adapter(peft_config_1, adapter_name="adapter_1")
         with self.assertRaisesRegex(ValueError, "The following adapter\\(s\\) are not present"):
             model.delete_adapter("nonexistent_adapter")
@@ -406,16 +412,11 @@ class PeftIntegrationTester(unittest.TestCase, PeftTesterMixin):
             model.delete_adapter(["adapter_1", "nonexistent_adapter"])

         # Deleting with an empty list or None should not raise errors
-        model.add_adapter(peft_config_1, adapter_name="adapter_1")
         model.add_adapter(peft_config_2, adapter_name="adapter_2")
         model.delete_adapter([])  # No-op
         self.assertIn("adapter_1", model.peft_config)
         self.assertIn("adapter_2", model.peft_config)
-
-        model.delete_adapter(None)  # No-op
-        self.assertIn("adapter_1", model.peft_config)
-        self.assertIn("adapter_2", model.peft_config)

         # Deleting duplicate adapter names in the list
         model.delete_adapter(["adapter_1", "adapter_1"])
         self.assertNotIn("adapter_1", model.peft_config)
@@ -832,7 +833,6 @@ class PeftIntegrationTester(unittest.TestCase, PeftTesterMixin):
         # Input text for testing
         text = "Who is a Elon Musk?"
-        expected_error_msg = "The model 'PeftModel' is not supported for text-generation"

         model = AutoModelForCausalLM.from_pretrained(
             BASE_PATH,
@@ -849,7 +849,7 @@ class PeftIntegrationTester(unittest.TestCase, PeftTesterMixin):
         # Create pipeline with PEFT model while capturing log output
         # Check that the warning message is not present in the logs
         pipeline_logger = logging.get_logger("transformers.pipelines.base")
-        with self.assertNoLogs(pipeline_logger, logging.ERROR) as cl:
+        with self.assertNoLogs(pipeline_logger, logging.ERROR):
             lora_generator = pipeline(
                 task="text-generation",
                 model=lora_model,
@@ -859,8 +859,3 @@ class PeftIntegrationTester(unittest.TestCase, PeftTesterMixin):
             # Generate text to verify pipeline works
             _ = lora_generator(text)
-
-        # Check that the warning message is not present in the logs
-        self.assertNotIn(
-            expected_error_msg, cl.out, f"Error message '{expected_error_msg}' should not appear when using PeftModel"
-        )