Use another repo. for Mistral3 processor testing (#36925)

* fix

* fix

* fix

* fix

---------

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Yih-Dar 2025-03-24 14:36:05 +01:00 committed by GitHub
parent 9e125d9a2e
commit 340500b1a9
2 changed files with 9 additions and 3 deletions


@@ -25,6 +25,7 @@ from transformers import (
 from transformers.testing_utils import (
     cleanup,
     require_bitsandbytes,
+    require_read_token,
     require_torch,
     require_torch_gpu,
     slow,
@@ -315,6 +316,7 @@ class Mistral3IntegrationTest(unittest.TestCase):
     def tearDown(self):
         cleanup(torch_device, gc_collect=True)
 
+    @require_read_token
     def test_mistral3_integration_generate_text_only(self):
         processor = AutoProcessor.from_pretrained(self.model_checkpoint)
         model = Mistral3ForConditionalGeneration.from_pretrained(
@@ -342,6 +344,7 @@ class Mistral3IntegrationTest(unittest.TestCase):
         expected_output = "Sure, here's a haiku for you:\n\nWhispers of the breeze,\nCherry blossoms softly fall,\nSpring's gentle embrace."
         self.assertEqual(decoded_output, expected_output)
 
+    @require_read_token
     def test_mistral3_integration_generate(self):
         processor = AutoProcessor.from_pretrained(self.model_checkpoint)
         model = Mistral3ForConditionalGeneration.from_pretrained(
@@ -368,6 +371,7 @@ class Mistral3IntegrationTest(unittest.TestCase):
         expected_output = "The image depicts two cats lying on a pink blanket. The larger cat, which appears to be an"
         self.assertEqual(decoded_output, expected_output)
 
+    @require_read_token
     def test_mistral3_integration_batched_generate(self):
         processor = AutoProcessor.from_pretrained(self.model_checkpoint)
         model = Mistral3ForConditionalGeneration.from_pretrained(
@@ -418,6 +422,7 @@ class Mistral3IntegrationTest(unittest.TestCase):
             f"Decoded output: {decoded_output}\nExpected output: {expected_output}",
         )
 
+    @require_read_token
     @require_bitsandbytes
     def test_mistral3_integration_batched_generate_multi_image(self):
         processor = AutoProcessor.from_pretrained(self.model_checkpoint)
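
For context: require_read_token (imported in the first hunk above) is the transformers.testing_utils decorator used to gate tests that must authenticate against the Hugging Face Hub, e.g. for gated checkpoints. A minimal usage sketch; the test name and body below are illustrative and not part of this commit:

    from transformers.testing_utils import require_read_token, slow

    @slow
    @require_read_token  # gated: only runs when a Hugging Face read token is available
    def test_generate_from_gated_checkpoint():
        # illustrative placeholder for a test that downloads a gated checkpoint
        ...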


@@ -20,7 +20,7 @@ import unittest
 import requests
 
 from transformers import PixtralProcessor
-from transformers.testing_utils import require_read_token, require_vision
+from transformers.testing_utils import require_vision
 from transformers.utils import is_torch_available, is_vision_available
 
 from ...test_processing_common import ProcessorTesterMixin
@@ -35,7 +35,6 @@ if is_vision_available():
 
 
 @require_vision
-@require_read_token
 class Mistral3ProcessorTest(ProcessorTesterMixin, unittest.TestCase):
     """This tests Pixtral processor with the new `spatial_merge_size` argument in Mistral3."""
 
@@ -52,7 +51,9 @@ class Mistral3ProcessorTest(ProcessorTesterMixin, unittest.TestCase):
 
     def setUp(self):
         self.tmpdirname = tempfile.mkdtemp()
-        processor = self.processor_class.from_pretrained("mistralai/Mistral-Small-3.1-24B-Instruct-2503")
+        processor = PixtralProcessor.from_pretrained(
+            "hf-internal-testing/Mistral-Small-3.1-24B-Instruct-2503-only-processor"
+        )
         processor.save_pretrained(self.tmpdirname)
 
     def get_processor(self):
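
The net effect of this second change is that the processor test now downloads processor files from a processor-only repo under hf-internal-testing, so the require_read_token gate can be dropped there. A standalone sketch of that loading pattern, using the repo id from the diff; the temporary-directory round trip mirrors setUp, and the final reload is only for illustration:

    import tempfile

    from transformers import PixtralProcessor

    # Fetch only the processor files from the processor-only test repo.
    processor = PixtralProcessor.from_pretrained(
        "hf-internal-testing/Mistral-Small-3.1-24B-Instruct-2503-only-processor"
    )

    # Save into a temporary directory and reload from it, showing the round trip works.
    tmpdir = tempfile.mkdtemp()
    processor.save_pretrained(tmpdir)
    reloaded = PixtralProcessor.from_pretrained(tmpdir)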