remi-or 2025-06-27 04:55:08 -05:00
parent 00a2987711
commit ff3613dbbd


@@ -1787,7 +1787,7 @@ class Blip2ModelIntegrationTest(unittest.TestCase):
        # Test output
        expected_ids = [2, 102, 693, 2828, 15, 5, 4105, 19, 10, 2335, 50118]
        self.assertEqual(predictions[0].tolist(), [50265] * 32 + expected_ids)  # 50265 is the img token id
self.assertEqual("a woman sitting on the beach with a dog", generated_text)
# image and context
@@ -1799,7 +1799,7 @@ class Blip2ModelIntegrationTest(unittest.TestCase):
        # Test output
        expected_ids = [2, 45641, 35, 61, 343, 16, 42, 116, 31652, 35, 24, 18, 45, 10, 343, 6, 24, 18, 10, 4105, 50118]
        self.assertEqual(predictions[0].tolist(), [50265] * 32 + expected_ids)  # 50265 is the img token id
        self.assertEqual(generated_text, "Question: which city is this? Answer: it's not a city, it's a beach")

    @require_torch_multi_accelerator
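The two hunks above check that generation starts with 32 image placeholder tokens before the text ids. A minimal, self-contained sketch of that expected-sequence shape follows; the values are taken from the test, the constant names are illustrative, and the count of 32 presumably matches the model's number of query tokens:

IMG_TOKEN_ID = 50265   # image placeholder id used by this checkpoint (from the test above)
NUM_IMG_TOKENS = 32    # number of image placeholder positions (illustrative name)
expected_ids = [2, 102, 693, 2828, 15, 5, 4105, 19, 10, 2335, 50118]

# The full generated sequence is expected to be the image placeholders
# followed by the text token ids, which is what the assertions compare.
full_expected = [IMG_TOKEN_ID] * NUM_IMG_TOKENS + expected_ids
assert full_expected[:NUM_IMG_TOKENS] == [IMG_TOKEN_ID] * NUM_IMG_TOKENS
assert full_expected[NUM_IMG_TOKENS:] == expected_ids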
@@ -1825,16 +1825,15 @@ class Blip2ModelIntegrationTest(unittest.TestCase):
        generated_text = processor.batch_decode(predictions, skip_special_tokens=True)[0].strip()
        # Test output
        expected_ids_and_text = Expectations({
            ("cuda", None): (
                [0, 2335, 1556, 28, 1782, 30, 8, 2608, 1],
                "woman playing with dog on the beach"
            ),
            ("rocm", (9, 5)): (
                [0, 3, 9, 2335, 19, 1556, 28, 160, 1782, 30, 8, 2608, 1],
                "a woman is playing with her dog on the beach"
            )
        }).get_expectation()
        expected_ids_and_text = Expectations(
            {
                ("cuda", None): ([0, 2335, 1556, 28, 1782, 30, 8, 2608, 1], "woman playing with dog on the beach"),
                ("rocm", (9, 5)): (
                    [0, 3, 9, 2335, 19, 1556, 28, 160, 1782, 30, 8, 2608, 1],
                    "a woman is playing with her dog on the beach",
                ),
            }
        ).get_expectation()
        self.assertEqual(predictions[0].tolist(), expected_ids_and_text[0])
        self.assertEqual(generated_text, expected_ids_and_text[1])
@@ -1846,16 +1845,15 @@ class Blip2ModelIntegrationTest(unittest.TestCase):
        generated_text = processor.batch_decode(predictions, skip_special_tokens=True)[0].strip()
        # Test output
        expected_ids_and_text = Expectations({
            ("cuda", None): (
                [0, 3, 7, 152, 67, 839, 1],
                "san diego"
            ),
            ("rocm", (9, 5)): (
                [0, 3, 7, 152, 2515, 11389, 3523, 1],
                "san francisco" # TODO: check if this is ok
            )
        }).get_expectation()
        expected_ids_and_text = Expectations(
            {
                ("cuda", None): ([0, 3, 7, 152, 67, 839, 1], "san diego"),
                ("rocm", (9, 5)): (
                    [0, 3, 7, 152, 2515, 11389, 3523, 1],
                    "san francisco",  # TODO: check if this is ok
                ),
            }
        ).get_expectation()
        self.assertEqual(predictions[0].tolist(), expected_ids_and_text[0])
        self.assertEqual(generated_text, expected_ids_and_text[1])
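The blocks above use the Expectations helper to select device-specific reference outputs keyed by a (device_type, version) pair. As a rough sketch of that selection pattern, assuming an exact-match lookup with a version-agnostic fallback (the real transformers helper detects the running accelerator on its own; the class name and explicit device argument below are illustrative only):

# Hypothetical sketch of a device-keyed expectation lookup; not the transformers API.
class DeviceExpectations:
    def __init__(self, data):
        # data maps (device_type, version) -> expected value
        self.data = data

    def get_expectation(self, device_type, version=None):
        # Prefer an exact (device_type, version) match, then fall back to
        # the version-agnostic (device_type, None) entry.
        key = (device_type, version)
        if key in self.data:
            return self.data[key]
        return self.data[(device_type, None)]


expected_ids_and_text = DeviceExpectations(
    {
        ("cuda", None): ([0, 3, 7, 152, 67, 839, 1], "san diego"),
        ("rocm", (9, 5)): ([0, 3, 7, 152, 2515, 11389, 3523, 1], "san francisco"),
    }
).get_expectation("rocm", (9, 5))
# expected_ids_and_text is now ([0, 3, 7, 152, 2515, 11389, 3523, 1], "san francisco")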