This commit is contained in:
ydshieh 2025-07-02 22:03:48 +02:00
parent ff9034ffda
commit 829e5a4713
6 changed files with 10 additions and 55 deletions

View File

@@ -722,9 +722,6 @@ class GroundingDinoModelIntegrationTests(unittest.TestCase):
)
expected_slice_boxes = torch.tensor(expectations.get_expectation()).to(torch_device)
expected_scores = torch.tensor([0.4524, 0.4074]).to(torch_device)
expected_slice_boxes = torch.tensor([344.8210, 23.1831, 637.3943, 373.8227]).to(torch_device)
self.assertEqual(len(results["scores"]), 2)
torch.testing.assert_close(results["scores"], expected_scores, rtol=1e-3, atol=1e-3)
torch.testing.assert_close(results["boxes"][0, :], expected_slice_boxes, rtol=1e-2, atol=1e-2)

View File

@@ -467,12 +467,7 @@ class Mask2FormerModelIntegrationTest(unittest.TestCase):
}
)
expected_slice_hidden_state = torch.tensor(expectations.get_expectation()).to(torch_device)
torch.testing.assert_close(
outputs.pixel_decoder_last_hidden_state[0, 0, :3, :3],
expected_slice_hidden_state,
atol=TOLERANCE,
rtol=TOLERANCE,
)
torch.testing.assert_close(outputs.pixel_decoder_last_hidden_state[0, 0, :3, :3], expected_slice_hidden_state, atol=TOLERANCE,rtol=TOLERANCE) # fmt: skip
expectations = Expectations(
{
@@ -489,12 +484,7 @@ class Mask2FormerModelIntegrationTest(unittest.TestCase):
}
)
expected_slice_hidden_state = torch.tensor(expectations.get_expectation()).to(torch_device)
torch.testing.assert_close(
outputs.transformer_decoder_last_hidden_state[0, :3, :3],
expected_slice_hidden_state,
atol=TOLERANCE,
rtol=TOLERANCE,
)
torch.testing.assert_close(outputs.transformer_decoder_last_hidden_state[0, :3, :3], expected_slice_hidden_state, atol=TOLERANCE, rtol=TOLERANCE) # fmt: skip
def test_inference_universal_segmentation_head(self):
model = Mask2FormerForUniversalSegmentation.from_pretrained(self.model_checkpoints).to(torch_device).eval()

View File

@@ -520,12 +520,7 @@ class MaskFormerModelIntegrationTest(unittest.TestCase):
[-0.0069, 0.3385, -0.0089],
]
).to(torch_device)
torch.allclose(
outputs.encoder_last_hidden_state[0, 0, :3, :3],
expected_slice_hidden_state,
atol=TOLERANCE,
rtol=TOLERANCE,
)
torch.allclose(outputs.encoder_last_hidden_state[0, 0, :3, :3], expected_slice_hidden_state, atol=TOLERANCE, rtol=TOLERANCE) # fmt: skip
expectations = Expectations(
{
@@ -538,12 +533,7 @@ class MaskFormerModelIntegrationTest(unittest.TestCase):
}
)
expected_slice_hidden_state = torch.tensor(expectations.get_expectation()).to(torch_device)
torch.allclose(
outputs.pixel_decoder_last_hidden_state[0, 0, :3, :3],
expected_slice_hidden_state,
atol=TOLERANCE,
rtol=TOLERANCE,
)
torch.allclose(outputs.pixel_decoder_last_hidden_state[0, 0, :3, :3], expected_slice_hidden_state, atol=TOLERANCE,rtol=TOLERANCE) # fmt: skip
expectations = Expectations(
{
@@ -560,12 +550,7 @@ class MaskFormerModelIntegrationTest(unittest.TestCase):
}
)
expected_slice_hidden_state = torch.tensor(expectations.get_expectation()).to(torch_device)
torch.allclose(
outputs.transformer_decoder_last_hidden_state[0, :3, :3],
expected_slice_hidden_state,
atol=TOLERANCE,
rtol=TOLERANCE,
)
torch.allclose(outputs.transformer_decoder_last_hidden_state[0, :3, :3], expected_slice_hidden_state, atol=TOLERANCE, rtol=TOLERANCE) # fmt: skip
def test_inference_instance_segmentation_head(self):
model = (
@@ -656,11 +641,7 @@ class MaskFormerModelIntegrationTest(unittest.TestCase):
expectations = Expectations(
{
(None, None): [[-0.9046, -2.6366, -4.6062], [-3.4179, -5.7890, -8.8057], [-4.9179, -7.6560, -10.7711]],
("cuda", 8): [
[-0.9000, -2.6283, -4.5964],
[-3.4123, -5.7789, -8.7919],
[-4.9132, -7.6444, -10.7557],
],
("cuda", 8): [[-0.9000, -2.6283, -4.5964], [-3.4123, -5.7789, -8.7919], [-4.9132, -7.6444, -10.7557]],
}
)
expected_slice = torch.tensor(expectations.get_expectation()).to(torch_device)

View File

@@ -338,21 +338,9 @@ class MobileNetV2ModelIntegrationTest(unittest.TestCase):
[[4.2058, 4.8317, 4.7638], [4.4136, 5.0361, 4.9383], [4.5028, 4.9644, 4.8734]],
],
("cuda", 8): [
[
[17.5809, 17.7571, 18.3341],
[18.3240, 18.4216, 18.8974],
[18.6174, 18.8662, 19.2177],
],
[
[-2.1562, -2.0942, -2.3703],
[-2.4199, -2.2999, -2.6818],
[-2.7800, -2.5944, -2.7678],
],
[
[4.2092, 4.8356, 4.7694],
[4.4181, 5.0401, 4.9409],
[4.5089, 4.9700, 4.8802],
],
[[17.5809, 17.7571, 18.3341], [18.3240, 18.4216, 18.8974], [18.6174, 18.8662, 19.2177]],
[[-2.1562, -2.0942, -2.3703], [-2.4199, -2.2999, -2.6818], [-2.7800, -2.5944, -2.7678]],
[[4.2092, 4.8356, 4.7694], [4.4181, 5.0401, 4.9409], [4.5089, 4.9700, 4.8802]],
],
}
)

View File

@@ -767,7 +767,6 @@ class RTDetrModelIntegrationTest(unittest.TestCase):
results = image_processor.post_process_object_detection(
outputs, threshold=0.0, target_sizes=[image.size[::-1]]
)[0]
expected_scores = torch.tensor([0.9704, 0.9599, 0.9576, 0.9507], device=torch_device)
expectations = Expectations(
{

View File

@@ -791,6 +791,6 @@ class RTDetrV2ModelIntegrationTest(unittest.TestCase):
)
expected_slice_boxes = torch.tensor(expectations.get_expectation()).to(torch_device)
self.assertTrue(torch.allclose(results["scores"][:4], expected_scores, atol=1e-3, rtol=2e-4))
torch.testing.assert_close(results["scores"][:4], expected_scores, atol=1e-3, rtol=2e-4)
self.assertSequenceEqual(results["labels"][:4].tolist(), expected_labels)
torch.testing.assert_close(results["boxes"][:4], expected_slice_boxes, atol=1e-3, rtol=2e-4)