enhance require_deterministic_for_xpu (#37437)

* enhance require_deterministic_for_xpu

Signed-off-by: YAO Matrix <matrix.yao@intel.com>

* fix style

Signed-off-by: YAO Matrix <matrix.yao@intel.com>

* fix style

Signed-off-by: YAO Matrix <matrix.yao@intel.com>

---------

Signed-off-by: YAO Matrix <matrix.yao@intel.com>
Yao Matrix 2025-04-11 14:06:08 +08:00 committed by GitHub
parent 371c44d0ef
commit c7064cdba1
3 changed files with 16 additions and 7 deletions
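In short, the decorator no longer skips a test on XPU when torch's deterministic mode happens to be off; it now wraps the test so that deterministic algorithms are switched on for the duration of the call and the previous setting is restored afterwards, while non-XPU runs are left untouched. A standalone sketch of that wrapper pattern, with hypothetical names (the real helper additionally gates on is_torch_xpu_available()):

# Standalone sketch only: "force_deterministic" and "dummy_test" are illustrative names,
# not part of the transformers API.
import functools

import torch


def force_deterministic(test_case):
    @functools.wraps(test_case)
    def wrapper(*args, **kwargs):
        original_state = torch.are_deterministic_algorithms_enabled()
        try:
            torch.use_deterministic_algorithms(True)
            return test_case(*args, **kwargs)
        finally:
            # Restore the caller's setting even if the test raises.
            torch.use_deterministic_algorithms(original_state)

    return wrapper


@force_deterministic
def dummy_test():
    assert torch.are_deterministic_algorithms_enabled()


if __name__ == "__main__":
    before = torch.are_deterministic_algorithms_enabled()
    dummy_test()
    assert torch.are_deterministic_algorithms_enabled() == before  # prior state restored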

@@ -139,7 +139,6 @@ from .utils import (
     is_torch_available,
     is_torch_bf16_available_on_device,
     is_torch_bf16_gpu_available,
-    is_torch_deterministic,
     is_torch_fp16_available_on_device,
     is_torch_greater_or_equal,
     is_torch_hpu_available,
@@ -1073,12 +1072,19 @@ def require_torch_bf16_gpu(test_case):
 def require_deterministic_for_xpu(test_case):
-    if is_torch_xpu_available():
-        return unittest.skipUnless(is_torch_deterministic(), "test requires torch to use deterministic algorithms")(
-            test_case
-        )
-    else:
-        return test_case
+    @wraps(test_case)
+    def wrapper(*args, **kwargs):
+        if is_torch_xpu_available():
+            original_state = torch.are_deterministic_algorithms_enabled()
+            try:
+                torch.use_deterministic_algorithms(True)
+                return test_case(*args, **kwargs)
+            finally:
+                torch.use_deterministic_algorithms(original_state)
+        else:
+            return test_case(*args, **kwargs)
+
+    return wrapper
 def require_torch_tf32(test_case):

@@ -936,6 +936,7 @@ class BertGenerationEncoderDecoderModelTest(EncoderDecoderMixin, unittest.TestCase):
         }

     @slow
+    @require_deterministic_for_xpu
     def test_roberta2roberta_summarization(self):
         model = EncoderDecoderModel.from_pretrained("google/roberta2roberta_L-24_bbc")
         model.to(torch_device)
@@ -1080,6 +1081,7 @@ class GPT2EncoderDecoderModelTest(EncoderDecoderMixin, unittest.TestCase):
         pass

     @slow
+    @require_deterministic_for_xpu
     def test_bert2gpt2_summarization(self):
         model = EncoderDecoderModel.from_pretrained("patrickvonplaten/bert2gpt2-cnn_dailymail-fp16")

@@ -634,6 +634,7 @@ class Speech2TextBertModelTest(EncoderDecoderMixin, unittest.TestCase):
     def test_encoder_decoder_model_from_pretrained_configs(self):
         pass

+    @require_deterministic_for_xpu
    @unittest.skip(reason="Cannot save full model as Speech2TextModel != Speech2TextEncoder")
     def test_save_and_load_from_pretrained(self):
         pass
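
For reference, a new test would opt in the same way the hunks above do; a hedged usage sketch (the test class and method names below are illustrative only):

import unittest

from transformers.testing_utils import require_deterministic_for_xpu, slow


class MySummarizationTest(unittest.TestCase):
    @slow
    @require_deterministic_for_xpu
    def test_generation_is_reproducible(self):
        # On XPU the decorator forces deterministic algorithms on for this test
        # and restores the previous setting afterwards; elsewhere it simply calls the test.
        ...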