Run test_can_load_with_global_device_set using a subprocess (#37553)

* fix

* fix

* fix

* Update tests/test_modeling_common.py

Co-authored-by: Cyril Vallez <cyril.vallez@huggingface.co>

* fix

---------

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Co-authored-by: Cyril Vallez <cyril.vallez@huggingface.co>
Yih-Dar authored on 2025-04-16 19:48:30 +02:00 (committed by GitHub)
commit 5a6de703a7 (parent 9a4ce64770)

@@ -89,6 +89,7 @@ from transformers.testing_utils import (
     require_torch_multi_accelerator,
     require_torch_multi_gpu,
     require_torch_sdpa,
+    run_test_using_subprocess,
     set_config_for_less_flaky_test,
     set_model_for_less_flaky_test,
     set_model_tester_for_less_flaky_test,
@@ -4550,6 +4551,9 @@ class ModelTesterMixin:
             unique_devices, {device}, f"All parameters should be on {device}, but found {unique_devices}."
         )
 
+    # Here we need to run with a subprocess as otherwise setting back the default device to the default value ("cpu")
+    # may bring unwanted consequences on other tests. See PR #37553
+    @run_test_using_subprocess
     @require_torch_accelerator
     def test_can_load_with_global_device_set(self):
         config, _ = self.model_tester.prepare_config_and_inputs_for_common()
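
For context, the state-leak problem the new decorator works around can be sketched as follows. This is a minimal illustration only, not the actual test body: it assumes the test relies on torch.set_default_device (a real PyTorch API) and uses a hypothetical load_on_global_device helper to show why restoring a hardcoded "cpu" default is only safe when the test runs in its own process.

import torch

def load_on_global_device(device: str) -> torch.nn.Module:
    # Hypothetical helper mirroring what the test exercises: parameters are
    # created on whatever global default device is currently set.
    torch.set_default_device(device)
    try:
        # Allocated on `device` without any explicit .to() call.
        return torch.nn.Linear(4, 4)
    finally:
        # Setting the default back to a hardcoded "cpu" (rather than the previous
        # value) mutates process-wide state; inside a shared test process this
        # would leak into later tests, hence @run_test_using_subprocess.
        torch.set_default_device("cpu")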