Fix broken test decorator require_torch_up_to_2_accelerators (#34201)

* fix broken require_torch_up_to_2_accelerators

* make style
byi8220 authored 2024-10-18 07:54:55 -04:00, committed by GitHub
parent 5a5b590d06
commit 0437d6cd03

@@ -827,8 +827,9 @@ def require_torch_up_to_2_accelerators(test_case):
     if not is_torch_available():
         return unittest.skip(reason="test requires PyTorch")(test_case)
-    return unittest.skipUnless(backend_device_count(torch_device) < 3, "test requires 0 or 1 or 2 accelerators")
-    (test_case)
+    return unittest.skipUnless(backend_device_count(torch_device) < 3, "test requires 0 or 1 or 2 accelerators")(
+        test_case
+    )
 
 
 def require_torch_xla(test_case):
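
For context: in the broken version the return statement ended at the end of the first line, so the function returned the bare skipUnless decorator without ever applying it, and the stray `(test_case)` on the following line was unreachable. The fix keeps the opening parenthesis of the call on the same line as the return, so the decorator is applied to `test_case`. Below is a minimal, self-contained sketch of the fixed pattern; `fake_accelerator_count` is a hypothetical stand-in for transformers' `backend_device_count(torch_device)` and is not part of the library.

    import unittest


    def fake_accelerator_count() -> int:
        # Hypothetical stand-in for backend_device_count(torch_device) from
        # transformers.testing_utils; pretend one accelerator is visible.
        return 1


    def require_up_to_2_accelerators(test_case):
        # Correct pattern: the decorator returned by unittest.skipUnless is
        # applied to test_case within the same return statement. Putting
        # "(test_case)" on its own line after "return" would leave it as
        # unreachable dead code and return the unapplied decorator instead.
        return unittest.skipUnless(
            fake_accelerator_count() < 3, "test requires 0 or 1 or 2 accelerators"
        )(test_case)


    class ExampleTest(unittest.TestCase):
        @require_up_to_2_accelerators
        def test_runs_with_at_most_two_accelerators(self):
            self.assertTrue(True)


    if __name__ == "__main__":
        unittest.main()

Running this file executes the test when the reported count is below 3 and skips it otherwise; the `* make style` entry in the commit message suggests the multi-line wrapping of the fixed call came from the repository's formatter.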