Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-30 17:52:35 +06:00)
[tests] use torch_device instead of auto for model testing (#29531)
* use torch_device
* skip for XPU
* Update tests/generation/test_utils.py

Co-authored-by: amyeroberts <22614925+amyeroberts@users.noreply.github.com>
Parent: 14536c339a
Commit: 1ea3ad1aec
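The commit title contrasts two ways of putting a test model on hardware. The sketch below illustrates both styles in isolation; it assumes torch_device is imported from transformers.testing_utils and uses hf-internal-testing/tiny-random-gpt2 as a placeholder checkpoint, so it shows the general idea rather than reproducing the lines this PR actually changed.

# Illustration only (not the PR's exact diff): the two placement styles the
# commit title refers to. torch_device comes from transformers.testing_utils
# and resolves to the accelerator available on the test machine ("cuda",
# "xpu", ...) or "cpu"; the checkpoint name is a placeholder tiny model.
from transformers import AutoModelForCausalLM
from transformers.testing_utils import torch_device

checkpoint = "hf-internal-testing/tiny-random-gpt2"

# device_map="auto": accelerate decides where each module lives and may
# spread the model across whatever devices it finds.
model_auto = AutoModelForCausalLM.from_pretrained(checkpoint, device_map="auto")

# torch_device: the whole model is moved to the single device the test suite
# selected, which keeps placement deterministic across CI machines.
model_pinned = AutoModelForCausalLM.from_pretrained(checkpoint).to(torch_device)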
tests/generation/test_utils.py
@@ -1073,6 +1073,9 @@ class GenerationTesterMixin:
     @require_torch_multi_accelerator
     def test_model_parallel_beam_search(self):
         for model_class in self.all_generative_model_classes:
+            if "xpu" in torch_device:
+                return unittest.skip("device_map='auto' does not work with XPU devices")
+
             if model_class._no_split_modules is None:
                 continue

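For readers outside the transformers test suite, here is a minimal, self-contained sketch of the skip guard the hunk above adds. It assumes torch_device from transformers.testing_utils (which reads as "xpu" on Intel XPU machines) and uses unittest's standard self.skipTest() for the runtime skip; the test class and its body are illustrative placeholders, not the repository's GenerationTesterMixin.

# Minimal sketch of the XPU skip guard, under the assumptions stated above.
import unittest

from transformers.testing_utils import torch_device


class ModelParallelSketch(unittest.TestCase):
    def test_model_parallel_beam_search(self):
        if "xpu" in torch_device:
            # Per the commit, device_map="auto" does not work with XPU
            # devices, so skip before any model is loaded.
            self.skipTest("device_map='auto' does not work with XPU devices")
        # ... a real test would iterate over generative model classes and run
        # beam search with the model sharded across multiple accelerators.


if __name__ == "__main__":
    unittest.main()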