Mirror of https://github.com/huggingface/transformers.git
fix name error when accelerate is not available (#26278)
* fix name error when accelerate is not available
* fix `is_fsdp_available`

This commit is contained in:
parent 382ba670ed
commit 86ffd5ffa2
@@ -607,7 +607,7 @@ def is_accelerate_available(min_version: str = None):


 def is_fsdp_available(min_version: str = "1.12.0"):
-    return version.parse(_torch_version) >= version.parse(min_version)
+    return is_torch_available() and version.parse(_torch_version) >= version.parse(min_version)


 def is_optimum_available():
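For context, here is a minimal, self-contained sketch of the guard pattern this hunk applies. The module-level names below only mirror the shape of the real import utilities and are illustrative, not the library's actual internals: when torch is not installed, the recorded version string is not parseable, so the availability check must short-circuit before `version.parse` ever runs.

# Hedged sketch of the short-circuit guard, assuming a stand-in module layout.
import importlib.metadata
import importlib.util

from packaging import version

_torch_available = importlib.util.find_spec("torch") is not None
_torch_version = "N/A"  # placeholder when torch is absent; not a parseable version
if _torch_available:
    try:
        _torch_version = importlib.metadata.version("torch")
    except importlib.metadata.PackageNotFoundError:
        _torch_available = False


def is_torch_available():
    return _torch_available


def is_fsdp_available(min_version: str = "1.12.0"):
    # `and` short-circuits, so version.parse("N/A") is never evaluated
    # in environments without torch installed.
    return is_torch_available() and version.parse(_torch_version) >= version.parse(min_version)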
@@ -79,6 +79,7 @@ if is_torch_available():
     # hack to restore original logging level pre #21700
     get_regression_trainer = partial(tests.trainer.test_trainer.get_regression_trainer, log_level="info")

+require_fsdp_version = require_fsdp
 if is_accelerate_available():
     from accelerate.utils.constants import (
         FSDP_PYTORCH_VERSION,
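The test-side change follows the same idea: bind a plain fallback first, and only reference accelerate's names after confirming the package can be imported. Below is a hedged sketch of that pattern; `require_fsdp` here is a stand-in for the real skip decorator in the test utilities, and the `partial(...)` rebinding is an assumed usage, not a quote of the test file.

# Illustrative sketch of the conditional-import fallback.
import importlib.util
from functools import partial


def is_accelerate_available():
    return importlib.util.find_spec("accelerate") is not None


def require_fsdp(test_case, min_version: str = "1.12.0"):
    # stand-in for the real skip decorator; only the guarding pattern matters here
    return test_case


# Fallback bound unconditionally, so the name always exists.
require_fsdp_version = require_fsdp
if is_accelerate_available():
    # Imported and referenced only when accelerate is installed, so
    # FSDP_PYTORCH_VERSION cannot raise a NameError in bare environments.
    from accelerate.utils.constants import FSDP_PYTORCH_VERSION

    require_fsdp_version = partial(require_fsdp, min_version=FSDP_PYTORCH_VERSION)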