fix name error when accelerate is not available (#26278)
* fix name error when accelerate is not available
* fix `is_fsdp_available`
parent 382ba670ed · commit 86ffd5ffa2
@@ -607,7 +607,7 @@ def is_accelerate_available(min_version: str = None):


 def is_fsdp_available(min_version: str = "1.12.0"):
-    return version.parse(_torch_version) >= version.parse(min_version)
+    return is_torch_available() and version.parse(_torch_version) >= version.parse(min_version)


 def is_optimum_available():
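Why the extra guard matters, as a minimal runnable sketch: when torch is not installed, transformers leaves `_torch_version` at a placeholder (the `"N/A"` value below is an assumption for illustration), and recent `packaging` releases raise `InvalidVersion` when parsing it. Checking `is_torch_available()` first short-circuits before `version.parse` ever sees the placeholder.

```python
# Minimal sketch of the failure mode fixed above. The "N/A" placeholder
# and the helper definitions mimic transformers' import_utils but are
# assumptions here, not the library's exact code.
from packaging import version

_torch_version = "N/A"  # stand-in for the value used when torch is absent


def is_torch_available() -> bool:
    return _torch_version != "N/A"


def is_fsdp_available(min_version: str = "1.12.0") -> bool:
    # The torch check must come first: version.parse("N/A") raises
    # packaging.version.InvalidVersion, so `and` short-circuits past it.
    return is_torch_available() and version.parse(_torch_version) >= version.parse(min_version)


print(is_fsdp_available())  # False, with no InvalidVersion crash
```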
@@ -79,6 +79,7 @@ if is_torch_available():
 # hack to restore original logging level pre #21700
 get_regression_trainer = partial(tests.trainer.test_trainer.get_regression_trainer, log_level="info")

+require_fsdp_version = require_fsdp
 if is_accelerate_available():
     from accelerate.utils.constants import (
         FSDP_PYTORCH_VERSION,
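This hunk applies a define-then-override pattern: `require_fsdp_version` gets an unconditional fallback binding, and the accelerate branch may rebind it, so later references cannot hit a `NameError` when accelerate is missing. A hedged sketch follows, assuming the branch rebinds it via `functools.partial` with accelerate's `FSDP_PYTORCH_VERSION` (the hunk ends before that line, so the rebinding shown is an assumption):

```python
# Sketch of the define-then-override pattern from the hunk above.
# require_fsdp and the partial() rebinding are assumptions standing in
# for the real test decorator and the code following the hunk.
from functools import partial


def require_fsdp(test_case, min_version: str = "1.12.0"):
    return test_case  # stand-in for the real skip-unless-FSDP decorator


def is_accelerate_available() -> bool:
    return False  # simulate an environment without accelerate


require_fsdp_version = require_fsdp  # fallback: always bound

if is_accelerate_available():
    FSDP_PYTORCH_VERSION = "2.1.0"  # placeholder for accelerate's constant
    require_fsdp_version = partial(require_fsdp, min_version=FSDP_PYTORCH_VERSION)


@require_fsdp_version  # safe either way; previously this could raise NameError
def test_fsdp_config():
    pass
```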