Mirror of https://github.com/huggingface/transformers.git — synced 2025-07-31 02:02:21 +06:00.
Fix FA2 import + deprecation cycle (#27330)
* put back import * switch to logger.warnings instead
This commit is contained in:
parent
2ac5b9325e
commit
721d1c8ca6
@ -117,6 +117,7 @@ from .import_utils import (
|
||||
is_essentia_available,
|
||||
is_faiss_available,
|
||||
is_flash_attn_2_available,
|
||||
is_flash_attn_available,
|
||||
is_flax_available,
|
||||
is_fsdp_available,
|
||||
is_ftfy_available,
|
||||
|
@ -614,6 +614,14 @@ def is_flash_attn_2_available():
|
||||
return _flash_attn_2_available and torch.cuda.is_available()
|
||||
|
||||
|
||||
def is_flash_attn_available():
    """Deprecated alias kept for backward compatibility.

    Emits a deprecation warning and delegates to `is_flash_attn_2_available`,
    which is the replacement callers should migrate to.
    """
    # NOTE: adjacent string literals concatenate at compile time, so the
    # logger receives a single message string, identical to the original.
    deprecation_message = (
        "Using `is_flash_attn_available` is deprecated and will be removed in v4.38. "
        "Please use `is_flash_attn_2_available` instead."
    )
    logger.warning(deprecation_message)
    return is_flash_attn_2_available()
|
||||
|
||||
|
||||
def is_torchdistx_available():
    """Report whether `torchdistx` was detected at import time.

    Simply exposes the module-level `_torchdistx_available` flag; the actual
    availability probe happens elsewhere when this module is imported.
    """
    return _torchdistx_available
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user