diff --git a/src/transformers/utils/__init__.py b/src/transformers/utils/__init__.py
index 1a0d68f700ec..e7911e5d552a 100644
--- a/src/transformers/utils/__init__.py
+++ b/src/transformers/utils/__init__.py
@@ -117,6 +117,7 @@
     is_essentia_available,
     is_faiss_available,
     is_flash_attn_2_available,
+    is_flash_attn_available,
     is_flax_available,
     is_fsdp_available,
     is_ftfy_available,
diff --git a/src/transformers/utils/import_utils.py b/src/transformers/utils/import_utils.py
index 3c05cac7dbe2..c4862b197c97 100644
--- a/src/transformers/utils/import_utils.py
+++ b/src/transformers/utils/import_utils.py
@@ -614,6 +614,14 @@ def is_flash_attn_2_available():
     return _flash_attn_2_available and torch.cuda.is_available()
 
 
+def is_flash_attn_available():
+    logger.warning(
+        "Using `is_flash_attn_available` is deprecated and will be removed in v4.38. "
+        "Please use `is_flash_attn_2_available` instead."
+    )
+    return is_flash_attn_2_available()
+
+
 def is_torchdistx_available():
     return _torchdistx_available
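
For context, the diff re-exposes `is_flash_attn_available` as a deprecation shim that delegates to `is_flash_attn_2_available` while warning callers. A minimal sketch of what the migration looks like from a downstream caller's side (the caller code below is hypothetical, not part of this PR):

```python
# Old call path: still importable thanks to the shim added above,
# but logs a deprecation warning until removal in v4.38.
from transformers.utils import is_flash_attn_available

if is_flash_attn_available():  # warns: use `is_flash_attn_2_available` instead
    pass

# Preferred call path going forward:
from transformers.utils import is_flash_attn_2_available

if is_flash_attn_2_available():
    # True only when the flash-attn 2 package is installed
    # and a CUDA device is available (see import_utils.py above).
    print("FlashAttention 2 is usable.")
```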