add back warning

fxmarty 2024-05-17 16:51:31 +00:00
parent c6565e8259
commit f82ae76dff


@@ -84,6 +84,7 @@ try:
         HAS_FLASH_ATTN_V2_ROCM,
     )
 except ImportError as e:
+    logger.warning(f"Could not import Flash Attention enabled models: {e}")
     FLASH_ATTENTION = False
     HAS_FLASH_ATTN_V2_CUDA = False
     HAS_FLASH_ATTN_V2_ROCM = False
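
For context, a minimal sketch of the optional-import guard this hunk modifies, as it reads after the change. The module path and the use of loguru's logger are assumptions for illustration; only the capability flags and the restored warning line come from the diff.

    # Sketch of the try/except import guard around line 84 of the edited file.
    from loguru import logger  # assumed logging setup

    FLASH_ATTENTION = True
    try:
        # Assumed module path; the real imports are elided in this hunk.
        from text_generation_server.utils.flash_attn import (
            HAS_FLASH_ATTN_V2_CUDA,
            HAS_FLASH_ATTN_V2_ROCM,
        )
    except ImportError as e:
        # Restored by this commit: report why Flash Attention models are
        # unavailable instead of silently falling back.
        logger.warning(f"Could not import Flash Attention enabled models: {e}")
        FLASH_ATTENTION = False
        HAS_FLASH_ATTN_V2_CUDA = False
        HAS_FLASH_ATTN_V2_ROCM = False

Defining the flags in both branches lets downstream code check FLASH_ATTENTION and the V2 flags without guarding against NameError, while the re-added warning surfaces the underlying ImportError for debugging.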