fix: add back variable removed by typo

drbh 2024-02-21 11:55:06 -05:00
parent 3e22cdd14c
commit 66f89120b5


@@ -43,6 +43,8 @@ __all__ = [
 FLASH_ATT_ERROR_MESSAGE = "{} requires Flash Attention enabled models."
+FLASH_ATTENTION = True
+
 # FlashCausalLM requires CUDA Graphs to be enabled on the system. This will throw a RuntimeError
 # if CUDA Graphs are not available when calling `torch.cuda.graph_pool_handle()` in the FlashCausalLM
 HAS_CUDA_GRAPH = False
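
The comment above describes a probe for CUDA Graph support via `torch.cuda.graph_pool_handle()`. As a minimal sketch of what such a check could look like at import time, assuming the flag is set by simply attempting that call and catching the RuntimeError the comment mentions (this is illustrative, not the commit's actual code):

import torch

# Illustrative probe, not from this commit: attempt to acquire a CUDA
# Graph memory pool handle. torch.cuda.graph_pool_handle() raises a
# RuntimeError when CUDA Graphs are not available on the system.
HAS_CUDA_GRAPH = False
try:
    torch.cuda.graph_pool_handle()
    HAS_CUDA_GRAPH = True
except RuntimeError:
    # CUDA Graphs unavailable; leave the flag False so callers such as
    # FlashCausalLM can fall back or raise a clear error.
    pass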