From b041bf15ae9d5670c8eeae62168f874c083b0753 Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Thu, 23 Nov 2023 13:29:03 +0000
Subject: [PATCH] Fix imports.

---
 server/text_generation_server/models/__init__.py | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/server/text_generation_server/models/__init__.py b/server/text_generation_server/models/__init__.py
index bbcfea96..5b1b5715 100644
--- a/server/text_generation_server/models/__init__.py
+++ b/server/text_generation_server/models/__init__.py
@@ -45,15 +45,6 @@ __all__ = [
 FLASH_ATT_ERROR_MESSAGE = "{} requires Flash Attention enabled models."
 FLASH_ATTENTION = True
-from text_generation_server.models.flash_rw import FlashRWSharded
-from text_generation_server.models.flash_neox import FlashNeoXSharded
-from text_generation_server.models.flash_llama import (
-    FlashLlama,
-)
-from text_generation_server.models.flash_santacoder import (
-    FlashSantacoderSharded,
-)
-from text_generation_server.models.idefics import IDEFICSSharded
 try:
     from text_generation_server.models.flash_rw import FlashRWSharded
     from text_generation_server.models.flash_neox import FlashNeoXSharded
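
The nine deleted lines duplicated, unconditionally, imports that already live inside the try: block immediately below them. Those flash-attention model classes can only be imported when the flash-attention kernels are available, so the unconditional copies made the whole models package fail to import on machines without them, defeating the guard. Below is a minimal sketch of the guarded-import pattern the patch restores; the except branch is not part of the hunk shown above, so its exact body (logging the failure and setting FLASH_ATTENTION = False) is an assumption here, not a quote of the file.

    # Sketch of the guarded-import pattern in models/__init__.py (assumed except
    # branch; only the try side is visible in the hunk above).
    import logging

    logger = logging.getLogger(__name__)

    FLASH_ATT_ERROR_MESSAGE = "{} requires Flash Attention enabled models."

    FLASH_ATTENTION = True
    try:
        # These imports fail when flash-attention kernels are not installed,
        # which is exactly why they must not also appear unconditionally.
        from text_generation_server.models.flash_rw import FlashRWSharded
        from text_generation_server.models.flash_neox import FlashNeoXSharded
        from text_generation_server.models.flash_llama import FlashLlama
        from text_generation_server.models.flash_santacoder import (
            FlashSantacoderSharded,
        )
        from text_generation_server.models.idefics import IDEFICSSharded
    except ImportError as e:
        # Assumed handling: record the reason and disable flash-attention models
        # so the rest of the server keeps working without them.
        logger.warning(f"Could not import Flash Attention enabled models: {e}")
        FLASH_ATTENTION = False

Keeping FLASH_ATTENTION as a module-level flag presumably lets the model factory decide at request time whether a flash-attention architecture can be served, raising FLASH_ATT_ERROR_MESSAGE otherwise, instead of crashing the whole server at import time.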