Skip to content

Commit

Permalink
uncomment flash error capture
Browse files Browse the repository at this point in the history
  • Loading branch information
fxmarty committed Jun 24, 2024
1 parent 20bda42 commit 2d2325c
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions backends/python/server/text_embeddings_server/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@
torch.set_grad_enabled(False)

FLASH_ATTENTION = True
# try:
from text_embeddings_server.models.flash_bert import FlashBert
# except ImportError as e:
# logger.warning(f"Could not import Flash Attention enabled models: {e}")
# FLASH_ATTENTION = False
try:
from text_embeddings_server.models.flash_bert import FlashBert
except ImportError as e:
logger.warning(f"Could not import Flash Attention enabled models: {e}")
FLASH_ATTENTION = False

if FLASH_ATTENTION:
__all__.append(FlashBert)
Expand Down

0 comments on commit 2d2325c

Please sign in to comment.