mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-10-01 01:26:03 -04:00)
Simplify the warning when flash-attn fails to import
parent b92d7fd43e
commit c61b29b9ce
@@ -19,14 +19,6 @@ from modules.text_generation import get_max_prompt_length
 
 try:
     import flash_attn
-except ModuleNotFoundError:
-    logger.warning(
-        'You are running ExLlamaV2 without flash-attention. This will cause the VRAM usage '
-        'to be a lot higher than it could be.\n'
-        'Try installing flash-attention following the instructions here: '
-        'https://github.com/Dao-AILab/flash-attention#installation-and-features'
-    )
-    pass
 except Exception:
     logger.warning('Failed to load flash-attention due to the following error:\n')
     traceback.print_exc()

@@ -21,14 +21,6 @@ from modules.logging_colors import logger
 
 try:
     import flash_attn
-except ModuleNotFoundError:
-    logger.warning(
-        'You are running ExLlamaV2 without flash-attention. This will cause the VRAM usage '
-        'to be a lot higher than it could be.\n'
-        'Try installing flash-attention following the instructions here: '
-        'https://github.com/Dao-AILab/flash-attention#installation-and-features'
-    )
-    pass
 except Exception:
     logger.warning('Failed to load flash-attention due to the following error:\n')
     traceback.print_exc()
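
After the removal, the context lines suggest both files keep only the generic fallback handler. A minimal sketch of the resulting import guard, assuming nothing changes beyond the deleted block; the `logging` setup below is a stand-in for the project's `modules.logging_colors.logger`, not the actual module:

    import logging
    import traceback

    # Stand-in for modules.logging_colors.logger used in the real files (assumption).
    logger = logging.getLogger(__name__)

    try:
        import flash_attn  # noqa: F401  # imported only to check availability
    except Exception:
        # Any import failure now emits one short warning plus the traceback,
        # replacing the long ExLlamaV2-specific message removed by this commit.
        logger.warning('Failed to load flash-attention due to the following error:\n')
        traceback.print_exc()

The effect is that a missing flash-attn package is no longer treated as a special case with installation instructions; it falls through to the same short warning as any other import error.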