Mirror of https://github.com/oobabooga/text-generation-webui.git
Model downloader: download only fp16 if both fp16 and GGUF are present
commit d516815c9c
parent 98361af4d5
@@ -127,9 +127,22 @@ class ModelDownloader:
                if classifications[i] in ['pytorch', 'pt']:
                    links.pop(i)

        # For GGUF, try to download only the Q4_K_M if no specific file is specified.
        # If not present, exclude all GGUFs, as that's likely a repository with both
        # GGUF and fp16 files.
        if has_gguf and specific_file is None:
            has_q4km = False
            for i in range(len(classifications) - 1, -1, -1):
                if 'q4_k_m' in links[i].lower():
                    has_q4km = True

            if has_q4km:
                for i in range(len(classifications) - 1, -1, -1):
                    if 'q4_k_m' not in links[i].lower():
                        links.pop(i)
            else:
                for i in range(len(classifications) - 1, -1, -1):
                    if links[i].lower().endswith('.gguf'):
                        links.pop(i)

        is_llamacpp = has_gguf and specific_file is not None
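For illustration, below is a minimal standalone sketch of the selection rule added in this commit, applied to two hypothetical repository listings. The helper name pick_links and the file names are invented for the example; the actual downloader applies the same rule in place to the links and classifications lists it builds from the Hugging Face file listing.

def pick_links(links, specific_file=None):
    # Hypothetical helper reproducing the rule above: prefer the Q4_K_M GGUF;
    # if no Q4_K_M exists, drop every GGUF so the fp16 files are downloaded.
    links = list(links)
    has_gguf = any(link.lower().endswith('.gguf') for link in links)

    if has_gguf and specific_file is None:
        if any('q4_k_m' in link.lower() for link in links):
            links = [link for link in links if 'q4_k_m' in link.lower()]
        else:
            links = [link for link in links if not link.lower().endswith('.gguf')]

    return links

# Repository with both fp16 and GGUF files but no Q4_K_M: the GGUFs are dropped.
print(pick_links(['model-00001-of-00002.safetensors', 'model.Q5_K_M.gguf', 'config.json']))
# GGUF-only repository that ships a Q4_K_M: only that quantization remains.
print(pick_links(['mymodel.Q4_K_M.gguf', 'mymodel.Q8_0.gguf']))

When a specific file is requested (specific_file is not None), the block is skipped entirely and is_llamacpp is set, matching the last line of the diff.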