Better way to detect if a model has been downloaded

oobabooga 2023-06-01 14:01:19 -03:00
parent 248ef32358
commit 5540335819


@@ -232,7 +232,7 @@ if __name__ == "__main__":
     os.chdir(script_dir)
     # Check if a model has been downloaded yet
-    if len(glob.glob("text-generation-webui/models/*/")) == 0:
+    if len([item for item in glob.glob('text-generation-webui/models/*') if not item.endswith(('.txt', '.yaml'))]) == 0:
         print_big_message("WARNING: You haven't downloaded any model yet.\nOnce the web UI launches, head over to the bottom of the \"Model\" tab and download one.")
     # Workaround for llama-cpp-python loading paths in CUDA env vars even if they do not exist
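For reference, the new condition can be read as a small standalone check: any entry under text-generation-webui/models that is not one of the repo's placeholder .txt/.yaml files counts as a downloaded model, whether it is a model folder or a single weight file sitting directly in the directory. The helper below is only an illustrative sketch of that logic, not code from the patched script, and the function name is made up for the example.

    import glob

    def model_downloaded(models_dir="text-generation-webui/models"):
        # Illustrative helper: list everything in the models directory,
        # skipping the placeholder .txt/.yaml files that ship with the repo.
        entries = [
            item for item in glob.glob(f"{models_dir}/*")
            if not item.endswith(('.txt', '.yaml'))
        ]
        # Anything left (a subfolder or a standalone model file) means a
        # model has been downloaded.
        return len(entries) > 0

Unlike the old glob on "text-generation-webui/models/*/", which only matched subdirectories, this version also recognizes models stored as single files directly inside the models directory.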