accelerate==0.25.*
colorama
datasets
einops
exllamav2==0.0.10
gradio==3.50.*
markdown
numpy==1.24.*
optimum==1.15.*
pandas
peft==0.7.*
Pillow>=9.5.0
pyyaml
requests
safetensors==0.4.1
scipy
sentencepiece
tensorboard
transformers==4.36.*
tqdm
wandb

git+https://github.com/oobabooga/torch-grammar.git

# bitsandbytes
bitsandbytes==0.38.1; platform_system != "Windows"
https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.38.1-py3-none-win_amd64.whl; platform_system == "Windows"

# llama-cpp-python (CPU only, no AVX2)
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp39-cp39-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.9"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp38-cp38-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.8"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp39-cp39-win_amd64.whl; platform_system == "Windows" and python_version == "3.9"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp38-cp38-win_amd64.whl; platform_system == "Windows" and python_version == "3.8"

# AMD wheels
https://github.com/jllllll/AutoGPTQ/releases/download/v0.5.1/auto_gptq-0.5.1+rocm5.6-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/jllllll/AutoGPTQ/releases/download/v0.5.1/auto_gptq-0.5.1+rocm5.6-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
https://github.com/jllllll/AutoGPTQ/releases/download/v0.5.1/auto_gptq-0.5.1+rocm5.6-cp39-cp39-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.9"
https://github.com/jllllll/AutoGPTQ/releases/download/v0.5.1/auto_gptq-0.5.1+rocm5.6-cp38-cp38-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.8"
https://github.com/jllllll/exllama/releases/download/0.0.18/exllama-0.0.18+rocm5.6-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/jllllll/exllama/releases/download/0.0.18/exllama-0.0.18+rocm5.6-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
https://github.com/jllllll/exllama/releases/download/0.0.18/exllama-0.0.18+rocm5.6-cp39-cp39-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.9"
https://github.com/jllllll/exllama/releases/download/0.0.18/exllama-0.0.18+rocm5.6-cp38-cp38-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.8"
https://github.com/jllllll/GPTQ-for-LLaMa-CUDA/releases/download/0.1.1/gptq_for_llama-0.1.1+rocm5.6-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/jllllll/GPTQ-for-LLaMa-CUDA/releases/download/0.1.1/gptq_for_llama-0.1.1+rocm5.6-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
https://github.com/jllllll/GPTQ-for-LLaMa-CUDA/releases/download/0.1.1/gptq_for_llama-0.1.1+rocm5.6-cp39-cp39-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.9"
https://github.com/jllllll/GPTQ-for-LLaMa-CUDA/releases/download/0.1.1/gptq_for_llama-0.1.1+rocm5.6-cp38-cp38-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.8"