accelerate==1.12.*
audioop-lts<1.0; python_version >= "3.13"
bitsandbytes==0.49.*
datasets
diffusers==0.36.*
einops
fastapi==0.112.4
flash-linear-attention==0.4.*
html2text==2025.4.15
huggingface-hub==1.5.*
jinja2==3.1.6
markdown
numpy==2.2.*
pandas
peft==0.18.*
Pillow>=9.5.0
pydantic==2.11.0
pymupdf==1.27.1
python-docx==1.1.2
pyyaml
requests
rich
safetensors==0.7.*
scipy
sentencepiece
tensorboard
torchao==0.15.*
transformers==5.3.*
triton-windows==3.5.1.post24; platform_system == "Windows"
tqdm
wandb

# Gradio
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.9/gradio-4.37.2+custom.9-py3-none-any.whl
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.9/gradio_client-1.0.2+custom.9-py3-none-any.whl

# API
flask_cloudflared==0.0.15
sse-starlette==1.6.5
tiktoken

# CUDA wheels
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.87.0/llama_cpp_binaries-0.87.0+cu124-py3-none-win_amd64.whl; platform_system == "Windows"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.87.0/llama_cpp_binaries-0.87.0+cu124-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
https://github.com/turboderp-org/exllamav3/releases/download/v0.0.23/exllamav3-0.0.23+cu128.torch2.9.0-cp313-cp313-win_amd64.whl; platform_system == "Windows" and python_version == "3.13"
https://github.com/turboderp-org/exllamav3/releases/download/v0.0.23/exllamav3-0.0.23+cu128.torch2.9.0-cp313-cp313-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.13"
https://github.com/kingbri1/flash-attention/releases/download/v2.8.3/flash_attn-2.8.3+cu128torch2.9.0cxx11abiFALSE-cp313-cp313-win_amd64.whl; platform_system == "Windows" and python_version == "3.13"
https://github.com/kingbri1/flash-attention/releases/download/v2.8.3/flash_attn-2.8.3+cu128torch2.9.0cxx11abiFALSE-cp313-cp313-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.13"