diff --git a/requirements/portable/requirements_vulkan.txt b/requirements/portable/requirements_vulkan.txt
new file mode 100644
index 00000000..15834f89
--- /dev/null
+++ b/requirements/portable/requirements_vulkan.txt
@@ -0,0 +1,19 @@
+fastapi==0.112.4
+gradio==4.37.*
+jinja2==3.1.6
+markdown
+numpy==1.26.*
+pydantic==2.8.2
+pyyaml
+requests
+rich
+tqdm
+
+# API
+flask_cloudflared==0.0.14
+sse-starlette==1.6.5
+tiktoken
+
+# Vulkan wheels
+https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.3.0/llama_cpp_binaries-0.3.0+vulkan-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.3.0/llama_cpp_binaries-0.3.0+vulkan-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
diff --git a/requirements/portable/requirements_vulkan_noavx2.txt b/requirements/portable/requirements_vulkan_noavx2.txt
new file mode 100644
index 00000000..afb9e90f
--- /dev/null
+++ b/requirements/portable/requirements_vulkan_noavx2.txt
@@ -0,0 +1,19 @@
+fastapi==0.112.4
+gradio==4.37.*
+jinja2==3.1.6
+markdown
+numpy==1.26.*
+pydantic==2.8.2
+pyyaml
+requests
+rich
+tqdm
+
+# API
+flask_cloudflared==0.0.14
+sse-starlette==1.6.5
+tiktoken
+
+# Vulkan wheels (AVX, no AVX2)
+https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.3.0/llama_cpp_binaries-0.3.0+vulkanavx-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.3.0/llama_cpp_binaries-0.3.0+vulkanavx-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
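
The wheel URLs in both files are gated by PEP 508 environment markers (the `; platform_system == ... and python_version == ...` suffixes), so pip only installs the Vulkan binary matching the current OS, architecture, and Python version. As a minimal illustration (not part of this PR), the same markers can be checked with the `packaging` library, which is what pip uses internally for marker evaluation; the marker string below is copied from the Windows wheel line:

```python
# Sketch only: evaluate the PEP 508 environment marker from the Windows
# Vulkan wheel line against the current interpreter and platform.
from packaging.markers import Marker

# Marker copied verbatim from requirements_vulkan.txt
marker = Marker('platform_system == "Windows" and python_version == "3.11"')

# evaluate() returns True only when the running environment satisfies the
# marker, i.e. Windows with CPython 3.11 in this case.
print(marker.evaluate())
```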