Add dedicated ik portable requirements files and remove macOS ik builds

This commit is contained in:
oobabooga 2026-04-02 14:49:31 -03:00
parent ea1f8c71f2
commit c50e17bdbe
6 changed files with 91 additions and 50 deletions

View file

@@ -96,10 +96,3 @@ jobs:
with:
version: ${{ inputs.version }}
config: 'os:ubuntu-22.04'
build_release_ik_macos:
name: ik macOS
uses: ./.github/workflows/build-portable-release-ik.yml
with:
version: ${{ inputs.version }}
config: 'os:macos-14'

View file

@@ -138,14 +138,13 @@ jobs:
# 3. Prepare requirements file based on CUDA version
cd "text-generation-webui-${VERSION_CLEAN}"
if [[ "$CUDA_VERSION" == "13.1" ]]; then
REQ_FILE="requirements/portable/requirements_cuda131.txt"
REQ_FILE="requirements/portable/requirements_ik_cuda131.txt"
else
REQ_FILE="requirements/portable/requirements.txt"
REQ_FILE="requirements/portable/requirements_ik.txt"
fi
# 4. Swap llama.cpp wheels for ik_llama.cpp and inject --ik into start scripts
sed -i 's|/llama_cpp_binaries-|/ik_llama_cpp_binaries-|g' "$REQ_FILE"
sed -i 's/--portable/--portable --ik/g' start_linux.sh start_windows.bat start_macos.sh 2>/dev/null || true
# 4. Inject --ik into start scripts
sed -i 's/--portable/--portable --ik/g' start_linux.sh start_windows.bat 2>/dev/null || true
# 5. Install packages
echo "Installing Python packages from $REQ_FILE..."

View file

@@ -1,4 +1,4 @@
name: Build ik CPU and macOS
name: Build ik CPU
on:
workflow_dispatch:
@@ -57,7 +57,7 @@ jobs:
id: set-matrix
run: |
$matrix = @{
'os' = @('ubuntu-22.04', 'windows-2022', 'macos-14')
'os' = @('ubuntu-22.04', 'windows-2022')
'pyver' = @("3.13")
}
@@ -110,7 +110,6 @@ jobs:
# Define common variables
VERSION="${{ inputs.version }}"
OS_TYPE="${{ matrix.os }}"
# 1. Set platform-specific variables
if [[ "$RUNNER_OS" == "Windows" ]]; then
@@ -119,21 +118,7 @@
PIP_PATH="portable_env/python.exe -m pip"
PACKAGES_PATH="portable_env/Lib/site-packages"
rm start_linux.sh start_macos.sh
elif [[ "$RUNNER_OS" == "macOS" ]]; then
if [[ "$OS_TYPE" == "macos-15-intel" ]]; then
PLATFORM="macos-x86_64"
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-apple-darwin-install_only_stripped.tar.gz"
REQ_TYPE="apple_intel"
else
PLATFORM="macos-arm64"
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-aarch64-apple-darwin-install_only_stripped.tar.gz"
REQ_TYPE="apple_silicon"
fi
PIP_PATH="portable_env/bin/python -m pip"
PACKAGES_PATH="portable_env/lib/python3.13/site-packages"
rm start_linux.sh start_windows.bat
else
# Linux case
PLATFORM="linux-cpu"
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz"
PIP_PATH="portable_env/bin/python -m pip"
@@ -148,30 +133,13 @@ jobs:
tar -xzf python-build.tar.gz
mv python "text-generation-webui-${VERSION_CLEAN}/portable_env"
# 3. Prepare requirements file based on platform
# 3. Prepare requirements file
cd "text-generation-webui-${VERSION_CLEAN}"
# Select requirements file based on platform
if [[ "$RUNNER_OS" == "macOS" ]]; then
if [[ "$OS_TYPE" == "macos-15-intel" ]]; then
REQ_FILE="requirements/portable/requirements_apple_intel.txt"
else
REQ_FILE="requirements/portable/requirements_apple_silicon.txt"
fi
else
REQ_FILE="requirements/portable/requirements_cpu_only.txt"
fi
REQ_FILE="requirements/portable/requirements_ik_cpu_only.txt"
echo "Using requirements file: $REQ_FILE"
# 4. Swap llama.cpp wheels for ik_llama.cpp and inject --ik into start scripts
if [[ "$RUNNER_OS" == "macOS" ]]; then
sed -i '' 's|/llama_cpp_binaries-|/ik_llama_cpp_binaries-|g' "$REQ_FILE"
sed -i '' 's/--portable/--portable --ik/g' start_macos.sh
else
sed -i 's|/llama_cpp_binaries-|/ik_llama_cpp_binaries-|g' "$REQ_FILE"
sed -i 's/--portable/--portable --ik/g' start_linux.sh start_windows.bat 2>/dev/null || true
fi
# 4. Inject --ik into start scripts
sed -i 's/--portable/--portable --ik/g' start_linux.sh start_windows.bat 2>/dev/null || true
# 5. Install packages
echo "Installing Python packages from $REQ_FILE..."

View file

@@ -0,0 +1,27 @@
audioop-lts<1.0; python_version >= "3.13"
fastapi==0.112.4
huggingface-hub==1.5.*
jinja2==3.1.6
markdown
numpy==2.2.*
pydantic==2.11.0
pymupdf==1.27.*
python-docx==1.1.2
pyyaml
requests
rich
trafilatura==2.0.0
tqdm
# Gradio
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.13/gradio-4.37.2+custom.13-py3-none-any.whl
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.13/gradio_client-1.0.2+custom.13-py3-none-any.whl
# API
flask_cloudflared==0.0.15
sse-starlette==1.6.5
tiktoken
# CUDA wheels
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.101.0/ik_llama_cpp_binaries-0.101.0+cu124-py3-none-win_amd64.whl; platform_system == "Windows"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.101.0/ik_llama_cpp_binaries-0.101.0+cu124-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"

View file

@@ -0,0 +1,27 @@
audioop-lts<1.0; python_version >= "3.13"
fastapi==0.112.4
huggingface-hub==1.5.*
jinja2==3.1.6
markdown
numpy==2.2.*
pydantic==2.11.0
pymupdf==1.27.*
python-docx==1.1.2
pyyaml
requests
rich
trafilatura==2.0.0
tqdm
# Gradio
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.13/gradio-4.37.2+custom.13-py3-none-any.whl
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.13/gradio_client-1.0.2+custom.13-py3-none-any.whl
# API
flask_cloudflared==0.0.15
sse-starlette==1.6.5
tiktoken
# ik_llama.cpp (CPU only)
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.101.0/ik_llama_cpp_binaries-0.101.0+cpu-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.101.0/ik_llama_cpp_binaries-0.101.0+cpu-py3-none-win_amd64.whl; platform_system == "Windows"

View file

@@ -0,0 +1,27 @@
audioop-lts<1.0; python_version >= "3.13"
fastapi==0.112.4
huggingface-hub==1.5.*
jinja2==3.1.6
markdown
numpy==2.2.*
pydantic==2.11.0
pymupdf==1.27.*
python-docx==1.1.2
pyyaml
requests
rich
trafilatura==2.0.0
tqdm
# Gradio
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.13/gradio-4.37.2+custom.13-py3-none-any.whl
https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.13/gradio_client-1.0.2+custom.13-py3-none-any.whl
# API
flask_cloudflared==0.0.15
sse-starlette==1.6.5
tiktoken
# CUDA wheels
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.101.0/ik_llama_cpp_binaries-0.101.0+cu131-py3-none-win_amd64.whl; platform_system == "Windows"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.101.0/ik_llama_cpp_binaries-0.101.0+cu131-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"