2023-09-22 00:35:53 +02:00
import argparse
import glob
2023-10-07 05:23:49 +02:00
import hashlib
2025-01-27 18:07:39 +01:00
import json
2023-09-22 00:35:53 +02:00
import os
2023-09-24 14:58:29 +02:00
import platform
2023-09-26 15:56:57 +02:00
import re
2023-12-05 06:16:16 +01:00
import signal
2023-09-22 05:12:16 +02:00
import site
2023-09-23 15:48:09 +02:00
import subprocess
2023-09-22 00:35:53 +02:00
import sys
2023-09-22 17:02:21 +02:00
# Remove the '# ' from the following lines as needed for your AMD GPU on Linux
# os.environ["ROCM_PATH"] = '/opt/rocm'
# os.environ["HSA_OVERRIDE_GFX_VERSION"] = '10.3.0'
# os.environ["HCC_AMDGPU_TARGET"] = 'gfx1030'
2025-04-09 05:07:08 +02:00
# Define the required versions
TORCH_VERSION = "2.7.0"  # default PyTorch pin; the CUDA 12.8 branch overrides it inline
PYTHON_VERSION = "3.11"  # Python version expected in the conda env — not referenced in this chunk
LIBSTDCXX_VERSION_LINUX = "12.1.0"  # libstdc++ pinned on Linux via conda-forge

# Environment
# NOTE(review): assumes the script is launched from the repository root — confirm against the start scripts
script_dir = os.getcwd()
conda_env_path = os.path.join(script_dir, "installer_files", "env")

# JSON file remembering the GPU choice and the last installed commit
state_file = '.installer_state.json'

# Command-line flags
# Forward every CLI flag to server.py except the installer-only --update-wizard.
flags = f"{' '.join([flag for flag in sys.argv[1:] if flag != '--update-wizard'])}"
2023-09-22 00:35:53 +02:00
2023-09-28 22:56:15 +02:00
2023-12-05 06:16:16 +01:00
def signal_handler(sig, frame):
    """Exit quietly on Ctrl+C instead of printing a traceback."""
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)
2023-09-22 17:02:21 +02:00
def _platform_is(prefix):
    # Shared helper: test the sys.platform prefix.
    return sys.platform.startswith(prefix)


def is_linux():
    """Return True when running on Linux."""
    return _platform_is("linux")


def is_windows():
    """Return True when running on Windows."""
    return _platform_is("win")


def is_macos():
    """Return True when running on macOS."""
    return _platform_is("darwin")


def is_x86_64():
    """Return True when the machine architecture is x86_64."""
    return platform.machine() == "x86_64"
2025-04-21 03:57:26 +02:00
def is_installed():
    """Heuristically decide whether the webui environment is already set up.

    Looks for torch inside the conda env's site-packages; if no matching
    site-packages directory is found, falls back to checking that the env
    directory exists at all.
    """
    env_site_packages = next(
        (d for d in site.getsitepackages() if "site-packages" in d and conda_env_path in d),
        None,
    )

    if env_site_packages is None:
        return os.path.isdir(conda_env_path)

    return os.path.isfile(os.path.join(env_site_packages, 'torch', '__init__.py'))
2024-04-30 14:11:31 +02:00
def cpu_has_avx2():
    """Return True if the CPU reports AVX2 support.

    Falls back to True whenever detection fails (py-cpuinfo missing, or its
    report lacks a usable 'flags' entry) so the default requirements are used.
    """
    try:
        import cpuinfo

        info = cpuinfo.get_cpu_info()
        # fix: tolerate a missing 'flags' key instead of raising KeyError
        return 'avx2' in info.get('flags', [])
    # fix: narrow the bare 'except' — never swallow SystemExit/KeyboardInterrupt
    except Exception:
        return True
def cpu_has_amx():
    """Return True if the CPU reports AMX support.

    Falls back to True whenever detection fails (py-cpuinfo missing, or its
    report lacks a usable 'flags' entry) so the default requirements are used.
    """
    try:
        import cpuinfo

        info = cpuinfo.get_cpu_info()
        # fix: tolerate a missing 'flags' key instead of raising KeyError
        return 'amx' in info.get('flags', [])
    # fix: narrow the bare 'except' — never swallow SystemExit/KeyboardInterrupt
    except Exception:
        return True
2025-06-02 14:57:55 +02:00
def load_state():
    """Load installer state from the JSON state file; return {} on any problem."""
    if os.path.exists(state_file):
        try:
            with open(state_file, 'r') as f:
                return json.load(f)
        # fix: replace the bare 'except' with the errors that can actually
        # occur here — an unreadable file or malformed JSON
        # (json.JSONDecodeError subclasses ValueError)
        except (OSError, ValueError):
            return {}
    return {}
2023-09-25 03:16:59 +02:00
2025-06-02 14:57:55 +02:00
def save_state(state):
    """Persist the installer state dict to the JSON state file."""
    with open(state_file, 'w') as handle:
        json.dump(state, handle)
def get_gpu_choice():
    """Return the saved GPU choice, asking the user (or env var) on first run."""
    state = load_state()
    gpu_choice = state.get('gpu_choice')

    if not gpu_choice:
        # Allow non-interactive selection through the GPU_CHOICE env variable.
        if "GPU_CHOICE" in os.environ:
            choice = os.environ["GPU_CHOICE"].upper()
            print_big_message(f"Selected GPU choice \"{choice}\" based on the GPU_CHOICE environment variable.")
        else:
            choice = get_user_choice(
                "What is your GPU?",
                {
                    'A': 'NVIDIA',
                    'B': 'AMD - Linux/macOS only, requires ROCm 6.2.4',
                    'C': 'Apple M Series',
                    'D': 'Intel Arc (beta)',
                    'N': 'CPU mode'
                },
            )

        # Map the menu letter to the internal backend identifier.
        letter_to_backend = {"A": "NVIDIA_CUDA128", "B": "AMD", "C": "APPLE", "D": "INTEL", "N": "NONE"}
        gpu_choice = letter_to_backend[choice]

        # Persist the decision so future runs skip the prompt.
        state['gpu_choice'] = gpu_choice
        save_state(state)

    return gpu_choice
def get_pytorch_install_command(gpu_choice):
    """Build the pip command that installs PyTorch for the given backend."""
    base_cmd = f"python -m pip install torch=={TORCH_VERSION}"

    if gpu_choice == "NVIDIA_CUDA128":
        # The CUDA build is pinned separately from TORCH_VERSION.
        return "python -m pip install torch==2.7.1 --index-url https://download.pytorch.org/whl/cu128"
    if gpu_choice == "AMD":
        return base_cmd + " --index-url https://download.pytorch.org/whl/rocm6.2.4"
    if gpu_choice in ("APPLE", "NONE"):
        return base_cmd + " --index-url https://download.pytorch.org/whl/cpu"
    if gpu_choice == "INTEL":
        # Intel-patched torch plus its extension; the +xpu wheel is Linux-only.
        if is_linux():
            return "python -m pip install torch==2.1.0a0 intel-extension-for-pytorch==2.1.10+xpu --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/"
        return "python -m pip install torch==2.1.0a0 intel-extension-for-pytorch==2.1.10 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/"

    return base_cmd
def get_pytorch_update_command(gpu_choice):
    """Build the pip command that upgrades PyTorch for the given backend."""
    base_cmd = f"python -m pip install --upgrade torch=={TORCH_VERSION}"

    if gpu_choice == "NVIDIA_CUDA128":
        # The CUDA build is pinned separately from TORCH_VERSION.
        return "python -m pip install --upgrade torch==2.7.1 --index-url https://download.pytorch.org/whl/cu128"
    if gpu_choice == "AMD":
        return f"{base_cmd} --index-url https://download.pytorch.org/whl/rocm6.2.4"
    if gpu_choice in ("APPLE", "NONE"):
        return f"{base_cmd} --index-url https://download.pytorch.org/whl/cpu"
    if gpu_choice == "INTEL":
        # The +xpu extension wheel is Linux-only.
        intel_extension = "intel-extension-for-pytorch==2.1.10+xpu" if is_linux() else "intel-extension-for-pytorch==2.1.10"
        return f"{base_cmd} {intel_extension} --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/"

    return base_cmd
2024-01-05 03:50:23 +01:00
2025-06-02 14:57:55 +02:00
def get_requirements_file(gpu_choice):
    """Return the path of the requirements file matching the GPU choice.

    Raises ValueError for an unrecognized backend identifier.
    """
    if gpu_choice == "AMD":
        file_name = f"requirements_amd{'_noavx2' if not cpu_has_avx2() else ''}.txt"
    elif gpu_choice == "APPLE":
        # Intel Macs and Apple Silicon ship different wheels.
        file_name = f"requirements_apple_{'intel' if is_x86_64() else 'silicon'}.txt"
    elif gpu_choice in ("INTEL", "NONE"):
        file_name = f"requirements_cpu_only{'_noavx2' if not cpu_has_avx2() else ''}.txt"
    elif gpu_choice == "NVIDIA_CUDA128":
        file_name = f"requirements_cuda128{'_noavx2' if not cpu_has_avx2() else ''}.txt"
    else:
        raise ValueError(f"Unknown GPU choice: {gpu_choice}")

    return os.path.join("requirements", "full", file_name)
2023-09-24 14:58:29 +02:00
2025-04-21 03:57:26 +02:00
def get_current_commit():
    """Return the HEAD commit hash of the local repository."""
    output = run_cmd("git rev-parse HEAD", capture_output=True, environment=True)
    return output.stdout.decode('utf-8').strip()
2024-03-03 23:40:32 +01:00
2023-09-22 17:02:21 +02:00
2025-04-21 03:57:26 +02:00
def get_extensions_names():
    """List extension folders that ship their own requirements.txt."""
    return [
        name for name in os.listdir('extensions')
        if os.path.isfile(os.path.join('extensions', name, 'requirements.txt'))
    ]
2023-09-22 17:02:21 +02:00
def check_env():
    """Verify we are inside a non-base conda environment; exit otherwise."""
    # If conda is callable, we are probably in an environment.
    if run_cmd("conda", environment=True, capture_output=True).returncode != 0:
        print("Conda is not installed. Exiting...")
        sys.exit(1)

    # Refuse to install into the base environment.
    if os.environ.get("CONDA_DEFAULT_ENV", "") == "base":
        print("Create an environment for this project and activate it. Exiting...")
        sys.exit(1)
2023-09-22 17:02:21 +02:00
def clear_cache():
    """Free disk space used by the conda and pip package caches."""
    for command in ("conda clean -a -y", "python -m pip cache purge"):
        run_cmd(command, environment=True)
2023-09-22 00:35:53 +02:00
def run_cmd(cmd, assert_success=False, environment=False, capture_output=False, env=None):
    """Run a shell command, optionally activated inside the project's conda env.

    Returns the subprocess.CompletedProcess. When assert_success is set and the
    command fails, prints a hint and exits the installer.
    """
    if environment:
        # Prepend the platform-specific conda activation to the command line.
        if is_windows():
            activator = os.path.join(script_dir, "installer_files", "conda", "condabin", "conda.bat")
            cmd = f'"{activator}" activate "{conda_env_path}" >nul && {cmd}'
        else:
            activator = os.path.join(script_dir, "installer_files", "conda", "etc", "profile.d", "conda.sh")
            cmd = f'. "{activator}" && conda activate "{conda_env_path}" && {cmd}'

    # Windows uses the default shell; everywhere else we need bash so the
    # '.'-style sourcing of conda.sh works.
    shell_executable = None if is_windows() else 'bash'

    result = subprocess.run(cmd, shell=True, capture_output=capture_output, env=env, executable=shell_executable)

    if assert_success and result.returncode != 0:
        print(f"Command '{cmd}' failed with exit status code '{str(result.returncode)}'.\n\nExiting now.\nTry running the start/update script again.")
        sys.exit(1)

    return result
2025-04-21 03:57:26 +02:00
def print_big_message(message):
    """Print a message framed by asterisk banners so it stands out."""
    banner = "*******************************************************************"
    print("\n\n" + banner)
    for line in message.strip().split('\n'):
        print("*", line)
    print(banner + "\n\n")
def calculate_file_hash(file_path):
    """Return the SHA-256 hex digest of a file under script_dir, or '' if absent."""
    full_path = os.path.join(script_dir, file_path)
    if not os.path.isfile(full_path):
        return ''
    with open(full_path, 'rb') as handle:
        return hashlib.sha256(handle.read()).hexdigest()
2024-03-06 16:36:23 +01:00
def generate_alphabetic_sequence(index):
    """Convert a 0-based index to a spreadsheet-style label (A..Z, AA, AB, ...)."""
    letters = []
    while index >= 0:
        # Bijective base-26: peel off the least-significant letter each pass.
        index, remainder = divmod(index, 26)
        letters.append(chr(ord('A') + remainder))
        index -= 1
    return ''.join(reversed(letters))
2024-03-04 19:52:24 +01:00
def get_user_choice(question, options_dict):
    """Prompt until the user enters one of the offered option keys; return it."""
    print(f"\n{question}\n")
    for key, value in options_dict.items():
        print(f"{key}) {value}")
    print()

    choice = input("Input> ").upper()
    while choice not in options_dict.keys():
        print("Invalid choice. Please try again.")
        choice = input("Input> ").upper()

    return choice
2025-04-21 03:57:26 +02:00
def update_pytorch_and_python():
    """Upgrade the installed PyTorch build for the saved GPU choice."""
    print_big_message("Checking for PyTorch updates.")
    backend = get_gpu_choice()
    run_cmd(get_pytorch_update_command(backend), assert_success=True, environment=True)
def clean_outdated_pytorch_cuda_dependencies():
    """Uninstall packages built for removed CUDA/torch versions; return their names."""
    outdated_markers = ["cu121", "cu122", "torch2.4", "torchvision", "torchaudio"]
    freeze = run_cmd("python -m pip list --format=freeze", capture_output=True, environment=True)

    to_remove = []
    for line in freeze.stdout.decode('utf-8').splitlines():
        if "==" not in line:
            continue
        pkg_name, version = line.split('==', 1)
        # Match on the version string, where pip encodes the CUDA/torch build tag.
        if any(marker in version for marker in outdated_markers):
            to_remove.append(pkg_name)

    if to_remove:
        print(f"\nUninstalling: {', '.join(to_remove)}\n")
        run_cmd(f"python -m pip uninstall -y {' '.join(to_remove)}", assert_success=True, environment=True)

    return to_remove
2023-09-22 21:08:05 +02:00
def install_webui():
    """First-time setup: pick a GPU backend, install PyTorch, then the webui requirements."""
    # Start from a clean slate: drop any previous installer state file.
    if os.path.isfile(state_file):
        os.remove(state_file)

    # Get GPU choice and save it to state
    gpu_choice = get_gpu_choice()

    # Write a flag to CMD_FLAGS.txt for CPU mode
    if gpu_choice == "NONE":
        cmd_flags_path = os.path.join(script_dir, "user_data", "CMD_FLAGS.txt")
        # NOTE(review): 'r+' assumes user_data/CMD_FLAGS.txt already exists — confirm the start scripts create it
        with open(cmd_flags_path, 'r+') as cmd_flags_file:
            if "--cpu" not in cmd_flags_file.read():
                print_big_message("Adding the --cpu flag to user_data/CMD_FLAGS.txt.")
                # read() above left the position at EOF, so this appends.
                cmd_flags_file.write("\n--cpu\n")

    # Handle CUDA version display
    elif any((is_windows(), is_linux())) and gpu_choice == "NVIDIA_CUDA128":
        print("CUDA: 12.8")

    # No PyTorch for AMD on Windows (?)
    elif is_windows() and gpu_choice == "AMD":
        print("PyTorch setup on Windows is not implemented yet. Exiting...")
        sys.exit(1)

    # Install Git and then Pytorch (py-cpuinfo is needed by the AVX2/AMX probes)
    print_big_message("Installing PyTorch.")
    install_pytorch = get_pytorch_install_command(gpu_choice)
    run_cmd(f"conda install -y ninja git && {install_pytorch} && python -m pip install py-cpuinfo==9.0.0", assert_success=True, environment=True)

    if gpu_choice == "INTEL":
        # Install oneAPI dependencies via conda
        print_big_message("Installing Intel oneAPI runtime libraries.")
        run_cmd("conda install -y -c https://software.repos.intel.com/python/conda/ -c conda-forge dpcpp-cpp-rt=2024.0 mkl-dpcpp=2024.0", environment=True)

        # Install libuv required by Intel-patched torch
        run_cmd("conda install -y libuv", environment=True)

    # Install the webui requirements (skip the git pull on first install)
    update_requirements(initial_installation=True, pull=False)
2023-09-22 00:35:53 +02:00
2024-03-04 21:35:41 +01:00
def update_requirements(initial_installation=False, pull=True):
    """Install or update the webui requirements, optionally git-pulling first.

    Args:
        initial_installation: True on first install; skips the PyTorch update
            step and keeps .whl requirement lines.
        pull: when True, run 'git pull' and restart the script if any installer
            file or wheel requirement changed during the pull.
    """
    # Create .git directory if missing
    if not os.path.exists(os.path.join(script_dir, ".git")):
        run_cmd(
            "git init -b main && git remote add origin https://github.com/oobabooga/text-generation-webui && "
            "git fetch && git symbolic-ref refs/remotes/origin/HEAD refs/remotes/origin/main && "
            "git reset --hard origin/main && git branch --set-upstream-to=origin/main",
            environment=True,
            assert_success=True
        )

    # Check for outdated CUDA 12.4 installs and refuse to update
    state = load_state()
    if state.get('gpu_choice') == 'NVIDIA':
        print_big_message(
            "Your current installation uses CUDA 12.4, which has been removed.\n"
            "To update to the new default (CUDA 12.8), a clean installation is required.\n\n"
            "INSTRUCTIONS:\n"
            "1. Delete the 'installer_files' folder in your text-generation-webui directory.\n"
            "2. Run the start script again (e.g., start_windows.bat).\n\n"
            "This will create a fresh environment with the latest software."
        )
        sys.exit(0)

    current_commit = get_current_commit()

    # Wheels must be (re)installed when there is no state yet, a previous run
    # flagged them, or the repo moved to a different commit.
    wheels_changed = not os.path.exists(state_file)
    if not wheels_changed:
        state = load_state()
        if 'wheels_changed' in state or state.get('last_installed_commit') != current_commit:
            wheels_changed = True

    gpu_choice = get_gpu_choice()
    requirements_file = get_requirements_file(gpu_choice)

    if pull:
        # Read .whl lines before pulling
        before_pull_whl_lines = []
        if os.path.exists(requirements_file):
            with open(requirements_file, 'r') as f:
                before_pull_whl_lines = [line for line in f if '.whl' in line]

        print_big_message('Updating the local copy of the repository with "git pull"')

        # Hash installer files before pulling so we can detect self-updates.
        files_to_check = [
            'start_linux.sh', 'start_macos.sh', 'start_windows.bat', 'start_wsl.bat',
            'update_wizard_linux.sh', 'update_wizard_macos.sh', 'update_wizard_windows.bat', 'update_wizard_wsl.bat',
            'one_click.py'
        ]
        before_hashes = {file: calculate_file_hash(file) for file in files_to_check}

        # Perform the git pull
        run_cmd("git pull --autostash", assert_success=True, environment=True)

        # Check hashes after pulling
        after_hashes = {file: calculate_file_hash(file) for file in files_to_check}

        # fix: default to [] so the comparison below cannot hit an undefined
        # name when the requirements file is absent after the pull
        after_pull_whl_lines = []
        if os.path.exists(requirements_file):
            with open(requirements_file, 'r') as f:
                after_pull_whl_lines = [line for line in f if '.whl' in line]

        wheels_changed = wheels_changed or (before_pull_whl_lines != after_pull_whl_lines)

        # Check for changes to installer files
        for file in files_to_check:
            if before_hashes[file] != after_hashes[file]:
                print_big_message(f"File '{file}' was updated during 'git pull'. Please run the script again.")

                # Save state before exiting
                state = load_state()
                if wheels_changed:
                    state['wheels_changed'] = True
                save_state(state)
                sys.exit(1)

    # Save current state
    state = load_state()
    state['last_installed_commit'] = current_commit
    state.pop('wheels_changed', None)  # Remove wheels_changed flag
    save_state(state)

    if os.environ.get("INSTALL_EXTENSIONS", "").lower() in ("yes", "y", "true", "1", "t", "on"):
        install_extensions_requirements()

    if is_linux():
        run_cmd(f"conda install -y -c conda-forge libstdcxx-ng=={LIBSTDCXX_VERSION_LINUX}", assert_success=True, environment=True)

    # Update PyTorch
    if not initial_installation:
        update_pytorch_and_python()
        clean_outdated_pytorch_cuda_dependencies()

    print_big_message(f"Installing webui requirements from file: {requirements_file}")
    print(f"GPU Choice: {gpu_choice}\n")

    # Prepare the requirements file
    # fix: close the file handle instead of open(...).read()
    with open(requirements_file) as f:
        textgen_requirements = f.read().splitlines()

    # Skip wheel requirements when nothing that affects them has changed.
    if not initial_installation and not wheels_changed:
        textgen_requirements = [line for line in textgen_requirements if '.whl' not in line]

    with open('temp_requirements.txt', 'w') as file:
        file.write('\n'.join(textgen_requirements))

    # Workaround for git+ packages not updating properly.
    git_requirements = [req for req in textgen_requirements if req.startswith("git+")]
    for req in git_requirements:
        url = req.replace("git+", "")
        # fix: removesuffix instead of rstrip(".git") — rstrip strips any
        # trailing '.', 'g', 'i', 't' characters and can mangle package names
        package_name = url.split("/")[-1].split("@")[0].removesuffix(".git")
        run_cmd(f"python -m pip uninstall -y {package_name}", environment=True)
        print(f"Uninstalled {package_name}")

    # Install/update the project requirements
    run_cmd("python -m pip install -r temp_requirements.txt --upgrade", assert_success=True, environment=True)

    # Clean up
    os.remove('temp_requirements.txt')
    clear_cache()
2025-04-21 03:57:26 +02:00
def install_extensions_requirements():
    """Install every extension's requirements.txt, best effort (failures tolerated)."""
    print_big_message("Installing extensions requirements.\nSome of these may fail on Windows.\nDon't worry if you see error messages, as they will not affect the main program.")
    extensions = get_extensions_names()
    total = len(extensions)
    for i, extension in enumerate(extensions):
        print(f"\n\n--- [{i+1}/{total}]: {extension}\n\n")
        extension_req_path = os.path.join("extensions", extension, "requirements.txt")
        run_cmd(f"python -m pip install -r {extension_req_path} --upgrade", assert_success=False, environment=True)
2023-09-22 00:35:53 +02:00
def launch_webui():
    """Start the text-generation-webui server, forwarding the collected CLI flags."""
    run_cmd(f"python server.py {flags}", environment=True)
2023-09-22 00:35:53 +02:00
if __name__ == "__main__":
    # Verifies we are in a conda environment
    check_env()

    # Only the installer-owned flag is parsed here; everything else is
    # forwarded to server.py via the module-level 'flags' string.
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--update-wizard', action='store_true', help='Launch a menu with update options.')
    args, _ = parser.parse_known_args()

    if args.update_wizard:
        # Interactive update menu; loops until the user picks 'N' (exit).
        while True:
            choice = get_user_choice(
                "What would you like to do?",
                {
                    'A': 'Update the web UI',
                    'B': 'Install/update extensions requirements',
                    'C': 'Revert local changes to repository files with "git reset --hard"',
                    'N': 'Nothing (exit)'
                },
            )

            if choice == 'A':
                update_requirements()
            elif choice == 'B':
                # Build a sub-menu: 'A' = all extensions, then one letter per extension.
                choices = {'A': 'All extensions'}
                for i, name in enumerate(get_extensions_names()):
                    key = generate_alphabetic_sequence(i + 1)
                    choices[key] = name

                choice = get_user_choice("What extension?", choices)

                if choice == 'A':
                    install_extensions_requirements()
                else:
                    extension_req_path = os.path.join("extensions", choices[choice], "requirements.txt")
                    run_cmd(f"python -m pip install -r {extension_req_path} --upgrade", assert_success=False, environment=True)

                update_requirements(pull=False)
            elif choice == 'C':
                run_cmd("git reset --hard", assert_success=True, environment=True)
            elif choice == 'N':
                sys.exit()
    else:
        if not is_installed():
            install_webui()
            os.chdir(script_dir)

        if os.environ.get("LAUNCH_AFTER_INSTALL", "").lower() in ("no", "n", "false", "0", "f", "off"):
            print_big_message("Will now exit due to LAUNCH_AFTER_INSTALL.")
            sys.exit()

        # Check if a model has been downloaded yet
        if '--model-dir' in flags:
            # Splits on ' ' or '=' while maintaining spaces within quotes
            flags_list = re.split(' +(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)|=', flags)
            model_dir = [flags_list[(flags_list.index(flag) + 1)] for flag in flags_list if flag == '--model-dir'][0].strip('"\'')
        else:
            model_dir = 'user_data/models'

        # Only .txt/.yaml placeholders count as "no model downloaded".
        if len([item for item in glob.glob(f'{model_dir}/*') if not item.endswith(('.txt', '.yaml'))]) == 0:
            print_big_message("You haven't downloaded any model yet.\nOnce the web UI launches, head over to the \"Model\" tab and download one.")

        # Workaround for llama-cpp-python loading paths in CUDA env vars even if they do not exist
        conda_path_bin = os.path.join(conda_env_path, "bin")
        if not os.path.exists(conda_path_bin):
            os.mkdir(conda_path_bin)

        # Launch the webui
        launch_webui()