Update AMD ROCm from 6.4 to 7.2

This commit is contained in:
oobabooga 2026-03-11 13:14:26 -07:00
parent 7a63a56043
commit 24977846fb
3 changed files with 10 additions and 8 deletions

View file

@@ -91,7 +91,7 @@ def get_gpu_choice():
"What is your GPU?",
{
'A': 'NVIDIA',
'B': 'AMD - Linux/macOS only, requires ROCm 6.4',
'B': 'AMD - Linux only, ROCm 7.2',
'C': 'Apple M Series',
'D': 'Intel Arc (beta)',
'N': 'CPU mode'
@@ -115,7 +115,8 @@ def get_pytorch_install_command(gpu_choice):
if gpu_choice == "NVIDIA_CUDA128":
return base_cmd + "--index-url https://download.pytorch.org/whl/cu128"
elif gpu_choice == "AMD":
return base_cmd + "--index-url https://download.pytorch.org/whl/rocm6.4"
py_tag = f"cp{PYTHON_VERSION.replace('.', '')}"
return f"python -m pip install https://repo.radeon.com/rocm/manylinux/rocm-rel-7.2/torch-{TORCH_VERSION}%2Brocm7.2.0.lw.git7e1940d4-{py_tag}-{py_tag}-linux_x86_64.whl"
elif gpu_choice in ["APPLE", "NONE"]:
return base_cmd + "--index-url https://download.pytorch.org/whl/cpu"
elif gpu_choice == "INTEL":
@@ -131,7 +132,8 @@ def get_pytorch_update_command(gpu_choice):
if gpu_choice == "NVIDIA_CUDA128":
return f"{base_cmd} --index-url https://download.pytorch.org/whl/cu128"
elif gpu_choice == "AMD":
return f"{base_cmd} --index-url https://download.pytorch.org/whl/rocm6.4"
py_tag = f"cp{PYTHON_VERSION.replace('.', '')}"
return f"python -m pip install --upgrade https://repo.radeon.com/rocm/manylinux/rocm-rel-7.2/torch-{TORCH_VERSION}%2Brocm7.2.0.lw.git7e1940d4-{py_tag}-{py_tag}-linux_x86_64.whl"
elif gpu_choice in ["APPLE", "NONE"]:
return f"{base_cmd} --index-url https://download.pytorch.org/whl/cpu"
elif gpu_choice == "INTEL":
@@ -266,7 +268,7 @@ def update_pytorch_and_python():
def clean_outdated_pytorch_cuda_dependencies():
patterns = ["cu121", "cu122", "torch2.4", "torch2.6", "torch2.7", "torchvision", "torchaudio"]
patterns = ["cu121", "cu122", "rocm6", "torch2.4", "torch2.6", "torch2.7", "torchvision", "torchaudio"]
result = run_cmd("python -m pip list --format=freeze", capture_output=True, environment=True)
matching_packages = []

View file

@@ -37,5 +37,5 @@ sse-starlette==1.6.5
tiktoken
# AMD wheels
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm6.4-py3-none-win_amd64.whl; platform_system == "Windows"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm6.4-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm7.2-py3-none-win_amd64.whl; platform_system == "Windows"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm7.2-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"

View file

@@ -23,5 +23,5 @@ sse-starlette==1.6.5
tiktoken
# AMD wheels
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm6.4-py3-none-win_amd64.whl; platform_system == "Windows"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm6.4-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm7.2-py3-none-win_amd64.whl; platform_system == "Windows"
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.91.0/llama_cpp_binaries-0.91.0+rocm7.2-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"