Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2026-04-04 14:17:28 +00:00
Add Ascend NPU support (basic) (#5541)
This commit is contained in:
parent
a90509d82e
commit
fd4e46bce2
5 changed files with 35 additions and 7 deletions
|
|
@@ -10,7 +10,11 @@ from pathlib import Path
|
|||
import torch
|
||||
import transformers
|
||||
from accelerate import infer_auto_device_map, init_empty_weights
|
||||
from accelerate.utils import is_ccl_available, is_xpu_available
|
||||
from accelerate.utils import (
|
||||
is_ccl_available,
|
||||
is_npu_available,
|
||||
is_xpu_available
|
||||
)
|
||||
from transformers import (
|
||||
AutoConfig,
|
||||
AutoModel,
|
||||
|
|
@@ -45,6 +49,9 @@ if shared.args.deepspeed:
|
|||
if is_xpu_available() and is_ccl_available():
|
||||
torch.xpu.set_device(local_rank)
|
||||
deepspeed.init_distributed(backend="ccl")
|
||||
elif is_npu_available():
|
||||
torch.npu.set_device(local_rank)
|
||||
deepspeed.init_distributed(dist_backend="hccl")
|
||||
else:
|
||||
torch.cuda.set_device(local_rank)
|
||||
deepspeed.init_distributed()
|
||||
|
|
@@ -164,6 +171,9 @@ def huggingface_loader(model_name):
|
|||
elif is_xpu_available():
|
||||
device = torch.device("xpu")
|
||||
model = model.to(device)
|
||||
elif is_npu_available():
|
||||
device = torch.device("npu")
|
||||
model = model.to(device)
|
||||
else:
|
||||
model = model.cuda()
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue