Don't pass torch_dtype to transformers loader, let it be autodetected
commit 3b28dc1821
parent 3039aeffeb
@@ -136,7 +136,6 @@ def load_model_HF(model_name):
     path_to_model = Path(f'{shared.args.model_dir}/{model_name}')
     params = {
         'low_cpu_mem_usage': True,
-        'torch_dtype': torch.bfloat16 if shared.args.bf16 else torch.float16,
         'attn_implementation': shared.args.attn_implementation,
     }
 
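For context, here is a minimal sketch of how a params dict like the one in this hunk is typically consumed by the transformers loader. The function name, the AutoModelForCausalLM class choice, and the argument defaults below are assumptions for illustration, not the project's actual load_model_HF implementation; the point is simply that once 'torch_dtype' is no longer in params, from_pretrained receives no explicit dtype and transformers is left to decide it.

    # Sketch only: illustrates the effect of this commit, not the webui's real loader.
    from pathlib import Path

    from transformers import AutoModelForCausalLM


    def load_model_hf_sketch(model_dir: str, model_name: str, attn_implementation: str = "sdpa"):
        path_to_model = Path(f"{model_dir}/{model_name}")
        params = {
            "low_cpu_mem_usage": True,
            # 'torch_dtype' intentionally omitted, mirroring this commit:
            # no explicit dtype is forwarded to from_pretrained.
            "attn_implementation": attn_implementation,
        }
        return AutoModelForCausalLM.from_pretrained(path_to_model, **params)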