From 338ae36f734a3653aad54f3e01e1e8da8298735a Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Tue, 28 Oct 2025 12:43:16 -0700
Subject: [PATCH] Add weights_only=True to torch.load in Training_PRO

---
 extensions/Training_PRO/script.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/extensions/Training_PRO/script.py b/extensions/Training_PRO/script.py
index cb11a8df..e2f90f17 100644
--- a/extensions/Training_PRO/script.py
+++ b/extensions/Training_PRO/script.py
@@ -823,7 +823,7 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
     lora_model = get_peft_model(shared.model, config)
     if not always_override and Path(f"{lora_file_path}/adapter_model.bin").is_file():
         logger.info("Loading existing LoRA data...")
-        state_dict_peft = torch.load(f"{lora_file_path}/adapter_model.bin")
+        state_dict_peft = torch.load(f"{lora_file_path}/adapter_model.bin", weights_only=True)
         set_peft_model_state_dict(lora_model, state_dict_peft)
 
         print(f" + Continue Training on {RED}{lora_file_path}/adapter_model.bin{RESET}")
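
Note (illustrative, not part of the patch): the one-line change above makes the "continue training" path load the existing adapter with torch.load(..., weights_only=True), so only tensors and plain containers are deserialized instead of arbitrary pickled objects from adapter_model.bin. Below is a minimal standalone sketch of the same pattern; the helper name load_existing_adapter and the standalone structure are assumptions made for illustration, not code from Training_PRO, and weights_only requires a reasonably recent PyTorch release.

from pathlib import Path

import torch
from peft import set_peft_model_state_dict


def load_existing_adapter(lora_model, lora_file_path: str) -> bool:
    """Load a previously saved LoRA state dict into lora_model, if one exists.

    Passing weights_only=True tells torch.load to deserialize only tensors
    and simple containers, rather than executing arbitrary pickle code.
    """
    adapter_file = Path(lora_file_path) / "adapter_model.bin"
    if not adapter_file.is_file():
        return False

    # weights_only=True is supported on recent PyTorch versions; on older
    # releases this keyword is unavailable and a plain torch.load would be
    # needed (with the usual pickle security caveats).
    state_dict_peft = torch.load(adapter_file, weights_only=True)
    set_peft_model_state_dict(lora_model, state_dict_peft)
    return True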