From 6871484398292bb602b89ac202a3389d082d15f1 Mon Sep 17 00:00:00 2001 From: Trenten Miller <62214409+inyourface34456@users.noreply.github.com> Date: Tue, 28 Oct 2025 15:48:04 -0400 Subject: [PATCH] fix: Rename 'evaluation_strategy' to 'eval_strategy' and disable use_ipex in training --- modules/training.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/training.py b/modules/training.py index 2354c39d..1de0b702 100644 --- a/modules/training.py +++ b/modules/training.py @@ -611,7 +611,7 @@ def do_train(lora_name: str, always_override: bool, q_proj_en: bool, v_proj_en: bf16=shared.args.bf16, optim=optimizer, logging_steps=2 if stop_at_loss > 0 else 5, - evaluation_strategy="steps" if eval_data is not None else "no", + eval_strategy="steps" if eval_data is not None else "no", eval_steps=math.ceil(eval_steps / gradient_accumulation_steps) if eval_data is not None else None, save_strategy="steps" if eval_data is not None else "no", output_dir=lora_file_path, @@ -620,7 +620,7 @@ def do_train(lora_name: str, always_override: bool, q_proj_en: bool, v_proj_en: # TODO: Enable multi-device support ddp_find_unused_parameters=None, no_cuda=shared.args.cpu, - use_ipex=True if is_torch_xpu_available() and not shared.args.cpu else False + # use_ipex=True if is_torch_xpu_available() and not shared.args.cpu else False ), data_collator=transformers.DataCollatorForLanguageModeling(shared.tokenizer, mlm=False), callbacks=list([Callbacks()])