From b69f435311e47949c6341e298f4b8db396674926 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Wed, 9 Jul 2025 19:56:50 -0700
Subject: [PATCH] Fix latest transformers being super slow

---
 modules/transformers_loader.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/modules/transformers_loader.py b/modules/transformers_loader.py
index ef524b57..e4163b6d 100644
--- a/modules/transformers_loader.py
+++ b/modules/transformers_loader.py
@@ -131,6 +131,8 @@ def load_tokenizer(model_name, tokenizer_dir=None):
 
 
 def load_model_HF(model_name):
+    torch._dynamo.config.disable = True
+
     path_to_model = Path(f'{shared.args.model_dir}/{model_name}')
     params = {
         'low_cpu_mem_usage': True,