Fix the transformers loader

This commit is contained in:
oobabooga 2025-04-21 18:33:14 -07:00
parent 8320190184
commit 78aeabca89

View file

@@ -197,13 +197,9 @@ def update_model_parameters(state, initial=False):
if initial and element in shared.provided_arguments:
continue
-        if element in ['cpu_memory'] and value == 0:
+        if element == 'cpu_memory' and value == 0:
value = vars(shared.args_defaults)[element]
# Making some simple conversions
if element == 'cpu_memory' and value is not None:
value = f"{value}MiB"
setattr(shared.args, element, value)