Do not expose alpha_value to llama.cpp & rope_freq_base to transformers

To avoid confusion
This commit is contained in:
oobabooga 2024-06-23 22:09:24 -07:00
parent b48ab482f8
commit 536f8d58d4
5 changed files with 6 additions and 27 deletions

View file

@@ -22,7 +22,6 @@ loaders_and_params = OrderedDict({
'no_use_fast',
'use_flash_attention_2',
'alpha_value',
'rope_freq_base',
'compress_pos_emb',
'disable_exllama',
'disable_exllamav2',
@@ -38,7 +37,6 @@ loaders_and_params = OrderedDict({
'no_mmap',
'mlock',
'no_mul_mat_q',
'alpha_value',
'rope_freq_base',
'compress_pos_emb',
'cpu',
@@ -60,7 +58,6 @@ loaders_and_params = OrderedDict({
'no_mmap',
'mlock',
'no_mul_mat_q',
'alpha_value',
'rope_freq_base',
'compress_pos_emb',
'cpu',