llama.cpp: Add --extra-flags parameter for passing additional flags to llama-server

This commit is contained in:
oobabooga 2025-04-25 07:32:51 -07:00
parent b6fffbd216
commit 98f4c694b9
5 changed files with 18 additions and 0 deletions

View file

@@ -12,6 +12,7 @@ loaders_and_params = OrderedDict({
'n_ctx',
'cache_type',
'tensor_split',
'extra_flags',
'rope_freq_base',
'compress_pos_emb',
'flash_attn',