Mirror of https://github.com/oobabooga/text-generation-webui.git
Do not expose alpha_value to llama.cpp & rope_freq_base to transformers
To avoid confusion
commit 536f8d58d4
parent b48ab482f8

5 changed files with 6 additions and 27 deletions
@@ -25,7 +25,7 @@ from transformers import (
 )
 
 import modules.shared as shared
-from modules import RoPE, sampler_hijack
+from modules import sampler_hijack
 from modules.logging_colors import logger
 from modules.models_settings import get_model_metadata
 
@@ -248,7 +248,7 @@ def huggingface_loader(model_name):
         if shared.args.compress_pos_emb > 1:
             params['rope_scaling'] = {'type': 'linear', 'factor': shared.args.compress_pos_emb}
         elif shared.args.alpha_value > 1:
-            params['rope_scaling'] = {'type': 'dynamic', 'factor': RoPE.get_alpha_value(shared.args.alpha_value, shared.args.rope_freq_base)}
+            params['rope_scaling'] = {'type': 'dynamic', 'factor': shared.args.alpha_value}
 
     logger.info("TRANSFORMERS_PARAMS=")
     pprint.PrettyPrinter(indent=4, sort_dicts=False).pprint(params)
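
For context, the removed RoPE.get_alpha_value call is what previously let rope_freq_base feed into the transformers loader by converting it back into an alpha value. Below is a minimal sketch of that behavior, assuming the usual NTK-aware relation rope_freq_base = 10000 * alpha ** (64 / 63); the helper's actual body is not part of this diff, and the plain variable names in the usage snippet stand in for the shared.args flags.

# Sketch of the conversion the removed helper performed (assumption: the
# standard NTK-aware relation rope_freq_base = 10000 * alpha ** (64 / 63)).
def get_alpha_value(alpha: float, base: float) -> float:
    # If a rope_freq_base was supplied, derive the equivalent alpha from it;
    # otherwise pass the user's alpha_value through unchanged.
    if base > 0:
        return (base / 10000.0) ** (63 / 64)
    return alpha


# After this commit, the transformers loader no longer goes through that
# conversion: the 'dynamic' rope_scaling factor is taken from alpha_value
# directly, and rope_freq_base is left to the llama.cpp loader alone.
alpha_value, compress_pos_emb = 2.5, 1  # example CLI values
params = {}
if compress_pos_emb > 1:
    params['rope_scaling'] = {'type': 'linear', 'factor': compress_pos_emb}
elif alpha_value > 1:
    params['rope_scaling'] = {'type': 'dynamic', 'factor': alpha_value}

print(params)  # {'rope_scaling': {'type': 'dynamic', 'factor': 2.5}}

This matches the separation the commit title describes: alpha_value is only consumed by the transformers loader, and rope_freq_base only by llama.cpp.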