Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2025-12-06 07:12:10 +01:00)
Multiple fixes
parent 6a7166fffa
commit 02ca96fa44
@@ -292,9 +292,22 @@ def generate_chat_prompt(user_input, state, **kwargs):
         messages.append({"role": "user", "content": "fake user message replace me"})
 
     def make_prompt(messages):
+        last_message = messages[-1].copy()
+        if _continue:
+            if state['mode'] == 'chat-instruct':
+                messages = messages[:-1]
+            else:
+                messages[-1]["content"] = "fake assistant message replace me"
+                messages.append({"role": "assistant", "content": "this will get deleted"})
+
+        if state['mode'] != 'chat-instruct':
+            add_generation_prompt = (not _continue and not impersonate)
+        else:
+            add_generation_prompt = False
+
         prompt = renderer(
-            messages=messages[:-1] if _continue else messages,
-            add_generation_prompt=(state['mode'] != 'chat-instruct' and not impersonate)
+            messages=messages,
+            add_generation_prompt=add_generation_prompt
        )
 
         if state['mode'] == 'chat-instruct':
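
Note: the add_generation_prompt decision above can be reproduced in isolation. The following is a minimal sketch, assuming a simplified Jinja2 template and made-up names; it is not the project's default template, only the same decision logic feeding a render call.

    # Illustrative only: a simplified chat template, not text-generation-webui's default.
    from jinja2 import Template

    template = Template(
        "{% for m in messages %}"
        "{{ (name1 if m['role'] == 'user' else name2) ~ ': ' ~ m['content'] }}\n"
        "{% endfor %}"
        "{% if add_generation_prompt %}{{ name2 ~ ':' }}{% endif %}"
    )

    messages = [
        {"role": "user", "content": "Hi"},
        {"role": "assistant", "content": "Hello! How can"},
    ]

    # Mirrors the branching added in this hunk: no trailing "AI:" generation prompt
    # while continuing or impersonating, and never in chat-instruct mode.
    _continue, impersonate, mode = True, False, 'chat'
    if mode != 'chat-instruct':
        add_generation_prompt = (not _continue and not impersonate)
    else:
        add_generation_prompt = False

    prompt = template.render(
        messages=messages,
        add_generation_prompt=add_generation_prompt,
        name1="You",
        name2="AI",
    )
    print(prompt)
    # You: Hi
    # AI: Hello! How can
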
@@ -308,24 +321,19 @@ def generate_chat_prompt(user_input, state, **kwargs):
                 outer_messages.append({"role": "system", "content": state['custom_system_message']})
 
             outer_messages.append({"role": "user", "content": command})
+            if _continue:
+                outer_messages.append(last_message.copy())
+                outer_messages[-1]["content"] = "fake assistant message replace me"
+                outer_messages.append({"role": "assistant", "content": "this will get deleted"})
 
             prompt = instruct_renderer(
                 messages=outer_messages,
-                add_generation_prompt=True
+                add_generation_prompt=not _continue
             )
-        else:
-            # Handle GPT-OSS as a special case when continuing
-            # (otherwise the thinking block gets removed...)
-            if _continue and '<|channel|>final<|message|>' in state['instruction_template_str']:
-                assistant_reply_so_far = "<|start|>assistant"
-                if 'thinking' in messages[-1]:
-                    assistant_reply_so_far += f"<|channel|>analysis<|message|>{messages[-1]['thinking']}<|end|>"
-
-                assistant_reply_so_far += f"<|channel|>final<|message|>"
-                prompt += assistant_reply_so_far
 
         if _continue:
-            prompt += messages[-1].get('content', '')
+            prompt = prompt.split("fake assistant message replace me", 1)[0]
+            prompt += last_message.get("content", "")
 
         if impersonate:
            prompt = prompt.split("fake user message replace me", 1)[0]
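
Note: the placeholder approach above is what supersedes the removed GPT-OSS special case. The template renders a complete assistant turn whose content is the placeholder; cutting the rendered prompt at the placeholder keeps whatever prefix the template emitted (role header, stored thinking block, channel tokens), and the partial reply being continued is appended afterwards. A toy illustration with invented tokens:

    # Toy example: the tokens below are invented; only the split-and-append
    # pattern matches the code in this hunk.
    rendered = (
        "<|user|>Hi<|end|>"
        "<|assistant|><|thinking|>earlier reasoning<|/thinking|>"
        "fake assistant message replace me<|end|>"
        "<|assistant|>this will get deleted<|end|>"
    )
    last_message = {"role": "assistant", "content": "Hello! How can"}

    # Keep everything the template produced before the placeholder, then append
    # the partial reply that is being continued.
    prompt = rendered.split("fake assistant message replace me", 1)[0]
    prompt += last_message.get("content", "")

    print(prompt)
    # <|user|>Hi<|end|><|assistant|><|thinking|>earlier reasoning<|/thinking|>Hello! How can
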
@@ -453,7 +461,7 @@ def get_stopping_strings(state):
         renderer = partial(template.render, add_generation_prompt=False)
         renderers.append(renderer)
 
-    if state['mode'] in ['chat', 'chat-instruct']:
+    if state['mode'] in ['chat']:
         template = jinja_env.from_string(state['chat_template_str'])
         renderer = partial(template.render, add_generation_prompt=False, name1=state['name1'], name2=state['name2'])
         renderers.append(renderer)
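
Note: the renderer lines above partially apply a Jinja2 template's render method so that callers only have to pass the message list later. A self-contained sketch of that pattern, with a stand-in template instead of state['chat_template_str']:

    from functools import partial

    from jinja2 import Environment

    jinja_env = Environment()

    # Stand-in template; in the webui this comes from state['chat_template_str'].
    template = jinja_env.from_string(
        "{% for m in messages %}"
        "{{ (name1 if m['role'] == 'user' else name2) ~ ': ' ~ m['content'] }}\n"
        "{% endfor %}"
    )

    # Same shape as the code above: fix the keyword arguments once,
    # pass the messages later.
    renderer = partial(template.render, add_generation_prompt=False, name1="You", name2="AI")

    print(renderer(messages=[{"role": "user", "content": "Hi"}]))
    # You: Hi
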
@@ -203,7 +203,7 @@ settings = {
     'start_with': '',
     'mode': 'instruct',
     'chat_style': 'cai-chat',
-    'chat-instruct_command': 'Continue the chat dialogue below. Write a single reply for the character "<|character|>".\n\n<|prompt|>',
+    'chat-instruct_command': 'Continue the chat dialogue below. Write a single reply for the character "<|character|>". Reply directly, without starting the reply with the character name.\n\n<|prompt|>',
     'enable_web_search': False,
     'web_search_pages': 3,
     'prompt-notebook': '',
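
Note: the chat-instruct_command value is a prompt skeleton. A hedged sketch of how its placeholders are typically filled before the command goes through the instruction template; the variable names below are illustrative, in the webui the character name and rendered history come from the application state:

    # Illustrative values only.
    command = (
        'Continue the chat dialogue below. Write a single reply for the character '
        '"<|character|>". Reply directly, without starting the reply with the '
        'character name.\n\n<|prompt|>'
    )
    character = "AI"
    chat_history = "You: Hi\nAI: Hello! How can I help you today?"

    # Substitute the placeholders with the bot name and the rendered chat history.
    filled = command.replace("<|character|>", character).replace("<|prompt|>", chat_history)
    print(filled)
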
@@ -287,7 +287,7 @@ settings = {
     'greeting': 'How can I help you today?',
     'custom_system_message': '',
     'instruction_template_str': "{%- set ns = namespace(found=false) -%}\n{%- for message in messages -%}\n {%- if message['role'] == 'system' -%}\n {%- set ns.found = true -%}\n {%- endif -%}\n{%- endfor -%}\n{%- if not ns.found -%}\n {{- '' + 'Below is an instruction that describes a task. Write a response that appropriately completes the request.' + '\\n\\n' -}}\n{%- endif %}\n{%- for message in messages %}\n {%- if message['role'] == 'system' -%}\n {{- '' + message['content'] + '\\n\\n' -}}\n {%- else -%}\n {%- if message['role'] == 'user' -%}\n {{-'### Instruction:\\n' + message['content'] + '\\n\\n'-}}\n {%- else -%}\n {{-'### Response:\\n' + message['content'] + '\\n\\n' -}}\n {%- endif -%}\n {%- endif -%}\n{%- endfor -%}\n{%- if add_generation_prompt -%}\n {{-'### Response:\\n'-}}\n{%- endif -%}",
-    'chat_template_str': "{%- for message in messages %}\n {%- if message['role'] == 'system' -%}\n {%- if message['content'] -%}\n {{- message['content'] + '\\n\\n' -}}\n {%- endif -%}\n {%- if user_bio -%}\n {{- user_bio + '\\n\\n' -}}\n {%- endif -%}\n {%- else -%}\n {%- if message['role'] == 'user' -%}\n {{- name1 + ': ' + message['content'] + '\\n'-}}\n {%- else -%}\n {{- name2 + ': ' + message['content'] + '\\n' -}}\n {%- endif -%}\n {%- endif -%}\n{%- endfor -%}\n{%- if add_generation_prompt %}\n {{- name2 + ': ' -}}\n{%- endif %}",
+    'chat_template_str': "{%- for message in messages %}\n {%- if message['role'] == 'system' -%}\n {%- if message['content'] -%}\n {{- message['content'] + '\\n\\n' -}}\n {%- endif -%}\n {%- if user_bio -%}\n {{- user_bio + '\\n\\n' -}}\n {%- endif -%}\n {%- else -%}\n {%- if message['role'] == 'user' -%}\n {{- name1 + ': ' + message['content'] + '\\n'-}}\n {%- else -%}\n {{- name2 + ': ' + message['content'] + '\\n' -}}\n {%- endif -%}\n {%- endif -%}\n{%- endfor -%}\n{%- if add_generation_prompt %}\n {{- name2 + ':' -}}\n{%- endif %}",
 
     # Extensions
     'default_extensions': [],
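
Note: the only change in this hunk is the tail of the default chat template: the generation prompt becomes name2 + ':' with no trailing space, so generation starts right after the colon. A quick standalone check of just that fragment, assuming Jinja2:

    from jinja2 import Template

    old_tail = Template("{%- if add_generation_prompt %}\n    {{- name2 + ': ' -}}\n{%- endif %}")
    new_tail = Template("{%- if add_generation_prompt %}\n    {{- name2 + ':' -}}\n{%- endif %}")

    print(repr(old_tail.render(add_generation_prompt=True, name2="AI")))  # prints 'AI: '
    print(repr(new_tail.render(add_generation_prompt=True, name2="AI")))  # prints 'AI:'
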