Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2025-12-06 07:12:10 +01:00)
Add support for the Seed-OSS template
This commit is contained in:
parent 8fcb4b3102
commit 6a7166fffa
@@ -109,7 +109,8 @@ def generate_chat_prompt(user_input, state, **kwargs):
         tools_in_user_message=False,
         add_generation_prompt=False,
         enable_thinking=state['enable_thinking'],
-        reasoning_effort=state['reasoning_effort']
+        reasoning_effort=state['reasoning_effort'],
+        thinking_budget=-1 if state.get('enable_thinking', True) else 0
     )

     chat_renderer = partial(
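These keyword arguments are passed through partial() into the chat template's render call, so a Seed-OSS template can react to them at prompt-building time. A minimal sketch, assuming a made-up Jinja2 template string (not the actual Seed-OSS chat template, which ships with the model):

from jinja2 import Template

# Illustrative template only: branches on thinking_budget the way a
# Seed-OSS-style template might, but the text here is invented.
demo_template = Template(
    "{% if thinking_budget == 0 %}"
    "<seed:think>Thinking is disabled for this reply.</seed:think>"
    "{% endif %}"
    "{{ user_message }}"
)

state = {'enable_thinking': False, 'reasoning_effort': 'medium'}
print(demo_template.render(
    user_message="Hello",
    reasoning_effort=state['reasoning_effort'],
    thinking_budget=-1 if state.get('enable_thinking', True) else 0,
))
# -> <seed:think>Thinking is disabled for this reply.</seed:think>Hello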
@@ -190,6 +191,30 @@ def generate_chat_prompt(user_input, state, **kwargs):
                 messages.insert(insert_pos, msg_dict)

+            # Handle Seed-OSS
+            elif '<seed:think>' in assistant_msg:
+                thinking_content = ""
+                final_content = assistant_msg
+
+                # Extract thinking content if present
+                if '<seed:think>' in assistant_msg:
+                    parts = assistant_msg.split('<seed:think>', 1)
+                    if len(parts) > 1:
+                        potential_content = parts[1]
+                        if '</seed:think>' in potential_content:
+                            thinking_content = potential_content.split('</seed:think>', 1)[0].strip()
+                            final_content = parts[0] + potential_content.split('</seed:think>', 1)[1]
+                        else:
+                            thinking_content = potential_content.strip()
+                            final_content = parts[0]
+
+                # Insert as structured message
+                msg_dict = {"role": "assistant", "content": final_content.strip()}
+                if thinking_content:
+                    msg_dict["reasoning_content"] = thinking_content
+
+                messages.insert(insert_pos, msg_dict)
+
             else:
                 # Default case (used by all other models)
                 messages.insert(insert_pos, {"role": "assistant", "content": assistant_msg})

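The new branch splits the stored assistant message on the <seed:think> tags, moves the reasoning into reasoning_content, and keeps only the visible reply in content. A self-contained sketch of the same splitting, using a hypothetical helper name (split_seed_think is not part of the codebase):

def split_seed_think(assistant_msg):
    # Separate a Seed-OSS reasoning block from the visible reply.
    thinking_content = ""
    final_content = assistant_msg

    if '<seed:think>' in assistant_msg:
        before, after = assistant_msg.split('<seed:think>', 1)
        if '</seed:think>' in after:
            thinking_content, rest = after.split('</seed:think>', 1)
            final_content = before + rest
        else:
            # Unterminated block: treat everything after the tag as reasoning
            thinking_content = after
            final_content = before

    return thinking_content.strip(), final_content.strip()

print(split_seed_think("<seed:think>plan the answer</seed:think>Final reply"))
# -> ('plan the answer', 'Final reply')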
@@ -687,6 +712,8 @@ def generate_search_query(user_message, state):
         query = query.rsplit("</think>", 1)[1]
     elif "<|start|>assistant<|channel|>final<|message|>" in query:
         query = query.rsplit("<|start|>assistant<|channel|>final<|message|>", 1)[1]
+    elif "</seed:think>" in query:
+        query = query.rsplit("</seed:think>", 1)[1]

     # Strip and remove surrounding quotes if present
     query = query.strip()
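For web search, only the text after the last closing </seed:think> tag is kept as the query. A quick illustration with a made-up model output:

# The reasoning block is discarded; only the final query text survives.
query = "<seed:think>what does the user want?</seed:think>latest CUDA release date"
if "</seed:think>" in query:
    query = query.rsplit("</seed:think>", 1)[1]
print(query.strip())  # -> 'latest CUDA release date'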
@@ -137,7 +137,7 @@ def extract_thinking_block(string):
         remaining_content = string[content_start:]
         return thinking_content, remaining_content

-    # If think tags not found, try alternative format
+    # If think tags not found, try GPT-OSS alternative format
     ALT_START = "<|channel|>analysis<|message|>"
     ALT_END = "<|end|>"
     ALT_CONTENT_START = "<|start|>assistant<|channel|>final<|message|>"
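The reworded comment makes explicit that these markers target GPT-OSS-style channel output. A hedged illustration of the kind of string the ALT_* markers delimit (the message text between the markers is invented, not real model output):

sample = (
    "<|channel|>analysis<|message|>reasoning goes here<|end|>"
    "<|start|>assistant<|channel|>final<|message|>visible answer goes here"
)
# Carve out the analysis span the same way the ALT_* constants are used above.
start = sample.find("<|channel|>analysis<|message|>") + len("<|channel|>analysis<|message|>")
end = sample.find("<|end|>")
print(sample[start:end])  # -> 'reasoning goes here'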
@@ -168,7 +168,31 @@ def extract_thinking_block(string):
         remaining_content = string[content_start:]
         return thinking_content, remaining_content

-    # Return if neither format is found
+    # Try seed:think format
+    SEED_START = "<seed:think>"
+    SEED_END = "</seed:think>"
+
+    seed_start_pos = string.find(SEED_START)
+    seed_end_pos = string.find(SEED_END)
+
+    if seed_start_pos != -1 or seed_end_pos != -1:
+        if seed_start_pos == -1:
+            thought_start = 0
+        else:
+            thought_start = seed_start_pos + len(SEED_START)
+
+        if seed_end_pos == -1:
+            thought_end = len(string)
+            content_start = len(string)
+        else:
+            thought_end = seed_end_pos
+            content_start = seed_end_pos + len(SEED_END)
+
+        thinking_content = string[thought_start:thought_end]
+        remaining_content = string[content_start:]
+        return thinking_content, remaining_content
+
+    # Return if no format is found
     return None, string
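This branch also copes with partially streamed output: an opening <seed:think> without a closing tag is treated as an in-progress thought with no visible content yet. A standalone re-implementation for illustration (extract_seed_block is a hypothetical helper, not part of the codebase):

def extract_seed_block(string):
    SEED_START, SEED_END = "<seed:think>", "</seed:think>"
    start, end = string.find(SEED_START), string.find(SEED_END)
    if start == -1 and end == -1:
        return None, string                       # no Seed-OSS markers at all
    thought_start = 0 if start == -1 else start + len(SEED_START)
    if end == -1:                                  # still streaming the thought
        return string[thought_start:], ""
    return string[thought_start:end], string[end + len(SEED_END):]

print(extract_seed_block("<seed:think>partial thought"))               # ('partial thought', '')
print(extract_seed_block("<seed:think>done</seed:think> The answer"))  # ('done', ' The answer')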