Fix prompt corruption when continuing with context truncation (#7439)

This commit is contained in:
Phrosty1 2026-03-22 20:48:56 -04:00 committed by GitHub
parent 1dda5e4711
commit bde496ea5d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -434,6 +434,8 @@ def generate_chat_prompt(user_input, state, **kwargs):
messages.append({"role": "user", "content": "fake user message replace me"})
def make_prompt(messages):
if _continue:
messages = copy.deepcopy(messages)
last_message = messages[-1].copy()
if _continue:
if state['mode'] == 'chat-instruct':