mtmd: Fix /chat/completions for llama.cpp

oobabooga 2025-08-11 08:22:17 -07:00
parent 38c0b4a1ad
commit b62c8845f3
3 changed files with 32 additions and 15 deletions


@@ -870,18 +870,19 @@ def chatbot_wrapper(text, state, regenerate=False, _continue=False, loading_mess
         row_idx = len(output['internal']) - 1
 
-    # Collect image attachments for multimodal generation
-    image_attachments = []
+    # Collect image attachments for multimodal generation from the entire history
+    all_image_attachments = []
     if 'metadata' in output:
-        user_key = f"user_{row_idx}"
-        if user_key in output['metadata'] and "attachments" in output['metadata'][user_key]:
-            for attachment in output['metadata'][user_key]["attachments"]:
-                if attachment.get("type") == "image":
-                    image_attachments.append(attachment)
+        for i in range(len(output['internal'])):
+            user_key = f"user_{i}"
+            if user_key in output['metadata'] and "attachments" in output['metadata'][user_key]:
+                for attachment in output['metadata'][user_key]["attachments"]:
+                    if attachment.get("type") == "image":
+                        all_image_attachments.append(attachment)
 
-    # Add image attachments to state for the generation
-    if image_attachments:
-        state['image_attachments'] = image_attachments
+    # Add all collected image attachments to state for the generation
+    if all_image_attachments:
+        state['image_attachments'] = all_image_attachments
 
     # Generate the prompt
     kwargs = {
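
For context, a minimal standalone sketch of the behavior after this change (not code from the project; the helper name collect_image_attachments is hypothetical): image attachments are gathered from every user row recorded in the history metadata rather than only the most recent one, presumably so that multi-turn requests carrying images in earlier messages still have those images available when the prompt is generated.

# Hypothetical, self-contained illustration of the new collection logic.
# The history layout ('internal' rows plus per-row 'user_{i}' metadata)
# mirrors what the diff above operates on.
def collect_image_attachments(history):
    attachments = []
    metadata = history.get('metadata', {})
    for i in range(len(history.get('internal', []))):
        row_meta = metadata.get(f"user_{i}", {})
        for attachment in row_meta.get("attachments", []):
            if attachment.get("type") == "image":
                attachments.append(attachment)

    return attachments

# Example: images from both turns are collected, not just the last one.
history = {
    'internal': [["describe this image", "It shows a cat."], ["and this one?", ""]],
    'metadata': {
        'user_0': {"attachments": [{"type": "image", "image_data": "<base64>"}]},
        'user_1': {"attachments": [{"type": "image", "image_data": "<base64>"}]},
    },
}
print(len(collect_image_attachments(history)))  # 2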