mirror of
https://github.com/oobabooga/text-generation-webui.git
synced 2026-01-06 08:40:10 +01:00
Image: Add the LLM-generated prompt to the API result
This commit is contained in:
parent
b451bac082
commit
c7ad28a4cd
|
|
@@ -48,10 +48,16 @@ def generations(request):
|
|||
resp = {'created': int(time.time()), 'data': []}
|
||||
for img in images:
|
||||
b64 = _image_to_base64(img)
|
||||
image_obj = {
|
||||
'revised_prompt': img.info.get('revised_prompt', request.prompt)
|
||||
}
|
||||
|
||||
if request.response_format == 'b64_json':
|
||||
resp['data'].append({'b64_json': b64})
|
||||
image_obj['b64_json'] = b64
|
||||
else:
|
||||
resp['data'].append({'url': f'data:image/png;base64,{b64}'})
|
||||
image_obj['url'] = f'data:image/png;base64,{b64}'
|
||||
|
||||
resp['data'].append(image_obj)
|
||||
|
||||
return resp
|
||||
|
||||
|
|
|
|||
|
|
@@ -856,7 +856,13 @@ def generate(state, save_images=True):
|
|||
if magic_suffix.strip(", ") not in clean_prompt:
|
||||
gen_kwargs["prompt"] = clean_prompt + magic_suffix
|
||||
|
||||
result_holder.extend(shared.image_model(**gen_kwargs).images)
|
||||
batch_results = shared.image_model(**gen_kwargs).images
|
||||
|
||||
# Store the modified prompt in the metadata
|
||||
for img in batch_results:
|
||||
img.info["revised_prompt"] = clean_prompt
|
||||
|
||||
result_holder.extend(batch_results)
|
||||
gen_kwargs["prompt"] = clean_prompt # restore
|
||||
except Exception as e:
|
||||
error_holder.append(e)
|
||||
|
|
|
|||
Loading…
Reference in a new issue