Image: Make the LLM Variations prompt configurable

Author: oobabooga
Date:   2025-12-04 10:44:35 -08:00
parent 5763947c37
commit ffef3c7b1d
5 changed files with 35 additions and 6 deletions


@@ -36,6 +36,7 @@ def generations(request):
         'image_batch_count': request.batch_count,
         'image_cfg_scale': request.cfg_scale,
         'image_llm_variations': request.llm_variations,
+        'image_llm_variations_prompt': request.llm_variations_prompt or shared.settings.get('image_llm_variations_prompt', ''),
     })
     # Exhaust generator, keep final result
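
For context, here is a minimal client-side sketch of exercising the new field. The endpoint path, port, and prompt strings are assumptions based on the OpenAI-compatible route convention; this diff does not confirm them:

import requests

# Hypothetical request against an OpenAI-compatible images endpoint.
# Everything except the two llm_variations fields is illustrative.
resp = requests.post(
    "http://127.0.0.1:5000/v1/images/generations",
    json={
        "prompt": "a lighthouse at dusk",
        "n": 1,
        "llm_variations": True,
        # New in this commit: when set, this overrides the server-side
        # 'image_llm_variations_prompt' setting; when omitted (or empty),
        # the server falls back to that setting.
        "llm_variations_prompt": "Rewrite the prompt with more vivid detail.",
    },
    timeout=300,
)
resp.raise_for_status()
print(resp.json())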


@@ -276,6 +276,7 @@ class ImageGenerationRequestParams(BaseModel):
     n: int = Field(default=1, ge=1, description="Alias for batch_size (OpenAI compatibility)")
     batch_count: int = Field(default=1, ge=1, description="Sequential batch count")
     llm_variations: bool = False
+    llm_variations_prompt: str | None = None
     # OpenAI compatibility (unused)
     model: str | None = None
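
Taken together, the two hunks implement a simple request-over-setting fallback. Below is a self-contained sketch of that behavior: the field definitions are copied from the diff, while the settings dict stands in for shared.settings, which this commit does not show.

from pydantic import BaseModel, Field

# Trimmed copy of the request model from this diff; unrelated fields omitted.
class ImageGenerationRequestParams(BaseModel):
    n: int = Field(default=1, ge=1, description="Alias for batch_size (OpenAI compatibility)")
    batch_count: int = Field(default=1, ge=1, description="Sequential batch count")
    llm_variations: bool = False
    llm_variations_prompt: str | None = None

# Stand-in for shared.settings (assumed dict-like; not shown in the diff).
settings = {'image_llm_variations_prompt': 'Paraphrase the prompt creatively.'}

# No prompt in the request, so the server-side setting wins.
req = ImageGenerationRequestParams(llm_variations=True)
prompt = req.llm_variations_prompt or settings.get('image_llm_variations_prompt', '')
print(prompt)  # -> Paraphrase the prompt creatively.

Note that because the fallback uses `or` rather than an explicit None check, an empty string sent by the client is also treated as unset and falls through to the server setting.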