From 3ef428efaa0e447d8d553e9387990b890aac5c6b Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Thu, 4 Dec 2025 17:34:17 -0800
Subject: [PATCH] Image: Remove llm_variations from the API

---
 extensions/openai/images.py | 3 +--
 extensions/openai/typing.py | 8 +-------
 2 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/extensions/openai/images.py b/extensions/openai/images.py
index e60470c3..f46d549d 100644
--- a/extensions/openai/images.py
+++ b/extensions/openai/images.py
@@ -35,8 +35,7 @@ def generations(request):
         'image_batch_size': request.batch_size,
         'image_batch_count': request.batch_count,
         'image_cfg_scale': request.cfg_scale,
-        'image_llm_variations': request.llm_variations,
-        'image_llm_variations_prompt': request.llm_variations_prompt or shared.settings.get('image_llm_variations_prompt', ''),
+        'image_llm_variations': False,
     })
 
     # Exhaust generator, keep final result
diff --git a/extensions/openai/typing.py b/extensions/openai/typing.py
index 31a5dc6d..5ac9f6ef 100644
--- a/extensions/openai/typing.py
+++ b/extensions/openai/typing.py
@@ -264,7 +264,7 @@ class LoadLorasRequest(BaseModel):
     lora_names: List[str]
 
 
-class ImageGenerationRequestParams(BaseModel):
+class ImageGenerationRequest(BaseModel):
     """Image-specific parameters for generation."""
     prompt: str
     negative_prompt: str = ""
@@ -275,8 +275,6 @@ class ImageGenerationRequestParams(BaseModel):
     batch_size: int | None = Field(default=None, ge=1, description="Parallel batch size (VRAM heavy)")
     n: int = Field(default=1, ge=1, description="Alias for batch_size (OpenAI compatibility)")
     batch_count: int = Field(default=1, ge=1, description="Sequential batch count")
-    llm_variations: bool = False
-    llm_variations_prompt: str | None = None
 
     # OpenAI compatibility (unused)
     model: str | None = None
@@ -297,10 +295,6 @@ class ImageGenerationRequestParams(BaseModel):
         return 1024, 1024
 
 
-class ImageGenerationRequest(GenerationOptions, ImageGenerationRequestParams):
-    pass
-
-
 class ImageGenerationResponse(BaseModel):
     created: int = int(time.time())
     data: List[dict]