From 42dfcdfc5b50333c40a6adda0f4c8672508212cb Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Thu, 2 Apr 2026 20:46:27 -0700
Subject: [PATCH] API: Add warning about vanilla llama-server not supporting
 prompt logprobs + instructions

---
 modules/llama_cpp_server.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/modules/llama_cpp_server.py b/modules/llama_cpp_server.py
index 34080466..2d873f00 100644
--- a/modules/llama_cpp_server.py
+++ b/modules/llama_cpp_server.py
@@ -333,6 +333,12 @@ class LlamaServer:
         prompt_probs = result.get("prompt_probabilities", [])
 
         if not prompt_probs:
+            logger.warning(
+                "The llama.cpp server did not return prompt probabilities. "
+                "This feature requires a custom build with prompt_logprobs support. "
+                "See: https://github.com/oobabooga/llama.cpp/tree/prompt-logprobs "
+                "or https://github.com/oobabooga/ik_llama.cpp/tree/prompt-logprobs"
+            )
             return []
 
         # Null first token (no conditioning context); use empty string for BOS
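
For reference, a minimal standalone sketch of the fallback this hunk adds, assuming `result` is the parsed JSON response from the server. The "prompt_probabilities" field name and warning text come from the patch; the function name and logger setup here are illustrative, not part of the repository:

    import logging

    logger = logging.getLogger(__name__)

    def extract_prompt_probs(result: dict) -> list:
        # Custom llama.cpp builds (the prompt-logprobs branches linked in the
        # warning) return this field; vanilla llama-server omits it, so its
        # absence is treated as "feature not supported" rather than an error.
        prompt_probs = result.get("prompt_probabilities", [])
        if not prompt_probs:
            logger.warning(
                "The llama.cpp server did not return prompt probabilities. "
                "This feature requires a custom build with prompt_logprobs support."
            )
            return []
        return prompt_probs

Returning an empty list (instead of raising) keeps callers working against a vanilla llama-server; the warning only surfaces why prompt logprobs are missing.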