Fix getting the llama.cpp logprobs for Qwen3-30B-A3B

This commit is contained in:
oobabooga 2025-04-30 06:48:32 -07:00
parent 7f49e3c3ce
commit 771d3d8ed6

View file

@@ -45,6 +45,9 @@ def _get_next_logits(prompt, state, use_samplers, previous, top_logits=25, retur
output = {}
for entry in logprobs:
token = repr(entry['token'])
if len(token) > 2 and token.startswith("'") and token.endswith("'"):
token = token[1:-1]
prob = entry['prob'] if use_samplers else np.exp(entry['logprob'])
output[token] = prob
return output
@@ -52,6 +55,9 @@ def _get_next_logits(prompt, state, use_samplers, previous, top_logits=25, retur
output = ''
for entry in logprobs:
token = repr(entry['token'])
if len(token) > 2 and token.startswith("'") and token.endswith("'"):
token = token[1:-1]
prob = entry['prob'] if use_samplers else np.exp(entry['logprob'])
output += f"{prob:.5f} - {token}\n"
return output, previous