Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2026-01-20 15:40:23 +01:00
Fix getting the llama.cpp logprobs for Qwen3-30B-A3B
This commit is contained in:
parent 7f49e3c3ce
commit 771d3d8ed6
@@ -45,6 +45,9 @@ def _get_next_logits(prompt, state, use_samplers, previous, top_logits=25, retur
         output = {}
         for entry in logprobs:
             token = repr(entry['token'])
+            if len(token) > 2 and token.startswith("'") and token.endswith("'"):
+                token = token[1:-1]
+
             prob = entry['prob'] if use_samplers else np.exp(entry['logprob'])
             output[token] = prob
         return output
@@ -52,6 +55,9 @@ def _get_next_logits(prompt, state, use_samplers, previous, top_logits=25, retur
         output = ''
         for entry in logprobs:
             token = repr(entry['token'])
+            if len(token) > 2 and token.startswith("'") and token.endswith("'"):
+                token = token[1:-1]
+
             prob = entry['prob'] if use_samplers else np.exp(entry['logprob'])
             output += f"{prob:.5f} - {token}\n"
         return output, previous
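On the probability side, each entry supplies either a sampler-adjusted 'prob' or a raw 'logprob', which np.exp() maps back to a probability before formatting. A small sketch of the logprob path, using made-up entries in the same shape the loop above consumes (the values are invented for illustration):

import numpy as np

# Example entries shaped like the loop above expects; values are made up.
logprobs = [
    {'token': 'Hello', 'logprob': 0.0},        # exp(0.0) == 1.0
    {'token': '\n', 'logprob': -0.693147},     # exp(-ln 2) ~= 0.5
]

output = ''
for entry in logprobs:
    # llama.cpp reports log-probabilities; np.exp() maps them back to [0, 1]
    prob = np.exp(entry['logprob'])
    output += f"{prob:.5f} - {entry['token']!r}\n"

print(output, end='')
# 1.00000 - 'Hello'
# 0.50000 - '\n'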