Fix missing top_logprobs_ids for the llama.cpp loader

This commit is contained in:
oobabooga 2026-04-02 16:13:45 -03:00
parent f6f8f14c8d
commit 091037ec20

View file

@ -299,8 +299,9 @@ def format_completion_logprobs(entries):
t = item.get('token', '')
lp = item.get('logprob', item.get('prob', 0))
top_dict[t] = lp
if 'token_id' in item:
top_dict_ids[item['token_id']] = lp
tid = item.get('token_id', item.get('id'))
if tid is not None:
top_dict_ids[tid] = lp
top_logprobs.append(top_dict)
top_logprobs_ids.append(top_dict_ids if top_dict_ids else None)