Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2026-01-05 16:20:04 +01:00
Fix llama.cpp double decoding
This commit is contained in:
parent 230b562d53
commit d9b0f2c9c3
@@ -116,7 +116,7 @@ class LlamaCppModel:
         # Handle truncation
         prompt = self.encode(prompt)
         prompt = prompt[-get_max_prompt_length(state):]
-        prompt = self.decode(prompt).decode('utf-8')
+        prompt = self.decode(prompt)
 
         logit_processors = LogitsProcessorList()
         if state['ban_eos_token']:
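Why the chained .decode('utf-8') was a bug: after truncation, self.decode(prompt) turns the token IDs back into text. Once decode() itself returns a str (as the new line implies), the old .decode('utf-8') becomes a second decode on an already-decoded string, which in Python 3 raises AttributeError because str has no decode() method. A minimal sketch of that failure mode, assuming a simplified wrapper (FakeLlamaCppModel and its encode/decode are hypothetical stand-ins, not the webui's actual implementation):

class FakeLlamaCppModel:
    """Hypothetical stand-in for LlamaCppModel; not the real class."""

    def encode(self, text):
        # Stand-in for tokenization: keep the prompt as raw bytes.
        return text.encode('utf-8')

    def decode(self, tokens):
        # Assumed post-fix behaviour: decode() already returns a str.
        return tokens.decode('utf-8')


model = FakeLlamaCppModel()
prompt = model.encode("hello world")
prompt = model.decode(prompt)      # new code path: already a str
print(type(prompt), prompt)

try:
    prompt.decode('utf-8')         # old code path: a second, redundant decode
except AttributeError as exc:
    print("double decoding fails:", exc)   # 'str' object has no attribute 'decode'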