
Commit 34ad71f

Merge pull request #274 from dmahurin/fix-missing-antiprompt
low_level_api_chat_cpp.py: Fix missing antiprompt output in chat.
2 parents: d78453c + 0fa2ec4

File tree

1 file changed: +6 -3 lines

examples/low_level_api/low_level_api_chat_cpp.py

Lines changed: 6 additions & 3 deletions
@@ -382,12 +382,15 @@ def generate(self):
                 # replace end of text token with newline token when in interactive mode
                 if (id == llama_cpp.llama_token_eos() and self.params.interactive and not self.params.instruct):
                     id = self.llama_token_newline[0]
+                    self.embd.append(id)
                     if (self.use_antiprompt()):
                         # tokenize and inject first reverse prompt
                         self.embd_inp += self.first_antiprompt[0]
-
-                # add it to the context
-                self.embd.append(id)
+                        for id in self.first_antiprompt[0]:
+                            self.embd.append(id)
+                else:
+                    # add it to the context
+                    self.embd.append(id)
 
                 # echo this to console
                 self.output_echo = True
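
Why the change fixes the bug: in this example script, `self.embd_inp` holds tokens queued for evaluation, while `self.embd` holds the tokens the loop detokenizes and echoes to the console. The injected antiprompt previously went only into `embd_inp`, so it was evaluated but never printed; the fix also appends its tokens to `embd`. Below is a minimal self-contained sketch of that flow. The function name `process_token` and the token ids (eos=2, newline=13, antiprompt tokens 28/29) are illustrative assumptions, not the repo's API.

# Sketch of the echo path, assuming hypothetical names and token ids.
# Rule mirrored from the fix: anything that should be shown to the
# user must be appended to `embd`, not just queued in `embd_inp`.

def process_token(tok, embd, embd_inp, *, eos=2, newline=13,
                  use_antiprompt=True, first_antiprompt=(28, 29)):
    if tok == eos:
        # replace end of text with a newline, and echo that newline
        tok = newline
        embd.append(tok)
        if use_antiprompt:
            embd_inp.extend(first_antiprompt)  # queue antiprompt for evaluation
            embd.extend(first_antiprompt)      # and echo it (this was missing)
    else:
        # normal path: add the token to the context so it is echoed
        embd.append(tok)

embd, embd_inp = [], []
process_token(2, embd, embd_inp)   # an EOS arrives in interactive mode
assert embd == [13, 28, 29]        # newline plus antiprompt are now echoed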
