Commit 8895b90

Revert "llama_cpp server: prompt is a string". Closes #187
This reverts commit b9098b0.
1 parent: 684d7c8
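
In effect, this restores the server's acceptance of both prompt shapes: a single string, or a list of strings that is concatenated before inference. A hedged sketch of a client call follows; the endpoint path, port, and max_tokens value are illustrative of a default local llama-cpp-python server, not taken from this commit.

# Illustrative only: after this revert, both payloads should validate against
# the server's CreateCompletionRequest model; the list form is joined into a
# single string before being passed to the model.
import requests  # assumes the `requests` package is installed

payloads = [
    {"prompt": "Hello, world", "max_tokens": 16},        # plain string
    {"prompt": ["Hello, ", "world"], "max_tokens": 16},  # list of strings
]

for payload in payloads:
    # Assumed default local address for the llama-cpp-python server.
    r = requests.post("http://localhost:8000/v1/completions", json=payload)
    r.raise_for_status()
    print(r.json()["choices"][0]["text"])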

1 file changed: 6 additions, 2 deletions

llama_cpp/server/app.py (6 additions, 2 deletions)

@@ -167,8 +167,9 @@ def get_llama():
 )
 
 class CreateCompletionRequest(BaseModel):
-    prompt: Optional[str] = Field(
-        default="", description="The prompt to generate completions for."
+    prompt: Union[str, List[str]] = Field(
+        default="",
+        description="The prompt to generate completions for."
     )
     suffix: Optional[str] = Field(
         default=None,
@@ -222,6 +223,9 @@ class Config:
 def create_completion(
     request: CreateCompletionRequest, llama: llama_cpp.Llama = Depends(get_llama)
 ):
+    if isinstance(request.prompt, list):
+        request.prompt = "".join(request.prompt)
+
     completion_or_chunks = llama(
         **request.dict(
             exclude={
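
For reference, a minimal, self-contained sketch of the restored behavior. The model is trimmed to the one field this diff touches, and normalize_prompt is a hypothetical helper that mirrors the inline isinstance check in create_completion, not a function from the codebase.

from typing import List, Union

from pydantic import BaseModel, Field


class CreateCompletionRequest(BaseModel):
    # As in the diff: the field accepts a plain string or a list of strings.
    prompt: Union[str, List[str]] = Field(
        default="",
        description="The prompt to generate completions for."
    )


def normalize_prompt(request: CreateCompletionRequest) -> str:
    # Mirrors the handler: list prompts are concatenated with no separator.
    if isinstance(request.prompt, list):
        return "".join(request.prompt)
    return request.prompt


# Both shapes validate and produce the same effective prompt.
assert normalize_prompt(CreateCompletionRequest(prompt="Hello, world")) == "Hello, world"
assert normalize_prompt(CreateCompletionRequest(prompt=["Hello, ", "world"])) == "Hello, world"

Note that "".join inserts no separator, so clients sending a list must include any spacing or newlines inside the list elements themselves.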
