Skip to content

Commit

Permalink
llama_cpp server: prompt is a string
Browse files Browse the repository at this point in the history
Not sure why this union type was here, but after looking at llama.py, the prompt is only ever processed as a string for completion.

This was breaking the generated types when producing an OpenAPI client.
  • Loading branch information
Stonelinks committed May 2, 2023
1 parent 7ab08b8 commit b9098b0
Showing 1 changed file with 1 addition and 4 deletions.
5 changes: 1 addition & 4 deletions llama_cpp/server/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def get_llama():
)

class CreateCompletionRequest(BaseModel):
prompt: Union[str, List[str]] = Field(
prompt: Optional[str] = Field(
default="",
description="The prompt to generate completions for."
)
Expand Down Expand Up @@ -175,9 +175,6 @@ class Config:
def create_completion(
request: CreateCompletionRequest, llama: llama_cpp.Llama = Depends(get_llama)
):
if isinstance(request.prompt, list):
request.prompt = "".join(request.prompt)

completion_or_chunks = llama(
**request.dict(
exclude={
Expand Down

0 comments on commit b9098b0

Please sign in to comment.