Skip to content

Commit

Permalink
fix: add missing await statements for async exit_stack handling (#1858)
Browse files Browse the repository at this point in the history
  • Loading branch information
gjpower authored Dec 9, 2024
1 parent ea4d86a commit afedfc8
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions llama_cpp/server/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -318,7 +318,7 @@ async def create_completion(
Iterator[llama_cpp.CreateCompletionStreamResponse],
] = await run_in_threadpool(llama, **kwargs)
except Exception as err:
exit_stack.close()
await exit_stack.aclose()
raise err

if isinstance(iterator_or_completion, Iterator):
Expand Down Expand Up @@ -475,7 +475,7 @@ async def create_chat_completion(
# is complete.
# https://github.com/tiangolo/fastapi/issues/11143
exit_stack = contextlib.AsyncExitStack()
llama_proxy = exit_stack.enter_async_context(contextlib.asynccontextmanager(get_llama_proxy)())
llama_proxy = await exit_stack.enter_async_context(contextlib.asynccontextmanager(get_llama_proxy)())
if llama_proxy is None:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
Expand Down Expand Up @@ -513,7 +513,7 @@ async def create_chat_completion(
llama_cpp.ChatCompletion, Iterator[llama_cpp.ChatCompletionChunk]
] = await run_in_threadpool(llama.create_chat_completion, **kwargs)
except Exception as err:
exit_stack.close()
await exit_stack.aclose()
raise err

if isinstance(iterator_or_completion, Iterator):
Expand Down

0 comments on commit afedfc8

Please sign in to comment.