Skip to content

Commit

Permalink
[Bugfix] fix server startup for embedding models/in-process frontend
Browse files Browse the repository at this point in the history
  • Loading branch information
dtrifiro committed Sep 19, 2024
1 parent 6ffa3f3 commit 721fa52
Showing 1 changed file with 6 additions and 4 deletions.
10 changes: 6 additions & 4 deletions vllm/entrypoints/openai/api_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -532,8 +532,11 @@ async def run_server(args, **uvicorn_kwargs) -> None:
logger.info("vLLM API server version %s", VLLM_VERSION)
logger.info("args: %s", args)

temp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
temp_socket.bind(("", args.port))
# Workaround: bind the port before the engine is set up.
# This avoids race conditions with Ray.
# See https://github.com/vllm-project/vllm/issues/8204
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("", args.port))

def signal_handler(*_) -> None:
# Interrupt server on sigterm while initializing
Expand All @@ -551,8 +554,6 @@ def signal_handler(*_) -> None:
model_config = await async_engine_client.get_model_config()
init_app_state(async_engine_client, model_config, app.state, args)

temp_socket.close()

shutdown_task = await serve_http(
app,
limit_concurrency=async_engine_client.limit_concurrency,
Expand All @@ -564,6 +565,7 @@ def signal_handler(*_) -> None:
ssl_certfile=args.ssl_certfile,
ssl_ca_certs=args.ssl_ca_certs,
ssl_cert_reqs=args.ssl_cert_reqs,
fd=sock.fileno(),
**uvicorn_kwargs,
)

Expand Down

0 comments on commit 721fa52

Please sign in to comment.