diff --git a/vllm/entrypoints/openai/api_server.py b/vllm/entrypoints/openai/api_server.py
index 9746d9697a66..af132481b1bd 100644
--- a/vllm/entrypoints/openai/api_server.py
+++ b/vllm/entrypoints/openai/api_server.py
@@ -41,7 +41,8 @@
                                          resolve_mistral_chat_template)
 from vllm.entrypoints.launcher import serve_http
 from vllm.entrypoints.logger import RequestLogger
-from vllm.entrypoints.openai.cli_args import (make_arg_parser,
+from vllm.entrypoints.openai.cli_args import (log_non_default_args,
+                                              make_arg_parser,
                                               validate_parsed_serve_args)
 # yapf conflicts with isort for this block
 # yapf: disable
@@ -1040,7 +1041,7 @@ def create_server_socket(addr: tuple[str, int]) -> socket.socket:
 
 async def run_server(args, **uvicorn_kwargs) -> None:
     logger.info("vLLM API server version %s", VLLM_VERSION)
-    logger.info("args: %s", args)
+    log_non_default_args(args)
 
     if args.tool_parser_plugin and len(args.tool_parser_plugin) > 3:
         ToolParserManager.import_tool_parser(args.tool_parser_plugin)
diff --git a/vllm/entrypoints/openai/cli_args.py b/vllm/entrypoints/openai/cli_args.py
index a2639d374791..d8cec2202134 100644
--- a/vllm/entrypoints/openai/cli_args.py
+++ b/vllm/entrypoints/openai/cli_args.py
@@ -17,8 +17,11 @@
 from vllm.entrypoints.openai.serving_models import (LoRAModulePath,
                                                     PromptAdapterPath)
 from vllm.entrypoints.openai.tool_parsers import ToolParserManager
+from vllm.logger import init_logger
 from vllm.utils import FlexibleArgumentParser
 
+logger = init_logger(__name__)
+
 
 class LoRAParserAction(argparse.Action):
 
@@ -285,6 +288,15 @@ def validate_parsed_serve_args(args: argparse.Namespace):
                 "--tool-call-parser")
 
 
+def log_non_default_args(args: argparse.Namespace):
+    non_default_args = {}
+    parser = make_arg_parser(FlexibleArgumentParser())
+    for arg, default in vars(parser.parse_args([])).items():
+        if default != getattr(args, arg):
+            non_default_args[arg] = getattr(args, arg)
+    logger.info("non-default args: %s", non_default_args)
+
+
 def create_parser_for_docs() -> FlexibleArgumentParser:
     parser_for_docs = FlexibleArgumentParser(
        prog="-m vllm.entrypoints.openai.api_server")
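
The pattern behind `log_non_default_args` can be illustrated outside of vLLM with plain `argparse`: parsing an empty argument list yields a namespace where every option holds its default, and comparing that namespace against the actual parsed args leaves only the values the user overrode. The snippet below is a minimal standalone sketch of that idea, assuming a made-up `example-server` CLI; the flags shown are hypothetical and are not vLLM's real options.

```python
import argparse
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def build_parser() -> argparse.ArgumentParser:
    # Hypothetical flags, standing in for vLLM's much larger CLI surface.
    parser = argparse.ArgumentParser(prog="example-server")
    parser.add_argument("--host", default="0.0.0.0")
    parser.add_argument("--port", type=int, default=8000)
    parser.add_argument("--max-model-len", type=int, default=None)
    return parser


def log_non_default_args(args: argparse.Namespace) -> None:
    # Parse an empty argv to obtain the defaults, then keep only the
    # attributes whose values differ from those defaults.
    defaults = vars(build_parser().parse_args([]))
    non_default = {k: getattr(args, k) for k, v in defaults.items()
                   if getattr(args, k) != v}
    logger.info("non-default args: %s", non_default)


if __name__ == "__main__":
    args = build_parser().parse_args(["--port", "9000"])
    log_non_default_args(args)  # logs: non-default args: {'port': 9000}
```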