
Commit: add help messages
Bojun-Feng committed Aug 18, 2023
1 parent 34ba817 commit d342e19
Showing 1 changed file with 134 additions and 23 deletions.
157 changes: 134 additions & 23 deletions xinference/deploy/cmdline.py
@@ -67,12 +67,39 @@ def get_endpoint(endpoint: Optional[str]) -> str:
return endpoint


@click.group(invoke_without_command=True, name="xinference")
@click.group(
invoke_without_command=True,
name="xinference",
help="Xinference command-line interface for serving and deploying models.",
)
@click.pass_context
@click.version_option(__version__, "--version", "-v")
@click.option("--log-level", default="INFO", type=str)
@click.option("--host", "-H", default=XINFERENCE_DEFAULT_LOCAL_HOST, type=str)
@click.option("--port", "-p", default=XINFERENCE_DEFAULT_ENDPOINT_PORT, type=int)
@click.version_option(
__version__,
"--version",
"-v",
help="Show the current version of the Xinference tool.",
)
@click.option(
"--log-level",
default="INFO",
type=str,
help="""Set the logger level. Options listed from most to least verbose are:
ALL > TRACE > DEBUG > INFO > WARN > ERROR > FATAL > OFF (Default level is INFO)""",
)
@click.option(
"--host",
"-H",
default=XINFERENCE_DEFAULT_LOCAL_HOST,
type=str,
help="Specify the host address for the Xinference server.",
)
@click.option(
"--port",
"-p",
default=XINFERENCE_DEFAULT_ENDPOINT_PORT,
type=int,
help="Specify the port number for the Xinference server.",
)
def cli(
ctx,
log_level: str,
@@ -97,10 +124,30 @@ def cli(
)
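
A quick way to preview the help text added above, without starting a local cluster, is Click's built-in test runner. The following is a minimal sketch, not part of this commit, assuming the cli group defined above can be imported from xinference.deploy.cmdline without side effects:

    # Render the top-level help of the "xinference" group. --help is an eager
    # option, so the group callback itself is never invoked here.
    from click.testing import CliRunner

    from xinference.deploy.cmdline import cli

    runner = CliRunner()
    result = runner.invoke(cli, ["--help"])
    print(result.output)  # shows the new descriptions for --log-level, --host and --port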


@click.command()
@click.option("--log-level", default="INFO", type=str)
@click.option("--host", "-H", default=XINFERENCE_DEFAULT_DISTRIBUTED_HOST, type=str)
@click.option("--port", "-p", default=XINFERENCE_DEFAULT_ENDPOINT_PORT, type=int)
@click.command(
help="Starts a Xinference supervisor to control and monitor the worker actors."
)
@click.option(
"--log-level",
default="INFO",
type=str,
help="""Set the logger level for the supervisor. Options listed from most to least verbose are:
ALL > TRACE > DEBUG > INFO > WARN > ERROR > FATAL > OFF (Default level is INFO)""",
)
@click.option(
"--host",
"-H",
default=XINFERENCE_DEFAULT_DISTRIBUTED_HOST,
type=str,
help="Specify the host address for the supervisor.",
)
@click.option(
"--port",
"-p",
default=XINFERENCE_DEFAULT_ENDPOINT_PORT,
type=int,
help="Specify the port number for the supervisor.",
)
def supervisor(
log_level: str,
host: str,
@@ -117,14 +164,26 @@ def supervisor(
main(address=address, host=host, port=port, logging_conf=logging_conf)


@click.command()
@click.option("--log-level", default="INFO", type=str)
@click.command(
help="Starts a Xinference worker to execute tasks assigned by the supervisor in a distributed setup."
)
@click.option(
"--log-level",
default="INFO",
type=str,
help="""Set the logger level for the worker. Options listed from most to least verbose are:
ALL > TRACE > DEBUG > INFO > WARN > ERROR > FATAL > OFF (Default level is INFO)""",
)
@click.option(
"--endpoint",
"-e",
type=str,
)
@click.option(
"--endpoint", "-e", type=str, help="Specify the endpoint URL for the worker."
)
@click.option(
"--host",
"-H",
default=XINFERENCE_DEFAULT_DISTRIBUTED_HOST,
type=str,
help="Specify the host address for the worker.",
)
@click.option("--host", "-H", default=XINFERENCE_DEFAULT_DISTRIBUTED_HOST, type=str)
def worker(log_level: str, endpoint: Optional[str], host: str):
from ..deploy.worker import main

@@ -146,16 +205,51 @@ def worker(log_level: str, endpoint: Optional[str], host: str):
)


@cli.command("launch")
@cli.command(
"launch",
help="Launch a model with the Xinference framework using the given parameters.",
)
@click.option(
"--endpoint",
"-e",
type=str,
help="Specify the endpoint URL for launching the model.",
)
@click.option(
"--model-name",
"-n",
type=str,
required=True,
help="Provide the name of the model to be launched.",
)
@click.option(
"--size-in-billions",
"-s",
default=None,
type=int,
help="Specify the model size in billions of parameters.",
)
@click.option(
"--model-format",
"-f",
default=None,
type=str,
help="Specify the format of the model, e.g. pytorch or ggmlv3.",
)
@click.option(
"--quantization",
"-q",
default=None,
type=str,
help="Define the quantization settings for the model.",
)
@click.option(
"--model-uid",
"-i",
default=None,
type=str,
help="Provide a unique identifier for the model if required. If not provided, one will be generated.",
)
@click.option("--model-name", "-n", type=str)
@click.option("--size-in-billions", "-s", default=None, type=int)
@click.option("--model-format", "-f", default=None, type=str)
@click.option("--quantization", "-q", default=None, type=str)
def model_launch(
endpoint: Optional[str],
model_name: str,
@@ -176,13 +270,21 @@ def model_launch(
print(f"Model uid: {model_uid}", file=sys.stderr)
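
The same approach works for subcommands; the help of the launch command should now list -n/--model-name as [required] along with the other new descriptions. Again a sketch outside this commit, assuming the cli callback only starts a local cluster when no subcommand is given (which is what invoke_without_command=True suggests):

    from click.testing import CliRunner

    from xinference.deploy.cmdline import cli

    result = CliRunner().invoke(cli, ["launch", "--help"])
    print(result.output)          # -n/--model-name shows up as [required]
    assert result.exit_code == 0  # rendering help should exit cleanly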


@cli.command("list")
@cli.command(
"list",
help="List either all built-in models or only the currently deployed models in Xinference.",
)
@click.option(
"--endpoint",
"-e",
type=str,
help="Specify the endpoint URL for listing models.",
)
@click.option(
"--all",
is_flag=True,
help="Include this flag to list all built-in models, not just the ones that are currently deployed.",
)
@click.option("--all", is_flag=True)
def model_list(endpoint: Optional[str], all: bool):
from tabulate import tabulate

@@ -234,13 +336,22 @@ def model_list(endpoint: Optional[str], all: bool):
)


@cli.command("terminate")
@cli.command(
"terminate",
help="Terminate a deployed model by its unique identifier (UID).",
)
@click.option(
"--endpoint",
"-e",
type=str,
help="Specify the endpoint URL for terminating the model.",
)
@click.option(
"--model-uid",
type=str,
required=True,
help="Provide the unique identifier (UID) of the model to be terminated.",
)
@click.option("--model-uid", type=str)
def model_terminate(
endpoint: Optional[str],
model_uid: str,
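
With required=True on --model-uid, running terminate without it should now fail in the CLI layer with a usage error instead of passing a missing value through to the endpoint. A last sketch under the same assumptions as above:

    from click.testing import CliRunner

    from xinference.deploy.cmdline import cli

    result = CliRunner().invoke(cli, ["terminate"])  # deliberately omit --model-uid
    assert result.exit_code != 0                     # Click aborts with a usage error
    assert "--model-uid" in result.output            # the error names the missing option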
