From 152a57a5852557751ea94db2b70b7ea0f80f3fdd Mon Sep 17 00:00:00 2001
From: dltn <6599399+dltn@users.noreply.github.com>
Date: Fri, 22 Nov 2024 13:37:02 -0800
Subject: [PATCH 1/3] add print_pip_install_help to direct client

---
 src/llama_stack_client/lib/direct/direct.py | 33 +++++++++++++++------
 src/llama_stack_client/lib/inline/inline.py |  0
 2 files changed, 24 insertions(+), 9 deletions(-)
 create mode 100644 src/llama_stack_client/lib/inline/inline.py

diff --git a/src/llama_stack_client/lib/direct/direct.py b/src/llama_stack_client/lib/direct/direct.py
index d3aeb5ae..656dcf02 100644
--- a/src/llama_stack_client/lib/direct/direct.py
+++ b/src/llama_stack_client/lib/direct/direct.py
@@ -1,26 +1,31 @@
 import inspect
-from typing import Any, Type, cast, get_args, get_origin
+from typing import Any, cast, get_args, get_origin, Type
 
 import yaml
+from llama_stack.distribution.build import print_pip_install_help
 from llama_stack.distribution.datatypes import StackRunConfig
 from llama_stack.distribution.distribution import get_provider_registry
 from llama_stack.distribution.resolver import resolve_impls
 from llama_stack.distribution.server.endpoints import get_all_api_endpoints
 from llama_stack.distribution.server.server import is_streaming_request
-from llama_stack.distribution.stack import (construct_stack,
-                                            get_stack_run_config_from_template)
+from llama_stack.distribution.stack import (
+    construct_stack,
+    get_stack_run_config_from_template,
+)
 from pydantic import BaseModel
 from rich.console import Console
 
 from ..._base_client import ResponseT
 from ..._client import LlamaStackClient
 from ..._streaming import Stream
-from ..._types import NOT_GIVEN, Body, RequestFiles, RequestOptions
+from ..._types import Body, NOT_GIVEN, RequestFiles, RequestOptions
 
 
 class LlamaStackDirectClient(LlamaStackClient):
     def __init__(self, config: StackRunConfig, **kwargs):
-        raise TypeError("Use from_config() or from_template() instead of direct initialization")
+        raise TypeError(
+            "Use from_config() or from_template() instead of direct initialization"
+        )
 
     @classmethod
     async def from_config(cls, config: StackRunConfig, **kwargs):
@@ -32,8 +37,12 @@ async def from_config(cls, config: StackRunConfig, **kwargs):
     async def from_template(cls, template_name: str, **kwargs):
         config = get_stack_run_config_from_template(template_name)
         console = Console()
-        console.print(f"[green]Using template[/green] [blue]{template_name}[/blue] with config:")
-        console.print(yaml.dump(config.model_dump(), indent=2, default_flow_style=False))
+        console.print(
+            f"[green]Using template[/green] [blue]{template_name}[/blue] with config:"
+        )
+        console.print(
+            yaml.dump(config.model_dump(), indent=2, default_flow_style=False)
+        )
         instance = object.__new__(cls)
         await instance._initialize(config, **kwargs)
         return instance
@@ -46,7 +55,11 @@ async def _initialize(self, config: StackRunConfig, **kwargs) -> None:
         await self.initialize()
 
     async def initialize(self) -> None:
-        self.impls = await construct_stack(self.config)
+        try:
+            self.impls = await construct_stack(self.config)
+        except ModuleNotFoundError as e:
+            print_pip_install_help(self.config.providers)
+            raise e
 
     def _convert_param(self, param_type: Any, value: Any) -> Any:
         origin = get_origin(param_type)
@@ -85,7 +98,9 @@ async def _call_endpoint(self, path: str, method: str, body: dict = None) -> Any
         for param_name, param in sig.parameters.items():
             if param_name in body:
                 value = body.get(param_name)
-                converted_body[param_name] = self._convert_param(param.annotation, value)
+                converted_body[param_name] = self._convert_param(
+                    param.annotation, value
+                )
         body = converted_body
 
         if is_streaming_request(endpoint.name, body):
diff --git a/src/llama_stack_client/lib/inline/inline.py b/src/llama_stack_client/lib/inline/inline.py
new file mode 100644
index 00000000..e69de29b

From f60e1bcc0bc211943641e1ef36c02b98651c2f78 Mon Sep 17 00:00:00 2001
From: dltn <6599399+dltn@users.noreply.github.com>
Date: Fri, 22 Nov 2024 13:39:18 -0800
Subject: [PATCH 2/3] flake8

---
 src/llama_stack_client/lib/cli/inference/inference.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/llama_stack_client/lib/cli/inference/inference.py b/src/llama_stack_client/lib/cli/inference/inference.py
index 1c6960e8..26ad00a1 100644
--- a/src/llama_stack_client/lib/cli/inference/inference.py
+++ b/src/llama_stack_client/lib/cli/inference/inference.py
@@ -7,9 +7,7 @@
 from typing import Optional
 
 import click
-import yaml
 from rich.console import Console
-from rich.table import Table
 
 from ..common.utils import handle_client_errors

From 2346881e00efa169348cb293c603adce63a26b56 Mon Sep 17 00:00:00 2001
From: Dalton Flanagan <6599399+dltn@users.noreply.github.com>
Date: Fri, 22 Nov 2024 16:40:53 -0500
Subject: [PATCH 3/3] pre-commit

---
 src/llama_stack_client/lib/direct/direct.py | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/src/llama_stack_client/lib/direct/direct.py b/src/llama_stack_client/lib/direct/direct.py
index 656dcf02..cf04f4f5 100644
--- a/src/llama_stack_client/lib/direct/direct.py
+++ b/src/llama_stack_client/lib/direct/direct.py
@@ -1,24 +1,20 @@
 import inspect
-from typing import Any, cast, get_args, get_origin, Type
+from typing import Any, Type, cast, get_args, get_origin
 
 import yaml
 from llama_stack.distribution.build import print_pip_install_help
 from llama_stack.distribution.datatypes import StackRunConfig
-from llama_stack.distribution.distribution import get_provider_registry
-from llama_stack.distribution.resolver import resolve_impls
 from llama_stack.distribution.server.endpoints import get_all_api_endpoints
 from llama_stack.distribution.server.server import is_streaming_request
-from llama_stack.distribution.stack import (
-    construct_stack,
-    get_stack_run_config_from_template,
-)
+from llama_stack.distribution.stack import (construct_stack,
+                                            get_stack_run_config_from_template)
 from pydantic import BaseModel
 from rich.console import Console
 
 from ..._base_client import ResponseT
 from ..._client import LlamaStackClient
 from ..._streaming import Stream
-from ..._types import Body, NOT_GIVEN, RequestFiles, RequestOptions
+from ..._types import NOT_GIVEN, Body, RequestFiles, RequestOptions
 
 
 class LlamaStackDirectClient(LlamaStackClient):
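
Usage sketch: with this series applied, a ModuleNotFoundError raised while
constructing the stack first prints pip install help for the configured
providers, then propagates to the caller. A minimal caller-side sketch —
the template name "ollama" is illustrative, not taken from the patch:

    import asyncio

    from llama_stack_client.lib.direct.direct import LlamaStackDirectClient


    async def main() -> None:
        # from_template() resolves the named distribution template into a
        # StackRunConfig, prints it as YAML, and builds the stack in-process.
        try:
            client = await LlamaStackDirectClient.from_template("ollama")
        except ModuleNotFoundError:
            # initialize() now calls print_pip_install_help(config.providers)
            # before re-raising, so the missing provider packages are listed
            # for the user instead of failing with a bare import error.
            raise
        print("initialized providers:", list(client.config.providers))


    if __name__ == "__main__":
        asyncio.run(main())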