|
2 | 2 |
|
3 | 3 | import logging |
4 | 4 |
|
| 5 | +from typing import Optional |
| 6 | + |
5 | 7 | from llama_stack.distribution.library_client import ( |
6 | 8 | AsyncLlamaStackAsLibraryClient, # type: ignore |
7 | 9 | LlamaStackAsLibraryClient, # type: ignore |
8 | 10 | ) |
9 | 11 | from llama_stack_client import AsyncLlamaStackClient, LlamaStackClient # type: ignore |
10 | 12 | from models.config import LLamaStackConfiguration |
| 13 | +from utils.types import Singleton |
| 14 | + |
11 | 15 |
|
# Module-level logger named after this module (standard logging convention).
logger = logging.getLogger(__name__)
13 | 17 |
|
14 | 18 |
|
class LlamaStackClientHolder(metaclass=Singleton):
    """Singleton container for an initialised LlamaStackClient.

    Call load() once with the application configuration, then obtain the
    shared client via get_client().
    """

    # Lazily initialised by load(); stays None until then.
    _lsc: Optional[LlamaStackClient] = None

    def load(self, llama_stack_config: LLamaStackConfiguration) -> None:
        """Create and store a Llama Stack client according to configuration.

        Parameters:
            llama_stack_config: configuration selecting either library-client
                mode (in-process) or service mode (remote HTTP endpoint).

        Raises:
            ValueError: when library-client mode is selected but
                library_client_config_path is not set.
        """
        if llama_stack_config.use_as_library_client:
            # Guard clause: fail fast on the misconfiguration before doing work.
            if llama_stack_config.library_client_config_path is None:
                msg = "Configuration problem: library_client_config_path option is not set"
                logger.error(msg)
                # tisnik: use custom exception there - with cause etc.
                raise ValueError(msg)
            logger.info("Using Llama stack as library client")
            client = LlamaStackAsLibraryClient(
                llama_stack_config.library_client_config_path
            )
            client.initialize()
            self._lsc = client
        else:
            logger.info("Using Llama stack running as a service")
            self._lsc = LlamaStackClient(
                base_url=llama_stack_config.url, api_key=llama_stack_config.api_key
            )

    def get_client(self) -> LlamaStackClient:
        """Return the initialised LlamaStackClient.

        Raises:
            RuntimeError: if load() has not been called yet.
        """
        # Identity check (`is None`), not truthiness: a client object with a
        # custom __bool__/__len__ must not be mistaken for "not initialised".
        if self._lsc is None:
            raise RuntimeError(
                "LlamaStackClient has not been initialised. Ensure 'load(..)' has been called."
            )
        return self._lsc
class AsyncLlamaStackClientHolder(metaclass=Singleton):
    """Singleton container for an initialised AsyncLlamaStackClient.

    Await load() once with the application configuration, then obtain the
    shared client via get_client().
    """

    # Lazily initialised by load(); stays None until then.
    _lsc: Optional[AsyncLlamaStackClient] = None

    async def load(self, llama_stack_config: LLamaStackConfiguration) -> None:
        """Create and store an async Llama Stack client according to configuration.

        Parameters:
            llama_stack_config: configuration selecting either library-client
                mode (in-process) or service mode (remote HTTP endpoint).

        Raises:
            ValueError: when library-client mode is selected but
                library_client_config_path is not set.
        """
        if llama_stack_config.use_as_library_client:
            # Guard clause: fail fast on the misconfiguration before doing work.
            if llama_stack_config.library_client_config_path is None:
                msg = "Configuration problem: library_client_config_path option is not set"
                logger.error(msg)
                # tisnik: use custom exception there - with cause etc.
                raise ValueError(msg)
            logger.info("Using Llama stack as library client")
            client = AsyncLlamaStackAsLibraryClient(
                llama_stack_config.library_client_config_path
            )
            # Async library client requires an awaited initialisation step.
            await client.initialize()
            self._lsc = client
        else:
            logger.info("Using Llama stack running as a service")
            self._lsc = AsyncLlamaStackClient(
                base_url=llama_stack_config.url, api_key=llama_stack_config.api_key
            )

    def get_client(self) -> AsyncLlamaStackClient:
        """Return the initialised AsyncLlamaStackClient.

        Raises:
            RuntimeError: if load() has not been called yet.
        """
        # Identity check (`is None`), not truthiness: a client object with a
        # custom __bool__/__len__ must not be mistaken for "not initialised".
        if self._lsc is None:
            raise RuntimeError(
                "AsyncLlamaStackClient has not been initialised. Ensure 'load(..)' has been called."
            )
        return self._lsc
0 commit comments