diff --git a/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIAdapter.java b/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIAdapter.java
index 51b4f88..761a0b3 100644
--- a/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIAdapter.java
+++ b/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIAdapter.java
@@ -109,12 +109,13 @@ public class OpenAIAPIAdapter implements SignalAdapter {
     private static Logger logger = Logger.getLogger(OpenAIAPIAdapter.class.getName());
 
     @Inject
-    @ConfigProperty(name = OpenAIAPIService.LLM_SERVICE_ENDPOINT)
-    Optional<String> mlDefaultAPIEndpoint;
+    @ConfigProperty(name = OpenAIAPIService.ENV_LLM_SERVICE_ENDPOINT)
+    Optional<String> serviceEndpoint;
 
-    @Inject
-    @ConfigProperty(name = OpenAIAPIService.LLM_MODEL, defaultValue = "imixs-model")
-    String mlDefaultModel;
+    // @Inject
+    // @ConfigProperty(name = OpenAIAPIService.LLM_MODEL, defaultValue =
+    // "imixs-model")
+    // String mlDefaultModel;
 
     @Inject
     private WorkflowService workflowService;
@@ -202,7 +203,7 @@ public ItemCollection execute(ItemCollection workitem, ItemCollection event)
             // postPromptCompletion
             JsonObject jsonPrompt = llmService.buildJsonPromptObject(llmPrompt,
                     workitem.getItemValueString("ai.prompt.prompt_options"));
-            String completionResult = llmService.postPromptCompletion(llmAPIEndpoint, jsonPrompt);
+            String completionResult = llmService.postPromptCompletion(jsonPrompt, llmAPIEndpoint);
             // process the ai.result....
             if (llmAPIDebug) {
                 logger.info("===> Completion Result: ");
@@ -276,7 +277,10 @@ private String readPromptTemplate(ItemCollection event) {
 
     /**
      * This helper method parses the ml api endpoint either provided by a model
-     * definition or a imixs.property or an environment variable
+     * definition, an imixs.property, or an environment variable.
+     * <br />
+     * If no api endpoint is defined by the model, the adapter uses the default
+     * api endpoint.
      *
      * @param llmPrompt
      * @return
@@ -286,8 +290,7 @@ private String parseLLMEndpointByBPMN(ItemCollection llmPrompt) throws PluginExc
         boolean debug = logger.isLoggable(Level.FINE);
         String llmAPIEndpoint = null;
-        // test if the model provides a MLEndpoint. If not, the adapter uses the
-        // mlDefaultAPIEndpoint
+        // Test if the model provides an API endpoint.
         llmAPIEndpoint = null;
         if (llmPrompt != null) {
             llmAPIEndpoint = llmPrompt.getItemValueString("endpoint");
         }
@@ -296,8 +299,8 @@ private String parseLLMEndpointByBPMN(ItemCollection llmPrompt) throws PluginExc
         // switch to default api endpoint?
         if (llmAPIEndpoint == null || llmAPIEndpoint.isEmpty()) {
             // set defautl api endpoint if defined
-            if (mlDefaultAPIEndpoint.isPresent() && !mlDefaultAPIEndpoint.get().isEmpty()) {
-                llmAPIEndpoint = mlDefaultAPIEndpoint.get();
+            if (serviceEndpoint.isPresent() && !serviceEndpoint.get().isEmpty()) {
+                llmAPIEndpoint = serviceEndpoint.get();
             }
         }
         if (debug) {
diff --git a/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIService.java b/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIService.java
index 1578743..725e977 100644
--- a/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIService.java
+++ b/imixs-ai-workflow/src/main/java/org/imixs/ai/workflow/OpenAIAPIService.java
@@ -60,19 +60,24 @@ public class OpenAIAPIService implements Serializable {
     private static final long serialVersionUID = 1L;
     private static Logger logger = Logger.getLogger(OpenAIAPIService.class.getName());
 
-    public static final String ERROR_PROMPT_TEMPLATE = "ERROR_PROMPT_TEMPLATE";
-    public static final String ERROR_PROMPT_INFERENCE = "ERROR_PROMPT_INFERENCE";
+    public static final String ERROR_API = "ERROR_LLM_API";
+    public static final String ERROR_PROMPT_TEMPLATE = "ERROR_LLM_PROMPT_TEMPLATE";
+    public static final String ERROR_PROMPT_INFERENCE = "ERROR_LLM_PROMPT_INFERENCE";
     public static final String ITEM_AI_RESULT = "ai.result";
     public static final String ITEM_AI_RESULT_ITEM = "ai.result.item";
     public static final String ITEM_SUGGEST_ITEMS = "ai.suggest.items";
     public static final String ITEM_SUGGEST_MODE = "ai.suggest.mode";
 
-    public static final String LLM_SERVICE_ENDPOINT = "llm.service.endpoint";
     public static final String LLM_MODEL = "llm.model";
 
-    public static final String ENV_LLM_SERVICE_ENDPOINT_USER = "LLM_SERVICE_ENDPOINT_USER";
-    public static final String ENV_LLM_SERVICE_ENDPOINT_PASSWORD = "LLM_SERVICE_ENDPOINT_PASSWORD";
-    public static final String ENV_LLM_SERVICE_ENDPOINT_TIMEOUT = "LLM_SERVICE_TIMEOUT";
+    public static final String ENV_LLM_SERVICE_ENDPOINT = "llm.service.endpoint";
+    public static final String ENV_LLM_SERVICE_ENDPOINT_USER = "llm.service.endpoint.user";
+    public static final String ENV_LLM_SERVICE_ENDPOINT_PASSWORD = "llm.service.endpoint.password";
+    public static final String ENV_LLM_SERVICE_ENDPOINT_TIMEOUT = "llm.service.timeout";
+
+    @Inject
+    @ConfigProperty(name = ENV_LLM_SERVICE_ENDPOINT)
+    Optional<String> serviceEndpoint;
 
     @Inject
     @ConfigProperty(name = ENV_LLM_SERVICE_ENDPOINT_USER)
@@ -192,10 +197,13 @@ public String buildPrompt(String promptTemplate, ItemCollection workitem) throws
     }
 
     /**
-     * This method POST a given prompt to the endpoint '/completion' and returns the
-     * predicted completion.
-     * The method returns the response body.
-     *
+     * This method POSTs an LLM prompt to the service endpoint '/completion' and
+     * returns the predicted completion. <br /> The method returns the response body.
+     * <br />
+     * The endpoint is optional and can be null. If the endpoint is not provided,
+     * the method resolves it from the environment variable
+     * llm.service.endpoint.
+     * <br />
      * The method optional test if the environment variables
      * LLM_SERVICE_ENDPOINT_USER and LLM_SERVICE_ENDPOINT_PASSWORD are set. In this
      * case a BASIC Authentication is used for the connection to the LLMService.
@@ -212,13 +220,23 @@ public String buildPrompt(String promptTemplate, ItemCollection workitem) throws
      * --data '{"prompt": "Building a website can be done in 10 simple
      * steps:","n_predict": 128}'
      *
-     * @param xmlPromptData
+     * @param jsonPromptObject - an LLM json prompt object
+     * @param apiEndpoint      - optional service endpoint
      * @throws PluginException
      */
-    public String postPromptCompletion(String apiEndpoint, JsonObject jsonPromptObject)
+    public String postPromptCompletion(JsonObject jsonPromptObject, String apiEndpoint)
             throws PluginException {
         String response = null;
 
+        try {
+            if (apiEndpoint == null) {
+                // default to global endpoint
+                if (!serviceEndpoint.isPresent()) {
+                    throw new PluginException(OpenAIAPIService.class.getSimpleName(), ERROR_API,
+                            "imixs-ai llm service endpoint is empty!");
+                }
+                apiEndpoint = serviceEndpoint.get();
+            }
             if (!apiEndpoint.endsWith("/")) {
                 apiEndpoint = apiEndpoint + "/";
             }
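
Reviewer note (not part of the patch): the behavioral core of this change is the new fallback order in postPromptCompletion. An endpoint passed by the caller, e.g. the one resolved from the BPMN model by parseLLMEndpointByBPMN, wins; otherwise the injected llm.service.endpoint config value is used; if neither is available the method now fails fast with the new ERROR_LLM_API code. The sketch below illustrates that resolution order. It is a minimal, self-contained approximation: the class name, method name, and URLs are hypothetical, and IllegalStateException stands in for the Imixs PluginException so the snippet compiles without any Imixs dependencies.

import java.util.Optional;

public class EndpointFallbackSketch {

    // Mirrors the resolution order of postPromptCompletion(jsonPromptObject, apiEndpoint).
    // As in the patch, only a null endpoint triggers the fallback to the global
    // llm.service.endpoint value (injected there as Optional<String> serviceEndpoint).
    static String resolveEndpoint(String apiEndpoint, Optional<String> serviceEndpoint) {
        if (apiEndpoint == null) {
            // default to the global endpoint, or fail fast if it is not configured
            apiEndpoint = serviceEndpoint.orElseThrow(
                    () -> new IllegalStateException("imixs-ai llm service endpoint is empty!"));
        }
        // normalize with a trailing slash so path segments like 'completion'
        // can simply be appended
        if (!apiEndpoint.endsWith("/")) {
            apiEndpoint = apiEndpoint + "/";
        }
        return apiEndpoint;
    }

    public static void main(String[] args) {
        Optional<String> global = Optional.of("http://imixs-ai.local:8080");
        // an explicit endpoint wins and is normalized:
        System.out.println(resolveEndpoint("http://llama-cpp.local:8000", global));
        // a null endpoint falls back to the global config value:
        System.out.println(resolveEndpoint(null, global));
    }
}

A side effect of the new signature postPromptCompletion(jsonPromptObject, apiEndpoint) is that the now-optional endpoint argument comes last, which is why the call in OpenAIAPIAdapter.execute() swaps its arguments.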