
Commit 7e62ac5

refactoring
Issue #26
rsoika committed Oct 13, 2024
1 parent 9d89df0 commit 7e62ac5
Showing 2 changed files with 44 additions and 23 deletions.
OpenAIAPIAdapter.java
@@ -109,12 +109,13 @@ public class OpenAIAPIAdapter implements SignalAdapter {
     private static Logger logger = Logger.getLogger(OpenAIAPIAdapter.class.getName());

     @Inject
-    @ConfigProperty(name = OpenAIAPIService.LLM_SERVICE_ENDPOINT)
-    Optional<String> mlDefaultAPIEndpoint;
+    @ConfigProperty(name = OpenAIAPIService.ENV_LLM_SERVICE_ENDPOINT)
+    Optional<String> serviceEndpoint;

-    @Inject
-    @ConfigProperty(name = OpenAIAPIService.LLM_MODEL, defaultValue = "imixs-model")
-    String mlDefaultModel;
+    // @Inject
+    // @ConfigProperty(name = OpenAIAPIService.LLM_MODEL, defaultValue =
+    // "imixs-model")
+    // String mlDefaultModel;

     @Inject
     private WorkflowService workflowService;
@@ -202,7 +203,7 @@ public ItemCollection execute(ItemCollection workitem, ItemCollection event)
             // postPromptCompletion
             JsonObject jsonPrompt = llmService.buildJsonPromptObject(llmPrompt,
                     workitem.getItemValueString("ai.prompt.prompt_options"));
-            String completionResult = llmService.postPromptCompletion(llmAPIEndpoint, jsonPrompt);
+            String completionResult = llmService.postPromptCompletion(jsonPrompt, llmAPIEndpoint);
             // process the ai.result....
             if (llmAPIDebug) {
                 logger.info("===> Completion Result: ");
@@ -276,7 +277,10 @@ private String readPromptTemplate(ItemCollection event) {

     /**
      * This helper method parses the ml api endpoint either provided by a model
-     * definition or a imixs.property or an environment variable
+     * definition, an imixs.property or an environment variable.
+     * <p>
+     * If no api endpoint is defined by the model, the adapter uses the default api
+     * endpoint.
      *
      * @param llmPrompt
      * @return
@@ -286,8 +290,7 @@ private String parseLLMEndpointByBPMN(ItemCollection llmPrompt) throws PluginException {
         boolean debug = logger.isLoggable(Level.FINE);
         String llmAPIEndpoint = null;

-        // test if the model provides a MLEndpoint. If not, the adapter uses the
-        // mlDefaultAPIEndpoint
+        // Test if the model provides an API endpoint.
         llmAPIEndpoint = null;
         if (llmPrompt != null) {
             llmAPIEndpoint = llmPrompt.getItemValueString("endpoint");
@@ -296,8 +299,8 @@
         // switch to default api endpoint?
         if (llmAPIEndpoint == null || llmAPIEndpoint.isEmpty()) {
             // set default api endpoint if defined
-            if (mlDefaultAPIEndpoint.isPresent() && !mlDefaultAPIEndpoint.get().isEmpty()) {
-                llmAPIEndpoint = mlDefaultAPIEndpoint.get();
+            if (serviceEndpoint.isPresent() && !serviceEndpoint.get().isEmpty()) {
+                llmAPIEndpoint = serviceEndpoint.get();
             }
         }
         if (debug) {
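Note: after this refactoring the endpoint resolution has two steps: an 'endpoint' item in the BPMN model definition wins, otherwise the adapter falls back to the injected 'llm.service.endpoint' value. A minimal sketch of that order follows; the class and helper names are illustrative, not part of the commit.

import java.util.Optional;

// Illustrative sketch (not part of the commit) of the resolution order in
// parseLLMEndpointByBPMN: a model-provided endpoint wins, otherwise the
// injected llm.service.endpoint value is used.
public class EndpointResolutionSketch {

    static String resolveEndpoint(String modelEndpoint, Optional<String> serviceEndpoint) {
        // 1. an endpoint defined in the BPMN model wins
        if (modelEndpoint != null && !modelEndpoint.isEmpty()) {
            return modelEndpoint;
        }
        // 2. otherwise fall back to the configured default endpoint (may be null)
        return serviceEndpoint.filter(s -> !s.isEmpty()).orElse(null);
    }

    public static void main(String[] args) {
        // model defines no endpoint -> the configured default applies
        System.out.println(resolveEndpoint(null, Optional.of("http://llm-host:8080/")));
    }
}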
OpenAIAPIService.java
@@ -60,19 +60,24 @@ public class OpenAIAPIService implements Serializable {
     private static final long serialVersionUID = 1L;
     private static Logger logger = Logger.getLogger(OpenAIAPIService.class.getName());

-    public static final String ERROR_PROMPT_TEMPLATE = "ERROR_PROMPT_TEMPLATE";
-    public static final String ERROR_PROMPT_INFERENCE = "ERROR_PROMPT_INFERENCE";
+    public static final String ERROR_API = "ERROR_LLM_API";
+    public static final String ERROR_PROMPT_TEMPLATE = "ERROR_LLM_PROMPT_TEMPLATE";
+    public static final String ERROR_PROMPT_INFERENCE = "ERROR_LLM_PROMPT_INFERENCE";
     public static final String ITEM_AI_RESULT = "ai.result";
     public static final String ITEM_AI_RESULT_ITEM = "ai.result.item";
     public static final String ITEM_SUGGEST_ITEMS = "ai.suggest.items";
     public static final String ITEM_SUGGEST_MODE = "ai.suggest.mode";

-    public static final String LLM_SERVICE_ENDPOINT = "llm.service.endpoint";
-    public static final String LLM_MODEL = "llm.model";
-
-    public static final String ENV_LLM_SERVICE_ENDPOINT_USER = "LLM_SERVICE_ENDPOINT_USER";
-    public static final String ENV_LLM_SERVICE_ENDPOINT_PASSWORD = "LLM_SERVICE_ENDPOINT_PASSWORD";
-    public static final String ENV_LLM_SERVICE_ENDPOINT_TIMEOUT = "LLM_SERVICE_TIMEOUT";
+    public static final String ENV_LLM_SERVICE_ENDPOINT = "llm.service.endpoint";
+    public static final String ENV_LLM_SERVICE_ENDPOINT_USER = "llm.service.endpoint.user";
+    public static final String ENV_LLM_SERVICE_ENDPOINT_PASSWORD = "llm.service.endpoint.password";
+    public static final String ENV_LLM_SERVICE_ENDPOINT_TIMEOUT = "llm.service.timeout";
+
+    @Inject
+    @ConfigProperty(name = ENV_LLM_SERVICE_ENDPOINT)
+    Optional<String> serviceEndpoint;

     @Inject
     @ConfigProperty(name = ENV_LLM_SERVICE_ENDPOINT_USER)
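Note: the renamed ENV_* constants now hold dotted MicroProfile Config keys instead of raw environment variable names. The same setting can still be supplied via the environment, because MicroProfile Config maps 'llm.service.endpoint' to LLM_SERVICE_ENDPOINT (non-alphanumeric characters become underscores, upper-cased). A small lookup sketch, assuming a MicroProfile Config implementation on the classpath; the fallback URL is an example value:

import java.util.Optional;

import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;

// Programmatic equivalent of the @Inject @ConfigProperty field in the diff above.
public class ConfigLookupSketch {
    public static void main(String[] args) {
        Config config = ConfigProvider.getConfig();
        Optional<String> endpoint = config.getOptionalValue("llm.service.endpoint", String.class);
        // prints the configured endpoint or an example fallback
        System.out.println("LLM endpoint: " + endpoint.orElse("http://localhost:8080/"));
    }
}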
@@ -192,10 +197,13 @@ public String buildPrompt(String promptTemplate, ItemCollection workitem) throws
     }

     /**
-     * This method POST a given prompt to the endpoint '/completion' and returns the
-     * predicted completion.
-     * The method returns the response body.
-     *
+     * This method POSTs an LLM prompt to the service endpoint '/completion' and
+     * returns the predicted completion. The method returns the response body.
+     * <p>
+     * The endpoint is optional and can be null. If the endpoint is not provided,
+     * the method resolves the endpoint from the environment variable
+     * <code>llm.service.endpoint</code>.
+     * <p>
+     * The method optionally tests if the environment variables
+     * LLM_SERVICE_ENDPOINT_USER and LLM_SERVICE_ENDPOINT_PASSWORD are set. In this
+     * case a BASIC Authentication is used for the connection to the LLMService.
@@ -212,13 +220,23 @@ public String buildPrompt(String promptTemplate, ItemCollection workitem) throws
      * --data '{"prompt": "Building a website can be done in 10 simple
      * steps:","n_predict": 128}'
      *
-     * @param xmlPromptData
+     * @param jsonPromptObject - an LLM json prompt object
+     * @param apiEndpoint      - optional service endpoint
      * @throws PluginException
      */
-    public String postPromptCompletion(String apiEndpoint, JsonObject jsonPromptObject)
+    public String postPromptCompletion(JsonObject jsonPromptObject, String apiEndpoint)
             throws PluginException {
         String response = null;

         try {
+            if (apiEndpoint == null) {
+                // default to global endpoint
+                if (!serviceEndpoint.isPresent()) {
+                    throw new PluginException(OpenAIAPIService.class.getSimpleName(), ERROR_API,
+                            "imixs-ai llm service endpoint is empty!");
+                }
+                apiEndpoint = serviceEndpoint.get();
+            }
             if (!apiEndpoint.endsWith("/")) {
                 apiEndpoint = apiEndpoint + "/";
             }
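Note: with the reordered signature callers pass the JSON prompt first and may pass null as the endpoint to use the configured default; if 'llm.service.endpoint' is unset, the service raises a PluginException with the new ERROR_LLM_API code. A hedged caller sketch follows; the class name, the jakarta imports and the prompt fields are assumptions, only the argument order comes from the commit.

import jakarta.inject.Inject;
import jakarta.json.Json;
import jakarta.json.JsonObject;

import org.imixs.workflow.exceptions.PluginException;

// Hypothetical caller of the refactored service (not part of the commit).
public class CompletionCallerSketch {

    @Inject
    OpenAIAPIService llmService;

    public String complete(String promptText) throws PluginException {
        JsonObject jsonPrompt = Json.createObjectBuilder()
                .add("prompt", promptText)
                .add("n_predict", 128)
                .build();
        // null endpoint -> the service resolves llm.service.endpoint itself
        return llmService.postPromptCompletion(jsonPrompt, null);
    }
}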
