From 27e241a4bce1a10f8b10fe6bbaae03a4c28ccf78 Mon Sep 17 00:00:00 2001
From: Ido Berkovich <ido@comet.com>
Date: Mon, 20 Jan 2025 10:53:55 +0200
Subject: [PATCH] [OPIK-744] fix providers' models list (#1041)

* OPIK-744 openai models list

* OPIK-744 anthropic models list

* OPIK-744 code cleanup

* OPIK-744 use existing enum for openai

* OPIK-744 ignore default

* OPIK-744 added missing openai models

* OPIK-744 post rebase changes

* OPIK-744 rename `ModelPrice` to `OpenaiModelPrice`

* Revert "OPIK-744 rename `ModelPrice` to `OpenaiModelPrice`"

This reverts commit c3ca2e077f3dc50dc1e89e67d5cb0e47d33a0f47.

* Revert "OPIK-744 use existing enum for openai"

This reverts commit d18f06c5.

* OPIK-744 post revert fixes
---
 .../llmproviders/AnthropicModelName.java      | 27 ++++++++++++
 .../domain/llmproviders/GeminiModelName.java  |  6 +--
 .../llmproviders/LlmProviderFactory.java      |  6 +--
 .../domain/llmproviders/OpenaiModelName.java  | 42 +++++++++++++++++++
 .../v1/priv/ChatCompletionsResourceTest.java  | 12 +++---
 .../llmproviders/LlmProviderFactoryTest.java  |  6 +--
 6 files changed, 82 insertions(+), 17 deletions(-)
 create mode 100644 apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/AnthropicModelName.java
 create mode 100644 apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/OpenaiModelName.java

diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/AnthropicModelName.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/AnthropicModelName.java
new file mode 100644
index 0000000000..25759662bc
--- /dev/null
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/AnthropicModelName.java
@@ -0,0 +1,27 @@
+package com.comet.opik.domain.llmproviders;
+
+import lombok.RequiredArgsConstructor;
+
+/**
+ * This information is taken from <a href="https://docs.anthropic.com/en/docs/about-claude/models">Anthropic docs</a>
+ */
+@RequiredArgsConstructor
+public enum AnthropicModelName {
+    CLAUDE_3_5_SONNET_LATEST("claude-3-5-sonnet-latest"),
+    CLAUDE_3_5_SONNET_20241022("claude-3-5-sonnet-20241022"),
+    CLAUDE_3_5_HAIKU_LATEST("claude-3-5-haiku-latest"),
+    CLAUDE_3_5_HAIKU_20241022("claude-3-5-haiku-20241022"),
+    CLAUDE_3_5_SONNET_20240620("claude-3-5-sonnet-20240620"),
+    CLAUDE_3_OPUS_LATEST("claude-3-opus-latest"),
+    CLAUDE_3_OPUS_20240229("claude-3-opus-20240229"),
+    CLAUDE_3_SONNET_20240229("claude-3-sonnet-20240229"),
+    CLAUDE_3_HAIKU_20240307("claude-3-haiku-20240307"),
+    ;
+
+    private final String value;
+
+    @Override
+    public String toString() {
+        return value;
+    }
+}
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/GeminiModelName.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/GeminiModelName.java
index 25ae957f23..a7a83d160f 100644
--- a/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/GeminiModelName.java
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/GeminiModelName.java
@@ -2,9 +2,9 @@
 
 import lombok.RequiredArgsConstructor;
 
-/*
-Langchain4j doesn't provide gemini models enum.
-This information is taken from: https://ai.google.dev/gemini-api/docs/models/gemini
+/**
+ * Langchain4j doesn't provide gemini models enum.
+ * This information is taken from <a href="https://ai.google.dev/gemini-api/docs/models/gemini">gemini docs</a>
  */
 @RequiredArgsConstructor
 public enum GeminiModelName {
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/LlmProviderFactory.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/LlmProviderFactory.java
index 5e19cda445..3b1d6f5d2f 100644
--- a/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/LlmProviderFactory.java
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/LlmProviderFactory.java
@@ -4,8 +4,6 @@
 import com.comet.opik.api.LlmProvider;
 import com.comet.opik.domain.LlmProviderApiKeyService;
 import com.comet.opik.infrastructure.EncryptionUtils;
-import dev.ai4j.openai4j.chat.ChatCompletionModel;
-import dev.langchain4j.model.anthropic.AnthropicChatModelName;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import jakarta.inject.Inject;
 import jakarta.inject.Singleton;
@@ -50,10 +48,10 @@ public ChatLanguageModel getLanguageModel(@NonNull String workspaceId,
      * The agreed requirement is to resolve the LLM provider and its API key based on the model.
      */
     private LlmProvider getLlmProvider(String model) {
-        if (isModelBelongToProvider(model, ChatCompletionModel.class, ChatCompletionModel::toString)) {
+        if (isModelBelongToProvider(model, OpenaiModelName.class, OpenaiModelName::toString)) {
             return LlmProvider.OPEN_AI;
         }
-        if (isModelBelongToProvider(model, AnthropicChatModelName.class, AnthropicChatModelName::toString)) {
+        if (isModelBelongToProvider(model, AnthropicModelName.class, AnthropicModelName::toString)) {
             return LlmProvider.ANTHROPIC;
         }
         if (isModelBelongToProvider(model, GeminiModelName.class, GeminiModelName::toString)) {
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/OpenaiModelName.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/OpenaiModelName.java
new file mode 100644
index 0000000000..c014c40ced
--- /dev/null
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/OpenaiModelName.java
@@ -0,0 +1,42 @@
+package com.comet.opik.domain.llmproviders;
+
+import lombok.RequiredArgsConstructor;
+
+/**
+ * This information is taken from <a href="https://platform.openai.com/docs/models">openai docs</a>
+ */
+@RequiredArgsConstructor
+public enum OpenaiModelName {
+    CHATGPT_4O_LATEST("chatgpt-4o-latest"),
+    GPT_4O("gpt-4o"),
+    GPT_4O_2024_05_13("gpt-4o-2024-05-13"),
+    GPT_4O_2024_08_06("gpt-4o-2024-08-06"),
+    GPT_4O_2024_11_20("gpt-4o-2024-11-20"),
+    GPT_4O_MINI("gpt-4o-mini"),
+    GPT_4O_MINI_2024_07_18("gpt-4o-mini-2024-07-18"),
+    GPT_3_5_TURBO("gpt-3.5-turbo"),
+    GPT_3_5_TURBO_1106("gpt-3.5-turbo-1106"),
+    GPT_3_5_TURBO_0125("gpt-3.5-turbo-0125"),
+    GPT_4("gpt-4"),
+    GPT_4_0613("gpt-4-0613"),
+    GPT_4_0314("gpt-4-0314"),
+    GPT_4_TURBO("gpt-4-turbo"),
+    GPT_4_TURBO_2024_04_09("gpt-4-turbo-2024-04-09"),
+    GPT_4_TURBO_PREVIEW("gpt-4-turbo-preview"),
+    GPT_4_1106_PREVIEW("gpt-4-1106-preview"),
+    GPT_4_0125_PREVIEW("gpt-4-0125-preview"),
+    GPT_O1("o1"),
+    GPT_O1_2024_12_17("o1-2024-12-17"),
+    GPT_O1_MINI("o1-mini"),
+    GPT_O1_MINI_2024_09_12("o1-mini-2024-09-12"),
+    GPT_O1_PREVIEW("o1-preview"),
+    GPT_O1_PREVIEW_2024_09_12("o1-preview-2024-09-12"),
+    ;
+
+    private final String value;
+
+    @Override
+    public String toString() {
+        return value;
+    }
+}
diff --git a/apps/opik-backend/src/test/java/com/comet/opik/api/resources/v1/priv/ChatCompletionsResourceTest.java b/apps/opik-backend/src/test/java/com/comet/opik/api/resources/v1/priv/ChatCompletionsResourceTest.java
index 4e5d2d1855..8c3d5cc830 100644
--- a/apps/opik-backend/src/test/java/com/comet/opik/api/resources/v1/priv/ChatCompletionsResourceTest.java
+++ b/apps/opik-backend/src/test/java/com/comet/opik/api/resources/v1/priv/ChatCompletionsResourceTest.java
@@ -11,13 +11,13 @@
 import com.comet.opik.api.resources.utils.WireMockUtils;
 import com.comet.opik.api.resources.utils.resources.ChatCompletionsClient;
 import com.comet.opik.api.resources.utils.resources.LlmProviderApiKeyResourceClient;
+import com.comet.opik.domain.llmproviders.AnthropicModelName;
 import com.comet.opik.domain.llmproviders.GeminiModelName;
+import com.comet.opik.domain.llmproviders.OpenaiModelName;
 import com.comet.opik.podam.PodamFactoryUtils;
 import com.redis.testcontainers.RedisContainer;
-import dev.ai4j.openai4j.chat.ChatCompletionModel;
 import dev.ai4j.openai4j.chat.ChatCompletionRequest;
 import dev.ai4j.openai4j.chat.Role;
-import dev.langchain4j.model.anthropic.AnthropicChatModelName;
 import org.apache.http.HttpStatus;
 import org.jdbi.v3.core.Jdbi;
 import org.junit.jupiter.api.BeforeAll;
@@ -222,9 +222,9 @@ void createAndStreamResponse(String expectedModel, LlmProvider llmProvider, Stri
 
         private static Stream<Arguments> testModelsProvider() {
             return Stream.of(
-                    arguments(ChatCompletionModel.GPT_4O_MINI.toString(), LlmProvider.OPEN_AI,
+                    arguments(OpenaiModelName.GPT_4O_MINI.toString(), LlmProvider.OPEN_AI,
                             UUID.randomUUID().toString()),
-                    arguments(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString(), LlmProvider.ANTHROPIC,
+                    arguments(AnthropicModelName.CLAUDE_3_5_SONNET_20240620.toString(), LlmProvider.ANTHROPIC,
                             System.getenv("ANTHROPIC_API_KEY")),
                     arguments(GeminiModelName.GEMINI_1_0_PRO.toString(), LlmProvider.GEMINI,
                             System.getenv("GEMINI_AI_KEY")));
@@ -274,12 +274,12 @@ private Stream<Arguments> createAnthropicValidateMandatoryFields() {
         return Stream.of(
                 arguments(named("no messages", podamFactory.manufacturePojo(ChatCompletionRequest.Builder.class)
                         .stream(false)
-                        .model(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString())
+                        .model(AnthropicModelName.CLAUDE_3_5_SONNET_20240620.toString())
                         .maxCompletionTokens(100).build()),
                         ERROR_EMPTY_MESSAGES),
                 arguments(named("no max tokens", podamFactory.manufacturePojo(ChatCompletionRequest.Builder.class)
                         .stream(false)
-                        .model(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString())
+                        .model(AnthropicModelName.CLAUDE_3_5_SONNET_20240620.toString())
                         .addUserMessage("Say 'Hello World'").build()),
                         ERROR_NO_COMPLETION_TOKENS));
     }
diff --git a/apps/opik-backend/src/test/java/com/comet/opik/domain/llmproviders/LlmProviderFactoryTest.java b/apps/opik-backend/src/test/java/com/comet/opik/domain/llmproviders/LlmProviderFactoryTest.java
index c42e4489d0..5d7a2725f6 100644
--- a/apps/opik-backend/src/test/java/com/comet/opik/domain/llmproviders/LlmProviderFactoryTest.java
+++ b/apps/opik-backend/src/test/java/com/comet/opik/domain/llmproviders/LlmProviderFactoryTest.java
@@ -7,8 +7,6 @@
 import com.comet.opik.infrastructure.LlmProviderClientConfig;
 import com.comet.opik.infrastructure.OpikConfiguration;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import dev.ai4j.openai4j.chat.ChatCompletionModel;
-import dev.langchain4j.model.anthropic.AnthropicChatModelName;
 import io.dropwizard.configuration.ConfigurationException;
 import io.dropwizard.configuration.FileConfigurationSourceProvider;
 import io.dropwizard.configuration.YamlConfigurationFactory;
@@ -79,9 +77,9 @@ void testGetService(String model, LlmProvider llmProvider, Class<? extends LlmPr
     }
 
     private static Stream<Arguments> testGetService() {
-        var openAiModels = EnumUtils.getEnumList(ChatCompletionModel.class).stream()
+        var openAiModels = EnumUtils.getEnumList(OpenaiModelName.class).stream()
                 .map(model -> arguments(model.toString(), LlmProvider.OPEN_AI, LlmProviderOpenAi.class));
-        var anthropicModels = EnumUtils.getEnumList(AnthropicChatModelName.class).stream()
+        var anthropicModels = EnumUtils.getEnumList(AnthropicModelName.class).stream()
                 .map(model -> arguments(model.toString(), LlmProvider.ANTHROPIC, LlmProviderAnthropic.class));
         var geminiModels = EnumUtils.getEnumList(GeminiModelName.class).stream()
                 .map(model -> arguments(model.toString(), LlmProvider.GEMINI, LlmProviderGemini.class));