Skip to content

Commit

Permalink
[OPIK-744] fix providers' models list (#1041)
Browse files Browse the repository at this point in the history
* OPIK-744 openai models list

* OPIK-744 anthropic models list

* OPIK-744 code cleanup

* OPIK-744 use existing enum for openai

* OPIK-744 ignore default

* OPIK-744 added missing openai models

* OPIK-744 post rebase changes

* OPIK-744 rename `ModelPrice` to `OpenaiModelPrice`

* Revert "OPIK-744 rename `ModelPrice` to `OpenaiModelPrice`"

This reverts commit c3ca2e0.

* Revert "OPIK-744 use existing enum for openai"

This reverts commit d18f06c.

* OPIK-744 post revert fixes
  • Loading branch information
idoberko2 authored Jan 20, 2025
1 parent c30572a commit 27e241a
Show file tree
Hide file tree
Showing 6 changed files with 82 additions and 17 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
package com.comet.opik.domain.llmproviders;

import lombok.RequiredArgsConstructor;

/**
 * Known Anthropic (Claude) model identifiers.
 *
 * <p>This information is taken from
 * <a href="https://docs.anthropic.com/en/docs/about-claude/models">Anthropic docs</a>.
 */
public enum AnthropicModelName {
    CLAUDE_3_5_SONNET_LATEST("claude-3-5-sonnet-latest"),
    CLAUDE_3_5_SONNET_20241022("claude-3-5-sonnet-20241022"),
    CLAUDE_3_5_HAIKU_LATEST("claude-3-5-haiku-latest"),
    CLAUDE_3_5_HAIKU_20241022("claude-3-5-haiku-20241022"),
    CLAUDE_3_5_SONNET_20240620("claude-3-5-sonnet-20240620"),
    CLAUDE_3_OPUS_LATEST("claude-3-opus-latest"),
    CLAUDE_3_OPUS_20240229("claude-3-opus-20240229"),
    CLAUDE_3_SONNET_20240229("claude-3-sonnet-20240229"),
    CLAUDE_3_HAIKU_20240307("claude-3-haiku-20240307"),
    ;

    // The exact model id string expected by the Anthropic API.
    private final String value;

    AnthropicModelName(String value) {
        this.value = value;
    }

    /**
     * Returns the Anthropic API model identifier, e.g. {@code "claude-3-opus-latest"}.
     */
    @Override
    public String toString() {
        return value;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@

import lombok.RequiredArgsConstructor;

/*
Langchain4j doesn't provide gemini models enum.
This information is taken from: https://ai.google.dev/gemini-api/docs/models/gemini
/**
* Langchain4j doesn't provide gemini models enum.
* This information is taken from <a href="https://ai.google.dev/gemini-api/docs/models/gemini">gemini docs</a>
*/
@RequiredArgsConstructor
public enum GeminiModelName {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
import com.comet.opik.api.LlmProvider;
import com.comet.opik.domain.LlmProviderApiKeyService;
import com.comet.opik.infrastructure.EncryptionUtils;
import dev.ai4j.openai4j.chat.ChatCompletionModel;
import dev.langchain4j.model.anthropic.AnthropicChatModelName;
import dev.langchain4j.model.chat.ChatLanguageModel;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
Expand Down Expand Up @@ -50,10 +48,10 @@ public ChatLanguageModel getLanguageModel(@NonNull String workspaceId,
* The agreed requirement is to resolve the LLM provider and its API key based on the model.
*/
private LlmProvider getLlmProvider(String model) {
if (isModelBelongToProvider(model, ChatCompletionModel.class, ChatCompletionModel::toString)) {
if (isModelBelongToProvider(model, OpenaiModelName.class, OpenaiModelName::toString)) {
return LlmProvider.OPEN_AI;
}
if (isModelBelongToProvider(model, AnthropicChatModelName.class, AnthropicChatModelName::toString)) {
if (isModelBelongToProvider(model, AnthropicModelName.class, AnthropicModelName::toString)) {
return LlmProvider.ANTHROPIC;
}
if (isModelBelongToProvider(model, GeminiModelName.class, GeminiModelName::toString)) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
package com.comet.opik.domain.llmproviders;

import lombok.RequiredArgsConstructor;

/**
 * Known OpenAI model identifiers.
 *
 * <p>This information is taken from
 * <a href="https://platform.openai.com/docs/models">openai docs</a>.
 */
public enum OpenaiModelName {
    CHATGPT_4O_LATEST("chatgpt-4o-latest"),
    GPT_4O("gpt-4o"),
    GPT_4O_2024_05_13("gpt-4o-2024-05-13"),
    GPT_4O_2024_08_06("gpt-4o-2024-08-06"),
    GPT_4O_2024_11_20("gpt-4o-2024-11-20"),
    GPT_4O_MINI("gpt-4o-mini"),
    GPT_4O_MINI_2024_07_18("gpt-4o-mini-2024-07-18"),
    GPT_3_5_TURBO("gpt-3.5-turbo"),
    GPT_3_5_TURBO_1106("gpt-3.5-turbo-1106"),
    GPT_3_5_TURBO_0125("gpt-3.5-turbo-0125"),
    GPT_4("gpt-4"),
    GPT_4_0613("gpt-4-0613"),
    GPT_4_0314("gpt-4-0314"),
    GPT_4_TURBO("gpt-4-turbo"),
    GPT_4_TURBO_2024_04_09("gpt-4-turbo-2024-04-09"),
    GPT_4_TURBO_PREVIEW("gpt-4-turbo-preview"),
    GPT_4_1106_PREVIEW("gpt-4-1106-preview"),
    GPT_4_0125_PREVIEW("gpt-4-0125-preview"),
    GPT_O1("o1"),
    GPT_O1_2024_12_17("o1-2024-12-17"),
    GPT_O1_MINI("o1-mini"),
    GPT_O1_MINI_2024_09_12("o1-mini-2024-09-12"),
    GPT_O1_PREVIEW("o1-preview"),
    GPT_O1_PREVIEW_2024_09_12("o1-preview-2024-09-12"),
    ;

    // The exact model id string expected by the OpenAI API.
    private final String value;

    OpenaiModelName(String value) {
        this.value = value;
    }

    /**
     * Returns the OpenAI API model identifier, e.g. {@code "gpt-4o-mini"}.
     */
    @Override
    public String toString() {
        return value;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,13 @@
import com.comet.opik.api.resources.utils.WireMockUtils;
import com.comet.opik.api.resources.utils.resources.ChatCompletionsClient;
import com.comet.opik.api.resources.utils.resources.LlmProviderApiKeyResourceClient;
import com.comet.opik.domain.llmproviders.AnthropicModelName;
import com.comet.opik.domain.llmproviders.GeminiModelName;
import com.comet.opik.domain.llmproviders.OpenaiModelName;
import com.comet.opik.podam.PodamFactoryUtils;
import com.redis.testcontainers.RedisContainer;
import dev.ai4j.openai4j.chat.ChatCompletionModel;
import dev.ai4j.openai4j.chat.ChatCompletionRequest;
import dev.ai4j.openai4j.chat.Role;
import dev.langchain4j.model.anthropic.AnthropicChatModelName;
import org.apache.http.HttpStatus;
import org.jdbi.v3.core.Jdbi;
import org.junit.jupiter.api.BeforeAll;
Expand Down Expand Up @@ -222,9 +222,9 @@ void createAndStreamResponse(String expectedModel, LlmProvider llmProvider, Stri

private static Stream<Arguments> testModelsProvider() {
return Stream.of(
arguments(ChatCompletionModel.GPT_4O_MINI.toString(), LlmProvider.OPEN_AI,
arguments(OpenaiModelName.GPT_4O_MINI.toString(), LlmProvider.OPEN_AI,
UUID.randomUUID().toString()),
arguments(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString(), LlmProvider.ANTHROPIC,
arguments(AnthropicModelName.CLAUDE_3_5_SONNET_20240620.toString(), LlmProvider.ANTHROPIC,
System.getenv("ANTHROPIC_API_KEY")),
arguments(GeminiModelName.GEMINI_1_0_PRO.toString(), LlmProvider.GEMINI,
System.getenv("GEMINI_AI_KEY")));
Expand Down Expand Up @@ -274,12 +274,12 @@ private Stream<Arguments> createAnthropicValidateMandatoryFields() {
return Stream.of(
arguments(named("no messages", podamFactory.manufacturePojo(ChatCompletionRequest.Builder.class)
.stream(false)
.model(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString())
.model(AnthropicModelName.CLAUDE_3_5_SONNET_20240620.toString())
.maxCompletionTokens(100).build()),
ERROR_EMPTY_MESSAGES),
arguments(named("no max tokens", podamFactory.manufacturePojo(ChatCompletionRequest.Builder.class)
.stream(false)
.model(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString())
.model(AnthropicModelName.CLAUDE_3_5_SONNET_20240620.toString())
.addUserMessage("Say 'Hello World'").build()),
ERROR_NO_COMPLETION_TOKENS));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@
import com.comet.opik.infrastructure.LlmProviderClientConfig;
import com.comet.opik.infrastructure.OpikConfiguration;
import com.fasterxml.jackson.databind.ObjectMapper;
import dev.ai4j.openai4j.chat.ChatCompletionModel;
import dev.langchain4j.model.anthropic.AnthropicChatModelName;
import io.dropwizard.configuration.ConfigurationException;
import io.dropwizard.configuration.FileConfigurationSourceProvider;
import io.dropwizard.configuration.YamlConfigurationFactory;
Expand Down Expand Up @@ -79,9 +77,9 @@ void testGetService(String model, LlmProvider llmProvider, Class<? extends LlmPr
}

private static Stream<Arguments> testGetService() {
var openAiModels = EnumUtils.getEnumList(ChatCompletionModel.class).stream()
var openAiModels = EnumUtils.getEnumList(OpenaiModelName.class).stream()
.map(model -> arguments(model.toString(), LlmProvider.OPEN_AI, LlmProviderOpenAi.class));
var anthropicModels = EnumUtils.getEnumList(AnthropicChatModelName.class).stream()
var anthropicModels = EnumUtils.getEnumList(AnthropicModelName.class).stream()
.map(model -> arguments(model.toString(), LlmProvider.ANTHROPIC, LlmProviderAnthropic.class));
var geminiModels = EnumUtils.getEnumList(GeminiModelName.class).stream()
.map(model -> arguments(model.toString(), LlmProvider.GEMINI, LlmProviderGemini.class));
Expand Down

0 comments on commit 27e241a

Please sign in to comment.