[OPIK-718] quickstart page integrations update #1034

Merged 13 commits on Jan 15, 2025
Binary file modified apps/opik-frontend/public/images/integrations/langchain.png
Binary file modified apps/opik-frontend/public/images/integrations/litellm.png
Binary file modified apps/opik-frontend/public/images/integrations/openai.png
Binary file modified apps/opik-frontend/public/images/integrations/python.png
Binary file modified apps/opik-frontend/public/images/integrations/ragas.png
@@ -5,6 +5,7 @@ import { buildDocsUrl } from "@/lib/utils";
 import { SquareArrowOutUpRight } from "lucide-react";
 import ApiKeyCard from "../ApiKeyCard/ApiKeyCard";
 import GoogleColabCard from "../GoogleColabCard/GoogleColabCard";
+import IntegrationTemplate from "./IntegrationTemplate";

 type FrameworkIntegrationsProps = {
   integrationList: FrameworkIntegration[];
@@ -34,7 +35,7 @@ const FrameworkIntegrations: React.FC<FrameworkIntegrationsProps> = ({
             <img
               alt={item.label}
               src={item.logo}
-              className="size-[22px] shrink-0"
+              className="size-[32px] shrink-0"
             />
             <div className="ml-1 truncate">{item.label}</div>
           </li>
@@ -53,7 +54,7 @@ const FrameworkIntegrations: React.FC<FrameworkIntegrationsProps> = ({
       </div>
       <div className="flex min-w-[650px] flex-1 gap-6">
         <div className="flex w-full flex-1 flex-col">
-          <integration.component apiKey={apiKey} />
+          <IntegrationTemplate code={integration.code} apiKey={apiKey} />
         </div>

         <div className="sticky top-20 flex w-[250px] shrink-0 flex-col gap-6 self-start">
@@ -0,0 +1,79 @@
import React from "react";
import CodeHighlighter from "@/components/shared/CodeHighlighter/CodeHighlighter";
import useAppStore from "@/store/AppStore";
import { BASE_API_URL } from "@/api/api";
import { maskAPIKey } from "@/lib/utils";

const CODE_BLOCK_1 = "pip install opik";

// Placeholder that every integration snippet contains; it is replaced with
// real Opik configuration before the snippet is rendered or copied.
export const OPIK_API_KEY_TEMPLATE = "# INJECT_OPIK_CONFIGURATION";

type PutConfigInCodeArgs = {
  code: string;
  workspaceName: string;
  apiKey?: string;
  maskApiKey?: boolean;
};

// Swap the placeholder for either cloud credentials (API key + workspace)
// or, when no API key is available, a URL override for a local deployment.
const putConfigInCode = ({
  code,
  workspaceName,
  apiKey,
  maskApiKey,
}: PutConfigInCodeArgs): string => {
  if (apiKey) {
    return code.replace(
      OPIK_API_KEY_TEMPLATE,
      `os.environ["OPIK_API_KEY"] = "${
        maskApiKey ? maskAPIKey(apiKey) : apiKey
      }"\nos.environ["OPIK_WORKSPACE"] = "${workspaceName}"`,
    );
  }

  return code.replace(
    OPIK_API_KEY_TEMPLATE,
    `os.environ["OPIK_URL_OVERRIDE"] = "${window.location.origin}${BASE_API_URL}"`,
  );
};

type IntegrationTemplateProps = {
  apiKey?: string;
  code: string;
};

const IntegrationTemplate: React.FC<IntegrationTemplateProps> = ({
  apiKey,
  code,
}) => {
  const workspaceName = useAppStore((state) => state.activeWorkspaceName);
  // Masked key for on-screen display
  const codeWithConfig = putConfigInCode({
    code,
    workspaceName,
    apiKey,
    maskApiKey: true,
  });
  // Unmasked key for the copy-to-clipboard action
  const codeWithConfigToCopy = putConfigInCode({ code, workspaceName, apiKey });

  return (
    <div className="flex flex-col gap-6 rounded-md border bg-white p-6">
      <div>
        <div className="comet-body-s mb-3">
          1. Install Opik using pip from the command line.
        </div>
        <div className="min-h-7">
          <CodeHighlighter data={CODE_BLOCK_1} />
        </div>
      </div>
      <div>
        <div className="comet-body-s mb-3">
          2. Run the following code to get started.
        </div>
        <CodeHighlighter
          data={codeWithConfig}
          copyData={codeWithConfigToCopy}
        />
      </div>
    </div>
  );
};

export default IntegrationTemplate;
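
For orientation, the two branches of putConfigInCode expand the # INJECT_OPIK_CONFIGURATION placeholder into Python along these lines (a sketch; the key, workspace, and URL values below are illustrative placeholders, not real output):

    # Cloud: an API key is available, so credentials and workspace are injected
    os.environ["OPIK_API_KEY"] = "your-api-key"  # shown masked in the rendered view
    os.environ["OPIK_WORKSPACE"] = "your-workspace-name"

    # Self-hosted: no API key, so the SDK is pointed at the local server instead
    os.environ["OPIK_URL_OVERRIDE"] = "http://localhost:5173/api"  # window.location.origin + BASE_API_URL (illustrative)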
@@ -0,0 +1,25 @@
import getpass
import os

import anthropic
from opik.integrations.anthropic import track_anthropic

# INJECT_OPIK_CONFIGURATION

if "ANTHROPIC_API_KEY" not in os.environ:
    os.environ["ANTHROPIC_API_KEY"] = getpass.getpass("Enter your Anthropic API key: ")

anthropic_client = anthropic.Anthropic()

# Wrap the client so every call is traced in Opik
anthropic_client = track_anthropic(anthropic_client)

PROMPT = "Why is it important to use an LLM monitoring tool like CometML Opik that allows you to log traces and spans when working with Anthropic LLM models?"

response = anthropic_client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[
        {"role": "user", "content": PROMPT},
    ],
)
print("Response", response.content[0].text)
@@ -0,0 +1,28 @@
import os

import boto3
from opik.integrations.bedrock import track_bedrock

# INJECT_OPIK_CONFIGURATION

REGION = "us-east-1"
MODEL_ID = "us.meta.llama3-2-3b-instruct-v1:0"

bedrock = boto3.client(
    service_name="bedrock-runtime",
    region_name=REGION,
    # aws_access_key_id=ACCESS_KEY,
    # aws_secret_access_key=SECRET_KEY,
    # aws_session_token=SESSION_TOKEN,
)

# Wrap the client so every converse call is traced in Opik
bedrock_client = track_bedrock(bedrock)

PROMPT = "Why is it important to use an LLM monitoring tool like CometML Opik that allows you to log traces and spans when working with LLM models hosted on AWS Bedrock?"

response = bedrock_client.converse(
    modelId=MODEL_ID,
    messages=[{"role": "user", "content": [{"text": PROMPT}]}],
    inferenceConfig={"temperature": 0.5, "maxTokens": 512, "topP": 0.9},
)
print("Response", response["output"]["message"]["content"][0]["text"])
@@ -0,0 +1,27 @@
import getpass
import os

import dspy
from opik.integrations.dspy.callback import OpikCallback

# INJECT_OPIK_CONFIGURATION

if "OPENAI_API_KEY" not in os.environ:
    os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter your OpenAI API key: ")

project_name = "DSPY"

lm = dspy.LM(
    model="openai/gpt-4o-mini",
)
dspy.configure(lm=lm)

opik_callback = OpikCallback(project_name=project_name)
dspy.settings.configure(
    callbacks=[opik_callback],
)

cot = dspy.ChainOfThought("question -> answer")
print(cot(question="What is the meaning of life?"))
@@ -0,0 +1,49 @@
import getpass
import os

import openai
from opik import track

# INJECT_OPIK_CONFIGURATION

if "OPENAI_API_KEY" not in os.environ:
    os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter your OpenAI API key: ")

client = openai.OpenAI()


@track
def retrieve_context(input_text):
    # Your retrieval logic here; for the example we return a hardcoded list of strings
    return [
        "What specific information are you looking for?",
        "How can I assist you with your interests today?",
        "Are there any topics you'd like to explore or learn more about?",
    ]


@track
def generate_response(input_text, context):
    full_prompt = (
        f"If the user asks a question that is not specific, use the context to provide a relevant response.\n"
        f"Context: {', '.join(context)}\n"
        f"User: {input_text}\n"
        f"AI:"
    )

    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": full_prompt}],
    )
    return response.choices[0].message.content


@track(name="my_llm_application")
def llm_chain(input_text):
    context = retrieve_context(input_text)
    return generate_response(input_text, context)


# Use the LLM chain
result = llm_chain("Hello, how are you?")
print(result)
@@ -0,0 +1,24 @@
import getpass
import os

import litellm
from litellm.integrations.opik.opik import OpikLogger

# INJECT_OPIK_CONFIGURATION

if "GEMINI_API_KEY" not in os.environ:
    os.environ["GEMINI_API_KEY"] = getpass.getpass("Enter your Gemini API key: ")

opik_logger = OpikLogger()
litellm.callbacks = [opik_logger]

response = litellm.completion(
    model="gemini/gemini-pro",
    messages=[
        {
            "role": "user",
            "content": "Why is tracking and evaluation of LLMs important?",
        },
    ],
)
print(response)
@@ -0,0 +1,24 @@
import getpass
import os

import litellm
from litellm.integrations.opik.opik import OpikLogger

# INJECT_OPIK_CONFIGURATION

if "GROQ_API_KEY" not in os.environ:
    os.environ["GROQ_API_KEY"] = getpass.getpass("Enter your Groq API key: ")

opik_logger = OpikLogger()
litellm.callbacks = [opik_logger]

response = litellm.completion(
    model="groq/llama3-8b-8192",
    messages=[
        {
            "role": "user",
            "content": "Why is tracking and evaluation of LLMs important?",
        },
    ],
)
print(response)
@@ -0,0 +1,47 @@
import getpass
import os

os.environ["HAYSTACK_CONTENT_TRACING_ENABLED"] = "true"

from haystack import Pipeline
from haystack.components.builders import ChatPromptBuilder
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from opik.integrations.haystack import OpikConnector

# INJECT_OPIK_CONFIGURATION

if "OPENAI_API_KEY" not in os.environ:
    os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter your OpenAI API key: ")

pipe = Pipeline()

# Add the OpikConnector component to the pipeline
pipe.add_component(
    "tracer",
    OpikConnector("Chat example"),
)

# Continue building the pipeline
pipe.add_component("prompt_builder", ChatPromptBuilder())
pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))

pipe.connect("prompt_builder.prompt", "llm.messages")

messages = [
    ChatMessage.from_system(
        "Always respond in German even if some input data is in other languages.",
    ),
    ChatMessage.from_user("Tell me about {{location}}"),
]

response = pipe.run(
    data={
        "prompt_builder": {
            "template_variables": {"location": "Berlin"},
            "template": messages,
        },
    },
)

print(response["llm"]["replies"][0])
@@ -0,0 +1,28 @@
import getpass
import os

from langchain.prompts import PromptTemplate
from langchain_openai import OpenAI
from opik.integrations.langchain import OpikTracer

# INJECT_OPIK_CONFIGURATION

if "OPENAI_API_KEY" not in os.environ:
    os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter your OpenAI API key: ")

# Initialize the tracer
opik_tracer = OpikTracer()

# Create the LLM for the chain using LangChain
llm = OpenAI(temperature=0, callbacks=[opik_tracer])

prompt_template = PromptTemplate(
    input_variables=["input"],
    template="Translate the following text to French: {input}",
)

# Use the pipe operator to compose the LLM chain
llm_chain = prompt_template | llm

# Generate the translation
print(llm_chain.invoke({"input": "Hello, how are you?"}, callbacks=[opik_tracer]))