diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f40206e65..ccc6c2cb5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -54,7 +54,7 @@ jobs: - name: Start the service run: | echo "OPENAI_API_KEY=placeholder" >> ../.env - poetry run langgraph up -c ../langgraph.json -d ../compose.override.yml --postgres-uri 'postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable' --verbose --wait + poetry run langgraph up -c ../langgraph.json -d ../compose.override.test.yml --postgres-uri 'postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable' --verbose --wait - name: Run tests env: POSTGRES_HOST: localhost diff --git a/API.md b/API.md index 6b2a37454..0365ad111 100644 --- a/API.md +++ b/API.md @@ -5,7 +5,7 @@ This allows you to easily integrate it with a different frontend of your choice. For full API documentation, see [localhost:8100/docs](localhost:8100/docs) after deployment. -If you want to see the API docs before deployment, check out the [hosted docs here](https://opengpts-example-vz4y4ooboq-uc.a.run.app/docs). +If you want to see the API docs before deployment, check out the [hosted docs here](https://opengpts-backend-ffoprvkqsa-uc.a.run.app/docs). In the examples below, cookies are used as a mock auth method. For production, we recommend using JWT auth. Refer to the [auth guide for production](auth.md) for more information. When using JWT auth, you will need to include the JWT in the `Authorization` header as a Bearer token. 
@@ -40,14 +40,14 @@ The config parameters allows you to set the LLM used, the instructions of the as "config": { "configurable": { "type": "agent", - "type==agent/agent_type": "GPT 3.5 Turbo", + "type==agent/agent_type": "GPT 4o Mini", "type==agent/system_message": "You are a helpful assistant", "type==agent/tools": ["Wikipedia"] }, "public": True } ``` -This creates an assistant with the name `"bar"`, with GPT 3.5 Turbo, with a prompt `"You are a helpful assistant"` using the Wikipedia tool , that is public. +This creates an assistant with the name `"bar"`, with GPT 4o Mini, with a prompt `"You are a helpful assistant"` using the Wikipedia tool , that is public. Available tools names can be found in the AvailableTools class in backend/packages/gizmo-agent/gizmo_agent/tools.py Available llms can be found in GizmoAgentType in backend/packages/gizmo-agent/gizmo_agent/agent_types/__init__.py diff --git a/Makefile b/Makefile index 054598af1..ff6b90ed7 100644 --- a/Makefile +++ b/Makefile @@ -2,3 +2,6 @@ start: cd backend && poetry run langgraph up -c ../langgraph.json -d ../compose.override.yml --postgres-uri 'postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable' --verbose + +start-test: + cd backend && poetry run langgraph up -c ../langgraph.json -d ../compose.override.test.yml --postgres-uri 'postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable' --verbose \ No newline at end of file diff --git a/README.md b/README.md index a5b4beb85..e322e1415 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ Because this is open source, if you do not like those architectures or want to m **Key Links** -- [GPTs: a simple hosted version](https://opengpts-example-vz4y4ooboq-uc.a.run.app/) +- [GPTs: a simple hosted version](https://opengpts-backend-ffoprvkqsa-uc.a.run.app/) - [Assistants API: a getting started guide](API.md) - [Auth: a guide for production](auth.md) @@ -281,7 +281,7 @@ the most flexible choice, but they work well 
with fewer models and can be less r When creating an assistant, you specify a few things. -First, you choose the language model to use. Only a few language models can be used reliably well: GPT-3.5, GPT-4, +First, you choose the language model to use. Only a few language models can be used reliably well: GPT-4, Claude, and Gemini. Second, you choose the tools to use. These can be predefined tools OR a retriever constructed from uploaded files. You @@ -339,21 +339,20 @@ You can choose between different LLMs to use. This takes advantage of LangChain's many integrations. It is important to note that depending on which LLM you use, you may need to change how you are prompting it. -We have exposed four agent types by default: +We have exposed 3 agent types by default: -- "GPT 3.5 Turbo" -- "GPT 4" -- "Azure OpenAI" -- "Claude 2" +- "GPT 4o Mini" +- "GPT 4o" +- "Claude 3.5 Sonnet" We will work to add more when we have confidence they can work well. If you want to add your own LLM or agent configuration, or want to edit the existing ones, you can find them in -`backend/app/agent_types` +`backend/app/llms` (`LLMType`) -#### Claude 2 +#### Claude 3.5 Sonnet -If using Claude 2, you will need to set the following environment variable: +If using Claude, you will need to set the following environment variable: ```shell export ANTHROPIC_API_KEY=sk-... 
diff --git a/backend/Makefile b/backend/Makefile index 7bb7465ca..d3217bb55 100644 --- a/backend/Makefile +++ b/backend/Makefile @@ -18,11 +18,11 @@ start: test: # We need to update handling of env variables for tests - YDC_API_KEY=placeholder OPENAI_API_KEY=placeholder LANGGRAPH_URL=http://localhost:8123 PGVECTOR_URI=postgresql+psycopg2://postgres:postgres@localhost:5433/postgres?sslmode=disable poetry run pytest $(TEST_FILE) + OPENAI_API_KEY=placeholder LANGGRAPH_URL=http://localhost:8123 POSTGRES_URI=postgresql+psycopg2://postgres:postgres@localhost:5433/postgres?sslmode=disable poetry run pytest $(TEST_FILE) test_watch: # We need to update handling of env variables for tests - YDC_API_KEY=placeholder OPENAI_API_KEY=placeholder LANGGRAPH_URL=http://localhost:8123 PGVECTOR_URI=postgresql+psycopg2://postgres:postgres@localhost:5433/postgres?sslmode=disable poetry run ptw . -- $(TEST_FILE) + OPENAI_API_KEY=placeholder LANGGRAPH_URL=http://localhost:8123 POSTGRES_URI=postgresql+psycopg2://postgres:postgres@localhost:5433/postgres?sslmode=disable poetry run ptw . 
-- $(TEST_FILE) ###################### # LINTING AND FORMATTING diff --git a/backend/app/config_schema.json b/backend/app/config_schema.json index 0ead2a2f1..410774521 100644 --- a/backend/app/config_schema.json +++ b/backend/app/config_schema.json @@ -17,168 +17,24 @@ ], "type": "string" }, - "AgentType": { - "title": "AgentType", - "description": "An enumeration.", - "enum": [ - "GPT 3.5 Turbo", - "GPT 4 Turbo", - "GPT 4 (Azure OpenAI)", - "Claude 2", - "GEMINI", - "Ollama" - ], - "type": "string" - }, "AvailableTools": { "title": "AvailableTools", "description": "An enumeration.", "enum": [ - "action_server_by_robocorp", - "ai_action_runner_by_connery", - "ddg_search", "search_tavily", "search_tavily_answer", "retrieval", "arxiv", - "you_search", - "sec_filings_kai_ai", - "press_releases_kai_ai", "pubmed", - "wikipedia", - "dall_e" + "wikipedia" ], "type": "string" }, - "ActionServerConfig": { - "title": "ActionServerConfig", - "type": "object", - "properties": { - "url": { - "title": "Url", - "type": "string" - }, - "api_key": { - "title": "Api Key", - "type": "string" - } - }, - "required": [ - "url", - "api_key" - ] - }, - "ActionServer": { - "title": "ActionServer", - "type": "object", - "properties": { - "type": { - "default": "action_server_by_robocorp", - "allOf": [ - { - "$ref": "#/definitions/AvailableTools" - } - ] - }, - "name": { - "title": "Name", - "default": "Action Server by Robocorp", - "const": "Action Server by Robocorp", - "type": "string" - }, - "description": { - "title": "Description", - "default": "Run AI actions with [Robocorp Action Server](https://github.com/robocorp/robocorp).", - "const": "Run AI actions with [Robocorp Action Server](https://github.com/robocorp/robocorp).", - "type": "string" - }, - "config": { - "$ref": "#/definitions/ActionServerConfig" - }, - "multi_use": { - "title": "Multi Use", - "default": true, - "const": true, - "type": "boolean" - } - }, - "required": [ - "config" - ] - }, "ToolConfig": { "title": 
"ToolConfig", "type": "object", "properties": {} }, - "Connery": { - "title": "Connery", - "type": "object", - "properties": { - "type": { - "default": "ai_action_runner_by_connery", - "allOf": [ - { - "$ref": "#/definitions/AvailableTools" - } - ] - }, - "name": { - "title": "Name", - "default": "AI Action Runner by Connery", - "const": "AI Action Runner by Connery", - "type": "string" - }, - "description": { - "title": "Description", - "default": "Connect OpenGPTs to the real world with [Connery](https://github.com/connery-io/connery).", - "const": "Connect OpenGPTs to the real world with [Connery](https://github.com/connery-io/connery).", - "type": "string" - }, - "config": { - "$ref": "#/definitions/ToolConfig" - }, - "multi_use": { - "title": "Multi Use", - "default": false, - "type": "boolean" - } - } - }, - "DDGSearch": { - "title": "DDGSearch", - "type": "object", - "properties": { - "type": { - "default": "ddg_search", - "allOf": [ - { - "$ref": "#/definitions/AvailableTools" - } - ] - }, - "name": { - "title": "Name", - "default": "DuckDuckGo Search", - "const": "DuckDuckGo Search", - "type": "string" - }, - "description": { - "title": "Description", - "default": "Search the web with [DuckDuckGo](https://pypi.org/project/duckduckgo-search/).", - "const": "Search the web with [DuckDuckGo](https://pypi.org/project/duckduckgo-search/).", - "type": "string" - }, - "config": { - "$ref": "#/definitions/ToolConfig" - }, - "multi_use": { - "title": "Multi Use", - "default": false, - "type": "boolean" - } - } - }, "Arxiv": { "title": "Arxiv", "type": "object", @@ -213,108 +69,6 @@ } } }, - "YouSearch": { - "title": "YouSearch", - "type": "object", - "properties": { - "type": { - "default": "you_search", - "allOf": [ - { - "$ref": "#/definitions/AvailableTools" - } - ] - }, - "name": { - "title": "Name", - "default": "You.com Search", - "const": "You.com Search", - "type": "string" - }, - "description": { - "title": "Description", - "default": "Uses 
[You.com](https://you.com/) search, optimized responses for LLMs.", - "const": "Uses [You.com](https://you.com/) search, optimized responses for LLMs.", - "type": "string" - }, - "config": { - "$ref": "#/definitions/ToolConfig" - }, - "multi_use": { - "title": "Multi Use", - "default": false, - "type": "boolean" - } - } - }, - "SecFilings": { - "title": "SecFilings", - "type": "object", - "properties": { - "type": { - "default": "sec_filings_kai_ai", - "allOf": [ - { - "$ref": "#/definitions/AvailableTools" - } - ] - }, - "name": { - "title": "Name", - "default": "SEC Filings (Kay.ai)", - "const": "SEC Filings (Kay.ai)", - "type": "string" - }, - "description": { - "title": "Description", - "default": "Searches through SEC filings using [Kay.ai](https://www.kay.ai/).", - "const": "Searches through SEC filings using [Kay.ai](https://www.kay.ai/).", - "type": "string" - }, - "config": { - "$ref": "#/definitions/ToolConfig" - }, - "multi_use": { - "title": "Multi Use", - "default": false, - "type": "boolean" - } - } - }, - "PressReleases": { - "title": "PressReleases", - "type": "object", - "properties": { - "type": { - "default": "press_releases_kai_ai", - "allOf": [ - { - "$ref": "#/definitions/AvailableTools" - } - ] - }, - "name": { - "title": "Name", - "default": "Press Releases (Kay.ai)", - "const": "Press Releases (Kay.ai)", - "type": "string" - }, - "description": { - "title": "Description", - "default": "Searches through press releases using [Kay.ai](https://www.kay.ai/).", - "const": "Searches through press releases using [Kay.ai](https://www.kay.ai/).", - "type": "string" - }, - "config": { - "$ref": "#/definitions/ToolConfig" - }, - "multi_use": { - "title": "Multi Use", - "default": false, - "type": "boolean" - } - } - }, "PubMed": { "title": "PubMed", "type": "object", @@ -485,51 +239,13 @@ } } }, - "DallE": { - "title": "DallE", - "type": "object", - "properties": { - "type": { - "default": "dall_e", - "allOf": [ - { - "$ref": 
"#/definitions/AvailableTools" - } - ] - }, - "name": { - "title": "Name", - "default": "Generate Image (Dall-E)", - "const": "Generate Image (Dall-E)", - "type": "string" - }, - "description": { - "title": "Description", - "default": "Generates images from a text description using OpenAI's DALL-E model.", - "const": "Generates images from a text description using OpenAI's DALL-E model.", - "type": "string" - }, - "config": { - "$ref": "#/definitions/ToolConfig" - }, - "multi_use": { - "title": "Multi Use", - "default": false, - "type": "boolean" - } - } - }, "LLMType": { "title": "LLMType", "description": "An enumeration.", "enum": [ - "GPT 3.5 Turbo", - "GPT 4 Turbo", - "GPT 4 (Azure OpenAI)", - "Claude 2", - "GEMINI", - "Mixtral", - "Ollama" + "GPT 4o Mini", + "GPT 4o", + "Claude 3.5 Sonnet" ], "type": "string" }, @@ -560,11 +276,11 @@ ] }, "type==agent/agent_type": { - "title": "Agent Type", - "default": "GPT 3.5 Turbo", + "title": "LLM Type", + "default": "GPT 4o Mini", "allOf": [ { - "$ref": "#/definitions/AgentType" + "$ref": "#/definitions/LLMType" } ] }, @@ -591,50 +307,29 @@ "items": { "anyOf": [ { - "$ref": "#/definitions/ActionServer" - }, - { - "$ref": "#/definitions/Connery" + "$ref": "#/definitions/Tavily" }, { - "$ref": "#/definitions/DDGSearch" + "$ref": "#/definitions/TavilyAnswer" }, { "$ref": "#/definitions/Arxiv" }, - { - "$ref": "#/definitions/YouSearch" - }, - { - "$ref": "#/definitions/SecFilings" - }, - { - "$ref": "#/definitions/PressReleases" - }, { "$ref": "#/definitions/PubMed" }, { "$ref": "#/definitions/Wikipedia" }, - { - "$ref": "#/definitions/Tavily" - }, - { - "$ref": "#/definitions/TavilyAnswer" - }, { "$ref": "#/definitions/Retrieval" - }, - { - "$ref": "#/definitions/DallE" } ] } }, "type==chatbot/llm_type": { "title": "LLM Type", - "default": "GPT 3.5 Turbo", + "default": "GPT 4o Mini", "allOf": [ { "$ref": "#/definitions/LLMType" @@ -645,6 +340,15 @@ "title": "Instructions", "default": "You are a helpful assistant.", "type": 
"string" + }, + "type==chat_retrieval/llm_type": { + "title": "LLM Type", + "default": "GPT 4o Mini", + "allOf": [ + { + "$ref": "#/definitions/LLMType" + } + ] } } } diff --git a/backend/app/graphs/new_agent.py b/backend/app/graphs/new_agent.py index 899ecc7e1..b15393f2d 100644 --- a/backend/app/graphs/new_agent.py +++ b/backend/app/graphs/new_agent.py @@ -1,5 +1,4 @@ -from enum import Enum -from typing import Annotated, Any, Dict, TypedDict, cast +from typing import Annotated, Any, Dict, TypedDict from langchain_core.messages import ( AIMessage, @@ -11,17 +10,11 @@ ) from langchain_core.runnables import RunnableConfig from langgraph.graph import END, StateGraph -from langgraph.graph.message import add_messages +from langgraph.graph.message import Messages, add_messages from langgraph.managed.few_shot import FewShotExamples -from langgraph.prebuilt import ToolExecutor, ToolInvocation - -from app.llms import ( - get_anthropic_llm, - get_google_llm, - get_mixtral_fireworks, - get_ollama_llm, - get_openai_llm, -) +from langgraph.prebuilt import ToolNode + +from app.llms import LLMType, get_llm from app.message_types import LiberalToolMessage from app.tools import RETRIEVAL_DESCRIPTION, TOOLS, AvailableTools, get_retrieval_tool @@ -33,8 +26,33 @@ def filter_by_assistant_id(config: RunnableConfig) -> Dict[str, Any]: return {} +def custom_add_messages(left: Messages, right: Messages): + combined_messages = add_messages(left, right) + for message in combined_messages: + # TODO: handle this correctly in ChatAnthropic. + # this is needed to handle content blocks in AIMessageChunk when using + # streaming with the graph. 
if we don't have that, all of the AIMessages + # will have list of dicts in the content + if ( + isinstance(message, AIMessage) + and isinstance(message.content, list) + and ( + text_content_blocks := [ + content_block + for content_block in message.content + if content_block["type"] == "text" + ] + ) + ): + message.content = "".join( + content_block["text"] for content_block in text_content_blocks + ) + + return combined_messages + + class BaseState(TypedDict): - messages: Annotated[list[AnyMessage], add_messages] + messages: Annotated[list[AnyMessage], custom_add_messages] examples: Annotated[ list, FewShotExamples.configure(metadata_filter=filter_by_assistant_id) ] @@ -59,41 +77,6 @@ def _render_messages(ms): return "\n".join(m_string) -class LLMType(str, Enum): - GPT_35_TURBO = "GPT 3.5 Turbo" - GPT_4 = "GPT 4 Turbo" - GPT_4O = "GPT 4o" - AZURE_OPENAI = "GPT 4 (Azure OpenAI)" - CLAUDE2 = "Claude 2" - GEMINI = "GEMINI" - MIXTRAL = "Mixtral" - OLLAMA = "Ollama" - - -def get_llm( - llm_type: LLMType, -): - if llm_type == LLMType.GPT_35_TURBO: - llm = get_openai_llm() - elif llm_type == LLMType.GPT_4: - llm = get_openai_llm(model="gpt-4-turbo") - elif llm_type == LLMType.GPT_4O: - llm = get_openai_llm(model="gpt-4o") - elif llm_type == LLMType.AZURE_OPENAI: - llm = get_openai_llm(azure=True) - elif llm_type == LLMType.CLAUDE2: - llm = get_anthropic_llm() - elif llm_type == LLMType.GEMINI: - llm = get_google_llm() - elif llm_type == LLMType.MIXTRAL: - llm = get_mixtral_fireworks() - elif llm_type == LLMType.OLLAMA: - llm = get_ollama_llm() - else: - raise ValueError - return llm - - def _get_messages(messages, system_message, examples): msgs = [] for m in messages: @@ -160,7 +143,7 @@ async def agent(state, config): examples = state.get("examples", []) _config = config["configurable"] system_message = _config.get("type==agent/system_message", DEFAULT_SYSTEM_MESSAGE) - llm = get_llm(_config.get("type==agent/agent_type", LLMType.GPT_35_TURBO)) + llm = 
get_llm(_config.get("type==agent/agent_type", LLMType.GPT_4O_MINI)) tools = get_tools( _config.get("type==agent/tools"), _config.get("assistant_id"), @@ -190,7 +173,6 @@ def should_continue(state): # Define the function to execute tools async def call_tool(state, config): - messages = state["messages"] _config = config["configurable"] tools = get_tools( _config.get("type==agent/tools"), @@ -199,33 +181,8 @@ async def call_tool(state, config): _config.get("type==agent/retrieval_description"), ) - tool_executor = ToolExecutor(tools) - actions: list[ToolInvocation] = [] - # Based on the continue condition - # we know the last message involves a function call - last_message = cast(AIMessage, messages[-1]) - for tool_call in last_message.tool_calls: - # We construct a ToolInvocation from the function_call - actions.append( - ToolInvocation( - tool=tool_call["name"], - tool_input=tool_call["args"], - ) - ) - # We call the tool_executor and get back a response - responses = await tool_executor.abatch(actions) - # We use the response to create a ToolMessage - tool_messages = [ - LiberalToolMessage( - tool_call_id=tool_call["id"], - name=tool_call["name"], - content=response, - ) - for tool_call, response in zip(last_message.tool_calls, responses) - ] - - # graph state is a dict, so return type must be dict - return {"messages": tool_messages} + tool_node = ToolNode(tools) + return await tool_node.ainvoke(state) workflow = StateGraph(BaseState) diff --git a/backend/app/graphs/new_chatbot.py b/backend/app/graphs/new_chatbot.py index d83844ae6..288906fe5 100644 --- a/backend/app/graphs/new_chatbot.py +++ b/backend/app/graphs/new_chatbot.py @@ -1,52 +1,10 @@ -from enum import Enum from typing import Annotated, Sequence, TypedDict from langchain_core.messages import BaseMessage, SystemMessage from langgraph.graph import END, StateGraph from langgraph.graph.message import add_messages -from app.llms import ( - get_anthropic_llm, - get_google_llm, - get_mixtral_fireworks, - 
get_ollama_llm, - get_openai_llm, -) - - -class LLMType(str, Enum): - GPT_35_TURBO = "GPT 3.5 Turbo" - GPT_4 = "GPT 4 Turbo" - GPT_4O = "GPT 4o" - AZURE_OPENAI = "GPT 4 (Azure OpenAI)" - CLAUDE2 = "Claude 2" - GEMINI = "GEMINI" - MIXTRAL = "Mixtral" - OLLAMA = "Ollama" - - -def get_llm( - llm_type: LLMType, -): - if llm_type == LLMType.GPT_35_TURBO: - llm = get_openai_llm() - elif llm_type == LLMType.GPT_4: - llm = get_openai_llm(model="gpt-4-turbo") - elif llm_type == LLMType.GPT_4O: - llm = get_openai_llm(model="gpt-4o") - elif llm_type == LLMType.AZURE_OPENAI: - llm = get_openai_llm(azure=True) - elif llm_type == LLMType.CLAUDE2: - llm = get_anthropic_llm() - elif llm_type == LLMType.GEMINI: - llm = get_google_llm() - elif llm_type == LLMType.MIXTRAL: - llm = get_mixtral_fireworks() - elif llm_type == LLMType.OLLAMA: - llm = get_ollama_llm() - else: - raise ValueError - return llm +from app.llms import LLMType, get_llm class AgentState(TypedDict): @@ -57,7 +15,9 @@ class AgentState(TypedDict): def _call_model(state, config): - m = get_llm(config["configurable"].get("type==chatbot/llm", LLMType.GPT_35_TURBO)) + m = get_llm( + config["configurable"].get("type==chatbot/llm_type", LLMType.GPT_4O_MINI) + ) system_message = config["configurable"].get( "type==chatbot/system_message", DEFAULT_SYSTEM_MESSAGE ) diff --git a/backend/app/graphs/new_rag.py b/backend/app/graphs/new_rag.py index 866b54a23..b433d9a6d 100644 --- a/backend/app/graphs/new_rag.py +++ b/backend/app/graphs/new_rag.py @@ -1,5 +1,4 @@ import operator -from enum import Enum from typing import Annotated, List, Sequence, TypedDict from uuid import uuid4 @@ -10,28 +9,10 @@ from langgraph.graph.message import add_messages from langgraph.graph.state import StateGraph -from app.llms import ( - get_anthropic_llm, - get_google_llm, - get_mixtral_fireworks, - get_ollama_llm, - get_openai_llm, -) +from app.llms import LLMType, get_llm from app.message_types import LiberalToolMessage from app.tools import 
get_retriever - -class LLMType(str, Enum): - GPT_35_TURBO = "GPT 3.5 Turbo" - GPT_4 = "GPT 4 Turbo" - GPT_4O = "GPT 4o" - AZURE_OPENAI = "GPT 4 (Azure OpenAI)" - CLAUDE2 = "Claude 2" - GEMINI = "GEMINI" - MIXTRAL = "Mixtral" - OLLAMA = "Ollama" - - search_prompt = PromptTemplate.from_template( """Given the conversation below, come up with a search query to look up. @@ -54,30 +35,6 @@ class LLMType(str, Enum): {context}""" -def get_llm( - llm_type: LLMType, -): - if llm_type == LLMType.GPT_35_TURBO: - llm = get_openai_llm() - elif llm_type == LLMType.GPT_4: - llm = get_openai_llm(model="gpt-4-turbo") - elif llm_type == LLMType.GPT_4O: - llm = get_openai_llm(model="gpt-4o") - elif llm_type == LLMType.AZURE_OPENAI: - llm = get_openai_llm(azure=True) - elif llm_type == LLMType.CLAUDE2: - llm = get_anthropic_llm() - elif llm_type == LLMType.GEMINI: - llm = get_google_llm() - elif llm_type == LLMType.MIXTRAL: - llm = get_mixtral_fireworks() - elif llm_type == LLMType.OLLAMA: - llm = get_ollama_llm() - else: - raise ValueError - return llm - - class AgentState(TypedDict): messages: Annotated[List[BaseMessage], add_messages] msg_count: Annotated[int, operator.add] @@ -108,9 +65,7 @@ def _get_messages(messages, system_message=DEFAULT_SYSTEM_MESSAGE): @chain async def get_search_query(messages: Sequence[BaseMessage], config): llm = get_llm( - config["configurable"].get( - "agent==chat_retrieval/llm_type", LLMType.GPT_35_TURBO - ) + config["configurable"].get("type==chat_retrieval/llm_type", LLMType.GPT_4O_MINI) ) convo = [] for m in messages: @@ -180,9 +135,7 @@ async def retrieve(state: AgentState, config): def call_model(state: AgentState, config): messages = state["messages"] llm = get_llm( - config["configurable"].get( - "agent==chat_retrieval/llm_type", LLMType.GPT_35_TURBO - ) + config["configurable"].get("type==chat_retrieval/llm_type", LLMType.GPT_4O_MINI) ) response = llm.invoke( _get_messages( diff --git a/backend/app/llms.py b/backend/app/llms.py index 
94d5fc10e..3db478aa9 100644 --- a/backend/app/llms.py +++ b/backend/app/llms.py @@ -1,4 +1,5 @@ import os +from enum import Enum from functools import lru_cache from urllib.parse import urlparse @@ -14,8 +15,14 @@ logger = structlog.get_logger(__name__) +class LLMType(str, Enum): + GPT_4O_MINI = "GPT 4o Mini" + GPT_4O = "GPT 4o" + CLAUDE_3_5_SONNET = "Claude 3.5 Sonnet" + + @lru_cache(maxsize=4) -def get_openai_llm(model: str = "gpt-3.5-turbo", azure: bool = False): +def get_openai_llm(model: str = "gpt-4o-mini", azure: bool = False): proxy_url = os.getenv("PROXY_URL") http_client = None if proxy_url: @@ -69,7 +76,7 @@ def get_anthropic_llm(bedrock: bool = False): model = BedrockChat(model_id="anthropic.claude-v2", client=client) else: model = ChatAnthropic( - model_name="claude-3-haiku-20240307", + model_name="claude-3-5-sonnet-20240620", max_tokens_to_sample=2000, temperature=0, ) @@ -98,3 +105,17 @@ def get_ollama_llm(): ollama_base_url = "http://localhost:11434" return ChatOllama(model=model_name, base_url=ollama_base_url) + + +def get_llm( + llm_type: LLMType, +): + if llm_type == LLMType.GPT_4O_MINI: + llm = get_openai_llm(model="gpt-4o-mini") + elif llm_type == LLMType.GPT_4O: + llm = get_openai_llm(model="gpt-4o") + elif llm_type == LLMType.CLAUDE_3_5_SONNET: + llm = get_anthropic_llm() + else: + raise ValueError(f"Unsupported LLM type: '{llm_type}'") + return llm diff --git a/backend/app/upload.py b/backend/app/upload.py index 29dd2fead..708977254 100644 --- a/backend/app/upload.py +++ b/backend/app/upload.py @@ -82,16 +82,20 @@ def convert_ingestion_input_to_blob(file: UploadFile) -> Blob: ) +def get_sqlalchemy_db_uri(db_uri: str) -> str: + return db_uri.replace("postgres://", "postgresql://") + + def get_vectorstore() -> PGVector: if os.environ.get("OPENAI_API_KEY"): return PGVector( - 
connection_string=os.environ["PGVECTOR_URI"], + connection_string=get_sqlalchemy_db_uri(os.environ["POSTGRES_URI"]), embedding_function=OpenAIEmbeddings(), use_jsonb=True, ) if os.environ.get("AZURE_OPENAI_API_KEY"): return PGVector( - connection_string=os.environ["PGVECTOR_URI"], + connection_string=get_sqlalchemy_db_uri(os.environ["POSTGRES_URI"]), embedding_function=AzureOpenAIEmbeddings( azure_endpoint=os.environ.get("AZURE_OPENAI_API_BASE"), azure_deployment=os.environ.get( diff --git a/backend/poetry.lock b/backend/poetry.lock index fbfb2c4f5..026835c91 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiohttp" @@ -112,20 +112,20 @@ frozenlist = ">=1.1.0" [[package]] name = "anthropic" -version = "0.26.1" +version = "0.31.2" description = "The official Python library for the anthropic API" optional = false python-versions = ">=3.7" files = [ - {file = "anthropic-0.26.1-py3-none-any.whl", hash = "sha256:2812b9b250b551ed8a1f0a7e6ae3f005654098994f45ebca5b5808bd154c9628"}, - {file = "anthropic-0.26.1.tar.gz", hash = "sha256:26680ff781a6f678a30a1dccd0743631e602b23a47719439ffdef5335fa167d8"}, + {file = "anthropic-0.31.2-py3-none-any.whl", hash = "sha256:28d176b98c72615bfae30f0a9eee6297cc33bf52535d38156fc2805556e2f09b"}, + {file = "anthropic-0.31.2.tar.gz", hash = "sha256:0134b73df8d1f142fc68675fbadb75e920054e9e3437b99df63f10f0fc6ac26f"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" -jiter = ">=0.1.0,<1" +jiter = ">=0.4.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tokenizers = ">=0.13.0" @@ -1600,72 +1600,72 @@ files = [ [[package]] name = "jiter" -version = "0.1.0" -description = "" +version = "0.5.0" +description = "Fast iterable JSON parser." 
optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3aa466e89664cb94e69571df326f0c28e25e2e728f90fa4c3c235bbd35b40609"}, - {file = "jiter-0.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46eed20f7d9642787eed4143f7b25e16cf9915bb45656980cc9b966bb1e00f59"}, - {file = "jiter-0.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51fcd4bdb23de3a26c2b64f7bd87e9e43c82f1171145ba13434a654d7c8e9aa9"}, - {file = "jiter-0.1.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:657ca4cf8d99e2e899a5ef778daed5f42eff6de6f23403a6225b6d6bafb55f38"}, - {file = "jiter-0.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da72cf6582049d2b802e48dd647a096103994a21a7a762fe813b727565ac0ef"}, - {file = "jiter-0.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:148ae1c97be312f1e969d76fbf507818d53e2867e90cf3c7f78941a199d5b84c"}, - {file = "jiter-0.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12ce8243d1adb4657cfd9f23ec73fbd206bd5387bea0ebb5514c41fd268a1c1"}, - {file = "jiter-0.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:067cc20889627a0afcaf6b465e942990b9f32d1ad88b0a083ece74becc3831b0"}, - {file = "jiter-0.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ce5866bb5ff7dc14d036fede7e7ddb86b3b67064dc66dde15de4771e2697e539"}, - {file = "jiter-0.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f446f1f5e8466fc4dfe775f9c5d8b6c3f0b8b07dc24d4ce76d8de3468d7447a8"}, - {file = "jiter-0.1.0-cp310-none-win32.whl", hash = "sha256:47c1e12bd0789bd4f76cc4973a04d512832568a2a4925cd0b52d0ed413aa5e8d"}, - {file = "jiter-0.1.0-cp310-none-win_amd64.whl", hash = "sha256:0316fa82ee4dab455bac2ec05362f3ac19d77e3139225683289c366ce35605b9"}, - {file = "jiter-0.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:f47eb274aae20ee3b565886ab315c3f16f9831c0e4fd6722dc100a2dbc0923f9"}, - {file = "jiter-0.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80d1bf437ea70f43c0976f96cd83fa4618aceb526ba3eaccf9f736d0c3185f5c"}, - {file = "jiter-0.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215ca1178d30e7a652849b9ca145a4666e1ed0941aef0c61bbaf88a0cd084b66"}, - {file = "jiter-0.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08d7401e20fc660871a02ec05dda9dd93c95052a3c1588385230bca59d9d525b"}, - {file = "jiter-0.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cd365396e9c50b1c458bad0b21452f4c33fea222413aea78826bca98097f487"}, - {file = "jiter-0.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:050252cde3ae0b0a1eca028a30d953ce2d90e0150c1eef0e5ad75ce163d32484"}, - {file = "jiter-0.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfcf0996949a9435a2ebba2455934ad72d9faa1de2069c65aeaeaa8c6219820d"}, - {file = "jiter-0.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfce158151a3a7d0b8f8af549540e1d8328a9dce4ee61c2fb10b12f269d68b6d"}, - {file = "jiter-0.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c22d684e663cc99f887c3133a7714c5ecba73524438bc3c93e6bb868c55a9097"}, - {file = "jiter-0.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fe94ab7e548e492dfd35118de7de613078b7e4ddc276976e8fa2f0f37029cad5"}, - {file = "jiter-0.1.0-cp311-none-win32.whl", hash = "sha256:1c41463f82b67d2efa8f269f7cd150c6c16c5902a0508277f5b1c1569e93dc1d"}, - {file = "jiter-0.1.0-cp311-none-win_amd64.whl", hash = "sha256:40d361aa7e728a186495b7b00a47f83f7153714a8b49d9d38dfc45f7b6630f99"}, - {file = "jiter-0.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2a1e6335a4ce98dc64d0871ba3316f06d32728beefe336a621f9877b71a237be"}, - {file = "jiter-0.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:832e1a91fea7819507b1d1215e1a82e02da423ea298231af842b35c41d8411db"}, - {file = "jiter-0.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7f620a558d5952218fe924c7257ce3592835a23e651a140957ba66128675c0d"}, - {file = "jiter-0.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2149878ed5c8d1909546d6bb259aaa3ca6b6f81487b03504ea618264f79f4e3b"}, - {file = "jiter-0.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:88ba8abb7025fa4e806a1fa03a2be23cb8584ec737bdf62554873ba2698e44d3"}, - {file = "jiter-0.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2b29b49fe73c7da72f29d922ce85ee5a74772678ecbc2542e99bc4935c68965"}, - {file = "jiter-0.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b53bd047ee136f38c0b56779423bae99cab1b9a65b586f1c19e94a6f65013599"}, - {file = "jiter-0.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92bd461af7766d7f091216942951b603d546f16c1818b9072f5ec7c89bb8a7d2"}, - {file = "jiter-0.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:17a45f16e7253c23c81969086707229591645b192935cb2db226e01bd3abd148"}, - {file = "jiter-0.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e6be9c021ca191c186275d5a9c70b7767cde0852454a8827c9edde995518c856"}, - {file = "jiter-0.1.0-cp312-none-win32.whl", hash = "sha256:c6445c37eca8d79bedf3bd74683ad668137b05880c7af95f0b96222d62be2db9"}, - {file = "jiter-0.1.0-cp312-none-win_amd64.whl", hash = "sha256:5cf60df741bc80439cb2d9b2923c7b712c6c82ac6848387f95d77c5723e01d0a"}, - {file = "jiter-0.1.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:977d8b322f5f661f16903d2ff8e981e210ba0e057d2d70a1f7b59b8d478e6d45"}, - {file = "jiter-0.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ceea8e8ac9af7b0f098660f3837bc3ec975716103788f36d228c543b1319c475"}, - {file = "jiter-0.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e3de99e3983c0697c449f45ec740096ac559656485aea48066c982530066dd8d"}, - {file = "jiter-0.1.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb02700ad165a81b0993ad3c550f3b590f0952ff3ce10826fc62aeb064b47b6a"}, - {file = "jiter-0.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb78293d40e38ee5c4370c547af06fb63c7e810f3895ecb76ddcc5fa413e9ccf"}, - {file = "jiter-0.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:194fcbd242b35bda13196b501b116e50fa553c414e1cb0350dc1bc910bceb00d"}, - {file = "jiter-0.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0f55e5cd7b2f649d934b1397bc104200043cdf35addf4892ed66e472e6fe05"}, - {file = "jiter-0.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6a5c6ae1587f60eb995724e34e7257291c919d163906edd030ced77af9f420b"}, - {file = "jiter-0.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f9f521361cf3633b314edb2313e7abc4fce59dfa1d918561263474fb5c7e17b8"}, - {file = "jiter-0.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c1c0e8b99b4e9fed57b7dbd5be63aa0fc36d45551dedc3c3697aa2694c9a0be3"}, - {file = "jiter-0.1.0-cp38-none-win32.whl", hash = "sha256:631d92b82f228774e9f0b79927016fafed369521b8bf059fa8c0353ba4cd76b2"}, - {file = "jiter-0.1.0-cp38-none-win_amd64.whl", hash = "sha256:c1e798a92daf8c6511907c3861c0cf500f23241c160d1c09cb0e9ba668fde667"}, - {file = "jiter-0.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e04740a9ea37118fe6788754eb2ef043ad83809dd677bf3c5f331cc41f8ef70d"}, - {file = "jiter-0.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d7272eefa5b390c6e450760959e224033b925b0ab76e3279dfdad7f5ec65db0"}, - {file = "jiter-0.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae1f35e062a71deaad689fb2f51b202c1d55ac941733bdcd7587577e17b8a16"}, - {file = "jiter-0.1.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:230fc4dd9c2c3f4b6608fda3f34891cabee2537eef7c7fd0cd68792def14bcc6"}, - {file = "jiter-0.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f867893fb7b458c5cb302b33fbe769bb8c8e60594057f29e005fc8ad21b2a58"}, - {file = "jiter-0.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b80b4b64aa5dd63e79bf5addc903855a9a5b7b2493a826cc2cbf9cc9ecfcd23a"}, - {file = "jiter-0.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab154d2f877e66757202a71669142c1ba9e9b5c5d1cf81510924950d74f62a3"}, - {file = "jiter-0.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b43b3fa25b828a2fc4ed7c42f788c261df17d82fb5ced129140ef8be2577ee2"}, - {file = "jiter-0.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:eb3e9a062ddba709db2afde1ef2c72244dfbae09a27b4aa3701267e489ef7a30"}, - {file = "jiter-0.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c98bddd0dd5cfe638f1a7e4341edfae0a1c97aed879207e594b221f8c5058aa6"}, - {file = "jiter-0.1.0-cp39-none-win32.whl", hash = "sha256:9c6d24f8f1764b1c0917bc35131982bea5517cc7b12226f19c4c01215e1be208"}, - {file = "jiter-0.1.0-cp39-none-win_amd64.whl", hash = "sha256:28f3d4f3e88313ef20e51e3330c22c6ce636ca2eb167b185c298a2ea1ab67b8c"}, - {file = "jiter-0.1.0.tar.gz", hash = "sha256:d77da07222a42d2ae907dbd03bca708079e4268bb7e155006c2c6960281f7f1a"}, + {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, + {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, + {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, + {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, + {file = "jiter-0.5.0-cp311-none-win32.whl", hash = "sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, + {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338"}, + {file = "jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4"}, + {file = "jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f04bc2fc50dc77be9d10f73fcc4e39346402ffe21726ff41028f36e179b587e6"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f433a4169ad22fcb550b11179bb2b4fd405de9b982601914ef448390b2954f3"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad4a6398c85d3a20067e6c69890ca01f68659da94d74c800298581724e426c7e"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6baa88334e7af3f4d7a5c66c3a63808e5efbc3698a1c57626541ddd22f8e4fbf"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ece0a115c05efca597c6d938f88c9357c843f8c245dbbb53361a1c01afd7148"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:335942557162ad372cc367ffaf93217117401bf930483b4b3ebdb1223dbddfa7"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:649b0ee97a6e6da174bffcb3c8c051a5935d7d4f2f52ea1583b5b3e7822fbf14"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4be354c5de82157886ca7f5925dbda369b77344b4b4adf2723079715f823989"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5206144578831a6de278a38896864ded4ed96af66e1e63ec5dd7f4a1fce38a3a"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8120c60f8121ac3d6f072b97ef0e71770cc72b3c23084c72c4189428b1b1d3b6"}, + {file = "jiter-0.5.0-cp38-none-win32.whl", hash = "sha256:6f1223f88b6d76b519cb033a4d3687ca157c272ec5d6015c322fc5b3074d8a5e"}, + {file = "jiter-0.5.0-cp38-none-win_amd64.whl", hash = "sha256:c59614b225d9f434ea8fc0d0bec51ef5fa8c83679afedc0433905994fb36d631"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, + {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, + {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, + {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, ] [[package]] @@ -1741,107 +1741,85 @@ requests = ">=2" [[package]] name = "langchain" -version = "0.2.0" +version = "0.2.11" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain-0.2.0-py3-none-any.whl", hash = "sha256:25c43d97daaf8539c10c77d028f88782f48649e6a01e77938014a1fbaf68bb62"}, - {file = "langchain-0.2.0.tar.gz", hash = "sha256:3d980e92d8635bb7fa45751ee159c56643d1f0e56221e378b45d81cf6dc7d453"}, + {file = "langchain-0.2.11-py3-none-any.whl", hash = "sha256:5a7a8b4918f3d3bebce9b4f23b92d050699e6f7fb97591e8941177cf07a260a2"}, + {file = "langchain-0.2.11.tar.gz", hash = "sha256:d7a9e4165f02dca0bd78addbc2319d5b9286b5d37c51d784124102b57e9fd297"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -dataclasses-json = ">=0.5.7,<0.7" -langchain-core = ">=0.2.0,<0.3.0" +langchain-core = ">=0.2.23,<0.3.0" langchain-text-splitters = ">=0.2.0,<0.3.0" langsmith = ">=0.1.17,<0.2.0" -numpy = ">=1,<2" +numpy = {version = ">=1,<2", markers = "python_version < \"3.12\""} pydantic = ">=1,<3" PyYAML = ">=5.3" requests = ">=2,<3" SQLAlchemy = ">=1.4,<3" -tenacity = 
">=8.1.0,<9.0.0" - -[package.extras] -azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] -clarifai = ["clarifai (>=9.1.0)"] -cli = ["typer (>=0.9.0,<0.10.0)"] -cohere = ["cohere (>=4,<6)"] -docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] -embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.1,<0.2)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf 
(>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] -javascript = ["esprima (>=4.0.1,<5.0.0)"] -llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] -openai = ["openai (<2)", "tiktoken (>=0.7,<1.0)"] -qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] -text-helpers = ["chardet (>=5.1.0,<6.0.0)"] +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" [[package]] name = "langchain-anthropic" -version = "0.1.13" +version = "0.1.20" description = "An integration package connecting AnthropicMessages and LangChain" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_anthropic-0.1.13-py3-none-any.whl", hash = "sha256:121f6f480da7685c239573d98322adb94fe486d40651ac341637f65da36881de"}, - {file = "langchain_anthropic-0.1.13.tar.gz", hash = "sha256:32e7ac51e1874c47e1a20493e75f5bfc88b0ffeaf5f1aed6091547e1ae44bb85"}, + {file = "langchain_anthropic-0.1.20-py3-none-any.whl", hash = "sha256:3a0d89ac6856be98beb3ec63813393bf29af3c5134247979c055938e741b7d9d"}, + {file = "langchain_anthropic-0.1.20.tar.gz", hash = "sha256:cb9607fecfc0f0de49b79dd0fc066790e2877873ef753abd98d2ae38d6e0f5b2"}, ] [package.dependencies] -anthropic = ">=0.26.0,<1" +anthropic = ">=0.28.0,<1" defusedxml = ">=0.7.1,<0.8.0" -langchain-core = ">=0.1.43,<0.3" +langchain-core = ">=0.2.17,<0.3" [[package]] name 
= "langchain-community" -version = "0.2.0" +version = "0.2.10" description = "Community contributed LangChain integrations." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_community-0.2.0-py3-none-any.whl", hash = "sha256:4d069d280fd2dc1219df13d580338729dbaebf581160bc42c73d5ed208258d57"}, - {file = "langchain_community-0.2.0.tar.gz", hash = "sha256:15c7e8f2547d9e9f03eef0704ef1c68a0074c462e9bd635662d5ea4b0eace87b"}, + {file = "langchain_community-0.2.10-py3-none-any.whl", hash = "sha256:9f4d1b5ab7f0b0a704f538e26e50fce45a461da6d2bf6b7b636d24f22fbc088a"}, + {file = "langchain_community-0.2.10.tar.gz", hash = "sha256:3a0404bad4bd07d6f86affdb62fb3d080a456c66191754d586a409d9d6024d62"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" -langchain = ">=0.2.0,<0.3.0" -langchain-core = ">=0.2.0,<0.3.0" +langchain = ">=0.2.9,<0.3.0" +langchain-core = ">=0.2.23,<0.3.0" langsmith = ">=0.1.0,<0.2.0" -numpy = ">=1,<2" +numpy = {version = ">=1,<2", markers = "python_version < \"3.12\""} PyYAML = ">=5.3" requests = ">=2,<3" SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<9.0.0" - -[package.extras] -cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", 
"geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" [[package]] name = "langchain-core" -version = "0.2.0" +version = "0.2.23" description = "Building applications with LLMs through composability" 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.2.0-py3-none-any.whl", hash = "sha256:973cf3402d428018dc68313d8be0e721cea5b18d4ae82149ad659dbce34dc296"}, - {file = "langchain_core-0.2.0.tar.gz", hash = "sha256:22c84e88aa7e98a0b98fadb3238343152641afce6ff27e8f2d9f5ff421e35a35"}, + {file = "langchain_core-0.2.23-py3-none-any.whl", hash = "sha256:ef0b4184b37e356a27182514aedcc8c41ffacbd6348a801bc775c1ce1f608637"}, + {file = "langchain_core-0.2.23.tar.gz", hash = "sha256:ac8165f283d8f5214576ffc38387106ef0de7eb8d2c52576d06e8dd3285294b0"}, ] [package.dependencies] jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.0,<0.2.0" -packaging = ">=23.2,<24.0" -pydantic = ">=1,<3" +langsmith = ">=0.1.75,<0.2.0" +packaging = ">=23.2,<25" +pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} PyYAML = ">=5.3" -tenacity = ">=8.1.0,<9.0.0" - -[package.extras] -extended-testing = ["jinja2 (>=3,<4)"] +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" [[package]] name = "langchain-google-vertexai" @@ -1864,18 +1842,18 @@ anthropic = ["anthropic[vertexai] (>=0.23.0,<1)"] [[package]] name = "langchain-openai" -version = "0.1.7" +version = "0.1.17" description = "An integration package connecting OpenAI and LangChain" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_openai-0.1.7-py3-none-any.whl", hash = "sha256:39c3cb22bb739900ae8294d4d9939a6138c0ca7ad11198e57038eb14c08d04ec"}, - {file = "langchain_openai-0.1.7.tar.gz", hash = "sha256:fd7e1c33ba8e2cab4b2154f3a2fd4a0d9cc6518b41cf49bb87255f9f732a4896"}, + {file = "langchain_openai-0.1.17-py3-none-any.whl", hash = "sha256:30bef5574ecbbbb91b8025b2dc5a1bd81fd62157d3ad1a35d820141f31c5b443"}, + {file = "langchain_openai-0.1.17.tar.gz", hash = "sha256:c5d70ddecdcb93e146f376bdbadbb6ec69de9ac0f402cd5b83de50b655ba85ee"}, ] [package.dependencies] -langchain-core = ">=0.1.46,<0.3" -openai = ">=1.24.0,<2.0.0" +langchain-core = ">=0.2.20,<0.3.0" +openai = 
">=1.32.0,<2.0.0" tiktoken = ">=0.7,<1" [[package]] @@ -1927,28 +1905,27 @@ six = "*" [[package]] name = "langgraph" -version = "0.0.54" -description = "langgraph" +version = "0.1.14" +description = "Building stateful, multi-actor applications with LLMs" optional = false python-versions = "<4.0,>=3.9.0" files = [ - {file = "langgraph-0.0.54-py3-none-any.whl", hash = "sha256:c1c4e98f9e587c1134227d0b4300ee2e32ef561b246f7a611703eaaf33617798"}, - {file = "langgraph-0.0.54.tar.gz", hash = "sha256:d4a2b59c2483b1be0ae873a5a27a0108843755919d88cfd9da5f23b47427a448"}, + {file = "langgraph-0.1.14-py3-none-any.whl", hash = "sha256:e093cf10a0b8998a365a9f9b24e9b73d88df1c0725ba258ed4744bbb592cf7a2"}, + {file = "langgraph-0.1.14.tar.gz", hash = "sha256:e729d4fb77acf85391324bce87c48b37a45035693a1d21492f91e2bedade0078"}, ] [package.dependencies] -langchain-core = ">=0.2,<0.3" -uuid6 = ">=2024.1.12,<2025.0.0" +langchain-core = ">=0.2.22,<0.3" [[package]] name = "langgraph-cli" -version = "0.1.37" +version = "0.1.50" description = "CLI for interacting with LangGraph API" optional = false python-versions = "<4.0.0,>=3.9.0" files = [ - {file = "langgraph_cli-0.1.37-py3-none-any.whl", hash = "sha256:c9b4b0db22300bb4e1fdb8377b50442f804c42702b46e77c30d7683ced6face4"}, - {file = "langgraph_cli-0.1.37.tar.gz", hash = "sha256:906b858102f96d5773da26258b5095348fcb300c151aa7e9a879873a8feb4871"}, + {file = "langgraph_cli-0.1.50-py3-none-any.whl", hash = "sha256:094c89a8b27d21419148d99ec026e6fd43d69fb9b0c39f4126dbef8a7d2e411d"}, + {file = "langgraph_cli-0.1.50.tar.gz", hash = "sha256:0d602b1e71e2763b94341ea021601613d212ce80397c2a04062820bd187b4ca6"}, ] [package.dependencies] @@ -1956,13 +1933,13 @@ click = ">=8.1.7,<9.0.0" [[package]] name = "langgraph-sdk" -version = "0.1.21" -description = "" +version = "0.1.26" +description = "SDK for interacting with LangGraph API" optional = false python-versions = "<4.0.0,>=3.9.0" files = [ - {file = "langgraph_sdk-0.1.21-py3-none-any.whl", hash = 
"sha256:6b8e121efe5d6500d60002ed0e61bff6ce1f340c486af1834ba0df6a36e0f242"}, - {file = "langgraph_sdk-0.1.21.tar.gz", hash = "sha256:69b614d3b1d73d712088ad9216d3b3f79e543c038b912b59277b11807b9bd3e1"}, + {file = "langgraph_sdk-0.1.26-py3-none-any.whl", hash = "sha256:599dd1bac14dd7efc99b66cc3ad3e87e575c7c01197678021f861cad064db3df"}, + {file = "langgraph_sdk-0.1.26.tar.gz", hash = "sha256:8f7ee382d8b169cecf22cfcd8b37d64f3e9dcc63fa24cfc3797651b5e9dcfaf5"}, ] [package.dependencies] @@ -1972,18 +1949,18 @@ orjson = ">=3.10.1" [[package]] name = "langsmith" -version = "0.1.60" +version = "0.1.93" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.60-py3-none-any.whl", hash = "sha256:3c3520ea473de0a984237b3e9d638fdf23ef3acc5aec89a42e693225e72d6120"}, - {file = "langsmith-0.1.60.tar.gz", hash = "sha256:6a145b5454437f9e0f81525f23c4dcdbb8c07b1c91553b8f697456c418d6a599"}, + {file = "langsmith-0.1.93-py3-none-any.whl", hash = "sha256:811210b9d5f108f36431bd7b997eb9476a9ecf5a2abd7ddbb606c1cdcf0f43ce"}, + {file = "langsmith-0.1.93.tar.gz", hash = "sha256:285b6ad3a54f50fa8eb97b5f600acc57d0e37e139dd8cf2111a117d0435ba9b4"}, ] [package.dependencies] orjson = ">=3.9.14,<4.0.0" -pydantic = ">=1,<3" +pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} requests = ">=2,<3" [[package]] @@ -2345,13 +2322,13 @@ files = [ [[package]] name = "openai" -version = "1.30.1" +version = "1.37.1" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.30.1-py3-none-any.whl", hash = "sha256:c9fb3c3545c118bbce8deb824397b9433a66d0d0ede6a96f7009c95b76de4a46"}, - {file = "openai-1.30.1.tar.gz", hash = "sha256:4f85190e577cba0b066e1950b8eb9b11d25bc7ebcc43a86b326ce1bfa564ec74"}, + {file = "openai-1.37.1-py3-none-any.whl", hash = 
"sha256:9a6adda0d6ae8fce02d235c5671c399cfa40d6a281b3628914c7ebf244888ee3"}, + {file = "openai-1.37.1.tar.gz", hash = "sha256:faf87206785a6b5d9e34555d6a3242482a6852bc802e453e2a891f68ee04ce55"}, ] [package.dependencies] @@ -4131,17 +4108,6 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "uuid6" -version = "2024.1.12" -description = "New time-based UUID formats which are suited for use as a database key" -optional = false -python-versions = ">=3.8" -files = [ - {file = "uuid6-2024.1.12-py3-none-any.whl", hash = "sha256:8150093c8d05a331bc0535bc5ef6cf57ac6eceb2404fd319bc10caee2e02c065"}, - {file = "uuid6-2024.1.12.tar.gz", hash = "sha256:ed0afb3a973057575f9883201baefe402787ca5e11e1d24e377190f0c43f1993"}, -] - [[package]] name = "uvicorn" version = "0.23.2" @@ -4412,4 +4378,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9.0,<3.12" -content-hash = "610d4a9bf7ec9ad26304b1dfbedb58ce5647a285d8bfd6f69a4f6233f2c36341" +content-hash = "3070360f48acaa948d739124db1232ee53da53238f77dc5419869f50d15dc0fd" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 7d65c5afe..f6d0f0638 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -15,9 +15,10 @@ fastapi-slim = ">=0.103.2" orjson = ">=3.9.10" python-multipart = "^0.0.6" langchain = ">=0.2.0" -langgraph = ">=0.0.51" +langchain_core = ">=0.2.23" +langgraph = ">=0.1.14" pydantic = "<2.0" -langchain-openai = ">=0.1.3" +langchain-openai = ">=0.1.17" beautifulsoup4 = "^4.12.3" boto3 = "^1.34.28" duckduckgo-search = "^5.3.0" @@ -35,15 +36,15 @@ unstructured = {extras = ["doc", "docx"], version = "^0.12.5"} pgvector = "^0.2.5" psycopg2-binary = "^2.9.9" pyjwt = {extras = ["crypto"], version = "^2.8.0"} -langchain-anthropic = ">=0.1.8" +langchain-anthropic = ">=0.1.20" structlog = "^24.1.0" python-json-logger = "^2.0.7" -langgraph-sdk = ">=0.1.9" -langgraph-cli = ">=0.1.37" -langchain-community = "^0.2.0" 
+langgraph-sdk = ">=0.1.26" +langchain-community = "^0.2.10" [tool.poetry.group.dev.dependencies] pygithub = "^2.1.1" +langgraph-cli = ">=0.1.50" [tool.poetry.group.lint.dependencies] ruff = "^0.1.4" diff --git a/backend/tests/unit_tests/app/helpers.py b/backend/tests/unit_tests/app/helpers.py index f6f106e6f..71b621b8a 100644 --- a/backend/tests/unit_tests/app/helpers.py +++ b/backend/tests/unit_tests/app/helpers.py @@ -1,9 +1,10 @@ from contextlib import asynccontextmanager -from app.lifespan import lifespan -from httpx import AsyncClient, ASGITransport +from httpx import ASGITransport, AsyncClient from typing_extensions import AsyncGenerator +from app.lifespan import lifespan + @asynccontextmanager async def get_client() -> AsyncGenerator[AsyncClient, None]: diff --git a/compose.override.test.yml b/compose.override.test.yml new file mode 100644 index 000000000..6299dfe96 --- /dev/null +++ b/compose.override.test.yml @@ -0,0 +1,39 @@ +volumes: + langgraph-data: + driver: local +services: + langgraph-postgres: + image: pgvector/pgvector:pg16 + restart: on-failure + ports: + - "5433:5432" + environment: + POSTGRES_DB: postgres + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + volumes: + - langgraph-data:/var/lib/postgresql/data + healthcheck: + test: pg_isready -U postgres + start_period: 10s + timeout: 1s + retries: 5 + backend: + container_name: opengpts-backend + pull_policy: build + build: + context: backend + ports: + - "8100:8000" # Backend is accessible on localhost:8100 + depends_on: + langgraph-postgres: + condition: service_healthy + env_file: + - .env + volumes: + - ./backend:/backend + environment: + POSTGRES_URI: "postgresql+psycopg2://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable" + LANGGRAPH_URL: "http://langgraph-api:8000" + command: + - --reload \ No newline at end of file diff --git a/compose.override.yml b/compose.override.yml index 9e22b1cde..ab43f3058 100644 --- a/compose.override.yml +++ b/compose.override.yml @@ 
-2,9 +2,6 @@ volumes: langgraph-data: driver: local services: - langgraph-api: - environment: - PGVECTOR_URI: "postgresql+psycopg2://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable" langgraph-postgres: image: pgvector/pgvector:pg16 restart: on-failure @@ -36,7 +33,7 @@ services: volumes: - ./backend:/backend environment: - PGVECTOR_URI: "postgresql+psycopg2://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable" + POSTGRES_URI: "postgresql+psycopg2://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable" LANGGRAPH_URL: "http://langgraph-api:8000" command: - --reload diff --git a/frontend/src/hooks/useConfigList.ts b/frontend/src/hooks/useConfigList.ts index 82ec627e6..c05919d14 100644 --- a/frontend/src/hooks/useConfigList.ts +++ b/frontend/src/hooks/useConfigList.ts @@ -105,7 +105,7 @@ export function useConfigList(): ConfigListProps { const deleteConfig = useCallback( async (assistantId: string): Promise => { - await fetch(`/assistants/${assistantId}`, { + await fetch(`/api/assistants/${assistantId}`, { method: "DELETE", headers: { Accept: "application/json",