Added initialize_agent and Memory #93

Merged 12 commits on Apr 2, 2023
18 changes: 11 additions & 7 deletions src/backend/langflow/config.yaml
@@ -8,14 +8,15 @@ agents:
- ZeroShotAgent
- JsonAgent
- CSVAgent
- initialize_agent

prompts:
- PromptTemplate
- FewShotPromptTemplate

llms:
- OpenAI
- OpenAIChat
- ChatOpenAI

tools:
- Search
@@ -33,13 +34,16 @@ toolkits:
- OpenAPIToolkit
- JsonToolkit

embeddings:
#
memories:
- ConversationBufferMemory

vectorstores:
#
embeddings: []


vectorstores: []


documentloaders: []

documentloaders:
#

dev: false
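The config.yaml lists which component classes the backend exposes to the frontend; this PR adds initialize_agent under agents, introduces a memories section with ConversationBufferMemory, and swaps OpenAIChat for ChatOpenAI. As a rough illustration (not langflow's actual settings loader), the gating could be checked like this; the file path and the is_exposed helper are assumptions made for the sketch:

```python
# Rough sketch (not langflow's settings loader) of how config.yaml gates
# which component classes the backend exposes; the path and helper name
# are assumptions made for this example.
import yaml

with open("src/backend/langflow/config.yaml") as f:
    settings = yaml.safe_load(f)

def is_exposed(section: str, name: str) -> bool:
    """True if `name` is listed under `section`, or dev mode is enabled."""
    return settings.get("dev", False) or name in (settings.get(section) or [])

print(is_exposed("agents", "initialize_agent"))            # True with this config
print(is_exposed("memories", "ConversationBufferMemory"))  # True
```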
10 changes: 7 additions & 3 deletions src/backend/langflow/custom/customs.py
@@ -1,9 +1,13 @@
from langflow.template import nodes

CUSTOM_NODES = {
"prompts": {**nodes.ZeroShotPromptNode().to_dict()},
"tools": {**nodes.PythonFunctionNode().to_dict(), **nodes.ToolNode().to_dict()},
"agents": {**nodes.JsonAgentNode().to_dict(), **nodes.CSVAgentNode().to_dict()},
"prompts": {"ZeroShotPrompt": nodes.ZeroShotPromptNode()},
"tools": {"PythonFunction": nodes.PythonFunctionNode(), "Tool": nodes.ToolNode()},
"agents": {
"JsonAgent": nodes.JsonAgentNode(),
"CSVAgent": nodes.CSVAgentNode(),
"initialize_agent": nodes.InitializeAgentNode(),
},
}


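CUSTOM_NODES now maps display names to node instances instead of merging each node's to_dict() output, which lets callers look a node up by name. A hypothetical lookup over that registry might read as follows; get_custom_node is an illustrative helper, not part of the PR:

```python
# Hypothetical lookup over the new name -> node-instance registry;
# CUSTOM_NODES comes from langflow at this revision, the helper below
# is only illustrative.
from langflow.custom.customs import CUSTOM_NODES

def get_custom_node(category: str, name: str):
    """Return the custom node registered under (category, name)."""
    try:
        return CUSTOM_NODES[category][name]
    except KeyError as exc:
        raise ValueError(f"No custom node {name!r} in category {category!r}") from exc

agent_node = get_custom_node("agents", "initialize_agent")
print(type(agent_node).__name__)  # InitializeAgentNode
```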
10 changes: 6 additions & 4 deletions src/backend/langflow/graph/base.py
@@ -121,10 +121,10 @@ def _build_params(self):
f"Required input {key} for module {self.node_type} not found"
)
elif value["list"]:
if key in params:
if key not in params:
params[key] = []
if edge is not None:
params[key].append(edge.source)
else:
params[key] = [edge.source]
elif value["required"] or edge is not None:
params[key] = edge.source
elif value["required"] or value.get("value"):
@@ -179,7 +179,9 @@ def _build(self):
params=self.params,
)
except Exception as exc:
raise ValueError(f"Error building node {self.node_type}") from exc
raise ValueError(
f"Error building node {self.node_type}: {str(exc)}"
) from exc

if self._built_object is None:
raise ValueError(f"Node type {self.node_type} not found")
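The _build_params hunk fixes how list-valued inputs accumulate (create the list the first time the key is seen, then append each edge source), and _build now surfaces the underlying exception message. A standalone sketch of the corrected accumulation pattern, with the surrounding graph machinery stripped away:

```python
# Self-contained version of the corrected list-accumulation logic:
# initialize the list the first time the key appears, then append each
# non-None edge source, instead of appending only when the key already exists.
from typing import Any, Dict, Optional

def accumulate_list_param(params: Dict[str, Any], key: str, edge_source: Optional[Any]) -> None:
    if key not in params:
        params[key] = []
    if edge_source is not None:
        params[key].append(edge_source)

params: Dict[str, Any] = {}
for source in ["tool_a", "tool_b"]:
    accumulate_list_param(params, "tools", source)
print(params)  # {'tools': ['tool_a', 'tool_b']}
```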
10 changes: 8 additions & 2 deletions src/backend/langflow/graph/graph.py
@@ -106,7 +106,10 @@ def _build_nodes(self) -> List[Node]:

if node_type in prompt_creator.to_list():
nodes.append(PromptNode(node))
elif node_type in agent_creator.to_list():
elif (
node_type in agent_creator.to_list()
or node_lc_type in agent_creator.to_list()
):
nodes.append(AgentNode(node))
elif node_type in chain_creator.to_list():
nodes.append(ChainNode(node))
@@ -118,7 +121,10 @@ def _build_nodes(self) -> List[Node]:
nodes.append(ToolkitNode(node))
elif node_type in wrapper_creator.to_list():
nodes.append(WrapperNode(node))
elif node_type in llm_creator.to_list():
elif (
node_type in llm_creator.to_list()
or node_lc_type in llm_creator.to_list()
):
nodes.append(LLMNode(node))
else:
nodes.append(Node(node))
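_build_nodes now treats a node as an agent or an LLM when either its frontend type or its underlying LangChain type (node_lc_type) appears in the corresponding creator list, so entries such as initialize_agent and ChatOpenAI still resolve. A dependency-free sketch of that two-key dispatch, with illustrative type sets rather than langflow's real registries:

```python
# Dependency-free sketch of the dual-key dispatch: a node matches a category
# when either its frontend type or its LangChain type is registered there.
# The sets below are illustrative, not langflow's real creator lists.
AGENT_TYPES = {"JsonAgent", "CSVAgent", "initialize_agent"}
LLM_TYPES = {"OpenAI", "ChatOpenAI"}

def classify(node_type: str, node_lc_type: str) -> str:
    if node_type in AGENT_TYPES or node_lc_type in AGENT_TYPES:
        return "agent"
    if node_type in LLM_TYPES or node_lc_type in LLM_TYPES:
        return "llm"
    return "generic"

print(classify("AgentInitializerNode", "initialize_agent"))  # agent
print(classify("ChatOpenAI", "ChatOpenAI"))                  # llm
```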
16 changes: 11 additions & 5 deletions src/backend/langflow/interface/agents/base.py
@@ -31,12 +31,18 @@ def get_signature(self, name: str) -> Dict | None:
except ValueError as exc:
raise ValueError("Agent not found") from exc

# Resolve display names, preferring function_name() over __name__ when it exists
def to_list(self) -> List[str]:
return [
agent.__name__
for agent in self.type_to_loader_dict.values()
if agent.__name__ in settings.agents or settings.dev
]
names = []
for name, agent in self.type_to_loader_dict.items():
agent_name = (
agent.function_name()
if hasattr(agent, "function_name")
else agent.__name__
)
if agent_name in settings.agents or settings.dev:
names.append(agent_name)
return names


agent_creator = AgentCreator()
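to_list now prefers a class's function_name() over __name__, which is how the InitializeAgent wrapper can advertise itself under the lowercase initialize_agent key used in config.yaml. A minimal sketch of the resolution rule with toy classes standing in for the registered agents:

```python
# Toy classes illustrating the name-resolution rule in to_list():
# use function_name() when the class defines it, otherwise __name__.
class JsonAgent:
    pass

class InitializeAgent:
    @staticmethod
    def function_name() -> str:
        return "initialize_agent"

def display_name(agent_cls: type) -> str:
    if hasattr(agent_cls, "function_name"):
        return agent_cls.function_name()
    return agent_cls.__name__

print(display_name(JsonAgent))        # JsonAgent
print(display_name(InitializeAgent))  # initialize_agent
```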
34 changes: 33 additions & 1 deletion src/backend/langflow/interface/agents/custom.py
@@ -1,4 +1,4 @@
from typing import Any, Optional
from typing import Any, List, Optional

from langchain import LLMChain
from langchain.agents import AgentExecutor, ZeroShotAgent
@@ -8,12 +8,19 @@
from langchain.agents.agent_toolkits.pandas.prompt import SUFFIX as PANDAS_SUFFIX
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
from langchain.schema import BaseLanguageModel
from langchain.llms.base import BaseLLM
from langchain.tools.python.tool import PythonAstREPLTool
from langchain.agents import initialize_agent, Tool
from langchain.memory.chat_memory import BaseChatMemory


class JsonAgent(AgentExecutor):
"""Json agent"""

@staticmethod
def function_name():
return "JsonAgent"

@classmethod
def initialize(cls, *args, **kwargs):
return cls.from_toolkit_and_llm(*args, **kwargs)
@@ -46,6 +53,10 @@ def run(self, *args, **kwargs):
class CSVAgent(AgentExecutor):
"""CSV agent"""

@staticmethod
def function_name():
return "CSVAgent"

@classmethod
def initialize(cls, *args, **kwargs):
return cls.from_toolkit_and_llm(*args, **kwargs)
@@ -87,7 +98,28 @@ def run(self, *args, **kwargs):
return super().run(*args, **kwargs)


class InitializeAgent(AgentExecutor):
"""Implementation of initialize_agent function"""

@staticmethod
def function_name():
return "initialize_agent"

@classmethod
def initialize(
cls, llm: BaseLLM, tools: List[Tool], agent: str, memory: BaseChatMemory
):
return initialize_agent(tools=tools, llm=llm, agent=agent, memory=memory)

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

def run(self, *args, **kwargs):
return super().run(*args, **kwargs)


CUSTOM_AGENTS = {
"JsonAgent": JsonAgent,
"CSVAgent": CSVAgent,
"initialize_agent": InitializeAgent,
}
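InitializeAgent is a thin wrapper around LangChain's initialize_agent, forwarding the tools, LLM, agent-type string, and chat memory it receives. A hedged end-to-end sketch of what the node ultimately builds, written against the LangChain API of this era; the stub search tool, the agent-type string, and the OpenAI key requirement are illustrative choices, not mandated by the PR:

```python
# Hedged sketch of what the InitializeAgent node ultimately builds, using the
# LangChain API of this era; the stub tool, the agent-type string, and the
# need for OPENAI_API_KEY are illustrative, not fixed by the PR.
from langchain.agents import Tool, initialize_agent
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory

def fake_search(query: str) -> str:
    """Stub tool so the example needs no real search backend."""
    return f"results for: {query}"

tools = [Tool(name="Search", func=fake_search, description="Searches the web.")]
llm = OpenAI(temperature=0)  # expects OPENAI_API_KEY in the environment
memory = ConversationBufferMemory(memory_key="chat_history")

agent = initialize_agent(
    tools=tools,
    llm=llm,
    agent="conversational-react-description",  # a string, matching the node's `agent: str` input
    memory=memory,
)
print(agent.run("Who painted the Mona Lisa?"))
```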
45 changes: 45 additions & 0 deletions src/backend/langflow/interface/agents/prebuilt.py
@@ -0,0 +1,45 @@
from langchain import LLMChain
from langchain.agents import AgentExecutor, ZeroShotAgent
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
from langchain.schema import BaseLanguageModel


class MalfoyAgent(AgentExecutor):
"""Json agent"""

prefix = "Malfoy: "

@classmethod
def initialize(cls, *args, **kwargs):
return cls.from_toolkit_and_llm(*args, **kwargs)

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

@classmethod
def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
tools = toolkit.get_tools()
tool_names = [tool.name for tool in tools]
prompt = ZeroShotAgent.create_prompt(
tools,
prefix=JSON_PREFIX,
suffix=JSON_SUFFIX,
format_instructions=FORMAT_INSTRUCTIONS,
input_variables=None,
)
llm_chain = LLMChain(
llm=llm,
prompt=prompt,
)
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)

def run(self, *args, **kwargs):
return super().run(*args, **kwargs)


PREBUILT_AGENTS = {
"MalfoyAgent": MalfoyAgent,
}
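MalfoyAgent follows the same pattern as JsonAgent: build a ZeroShotAgent prompt from a toolkit's tools and wrap it in an AgentExecutor. A hedged usage sketch, assuming a LangChain release of this era for the import paths; the JsonSpec contents and the OpenAI key requirement are made up for illustration:

```python
# Hedged usage sketch for the prebuilt agent: build it from a JsonToolkit and
# an LLM, then run a query. The JsonSpec contents are illustrative, and the
# import paths follow the LangChain version this PR targets.
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.llms import OpenAI
from langchain.tools.json.tool import JsonSpec

from langflow.interface.agents.prebuilt import MalfoyAgent

spec = JsonSpec(dict_={"endpoints": {"/items": {"get": "list items"}}})
toolkit = JsonToolkit(spec=spec)

agent = MalfoyAgent.initialize(toolkit=toolkit, llm=OpenAI(temperature=0))
print(agent.run("What endpoints are available?"))
```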
6 changes: 4 additions & 2 deletions src/backend/langflow/interface/base.py
@@ -1,5 +1,5 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Union

from pydantic import BaseModel

@@ -20,7 +20,7 @@ def type_to_loader_dict(self) -> Dict:
return self.type_dict

@abstractmethod
def get_signature(self, name: str) -> Optional[Dict[Any, Any]]:
def get_signature(self, name: str) -> Union[Optional[Dict[Any, Any]], FrontendNode]:
pass

@abstractmethod
@@ -42,6 +42,8 @@ def frontend_node(self, name) -> FrontendNode:
signature = self.get_signature(name)
if signature is None:
raise ValueError(f"{name} not found")
if isinstance(signature, FrontendNode):
return signature
fields = [
TemplateField(
name=key,
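get_signature may now return either a signature dict or an already-built FrontendNode, and frontend_node passes the latter through untouched, which is how the custom initialize_agent node reaches the frontend. A toy sketch of that short-circuit, with a stand-in FrontendNode rather than langflow's template class:

```python
# Toy sketch of the short-circuit in frontend_node(): a ready-made
# FrontendNode is returned as-is, otherwise one is built from the signature
# dict. FrontendNode here is a stand-in, not langflow's template class.
from typing import Any, Dict, Optional, Union

class FrontendNode:
    def __init__(self, name: str):
        self.name = name

Signature = Union[Optional[Dict[Any, Any]], FrontendNode]

def frontend_node(signature: Signature, name: str) -> FrontendNode:
    if signature is None:
        raise ValueError(f"{name} not found")
    if isinstance(signature, FrontendNode):
        return signature  # custom nodes arrive fully built
    return FrontendNode(name=name)  # otherwise build from the signature dict

print(frontend_node(FrontendNode("initialize_agent"), "initialize_agent").name)
print(frontend_node({"template": {}}, "OpenAI").name)
```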
36 changes: 10 additions & 26 deletions src/backend/langflow/interface/custom_lists.py
@@ -6,15 +6,8 @@
from langchain.chat_models import ChatOpenAI

## Memory
# from langchain.memory.buffer_window import ConversationBufferWindowMemory
# from langchain.memory.chat_memory import ChatMessageHistory
# from langchain.memory.combined import CombinedMemory
# from langchain.memory.entity import ConversationEntityMemory
# from langchain.memory.kg import ConversationKGMemory
# from langchain.memory.readonly import ReadOnlySharedMemory
# from langchain.memory.simple import SimpleMemory
# from langchain.memory.summary import ConversationSummaryMemory
# from langchain.memory.summary_buffer import ConversationSummaryBufferMemory
from langchain import memory

## Document Loaders
from langchain.document_loaders import (
AirbyteJSONLoader,
@@ -104,23 +97,6 @@
llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore


## Memory

memory_type_to_cls_dict: dict[str, Any] = {
# "CombinedMemory": CombinedMemory,
# "ConversationBufferWindowMemory": ConversationBufferWindowMemory,
# "ConversationBufferMemory": ConversationBufferMemory,
# "SimpleMemory": SimpleMemory,
# "ConversationSummaryBufferMemory": ConversationSummaryBufferMemory,
# "ConversationKGMemory": ConversationKGMemory,
# "ConversationEntityMemory": ConversationEntityMemory,
# "ConversationSummaryMemory": ConversationSummaryMemory,
# "ChatMessageHistory": ChatMessageHistory,
# "ConversationStringBufferMemory": ConversationStringBufferMemory,
# "ReadOnlySharedMemory": ReadOnlySharedMemory,
}


## Chain
# from langchain.chains.loading import type_to_loader_dict
# from langchain.chains.conversation.base import ConversationChain
@@ -142,6 +118,14 @@
if not toolkit_name.islower()
}

## Memory


memory_type_to_cls_dict: dict[str, Any] = {
memory_name: import_class(f"langchain.memory.{memory_name}")
for memory_name in memory.__all__
}


wrapper_type_to_cls_dict: dict[str, Any] = {
wrapper.__name__: wrapper for wrapper in [requests.RequestsWrapper]
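The hand-maintained, commented-out memory mapping is replaced by a registry generated from langchain.memory.__all__. A hedged sketch of the same pattern using importlib directly, so it runs without langflow's import_class helper; it assumes a LangChain release where langchain.memory defines __all__:

```python
# Hedged sketch of building the memory registry from langchain.memory.__all__,
# using importlib directly instead of langflow's import_class helper; assumes
# a LangChain release where langchain.memory defines __all__.
import importlib
from typing import Any

memory_module = importlib.import_module("langchain.memory")

memory_type_to_cls_dict: dict[str, Any] = {
    memory_name: getattr(memory_module, memory_name)
    for memory_name in memory_module.__all__
}

print("ConversationBufferMemory" in memory_type_to_cls_dict)  # True
```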
23 changes: 20 additions & 3 deletions src/backend/langflow/interface/importing/utils.py
@@ -8,7 +8,7 @@
from langchain.chains.base import Chain
from langchain.llms.base import BaseLLM
from langchain.tools import BaseTool

from langchain.chat_models.base import BaseChatModel
from langflow.interface.tools.util import get_tool_by_name


@@ -31,13 +31,30 @@ def import_by_type(_type: str, name: str) -> Any:
func_dict = {
"agents": import_agent,
"prompts": import_prompt,
"llms": import_llm,
"llms": {"llm": import_llm, "chat": import_chat_llm},
"tools": import_tool,
"chains": import_chain,
"toolkits": import_toolkit,
"wrappers": import_wrapper,
"memory": import_memory,
}
return func_dict[_type](name)
if _type == "llms":
key = "chat" if "chat" in name.lower() else "llm"
loaded_func = func_dict[_type][key] # type: ignore
else:
loaded_func = func_dict[_type]

return loaded_func(name)


def import_chat_llm(llm: str) -> BaseChatModel:
"""Import chat llm from llm name"""
return import_class(f"langchain.chat_models.{llm}")


def import_memory(memory: str) -> Any:
"""Import memory from memory name"""
return import_module(f"from langchain.memory import {memory}")


def import_class(class_path: str) -> Any:
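import_by_type now routes "llms" to a chat-model importer whenever the class name contains "chat", and gains an import_memory entry; memory classes are resolved through langflow's import_module helper, which understands "from X import Y" strings. A dependency-light sketch of the routing with stub importers standing in for the real ones:

```python
# Dependency-light sketch of the new routing in import_by_type(): LLM names
# containing "chat" go to the chat-model importer, everything else to the
# plain LLM importer. The stub importers only return dotted paths.
from typing import Any

def import_llm(name: str) -> str:
    return f"langchain.llms.{name}"

def import_chat_llm(name: str) -> str:
    return f"langchain.chat_models.{name}"

def import_memory(name: str) -> str:
    return f"langchain.memory.{name}"

func_dict: dict[str, Any] = {
    "llms": {"llm": import_llm, "chat": import_chat_llm},
    "memory": import_memory,
}

def import_by_type(_type: str, name: str) -> Any:
    if _type == "llms":
        key = "chat" if "chat" in name.lower() else "llm"
        return func_dict[_type][key](name)
    return func_dict[_type](name)

print(import_by_type("llms", "ChatOpenAI"))  # langchain.chat_models.ChatOpenAI
print(import_by_type("llms", "OpenAI"))      # langchain.llms.OpenAI
print(import_by_type("memory", "ConversationBufferMemory"))
```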