Can't go to a node in the parent graph using Command.PARENT #3570

Open · 4 tasks done

iamolvr opened this issue Feb 24, 2025 · 5 comments

iamolvr commented Feb 24, 2025

Checked other resources

  • This is a bug, not a usage question. For questions, please use GitHub Discussions.
  • I added a clear and detailed title that summarizes the issue.
  • I read what a minimal reproducible example is (https://stackoverflow.com/help/minimal-reproducible-example).
  • I included a self-contained, minimal example that demonstrates the issue INCLUDING all the relevant imports. The code runs AS IS to reproduce the issue.

Example Code

import random

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph
from langgraph.graph import END, START
from langgraph.types import Command
from typing import Literal, Annotated, TypedDict
from langgraph.graph.message import AnyMessage
from langgraph.graph.message import add_messages




class State(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]
    personal_data: dict
    dialog_state: str



def node_a_child(state):
    return {"dialog_state": "b_child_state"}
    # return {"registration_status": True, "dialog_state": "b_child_state"}

def node_b_child(state) -> Command[Literal["node_c_child", "node_d_child"]]:
    value = random.choice([0, 1])
    return Command(
        goto="node_c_child" if value == 0 else "node_d_child"
    )

def node_c_child(state):
    return Command(
        graph=Command.PARENT,
        goto="node_sibling",
    )

def node_d_child(state):
    return {"dialog_state": "d_child_state"}




sub_builder = StateGraph(State)

sub_builder.add_node("node_a_child", node_a_child)
sub_builder.add_edge(START, "node_a_child")

sub_builder.add_node("node_b_child", node_b_child) 
sub_builder.add_edge("node_a_child", "node_b_child")

sub_builder.add_node("node_c_child", node_c_child)
sub_builder.add_edge("node_c_child", END)


sub_builder.add_node("node_d_child", node_d_child) 
sub_builder.add_edge("node_d_child", END)

sub_graph = sub_builder.compile(checkpointer=True)




def node_a_parent(state):
    return {"dialog_state": "a_parent_state"}

def node_b_parent(state):
    return {"dialog_state": "pop"}

def node_sibling(state):
    return {"dialog_state": "sibling"}

main_builder = StateGraph(State)

main_builder.add_node("node_a_parent", node_a_parent)
main_builder.add_edge(START, "node_a_parent")

main_builder.add_node("subgraph", sub_graph)
main_builder.add_edge("node_a_parent", "subgraph")

main_builder.add_node("node_b_parent", node_b_parent)
main_builder.add_edge("subgraph", "node_b_parent")


main_builder.add_edge("node_b_parent", END)

main_builder.add_node("node_sibling", node_sibling)
main_builder.add_edge("node_sibling", END)


checkpointer_temp = MemorySaver()

main_graph = main_builder.compile(checkpointer_temp, name="parent")


config = {
    "configurable": {
        "thread_id": 1,
    }
}


result = main_graph.invoke(input={"dialog_state": ["init_state"]}, config=config, subgraphs=True, debug=True)

Error Message and Stack Trace (if applicable)

{
	"name": "ParentCommand",
	"message": "Command(graph='subgraph', update=[('messages', []), ('dialog_state', 'a_parent_state'), ('messages', []), ('dialog_state', 'b_child_state')], goto='node_sibling')",
	"stack": "---------------------------------------------------------------------------
ParentCommand                             Traceback (most recent call last)
Cell In[46], line 8
      1 config = {
      2     \"configurable\": {
      3         \"thread_id\": 1,
      4     }
      5 }
----> 8 result = main_graph.invoke(input={\"dialog_state\": [\"init_state\"]}, config=config, subgraphs=True, debug=True)
      9 result

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/__init__.py:2124, in Pregel.invoke(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, **kwargs)
   2122 else:
   2123     chunks = []
-> 2124 for chunk in self.stream(
   2125     input,
   2126     config,
   2127     stream_mode=stream_mode,
   2128     output_keys=output_keys,
   2129     interrupt_before=interrupt_before,
   2130     interrupt_after=interrupt_after,
   2131     debug=debug,
   2132     **kwargs,
   2133 ):
   2134     if stream_mode == \"values\":
   2135         latest = chunk

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1779, in Pregel.stream(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, subgraphs)
   1773     # Similarly to Bulk Synchronous Parallel / Pregel model
   1774     # computation proceeds in steps, while there are channel updates.
   1775     # Channel updates from step N are only visible in step N+1
   1776     # channels are guaranteed to be immutable for the duration of the step,
   1777     # with channel updates applied only at the transition between steps.
   1778     while loop.tick(input_keys=self.input_channels):
-> 1779         for _ in runner.tick(
   1780             loop.tasks.values(),
   1781             timeout=self.step_timeout,
   1782             retry_policy=self.retry_policy,
   1783             get_waiter=get_waiter,
   1784         ):
   1785             # emit output
   1786             yield from output()
   1787 # emit output

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/runner.py:302, in PregelRunner.tick(self, tasks, reraise, timeout, retry_policy, get_waiter)
    300 yield
    301 # panic on failure or timeout
--> 302 _panic_or_proceed(
    303     futures.done.union(f for f, t in futures.items() if t is not None),
    304     panic=reraise,
    305 )

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/runner.py:619, in _panic_or_proceed(futs, timeout_exc_cls, panic)
    617         # raise the exception
    618         if panic:
--> 619             raise exc
    620 if inflight:
    621     # if we got here means we timed out
    622     while inflight:
    623         # cancel all pending tasks

File /opt/homebrew/Cellar/python@3.12/3.12.7_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/_base.py:340, in Future._invoke_callbacks(self)
    338 for callback in self._done_callbacks:
    339     try:
--> 340         callback(self)
    341     except Exception:
    342         LOGGER.exception('exception calling callback for %r', self)

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/runner.py:88, in FuturesDict.on_done(self, task, fut)
     82 def on_done(
     83     self,
     84     task: PregelExecutableTask,
     85     fut: F,
     86 ) -> None:
     87     try:
---> 88         self.callback(task, _exception(fut))
     89     finally:
     90         with self.lock:

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/runner.py:551, in PregelRunner.commit(self, task, exception)
    549         self.put_writes(task.id, interrupts)
    550 elif isinstance(exception, GraphBubbleUp):
--> 551     raise exception
    552 else:
    553     # save error to checkpointer
    554     self.put_writes(task.id, [(ERROR, exception)])

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/executor.py:83, in BackgroundExecutor.done(self, task)
     81 \"\"\"Remove the task from the tasks dict when it's done.\"\"\"
     82 try:
---> 83     task.result()
     84 except GraphBubbleUp:
     85     # This exception is an interruption signal, not an error
     86     # so we don't want to re-raise it on exit
     87     self.tasks.pop(task)

File /opt/homebrew/Cellar/python@3.12/3.12.7_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/_base.py:449, in Future.result(self, timeout)
    447     raise CancelledError()
    448 elif self._state == FINISHED:
--> 449     return self.__get_result()
    451 self._condition.wait(timeout)
    453 if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:

File /opt/homebrew/Cellar/python@3.12/3.12.7_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/_base.py:401, in Future.__get_result(self)
    399 if self._exception:
    400     try:
--> 401         raise self._exception
    402     finally:
    403         # Break a reference cycle with the exception in self._exception
    404         self = None

File /opt/homebrew/Cellar/python@3.12/3.12.7_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/thread.py:58, in _WorkItem.run(self)
     55     return
     57 try:
---> 58     result = self.fn(*self.args, **self.kwargs)
     59 except BaseException as exc:
     60     self.future.set_exception(exc)

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/retry.py:40, in run_with_retry(task, retry_policy, configurable)
     38     task.writes.clear()
     39     # run the task
---> 40     return task.proc.invoke(task.input, config)
     41 except ParentCommand as exc:
     42     ns: str = config[CONF][CONFIG_KEY_CHECKPOINT_NS]

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/utils/runnable.py:546, in RunnableSeq.invoke(self, input, config, **kwargs)
    542 config = patch_config(
    543     config, callbacks=run_manager.get_child(f\"seq:step:{i + 1}\")
    544 )
    545 if i == 0:
--> 546     input = step.invoke(input, config, **kwargs)
    547 else:
    548     input = step.invoke(input, config)

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/__init__.py:2124, in Pregel.invoke(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, **kwargs)
   2122 else:
   2123     chunks = []
-> 2124 for chunk in self.stream(
   2125     input,
   2126     config,
   2127     stream_mode=stream_mode,
   2128     output_keys=output_keys,
   2129     interrupt_before=interrupt_before,
   2130     interrupt_after=interrupt_after,
   2131     debug=debug,
   2132     **kwargs,
   2133 ):
   2134     if stream_mode == \"values\":
   2135         latest = chunk

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1779, in Pregel.stream(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, subgraphs)
   1773     # Similarly to Bulk Synchronous Parallel / Pregel model
   1774     # computation proceeds in steps, while there are channel updates.
   1775     # Channel updates from step N are only visible in step N+1
   1776     # channels are guaranteed to be immutable for the duration of the step,
   1777     # with channel updates applied only at the transition between steps.
   1778     while loop.tick(input_keys=self.input_channels):
-> 1779         for _ in runner.tick(
   1780             loop.tasks.values(),
   1781             timeout=self.step_timeout,
   1782             retry_policy=self.retry_policy,
   1783             get_waiter=get_waiter,
   1784         ):
   1785             # emit output
   1786             yield from output()
   1787 # emit output

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/runner.py:240, in PregelRunner.tick(self, tasks, reraise, timeout, retry_policy, get_waiter)
    238     self.commit(t, None)
    239 except Exception as exc:
--> 240     self.commit(t, exc)
    241     if reraise and futures:
    242         # will be re-raised after futures are done
    243         fut: concurrent.futures.Future = concurrent.futures.Future()

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/runner.py:551, in PregelRunner.commit(self, task, exception)
    549         self.put_writes(task.id, interrupts)
    550 elif isinstance(exception, GraphBubbleUp):
--> 551     raise exception
    552 else:
    553     # save error to checkpointer
    554     self.put_writes(task.id, [(ERROR, exception)])

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/runner.py:230, in PregelRunner.tick(self, tasks, reraise, timeout, retry_policy, get_waiter)
    228 t = tasks[0]
    229 try:
--> 230     run_with_retry(
    231         t,
    232         retry_policy,
    233         configurable={
    234             CONFIG_KEY_SEND: partial(writer, t),
    235             CONFIG_KEY_CALL: partial(call, t),
    236         },
    237     )
    238     self.commit(t, None)
    239 except Exception as exc:

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/pregel/retry.py:40, in run_with_retry(task, retry_policy, configurable)
     38     task.writes.clear()
     39     # run the task
---> 40     return task.proc.invoke(task.input, config)
     41 except ParentCommand as exc:
     42     ns: str = config[CONF][CONFIG_KEY_CHECKPOINT_NS]

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/utils/runnable.py:548, in RunnableSeq.invoke(self, input, config, **kwargs)
    546             input = step.invoke(input, config, **kwargs)
    547         else:
--> 548             input = step.invoke(input, config)
    549 # finish the root run
    550 except BaseException as e:

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/utils/runnable.py:310, in RunnableCallable.invoke(self, input, config, **kwargs)
    308 else:
    309     context.run(_set_config_context, config)
--> 310     ret = context.run(self.func, *args, **kwargs)
    311 if isinstance(ret, Runnable) and self.recurse:
    312     return ret.invoke(input, config)

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/graph/graph.py:94, in Branch._route(self, input, config, reader, writer)
     92 else:
     93     value = input
---> 94 result = self.path.invoke(value, config)
     95 return self._finish(writer, input, result, config)

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/utils/runnable.py:310, in RunnableCallable.invoke(self, input, config, **kwargs)
    308 else:
    309     context.run(_set_config_context, config)
--> 310     ret = context.run(self.func, *args, **kwargs)
    311 if isinstance(ret, Runnable) and self.recurse:
    312     return ret.invoke(input, config)

File ~/Library/Caches/pypoetry/virtualenvs/backend-ZFbskOdg-py3.12/lib/python3.12/site-packages/langgraph/graph/state.py:895, in _control_branch(value)
    893 for command in commands:
    894     if command.graph == Command.PARENT:
--> 895         raise ParentCommand(command)
    896     if isinstance(command.goto, Send):
    897         rtn.append(command.goto)

ParentCommand: Command(graph='subgraph', update=[('messages', []), ('dialog_state', 'a_parent_state'), ('messages', []), ('dialog_state', 'b_child_state')], goto='node_sibling')"
}

Description

I am trying to implement a handoff where a subgraph transfers the user to another node in the parent graph (a sibling node in this case) using a Command, but I keep encountering an error. It makes no difference whether the graph and subgraph share the same state or only a single key, or whether a reducer function is applied: the transition simply doesn't occur. To me, this seems somewhat illogical. I have experimented with various target nodes (e.g., node_a_parent), but the result remains the same: a ParentCommand exception.


System Info

System Information

OS: Darwin
OS Version: Darwin Kernel Version 24.3.0: Thu Jan 2 20:24:16 PST 2025; root:xnu-11215.81.4~3/RELEASE_ARM64_T6000
Python Version: 3.12.7 (main, Oct 1 2024, 02:05:46) [Clang 15.0.0 (clang-1500.3.9.4)]

Package Information

langchain_core: 0.3.36
langchain: 0.3.19
langsmith: 0.2.11
langchain_openai: 0.3.6
langchain_text_splitters: 0.3.6
langgraph_sdk: 0.1.51

Optional packages not installed

langserve

Other Dependencies

aiohttp<4.0.0,>=3.8.3: Installed. No version info available.
async-timeout<5.0.0,>=4.0.0;: Installed. No version info available.
httpx: 0.28.1
jsonpatch<2.0,>=1.33: Installed. No version info available.
langchain-anthropic;: Installed. No version info available.
langchain-aws;: Installed. No version info available.
langchain-cohere;: Installed. No version info available.
langchain-community;: Installed. No version info available.
langchain-core<1.0.0,>=0.3.34: Installed. No version info available.
langchain-core<1.0.0,>=0.3.35: Installed. No version info available.
langchain-deepseek;: Installed. No version info available.
langchain-fireworks;: Installed. No version info available.
langchain-google-genai;: Installed. No version info available.
langchain-google-vertexai;: Installed. No version info available.
langchain-groq;: Installed. No version info available.
langchain-huggingface;: Installed. No version info available.
langchain-mistralai;: Installed. No version info available.
langchain-ollama;: Installed. No version info available.
langchain-openai;: Installed. No version info available.
langchain-text-splitters<1.0.0,>=0.3.6: Installed. No version info available.
langchain-together;: Installed. No version info available.
langchain-xai;: Installed. No version info available.
langsmith-pyo3: Installed. No version info available.
langsmith<0.4,>=0.1.125: Installed. No version info available.
langsmith<0.4,>=0.1.17: Installed. No version info available.
numpy<2,>=1.26.4;: Installed. No version info available.
numpy<3,>=1.26.2;: Installed. No version info available.
openai<2.0.0,>=1.58.1: Installed. No version info available.
orjson: 3.10.15
packaging<25,>=23.2: Installed. No version info available.
pydantic: 2.10.5
pydantic<3.0.0,>=2.5.2;: Installed. No version info available.
pydantic<3.0.0,>=2.7.4: Installed. No version info available.
pydantic<3.0.0,>=2.7.4;: Installed. No version info available.
PyYAML>=5.3: Installed. No version info available.
requests: 2.32.3
requests-toolbelt: 1.0.0
requests<3,>=2: Installed. No version info available.
SQLAlchemy<3,>=1.4: Installed. No version info available.
tenacity!=8.4.0,<10,>=8.1.0: Installed. No version info available.
tenacity!=8.4.0,<10.0.0,>=8.1.0: Installed. No version info available.
tiktoken<1,>=0.7: Installed. No version info available.
typing-extensions>=4.7: Installed. No version info available.
zstandard: Installed. No version info available.

iamolvr changed the title from "Can't goto node in the parent graph" to "Can't go to a node in the parent graph using Command.PARENT" on Feb 24, 2025
@vbarda (Collaborator) commented Feb 24, 2025

Thanks for reporting -- the issue seems to come from using checkpointer=True in the subgraph. We'll need to investigate -- this is likely a bug. Is it a requirement for you to use checkpointer=True? Adjusted your example a bit:

import random

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph
from langgraph.graph import END, START
from langgraph.types import Command
from typing import Literal, Annotated, TypedDict
from langgraph.graph.message import AnyMessage
from langgraph.graph.message import add_messages
import operator


class State(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]
    personal_data: dict
    dialog_state: Annotated[list[str], operator.add]



def node_a_child(state):
    return {"dialog_state": ["b_child_state"]}

def node_b_child(state) -> Command[Literal["node_c_child", "node_d_child"]]:
    value = random.choice([0, 1])
    return Command(
        goto="node_c_child"# if value == 0 else "node_d_child"
    )

def node_c_child(state):
    return Command(
        graph=Command.PARENT,
        goto="node_sibling",
    )

def node_d_child(state):
    return {"dialog_state": ["d_child_state"]}


sub_builder = StateGraph(State)

sub_builder.add_node("node_a_child", node_a_child)
sub_builder.add_edge(START, "node_a_child")

sub_builder.add_node("node_b_child", node_b_child) 
sub_builder.add_edge("node_a_child", "node_b_child")

sub_builder.add_node("node_c_child", node_c_child)
sub_builder.add_edge("node_c_child", END)


sub_builder.add_node("node_d_child", node_d_child) 
sub_builder.add_edge("node_d_child", END)

# adding checkpointer=True causes the issue
sub_graph = sub_builder.compile()#checkpointer=True)


def node_a_parent(state):
    return {"dialog_state": ["a_parent_state"]}

def node_b_parent(state):
    return {"dialog_state": ["pop"]}

def node_sibling(state):
    return {"dialog_state": ["sibling"]}

main_builder = StateGraph(State)

main_builder.add_node("node_a_parent", node_a_parent)
main_builder.add_edge(START, "node_a_parent")

main_builder.add_node("subgraph", sub_graph)
main_builder.add_edge("node_a_parent", "subgraph")

main_builder.add_node("node_b_parent", node_b_parent)
main_builder.add_edge("subgraph", "node_b_parent")


main_builder.add_edge("node_b_parent", END)

main_builder.add_node("node_sibling", node_sibling)
main_builder.add_edge("node_sibling", END)


checkpointer_temp = MemorySaver()

main_graph = main_builder.compile(checkpointer_temp, name="parent")


config = {
    "configurable": {
        "thread_id": 1,
    }
}


result = main_graph.invoke(input={"dialog_state": ["init_state"]}, config=config, subgraphs=True)

@vbarda (Collaborator) commented Feb 24, 2025

Reduced further to just this example illustrating the issue:

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, START
from langgraph.types import Command
from typing import Literal, Annotated, TypedDict
import operator


class State(TypedDict):
    dialog_state: Annotated[list[str], operator.add]


def node_a_child(state):
    return {"dialog_state": ["a_child_state"]}

def node_b_child(state):
    return Command(
        graph=Command.PARENT,
        goto="node_b_parent",
        update={"dialog_state": ["b_child_state"]}
    )

sub_builder = StateGraph(State)
sub_builder.add_node(node_a_child)
sub_builder.add_node(node_b_child) 
sub_builder.add_edge(START, "node_a_child")
sub_builder.add_edge("node_a_child", "node_b_child")
# adding checkpointer=True causes the issue
sub_graph = sub_builder.compile()#checkpointer=True)

def node_b_parent(state):
    return {"dialog_state": ["node_b_parent"]}

main_builder = StateGraph(State)
main_builder.add_node(node_b_parent)
main_builder.add_edge(START, "subgraph_node")
main_builder.add_node("subgraph_node", sub_graph, destinations=("node_b_parent",))

checkpointer_temp = MemorySaver()
main_graph = main_builder.compile(checkpointer_temp, name="parent")
config = {
    "configurable": {
        "thread_id": 1,
    }
}

result = main_graph.invoke(input={"dialog_state": ["init_state"]}, config=config, subgraphs=True)

@iamolvr (Author) commented Feb 25, 2025

Thank you for your quick response. Yes, removing the checkpointer argument in the subgraph helps.

Is it a requirement for you to use checkpointer=True?

Ideally, yes, as I want to store the states of subgraphs. Currently, as a workaround, I'm planning to use a new substate key in the parent graph's state, which does the trick.
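
For reference, here is a rough sketch of what that workaround could look like (the sub_state key below is a hypothetical name), assuming the subgraph is compiled without checkpointer=True and writes its progress into a dedicated key of the shared parent state, so the parent graph's checkpointer persists it while Command.PARENT handoffs still work:

import operator
from typing import Annotated, TypedDict

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, START
from langgraph.types import Command


class State(TypedDict):
    dialog_state: Annotated[list[str], operator.add]
    # hypothetical key: the subgraph's own progress lives in the parent state,
    # so it is saved by the parent's checkpointer instead of a subgraph checkpointer
    sub_state: dict


def node_a_child(state):
    # record the subgraph's progress in the shared parent state
    return {"dialog_state": ["a_child_state"], "sub_state": {"step": "a_child"}}

def node_b_child(state):
    # hand off to a node in the parent graph
    return Command(
        graph=Command.PARENT,
        goto="node_sibling",
        update={"dialog_state": ["b_child_state"]},
    )

sub_builder = StateGraph(State)
sub_builder.add_node(node_a_child)
sub_builder.add_node(node_b_child)
sub_builder.add_edge(START, "node_a_child")
sub_builder.add_edge("node_a_child", "node_b_child")
# compiled WITHOUT checkpointer=True
sub_graph = sub_builder.compile()

def node_sibling(state):
    return {"dialog_state": ["sibling"]}

main_builder = StateGraph(State)
main_builder.add_node(node_sibling)
main_builder.add_node("subgraph", sub_graph, destinations=("node_sibling",))
main_builder.add_edge(START, "subgraph")

main_graph = main_builder.compile(MemorySaver(), name="parent")
config = {"configurable": {"thread_id": "1"}}
result = main_graph.invoke({"dialog_state": ["init_state"]}, config=config)

This only illustrates the interim approach; the underlying checkpointer=True behavior still looks like a bug to be fixed upstream.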

@rahilvora commented

Hey @vbarda, I am facing a similar issue.

I am using the langgraph-supervisor library.

I attached two agents I created (not built with the create_react_agent function): one simple LLM agent that can chat with the user, and another research agent that can perform research for the user.

Right now, when I send a message like "Hi" to the supervisor, it goes into an infinite loop calling the chat assistant again and again; later it tries to call the research assistant and fails.

Note: both of my agents have different states.

Supervisor code snippet

from langgraph_supervisor import create_supervisor
from ultrasonic_agent.newsletter_research_assistant import builder as newsletter_builder
from ultrasonic_agent.simple_llm_assistant import web_assistant_builder
from langchain_ollama import ChatOllama
from langchain.prompts import PromptTemplate

writer_llm = ChatOllama(model="llama3.2", temperature=1.0, base_url="http://host.docker.internal:11434/")

workflow = create_supervisor(
    agents=[
      newsletter_builder.compile(name="newsletter_research_assistant"),
      web_assistant_builder.compile(name="chat_assistant")
      ],
    model=writer_llm,
    output_mode="last_message",
    supervisor_name="ultrasonic_supervisor",
    prompt=PromptTemplate.from_template(
        """
        You are a supervisor for a team of agents.
        You are given a task to complete.
        You need to decide which agent to assign the task to.
        For basic communication and casual conversation, use chat_assistant.
        If user asks for research about certain topic, use newsletter_research_assistant. 
        Please be sure to handoff the message to the correct agent. Do not try to complete the task yourself.
        Do not try to do any research yourself or perform any other tasks.
        Do not call the agents in the loop again and again once you have answered users questions or feedback.
        """
    )
)

graph = workflow.compile()

LangSmith trace: https://smith.langchain.com/public/2b8077fa-54ca-4888-b3cf-44e41c7f013c/r

@vbarda (Collaborator) commented Mar 4, 2025

@rahilvora please open an issue in the supervisor repo for this.
