[X] I added a very descriptive title to this issue.
[X] I searched the LangGraph/LangChain documentation with the integrated search.
[X] I used the GitHub search to find a similar question and didn't find it.
[X] I am sure that this is a bug in LangGraph/LangChain rather than my code.
[X] I am sure this is better as an issue rather than a GitHub discussion, since this is a LangGraph bug and not a design question.
Example Code
for step in chain.stream({"messages":
[
HumanMessage(
content="Find the current temperature in Tokyo, then, respond with a flashcard summarizing this information"
)
]}
):
print(step)
Error Message and Stack Trace (if applicable)
{
"name": "InvalidUpdateError",
"message": "Expected dict, got [AIMessage(content='Thought: The search results provided do not contain the current temperature in Tokyo directly, making it impossible to respond accurately with a flashcard summarizing this information.'), SystemMessage(content='Context from last attempt: The information retrieved does not include the current temperature in Tokyo, which is necessary to create the flashcard summary as requested. A direct search for the current temperature in Tokyo or a weather report source that includes this information is needed.')]",
"stack": "---------------------------------------------------------------------------
InvalidUpdateError Traceback (most recent call last)
Cell In[24], line 1
----> 1 for step in chain.stream({\"messages\":
2 [
3 HumanMessage(
4 content=\"Find the current temperature in Tokyo, then, respond with a flashcard summarizing this information\"
5 )
6 ]}
7 ):
8 print(step)
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1020, in Pregel.stream(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug)
1017 break
1019 # panic on failure or timeout
-> 1020 _panic_or_proceed(all_futures, loop.step)
1021 # don't keep futures around in memory longer than needed
1022 del done, inflight, futures
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1450, in _panic_or_proceed(futs, step, timeout_exc_cls)
1448 inflight.pop().cancel()
1449 # raise the exception
-> 1450 raise exc
1452 if inflight:
1453 # if we got here means we timed out
1454 while inflight:
1455 # cancel all pending tasks
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/pregel/executor.py:60, in BackgroundExecutor.done(self, task)
58 def done(self, task: concurrent.futures.Future) -> None:
59 try:
---> 60 task.result()
61 except GraphInterrupt:
62 # This exception is an interruption signal, not an error
63 # so we don't want to re-raise it on exit
64 self.tasks.pop(task)
File /usr/local/Cellar/python@3.12/3.12.4/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/_base.py:449, in Future.result(self, timeout)
447 raise CancelledError()
448 elif self._state == FINISHED:
--> 449 return self.__get_result()
451 self._condition.wait(timeout)
453 if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
File /usr/local/Cellar/python@3.12/3.12.4/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/_base.py:401, in Future.__get_result(self)
399 if self._exception:
400 try:
--> 401 raise self._exception
402 finally:
403 # Break a reference cycle with the exception in self._exception
404 self = None
File /usr/local/Cellar/python@3.12/3.12.4/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/thread.py:58, in _WorkItem.run(self)
55 return
57 try:
---> 58 result = self.fn(*self.args, **self.kwargs)
59 except BaseException as exc:
60 self.future.set_exception(exc)
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/pregel/retry.py:26, in run_with_retry(task, retry_policy)
24 task.writes.clear()
25 # run the task
---> 26 task.proc.invoke(task.input, task.config)
27 # if successful, end
28 break
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langchain_core/runnables/base.py:2878, in RunnableSequence.invoke(self, input, config, **kwargs)
2876 input = context.run(step.invoke, input, config, **kwargs)
2877 else:
-> 2878 input = context.run(step.invoke, input, config)
2879 # finish the root run
2880 except BaseException as e:
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/utils.py:93, in RunnableCallable.invoke(self, input, config, **kwargs)
91 kwargs = {**self.kwargs, **kwargs}
92 if self.trace:
---> 93 ret = self._call_with_config(
94 self.func, input, merge_configs(self.config, config), **kwargs
95 )
96 else:
97 config = merge_configs(self.config, config)
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langchain_core/runnables/base.py:1785, in Runnable._call_with_config(self, func, input, config, run_type, **kwargs)
1781 context = copy_context()
1782 context.run(_set_config_context, child_config)
1783 output = cast(
1784 Output,
-> 1785 context.run(
1786 call_func_with_variable_args, # type: ignore[arg-type]
1787 func, # type: ignore[arg-type]
1788 input, # type: ignore[arg-type]
1789 config,
1790 run_manager,
1791 **kwargs,
1792 ),
1793 )
1794 except BaseException as e:
1795 run_manager.on_chain_error(e)
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langchain_core/runnables/config.py:397, in call_func_with_variable_args(func, input, config, run_manager, **kwargs)
395 if run_manager is not None and accepts_run_manager(func):
396 kwargs[\"run_manager\"] = run_manager
--> 397 return func(input, **kwargs)
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/pregel/write.py:98, in ChannelWrite._write(self, input, config)
93 # process entries into values
94 values = [
95 input if write.value is PASSTHROUGH else write.value for write in entries
96 ]
97 values = [
---> 98 val if write.mapper is None else write.mapper.invoke(val, config)
99 for val, write in zip(values, entries)
100 ]
101 values = [
102 (write.channel, val)
103 for val, write in zip(values, entries)
104 if not write.skip_none or val is not None
105 ]
106 # write packets and values
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/utils.py:102, in RunnableCallable.invoke(self, input, config, **kwargs)
100 if accepts_config(self.func):
101 kwargs[\"config\"] = config
--> 102 ret = context.run(self.func, input, **kwargs)
103 if isinstance(ret, Runnable) and self.recurse:
104 return ret.invoke(input, config)
File ~/workspace/PyriStage/langgraph/.venv/lib/python3.12/site-packages/langgraph/graph/state.py:543, in CompiledStateGraph.attach_node.<locals>._get_state_key(input, config, key)
541 return value if value is not None else SKIP_WRITE
542 else:
--> 543 raise InvalidUpdateError(f\"Expected dict, got {input}\")
InvalidUpdateError: Expected dict, got [AIMessage(content='Thought: The search results provided do not contain the current temperature in Tokyo directly, making it impossible to respond accurately with a flashcard summarizing this information.'), SystemMessage(content='Context from last attempt: The information retrieved does not include the current temperature in Tokyo, which is necessary to create the flashcard summary as requested. A direct search for the current temperature in Tokyo or a weather report source that includes this information is needed.')]"
}
The LLM Compiler example has a small issue that causes any Replan functionality to error out. The code above is an example I have added to the Jupyter notebook, since none of the current examples trigger a replan. The _parse_joiner_output function should return a dict, but in the replan case it omits the required {"messages": ...} formatting.
I have fixed this issue locally, added a Replan example, and will link a PR shortly in the comments.
Checked other resources
Example Code
Error Message and Stack Trace (if applicable)
Description
https://github.com/langchain-ai/langgraph/issues/1513 and https://github.com/langchain-ai/langgraph/issues/1459 are related issues.
The LLM Compiler example has a small issue that causes any Replan functionality to error out. The code above is an example I have added to the Jupyter notebook, since none of the current examples trigger a replan. The
_parse_joiner_output
function should return a dict, but in the replan case it omits the required {"messages": ...}
formatting. I have fixed this issue locally, added a Replan example, and will link a PR shortly in the comments.
System Info
langchain==0.2.15 langchain-anthropic==0.1.23 langchain-community==0.2.13 langchain-core==0.2.35 langchain-openai==0.1.23 langchain-text-splitters==0.2.2
MacOS Python 3.12.4