Closed arihant-jha closed 1 month ago
I am facing the same issue — is there any update about it? I also tried with Anthropic, but the same error occurs.
System info: langchain==0.1.8 langchain-anthropic==0.1.13 langchain-community==0.0.38 langchain-core==0.1.52 langchain-openai==0.1.7 langchain-text-splitters==0.2.0 langgraph==0.0.50 langsmith==0.1.29
Platform: Windows (Anaconda env), Python 3.11.8
ERROR Message:
================================ Human Message =================================
KeyError Traceback (most recent call last) Cell In[18], line 45 41 for question in tutorial_questions: 42 events = part_1_graph.stream( 43 {"messages": ("user", question)}, config, stream_mode="values" 44 ) ---> 45 for event in events: 46 _print_event(event, _printed)
File c:\Users\User.conda\envs\ragui\Lib\site-packages\langgraph\pregel__init.py:834, in Pregel.stream(self, input, config, stream_mode, output_keys, input_keys, interrupt_before, interrupt_after, debug) 827 done, inflight = concurrent.futures.wait( 828 futures, 829 return_when=concurrent.futures.FIRST_EXCEPTION, 830 timeout=self.step_timeout, [831](file:///C:/Users/User/.conda/envs/ragui/Lib/site-packages/langgraph/pregel/init.py:831) ) [833](file:///C:/Users/User/.conda/envs/ragui/Lib/site-packages/langgraph/pregel/init.py:833) # panic on failure or timeout --> [834](file:///C:/Users/User/.conda/envs/ragui/Lib/site-packages/langgraph/pregel/init__.py:834) _panic_or_proceed(done, inflight, step) 836 # combine pending writes from all tasks 837 pending_writes = deque[tuple[str, Any]]()
File c:\Users\User.conda\envs\ragui\Lib\site-packages\langgraph\pregel__init__.py:1334, in _panic_or_proceed(done, inflight, step) 1332 inflight.pop().cancel() 1333 # raise the exception -> 1334 raise exc 1336 if inflight: 1337 # if we got here means we timed out 1338 while inflight: 1339 # cancel all pending tasks
File c:\Users\User.conda\envs\ragui\Lib\concurrent\futures\thread.py:58, in _WorkItem.run(self) 55 return 57 try: ---> 58 result = self.fn(*self.args, **self.kwargs) 59 except BaseException as exc: 60 self.future.set_exception(exc)
File c:\Users\User.conda\envs\ragui\Lib\site-packages\langgraph\pregel\retry.py:66, in run_with_retry(task, retry_policy) 64 task.writes.clear() 65 # run the task ---> 66 task.proc.invoke(task.input, task.config) 67 # if successful, end 68 break
File c:\Users\User.conda\envs\ragui\Lib\site-packages\langchain_core\runnables\base.py:2499, in RunnableSequence.invoke(self, input, config) 2497 try: 2498 for i, step in enumerate(self.steps): -> 2499 input = step.invoke( 2500 input, 2501 # mark each step as a child run 2502 patch_config( 2503 config, callbacks=run_manager.get_child(f"seq:step:{i+1}") 2504 ), 2505 ) 2506 # finish the root run 2507 except BaseException as e:
File c:\Users\User.conda\envs\ragui\Lib\site-packages\langgraph\utils.py:89, in RunnableCallable.invoke(self, input, config) 83 context.run(var_child_runnable_config.set, config) 84 kwargs = ( 85 {self.kwargs, "config": config} 86 if accepts_config(self.func) 87 else self.kwargs 88 ) ---> 89 ret = context.run(self.func, input, kwargs) 90 if isinstance(ret, Runnable) and self.recurse: 91 return ret.invoke(input, config)
File c:\Users\User.conda\envs\ragui\Lib\site-packages\langgraph\graph\graph.py:75, in Branch._route(self, input, config, reader, writer) 73 result = [result] 74 if self.ends: ---> 75 destinations = [self.ends[r] for r in result] 76 else: 77 destinations = result
File c:\Users\User.conda\envs\ragui\Lib\site-packages\langgraph\graph\graph.py:75, in
KeyError: 'tools'
Pushed a fix to the notebook! Last week we updated the tools_condition to return the node "tools" (since that's a better long-term home for it); we hadn't originally updated the notebook to label the nodes as "tools" instead of "action".
Working now <3 Thanks for the prompt fix!
Awesome! Keep the feedback coming!
Checked other resources
Example Code
Error Message and Stack Trace (if applicable)
KeyError Traceback (most recent call last) Cell In[73], line 36 24 thread_id = str(uuid.uuid4()) 26 config = { 27 "configurable": { 28 # The passenger_id is used in our flight tools to (...) 33 } 34 } ---> 36 part_1_graph.invoke({"messages": ("user", tutorial_questions[0])}, config) 38 # _printed = set() 39 # for question in tutorial_questions: 40 # events = part_1_graph.stream( (...) 43 # for event in events: 44 # _print_event(event, _printed)
File ~/miniconda3/envs/asr/lib/python3.12/site-packages/langgraph/pregel/init.py:1245, in Pregel.invoke(self, input, config, stream_mode, output_keys, input_keys, interrupt_before, interrupt_after, debug, kwargs) 1243 else: 1244 chunks = [] -> 1245 for chunk in self.stream( 1246 input, 1247 config, 1248 stream_mode=stream_mode, 1249 output_keys=output_keys, 1250 input_keys=input_keys, 1251 interrupt_before=interrupt_before, 1252 interrupt_after=interrupt_after, 1253 debug=debug, 1254 kwargs, 1255 ): 1256 if stream_mode == "values": 1257 latest = chunk
File ~/miniconda3/envs/asr/lib/python3.12/site-packages/langgraph/pregel/init.py:834, in Pregel.stream(self, input, config, stream_mode, output_keys, input_keys, interrupt_before, interrupt_after, debug) 827 done, inflight = concurrent.futures.wait( 828 futures, 829 return_when=concurrent.futures.FIRST_EXCEPTION, 830 timeout=self.step_timeout, 831 ) 833 # panic on failure or timeout --> 834 _panic_or_proceed(done, inflight, step) 836 # combine pending writes from all tasks 837 pending_writes = deque[tuple[str, Any]]()
File ~/miniconda3/envs/asr/lib/python3.12/site-packages/langgraph/pregel/init.py:1334, in _panic_or_proceed(done, inflight, step) 1332 inflight.pop().cancel() 1333 # raise the exception -> 1334 raise exc 1336 if inflight: 1337 # if we got here means we timed out 1338 while inflight: 1339 # cancel all pending tasks
File ~/miniconda3/envs/asr/lib/python3.12/concurrent/futures/thread.py:58, in _WorkItem.run(self) 55 return 57 try: ---> 58 result = self.fn(*self.args, **self.kwargs) 59 except BaseException as exc: 60 self.future.set_exception(exc)
File ~/miniconda3/envs/asr/lib/python3.12/site-packages/langgraph/pregel/retry.py:66, in run_with_retry(task, retry_policy) 64 task.writes.clear() 65 # run the task ---> 66 task.proc.invoke(task.input, task.config) 67 # if successful, end 68 break
File ~/miniconda3/envs/asr/lib/python3.12/site-packages/langchain_core/runnables/base.py:2368, in RunnableSequence.invoke(self, input, config) 2366 try: 2367 for i, step in enumerate(self.steps): -> 2368 input = step.invoke( 2369 input, 2370 # mark each step as a child run 2371 patch_config( 2372 config, callbacks=run_manager.get_child(f"seq:step:{i+1}") 2373 ), 2374 ) 2375 # finish the root run 2376 except BaseException as e:
File ~/miniconda3/envs/asr/lib/python3.12/site-packages/langgraph/utils.py:89, in RunnableCallable.invoke(self, input, config) 83 context.run(var_child_runnable_config.set, config) 84 kwargs = ( 85 {self.kwargs, "config": config} 86 if accepts_config(self.func) 87 else self.kwargs 88 ) ---> 89 ret = context.run(self.func, input, kwargs) 90 if isinstance(ret, Runnable) and self.recurse: 91 return ret.invoke(input, config)
File ~/miniconda3/envs/asr/lib/python3.12/site-packages/langgraph/graph/graph.py:75, in Branch._route(self, input, config, reader, writer) 73 result = [result] 74 if self.ends: ---> 75 destinations = [self.ends[r] for r in result] 76 else: 77 destinations = result
KeyError: 'tools'
Description
I'm trying to run the customer-support tutorial with the OpenAI chat model, but it is producing the above error for me. The tutorial was originally written for the Anthropic chat model, but it also includes a commented-out code snippet for running the same code with GPT, which gives me this error when I try it. The code looks correct to me: the tools binding is present in the agent_runnable as well, and the agent does return responses for simple questions that don't use tools.
System Info
langchain==0.2.0 langchain-anthropic==0.1.13 langchain-community==0.2.0 langchain-core==0.2.0 langchain-openai==0.1.7 langchain-text-splitters==0.2.0 langgraph==0.0.50
Platform: macOS, Python 3.12.3