Closed wangyaoyong-wyy closed 2 weeks ago
I encountered the same problem using Tongyi
The code used is:
prompt = hub.pull("hwchase17/openai-tools-agent")
from langchain.agents import create_tool_calling_agent
I also encountered this issue while using ChatTongyi. After setting a breakpoint and tracing the source code, the feedback was: "Multiple tool_calls are not supported in message. This feature will be supported in the future." Perhaps Tongyi does not currently support multiple tool-call scheduling?
I encountered the same problem using Tongyi. The code used is:
prompt = hub.pull("hwchase17/openai-tools-agent")
from langchain.agents import create_tool_calling_agent
This issue may occur when using create_tool_calling_agent. According to the error code shown on the DashScope official website, it is caused by passing incorrect parameters. If you switch to create_json_chat_agent, this issue will not occur.
prompt = hub.pull("hwchase17/react-chat-json")
agent = create_json_chat_agent(chat, tools, prompt)
# agent = create_tool_calling_agent(chat,tools,prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True)
resp = agent_executor.invoke({"input": "what is LangChain latest version?"})
print(resp)
Checked other resources
Example Code
Error Message and Stack Trace (if applicable)
D:\miniconda3\envs\chat2\python.exe D:\pythonProject\chat2\langchain_agent_create.py {'agent': {'messages': [AIMessage(content='', additional_kwargs={'tool_calls': [{'type': 'function', 'function': {'name': 'langchain_search', 'arguments': ''}, 'id': ''}, {'type': 'function', 'function': {'name': '', 'arguments': '{"query": "'}, 'id': ''}, {'type': 'function', 'function': {'name': '', 'arguments': 'Langchain 0.2 version release'}, 'id': ''}, {'type': 'function', 'function': {'name': '', 'arguments': ' date"}'}, 'id': ''}, {'type': 'function', 'function': {'name': '', 'arguments': ''}, 'id': ''}]}, response_metadata={'model_name': 'qwen-turbo', 'finish_reason': 'tool_calls', 'request_id': 'c426dbd5-a597-91a0-9ec4-a55b2591fed1', 'token_usage': {'input_tokens': 189, 'output_tokens': 26, 'total_tokens': 215}}, id='run-13fd4707-8439-4431-9dad-817894f4c3e7-0', tool_calls=[{'name': 'langchain_search', 'args': {'query': 'Langchain 0.2 version release date'}, 'id': ''}])]}}
{'tools': {'messages': [ToolMessage(content='Skip to main content\n\nLangChain 0.2 is out! Leave feedback on the v0.2 docs here. You can view the\nv0.1 docs here.\n\nIntegrationsAPI Reference\n\nMore\n\nSkip to main content\n\nLangChain 0.2 is out! Leave feedback on the v0.2 docs here. You can view the\nv0.1 docs here.\n\nIntegrationsAPI Reference\n\nMore\n\nSkip to main content\n\nLangChain 0.2 is out! Leave feedback on the v0.2 docs here. You can view the\nv0.1 docs here.\n\nIntegrationsAPI Reference\n\nMore\n\n LangChain v0.2\n astream_events v2\n Changes\n Security\n\n Versions\n * v0.2\n\nOn this page\n\n# LangChain v0.2', name='langchain_search', id='28ffa364-791c-488e-9020-1960c4a5672b', tool_call_id='')]}}
Traceback (most recent call last): File "D:\pythonProject\chat2\langchain_agent_create.py", line 49, in
for s in agent_executor.stream(
File "D:\miniconda3\envs\chat2\Lib\site-packages\langgraph\pregel\__init__.py", line 876, in stream
_panic_or_proceed(done, inflight, step)
File "D:\miniconda3\envs\chat2\Lib\site-packages\langgraph\pregel__init__.py", line 1422, in _panic_or_proceed
raise exc
File "D:\miniconda3\envs\chat2\Lib\concurrent\futures\thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langgraph\pregel\retry.py", line 66, in run_with_retry
task.proc.invoke(task.input, task.config)
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\runnables\base.py", line 2393, in invoke
input = step.invoke(
^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\runnables\base.py", line 3857, in invoke
return self._call_with_config(
^^^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\runnables\base.py", line 1503, in _call_with_config
context.run(
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\runnables\config.py", line 346, in call_func_with_variable_args
return func(input, **kwargs)  # type: ignore[call-arg]
^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\runnables\base.py", line 3731, in _invoke
output = call_func_with_variable_args(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\runnables\config.py", line 346, in call_func_with_variable_args
return func(input, **kwargs)  # type: ignore[call-arg]
^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langgraph\prebuilt\chat_agent_executor.py", line 403, in call_model
response = model_runnable.invoke(messages, config)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\runnables\base.py", line 4427, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\language_models\chat_models.py", line 170, in invoke
self.generate_prompt(
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\language_models\chat_models.py", line 599, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\language_models\chat_models.py", line 456, in generate
raise e
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\language_models\chat_models.py", line 446, in generate
self._generate_with_cache(
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_core\language_models\chat_models.py", line 671, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_community\chat_models\tongyi.py", line 440, in _generate
for chunk in self._stream(
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_community\chat_models\tongyi.py", line 512, in _stream
for stream_resp, is_last_chunk in generate_with_last_element_mark(
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_community\llms\tongyi.py", line 135, in generate_with_last_element_mark
item = next(iterator)
^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_community\chat_models\tongyi.py", line 361, in _stream_completion_with_retry
yield check_response(delta_resp)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\langchain_community\llms\tongyi.py", line 66, in check_response
raise HTTPError(
^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\requests\exceptions.py", line 22, in __init__
if response is not None and not self.request and hasattr(response, "request"):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\miniconda3\envs\chat2\Lib\site-packages\dashscope\api_entities\dashscope_response.py", line 59, in __getattr__
return self[attr]