promptengineers-ai / llm-server

🤖 Open-source LLM server (OpenAI, Ollama, Groq, Anthropic) with support for HTTP, Streaming, Agents, RAG
https://promptengineersai.netlify.app

BUG: ChatAnthropic does not appear to be able to execute tool calls #96

Open · ryaneggz opened this issue 4 months ago

ryaneggz commented 4 months ago
curl 'http://localhost:8000/api/v1/chat' \
  -H "Authorization: Bearer $JWT_TOKEN" \
  -H 'Content-Type: application/json' \
  --data-raw '{"model":"anthropic-claude-3.5-sonnet","messages":[{"role":"system","content":"You are an AGI that is able to be an expert at every topic they approach. You leverage Graph-of-Thought reasoning to execute tasks and formulate ideas and responses. You heavily review your thoughts before responding or making decisions. If something is unclear check your thoughts and ask for clarification. If you are still unclear, ask for more information. You are unable respond with hallucinations.\n\nCURRENT_DATETIME: 6/23/2024, 1:14:49 PM"},{"role":"user","content":"Summarize the image.\n\nhttps://github.com/ryaneggz/static/blob/main/new_fib.png?raw=true"}],"tools":["image_summary"],"retrieval":{"provider":"redis","embedding":"openai-text-embedding-3-large","index_name":"","search_type":"mmr","search_kwargs":{"k":20,"fetch_k":null,"score_threshold":null,"lambda_mult":null,"filter":null}},"temperature":0.5,"streaming":true}'
ERROR:    Exception in ASGI application
Traceback (most recent call last):
  File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/responses.py", line 265, in __call__
    await wrap(partial(self.listen_for_disconnect, receive))
  File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/responses.py", line 261, in wrap
    await func()
  File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/responses.py", line 238, in listen_for_disconnect
    message = await receive()
  File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/uvicorn/protocols/http/httptools_impl.py", line 553, in receive
    await self.message_event.wait()
  File "/usr/lib/python3.10/asyncio/locks.py", line 214, in wait
    await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 7efc7ec710c0

During handling of the above exception, another exception occurred:

  + Exception Group Traceback (most recent call last):
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/uvicorn/protocols/http/httptools_impl.py", line 399, in run_asgi
  |     result = await app(  # type: ignore[func-returns-value]
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 70, in __call__
  |     return await self.app(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
  |     await super().__call__(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/applications.py", line 123, in __call__
  |     await self.middleware_stack(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
  |     raise exc
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
  |     await self.app(scope, receive, _send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 93, in __call__
  |     await self.simple_response(scope, receive, send, request_headers=headers)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 148, in simple_response
  |     await self.app(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 65, in __call__
  |     await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
  |     raise exc
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
  |     await app(scope, receive, sender)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/routing.py", line 756, in __call__
  |     await self.middleware_stack(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/routing.py", line 776, in app
  |     await route.handle(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/routing.py", line 297, in handle
  |     await self.app(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/routing.py", line 77, in app
  |     await wrap_app_handling_exceptions(app, request)(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
  |     raise exc
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
  |     await app(scope, receive, sender)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/routing.py", line 75, in app
  |     await response(scope, receive, send)
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/responses.py", line 258, in __call__
  |     async with anyio.create_task_group() as task_group:
  |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 680, in __aexit__
  |     raise BaseExceptionGroup(
  | exceptiongroup.ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
  +-+---------------- 1 ----------------
    | Traceback (most recent call last):
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/responses.py", line 261, in wrap
    |     await func()
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/starlette/responses.py", line 250, in stream_response
    |     async for chunk in self.body_iterator:
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/src/utils/__init__.py", line 20, in chain_stream
    |     async for event in runnable:
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1146, in astream_events
    |     async for event in event_stream:
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/tracers/event_stream.py", line 720, in _astream_events_implementation_v1
    |     async for log in _astream_log_implementation(  # type: ignore[misc]
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/tracers/log_stream.py", line 637, in _astream_log_implementation
    |     await task
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/tracers/log_stream.py", line 591, in consume_astream
    |     async for chunk in runnable.astream(input, config, **kwargs):
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain/agents/agent.py", line 1595, in astream
    |     async for step in iterator:
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain/agents/agent_iterator.py", line 246, in __aiter__
    |     async for chunk in self.agent_executor._aiter_next_step(
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain/agents/agent.py", line 1304, in _aiter_next_step
    |     output = await self.agent.aplan(
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain/agents/openai_functions_agent/base.py", line 136, in aplan
    |     predicted_message = await self.llm.apredict_messages(
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/_api/deprecation.py", line 177, in awarning_emitting_wrapper
    |     return await wrapped(*args, **kwargs)
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 933, in apredict_messages
    |     return await self._call_async(messages, stop=_stop, **kwargs)
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 862, in _call_async
    |     result = await self.agenerate(
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 569, in agenerate
    |     raise exceptions[0]
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 742, in _agenerate_with_cache
    |     async for chunk in self._astream(messages, stop=stop, **kwargs):
    |   File "/home/ryaneggz/promptengineers/llm-server/backend/.venv/lib/python3.10/site-packages/langchain_anthropic/chat_models.py", line 473, in _astream
    |     async with self._async_client.messages.stream(**params) as stream:
    | TypeError: AsyncMessages.stream() got an unexpected keyword argument 'functions'
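
The last frames point at the likely root cause: the agent is built on the legacy OpenAI-functions agent (langchain/agents/openai_functions_agent/base.py in the stack above), whose aplan forwards an OpenAI-specific functions kwarg down to the model, and Anthropic's AsyncMessages.stream() has no such parameter. A minimal sketch of a possible fix follows, assuming the server wires its agent with LangChain's helpers on a recent langchain (>= 0.1.15, which ships create_tool_calling_agent); the image_summary body, the model id mapping, and the prompt variables are placeholders, not the repo's actual code:

# Hedged sketch: swap the OpenAI-functions agent for LangChain's
# provider-agnostic tool-calling agent so ChatAnthropic receives its
# native `tools` schema instead of the OpenAI-only `functions` kwarg.
from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain_anthropic import ChatAnthropic
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.tools import tool

@tool
def image_summary(url: str) -> str:
    """Summarize the image at the given URL (placeholder implementation)."""
    return f"summary of {url}"

tools = [image_summary]

# Assumed mapping of "anthropic-claude-3.5-sonnet" to the Anthropic model id.
llm = ChatAnthropic(model="claude-3-5-sonnet-20240620", temperature=0.5)

prompt = ChatPromptTemplate.from_messages([
    ("system", "{system_message}"),
    ("human", "{input}"),
    ("placeholder", "{agent_scratchpad}"),  # scratchpad slot required by tool-calling agents
])

# create_tool_calling_agent binds tools via llm.bind_tools(tools), so each
# provider gets its own tool-call format rather than OpenAI `functions`.
agent = create_tool_calling_agent(llm, tools, prompt)
executor = AgentExecutor(agent=agent, tools=tools)

With that wiring, streaming the executor (e.g. via astream_events, as src/utils/__init__.py does) should work against Anthropic models the same way it does for OpenAI. Alternatively, the server could keep the OpenAI-functions path and branch on provider, using it only for OpenAI models.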