File "/workspace/.venv/lib/python3.11/site-packages/httpx/_client.py", line 1661, in send
response = await self._send_handling_auth(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/httpx/_client.py", line 1689, in _send_handling_auth
response = await self._send_handling_redirects(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/httpx/_client.py", line 1726, in _send_handling_redirects
response = await self._send_single_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/httpx/_client.py", line 1763, in _send_single_request
response = await transport.handle_async_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/httpx/_transports/default.py", line 372, in handle_async_request
with map_httpcore_exceptions():
File "/usr/local/lib/python3.11/contextlib.py", line 158, in __exit__
self.gen.throw(typ, value, traceback)
File "/workspace/.venv/lib/python3.11/site-packages/httpx/_transports/default.py", line 86, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.ReadTimeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/workspace/src/backend/chat.py", line 63, in stream_qa_objects
search_response = await perform_search(query)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/src/backend/search/search_service.py", line 101, in perform_search
raise HTTPException(
fastapi.exceptions.HTTPException: 500: There was an error while searching.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/workspace/src/backend/main.py", line 97, in generator
async for obj in stream_qa_objects(chat_request):
File "/workspace/src/backend/chat.py", line 119, in stream_qa_objects
raise HTTPException(status_code=500, detail=detail)
fastapi.exceptions.HTTPException: 500: 500: There was an error while searching.
INFO: 192.168.65.1:36091 - "POST /chat HTTP/1.1" 200 OK
Traceback (most recent call last):
File "/workspace/src/backend/chat.py", line 97, in stream_qa_objects
related_queries = await (
^^^^^^^
File "/workspace/src/backend/related_queries.py", line 11, in generate_related_queries
related = llm.structured_complete(
^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/src/backend/llm/base.py", line 50, in structured_complete
return self.client.chat.completions.create(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/instructor/client.py", line 91, in create
return self.create_fn(
^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/instructor/patch.py", line 143, in new_create_sync
response = retry_sync(
^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/instructor/retry.py", line 152, in retry_sync
for attempt in max_retries:
File "/workspace/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 435, in __iter__
do = self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 368, in iter
result = action(retry_state)
^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 410, in exc_check
raise retry_exc.reraise()
^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 183, in reraise
raise self.last_attempt.result()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 401, in __get_result
raise self._exception
File "/workspace/.venv/lib/python3.11/site-packages/instructor/retry.py", line 158, in retry_sync
return process_response(
^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/instructor/process_response.py", line 142, in process_response
model = response_model.from_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/instructor/function_calls.py", line 115, in from_response
return cls.parse_tools(completion, validation_context, strict)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/.venv/lib/python3.11/site-packages/instructor/function_calls.py", line 205, in parse_tools
tool_call.function.name == cls.openai_schema["name"] # type: ignore[index]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AssertionError: Tool name does not match
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/workspace/src/backend/main.py", line 97, in generator
async for obj in stream_qa_objects(chat_request):
File "/workspace/src/backend/chat.py", line 119, in stream_qa_objects
raise HTTPException(status_code=500, detail=detail)
fastapi.exceptions.HTTPException: 500: Tool name does not match
I received the error above (the `AssertionError: Tool name does not match`, surfaced as a 500 `HTTPException`) when running with the Llama 3 model.