run-llama / sec-insights

A real world full-stack application using LlamaIndex
https://www.secinsights.ai/

Localstack not retrieving information from S3 #111

Open · drkpkg opened this issue 2 months ago

drkpkg commented 2 months ago

I'm having this problem when I try to retrieve a response from the backend.

I got this error on Google Cloud:

```
llama-app-fastapi-1 | Error in message publisher
llama-app-fastapi-1 | Traceback (most recent call last):
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection
llama-app-fastapi-1 |     return await self._loop.create_connection(*args, **kwargs)
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/base_events.py", line 1085, in create_connection
llama-app-fastapi-1 |     raise exceptions[0]
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/base_events.py", line 1069, in create_connection
llama-app-fastapi-1 |     sock = await self._connect_sock(
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/base_events.py", line 973, in _connect_sock
llama-app-fastapi-1 |     await self.sock_connect(sock, address)
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/selector_events.py", line 634, in sock_connect
llama-app-fastapi-1 |     return await fut
llama-app-fastapi-1 |            ^^^^^^^^^
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/futures.py", line 287, in __await__
llama-app-fastapi-1 |     yield self  # This tells Task to wait for completion.
llama-app-fastapi-1 |     ^^^^^^^^^^
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/tasks.py", line 339, in __wakeup
llama-app-fastapi-1 |     future.result()
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/futures.py", line 203, in result
llama-app-fastapi-1 |     raise self._exception.with_traceback(self._exception_tb)
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/selector_events.py", line 674, in _sock_connect_cb
llama-app-fastapi-1 |     raise OSError(err, f'Connect call failed {address}')
llama-app-fastapi-1 | ConnectionRefusedError: [Errno 111] Connect call failed ('127.0.0.1', 4566)
llama-app-fastapi-1 |
llama-app-fastapi-1 | During handling of the above exception, another exception occurred:
llama-app-fastapi-1 |
llama-app-fastapi-1 | Traceback (most recent call last):
llama-app-fastapi-1 |   File "/app/app/api/endpoints/conversation.py", line 149, in event_publisher
llama-app-fastapi-1 |     await task
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/futures.py", line 290, in __await__
llama-app-fastapi-1 |     return self.result()  # May raise too.
llama-app-fastapi-1 |            ^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/futures.py", line 203, in result
llama-app-fastapi-1 |     raise self._exception.with_traceback(self._exception_tb)
llama-app-fastapi-1 |   File "/usr/local/lib/python3.11/asyncio/tasks.py", line 267, in __step
llama-app-fastapi-1 |     result = coro.send(None)
llama-app-fastapi-1 |              ^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/app/app/chat/messaging.py", line 134, in handle_chat_message
llama-app-fastapi-1 |     chat_engine = await get_chat_engine(
llama-app-fastapi-1 |                   ^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/app/app/chat/engine.py", line 249, in get_chat_engine
llama-app-fastapi-1 |     s3_fs = get_s3_fs()
llama-app-fastapi-1 |             ^^^^^^^^^^^
llama-app-fastapi-1 |   File "/app/app/chat/engine.py", line 76, in get_s3_fs
llama-app-fastapi-1 |     s3.mkdir(settings.S3_BUCKET_NAME)
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/fsspec/asyn.py", line 121, in wrapper
llama-app-fastapi-1 |     return sync(self.loop, func, *args, **kwargs)
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/fsspec/asyn.py", line 106, in sync
llama-app-fastapi-1 |     raise return_result
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/fsspec/asyn.py", line 61, in _runner
llama-app-fastapi-1 |     result[0] = await coro
llama-app-fastapi-1 |                 ^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/s3fs/core.py", line 877, in _mkdir
llama-app-fastapi-1 |     await self._call_s3("create_bucket", **params)
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/s3fs/core.py", line 348, in _call_s3
llama-app-fastapi-1 |     return await _error_wrapper(
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/s3fs/core.py", line 140, in _error_wrapper
llama-app-fastapi-1 |     raise err
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/s3fs/core.py", line 113, in _error_wrapper
llama-app-fastapi-1 |     return await func(*args, **kwargs)
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/client.py", line 366, in _make_api_call
llama-app-fastapi-1 |     http, parsed_response = await self._make_request(
llama-app-fastapi-1 |                             ^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/client.py", line 391, in _make_request
llama-app-fastapi-1 |     return await self._endpoint.make_request(
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/endpoint.py", line 100, in _send_request
llama-app-fastapi-1 |     while await self._needs_retry(
llama-app-fastapi-1 |           ^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/endpoint.py", line 262, in _needs_retry
llama-app-fastapi-1 |     responses = await self._event_emitter.emit(
llama-app-fastapi-1 |                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/hooks.py", line 66, in _emit
llama-app-fastapi-1 |     response = await resolve_awaitable(handler(**kwargs))
llama-app-fastapi-1 |                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/_helpers.py", line 15, in resolve_awaitable
llama-app-fastapi-1 |     return await obj
llama-app-fastapi-1 |            ^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/retryhandler.py", line 107, in _call
llama-app-fastapi-1 |     if await resolve_awaitable(self._checker(**checker_kwargs)):
llama-app-fastapi-1 |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/_helpers.py", line 15, in resolve_awaitable
llama-app-fastapi-1 |     return await obj
llama-app-fastapi-1 |            ^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/retryhandler.py", line 126, in _call
llama-app-fastapi-1 |     should_retry = await self._should_retry(
llama-app-fastapi-1 |                    ^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/retryhandler.py", line 165, in _should_retry
llama-app-fastapi-1 |     return await resolve_awaitable(
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/_helpers.py", line 15, in resolve_awaitable
llama-app-fastapi-1 |     return await obj
llama-app-fastapi-1 |            ^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/retryhandler.py", line 174, in _call
llama-app-fastapi-1 |     checker(attempt_number, response, caught_exception)
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/botocore/retryhandler.py", line 247, in __call__
llama-app-fastapi-1 |     return self._check_caught_exception(
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/botocore/retryhandler.py", line 416, in _check_caught_exception
llama-app-fastapi-1 |     raise caught_exception
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/endpoint.py", line 181, in _do_get_response
llama-app-fastapi-1 |     http_response = await self._send(request)
llama-app-fastapi-1 |                     ^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/endpoint.py", line 285, in _send
llama-app-fastapi-1 |     return await self.http_session.send(request)
llama-app-fastapi-1 |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
llama-app-fastapi-1 |   File "/root/.cache/pypoetry/virtualenvs/llama-app-backend-9TtSrW0h-py3.11/lib/python3.11/site-packages/aiobotocore/httpsession.py", line 253, in send
llama-app-fastapi-1 |     raise EndpointConnectionError(endpoint_url=request.url, error=e)
llama-app-fastapi-1 | botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL: "http://localhost:4566/my-bucket"
```
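
The final frames show the failure in `get_s3_fs()` (`app/chat/engine.py`, line 76): `s3.mkdir(settings.S3_BUCKET_NAME)` tries to reach the S3 endpoint at `http://localhost:4566` and the connection to `127.0.0.1:4566` is refused. Since the backend runs inside the `llama-app-fastapi-1` container, `localhost` there refers to that container itself, not to the LocalStack container (unless host networking is used). Below is a minimal sketch of how such a helper is typically wired against LocalStack; the environment-variable names and the `localstack` hostname are illustrative assumptions, not this repo's verified settings:

```python
# Illustrative sketch only (not the repo's actual get_s3_fs implementation).
# Shows how an s3fs filesystem is usually pointed at a LocalStack endpoint.
import os

import s3fs


def get_s3_fs_sketch() -> s3fs.S3FileSystem:
    # Inside docker-compose, "http://localhost:4566" resolves to the FastAPI
    # container itself; LocalStack must be addressed by its service hostname,
    # e.g. "http://localstack:4566" (service name assumed here).
    endpoint_url = os.environ.get("S3_ENDPOINT_URL", "http://localstack:4566")
    fs = s3fs.S3FileSystem(
        key="test",      # LocalStack accepts dummy credentials
        secret="test",
        client_kwargs={"endpoint_url": endpoint_url},
    )
    bucket = os.environ.get("S3_BUCKET_NAME", "my-bucket")
    if not fs.exists(bucket):
        fs.mkdir(bucket)  # the create_bucket call that raises EndpointConnectionError above
    return fs
```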

But the bucket `my-bucket` exists. Is there any config I need to pass?
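
If the bucket was created from the host machine (e.g. with `awslocal` or an AWS CLI pointed at `localhost:4566`), it can exist in LocalStack and still be unreachable from inside the backend container under the `localhost` URL. A quick way to narrow this down is to check which endpoint the container can actually reach, using `s3fs` (already a dependency, as the traceback shows); the `localstack` hostname is an assumption about the docker-compose service name:

```python
# Run inside the llama-app-fastapi container (e.g. via `docker compose exec`).
# Prints which S3 endpoints are reachable and which buckets they expose.
import s3fs

for endpoint in ("http://localhost:4566", "http://localstack:4566"):
    fs = s3fs.S3FileSystem(
        key="test",
        secret="test",
        client_kwargs={"endpoint_url": endpoint},
    )
    try:
        print(endpoint, "->", fs.ls(""))  # listing the root returns bucket names
    except Exception as exc:
        print(endpoint, "-> unreachable:", exc)
```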