openai / chatgpt-retrieval-plugin

The ChatGPT Retrieval Plugin lets you easily find personal or work documents by asking questions in natural language.
MIT License
21.08k stars 3.69k forks source link

supabase local poetry test errors #413

Open nyck33 opened 9 months ago

nyck33 commented 9 months ago
(poetry-env) nyck33@nyck33-lenovo:~/projects/chatgpt-retrieval-plugin-nyck33-fork$ poetry run pytest -s ./tests/datastore/providers/supabase/test_supabase_datastore.py
========================================================= test session starts ==========================================================
platform linux -- Python 3.10.13, pytest-7.3.1, pluggy-1.0.0
rootdir: /home/nyck33/projects/chatgpt-retrieval-plugin-nyck33-fork
configfile: pyproject.toml
plugins: asyncio-0.20.3, anyio-3.7.0, cov-4.1.0
asyncio: mode=auto
collected 8 items                                                                                                                      

tests/datastore/providers/supabase/test_supabase_datastore.py FFFFFFFF

=============================================================== FAILURES ===============================================================
_____________________________________________________________ test_upsert ______________________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f29957bb0>
initial_document_chunks = {'first-doc': [DocumentChunk(id='first-doc-4', text='Lorem ipsum 4', metadata=DocumentChunkMetadata(source=None, sourc..., 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])]}

    @pytest.mark.asyncio
    async def test_upsert(
        supabase_datastore: SupabaseDataStore,
        initial_document_chunks: Dict[str, List[DocumentChunk]],
    ) -> None:
        """Test basic upsert."""
>       doc_ids = await supabase_datastore._upsert(initial_document_chunks)

tests/datastore/providers/supabase/test_supabase_datastore.py:63: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f28143e50>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
______________________________________________________________ test_query ______________________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f29957490>
initial_document_chunks = {'first-doc': [DocumentChunk(id='first-doc-4', text='Lorem ipsum 4', metadata=DocumentChunkMetadata(source=None, sourc..., 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])]}
queries = [QueryWithEmbedding(query='Query 1', filter=None, top_k=1, embedding=[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0...0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])]

    @pytest.mark.asyncio
    async def test_query(
        supabase_datastore: SupabaseDataStore,
        initial_document_chunks: Dict[str, List[DocumentChunk]],
        queries: List[QueryWithEmbedding],
    ) -> None:
        """Test basic query."""
        # insert to prepare for test
>       await supabase_datastore._upsert(initial_document_chunks)

tests/datastore/providers/supabase/test_supabase_datastore.py:75: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f28866320>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
_____________________________________________________________ test_delete ______________________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f35d7c850>

    @pytest.mark.asyncio
    async def test_delete(supabase_datastore):
        await supabase_datastore.delete(delete_all=True)
        chunk1 = DocumentChunk(
            id="chunk1",
            text="Sample text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(),
        )
        chunk2 = DocumentChunk(
            id="chunk2",
            text="Another text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(),
        )
>       await supabase_datastore._upsert({"doc1": [chunk1], "doc2": [chunk2]})

tests/datastore/providers/supabase/test_supabase_datastore.py:245: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f288d09a0>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
________________________________________________________ test_upsert_new_chunk _________________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f28887550>

    @pytest.mark.asyncio
    async def test_upsert_new_chunk(supabase_datastore):
        await supabase_datastore.delete(delete_all=True)
        chunk = DocumentChunk(
            id="chunk1",
            text="Sample text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(),
        )
>       ids = await supabase_datastore._upsert({"doc1": [chunk]})

tests/datastore/providers/supabase/test_supabase_datastore.py:113: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f28886e30>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
______________________________________________________ test_upsert_existing_chunk ______________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f288d3820>

    @pytest.mark.asyncio
    async def test_upsert_existing_chunk(supabase_datastore):
        await supabase_datastore.delete(delete_all=True)
        chunk = DocumentChunk(
            id="chunk1",
            text="Sample text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(),
        )
>       ids = await supabase_datastore._upsert({"doc1": [chunk]})

tests/datastore/providers/supabase/test_supabase_datastore.py:126: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f288d01f0>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
___________________________________________________________ test_query_score ___________________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f28141bd0>

    @pytest.mark.asyncio
    async def test_query_score(supabase_datastore):
        await supabase_datastore.delete(delete_all=True)
        chunk1 = DocumentChunk(
            id="chunk1",
            text="Sample text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(),
        )
        chunk2 = DocumentChunk(
            id="chunk2",
            text="Another text",
            embedding=[-1 if i % 2 == 0 else 1 for i in range(1536)],
            metadata=DocumentChunkMetadata(),
        )
>       await supabase_datastore._upsert({"doc1": [chunk1], "doc2": [chunk2]})

tests/datastore/providers/supabase/test_supabase_datastore.py:165: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f281404f0>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
__________________________________________________________ test_query_filter ___________________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f2885f550>

    @pytest.mark.asyncio
    async def test_query_filter(supabase_datastore):
        await supabase_datastore.delete(delete_all=True)
        chunk1 = DocumentChunk(
            id="chunk1",
            text="Sample text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(
                source="email", created_at="2021-01-01", author="John"
            ),
        )
        chunk2 = DocumentChunk(
            id="chunk2",
            text="Another text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(
                source="chat", created_at="2022-02-02", author="Mike"
            ),
        )
>       await supabase_datastore._upsert({"doc1": [chunk1], "doc2": [chunk2]})

tests/datastore/providers/supabase/test_supabase_datastore.py:197: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f2885cc10>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
___________________________________________________________ test_delete_all ____________________________________________________________

supabase_datastore = <datastore.providers.supabase_datastore.SupabaseDataStore object at 0x7f5f288d1ff0>

    @pytest.mark.asyncio
    async def test_delete_all(supabase_datastore):
        await supabase_datastore.delete(delete_all=True)
        chunk = DocumentChunk(
            id="chunk",
            text="Another text",
            embedding=[1] * 1536,
            metadata=DocumentChunkMetadata(),
        )
>       await supabase_datastore._upsert({"doc": [chunk]})

tests/datastore/providers/supabase/test_supabase_datastore.py:274: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
datastore/providers/pgvector_datastore.py:96: in _upsert
    await self.client.upsert("documents", json)
datastore/providers/supabase_datastore.py:43: in upsert
    self.client.table(table).upsert(json).execute()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <postgrest._sync.request_builder.SyncQueryRequestBuilder object at 0x7f5f29a11870>

    def execute(self) -> APIResponse:
        """Execute the query.

        .. tip::
            This is the last method called, after the query is built.

        Returns:
            :class:`APIResponse`

        Raises:
            :class:`APIError` If the API raised an error.
        """
        r = self.session.request(
            self.http_method,
            self.path,
            json=self.json,
            params=self.params,
            headers=self.headers,
        )
        try:
            if (
                200 <= r.status_code <= 299
            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
                return APIResponse.from_http_request_response(r)
            else:
>               raise APIError(r.json())
E               postgrest.exceptions.APIError: {}

../../miniconda3/envs/poetry-env/lib/python3.10/site-packages/postgrest/_sync/request_builder.py:68: APIError
======================================================= short test summary info ========================================================
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_upsert - postgrest.exceptions.APIError: {}
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_query - postgrest.exceptions.APIError: {}
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_delete - postgrest.exceptions.APIError: {}
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_upsert_new_chunk - postgrest.exceptions.APIError: {}
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_upsert_existing_chunk - postgrest.exceptions.APIError: {}
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_query_score - postgrest.exceptions.APIError: {}
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_query_filter - postgrest.exceptions.APIError: {}
FAILED tests/datastore/providers/supabase/test_supabase_datastore.py::test_delete_all - postgrest.exceptions.APIError: {}
========================================================== 8 failed in 2.43s =====================================================

and

(poetry-env) nyck33@nyck33-lenovo:~/projects/chatgpt-retrieval-plugin-nyck33-fork$ docker ps
CONTAINER ID   IMAGE                                             COMMAND                  CREATED          STATUS                    PORTS                                              NAMES
c982cadf94a8   public.ecr.aws/supabase/studio:20240205-b145c86   "docker-entrypoint.s…"   20 minutes ago   Up 20 minutes (healthy)   0.0.0.0:54323->3000/tcp                            supabase_studio_chatgpt-retrieval-plugin-nyck33-fork
8389d578480d   public.ecr.aws/supabase/postgres-meta:v0.75.0     "docker-entrypoint.s…"   20 minutes ago   Up 20 minutes (healthy)   8080/tcp                                           supabase_pg_meta_chatgpt-retrieval-plugin-nyck33-fork
85127f80cb0c   public.ecr.aws/supabase/edge-runtime:v1.34.1      "sh -c 'mkdir -p /ho…"   20 minutes ago   Up 20 minutes             8081/tcp                                           supabase_edge_runtime_chatgpt-retrieval-plugin-nyck33-fork
2cbc0c5bbb07   public.ecr.aws/supabase/imgproxy:v3.8.0           "imgproxy"               20 minutes ago   Up 20 minutes (healthy)   8080/tcp                                           storage_imgproxy_chatgpt-retrieval-plugin-nyck33-fork
a92dab833db7   public.ecr.aws/supabase/storage-api:v0.46.4       "docker-entrypoint.s…"   20 minutes ago   Up 20 minutes (healthy)   5000/tcp                                           supabase_storage_chatgpt-retrieval-plugin-nyck33-fork
a9b334883cd6   public.ecr.aws/supabase/postgrest:v12.0.1         "/bin/postgrest"         20 minutes ago   Up 20 minutes             3000/tcp                                           supabase_rest_chatgpt-retrieval-plugin-nyck33-fork
4783682ff2ad   public.ecr.aws/supabase/realtime:v2.25.50         "/usr/bin/tini -s -g…"   20 minutes ago   Up 20 minutes (healthy)   4000/tcp                                           realtime-dev.supabase_realtime_chatgpt-retrieval-plugin-nyck33-fork
6aa79ba3fc80   public.ecr.aws/supabase/inbucket:3.0.3            "/start-inbucket.sh …"   20 minutes ago   Up 20 minutes (healthy)   1100/tcp, 2500/tcp, 0.0.0.0:54324->9000/tcp        supabase_inbucket_chatgpt-retrieval-plugin-nyck33-fork
c450470fabf8   public.ecr.aws/supabase/gotrue:v2.132.3           "auth"                   20 minutes ago   Up 20 minutes (healthy)   9999/tcp                                           supabase_auth_chatgpt-retrieval-plugin-nyck33-fork
df2f1fa5c9cd   public.ecr.aws/supabase/kong:2.8.1                "sh -c 'cat <<'EOF' …"   20 minutes ago   Up 20 minutes (healthy)   8001/tcp, 8443-8444/tcp, 0.0.0.0:54321->8000/tcp   supabase_kong_chatgpt-retrieval-plugin-nyck33-fork
22e1b37ecccf   public.ecr.aws/supabase/postgres:15.1.0.147       "sh -c 'cat <<'EOF' …"   20 minutes ago   Up 20 minutes (healthy)   0.0.0.0:54322->5432/tcp                            supabase_db_chatgpt-retrieval-plugin-nyck33-fork
(poetry-env) nyck33@nyck33-lenovo:~/projects/chatgpt-retrieval-plugin-nyck33-fork$ 

and

(poetry-env) nyck33@nyck33-lenovo:~/projects/chatgpt-retrieval-plugin-nyck33-fork$ echo $SUPABASE_URL
http://localhost:54321
(poetry-env) nyck33@nyck33-lenovo:~/projects/chatgpt-retrieval-plugin-nyck33-fork$ echo $SUPABASE_SERVICE_ROLE_KEY
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU

I'm lost at this point, so I'm going to try to get this working with a pgvector database I already have on Supabase. But errors like `APIError: {}` don't tell me anything at all — how can I get it to output something more informative?

nyck33 commented 9 months ago
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 42abe2d53940 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 24dc9b3ec868 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 71d17227d994 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 3008d6de3a31 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 24dc9b3ec868 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 71d17227d994 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 71d17227d994 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 3008d6de3a31 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it ecf699f83504 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 3fa1ffc7d382 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 575a8978a900 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 1047e8ce042c psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it bd35e78f43c5 psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 6f3409c8bdce psql -U postgres
OCI runtime exec failed: exec failed: unable to start container process: exec: "psql": executable file not found in $PATH: unknown
nyck33@nyck33-lenovo:~/Downloads$ docker exec -it 433e59fdad6d psql -U postgres
psql (15.5 (Ubuntu 15.5-1.pgdg20.04+1), server 15.1 (Ubuntu 15.1-1.pgdg20.04+1))
Type "help" for help.

postgres=> \dt
Did not find any relations.
postgres=> 

What is wrong? The Docker containers are running fine, but there is nothing there. Shouldn't there be some dummy database, tables and data in there?

nyck33 commented 9 months ago
psql (15.5 (Ubuntu 15.5-1.pgdg20.04+1), server 15.1 (Ubuntu 15.1-1.pgdg20.04+1))
You are now connected to database "postgres" as user "postgres".
postgres=> \dt storage.*
                   List of relations
 Schema  |    Name    | Type  |         Owner          
---------+------------+-------+------------------------
 storage | buckets    | table | supabase_storage_admin
 storage | migrations | table | supabase_storage_admin
 storage | objects    | table | supabase_storage_admin
(3 rows)

postgres=> SELECT * FROM storage.buckets;
 id | name | owner | created_at | updated_at | public | avif_autodetection | file_size_limit |
 allowed_mime_types | owner_id 
----+------+-------+------------+------------+--------+--------------------+-----------------+
--------------------+----------
(0 rows)

postgres=> SELECT * FROM storage.migrations;
 id |                     name                     |                   hash                   
|        executed_at         
----+----------------------------------------------+------------------------------------------
+----------------------------
  0 | create-migrations-table                      | e18db593bcde2aca2a408c4d1100f6abba2195df 
| 2024-02-09 03:23:03.410595
  1 | initialmigration                             | 6ab16121fbaa08bbd11b712d05f358f9b555d777 
| 2024-02-09 03:23:03.418161
  2 | storage-schema                               | 5c7968fd083fcea04050c1b7f6253c9771b99011 
| 2024-02-09 03:23:03.42877
  3 | pathtoken-column                             | 2cb1b0004b817b29d5b0a971af16bafeede4b70d 
| 2024-02-09 03:23:03.451691
  4 | add-migrations-rls                           | 427c5b63fe1c5937495d9c635c263ee7a5905058 
| 2024-02-09 03:23:03.587734
  5 | add-size-functions                           | 79e081a1455b63666c1294a440f8ad4b1e6a7f84 
| 2024-02-09 03:23:03.611573
  6 | change-column-name-in-get-size               | f93f62afdf6613ee5e7e815b30d02dc990201044 
| 2024-02-09 03:23:03.647155
  7 | add-rls-to-buckets                           | e7e7f86adbc51049f341dfe8d30256c1abca17aa 
| 2024-02-09 03:23:03.65256
  8 | add-public-to-buckets                        | fd670db39ed65f9d08b01db09d6202503ca2bab3 
| 2024-02-09 03:23:03.656095
  9 | fix-search-function                          | 3a0af29f42e35a4d101c259ed955b67e1bee6825 
| 2024-02-09 03:23:03.659932
 10 | search-files-search-function                 | 68dc14822daad0ffac3746a502234f486182ef6e 
| 2024-02-09 03:23:03.663831
 11 | add-trigger-to-auto-update-updated_at-column | 7425bdb14366d1739fa8a18c83100636d74dcaa2 
| 2024-02-09 03:23:03.66853
 12 | add-automatic-avif-detection-flag            | 8e92e1266eb29518b6a4c5313ab8f29dd0d08df9 
| 2024-02-09 03:23:03.673022
 13 | add-bucket-custom-limits                     | cce962054138135cd9a8c4bcd531598684b25e7d 
| 2024-02-09 03:23:03.676271
 14 | use-bytes-for-max-size                       | 941c41b346f9802b411f06f30e972ad4744dad27 
| 2024-02-09 03:23:03.680022
 15 | add-can-insert-object-function               | 934146bc38ead475f4ef4b555c524ee5d66799e5 
| 2024-02-09 03:23:03.732832
 16 | add-version                                  | 76debf38d3fd07dcfc747ca49096457d95b1221b 
| 2024-02-09 03:23:03.738643
 17 | drop-owner-foreign-key                       | f1cbb288f1b7a4c1eb8c38504b80ae2a0153d101 
| 2024-02-09 03:23:03.742333
 18 | add_owner_id_column_deprecate_owner          | e7a511b379110b08e2f214be852c35414749fe66 
| 2024-02-09 03:23:03.746892
 19 | alter-default-value-objects-id               | 02e5e22a78626187e00d173dc45f58fa66a4f043 
| 2024-02-09 03:23:03.753184
(20 rows)

postgres=> SELECT * FROM storage.objects;
 id | bucket_id | name | owner | created_at | updated_at | last_accessed_at | metadata | path_
tokens | version | owner_id 
----+-----------+------+-------+------------+------------+------------------+----------+------
-------+---------+----------
(0 rows)

postgres=> 

Can you update the documentation with the schema of the databases for the local development Docker containers?

nyck33 commented 9 months ago
33-fork/examples/providers$ cd ../..
(poetry-env) nyck33@nyck33-lenovo:~/projects/chatgpt-retrieval-plugin-nyck33-fork$ poetry run dev
Warning: 'dev' is an entry point defined in pyproject.toml, but it's not installed as a script. You may get improper `sys.argv[0]`.

The support to run uninstalled scripts will be removed in a future release.

Run `poetry install` to resolve and get rid of this message.

INFO:     Will watch for changes in these directories: ['/home/nyck33/projects/chatgpt-retrieval-plugin-nyck33-fork']
INFO:     Uvicorn running on http://localhost:3333 (Press CTRL+C to quit)
INFO:     Started reloader process [240929] using WatchFiles
INFO:     Started server process [240959]
INFO:     Waiting for application startup.
INFO:     Application startup complete.
INFO:     127.0.0.1:46646 - "GET / HTTP/1.1" 404 Not Found
INFO:     127.0.0.1:46652 - "GET /docs HTTP/1.1" 200 OK
INFO:     127.0.0.1:46652 - "GET /openapi.json HTTP/1.1" 200 OK
2024-02-09 21:02:18.207 | INFO     | services.date:to_unix_timestamp:23 - Invalid date format: string
2024-02-09 21:02:18.208 | INFO     | services.date:to_unix_timestamp:23 - Invalid date format: string
2024-02-09 21:02:18.225 | ERROR    | datastore.providers.pgvector_datastore:_query:151 - {'code': 'PGRST202', 'details': 'Searched for the function public.match_page_sections with parameters in_author, in_document_id, in_embedding, in_end_date, in_match_count, in_source, in_source_id, in_start_date or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache.', 'hint': None, 'message': 'Could not find the function public.match_page_sections(in_author, in_document_id, in_embedding, in_end_date, in_match_count, in_source, in_source_id, in_start_date) in the schema cache'}
INFO:     127.0.0.1:56084 - "POST /query HTTP/1.1" 200 OK
2024-02-09 21:07:23.152 | ERROR    | datastore.providers.pgvector_datastore:_query:151 - {'code': 'PGRST202', 'details': 'Searched for the function public.match_page_sections with parameters in_author, in_document_id, in_embedding, in_end_date, in_match_count, in_source, in_source_id, in_start_date or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache.', 'hint': None, 'message': 'Could not find the function public.match_page_sections(in_author, in_document_id, in_embedding, in_end_date, in_match_count, in_source, in_source_id, in_start_date) in the schema cache'}
INFO:     127.0.0.1:41736 - "POST /query HTTP/1.1" 200 OK
2024-02-09 21:30:16.855 | ERROR    | datastore.providers.pgvector_datastore:_query:151 - {'code': 'PGRST202', 'details': 'Searched for the function public.match_page_sections with parameters in_author, in_document_id, in_embedding, in_end_date, in_match_count, in_source, in_source_id, in_start_date or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache.', 'hint': None, 'message': 'Could not find the function public.match_page_sections(in_author, in_document_id, in_embedding, in_end_date, in_match_count, in_source, in_source_id, in_start_date) in the schema cache'}
INFO:     127.0.0.1:42288 - "POST /query HTTP/1.1" 200 OK

Did you forget to include a function in the Docker images?