
[Feature Request]: Support for OpenSearch as a chat store #15926

Open brycecf opened 1 month ago

brycecf commented 1 month ago

Feature Description

OpenSearch support as a chat store

Reason

No response

Value of Feature

Expands architecture options. Like Postgres, OpenSearch is an open-source vector store that developers can self-host.

dosubot[bot] commented 1 month ago

To support OpenSearch as a chat store, you can extend the BaseChatStore interface provided by LlamaIndex. Here's a basic example of how you might start implementing an OpenSearchChatStore:

from typing import Any, List, Optional

from llama_index.core.llms import ChatMessage
from llama_index.core.storage.chat_store.base import BaseChatStore
from opensearchpy import OpenSearch

class OpenSearchChatStore(BaseChatStore):
    # BaseChatStore is a Pydantic model, so attributes are declared as
    # fields and initialized through super().__init__().
    client: Any
    index_name: str

    def __init__(self, host: str, port: int, index_name: str):
        client = OpenSearch(hosts=[{"host": host, "port": port}])
        super().__init__(client=client, index_name=index_name)

    @classmethod
    def class_name(cls) -> str:
        return "OpenSearchChatStore"

    def set_messages(self, key: str, messages: List[ChatMessage]) -> None:
        # Replace any existing messages stored under this key.
        self.client.delete_by_query(
            index=self.index_name, body={"query": {"term": {"key": key}}}
        )
        for message in messages:
            self.add_message(key, message)

    def get_messages(self, key: str) -> List[ChatMessage]:
        # A production implementation would also sort by insertion order;
        # the default search size is only 10 hits, so request more here.
        response = self.client.search(
            index=self.index_name,
            body={"size": 1000, "query": {"term": {"key": key}}},
        )
        return [ChatMessage(**hit["_source"]["message"]) for hit in response["hits"]["hits"]]

    def add_message(self, key: str, message: ChatMessage) -> None:
        # ChatMessage is a Pydantic model; serialize it to a dict before indexing.
        self.client.index(
            index=self.index_name,
            body={"key": key, "message": message.dict()},
        )

    def delete_messages(self, key: str) -> Optional[List[ChatMessage]]:
        # delete_by_query returns counts, not documents, so fetch the messages first.
        messages = self.get_messages(key)
        self.client.delete_by_query(
            index=self.index_name, body={"query": {"term": {"key": key}}}
        )
        return messages

    def delete_message(self, key: str, idx: int) -> Optional[ChatMessage]:
        # Implement logic to delete a specific message by index if needed
        pass

    def delete_last_message(self, key: str) -> Optional[ChatMessage]:
        # Implement logic to delete the last message if needed
        pass

    def get_keys(self) -> List[str]:
        # The terms aggregation requires "key" to be mapped as a keyword
        # field (see the index mapping sketch below).
        response = self.client.search(
            index=self.index_name,
            body={"size": 0, "aggs": {"keys": {"terms": {"field": "key"}}}},
        )
        return [bucket["key"] for bucket in response["aggregations"]["keys"]["buckets"]]

# Example usage
from llama_index.core.memory import ChatMemoryBuffer

chat_store = OpenSearchChatStore(host="localhost", port=9200, index_name="chat_history")

chat_memory = ChatMemoryBuffer.from_defaults(
    token_limit=3000,
    chat_store=chat_store,
    chat_store_key="user1",
)
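
Note that the term queries and the get_keys aggregation above assume the key field is mapped as a keyword. A minimal setup sketch, assuming the same local cluster and the chat_history index name from the example:

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# Create the chat index with "key" mapped as a keyword so exact-match term
# queries and the terms aggregation in get_keys behave as expected; the
# "message" field is left to dynamic mapping.
client.indices.create(
    index="chat_history",
    body={"mappings": {"properties": {"key": {"type": "keyword"}}}},
)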

This example demonstrates how to create a new chat store that interacts with OpenSearch, following the pattern of existing chat stores in LlamaIndex [1][2][3][4].
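
Once wired up, the chat memory can be passed to a chat engine so conversation history persists in OpenSearch across sessions. A brief sketch, where index is assumed to be an existing VectorStoreIndex:

# Attach the OpenSearch-backed memory to a chat engine.
chat_engine = index.as_chat_engine(
    chat_mode="context",
    memory=chat_memory,
)
response = chat_engine.chat("Hello!")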

To continue talking to Dosu, mention @dosu.