langchain-ai / streamlit-agent

Reference implementations of several LangChain agents as Streamlit apps
Apache License 2.0
1.16k stars 584 forks source link

Not sure how to initialize `st.session_state['messages']` in my chat code #16

Closed josoroma closed 10 months ago

josoroma commented 11 months ago
image

Hitting this error:

Traceback (most recent call last):
  File "/Users/josoroma/Library/Caches/pypoetry/virtualenvs/etl-Fd0mW_QZ-py3.11/lib/python3.11/site-packages/streamlit/runtime/scriptrunner/script_runner.py", line 552, in _run_script
    exec(code, module.__dict__)
  File "/Users/josoroma/sites/etl-pipeline-for-langchain-docs/chat.py", line 111, in <module>
    ChatApp().main()
  File "/Users/josoroma/sites/etl-pipeline-for-langchain-docs/chat.py", line 83, in main
    for msg in st.session_state['messages']:
               ~~~~~~~~~~~~~~~~^^^^^^^^^^^^
  File "/Users/josoroma/Library/Caches/pypoetry/virtualenvs/etl-Fd0mW_QZ-py3.11/lib/python3.11/site-packages/streamlit/runtime/state/session_state_proxy.py", line 90, in __getitem__
    return get_session_state()[key]
           ~~~~~~~~~~~~~~~~~~~^^^^^
  File "/Users/josoroma/Library/Caches/pypoetry/virtualenvs/etl-Fd0mW_QZ-py3.11/lib/python3.11/site-packages/streamlit/runtime/state/safe_session_state.py", line 111, in __getitem__
    raise KeyError(key)
KeyError: 'messages'

My code:

import os
import weaviate
import openai
from pydantic import BaseModel
from langchain.callbacks.base import BaseCallbackHandler
# from langchain.chat_models import ChatOpenAI
from langchain.schema import ChatMessage
import streamlit as st

class Document(BaseModel):
    """Pydantic model for one retrieved document (matches the Weaviate "Document" class queried below)."""

    content: str  # raw document text returned by the vector store

class QueryResult(BaseModel):
    """Wrapper pairing a query with its matched document.

    NOTE(review): this model is never referenced in the rest of the file —
    presumably left over from an earlier revision; confirm before removing.
    """

    document: Document  # the single matched document

class StreamHandler(BaseCallbackHandler):
    """LangChain callback that live-streams LLM tokens into a Streamlit container."""

    def __init__(self, container, initial_text=""):
        """Remember the render target and seed the accumulated text buffer."""
        self.text = initial_text
        self.container = container

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        """Append the incoming token and re-render the full text so far."""
        self.text = self.text + token
        self.container.markdown(self.text)

class ChatApp:
    """Streamlit chat application backed by a Weaviate vector store with OpenAI generation.

    On construction it gathers credentials (env vars or sidebar inputs) and
    builds the Weaviate client; ``main()`` renders the chat UI on each
    Streamlit rerun.
    """

    def __init__(self):
        # Placeholder so the attribute exists before get_client() runs;
        # get_env_variables() may halt the script via st.stop().
        self.client = None
        self.get_env_variables()
        self.client = self.get_client()

    def get_env_variable(self, var_name: str):
        """Return the value of environment variable *var_name*, or None if unset."""
        var_value = os.getenv(var_name)
        return var_value

    def get_env_variables(self):
        """Collect the three required credentials, preferring env vars over sidebar inputs.

        Halts the Streamlit script (st.stop) until all three are provided,
        then configures the openai module with the API key.
        """
        with st.sidebar:
            # Env var wins; otherwise fall back to a sidebar text input.
            self.OPENAI_API_KEY = self.get_env_variable("OPENAI_API_KEY") or st.text_input("OpenAI API Key", type="password")
            self.WEAVIATE_HOST = self.get_env_variable("WEAVIATE_HOST") or st.text_input("Weaviate Host")
            self.WEAVIATE_AUTH_API_KEY = self.get_env_variable("WEAVIATE_AUTH_API_KEY") or st.text_input("Bearer Token", type="password")

        if not self.OPENAI_API_KEY or not self.WEAVIATE_HOST or not self.WEAVIATE_AUTH_API_KEY:
            st.info("Please add your OpenAI API Key, Weaviate Host, and Bearer Token to continue.")
            st.stop()  # stops execution here; the script reruns when inputs change

        openai.api_key = self.OPENAI_API_KEY

    def get_client(self):
        """Build and return an authenticated Weaviate client.

        On failure, shows the error in the UI and halts the script
        (st.stop raises, so the trailing return is only reached on success).
        """
        try:
            client = weaviate.Client(
                url=self.WEAVIATE_HOST,
                auth_client_secret=weaviate.AuthApiKey(self.WEAVIATE_AUTH_API_KEY),
                # Weaviate forwards this header to its OpenAI generative module.
                additional_headers={"X-OpenAI-Api-Key": self.OPENAI_API_KEY},
            )
        except Exception as e:
            st.error(f"Error occurred while creating the Weaviate client: {str(e)}")
            st.stop()

        return client

    def client_query(self, question: str):
        """Run a near-text search over "Document" with generative answering.

        Returns the raw GraphQL-style response dict; main() expects the shape
        response['data']['Get']['Document'] -> list of documents. Limited to
        the single best match (with_limit(1)). Halts the script on error.
        """
        generatePrompt = "Respond to the human as helpfully and accurately as possible: {text}"
        nearText = {"concepts": [f"{question}"]}

        try:
            response = (
                self.client.query
                .get("Document", ["content"])
                .with_generate(single_prompt=generatePrompt)
                .with_near_text(nearText)
                .with_limit(1)
                .do()
            )
        except Exception as e:
            st.error(f"Error occurred while querying the Weaviate client: {str(e)}")
            st.stop()

        return response

    def main(self):
        """Render chat history, accept input, query Weaviate, and display answers.

        Streamlit reruns this on every interaction, so session_state carries
        the message history across runs.
        """
        # Seed the history exactly once per session (fixes the KeyError from the issue).
        if 'messages' not in st.session_state:
            st.session_state['messages'] = [ChatMessage(role="assistant", content="How can I help you?")]

        # Replay the full conversation so far.
        for msg in st.session_state['messages']:
            st.chat_message(msg.role).write(msg.content)

        # Walrus: only enters the branch when the user submitted non-empty text.
        if prompt := st.text_input("Your input:"):
            st.session_state['messages'].append(ChatMessage(role="user", content=prompt))
            st.chat_message("user").write(prompt)

            response = self.client_query(prompt)
            if response:
                try:
                    for document in response['data']['Get']['Document']:
                        try:
                            # '_additional.generate.singleResult' holds the OpenAI-generated answer;
                            # 'content' is the raw matched document text.
                            generativeOpenAI = document['_additional']['generate']["singleResult"]
                            content = document['content']
                        except KeyError as ke:
                            # Skip malformed documents but keep processing the rest.
                            st.markdown(f"Error: Expected keys not found in the document. {ke}")
                            continue

                        if generativeOpenAI:
                            st.session_state['messages'].append(ChatMessage(role="assistant", content=generativeOpenAI))
                            st.chat_message("assistant").write(generativeOpenAI)
                        if content:
                            st.session_state['messages'].append(ChatMessage(role="assistant", content=content))
                            st.chat_message("assistant").write(content)
                except KeyError as ke:
                    st.markdown(f"Error: Expected keys not found in the response. {ke}")
if __name__ == "__main__":
    # Streamlit executes this script top-to-bottom on every interaction.
    ChatApp().main()
Muhtasham commented 11 months ago

Hello, are you using the correct Streamlit version?

sfc-gh-jcarroll commented 10 months ago

Hi, it looks like this request is about app code that is not from this repo. It might be a good one to post for help in the forum:

https://discuss.streamlit.io/c/questions/5