Chainlit / cookbook

Chainlit's cookbook repo
https://github.com/Chainlit/chainlit

Coroutine bug when using autogen's groupchat with chainlit #135

Closed. tituslhy closed this issue 1 month ago.

tituslhy commented 1 month ago

Describe the bug

When using autogen in Chainlit, the current cookbook inherits from autogen's UserProxyAgent and ConversableAgent classes and amends their send() methods.

Using the latest versions of both libraries (chainlit==1.1.306, pyautogen==0.2.32), this works only for single agent-to-agent conversations. With a groupchat it fails, because the user_proxy_agent tries to send a coroutine instead of a message.

To Reproduce

app.py:

from autogen import (
    UserProxyAgent, 
    ConversableAgent,
    Agent
)
import chainlit as cl
import os
from dotenv import load_dotenv, find_dotenv
import autogen
from typing import Union, Optional, Dict 
import warnings

_ = load_dotenv(find_dotenv())
warnings.filterwarnings("ignore")

def chat_new_message(message, sender):
    cl.run_sync(
        cl.Message(
            content="",
            author=sender.name,
        ).send()
    )
    content = message.get('content')
    cl.run_sync(
        cl.Message(
            content=content,
            author=sender.name,
        ).send()
    )

async def ask_helper(func, **kwargs):
    res = await func(**kwargs).send()
    while not res:
        res = await func(**kwargs).send()
    return res

class ChainlitConversableAgent(ConversableAgent):
    def send(
        self,
        message: Union[Dict, str],
        recipient: Agent,
        request_reply: Optional[bool] = None,
        silent: Optional[bool] = False,
    ) -> bool:
        if isinstance(message, dict):
            message = message['content']
        cl.run_sync(
            cl.Message(
                content = f"{self.name} *Sending message to '{recipient.name}':*\n\n{message}",
                author=self.name,
            ).send()
        )
        super(ChainlitConversableAgent, self).send(
            message=message,
            recipient=recipient,
            request_reply=request_reply,
            silent=silent,
        )

class ChainlitUserProxyAgent(UserProxyAgent):
    async def get_human_input(self, prompt: str) -> str:
        if prompt.startswith(
            "Provide feedback to assistant. Press enter to skip and use auto-reply"
        ):
            res = cl.run_sync(
                ask_helper(
                    cl.AskActionMessage,
                    content="Continue or provide feedback?",
                    actions=[
                        cl.Action(
                            name="continue",
                            value="continue",
                            label="✅ Continue"
                        ),
                        cl.Action(
                            name="feedback",
                            value="feedback",
                            label="💬 Provide feedback",
                        ),
                        cl.Action(
                            name="exit",
                            value="exit",
                            label="🔚 Exit Conversation" 
                        ),
                    ]
                )
            )
            if res.get("value") == "continue":
                return ""
            if res.get("value") == "exit":
                return "exit"

        reply = cl.run_sync(
            ask_helper(cl.AskUserMessage, content=prompt, timeout=60)
        )

        return reply["content"].strip()

    def send(
        self,
        message: Union[Dict, str],
        recipient: Agent,
        request_reply: Optional[bool] = None,
        silent: Optional[bool] = False,
    ):  
        if isinstance(message, dict):
            message = message['content']

        cl.run_sync(
            cl.Message(
                content=f'*Sending message to "{recipient.name}"*:\n\n{message}',
                author="UserProxyAgent",
            ).send()
        )
        super(ChainlitUserProxyAgent, self).send(
            message=message,
            recipient=recipient,
            request_reply=request_reply,
            silent=silent,
        )

MAX_ITER=10
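
# llm_config was not included in the report; a typical pyautogen 0.2.x
# config (assumed values, not from the original post) would be:
llm_config = {
    "config_list": [
        {"model": "gpt-4", "api_key": os.environ.get("OPENAI_API_KEY")},
    ]
}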

assistant = ChainlitConversableAgent(
    name="Assistant", llm_config=llm_config,
    system_message="""Assistant. Assist the User Proxy in the task.""",
    description="Assistant Agent"
)

user_proxy = ChainlitUserProxyAgent(
    name="User_Proxy",
    human_input_mode="ALWAYS",
    llm_config=llm_config,
    code_execution_config=False,
    system_message="""Manager. Do the task. Collaborate with the Assistant to finish the task.
                    """,
    description="User Proxy Agent"
)
groupchat = autogen.GroupChat(agents=[user_proxy, assistant], messages=[], max_round=MAX_ITER)
manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)

USER_PROXY_NAME = "Query Agent"
ASSISTANT = "Assistant"

@cl.on_chat_start
async def on_chat_start():
    cl.user_session.set(USER_PROXY_NAME, user_proxy)
    cl.user_session.set(ASSISTANT, assistant)

    msg = cl.Message(content=f"""Hello! What task would you like to get done today?      
                     """, 
                     author="User_Proxy")
    await msg.send()

@cl.on_message
async def on_message(message: cl.Message):

    CONTEXT = message.content
    assistant = cl.user_session.get(ASSISTANT)
    user_proxy = cl.user_session.get(USER_PROXY_NAME)
    groupchat = autogen.GroupChat(agents=[user_proxy, assistant], messages=[], max_round=MAX_ITER)
    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
    # -------------------- Conversation logic. Edit to change your first message based on the task you want to get done. -------------------- #
    if len(groupchat.messages) == 0:
        message = f"Do the task based on the user input: {CONTEXT}."
        await cl.Message(content="Starting agents on task...").send()
        await cl.make_async(user_proxy.initiate_chat)(manager, message=message)
    elif len(groupchat.messages) < MAX_ITER:
        await cl.make_async(user_proxy.send)(manager, message=CONTEXT)
    elif len(groupchat.messages) == MAX_ITER:
        await cl.make_async(user_proxy.send)(manager, message="exit")

The error occurs when the ChainlitUserProxyAgent tries to send a message, because the message turns out to be a coroutine.

Traceback:

2024-07-26 15:57:01 - 'coroutine' object is not iterable
Traceback (most recent call last):
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/chainlit/utils.py", line 44, in wrapper
    return await user_function(**params_values)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Projects/AI-Sandbox/llm/experiments/autogen/groupchat/basicapp.py", line 62, in on_message
    await cl.make_async(user_proxy.initiate_chat)( manager, message=message, )
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/asyncer/_main.py", line 358, in wrapper
    return await anyio.to_thread.run_sync(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/anyio/to_thread.py", line 33, in run_sync
    return await get_asynclib().run_sync_in_worker_thread(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread
    return await future
           ^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/asyncio/futures.py", line 287, in __await__
    yield self  # This tells Task to wait for completion.
    ^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/asyncio/tasks.py", line 385, in __wakeup
    future.result()
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/asyncio/futures.py", line 203, in result
    raise self._exception.with_traceback(self._exception_tb)
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/anyio/_backends/_asyncio.py", line 807, in run
    result = context.run(func, *args)
             ^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 1018, in initiate_chat
    self.send(msg2send, recipient, silent=silent)
  File "/App/tlim2/Projects/AI-Sandbox/llm/experiments/autogen/groupchat/utils.py", line 119, in send
    super(ChainlitUserProxyAgent, self).send(
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 655, in send
    recipient.receive(message, self, request_reply, silent)
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 818, in receive
    reply = self.generate_reply(messages=self.chat_messages[sender], sender=sender)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 1972, in generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/groupchat.py", line 1080, in run_chat
    speaker.send(reply, self, request_reply=False, silent=silent)
  File "/App/tlim2/Projects/AI-Sandbox/llm/experiments/autogen/groupchat/utils.py", line 119, in send
    super(ChainlitUserProxyAgent, self).send(
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 653, in send
    valid = self._append_oai_message(message, "assistant", recipient)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 574, in _append_oai_message
    message = self._message_to_dict(message)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 534, in _message_to_dict
    return dict(message)
           ^^^^^^^^^^^^^
TypeError: 'coroutine' object is not iterable

I'm not sure why it's a coroutine when running on Chainlit, but the same code works fine in Jupyter notebooks.

tituslhy commented 1 month ago

I've narrowed it down: the coroutine is the async get_human_input method on the ChainlitUserProxyAgent itself. Still can't fix it, though. When I remove the "async", "Provide feedback to chat_manager. Press enter to skip and use auto-reply, or type 'exit' to end the conversation:" shows up in my Chainlit panel, but when I type a reply, the last line of get_human_input fails with KeyError: 'content'.
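My read on why it was a coroutine in the first place: autogen's check_termination_and_human_reply calls get_human_input synchronously, so an async override hands back an un-awaited coroutine object instead of a string, and that object is what flows into send(). A toy sketch of the mechanism (made-up classes, not the real autogen internals):

class Base:
    def get_human_input(self, prompt: str) -> str:  # synchronous, like autogen's ConversableAgent
        return "typed reply"

class Child(Base):
    async def get_human_input(self, prompt: str) -> str:  # async override, as in the repro
        return "typed reply"

reply = Child().get_human_input("feedback: ")  # the framework calls it synchronously
print(type(reply))  # <class 'coroutine'> -- this is what ends up inside send()

Traceback: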

2024-07-26 16:29:29 - 'content'
Traceback (most recent call last):
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/chainlit/utils.py", line 44, in wrapper
    return await user_function(**params_values)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Projects/AI-Sandbox/llm/experiments/autogen/groupchat/basicapp.py", line 62, in on_message
    await cl.make_async(user_proxy.initiate_chat)( manager, message=message, )
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/asyncer/_main.py", line 358, in wrapper
    return await anyio.to_thread.run_sync(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/anyio/to_thread.py", line 33, in run_sync
    return await get_asynclib().run_sync_in_worker_thread(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread
    return await future
           ^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/asyncio/futures.py", line 287, in __await__
    yield self  # This tells Task to wait for completion.
    ^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/asyncio/tasks.py", line 385, in __wakeup
    future.result()
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/asyncio/futures.py", line 203, in result
    raise self._exception.with_traceback(self._exception_tb)
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/anyio/_backends/_asyncio.py", line 807, in run
    result = context.run(func, *args)
             ^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 1018, in initiate_chat
    self.send(msg2send, recipient, silent=silent)
  File "/App/tlim2/Projects/AI-Sandbox/llm/experiments/autogen/groupchat/utils.py", line 107, in send
    super(ChainlitUserProxyAgent, self).send(
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 655, in send
    recipient.receive(message, self, request_reply, silent)
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 818, in receive
    reply = self.generate_reply(messages=self.chat_messages[sender], sender=sender)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 1972, in generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/groupchat.py", line 1052, in run_chat
    reply = speaker.generate_reply(sender=self)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 1972, in generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Anaconda3/envs/llamaindex/lib/python3.12/site-packages/autogen/agentchat/conversable_agent.py", line 1724, in check_termination_and_human_reply
    reply = self.get_human_input(
            ^^^^^^^^^^^^^^^^^^^^^
  File "/App/tlim2/Projects/AI-Sandbox/llm/experiments/autogen/groupchat/utils.py", line 92, in get_human_input
    return reply["content"].strip()
           ~~~~~^^^^^^^^^^^
KeyError: 'content'
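Putting the two findings together, a possible fix (a sketch, assuming recent Chainlit returns Ask*Message responses as a dict keyed by "output" rather than "content", which would explain the KeyError above): keep get_human_input synchronous so autogen gets a plain string back, bridge into Chainlit with cl.run_sync, and read the reply defensively:

from autogen import UserProxyAgent
import chainlit as cl

class ChainlitUserProxyAgent(UserProxyAgent):
    # No "async" here: autogen's check_termination_and_human_reply calls
    # get_human_input synchronously and expects a string back.
    def get_human_input(self, prompt: str) -> str:
        # ask_helper as defined in the repro above
        reply = cl.run_sync(
            ask_helper(cl.AskUserMessage, content=prompt, timeout=60)
        )
        if not reply:  # e.g. the ask timed out
            return ""
        # Assumed key change: newer Chainlit appears to put the user's text
        # under "output"; fall back to "content" for older versions.
        return (reply.get("output") or reply.get("content") or "").strip()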