griptape-ai / griptape

Modular Python framework for AI agents and workflows with chain-of-thought reasoning, tools, and memory.
https://www.griptape.ai
Apache License 2.0

ConversationMemory ErrorArtifact prompt_stack not clear why an issue occurs. ValueError: Unsupported artifact type: <class 'griptape.artifacts.error_artifact.ErrorArtifact'> #1028

Closed: jm-nab closed this issue 3 months ago

jm-nab commented 3 months ago

Is your feature request related to a problem? Please describe.

When a run produces an ErrorArtifact, the prompt stack throws a misleading error:

@define
class ConversationMemory(BaseConversationMemory):
    def try_add_run(self, run: Run) -> None:
        self.runs.append(run)

        if self.max_runs:
            while len(self.runs) > self.max_runs:
                self.runs.pop(0)

    def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
        prompt_stack = PromptStack()
        runs = self.runs[-last_n:] if last_n else self.runs
        for run in runs:
            prompt_stack.add_user_message(run.input)
            prompt_stack.add_assistant_message(run.output)
        return prompt_stack
[07/29/24 21:49:47] ERROR    PromptTask vector_routing
                             Unsupported artifact type: <class 'griptape.artifacts.error_artifact.ErrorArtifact'>
                             Traceback (most recent call last):
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/tasks/base_task.py", line 137, in execute
                                 self.output = self.run()
                                               ^^^^^^^^^^
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/tasks/prompt_task.py", line 92, in run
                                 message = self.prompt_driver.run(self.prompt_stack)
                                                                  ^^^^^^^^^^^^^^^^^
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/tasks/prompt_task.py", line 56, in prompt_stack
                                 memory.add_to_prompt_stack(stack, 1 if system_template else 0)
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/memory/structure/base_conversation_memory.py", line 75, in add_to_prompt_stack
                                 memory_inputs = self.to_prompt_stack(num_runs_to_fit_in_prompt).messages
                                                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/memory/structure/conversation_memory.py", line 22, in to_prompt_stack
                                 prompt_stack.add_assistant_message(run.output)
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/common/prompt_stack/prompt_stack.py", line 48, in add_assistant_message
                                 return self.add_message(artifact, Message.ASSISTANT_ROLE)
                                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/common/prompt_stack/prompt_stack.py", line 26, in add_message
                                 new_content = self.__process_artifact(artifact)
                                               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                               File "/home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/common/prompt_stack/prompt_stack.py", line 65, in __process_artifact
                                 raise ValueError(f"Unsupported artifact type: {type(artifact)}")
                             ValueError: Unsupported artifact type: <class 'griptape.artifacts.error_artifact.ErrorArtifact'>

Describe the solution you'd like

When a run generates an ErrorArtifact, surfacing the reason for the error would help. I tried setting a pdb.set_trace(), and the ErrorArtifact had an empty .value and a None .exception.
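One possible shape for that, as a hedged sketch only (a subclass with a hypothetical name, not current library behavior; assumes ConversationMemory is importable from griptape.memory.structure, matching the traceback above): surface the ErrorArtifact's value as a plain-text assistant message instead of letting add_assistant_message reject the artifact type.

from __future__ import annotations
from typing import Optional
from attrs import define
from griptape.artifacts import ErrorArtifact, TextArtifact
from griptape.common import PromptStack
from griptape.memory.structure import ConversationMemory


@define
class ErrorAwareConversationMemory(ConversationMemory):
    def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
        prompt_stack = PromptStack()
        runs = self.runs[-last_n:] if last_n else self.runs
        for run in runs:
            prompt_stack.add_user_message(run.input)
            if isinstance(run.output, ErrorArtifact):
                # Surface the failure as text instead of crashing on an
                # unsupported artifact type; fall back to a generic note when
                # value is empty, as in the report above.
                reason = run.output.value or "unknown error"
                prompt_stack.add_assistant_message(
                    TextArtifact(f"[previous run failed: {reason}]")
                )
            else:
                prompt_stack.add_assistant_message(run.output)
        return prompt_stack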

Describe alternatives you've considered

from __future__ import annotations
from attrs import define
from typing import Optional
from griptape.artifacts import ErrorArtifact
from griptape.memory.structure import Run, BaseConversationMemory
from griptape.common import PromptStack

@define
class ConversationMemory(BaseConversationMemory):
    def try_add_run(self, run: Run) -> None:
        self.runs.append(run)

        if self.max_runs:
            while len(self.runs) > self.max_runs:
                self.runs.pop(0)

    def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
        prompt_stack = PromptStack()
        runs = self.runs[-last_n:] if last_n else self.runs
        for run in runs:
            prompt_stack.add_user_message(run.input)
            if isinstance(run.output, ErrorArtifact):
                breakpoint()
                print(run.output)
            prompt_stack.add_assistant_message(run.output)
        return prompt_stack

Additional context

vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/memory/structure/conversation_memory.py(25)to_prompt_stack()
-> print(run.output)

(Pdb) print(run.output)

(Pdb) print(run.output.exception)
None

(Pdb) print(type(run.output))
<class 'griptape.artifacts.error_artifact.ErrorArtifact'>

(Pdb) print(run.output.to_json())
{"type": "ErrorArtifact", "id": "e067ec4c5c014d79aa813c148e23f413", "reference": null, "meta": {}, "name": "e067ec4c5c014d79aa813c148e23f413", "value": ""}

Notes:

RedisConversationMemoryDriver(
    index=conversation_id,
    conversation_id=conversation_id,
    host=host,
    port=port,
)
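
For context, a hedged sketch of how this driver presumably relates to the memory the ErrorArtifact was read back from, mirroring the ConversationMemory(driver=...) call visible in the traceback further down (the import path for RedisConversationMemoryDriver and the placeholder values are assumptions):

from griptape.drivers import RedisConversationMemoryDriver
from griptape.memory.structure import ConversationMemory

conversation_id = "example-conversation"  # placeholder
host = "localhost"                        # placeholder
port = 6379                               # placeholder (default Redis port)

# index and conversation_id are both set to the same id here, matching the
# snippet above, so a run persisted under that id (like the empty
# ErrorArtifact) is loaded back whenever the same conversation_id is reused.
memory = ConversationMemory(
    driver=RedisConversationMemoryDriver(
        index=conversation_id,
        conversation_id=conversation_id,
        host=host,
        port=port,
    )
)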
collindutter commented 3 months ago

@jm-nab thanks for opening this issue! Are you able to provide a minimal reproducible example that shows how your input is raising an ErrorArtifact? This context will help us figure out the best path forward here.
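
A hedged sketch of the kind of direct reproduction being asked for, which bypasses the real workflow and manually plants an ErrorArtifact run mirroring the empty-value artifact dumped above (it assumes ConversationMemory and Run accept these constructor arguments, as the attrs fields and pdb output suggest):

from griptape.artifacts import ErrorArtifact, TextArtifact
from griptape.memory.structure import ConversationMemory, Run

memory = ConversationMemory()

# Mimic a run whose output was persisted as an empty ErrorArtifact.
memory.try_add_run(
    Run(
        input=TextArtifact("How can I get a docker container deployed for my app in AWS?"),
        output=ErrorArtifact(""),
    )
)

# Raises:
#   ValueError: Unsupported artifact type:
#   <class 'griptape.artifacts.error_artifact.ErrorArtifact'>
memory.to_prompt_stack()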

jm-nab commented 3 months ago

I've been able to find where the error is being rehydrated out of the conversation memory.

I believe what happened was that I had set a breakpoint(), then hit Ctrl+C out of it, and that may have caused a race condition or an exception of some kind that got placed onto the memory.

(Pdb) for x in memory.runs:print(type(x.output))
<class 'griptape.artifacts.text_artifact.TextArtifact'>
<class 'griptape.artifacts.text_artifact.TextArtifact'>
<class 'griptape.artifacts.text_artifact.TextArtifact'>
<class 'griptape.artifacts.error_artifact.ErrorArtifact'>

(Pdb) memory.runs[-1].input.value
'Analyze the given task and provide a comprehensive response based on your expertise. \n                Use the appropriate RAG tool for internal documentation queries when necessary. \n                If the task involves coding, provide well-commented code snippets. \n                If it\'s a research question, provide detailed and accurate information. \n                Suggested collection: ```json\n{\n "collection": "confluence-ITENG"\n}\n```\n                Task: How can I get a docker container deployed for my app in AWS?'

(Pdb) type(memory.runs[-1].output)
<class 'griptape.artifacts.error_artifact.ErrorArtifact'>

(Pdb) memory.runs[-1].output
ErrorArtifact(type='ErrorArtifact', id='e067ec4c5c014d79aa813c148e23f413', reference=None, meta={}, name='e067ec4c5c014d79aa813c148e23f413', value='', exception=None)

(Pdb) where
  <frozen runpy>(198)_run_module_as_main()
  <frozen runpy>(88)_run_code()
  /home/jm/repos/test/vertexai_loaders/src/vertexai_loaders/drivers/mixture_of_experts.py(515)<module>()
-> result = get_experts(query, conversation_id=start + idx)
  /home/jm/repos/test/vertexai_loaders/src/vertexai_loaders/drivers/mixture_of_experts.py(298)get_experts()
-> workflow = Workflow(
  <attrs generated init griptape.structures.workflow.Workflow>(35)__init__()
-> _setattr('conversation_memory', __attr_factory_conversation_memory(self))
  /home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/structures/structure.py(56)<lambda>()
-> lambda self: ConversationMemory(driver=self.config.conversation_memory_driver), takes_self=True
  <attrs generated init griptape.memory.structure.conversation_memory.ConversationMemory>(14)__init__()
-> self.__attrs_post_init__()
> /home/jm/repos/test/vertexai_loaders/venv_311/lib/python3.11/site-packages/griptape/memory/structure/base_conversation_memory.py(27)__attrs_post_init__()
-> if memory is not None:

Whatever is raising and adding the ErrorArtifact to the conversation memory doesn't seem to include any exception information. It might be in a custom class that I have written, where I neglected to raise the exception properly?

From the memory:

ErrorArtifact(type='ErrorArtifact', id='e067ec4c5c014d79aa813c148e23f413', reference=None, meta={}, name='e067ec4c5c014d79aa813c148e23f413', value='', exception=None)
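
Given the empty value and None exception above, a hedged sketch of how custom code could populate the ErrorArtifact so the failure reason survives into memory (risky_call is a hypothetical stand-in; assumes ErrorArtifact's attrs-generated init accepts value positionally and exception as a keyword, as its repr suggests):

from griptape.artifacts import ErrorArtifact

def risky_call() -> str:
    # Hypothetical stand-in for the custom code suspected of swallowing the error.
    raise RuntimeError("vector routing failed")

try:
    output = risky_call()
except Exception as e:
    # Keep both the message and the original exception so the stored run is
    # self-describing instead of value='' / exception=None.
    output = ErrorArtifact(str(e), exception=e)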