Framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks.
I am encountering an error when attempting to use memory in the CrewAI framework.
The code works perfectly without memory enabled.
The error occurs when memory and embeddings are enabled.
The Code
import os
import streamlit as st
from crewai import Agent, Task, Crew, Process
from langchain_openai import AzureChatOpenAI
from langchain.tools import DuckDuckGoSearchRun
from crewai_tools import SerperDevTool
from langchain.agents import Tool
from dotenv import load_dotenv

# Load credentials from .env into the process environment before reading them.
load_dotenv()

# Required settings: fail fast with KeyError if the key or endpoint is absent.
AZURE_OPENAI_KEY = os.environ["AZURE_OPENAI_KEY"]
AZURE_OPENAI_ENDPOINT = os.environ["AZURE_OPENAI_ENDPOINT"]
# BUG FIX: the original line reassigned AZURE_OPENAI_KEY to the API *version*
# value, silently clobbering the key read just above. Bind the version to its
# own name instead.
AZURE_OPENAI_VERSION = os.environ.get("AZURE_OPENAI_VERSION")
AZURE_OPENAI_DEPLOYMENT = os.environ.get("AZURE_OPENAI_DEPLOYMENT")
# Chat LLM backed by an Azure OpenAI deployment. Every connection detail is
# pulled from the environment (populated by load_dotenv() above).
_llm_settings = {
    "azure_endpoint": os.environ.get("AZURE_OPENAI_ENDPOINT"),
    "api_key": os.environ.get("AZURE_OPENAI_KEY"),
    "model_name": os.environ.get("AZURE_OPENAI_DEPLOYMENT"),
    "api_version": os.environ.get("AZURE_OPENAI_VERSION"),
}
azure_llm = AzureChatOpenAI(**_llm_settings)
# Agent persona: a father answering his 10-year-old's questions.
father_agent = Agent(
    role='Father',
    goal=(
        'You are the father of a kid. The kid may ask you any question.'
        'Your goal is to provide a satisfactory answer to the kid.'
    ),
    backstory=(
        "You are a 40 year old male. You live in the city of San Jose with your wife and kid who is 10 years old."
    ),
    llm=azure_llm,
    # Per-agent memory and delegation are both enabled.
    memory=True,
    allow_delegation=True,
    tools=[],
    max_rpm=400,       # rate-limit ceiling for this agent's LLM calls
    verbose=True,
)
# Single task: answer the kid's {question} (interpolated at kickoff time).
father_task = Task(
    agent=father_agent,
    description=(
        "Your task is to answer the {question} of your kid in a satisfactory "
        "and legible way so that it makes sense to your kid. "
    ),
    expected_output='Answer to your kid question',
    tools=[],
    # human_input = True,
)
# BUG FIX for the 401 on memory search: the crew's memory embedder
# (embedchain -> langchain AzureOpenAIEmbeddings) does NOT use `azure_llm`'s
# credentials — it resolves its own from the standard environment variable
# names (AZURE_OPENAI_API_KEY, OPENAI_API_VERSION). This script only stores
# the key under the non-standard name AZURE_OPENAI_KEY, so the embedding
# request went out unauthenticated. Mirror the values under the names the
# embedder actually looks up, before constructing the Crew.
# NOTE(review): if the embedding deployment lives on a *different* Azure
# resource than the chat model, point these at that resource's key/version
# instead — confirm against your Azure setup.
os.environ.setdefault("AZURE_OPENAI_API_KEY", os.environ.get("AZURE_OPENAI_KEY", ""))
os.environ.setdefault("OPENAI_API_VERSION", os.environ.get("AZURE_OPENAI_VERSION", ""))

# Crew-level memory is on, so an embedder is required for the vector store.
parent_crew = Crew(
    agents=[father_agent],
    tasks=[father_task],
    process=Process.sequential,
    memory=True,
    verbose=True,
    embedder={
        "provider": "azure_openai",
        "config": {
            "model": 'text-embedding-ada-002',
            "deployment_name": "text-embedding-ada-002",
        },
    },
)
# Minimal Streamlit front end: one text box; run the crew whenever the kid
# submits a non-empty question.
st.title('Ask Father')
kid_question = st.text_input("Kid: ")
if kid_question:
    crew_result = parent_crew.kickoff({"question": kid_question})
    st.text("Father: ")
    st.write(crew_result)
The Error
[DEBUG]: == Working Agent: Father
[INFO]: == Starting Task: Your task is to answer the what is earth of your kid in a satisfactory and legible way so that it makes sense to your kid.
2024-06-19 10:16:30.587 Uncaught app exception
Traceback (most recent call last):
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/streamlit/runtime/scriptrunner/script_runner.py", line 600, in _run_script
exec(code, module.dict)
File "/home/maklinux/repos/webapp_front_back_sapien/backend/test_crew_02_withmemory.py", line 71, in
answer = parent_crew.kickoff({"question": question})
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/crew.py", line 264, in kickoff
result = self._run_sequential_process()
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/crew.py", line 305, in _run_sequential_process
output = task.execute(context=task_output)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/task.py", line 183, in execute
result = self._execute(
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/task.py", line 192, in _execute
result = agent.execute_task(
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/agent.py", line 222, in execute_task
memory = contextual_memory.build_context_for_task(task, context)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/memory/contextual/contextual_memory.py", line 24, in build_context_for_task
context.append(self._fetch_stm_context(query))
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/memory/contextual/contextual_memory.py", line 33, in _fetch_stm_context
stm_results = self.stm.search(query)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/memory/short_term/short_term_memory.py", line 23, in search
return self.storage.search(query=query, score_threshold=score_threshold) # type: ignore # BUG? The reference is to the parent class, but the parent class does not have this parameters
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/crewai/memory/storage/rag_storage.py", line 95, in search
else self.app.search(query, limit)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/embedchain/embedchain.py", line 650, in search
return [{"context": c[0], "metadata": c[1]} for c in self.db.query(params)]
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/embedchain/vectordb/chroma.py", line 220, in query
result = self.collection.query(
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/chromadb/api/models/Collection.py", line 327, in query
valid_query_embeddings = self._embed(input=valid_query_texts)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/chromadb/api/models/Collection.py", line 633, in _embed
return self._embedding_function(input=input)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/chromadb/api/types.py", line 193, in call
result = call(self, input)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/embedchain/embedder/base.py", line 20, in call
return self.embedding_fn(input)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 668, in embed_documents
return self._get_len_safe_embeddings(texts, engine=engine)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 494, in _get_len_safe_embeddings
response = embed_with_retry(
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 116, in embed_with_retry
return embeddings.client.create(kwargs)
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/openai/resources/embeddings.py", line 114, in create
return self._post(
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/openai/_base_client.py", line 1240, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/openai/_base_client.py", line 921, in request
return self._request(
File "/home/maklinux/repos/webapp_front_back_sapien/venv/lib/python3.10/site-packages/openai/_base_client.py", line 1020, in _request
raise self._make_status_error_from_response(err.response) from None
openai.AuthenticationError: Error code: 401 - {'statusCode': 401, 'message': 'Unauthorized. Access token is missing, invalid, audience is incorrect (https://cognitiveservices.azure.com), or have expired.'}
I'm facing the same issue.
My embedding deployment uses a different API base and token than the chat endpoint.
The code would have to be changed to configure the Azure OpenAI embedder correctly using its own api_base, api_key, and api version.
I am encountering an error when attempting to use memory in the Crew AI framework.
The Code
The Error