I'm using the LLM Compiler Agent Cookbook with some modifications so that it reads PDF files instead of Wikipedia. Aside from how the PDFs are parsed, the bones of the application remain the same. The code executes until the very end, when the `response = agent.chat(...)` statement raises the error "ValueError: Tool search not found."
from llama_index import ServiceContext
from llama_index.llms import OpenAI
from llama_index.callbacks import CallbackManager
Define Toolset
from llama_index.readers import PDFReader
from llama_index import load_index_from_storage, StorageContext
from llama_index.node_parser import SentenceSplitter
from llama_index.tools import QueryEngineTool, ToolMetadata
from llama_index import VectorStoreIndex
Setup LLMCompilerAgent
from llama_hub.llama_packs.agents.llm_compiler.step import LLMCompilerAgentWorker
from llama_index.agent import AgentRunner
Setup Data
data_folder = "./data" # Path to the folder containing the PDF files
response = agent.chat(
"Tell me about Rogers Corp and the industries they serve."
)
print(str(response))
Relevant Logs/Tracebacks
> Running step d1f788b7-290d-42c1-80b2-86b2a3c70205 for task aa0924d3-e110-4567-8abc-5da5af7f13c9.
> Step count: 0
> Plan: 1. search("Rogers Corp")
2. vector_tool_nyse-rog-2023-10K-23693240.pdf({"input": "$1"})
3. join()<END_OF_PLAN>
Traceback (most recent call last):
File "C:\Users\User\AppData\Local\Programs\Python\Python311\Lib\runpy.py", line 198, in _run_module_as_main
return _run_code(code, main_globals, None,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\User\AppData\Local\Programs\Python\Python311\Lib\runpy.py", line 88, in _run_code
exec(code, run_globals)
File "c:\Users\User\.vscode\extensions\ms-python.python-2023.22.1\pythonFiles\lib\python\debugpy\adapter/../..\debugpy\launcher/../..\debugpy\__main__.py", line 39, in <module>
cli.main()
File "c:\Users\User\.vscode\extensions\ms-python.python-2023.22.1\pythonFiles\lib\python\debugpy\adapter/../..\debugpy\launcher/../..\debugpy/..\debugpy\server\cli.py", line 430, in main
run()
File "c:\Users\User\.vscode\extensions\ms-python.python-2023.22.1\pythonFiles\lib\python\debugpy\adapter/../..\debugpy\launcher/../..\debugpy/..\debugpy\server\cli.py", line 284, in run_file
runpy.run_path(target, run_name="__main__")
File "c:\Users\User\.vscode\extensions\ms-python.python-2023.22.1\pythonFiles\lib\python\debugpy\_vendored\pydevd\_pydevd_bundle\pydevd_runpy.py", line 321, in run_path
return _run_module_code(code, init_globals, run_name,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\.vscode\extensions\ms-python.python-2023.22.1\pythonFiles\lib\python\debugpy\_vendored\pydevd\_pydevd_bundle\pydevd_runpy.py", line 135, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "c:\Users\User\.vscode\extensions\ms-python.python-2023.22.1\pythonFiles\lib\python\debugpy\_vendored\pydevd\_pydevd_bundle\pydevd_runpy.py", line 124, in _run_code
exec(code, run_globals)
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\pdf_app.py", line 95, in <module>
response = agent.chat(
^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_index\callbacks\utils.py", line 41, in wrapper
return func(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_index\agent\runner\base.py", line 497, in chat
chat_response = self._chat(
^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_index\agent\runner\base.py", line 442, in _chat
cur_step_output = self._run_step(
^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_index\agent\runner\base.py", line 304, in _run_step
cur_step_output = self.agent_worker.run_step(step, task, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_index\callbacks\utils.py", line 41, in wrapper
return func(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_hub\llama_packs\agents\llm_compiler\step.py", line 400, in run_step
return asyncio.run(self.arun_step(step=step, task=task, **kwargs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\User\AppData\Local\Programs\Python\Python311\Lib\asyncio\runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "C:\Users\User\AppData\Local\Programs\Python\Python311\Lib\asyncio\runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\User\AppData\Local\Programs\Python\Python311\Lib\asyncio\base_events.py", line 653, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_index\callbacks\utils.py", line 56, in async_wrapper
return await func(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_hub\llama_packs\agents\llm_compiler\step.py", line 407, in arun_step
return await self._arun_step(step, task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_hub\llama_packs\agents\llm_compiler\step.py", line 371, in _arun_step
task_dict = self.output_parser.parse(cast(str, llm_response.message.content))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_hub\llama_packs\agents\llm_compiler\output_parser.py", line 56, in parse
return get_graph_dict(results, self.tools)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_hub\llama_packs\agents\llm_compiler\utils.py", line 112, in get_graph_dict
task = instantiate_new_step(
^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_hub\llama_packs\agents\llm_compiler\utils.py", line 84, in instantiate_new_step
tool = _find_tool(tool_name, tools)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "c:\Users\User\Documents\PythonProjects\CompilerAgent\.venv\Lib\site-packages\llama_hub\llama_packs\agents\llm_compiler\utils.py", line 54, in _find_tool
raise ValueError(f"Tool {tool_name} not found.")
ValueError: Tool search not found.
Bug Description
I'm using the LLM Compiler Agent Cookbook with some modifications so that it reads PDF files instead of Wikipedia. Aside from how the PDFs are parsed, the bones of the application remain the same. The code executes until the very end, when the `response = agent.chat(...)` statement raises the error "ValueError: Tool search not found."
You can download a SEC 10-K pdf from this website: https://stocklight.com/stocks/us/nyse-rog/rogers/annual-reports
Version
version 0.9.34
Steps to Reproduce
import os
from dotenv import load_dotenv
Setup LLM + Service Context
from llama_index import ServiceContext
from llama_index.llms import OpenAI
from llama_index.callbacks import CallbackManager
Define Toolset
from llama_index.readers import PDFReader
from llama_index import load_index_from_storage, StorageContext
from llama_index.node_parser import SentenceSplitter
from llama_index.tools import QueryEngineTool, ToolMetadata
from llama_index import VectorStoreIndex
Setup LLMCompilerAgent
from llama_hub.llama_packs.agents.llm_compiler.step import LLMCompilerAgentWorker
from llama_index.agent import AgentRunner
Setup Data
data_folder = "./data" # Path to the folder containing the PDF files
Set OpenAI API key as an environment variable
load_dotenv()
llm = OpenAI(model="gpt-3.5-turbo", openai_api_key=os.getenv("OPENAI_API_KEY"))
node_parser = SentenceSplitter()
service_context = ServiceContext.from_defaults(llm=llm)
callback_manager = CallbackManager([])
Build agents dictionary
query_engine_tools = []
Define the directory for storing vector indexes
vector_index_dir = "./vector_indexes"
Iterate through the PDF files in the data folder
for filename in os.listdir(data_folder):
    if filename.endswith(".pdf"):
        file_path = os.path.join(data_folder, filename)
        pdf_reader = PDFReader()
        pdf_data = pdf_reader.load_data(file_path)
llm = OpenAI(model="gpt-3.5-turbo")
agent_worker = LLMCompilerAgentWorker.from_tools(
    query_engine_tools,
    llm=llm,
    verbose=True,
    callback_manager=callback_manager,
)
agent = AgentRunner(agent_worker, callback_manager=callback_manager)
Test out Queries
response = agent.chat(
    "Tell me about Rogers Corp and the industries they serve."
)
print(str(response))
Relevant Logs/Tracebacks