Open shizidushu opened 3 weeks ago
@shizidushu as the first error stated, I'm pretty sure you just need to enable nested async
import nest_asyncio
nest_asyncio.apply()
@logan-markewich
After adding the code you mentioned and running the async version of the code, here is the traceback:
DEBUG:httpcore.http11:send_request_headers.started request=<Request [b'POST']>
send_request_headers.started request=<Request [b'POST']>
Traceback (most recent call last):
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 31, in asyncio_run
raise RuntimeError(
RuntimeError: Nested async detected. Use async functions where possible (`aquery`, `aretrieve`, `arun`, etc.). Otherwise, use `import nest_asyncio; nest_asyncio.apply()` to enable nested async or use in a jupyter notebook.
If you are experiencing while using async functions and not in a notebook, please raise an issue on github, as it indicates a bad design pattern.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\4-Working-Project\LLM-RAG-Knowloadege-Base\examples\parse_html_table_unstructured.py", line 54, in <module>
asyncio.run(main())
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\nest_asyncio.py", line 30, in run
return loop.run_until_complete(task)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\nest_asyncio.py", line 98, in run_until_complete
return f.result()
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\futures.py", line 201, in result
raise self._exception.with_traceback(self._exception_tb)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\tasks.py", line 232, in __step
result = coro.send(None)
File "D:\4-Working-Project\LLM-RAG-Knowloadege-Base\examples\parse_html_table_unstructured.py", line 48, in main
raw_nodes_2020 = node_parser.get_nodes_from_documents(docs_2020)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\interface.py", line 129, in get_nodes_from_documents
nodes = self._parse_nodes(documents, show_progress=show_progress, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 120, in _parse_nodes
nodes = self.get_nodes_from_node(node)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\unstructured_element.py", line 67, in get_nodes_from_node
self.extract_table_summaries(table_elements)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 208, in extract_table_summaries
summary_outputs = asyncio_run(summary_co)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 42, in asyncio_run
return asyncio.run(coro)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\nest_asyncio.py", line 30, in run
return loop.run_until_complete(task)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\nest_asyncio.py", line 98, in run_until_complete
return f.result()
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\futures.py", line 201, in result
raise self._exception.with_traceback(self._exception_tb)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\tasks.py", line 232, in __step
result = coro.send(None)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 144, in run_jobs
results = await tqdm_asyncio.gather(*pool_jobs, desc=desc)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py", line 79, in gather
res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout,
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py", line 79, in <listcomp>
res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout,
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\tasks.py", line 571, in _wait_for_one
return f.result() # May raise f.exception().
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\futures.py", line 201, in result
raise self._exception.with_traceback(self._exception_tb)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\tasks.py", line 232, in __step
result = coro.send(None)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py", line 76, in wrap_awaitable
return i, await f
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 137, in worker
return await job
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 192, in _get_table_output
response = await query_engine.aquery(summary_query_str)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\base\base_query_engine.py", line 63, in aquery
query_result = await self._aquery(str_or_query_bundle)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\query_engine\retriever_query_engine.py", line 206, in _aquery
response = await self._response_synthesizer.asynthesize(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\base.py", line 305, in asynthesize
response_str = await self.aget_response(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\compact_and_refine.py", line 23, in aget_response
return await super().aget_response(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 363, in aget_response
response = await self._agive_response_single(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 482, in _agive_response_single
structured_response = await program.acall(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 92, in acall
answer = await self._llm.astructured_predict(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\llms\llm.py", line 395, in astructured_predict
result = await program.acall(**prompt_args)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\program\openai\base.py", line 209, in acall
chat_response = await self._llm.achat(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai_like\base.py", line 147, in achat
return await super().achat(messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\llms\callbacks.py", line 75, in wrapped_async_llm_chat
f_return_val = await f(_self, messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai\base.py", line 598, in achat
return await achat_fn(messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 142, in async_wrapped
return await fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 58, in __call__
do = await self.iter(retry_state=retry_state)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 110, in iter
result = await action(retry_state)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 78, in inner
return fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\__init__.py", line 390, in <lambda>
self._add_action_func(lambda rs: rs.outcome.result())
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\concurrent\futures\_base.py", line 451, in result
return self.__get_result()
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\concurrent\futures\_base.py", line 403, in __get_result
raise self._exception
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 61, in __call__
result = await fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai\base.py", line 656, in _achat
openai_message = response.choices[0].message
AttributeError: 'str' object has no attribute 'choices'
0%| | 0/91 [00:01<?, ?it/s]
Bug Description
I tried to run the example https://docs.llamaindex.ai/en/stable/examples/query_engine/sec_tables/tesla_10q_table/ with
OpenAILike
I cannot run the sync version of the code; it errors.
So I wrapped it in asyncio, and then it errors as well.
Version
0.10.37
Steps to Reproduce
use asyncio
code when not using async
Relevant Logs/Tracebacks
Traceback (if using async in the code):
Traceback (if not using async in the code):