Traceback (most recent call last):
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/run_pipeline.py", line 44, in <module>
    best_prompt = pipeline.run_pipeline(opt.num_steps)
                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/optimization_pipeline.py", line 281, in run_pipeline
    stop_criteria = self.step(i, num_steps_remaining)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/optimization_pipeline.py", line 253, in step
    records = self.predictor.apply(self.dataset, self.batch_id, leq=True)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/estimator/estimator_llm.py", line 95, in apply
    return self.apply_dataframe(batch_records)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/estimator/estimator_llm.py", line 75, in apply_dataframe
    all_results = self.chain.batch_invoke(mini_batch_inputs, self.num_workers)
                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/utils/llm_chain.py", line 150, in batch_invoke
    results = asyncio.run(self.async_batch_invoke(inputs[i:i + num_workers]))
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/.pyenv/versions/3.11.2/lib/python3.11/asyncio/runners.py", line 190, in run
    return runner.run(main)
           ^^^^^^^^^^^^^^^^
  File "/home/omer/.pyenv/versions/3.11.2/lib/python3.11/asyncio/runners.py", line 118, in run
    return self._loop.run_until_complete(task)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/.pyenv/versions/3.11.2/lib/python3.11/asyncio/base_events.py", line 653, in run_until_complete
    return future.result()
           ^^^^^^^^^^^^^^^
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/utils/llm_chain.py", line 120, in async_batch_invoke
    all_res = await self.retry_operation(tasks)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/work/kitty-server4/external/tools/AutoPrompt/utils/llm_chain.py", line 98, in retry_operation
    done, pending = await asyncio.wait(tasks, timeout=delay)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/omer/.pyenv/versions/3.11.2/lib/python3.11/asyncio/tasks.py", line 415, in wait
    raise TypeError("Passing coroutines is forbidden, use tasks explicitly.")
TypeError: Passing coroutines is forbidden, use tasks explicitly.
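For context, this failure comes from a Python version change rather than from the inputs: asyncio.wait() stopped accepting bare coroutine objects in Python 3.11 (the behaviour had been deprecated since 3.8), so any call site that still passes coroutines now raises this TypeError. A minimal standalone reproduction, with illustrative names that are not part of the AutoPrompt code:

import asyncio

async def work(n):
    await asyncio.sleep(0.01)
    return n

async def main():
    coros = [work(i) for i in range(3)]
    # On Python 3.11+ the next line raises:
    # TypeError: Passing coroutines is forbidden, use tasks explicitly.
    done, pending = await asyncio.wait(coros, timeout=5)

asyncio.run(main())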
Fix: the coroutines passed to asyncio.wait() need to be wrapped in explicit task objects (for example with asyncio.create_task()) to overcome this error: "TypeError: Passing coroutines is forbidden, use tasks explicitly." A sketch of the change is shown below.
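A minimal sketch of that change, assuming retry_operation receives a plain list of coroutines; apart from asyncio.wait() and asyncio.create_task(), the names and the timeout handling here are illustrative rather than the actual llm_chain.py implementation:

import asyncio

async def retry_operation(coros, delay):
    # Wrap every coroutine in an explicit Task; Python 3.11 forbids
    # passing bare coroutines to asyncio.wait().
    tasks = [asyncio.create_task(coro) for coro in coros]
    done, pending = await asyncio.wait(tasks, timeout=delay)
    # Cancel anything that did not finish within the timeout so it can
    # be retried on the next attempt.
    for task in pending:
        task.cancel()
    return done, pending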
This fix enables running with the following parameters in the config: