csmizzle / conductor

Automation tools for LLMs and Agents

Too many Requests #26

Closed: csmizzle closed this issue 3 months ago

csmizzle commented 3 months ago

The `run_url_marketing_report_thread_task` Celery task fails when Amazon Bedrock throttles the `InvokeModelWithResponseStream` call while streaming the model response. Worker logs:

conductor-server-celery | ...(throttlingException) when calling the InvokeModelWithResponseStream operation: Too many requests, please wait before trying again. You have sent too many requests. Wait before trying again.')
conductor-server-celery | Traceback (most recent call last):
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/celery/app/trace.py", line 453, in trace_task
conductor-server-celery |     R = retval = fun(*args, **kwargs)
conductor-server-celery |                  ^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/celery/app/trace.py", line 736, in __protected_call__
conductor-server-celery |     return self.run(*args, **kwargs)
conductor-server-celery |            ^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/app/agents/tasks.py", line 76, in run_url_marketing_report_thread_task
conductor-server-celery |     report = run_url_marketing_report_thread(
conductor-server-celery |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/app/agents/utils.py", line 312, in run_url_marketing_report_thread
conductor-server-celery |     raise exception
conductor-server-celery |   File "/app/agents/utils.py", line 257, in run_url_marketing_report_thread
conductor-server-celery |     crew_run = run_marketing_crew(
conductor-server-celery |                ^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langsmith/run_helpers.py", line 576, in wrapper
conductor-server-celery |     raise e
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langsmith/run_helpers.py", line 573, in wrapper
conductor-server-celery |     function_result = run_container["context"].run(func, *args, **kwargs)
conductor-server-celery |                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/conductor/crews/marketing/__init__.py", line 45, in run_marketing_crew
conductor-server-celery |     crew_run = crew.run()
conductor-server-celery |                ^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/conductor/crews/marketing/crew.py", line 223, in run
conductor-server-celery |     result = crew.kickoff()
conductor-server-celery |              ^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/crewai/crew.py", line 264, in kickoff
conductor-server-celery |     result = self._run_sequential_process()
conductor-server-celery |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/crewai/crew.py", line 305, in _run_sequential_process
conductor-server-celery |     output = task.execute(context=task_output)
conductor-server-celery |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/crewai/task.py", line 183, in execute
conductor-server-celery |     result = self._execute(
conductor-server-celery |              ^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/crewai/task.py", line 192, in _execute
conductor-server-celery |     result = agent.execute_task(
conductor-server-celery |              ^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/crewai/agent.py", line 236, in execute_task
conductor-server-celery |     result = self.agent_executor.invoke(
conductor-server-celery |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain/chains/base.py", line 163, in invoke
conductor-server-celery |     raise e
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain/chains/base.py", line 153, in invoke
conductor-server-celery |     self._call(inputs, run_manager=run_manager)
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/crewai/agents/executor.py", line 128, in _call
conductor-server-celery |     next_step_output = self._take_next_step(
conductor-server-celery |                        ^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain/agents/agent.py", line 1138, in _take_next_step
conductor-server-celery |     [
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/crewai/agents/executor.py", line 192, in _iter_next_step
conductor-server-celery |     output = self.agent.plan(  # type: ignore #  Incompatible types in assignment (expression has type "AgentAction | AgentFinish | list[AgentAction]", variable has type "AgentAction")
conductor-server-celery |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain/agents/agent.py", line 397, in plan
conductor-server-celery |     for chunk in self.runnable.stream(inputs, config={"callbacks": callbacks}):
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 2875, in stream
conductor-server-celery |     yield from self.transform(iter([input]), config, **kwargs)
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 2862, in transform
conductor-server-celery |     yield from self._transform_stream_with_config(
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 1881, in _transform_stream_with_config
conductor-server-celery |     chunk: Output = context.run(next, iterator)  # type: ignore
conductor-server-celery |                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 2826, in _transform
conductor-server-celery |     for output in final_pipeline:
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 1282, in transform
conductor-server-celery |     for ichunk in input:
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 4736, in transform
conductor-server-celery |     yield from self.bound.transform(
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 1300, in transform
conductor-server-celery |     yield from self.stream(final, config, **kwargs)
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 249, in stream
conductor-server-celery |     raise e
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 229, in stream
conductor-server-celery |     for chunk in self._stream(messages, stop=stop, **kwargs):
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_community/chat_models/bedrock.py", line 249, in _stream
conductor-server-celery |     for chunk in self._prepare_input_and_invoke_stream(
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_community/llms/bedrock.py", line 656, in _prepare_input_and_invoke_stream
conductor-server-celery |     for chunk in LLMInputOutputAdapter.prepare_output_stream(
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/langchain_community/llms/bedrock.py", line 203, in prepare_output_stream
conductor-server-celery |     for event in stream:
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/botocore/eventstream.py", line 603, in __iter__
conductor-server-celery |     parsed_event = self._parse_event(event)
conductor-server-celery |                    ^^^^^^^^^^^^^^^^^^^^^^^^
conductor-server-celery |   File "/usr/local/lib/python3.12/site-packages/botocore/eventstream.py", line 619, in _parse_event
conductor-server-celery |     raise EventStreamError(parsed_response, self._operation_name)
conductor-server-celery | botocore.exceptions.EventStreamError: An error occurred (throttlingException) when calling the InvokeModelWithResponseStream operation: Too many requests, please wait before trying again. You have sent too many requests. Wait before trying again.
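
Since the throttle arrives inside the Bedrock response stream, botocore's request-level retries generally do not help once streaming has started, so one workaround is to retry the whole task with backoff. Below is a minimal sketch, not the repo's actual code: the task name and `run_url_marketing_report_thread` come from the traceback, but the `url` argument, the import path, and the retry numbers are assumptions.

```python
from botocore.exceptions import EventStreamError
from celery import shared_task

from agents.utils import run_url_marketing_report_thread  # assumed import path


# Sketch: let Celery re-run the report task with exponential backoff and jitter
# when Bedrock throttles the streaming call, instead of failing it outright.
@shared_task(
    bind=True,
    autoretry_for=(EventStreamError,),  # exception raised in the traceback above
    retry_backoff=30,                   # ~30s, 60s, 120s, ... between attempts
    retry_backoff_max=600,              # cap the delay at 10 minutes
    retry_jitter=True,                  # spread retries across workers
    max_retries=5,
)
def run_url_marketing_report_thread_task(self, url: str):
    # The real task in /app/agents/tasks.py does more than this; only the
    # retry policy on the decorator matters for the throttling issue.
    return run_url_marketing_report_thread(url)
```

Raising `max_attempts` or switching to `adaptive` retry mode on the boto3 `bedrock-runtime` client (via `botocore.config.Config`) may also reduce how often this happens, but that mainly covers throttles returned on the initial request; a throttle delivered as an event in the stream, as in this traceback, still surfaces as `EventStreamError` and has to be retried at the application or task level.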