Open MyraBaba opened 3 months ago
Even the original gpt-4o model produces the error below in Chainlit:
model
none is not an allowed value (type=type_error.none.not_allowed)
Traceback (most recent call last):
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/chainlit/utils.py", line 40, in wrapper
return await user_function(params_values)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/tulpar/Projects/ODS/graph_websearch_agent/app/chat.py", line 194, in main
response = await cl.make_async(chat_workflow.invoke_workflow)(message)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/asyncer/_main.py", line 358, in wrapper
return await anyio.to_thread.run_sync(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/anyio/to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread
return await future
^^^^^^^^^^^^
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/asyncio/futures.py", line 287, in await
yield self # This tells Task to wait for completion.
^^^^^^^^^^
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/asyncio/tasks.py", line 349, in __wakeup
future.result()
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/asyncio/futures.py", line 203, in result
raise self._exception.with_traceback(self._exception_tb)
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 807, in run
result = context.run(func, args)
^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/tulpar/Projects/ODS/graph_websearch_agent/app/chat.py", line 61, in invoke_workflow
for event in self.workflow.stream(dict_inputs, limit):
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/langgraph/pregel/init.py", line 963, in stream
_panic_or_proceed(done, inflight, step)
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/langgraph/pregel/init.py", line 1489, in _panic_or_proceed
raise exc
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/concurrent/futures/thread.py", line 58, in run
result = self.fn(self.args, self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/langgraph/pregel/retry.py", line 66, in run_with_retry
task.proc.invoke(task.input, task.config)
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2406, in invoke
input = step.invoke(input, config, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/Caskroom/miniconda/base/envs/agent_env/lib/python3.11/site-packages/langgraph/utils.py", line 95, in invoke
ret = context.run(self.func, input, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/tulpar/Projects/ODS/graph_websearch_agent/agent_graph/graph.py", line 47, in
How can I use gpt-3.5 instead of gpt-4?
Best regards
server = 'openai' model = 'gpt-3.5-turbo' model_endpoint = None