When I use Next.js to do research, I get the following error:
Error in reading JSON, attempting to repair JSON
Error using json_repair: the JSON object must be str, bytes or bytearray, not NoneType
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "D:\python\gpt-researcher\gpt_researcher\master\actions.py", line 108, in choose_agent
response = await create_chat_completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\python\gpt-researcher\gpt_researcher\utils\llm.py", line 96, in create_chat_completion
response = await provider.get_chat_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\python\gpt-researcher\gpt_researcher\llm_provider\openai\openai.py", line 62, in get_chat_response
output = await self.llm.ainvoke(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 295, in ainvoke
llm_result = await self.agenerate_prompt(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 724, in agenerate_prompt
return await self.agenerate(
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 684, in agenerate
raise exceptions[0]
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 883, in _agenerate_with_cache
result = await self._agenerate(
^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_openai\chat_models\base.py", line 666, in _agenerate
response = await self.async_client.create(messages=message_dicts, **params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai\resources\chat\completions.py", line 1283, in create
return await self._post(
^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1805, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1503, in request
return await self._request(
^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1584, in _request
return await self._retry_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1630, in _retry_request
return await self._request(
^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1584, in _request
return await self._retry_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1630, in _retry_request
return await self._request(
^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1599, in _request
raise self._make_status_error_from_response(err.response) from None
openai.InternalServerError: <!DOCTYPE html>
When I use Next.js to do research, I get the following error:
Error in reading JSON, attempting to repair JSON Error using json_repair: the JSON object must be str, bytes or bytearray, not NoneType ERROR: Exception in ASGI application Traceback (most recent call last): File "D:\python\gpt-researcher\gpt_researcher\master\actions.py", line 108, in choose_agent response = await create_chat_completion( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\gpt_researcher\utils\llm.py", line 96, in create_chat_completion response = await provider.get_chat_response( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\gpt_researcher\llm_provider\openai\openai.py", line 62, in get_chat_response output = await self.llm.ainvoke(messages) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 295, in ainvoke llm_result = await self.agenerate_prompt( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 724, in agenerate_prompt return await self.agenerate( ^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 684, in agenerate raise exceptions[0] File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_core\language_models\chat_models.py", line 883, in _agenerate_with_cache result = await self._agenerate( ^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\langchain_openai\chat_models\base.py", line 666, in _agenerate response = await self.async_client.create(messages=message_dicts, **params) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai\resources\chat\completions.py", line 1283, in create return await self._post( ^^^^^^^^^^^^^^^^^ File 
"C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1805, in post return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1503, in request return await self._request( ^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1584, in _request return await self._retry_request( ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1630, in _retry_request return await self._request( ^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1584, in _request return await self._retry_request( ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1630, in _retry_request return await self._request( ^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\openai_base_client.py", line 1599, in _request raise self._make_status_error_from_response(err.response) from None openai.InternalServerError: <!DOCTYPE html>
Bad gateway — Error code 502
Browser: Working
Cloudflare: Working
Host: Error
What happened?
The web server reported a bad gateway error.
What can I do?
Please try again in a few minutes.
Cloudflare Ray ID: 8ae8b25a9cc163da • Your IP: 125.120.101.223 • Performance & security by Cloudflare
During handling of the above exception, another exception occurred:
Traceback (most recent call last): File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\uvicorn\protocols\websockets\websockets_impl.py", line 244, in run_asgi result = await self.app(self.scope, self.asgi_receive, self.asgi_send) # type: ignore[func-returns-value] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\uvicorn\middleware\proxy_headers.py", line 70, in call return await self.app(scope, receive, send) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\fastapi\applications.py", line 1054, in call await super().call(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\applications.py", line 123, in call await self.middleware_stack(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\middleware\errors.py", line 151, in call await self.app(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\middleware\cors.py", line 77, in call await self.app(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\middleware\exceptions.py", line 65, in call await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette_exception_handler.py", line 64, in wrapped_app raise exc File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\routing.py", line 756, in call await self.middleware_stack(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\routing.py", line 776, in app await route.handle(scope, receive, send) File 
"C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\routing.py", line 373, in handle await self.app(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\routing.py", line 96, in app await wrap_app_handling_exceptions(app, session)(scope, receive, send) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette_exception_handler.py", line 64, in wrapped_app raise exc File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\starlette\routing.py", line 94, in app await func(session) File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\site-packages\fastapi\routing.py", line 348, in app await dependant.call(*values) File "D:\python\gpt-researcher\backend\server.py", line 89, in websocket_endpoint report = await manager.start_streaming( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\backend\websocket_manager.py", line 60, in start_streaming report = await run_agent(task, report_type, report_source, source_urls, tone, websocket, headers) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\backend\websocket_manager.py", line 97, in run_agent report = await researcher.run() ^^^^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\backend\report_type\basic_report\basic_report.py", line 41, in run await researcher.conduct_research() File "D:\python\gpt-researcher\gpt_researcher\master\agent.py", line 112, in conduct_research self.agent, self.role = await choose_agent( ^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\gpt_researcher\master\actions.py", line 126, in choose_agent return await handle_json_error(response) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\gpt_researcher\master\actions.py", line 137, in 
handle_json_error json_string = extract_json_with_regex(response) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\python\gpt-researcher\gpt_researcher\master\actions.py", line 153, in extract_json_with_regex json_match = re.search(r"{.?}", response, re.DOTALL) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\LiDK\anaconda3\envs\gpt_researcher\Lib\re__init__.py", line 176, in search return _compile(pattern, flags).search(string) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ TypeError: expected string or bytes-like object, got 'NoneType' INFO: connection closed