SYSTEM: Command list_files returned: ['..\..\..\..\AutoGPTLocal\Auto-GPT\autogpt\auto_gpt_workspace\source\my-document (2).pdf', '..\..\..\..\AutoGPTLocal\Auto-GPT\autogpt\auto_gpt_workspace\source\my-document (3).pdf']
Traceback (most recent call last):
File "", line 198, in _run_module_as_main
File "", line 88, in _run_code
File "C:\auto-gpt\Auto-GPT\autogpt__main.py", line 5, in
autogpt.cli.main()
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1130, in call
return self.main(*args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1055, in main
rv = self.invoke(ctx)
^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1635, in invoke
rv = super().invoke(ctx)
^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1404, in invoke
return ctx.invoke(self.callback, **ctx.params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 760, in invoke
return __callback(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\decorators.py", line 26, in new_func
return f(get_current_context(), *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\cli.py", line 96, in main
run_auto_gpt(
File "C:\auto-gpt\Auto-GPT\autogpt\main.py", line 197, in run_auto_gpt
agent.start_interaction_loop()
File "C:\auto-gpt\Auto-GPT\autogpt\agent\agent.py", line 130, in start_interaction_loop
assistant_reply = chat_with_ai(
^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\chat.py", line 112, in chat_with_ai
new_summary_message, trimmed_messages = agent.history.trim_messages(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\memory\message_history.py", line 79, in trim_messages
new_summary_message = self.update_running_summary(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\memory\message_history.py", line 194, in update_running_summary
self.summary = create_chat_completion(prompt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\utils__init__.py", line 53, in metered_func
return func(args, kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\utils__init__.py", line 87, in _wrapped
return func(*args, *kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\utils__init__.py", line 235, in create_chat_completion
response = api_manager.create_chat_completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\api_manager.py", line 61, in create_chat_completion
response = openai.ChatCompletion.create(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_resources\chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_resources\abstract\engine_apiresource.py", line 153, in create
response, , api_key = requestor.request(
^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_requestor.py", line 226, in request
resp, got_stream = self._interpret_response(result, stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_requestor.py", line 619, in _interpret_response
self._interpret_response_line(
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_requestor.py", line 682, in _interpret_response_line
raise self.handle_error_response(
openai.error.InvalidRequestError: This model's maximum context length is 4097 tokens. However, your messages resulted in 4310 tokens. Please reduce the length of the messages.
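
The failure happens in Auto-GPT's running-summary step: `update_running_summary` builds a summarization prompt and sends it via `create_chat_completion`, but the prompt alone is 4310 tokens, over the model's 4097-token context window (presumably gpt-3.5-turbo, given that limit). A minimal sketch of a pre-flight token check with `tiktoken` is shown below; `MAX_CONTEXT` is taken from the error message, while `RESERVED_FOR_REPLY` and the `truncate_prompt` helper are illustrative assumptions, not part of Auto-GPT.

```python
# Sketch: trim a prompt to fit the model's context window before calling the API.
# MAX_CONTEXT comes from the error above; RESERVED_FOR_REPLY is an assumed value.
import tiktoken

MAX_CONTEXT = 4097        # context window reported in the error message
RESERVED_FOR_REPLY = 500  # leave head-room for the model's completion (assumption)

def truncate_prompt(prompt: str, model: str = "gpt-3.5-turbo") -> str:
    """Return the prompt unchanged if it fits, otherwise keep only the newest tokens."""
    enc = tiktoken.encoding_for_model(model)
    tokens = enc.encode(prompt)
    budget = MAX_CONTEXT - RESERVED_FOR_REPLY
    if len(tokens) <= budget:
        return prompt
    # Drop the oldest tokens; the running summary exists to compress old history anyway.
    return enc.decode(tokens[-budget:])
```

Applying a check like this (or summarizing the trimmed messages in smaller batches) before the chat completion call keeps the request under the limit instead of raising `InvalidRequestError`.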