File "E:\autogpt\my_folder\lib\runpy.py", line 196, in _run_module_as_main
return _run_code(code, main_globals, None,
File "E:\autogpt\my_folder\lib\runpy.py", line 86, in _run_code
exec(code, run_globals)
File "E:\autogpt\Auto-GPT\autogpt\__main__.py", line 572, in <module>
main()
File "E:\autogpt\Auto-GPT\autogpt\__main__.py", line 396, in main
agent.start_interaction_loop()
File "E:\autogpt\Auto-GPT\autogpt\__main__.py", line 448, in start_interaction_loop
assistant_reply = chat.chat_with_ai(
File "E:\autogpt\Auto-GPT\autogpt\chat.py", line 95, in chat_with_ai
) = generate_context(prompt, relevant_memory, full_message_history, model)
File "E:\autogpt\Auto-GPT\autogpt\chat.py", line 43, in generate_context
current_tokens_used = token_counter.count_message_tokens(current_context, model)
File "E:\autogpt\Auto-GPT\autogpt\token_counter.py", line 24, in count_message_tokens
encoding = tiktoken.encoding_for_model(model)
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\model.py", line 75, in encoding_for_model
return get_encoding(encoding_name)
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\registry.py", line 63, in get_encoding
enc = Encoding(**constructor())
File "E:\autogpt\my_folder\lib\site-packages\tiktoken_ext\openai_public.py", line 64, in cl100k_base
mergeable_ranks = load_tiktoken_bpe(
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\load.py", line 115, in load_tiktoken_bpe
return {
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\load.py", line 115, in <dictcomp>
return {
ValueError: not enough values to unpack (expected 2, got 1)
更新了tiktoken但是问题还没有解决
File "E:\autogpt\my_folder\lib\runpy.py", line 196, in _run_module_as_main
return _run_code(code, main_globals, None,
File "E:\autogpt\my_folder\lib\runpy.py", line 86, in _run_code
exec(code, run_globals)
File "E:\autogpt\Auto-GPT\autogpt\__main__.py", line 572, in <module>
main()
File "E:\autogpt\Auto-GPT\autogpt\__main__.py", line 396, in main
agent.start_interaction_loop()
File "E:\autogpt\Auto-GPT\autogpt\__main__.py", line 448, in start_interaction_loop
assistant_reply = chat.chat_with_ai(
File "E:\autogpt\Auto-GPT\autogpt\chat.py", line 95, in chat_with_ai
) = generate_context(prompt, relevant_memory, full_message_history, model)
File "E:\autogpt\Auto-GPT\autogpt\chat.py", line 43, in generate_context
current_tokens_used = token_counter.count_message_tokens(current_context, model)
File "E:\autogpt\Auto-GPT\autogpt\token_counter.py", line 24, in count_message_tokens
encoding = tiktoken.encoding_for_model(model)
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\model.py", line 75, in encoding_for_model
return get_encoding(encoding_name)
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\registry.py", line 63, in get_encoding
enc = Encoding(**constructor())
File "E:\autogpt\my_folder\lib\site-packages\tiktoken_ext\openai_public.py", line 64, in cl100k_base
mergeable_ranks = load_tiktoken_bpe(
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\load.py", line 115, in load_tiktoken_bpe
return {
File "E:\autogpt\my_folder\lib\site-packages\tiktoken\load.py", line 115, in <dictcomp>
return {
ValueError: not enough values to unpack (expected 2, got 1)
更新了tiktoken但是问题还没有解决