启动项目后,第一个回答没有问题,第二个回答的时候发生报错,报错内容如下:“hi
Traceback (most recent call last):
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\queueing.py", line 495, in call_prediction
output = await route_utils.call_process_api(
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\route_utils.py", line 232, in call_process_api
output = await app.get_blocks().process_api(
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\blocks.py", line 1561, in process_api
result = await self.call_function(
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\blocks.py", line 1179, in call_function
prediction = await anyio.to_thread.run_sync(
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\anyio\to_thread.py", line 56, in run_sync
return await get_async_backend().run_sync_in_worker_thread(
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\anyio\_backends\_asyncio.py", line 2134, in run_sync_in_worker_thread
return await future
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\anyio\_backends\_asyncio.py", line 851, in run
result = context.run(func, *args)
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\utils.py", line 678, in wrapper
response = f(*args, **kwargs)
File "H:\agent\Chinese-LangChain\main.py", line 90, in predict
result = application.get_llm_answer(query=input, web_content=web_content) # 传入用户输入和网络内容
File "H:\agent\Chinese-LangChain\clc\langchain_application.py", line 85, in get_llm_answer
result = self.llm_service._call(prompt)
File "H:\agent\Chinese-LangChain\clc\gpt_service.py", line 41, in _call
response, _ = self.model.chat(
File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "C:\Users\Administrator\.cache\huggingface\modules\transformers_modules\chatglm3-6B-32k\modeling_chatglm.py", line 1034, in chat
inputs = tokenizer.build_chat_input(query, history=history, role=role)
File "C:\Users\Administrator\.cache\huggingface\modules\transformers_modules\chatglm3-6B-32k\tokenization_chatglm.py", line 189, in build_chat_input
content = item["content"]
TypeError: list indices must be integers or slices, not str”。
启动项目后,第一个回答没有问题,第二个回答的时候发生报错,报错内容如下:“hi Traceback (most recent call last): File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\queueing.py", line 495, in call_prediction output = await route_utils.call_process_api( File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\route_utils.py", line 232, in call_process_api output = await app.get_blocks().process_api( File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\blocks.py", line 1561, in process_api result = await self.call_function( File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\blocks.py", line 1179, in call_function prediction = await anyio.to_thread.run_sync( File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\anyio\to_thread.py", line 56, in run_sync return await get_async_backend().run_sync_in_worker_thread( File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\anyio\_backends\_asyncio.py", line 2134, in run_sync_in_worker_thread
return await future File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\anyio\_backends\_asyncio.py", line 851, in run result = context.run(func, *args) File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\gradio\utils.py", line 678, in wrapper response = f(*args, **kwargs) File "H:\agent\Chinese-LangChain\main.py", line 90, in predict result = application.get_llm_answer(query=input, web_content=web_content) # 传入用户输入和网络内容 File "H:\agent\Chinese-LangChain\clc\langchain_application.py", line 85, in get_llm_answer result = self.llm_service._call(prompt) File "H:\agent\Chinese-LangChain\clc\gpt_service.py", line 41, in _call response, _ = self.model.chat( File "H:\anaconda3\envs\chineseLangChain\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context return func(*args, **kwargs) File "C:\Users\Administrator\.cache\huggingface\modules\transformers_modules\chatglm3-6B-32k\modeling_chatglm.py", line 1034, in chat
inputs = tokenizer.build_chat_input(query, history=history, role=role) File "C:\Users\Administrator\.cache\huggingface\modules\transformers_modules\chatglm3-6B-32k\tokenization_chatglm.py", line 189, in build_chat_input content = item["content"] TypeError: list indices must be integers or slices, not str”。