问题描述 / Problem Description
使用langchain-chatchat master分支基于xinference chatglm3调用开启Agent能力,问答报错 unhashable type: 'slice',不开启Agent问答正常
复现问题的步骤 / Steps to Reproduce
chatchat -a
实际结果 / Actual Result
Langchain-chatchat侧报错日志
2024-06-27 15:12:01,050 httpx 3879649 INFO HTTP Request: POST http://127.0.0.1:9997/v1/chat/completions "HTTP/1.1 200 OK"
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 269, in __call__
await wrap(partial(self.listen_for_disconnect, receive))
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 258, in wrap
await func()
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 215, in listen_for_disconnect
message = await receive()
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 535, in receive
await self.message_event.wait()
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/asyncio/locks.py", line 214, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope fffdcb470100
During handling of the above exception, another exception occurred:
Exception Group Traceback (most recent call last):
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 407, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 69, in __call__
| return await self.app(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
| await super().__call__(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/applications.py", line 123, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
| raise exc
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
| await self.app(scope, receive, _send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/middleware/cors.py", line 83, in __call__
| await self.app(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/routing.py", line 758, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/routing.py", line 778, in app
| await route.handle(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/routing.py", line 299, in handle
| await self.app(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/routing.py", line 79, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/starlette/routing.py", line 77, in app
| await response(scope, receive, send)
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 255, in __call__
| async with anyio.create_task_group() as task_group:
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 678, in __aexit__
| raise BaseExceptionGroup(
| exceptiongroup.ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 258, in wrap
| await func()
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 245, in stream_response
| async for data in self.body_iterator:
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/chatchat/server/api_server/openai_routes.py", line 84, in generator
| async for chunk in await method(params):
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 147, in __aiter__
| async for item in self._iterator:
| File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 174, in __stream__
| raise APIError(
| openai.APIError: An error occurred during streaming
+------------------------------------
2024-06-27 15:12:01,264 root 3879649 ERROR peer closed connection without sending complete message body (incomplete chunked read)
Traceback (most recent call last):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_transports/default.py", line 67, in map_httpcore_exceptions
yield
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_transports/default.py", line 252, in __aiter__
async for part in self._httpcore_stream:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpcore/_async/connection_pool.py", line 367, in __aiter__
raise exc from None
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpcore/_async/connection_pool.py", line 363, in __aiter__
async for part in self._stream:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpcore/_async/http11.py", line 349, in __aiter__
raise exc
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpcore/_async/http11.py", line 341, in __aiter__
async for chunk in self._connection._receive_response_body(**kwargs):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpcore/_async/http11.py", line 210, in _receive_response_body
event = await self._receive_event(timeout=timeout)
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpcore/_async/http11.py", line 220, in _receive_event
with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/contextlib.py", line 153, in __exit__
self.gen.throw(typ, value, traceback)
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpcore/_exceptions.py", line 14, in map_exceptions
raise to_exc(exc) from exc
httpcore.RemoteProtocolError: peer closed connection without sending complete message body (incomplete chunked read)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/chatchat/server/utils.py", line 46, in wrap_done
await fn
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2536, in ainvoke
input = await step.ainvoke(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/chains/base.py", line 212, in ainvoke
raise e
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/chains/base.py", line 203, in ainvoke
await self._acall(inputs, run_manager=run_manager)
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1481, in _acall
next_step_output = await self._atake_next_step(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1275, in _atake_next_step
[
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1275, in
[
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1303, in _aiter_next_step
output = await self.agent.aplan(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 436, in aplan
async for chunk in self.runnable.astream(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2900, in astream
async for chunk in self.atransform(input_aiter(), config, **kwargs):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2883, in atransform
async for chunk in self._atransform_stream_with_config(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1985, in _atransform_stream_with_config
chunk = cast(Output, await py_anext(iterator))
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2853, in _atransform
async for output in final_pipeline:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1316, in atransform
async for ichunk in input:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 4748, in atransform
async for item in self.bound.atransform(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1334, in atransform
async for output in self.astream(final, config, **kwargs):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 319, in astream
raise e
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 297, in astream
async for chunk in self._astream(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_openai/chat_models/base.py", line 485, in _astream
async for chunk in await self.async_client.create(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 147, in __aiter__
async for item in self._iterator:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 160, in __stream__
async for sse in iterator:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 151, in _iter_events
async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 302, in aiter_bytes
async for chunk in self._aiter_chunks(iterator):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 313, in _aiter_chunks
async for chunk in iterator:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_models.py", line 933, in aiter_bytes
async for raw_bytes in self.aiter_raw():
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_models.py", line 991, in aiter_raw
async for raw_stream_bytes in self.stream:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_client.py", line 147, in __aiter__
async for chunk in self._stream:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_transports/default.py", line 251, in __aiter__
with map_httpcore_exceptions():
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/contextlib.py", line 153, in __exit__
self.gen.throw(typ, value, traceback)
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_transports/default.py", line 84, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.RemoteProtocolError: peer closed connection without sending complete message body (incomplete chunked read)
xinference侧报错日志
2024-06-27 15:12:01,255 xinference.api.restful_api 3874224 ERROR Chat completion stream got an error: [address=0.0.0.0:36809, pid=3905684] unhashable type: 'slice'
Traceback (most recent call last):
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/api/restful_api.py", line 1476, in stream_results
async for item in iterator:
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 340, in __anext__
return await self._actor_ref.__xoscar_next__(self._uid)
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/context.py", line 227, in send
return self._process_result_message(result)
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/context.py", line 102, in _process_result_message
raise message.as_instanceof_cause()
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/pool.py", line 659, in send
result = await self._run_coro(message.message_id, coro)
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/pool.py", line 370, in _run_coro
return await coro
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 384, in __on_receive__
return await super().__on_receive__(message)  # type: ignore
File "xoscar/core.pyx", line 558, in __on_receive__
raise ex
File "xoscar/core.pyx", line 520, in xoscar.core._BaseActor.__on_receive__
async with self._lock:
File "xoscar/core.pyx", line 521, in xoscar.core._BaseActor.__on_receive__
with debug_async_timeout('actor_lock_timeout',
File "xoscar/core.pyx", line 526, in xoscar.core._BaseActor.__on_receive__
result = await result
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 431, in __xoscar_next__
raise e
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 417, in __xoscar_next__
r = await asyncio.to_thread(_wrapper, gen)
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/asyncio/threads.py", line 25, in to_thread
return await loop.run_in_executor(None, func_call)
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/concurrent/futures/thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 402, in _wrapper
return next(_gen)
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/core/model.py", line 300, in _to_json_generator
for v in gen:
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/model/llm/utils.py", line 544, in _to_chat_completion_chunks
for i, chunk in enumerate(chunks):
File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/model/llm/pytorch/chatglm.py", line 177, in _stream_generator
chunk_text = chunk_text[last_chunk_text_length:]
TypeError: [address=0.0.0.0:36809, pid=3905684] unhashable type: 'slice'
问题描述 / Problem Description 使用langchain-chatchat master分支基于xinference chatglm3调用开启Agent能力,问答报错 unhashable type: 'slice',不开启Agent问答正常
复现问题的步骤 / Steps to Reproduce
实际结果 / Actual Result Langchain-chatchat侧报错日志 2024-06-27 15:12:01,050 httpx 3879649 INFO HTTP Request: POST http://127.0.0.1:9997/v1/chat/completions "HTTP/1.1 200 OK" ERROR: Exception in ASGI application Traceback (most recent call last): File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 269, in call await wrap(partial(self.listen_for_disconnect, receive)) File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 258, in wrap await func() File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/sse_starlette/sse.py", line 215, in listen_for_disconnect message = await receive() File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 535, in receive await self.message_event.wait() File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/asyncio/locks.py", line 214, in wait await fut asyncio.exceptions.CancelledError: Cancelled by cancel scope fffdcb470100
During handling of the above exception, another exception occurred:
The above exception was the direct cause of the following exception:
Traceback (most recent call last): File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/chatchat/server/utils.py", line 46, in wrap_done await fn File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2536, in ainvoke input = await step.ainvoke( File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/chains/base.py", line 212, in ainvoke raise e File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/chains/base.py", line 203, in ainvoke await self._acall(inputs, run_manager=run_manager) File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1481, in _acall next_step_output = await self._atake_next_step( File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1275, in _atake_next_step [ File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1275, in
[
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 1303, in _aiter_next_step
output = await self.agent.aplan(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain/agents/agent.py", line 436, in aplan
async for chunk in self.runnable.astream(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2900, in astream
async for chunk in self.atransform(input_aiter(), config, kwargs):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2883, in atransform
async for chunk in self._atransform_stream_with_config(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1985, in _atransform_stream_with_config
chunk = cast(Output, await py_anext(iterator))
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2853, in _atransform
async for output in final_pipeline:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1316, in atransform
async for ichunk in input:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 4748, in atransform
async for item in self.bound.atransform(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1334, in atransform
async for output in self.astream(final, config, kwargs):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 319, in astream
raise e
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_core/language_models/chat_models.py", line 297, in astream
async for chunk in self._astream(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/langchain_openai/chat_models/base.py", line 485, in _astream
async for chunk in await self.async_client.create(
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 147, in aiter
async for item in self._iterator:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 160, in stream
async for sse in iterator:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 151, in _iter_events
async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 302, in aiter_bytes
async for chunk in self._aiter_chunks(iterator):
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/openai/_streaming.py", line 313, in _aiter_chunks
async for chunk in iterator:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_models.py", line 933, in aiter_bytes
async for raw_bytes in self.aiter_raw():
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_models.py", line 991, in aiter_raw
async for raw_stream_bytes in self.stream:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_client.py", line 147, in aiter
async for chunk in self._stream:
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_transports/default.py", line 251, in aiter
with map_httpcore_exceptions():
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/contextlib.py", line 153, in exit
self.gen.throw(typ, value, traceback)
File "/home/xx/miniconda3/envs/RAG_new/lib/python3.10/site-packages/httpx/_transports/default.py", line 84, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.RemoteProtocolError: peer closed connection without sending complete message body (incomplete chunked read)
xinference侧报错日志 2024-06-27 15:12:01,255 xinference.api.restful_api 3874224 ERROR Chat completion stream got an error: [address=0.0.0.0:36809, pid=3905684] unhashable type: 'slice' Traceback (most recent call last): File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/api/restful_api.py", line 1476, in stream_results async for item in iterator: File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 340, in anext return await self._actor_ref.xoscar_next__(self._uid) File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/context.py", line 227, in send return self._process_result_message(result) File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/context.py", line 102, in _process_result_message raise message.as_instanceof_cause() File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/pool.py", line 659, in send result = await self._run_coro(message.message_id, coro) File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/backends/pool.py", line 370, in _run_coro return await coro File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 384, in on_receive return await super().on_receive(message) # type: ignore File "xoscar/core.pyx", line 558, in on_receive raise ex File "xoscar/core.pyx", line 520, in xoscar.core._BaseActor.__on_receive async with self._lock: File "xoscar/core.pyx", line 521, in xoscar.core._BaseActor.on_receive__ with debug_async_timeout('actor_lock_timeout', File "xoscar/core.pyx", line 526, in xoscar.core._BaseActor.on_receive result = await result File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 431, in __xoscar_next raise e File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 417, in __xoscar_next__ r = await asyncio.to_thread(_wrapper, gen) File 
"/home/xx/miniconda3/envs/LLM_x/lib/python3.10/asyncio/threads.py", line 25, in to_thread return await loop.run_in_executor(None, func_call) File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/concurrent/futures/thread.py", line 58, in run result = self.fn(*self.args, **self.kwargs) File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xoscar/api.py", line 402, in _wrapper return next(_gen) File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/core/model.py", line 300, in _to_json_generator for v in gen: File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/model/llm/utils.py", line 544, in _to_chat_completion_chunks for i, chunk in enumerate(chunks): File "/home/xx/miniconda3/envs/LLM_x/lib/python3.10/site-packages/xinference/model/llm/pytorch/chatglm.py", line 177, in _stream_generator chunk_text = chunk_text[last_chunk_text_length:] TypeError: [address=0.0.0.0:36809, pid=3905684] unhashable type: 'slice'
环境信息 / Environment Information langchain-chatchat 0.3.0.20240625.1 transformers 4.40.2 torch 2.1.0 torch-npu 2.1.0.post3-20240523