[AllTalk Server] Warning Audio generation failed. Status: 'utf-8' codec can't decode bytes in position 0-1: unexpected end of data
Traceback (most recent call last):
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\queueing.py", line 407, in call_prediction
output = await route_utils.call_process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\route_utils.py", line 226, in call_process_api
output = await app.get_blocks().process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\blocks.py", line 1550, in process_api
result = await self.call_function(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\blocks.py", line 1199, in call_function
prediction = await utils.async_iteration(iterator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 519, in async_iteration
return await iterator.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 512, in __anext__
return await anyio.to_thread.run_sync(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\anyio\to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread
return await future
^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run
result = context.run(func, *args)
^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 495, in run_sync_iterator_async
return next(iterator)
^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 649, in gen_wrapper
yield from f(*args, **kwargs)
File "S:\LLM\text-generation-webui\modules\chat.py", line 406, in generate_chat_reply_wrapper
yield chat_html_wrapper(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu']), history
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\modules\html_generator.py", line 309, in chat_html_wrapper
if mode == 'instruct':
^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\modules\html_generator.py", line 233, in generate_cai_chat_html
We use ?character and ?time.time() to force the browser to reset caches
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\modules\html_generator.py", line 233, in
We use ?character and ?time.time() to force the browser to reset caches
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "S:\LLM\text-generation-webui\modules\html_generator.py", line 53, in convert_to_markdown
File "S:\LLM\text-generation-webui\installer_files\env\Lib\re\__init__.py", line 185, in sub
return _compile(pattern, flags).sub(repl, string, count)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: expected string or bytes-like object, got 'NoneType'
[AllTalk Server] Warning Audio generation failed. Status: 'utf-8' codec can't decode bytes in position 0-1: unexpected end of data Traceback (most recent call last): File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\queueing.py", line 407, in call_prediction output = await route_utils.call_process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\route_utils.py", line 226, in call_process_api output = await app.get_blocks().process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\blocks.py", line 1550, in process_api result = await self.call_function( ^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\blocks.py", line 1199, in call_function prediction = await utils.async_iteration(iterator) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 519, in async_iteration return await iterator.__anext__() ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 512, in __anext__ return await anyio.to_thread.run_sync( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\anyio\to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread return await future ^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run result = context.run(func, *args) ^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 495, in 
run_sync_iterator_async return next(iterator) ^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\installer_files\env\Lib\site-packages\gradio\utils.py", line 649, in gen_wrapper yield from f(*args, **kwargs) File "S:\LLM\text-generation-webui\modules\chat.py", line 406, in generate_chat_reply_wrapper yield chat_html_wrapper(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu']), history ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\modules\html_generator.py", line 309, in chat_html_wrapper if mode == 'instruct': ^^^^^^^^^^^^ File "S:\LLM\text-generation-webui\modules\html_generator.py", line 233, in generate_cai_chat_html
We use ?character and ?time.time() to force the browser to reset caches
File "S:\LLM\text-generation-webui\modules\html_generator.py", line 233, in
We use ?character and ?time.time() to force the browser to reset caches
File "S:\LLM\text-generation-webui\modules\html_generator.py", line 53, in convert_to_markdown File "S:\LLM\text-generation-webui\installer_files\env\Lib\re\__init__.py", line 185, in sub return _compile(pattern, flags).sub(repl, string, count) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ TypeError: expected string or bytes-like object, got 'NoneType'