[Closed] liulsg closed this issue 6 months ago
Error occurred when executing OllamaVision:
llama runner process no longer running: 3221225477
File "G:\AI\ComfyUI_M\ComfyUI\execution.py", line 151, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\AI\ComfyUI_M\ComfyUI\execution.py", line 81, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\AI\ComfyUI_M\ComfyUI\execution.py", line 74, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\AI\ComfyUI_M\ComfyUI\custom_nodes\comfyui-ollama\CompfyuiOllama.py", line 64, in ollama_vision response = client.generate(model=model, prompt=query, images=images_b64) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\AI\ComfyUI_M\python_miniconda\Lib\site-packages\ollama_client.py", line 126, in generate return self._request_stream( ^^^^^^^^^^^^^^^^^^^^^ File "G:\AI\ComfyUI_M\python_miniconda\Lib\site-packages\ollama_client.py", line 97, in _request_stream return self._stream(*args, *kwargs) if stream else self._request(args, kwargs).json() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\AI\ComfyUI_M\python_miniconda\Lib\site-packages\ollama_client.py", line 73, in _request raise ResponseError(e.response.text, e.response.status_code) from None
Hello, how did you solve this? I'm running into the same error, with an identical traceback.