Traceback (most recent call last):
File "D:\AI\ComfyUI_windows_portable\ComfyUI\main.py", line 76, in <module>
import execution
File "D:\AI\ComfyUI_windows_portable\ComfyUI\execution.py", line 11, in <module>
import nodes
File "D:\AI\ComfyUI_windows_portable\ComfyUI\nodes.py", line 21, in <module>
import comfy.diffusers_load
File "D:\AI\ComfyUI_windows_portable\ComfyUI\comfy\diffusers_load.py", line 3, in <module>
import comfy.sd
File "D:\AI\ComfyUI_windows_portable\ComfyUI\comfy\sd.py", line 5, in <module>
from comfy import model_management
File "D:\AI\ComfyUI_windows_portable\ComfyUI\comfy\model_management.py", line 119, in <module>
total_vram = get_total_memory(get_torch_device()) / (1024 * 1024)
^^^^^^^^^^^^^^^^^^
File "D:\AI\ComfyUI_windows_portable\ComfyUI\comfy\model_management.py", line 88, in get_torch_device
return torch.device(torch.cuda.current_device())
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\cuda\__init__.py", line 787, in current_device
_lazy_init()
File "D:\AI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\cuda\__init__.py", line 293, in _lazy_init
raise AssertionError("Torch not compiled with CUDA enabled")
AssertionError: Torch not compiled with CUDA enabled
As a result, CUDA no longer works for ComfyUI: the installed PyTorch build is CPU-only ("Torch not compiled with CUDA enabled"), so ComfyUI fails at startup when it tries to query the GPU device.