Error occurred when executing SUPIR_sample:

No operator found for `memory_efficient_attention_forward` with inputs:
     query       : shape=(40, 1024, 1, 64) (torch.float16)
     key         : shape=(40, 1024, 1, 64) (torch.float16)
     value       : shape=(40, 1024, 1, 64) (torch.float16)
     attn_bias   : <class 'NoneType'>
     p           : 0.0
`decoderF` is not supported because:
    xFormers wasn't build with CUDA support
    attn_bias type is <class 'NoneType'>
    operator wasn't built - see `python -m xformers.info` for more info
`flshattF@0.0.0` is not supported because:
    xFormers wasn't build with CUDA support
    operator wasn't built - see `python -m xformers.info` for more info
`tritonflashattF` is not supported because:
    xFormers wasn't build with CUDA support
    operator wasn't built - see `python -m xformers.info` for more info
    triton is not available
`cutlassF` is not supported because:
    xFormers wasn't build with CUDA support
    operator wasn't built - see `python -m xformers.info` for more info
`smallkF` is not supported because:
    max(query.shape[-1] != value.shape[-1]) > 32
    xFormers wasn't build with CUDA support
    dtype=torch.float16 (supported: {torch.float32})
    operator wasn't built - see `python -m xformers.info` for more info
    unsupported embed per head: 64
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\execution.py", line 151, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\execution.py", line 81, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\execution.py", line 74, in map_node_over_list
results.append(getattr(obj, func)(slice_dict(input_data_all, i)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\nodes_v2.py", line 494, in sample
_samples = self.sampler(denoiser, noised_z, cond=positive[i], uc=negative[i], x_center=sample.unsqueeze(0), control_scale=control_scale_end,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\sgm\modules\diffusionmodules\sampling.py", line 441, in call
x = self.sampler_step(
^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\sgm\modules\diffusionmodules\sampling.py", line 418, in sampler_step
denoised = self.denoise(x, denoiser, sigma_hat, cond, uc, control_scale=control_scale)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\sgm\modules\diffusionmodules\sampling.py", line 400, in denoise
denoised = denoiser(self.guider.prepare_inputs(x, sigma, cond, uc), control_scale)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\nodes_v2.py", line 468, in
denoiser = lambda input, sigma, c, control_scale: SUPIR_model.denoiser(SUPIR_model.model, input, sigma, c, control_scale)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\sgm\modules\diffusionmodules\denoiser.py", line 73, in call
return network(input * c_in, c_noise, cond, control_scale) * c_out + input * c_skip
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\sgm\modules\diffusionmodules\wrappers.py", line 96, in forward
out = self.diffusion_model(
^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\SUPIR\modules\SUPIR_v0.py", line 654, in forward
h = self.project_modules[adapter_idx](control[control_idx], h, control_scale=control_scale)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\SUPIR\modules\SUPIR_v0.py", line 147, in forward
x = self.attn(x, context)
^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-SUPIR\sgm\modules\attention.py", line 365, in forward
out = xformers.ops.memory_efficient_attention(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\xformers\ops\fmha__init.py", line 223, in memory_efficient_attention
return _memory_efficient_attention(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\xformers\ops\fmha\init.py", line 321, in _memory_efficient_attention
return _memory_efficient_attention_forward(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\xformers\ops\fmha\init__.py", line 337, in _memory_efficient_attention_forward
op = _dispatch_fw(inp, False)
^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\xformers\ops\fmha\dispatch.py", line 120, in _dispatch_fw
return _run_priority_list(
^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\xformers\ops\fmha\dispatch.py", line 63, in _run_priority_list
raise NotImplementedError(msg)
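For reference, the message above points at `python -m xformers.info`, which prints which attention operators this xFormers build actually shipped with; running it inside the same `python_embeded` environment avoids writing any code. The snippet below is a minimal, hypothetical sanity check (not part of the log above) that reproduces the failing call with the same tensor shapes: on a build without CUDA kernels it raises the same NotImplementedError, while a correctly built xFormers returns an output tensor.

```python
# Minimal sanity check (assumption: run with the same python_embeded interpreter).
# A CPU-only xFormers build fails the same way as the log above;
# a CUDA-enabled build returns an output tensor.
import torch
import xformers
import xformers.ops as xops

print("xformers:", xformers.__version__)
print("torch:", torch.__version__, "| torch CUDA build:", torch.version.cuda)
print("CUDA available:", torch.cuda.is_available())

if torch.cuda.is_available():
    # Same layout as the failing call: (batch, seq_len, heads, head_dim), fp16.
    q = torch.randn(40, 1024, 1, 64, dtype=torch.float16, device="cuda")
    k = torch.randn(40, 1024, 1, 64, dtype=torch.float16, device="cuda")
    v = torch.randn(40, 1024, 1, 64, dtype=torch.float16, device="cuda")
    out = xops.memory_efficient_attention(q, k, v)
    print("memory_efficient_attention OK:", tuple(out.shape))
```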