No operator found for memory_efficient_attention_forward with inputs:
query : shape=(1, 577, 16, 64) (torch.float16)
key : shape=(1, 577, 16, 64) (torch.float16)
value : shape=(1, 577, 16, 64) (torch.float16)
attn_bias :
p : 0.0
decoderF is not supported because:
xFormers wasn't build with CUDA support
attn_bias type is
operator wasn't built - see python -m xformers.info for more info
flshattF@0.0.0 is not supported because:
xFormers wasn't build with CUDA support
operator wasn't built - see python -m xformers.info for more info
cutlassF is not supported because:
xFormers wasn't build with CUDA support
operator wasn't built - see python -m xformers.info for more info
smallkF is not supported because:
max(query.shape[-1] != value.shape[-1]) > 32
xFormers wasn't build with CUDA support
dtype=torch.float16 (supported: {torch.float32})
has custom scale
operator wasn't built - see python -m xformers.info for more info
unsupported embed per head: 64
File "D:\Desktop\comfyui\ComfyUI\execution.py", line 151, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\execution.py", line 81, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\custom_nodes\ComfyUI-0246\utils.py", line 381, in new_func
res_value = old_func(*final_args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\execution.py", line 74, in map_node_over_list
results.append(getattr(obj, func)(**slice_dict(input_data_all, i)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\pulid.py", line 379, in apply_pulid
id_cond_vit, id_vit_hidden = eva_clip(face_features_image, return_all_features=False, return_hidden=True, shuffle=False)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 544, in forward
x, hidden_states = self.forward_features(x, return_all_features, return_hidden, shuffle)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 531, in forward_features
x = blk(x, rel_pos_bias=rel_pos_bias)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 293, in forward
x = x + self.drop_path(self.attn(self.norm1(x), rel_pos_bias=rel_pos_bias, attn_mask=attn_mask))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 208, in forward
x = xops.memory_efficient_attention(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\__init__.py", line 268, in memory_efficient_attention
return _memory_efficient_attention(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\__init__.py", line 387, in _memory_efficient_attention
return _memory_efficient_attention_forward(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\__init__.py", line 403, in _memory_efficient_attention_forward
op = _dispatch_fw(inp, False)
^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\dispatch.py", line 125, in _dispatch_fw
return _run_priority_list(
^^^^^^^^^^^^^^^^^^^
File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\dispatch.py", line 65, in _run_priority_list
raise NotImplementedError(msg)
Error occurred when executing ApplyPulid:
No operator found for
memory_efficient_attention_forward
with inputs: query : shape=(1, 577, 16, 64) (torch.float16) key : shape=(1, 577, 16, 64) (torch.float16) value : shape=(1, 577, 16, 64) (torch.float16) attn_bias : p : 0.0decoderF
is not supported because: xFormers wasn't build with CUDA support attn_bias type is operator wasn't built - seepython -m xformers.info
for more infoflshattF@0.0.0
is not supported because: xFormers wasn't build with CUDA support operator wasn't built - seepython -m xformers.info
for more infocutlassF
is not supported because: xFormers wasn't build with CUDA support operator wasn't built - seepython -m xformers.info
for more infosmallkF
is not supported because: max(query.shape[-1] != value.shape[-1]) > 32 xFormers wasn't build with CUDA support dtype=torch.float16 (supported: {torch.float32}) has custom scale operator wasn't built - seepython -m xformers.info
for more info unsupported embed per head: 64File "D:\Desktop\comfyui\ComfyUI\execution.py", line 151, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\execution.py", line 81, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\custom_nodes\ComfyUI-0246\utils.py", line 381, in new_func res_value = old_func(final_args, kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\execution.py", line 74, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\pulid.py", line 379, in apply_pulid id_cond_vit, id_vit_hidden = eva_clip(face_features_image, return_all_features=False, return_hidden=True, shuffle=False) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl return self._call_impl(args, kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl return forward_call(*args, *kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 544, in forward x, hidden_states = self.forward_features(x, return_all_features, return_hidden, shuffle) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 531, in forward_features x = blk(x, rel_pos_bias=rel_pos_bias) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File 
"D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl return self._call_impl(args, kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl return forward_call(*args, kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 293, in forward x = x + self.drop_path(self.attn(self.norm1(x), rel_pos_bias=rel_pos_bias, attn_mask=attn_mask)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1532, in _wrapped_call_impl return self._call_impl(*args, *kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\torch\nn\modules\module.py", line 1541, in _call_impl return forward_call(args, kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI\custom_nodes\PuLID_ComfyUI\eva_clip\eva_vit_model.py", line 208, in forward x = xops.memory_efficient_attention( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha__init.py", line 268, in memory_efficient_attention return _memory_efficient_attention( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha__init__.py", line 387, in _memory_efficient_attention return _memory_efficient_attention_forward( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\init__.py", line 403, in _memory_efficient_attention_forward op = _dispatch_fw(inp, False) ^^^^^^^^^^^^^^^^^^^^^^^^ File "D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\dispatch.py", line 125, in _dispatch_fw return _run_priority_list( ^^^^^^^^^^^^^^^^^^^ File 
"D:\Desktop\comfyui\ComfyUI.ext\Lib\site-packages\xformers\ops\fmha\dispatch.py", line 65, in _run_priority_list raise NotImplementedError(msg)