I think I had this too with sageattention 1.0.5, reverting back to 1.0.3 made it work again.
Thanks, that fixed the error. You could add the version pin to the requirements.
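For anyone else hitting this, a minimal pin (assuming 1.0.3 is the version that works for your setup, as reported above) would just be a line in requirements.txt:

```
sageattention==1.0.3
```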
```
  noise_pred = self.transformer(
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
    return forward_call(*args, **kwargs)
File "/data/wangxi/ComfyUI/custom_nodes/ComfyUI-CogVideoXWrapper/custom_cogvideox_transformer_3d.py", line 685, in forward
    hidden_states, encoder_hidden_states = block(
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
    return forward_call(*args, **kwargs)
File "/data/wangxi/ComfyUI/custom_nodes/ComfyUI-CogVideoXWrapper/custom_cogvideox_transformer_3d.py", line 282, in forward
    attn_hidden_states, attn_encoder_hidden_states = self.attn1(
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
    return forward_call(*args, **kwargs)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/diffusers/models/attention_processor.py", line 495, in forward
    return self.processor(
File "/data/wangxi/ComfyUI/custom_nodes/ComfyUI-CogVideoXWrapper/custom_cogvideox_transformer_3d.py", line 129, in __call__
    hidden_states = sageattn_func(query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/torch/_dynamo/eval_frame.py", line 451, in _fn
    return fn(*args, **kwargs)
File "/data/wangxi/ComfyUI/custom_nodes/ComfyUI-CogVideoXWrapper/custom_cogvideox_transformer_3d.py", line 50, in sageattn_func
    return sageattn(query, key, value, attn_mask=attn_mask, dropout_p=dropout_p, is_causal=is_causal)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/sageattention/core.py", line 106, in sageattn
    o = attn_true(q_int8, k_int8, v, q_scale, k_scale, tensor_layout=tensor_layout, output_dtype=dtype)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/sageattention/attn_qk_int8_per_block.py", line 113, in forward
    _attn_fwd[grid](
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/triton/runtime/jit.py", line 167, in <lambda>
    return lambda *args, **kwargs: self.run(grid=grid, warmup=False, *args, **kwargs)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/triton/runtime/jit.py", line 416, in run
    self.cache[device][key] = compile(
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/triton/compiler/compiler.py", line 191, in compile
    module = src.make_ir(options)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/triton/compiler/compiler.py", line 117, in make_ir
    return ast_to_ttir(self.fn, self, options=options)
File "/data/wangxi/miniconda3/envs/comfyuimain/lib/python3.11/site-packages/triton/compiler/code_generator.py", line 1231, in ast_to_ttir
    raise CompilationError(fn.src, node, repr(e)) from e
triton.compiler.errors.CompilationError: at 39:55:
    O_block_ptr = Out + (off_z * stride_oz + off_h * stride_oh) + offs_m[:, None] * stride_on + offs_k[None, :]
AssertionError('First input (fp16) and second input (bf16) must have the same dtype!')
```
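For context, the assertion fires inside the quantized attention kernel because it receives fp16 and bf16 tensors at the same time. Reverting to sageattention 1.0.3 is the fix that worked here; purely as a sketch (not the wrapper's or kijai's actual change), one could also make sure query/key/value share a dtype before calling sageattn. The `sageattn_same_dtype` name below is made up for illustration:

```python
import torch
from sageattention import sageattn

def sageattn_same_dtype(query, key, value, attn_mask=None, dropout_p=0.0, is_causal=False):
    """Hypothetical workaround sketch: cast key/value (and a floating-point
    mask) to query's dtype so the kernel never sees mixed fp16/bf16 inputs."""
    dtype = query.dtype
    key = key.to(dtype)
    value = value.to(dtype)
    if attn_mask is not None and torch.is_floating_point(attn_mask):
        attn_mask = attn_mask.to(dtype)
    return sageattn(query, key, value, attn_mask=attn_mask,
                    dropout_p=dropout_p, is_causal=is_causal)
```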
```
commit 6f9e4ff6477d51ef29e2f7eea9ff2bbd6986b007 (HEAD -> 1.5_test, origin/1.5_test)
Author: kijai <40791699+kijai@users.noreply.github.com>
Date:   Sun Nov 17 22:23:40 2024 +0200

commit e70da23ac2b4724624537e503b0cdaf93d24a74e
Author: kijai <40791699+kijai@users.noreply.github.com>
```