cubiq / ComfyUI_IPAdapter_plus


Error occurred when executing KSampler. Please help me! #520

Closed: binbrain1990 closed this issue 1 month ago

binbrain1990 commented 2 months ago

Error occurred when executing KSampler:

Query/Key/Value should either all have the same dtype, or (in the quantized case) Key/Value should have dtype torch.int32
  query.dtype: torch.float16
  key.dtype  : torch.float32
  value.dtype: torch.float32

File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\execution.py", line 151, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\execution.py", line 81, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\execution.py", line 74, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\nodes.py", line 1344, in sample return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\nodes.py", line 1314, in common_ksampler samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample return original_sample(*args, *kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations. File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-AnimateDiff-Evolved\animatediff\sampling.py", line 279, in motion_sample return orig_comfy_sample(model, noise, args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Advanced-ControlNet\adv_control\control_reference.py", line 47, in refcn_sample return orig_comfy_sample(model, args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\sample.py", line 37, in sample samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 755, in sample return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 657, in sample return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 644, in sample output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 623, in inner_sample samples = sampler.sample(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 534, in sample samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, self.extra_options) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\torch\utils_contextlib.py", line 115, in decorate_context return func(args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\k_diffusion\sampling.py", line 737, in sample_ddpm return generic_step_sampler(model, x, sigmas, extra_args, callback, disable, noise_sampler, DDPMSampler_step) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\k_diffusion\sampling.py", line 726, in generic_step_sampler denoised = model(x, sigmas[i] * s_in, 
*extra_args) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 272, in call out = self.inner_model(x, sigma, model_options=model_options, seed=seed) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 610, in call return self.predict_noise(args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 613, in predict_noise return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 258, in sampling_function out = calc_cond_batch(model, conds, x, timestep, model_options) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\samplers.py", line 218, in calc_cond_batch output = model.apply_model(inputx, timestep, c).chunk(batch_chunks) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\model_base.py", line 97, in apply_model model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, extra_conds).float() File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl return self._call_impl(*args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl return forward_call(*args, *kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\custom_nodes\FreeU_Advanced\nodes.py", line 176, in tempforward h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 44, in forward_timestep_embed x = layer(x, context, transformer_options) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl return self._call_impl(args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl return forward_call(*args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\ldm\modules\attention.py", line 633, in forward x = block(x, context=context[i], transformer_options=transformer_options) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl return self._call_impl(*args, *kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl return forward_call(args, kwargs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\ldm\modules\attention.py", line 460, in forward return checkpoint(self._forward, (x, context, transformer_options), self.parameters(), self.checkpoint) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\ldm\modules\diffusionmodules\util.py", line 191, in checkpoint return func(*inputs) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\ldm\modules\attention.py", line 557, in _forward n = attn2_replace_patch[block_attn2](n, context_attn2, value_attn2, extra_options) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_IPAdapter_plus\CrossAttentionPatch.py", line 161, in call out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\comfy\ldm\modules\attention.py", line 327, in attention_xformers out = 
xformers.ops.memory_efficient_attention(q, k, v, attn_bias=mask) File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\xformers\ops\fmha__init.py", line 223, in memory_efficient_attention return _memory_efficient_attention( File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\xformers\ops\fmha__init__.py", line 321, in _memory_efficient_attention return _memory_efficient_attention_forward( File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\xformers\ops\fmha\init__.py", line 334, in _memory_efficient_attention_forward inp.validate_inputs() File "F:\ComfyUI-aki-v1.3\ComfyUI-aki-v1.3\python\lib\site-packages\xformers\ops\fmha\common.py", line 121, in validate_inputs raise ValueError(
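Editor's note: the failing check is xformers' input validation. The IPAdapter cross-attention patch ends up passing a float16 query together with float32 key/value tensors into memory_efficient_attention. A minimal sketch of the same mismatch and of the cast that avoids it (shapes are made up for illustration; assumes a CUDA GPU with xformers installed):

import torch
import xformers.ops

# Hypothetical tensors mirroring the log above: query in fp16, key/value in fp32.
q = torch.randn(1, 4096, 8, 64, device="cuda", dtype=torch.float16)
k = torch.randn(1, 16, 8, 64, device="cuda", dtype=torch.float32)
v = torch.randn(1, 16, 8, 64, device="cuda", dtype=torch.float32)

try:
    xformers.ops.memory_efficient_attention(q, k, v)
except ValueError as e:
    print(e)  # "Query/Key/Value should either all have the same dtype ..."

# Casting key/value to the query dtype removes the mismatch. Running ComfyUI
# with --force-fp16 reaches the same end state globally by keeping the whole
# pipeline in fp16, so no fp32 key/value tensors reach this call.
out = xformers.ops.memory_efficient_attention(q, k.to(q.dtype), v.to(q.dtype))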

cubiq commented 2 months ago

try to run comfy in --force-fp16 mode
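Editor's note: --force-fp16 is a ComfyUI launch argument, so it goes on the command line that starts main.py. For a plain install that is typically something like:

python main.py --force-fp16

The exact python path and any other flags depend on your setup; portable and launcher-based builds pass the argument differently (see the discussion further down).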

LQPso commented 1 month ago

> try to run comfy in --force-fp16 mode

it works, thanks bro

silence-1 commented 1 week ago

> try to run comfy in --force-fp16 mode

> it works, thanks bro

May I ask how to run --force-fp16 mode? I also have this problem: when I use IPAdapter, KSampler raises this error.

Just adding --force-fp16 to run_nvidia_gpu.bat did not work. [image]
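Editor's note: in the standard ComfyUI portable build, run_nvidia_gpu.bat launches ComfyUI with a line along the lines of ".\python_embeded\python.exe -s ComfyUI\main.py --windows-standalone-build", and --force-fp16 has to be appended to the end of that same line, not added on a line of its own. The ComfyUI-aki package visible in the path above is typically started from its own launcher rather than from run_nvidia_gpu.bat, so editing the .bat may have no effect there; in that case the extra argument needs to be set in the launcher's startup settings instead (the exact location depends on the launcher version).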

LQPso commented 6 days ago

> try to run comfy in --force-fp16 mode

> it works, thanks bro

> May I ask how to run --force-fp16 mode? I also have this problem: when I use IPAdapter, KSampler raises this error.

> Just adding --force-fp16 to run_nvidia_gpu.bat did not work. [image]

Sorry, I forgot about this, but try checking your models: an SDXL checkpoint can't be used with an SD1.5 IPAdapter model, and a lot of problems come from that mismatch.