TencentQQGYLab / ComfyUI-ELLA

ELLA nodes for ComfyUI

Error when trying to replicate example workflow. Might be related to VRAM optimizations #10

Closed. FNSpd closed this issue 2 months ago.

FNSpd commented 2 months ago

Error occurred when executing KSampler:

Expected all tensors to be on the same device, but found at least two devices, cpu and cuda:0! (when checking argument for argument mat2 in method wrapper_CUDA_mm)

FNSpd commented 2 months ago

File "D:\ComfyUI\execution.py", line 151, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "D:\ComfyUI\execution.py", line 81, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "D:\ComfyUI\execution.py", line 74, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) File "D:\ComfyUI\nodes.py", line 1344, in sample return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise) File "D:\ComfyUI\nodes.py", line 1314, in common_ksampler samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, File "D:\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 22, in informative_sample raise e File "D:\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample return original_sample(*args, *kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations. File "D:\ComfyUI\custom_nodes\ComfyUI-Advanced-ControlNet\adv_control\control_reference.py", line 47, in refcn_sample return orig_comfy_sample(model, args, kwargs) File "D:\ComfyUI\comfy\sample.py", line 37, in sample samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed) File "D:\ComfyUI\comfy\samplers.py", line 755, in sample return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) File "D:\ComfyUI\comfy\samplers.py", line 657, in sample return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) File "D:\ComfyUI\comfy\samplers.py", line 644, in sample output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) File "D:\ComfyUI\comfy\samplers.py", line 623, in inner_sample samples = sampler.sample(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar) File "D:\ComfyUI\comfy\samplers.py", line 534, in sample samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, self.extra_options) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\utils_contextlib.py", line 115, in decorate_context return func(*args, *kwargs) File "D:\ComfyUI\comfy\k_diffusion\sampling.py", line 137, in sample_euler denoised = model(x, sigma_hat s_in, extra_args) File "D:\ComfyUI\comfy\samplers.py", line 272, in call out = self.inner_model(x, sigma, model_options=model_options, seed=seed) File "D:\ComfyUI\comfy\samplers.py", line 610, in call return self.predict_noise(*args, kwargs) File "D:\ComfyUI\comfy\samplers.py", line 613, in predict_noise return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed) File "D:\ComfyUI\comfy\samplers.py", line 258, in sampling_function out = calc_cond_batch(model, conds, x, timestep, model_options) File 
"D:\ComfyUI\comfy\samplers.py", line 216, in calc_cond_batch output = model_options['model_function_wrapper'](model.apply_model, {"input": inputx, "timestep": timestep, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\ella.py", line 62, in call return apply_model(inputx, timestep, c) File "D:\ComfyUI\comfy\model_base.py", line 97, in apply_model model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, extra_conds).float() File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, *kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(args, kwargs) File "D:\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 850, in forward h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) File "D:\ComfyUI\comfy\ldm\modules\diffusionmodules\openaimodel.py", line 44, in forward_timestep_embed x = layer(x, context, transformer_options) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, *kwargs) File "D:\ComfyUI\comfy\ldm\modules\attention.py", line 633, in forward x = block(x, context=context[i], transformer_options=transformer_options) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, kwargs) File "D:\ComfyUI\custom_nodes\ComfyUI-layerdiffuse\lib_layerdiffusion\attention_sharing.py", line 253, in forward return func(self, x, context, transformer_options) File "D:\ComfyUI\comfy\ldm\modules\attention.py", line 460, in forward return checkpoint(self._forward, (x, context, transformer_options), self.parameters(), self.checkpoint) File "D:\ComfyUI\comfy\ldm\modules\diffusionmodules\util.py", line 191, in checkpoint return func(inputs) File "D:\ComfyUI\comfy\ldm\modules\attention.py", line 560, in _forward n = self.attn2(n, context=context_attn2, value=value_attn2) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, kwargs) File "D:\ComfyUI\comfy\ldm\modules\attention.py", line 404, in forward k = self.to_k(context) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, *kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(args, kwargs) File "D:\ComfyUI\comfy\ops.py", line 52, in forward 
return super().forward(*args, **kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\linear.py", line 116, in forward return F.linear(input, self.weight, self.bias)
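The trace bottoms out at `k = self.to_k(context)` in cross-attention, i.e. the conditioning tensor produced by ELLA is still on the CPU while the UNet's projection weights are on cuda:0. A minimal, hypothetical repro of that mismatch (not the plugin's actual code; it needs a CUDA device):

```python
import torch

# Hypothetical repro: a cross-attention-style projection whose weights live on
# cuda:0 is fed a conditioning tensor that was left on the CPU. F.linear then
# fails with "Expected all tensors to be on the same device ... cpu and cuda:0".
assert torch.cuda.is_available(), "this repro needs a CUDA device"

to_k = torch.nn.Linear(768, 320, bias=False).cuda()  # UNet-side weights on cuda:0
context = torch.randn(1, 77, 768)                    # ELLA-style conditioning, on cpu

try:
    to_k(context)
except RuntimeError as e:
    print(e)  # Expected all tensors to be on the same device...

# The usual guard is to move the conditioning to the weight's device and dtype:
out = to_k(context.to(device=to_k.weight.device, dtype=to_k.weight.dtype))
print(out.device)  # cuda:0
```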

JettHu commented 2 months ago

A bit strange. Can you paste your ComfyUI startup parameters?

FNSpd commented 2 months ago

This error shows up with these args: python main.py --listen --normalvram --fp32-text-enc --force-fp32

If I leave only --listen, I get this when selecting FP16 on the TextEncoder node: Error occurred when executing EllaApply:

"LayerNormKernelImpl" not implemented for 'Half'

File "D:\ComfyUI\execution.py", line 151, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "D:\ComfyUI\execution.py", line 81, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "D:\ComfyUI\execution.py", line 74, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\ella.py", line 106, in apply _cond, _uncond = ella_proxy.prepare_conds() File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\ella.py", line 39, in prepare_conds cond = self.ella(torch.Tensor([999]).to(torch.int64), self.embeds[0]) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, *kwargs) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\model.py", line 296, in forward return self.connector(t5_embeds, timestep_embedding=time_embedding) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, kwargs) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\model.py", line 106, in forward latents = p_block(x, latents, timestep_embedding=timestep_embedding) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, *kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(args, kwargs) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\model.py", line 65, in forward normed_latents = self.ln_1(latents, timestep_embedding) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, kwargs) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\model.py", line 28, in forward return self.norm(x) (1 + scale) + shift File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\normalization.py", line 201, in forward return F.layer_norm( File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\functional.py", line 2546, in layer_norm return torch.layer_norm(input, normalized_shape, weight, bias, eps, torch.backends.cudnn.enabled)

Or this, if I select FP32: Error occurred when executing EllaApply:

mat1 and mat2 must have the same dtype, but got Float and Half

File "D:\ComfyUI\execution.py", line 151, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "D:\ComfyUI\execution.py", line 81, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "D:\ComfyUI\execution.py", line 74, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\ella.py", line 106, in apply _cond, _uncond = ella_proxy.prepare_conds() File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\ella.py", line 39, in prepare_conds cond = self.ella(torch.Tensor([999]).to(torch.int64), self.embeds[0]) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, *kwargs) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\model.py", line 294, in forward time_embedding = self.time_embedding(ori_time_feature) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(*args, kwargs) File "D:\ComfyUI\custom_nodes\ComfyUI-ELLA\model.py", line 183, in forward sample = self.linear_1(sample) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1511, in _wrapped_call_impl return self._call_impl(*args, *kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1520, in _call_impl return forward_call(args, kwargs) File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\linear.py", line 116, in forward return F.linear(input, self.weight, self.bias)

JettHu commented 2 months ago

Reproduced successfully. It seems to be caused by missing handling for the case where text_encoder_device returns cpu under --fp32-text-enc + --force-fp32. It will be fixed soon.
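Not knowing the actual patch, the shape of the fix is presumably to stop assuming the text-encoder device matches the sampling device and to move things explicitly. A rough, hypothetical sketch of that pattern (the `apply_ella` / `ella` / `t5_embeds` names are illustrative, not the plugin's real API):

```python
import torch
import comfy.model_management as mm

def apply_ella(ella: torch.nn.Module, t5_embeds: torch.Tensor) -> torch.Tensor:
    """Hypothetical sketch, not the plugin's real code: run the ELLA connector on
    whatever device the text encoder used (cpu under --fp32-text-enc --force-fp32),
    then return conditioning on the UNet's device/dtype for cross-attention."""
    text_device = mm.text_encoder_device()   # cpu in the failing configuration
    unet_device = mm.get_torch_device()      # cuda:0

    # Keep the connector's device and dtype aligned with the T5 embeddings it consumes.
    ella = ella.to(device=text_device, dtype=t5_embeds.dtype)
    timesteps = torch.tensor([999], dtype=torch.int64, device=text_device)
    cond = ella(timesteps, t5_embeds.to(text_device))

    # Whatever device ELLA ran on, the UNet must see its own device and dtype.
    return cond.to(device=unet_device, dtype=mm.unet_dtype())
```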

JettHu commented 2 months ago

Already fixed.

JettHu commented 2 months ago

If possible, update the plugin and try again. If there is no problem, you can close the issue.

FNSpd commented 2 months ago

Thanks, it works now. The error from my second message is still present, but it works with --fp32-text-enc now.