File "A:\AI\ComfyUI-aki-v1.3\execution.py", line 151, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
File "A:\AI\ComfyUI-aki-v1.3\execution.py", line 81, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
File "A:\AI\ComfyUI-aki-v1.3\execution.py", line 74, in map_node_over_list
results.append(getattr(obj, func)(*slice_dict(input_data_all, i)))
File "A:\AI\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Kolors-MZ\__init__.py", line 66, in encode
return mz_kolors_core.MZ_ChatGLM3TextEncodeV2_call(kwargs)
File "A:\AI\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Kolors-MZ\mz_kolors_core.py", line 135, in MZ_ChatGLM3TextEncodeV2_call
prompt_embeds, pooled_output = chatglm3_text_encode(
File "A:\AI\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Kolors-MZ\mz_kolors_core.py", line 33, in chatglm3_text_encode
text_encoder.to(device)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\modeling_utils.py", line 2796, in to
return super().to(*args, **kwargs)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1173, in to
return self._apply(convert)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 779, in _apply
module._apply(fn)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 804, in _apply
param_applied = fn(param)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1159, in convert
return t.to(
Error occurred when executing MZ_ChatGLM3_V2:
Allocation on device
File "A:\AI\ComfyUI-aki-v1.3\execution.py", line 151, in recursive_execute
    output_data, output_ui = get_output_data(obj, input_data_all)
File "A:\AI\ComfyUI-aki-v1.3\execution.py", line 81, in get_output_data
    return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
File "A:\AI\ComfyUI-aki-v1.3\execution.py", line 74, in map_node_over_list
    results.append(getattr(obj, func)(*slice_dict(input_data_all, i)))
File "A:\AI\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Kolors-MZ\__init__.py", line 66, in encode
    return mz_kolors_core.MZ_ChatGLM3TextEncodeV2_call(kwargs)
File "A:\AI\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Kolors-MZ\mz_kolors_core.py", line 135, in MZ_ChatGLM3TextEncodeV2_call
    prompt_embeds, pooled_output = chatglm3_text_encode(
File "A:\AI\ComfyUI-aki-v1.3\custom_nodes\ComfyUI-Kolors-MZ\mz_kolors_core.py", line 33, in chatglm3_text_encode
    text_encoder.to(device)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\modeling_utils.py", line 2796, in to
    return super().to(*args, **kwargs)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1173, in to
    return self._apply(convert)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 779, in _apply
    module._apply(fn)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 804, in _apply
    param_applied = fn(param)
File "A:\AI\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1159, in convert
    return t.to(