2024-11-10 16:26:07,064 - root - INFO - Prompt executed in 1.27 seconds
2024-11-10 16:29:23,933 - root - INFO - got prompt
2024-11-10 16:29:25,319 - root - ERROR - !!! Exception during processing !!! str expected, not NoneType
2024-11-10 16:29:25,320 - root - ERROR - Traceback (most recent call last):
  File "C:\ComfyUI\execution.py", line 323, in execute
    output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
  File "C:\ComfyUI\execution.py", line 198, in get_output_data
    return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
  File "C:\ComfyUI\execution.py", line 169, in _map_node_over_list
    process_inputs(input_dict, i)
  File "C:\ComfyUI\execution.py", line 158, in process_inputs
    results.append(getattr(obj, func)(**inputs))
  File "C:\ComfyUI\custom_nodes\ComfyUI-GIMM-VFI\nodes.py", line 200, in interpolate
    all_outputs = gimmvfi_model(xs, coord_inputs, t=timesteps, ds_factor=ds_factor)
  File "c:\users\zahran\appdata\local\programs\python\python310\lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "c:\users\zahran\appdata\local\programs\python\python310\lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
    return forward_call(*args, **kwargs)
  File "C:\ComfyUI\custom_nodes\ComfyUI-GIMM-VFI\gimmvfi\generalizable_INR\gimmvfi_f.py", line 331, in forward
    normal_inr_flows = self.predict_flow(normal_flows, coord, t, flows)
  File "C:\ComfyUI\custom_nodes\ComfyUI-GIMM-VFI\gimmvfi\generalizable_INR\gimmvfi_f.py", line 156, in predict_flow
    tmp_pixel_latent_0 = softsplat(
  File "C:\ComfyUI\custom_nodes\ComfyUI-GIMM-VFI\gimmvfi\generalizable_INR\modules\softsplat.py", line 316, in softsplat
    tenOut = softsplat_func.apply(tenIn, tenFlow)
  File "c:\users\zahran\appdata\local\programs\python\python310\lib\site-packages\torch\autograd\function.py", line 575, in apply
    return super().apply(*args, **kwargs)  # type: ignore[misc]
  File "c:\users\zahran\appdata\local\programs\python\python310\lib\site-packages\torch\amp\autocast_mode.py", line 476, in decorate_fwd
    return fwd(*args, **kwargs)
  File "C:\ComfyUI\custom_nodes\ComfyUI-GIMM-VFI\gimmvfi\generalizable_INR\modules\softsplat.py", line 365, in forward
    cuda_launch(
  File "cupy\_util.pyx", line 64, in cupy._util.memoize.decorator.ret
  File "C:\ComfyUI\custom_nodes\ComfyUI-GIMM-VFI\gimmvfi\generalizable_INR\modules\softsplat.py", line 266, in cuda_launch
    os.environ["CUDA_HOME"] = cupy.cuda.get_cuda_path()
  File "c:\users\zahran\appdata\local\programs\python\python310\lib\os.py", line 685, in __setitem__
    value = self.encodevalue(value)
  File "c:\users\zahran\appdata\local\programs\python\python310\lib\os.py", line 743, in check_str
    raise TypeError("str expected, not %s" % type(value).__name__)
TypeError: str expected, not NoneType
Note: the models were downloaded automatically.
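For anyone who hits the same trace: the last few frames suggest cupy.cuda.get_cuda_path() returned None (CuPy could not locate a CUDA Toolkit installation), and os.environ values must be strings, hence the TypeError. A minimal sketch of that failure mode, assuming CuPy is installed but no toolkit can be found:

import os
import cupy

# get_cuda_path() returns None when CuPy cannot locate a CUDA Toolkit,
# typically because CUDA_PATH is unset and no default install exists.
cuda_path = cupy.cuda.get_cuda_path()
print("CUDA Toolkit found at:", cuda_path)

if cuda_path is None:
    # This is the situation softsplat.py runs into: it assigns this result
    # to os.environ["CUDA_HOME"], and environ values must be str, which
    # produces "TypeError: str expected, not NoneType".
    print("CuPy cannot find the CUDA Toolkit; install it or set CUDA_PATH")
else:
    os.environ["CUDA_HOME"] = cuda_path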
I believe GIMM-VFI needs the CUDA Toolkit installed, and it can't find it:
https://developer.nvidia.com/cuda-toolkit
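If it helps to verify the install before re-running the workflow, a quick check like the one below (run in the same Python environment ComfyUI uses) should print a real path instead of None once the toolkit is present; the install directory shown is only an example and will differ by machine and CUDA version:

import os

# Example path only: adjust to your actual CUDA Toolkit install directory
# if CuPy does not pick it up automatically.
os.environ.setdefault("CUDA_PATH", r"C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.4")

import cupy

print(cupy.cuda.get_cuda_path())              # should now be a real path, not None
print(cupy.cuda.runtime.runtimeGetVersion())  # CUDA runtime version CuPy is using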
Problem fixed, thanks.