Error occurred when executing SamplerCustomAdvanced:
'ForgeParams4bit' object has no attribute 'quant_storage'
File "D:\comfyui\ComfyUI-aki-v1.1\execution.py", line 152, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
File "D:\comfyui\ComfyUI-aki-v1.1\execution.py", line 82, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
File "D:\comfyui\ComfyUI-aki-v1.1\execution.py", line 75, in map_node_over_list
results.append(getattr(obj, func)(**slice_dict(input_data_all, i)))
File "D:\comfyui\ComfyUI-aki-v1.1\comfy_extras\nodes_custom_sampler.py", line 612, in sample
samples = guider.sample(noise.generate_noise(latent), latent_image, sampler, sigmas, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise.seed)
File "D:\comfyui\ComfyUI-aki-v1.1\comfy\samplers.py", line 706, in sample
self.inner_model, self.conds, self.loaded_models = comfy.sampler_helpers.prepare_sampling(self.model_patcher, noise.shape, self.conds)
File "D:\comfyui\ComfyUI-aki-v1.1\comfy\sampler_helpers.py", line 66, in prepare_sampling
comfy.model_management.load_models_gpu([model] + models, memory_required=memory_required, minimum_memory_required=minimum_memory_required)
File "D:\comfyui\ComfyUI-aki-v1.1\comfy\model_management.py", line 526, in load_models_gpu
cur_loaded_model = loaded_model.model_load(lowvram_model_memory, force_patch_weights=force_patch_weights)
File "D:\comfyui\ComfyUI-aki-v1.1\comfy\model_management.py", line 323, in model_load
self.model.unpatch_model(self.model.offload_device)
File "D:\comfyui\ComfyUI-aki-v1.1\comfy\model_patcher.py", line 618, in unpatch_model
self.model.to(device_to)
File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 1160, in to
return self._apply(convert)
File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 810, in _apply
module._apply(fn)
File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 810, in _apply
module._apply(fn)
File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 833, in _apply
param_applied = fn(param)
File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 1158, in convert
return t.to(device, dtype if t.is_floating_point() or t.is_complex() else None, non_blocking)
File "D:\comfyui\ComfyUI-aki-v1.1\custom_nodes\ComfyUI_bitsandbytes_NF4\__init__.py", line 64, in to
quant_storage=self.quant_storage,
workflow:
(workflow screenshot `8.png` is missing — the GitHub "Uploading 8.png…" placeholder was pasted before the upload finished, so the image link is empty; please re-attach the workflow image or the workflow JSON)
Error occurred when executing SamplerCustomAdvanced:
'ForgeParams4bit' object has no attribute 'quant_storage'
File "D:\comfyui\ComfyUI-aki-v1.1\execution.py", line 152, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "D:\comfyui\ComfyUI-aki-v1.1\execution.py", line 82, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "D:\comfyui\ComfyUI-aki-v1.1\execution.py", line 75, in map_node_over_list results.append(getattr(obj, func)(**slice_dict(input_data_all, i))) File "D:\comfyui\ComfyUI-aki-v1.1\comfy_extras\nodes_custom_sampler.py", line 612, in sample samples = guider.sample(noise.generate_noise(latent), latent_image, sampler, sigmas, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise.seed) File "D:\comfyui\ComfyUI-aki-v1.1\comfy\samplers.py", line 706, in sample self.inner_model, self.conds, self.loaded_models = comfy.sampler_helpers.prepare_sampling(self.model_patcher, noise.shape, self.conds) File "D:\comfyui\ComfyUI-aki-v1.1\comfy\sampler_helpers.py", line 66, in prepare_sampling comfy.model_management.load_models_gpu([model] + models, memory_required=memory_required, minimum_memory_required=minimum_memory_required) File "D:\comfyui\ComfyUI-aki-v1.1\comfy\model_management.py", line 526, in load_models_gpu cur_loaded_model = loaded_model.model_load(lowvram_model_memory, force_patch_weights=force_patch_weights) File "D:\comfyui\ComfyUI-aki-v1.1\comfy\model_management.py", line 323, in model_load self.model.unpatch_model(self.model.offload_device) File "D:\comfyui\ComfyUI-aki-v1.1\comfy\model_patcher.py", line 618, in unpatch_model self.model.to(device_to) File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 1160, in to return self._apply(convert) File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 810, in _apply module._apply(fn) File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 810, in _apply module._apply(fn) File 
"D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 833, in _apply param_applied = fn(param) File "D:\comfyui\ComfyUI-aki-v1.1\venv\lib\site-packages\torch\nn\modules\module.py", line 1158, in convert return t.to(device, dtype if t.is_floating_point() or t.is_complex() else None, non_blocking) File "D:\comfyui\ComfyUI-aki-v1.1\custom_nodes\ComfyUI_bitsandbytes_NF4\__init__.py", line 64, in to quant_storage=self.quant_storage, workflow: (workflow screenshot `8.png` is missing — the GitHub "Uploading 8.png…" placeholder was pasted before the upload finished, so the image link is empty)