File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None
File c:\StableCascade\modules\controlnet.py:77, in ControlNet.forward(self, x)
76 def forward(self, x):
---> 77 x = self.backbone(x)
78 proj_outputs = [None for _ in range(max(self.proj_blocks) + 1)]
79 for i, idx in enumerate(self.proj_blocks):
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\container.py:215, in Sequential.forward(self, input)
213 def forward(self, input):
214 for module in self:
--> 215 input = module(input)
216 return input
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\container.py:215, in Sequential.forward(self, input)
213 def forward(self, input):
214 for module in self:
--> 215 input = module(input)
216 return input
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None
Hi! I'm trying to execute the
controlnet.ipynb
notebook for the inpainting example and the execution raises a RuntimeError:
Input type (torch.FloatTensor) and weight type (CPUBFloat16Type) should be the same or input should be a MKLDNN tensor and weight is a dense tensor
Is there any way to make it work?
Thanks for your help!
File c:\StableCascade\train\train_c_controlnet.py:149, in WurstCore.get_cnet(self, batch, models, extras, cnet_input, **kwargs) 147 cnet_input_preview = cnet_input 148 cnet_input, cnet_input_preview = cnet_input.to(self.device), cnet_input_preview.to(self.device) --> 149 cnet = models.controlnet(cnet_input) 150 return cnet, cnet_input_preview
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs) 1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc] 1517 else: -> 1518 return self._call_impl(*args, **kwargs)
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs) 1522 # If we don't have any hooks, we want to skip the rest of the logic in 1523 # this function, and just call forward. 1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks 1525 or _global_backward_pre_hooks or _global_backward_hooks 1526 or _global_forward_hooks or _global_forward_pre_hooks): -> 1527 return forward_call(*args, **kwargs) 1529 try: 1530 result = None
File c:\StableCascade\modules\controlnet.py:77, in ControlNet.forward(self, x) 76 def forward(self, x): ---> 77 x = self.backbone(x) 78 proj_outputs = [None for _ in range(max(self.proj_blocks) + 1)] 79 for i, idx in enumerate(self.proj_blocks):
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs) 1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc] 1517 else: -> 1518 return self._call_impl(*args, **kwargs)
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs) 1522 # If we don't have any hooks, we want to skip the rest of the logic in 1523 # this function, and just call forward. 1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks 1525 or _global_backward_pre_hooks or _global_backward_hooks 1526 or _global_forward_hooks or _global_forward_pre_hooks): -> 1527 return forward_call(*args, **kwargs) 1529 try: 1530 result = None
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\container.py:215, in Sequential.forward(self, input) 213 def forward(self, input): 214 for module in self: --> 215 input = module(input) 216 return input
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs) 1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc] 1517 else: -> 1518 return self._call_impl(*args, **kwargs)
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs) 1522 # If we don't have any hooks, we want to skip the rest of the logic in 1523 # this function, and just call forward. 1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks 1525 or _global_backward_pre_hooks or _global_backward_hooks 1526 or _global_forward_hooks or _global_forward_pre_hooks): -> 1527 return forward_call(*args, **kwargs) 1529 try: 1530 result = None
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\container.py:215, in Sequential.forward(self, input) 213 def forward(self, input): 214 for module in self: --> 215 input = module(input) 216 return input
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs) 1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc] 1517 else: -> 1518 return self._call_impl(*args, **kwargs)
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs) 1522 # If we don't have any hooks, we want to skip the rest of the logic in 1523 # this function, and just call forward. 1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks 1525 or _global_backward_pre_hooks or _global_backward_hooks 1526 or _global_forward_hooks or _global_forward_pre_hooks): -> 1527 return forward_call(*args, **kwargs) 1529 try: 1530 result = None
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\conv.py:460, in Conv2d.forward(self, input) 459 def forward(self, input: Tensor) -> Tensor: --> 460 return self._conv_forward(input, self.weight, self.bias)
File c:\Users\Me\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\conv.py:456, in Conv2d._conv_forward(self, input, weight, bias) 452 if self.padding_mode != 'zeros': 453 return F.conv2d(F.pad(input, self._reversed_padding_repeated_twice, mode=self.padding_mode), 454 weight, bias, self.stride, 455 _pair(0), self.dilation, self.groups) --> 456 return F.conv2d(input, weight, bias, self.stride, 457 self.padding, self.dilation, self.groups)