GengDavid / pytorch-cpn

A PyTorch re-implementation of CPN (Cascaded Pyramid Network for Multi-Person Pose Estimation)

RuntimeError: The size of tensor a (512) must match the size of tensor b (256) at non-singleton dimension 1 #45

Closed by VedVyapak 2 years ago

VedVyapak commented 2 years ago

While training, I am getting this error:

<ipython-input-67-b0b9c64f728a> in forward(self, x)
     94         print("")
     95 
---> 96         out += residual
     97 
     98         out = self.relu(out)

RuntimeError: The size of tensor a (512) must match the size of tensor b (256) at non-singleton dimension 1
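
For context, the message means the two operands of out += residual disagree at dimension 1 (the channel dimension), and broadcasting cannot reconcile 512 with 256 because neither is 1. A standalone two-tensor snippet reproduces the same error:

import torch

# Same kind of mismatch as in the traceback: 512 vs 256 channels at dim 1.
out = torch.randn(12, 512, 12, 9)
residual = torch.randn(12, 256, 12, 9)
out += residual  # RuntimeError: The size of tensor a (512) must match ...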

This code block is the origin of the error:

import torch.nn as nn


class Bottleneck(nn.Module):
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        # 1x1 conv: reduce channels to `planes`
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        # 3x3 conv: spatial processing (stride > 1 shrinks the feature map)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # 1x1 conv: expand channels to planes * expansion
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        # Projects the residual to out's shape; must be set whenever
        # stride != 1 or inplanes != planes * expansion.
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual

        out = self.relu(out)

        return out
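
In the standard torchvision-style ResNet this Bottleneck comes from, downsample is created by _make_layer whenever the first block of a stage changes the stride or the channel count, so the residual is projected to the same shape as out before the addition. A minimal sketch of that pattern (standard ResNet code, not copied from this repo, so treat the exact wiring as an assumption):

import torch.nn as nn

def _make_layer(block, inplanes, planes, blocks, stride=1):
    # Project the residual with a 1x1 conv whenever the first block
    # of a stage changes the stride or the channel count.
    downsample = None
    if stride != 1 or inplanes != planes * block.expansion:
        downsample = nn.Sequential(
            nn.Conv2d(inplanes, planes * block.expansion,
                      kernel_size=1, stride=stride, bias=False),
            nn.BatchNorm2d(planes * block.expansion),
        )

    layers = [block(inplanes, planes, stride, downsample)]
    inplanes = planes * block.expansion
    for _ in range(1, blocks):
        layers.append(block(inplanes, planes))
    return nn.Sequential(*layers)

If downsample ends up None for a transition block (for example after editing the model in a notebook and dropping this wiring), out += residual fails with exactly the mismatch above.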

When I printed the sizes of out and residual, this is what I got:

torch.Size([12, 256, 96, 72])
torch.Size([12, 256, 96, 72])

torch.Size([12, 256, 96, 72])
torch.Size([12, 256, 96, 72])

torch.Size([12, 256, 96, 72])
torch.Size([12, 256, 96, 72])

torch.Size([12, 512, 48, 36])
torch.Size([12, 512, 48, 36])

torch.Size([12, 512, 48, 36])
torch.Size([12, 512, 48, 36])

torch.Size([12, 512, 48, 36])
torch.Size([12, 512, 48, 36])

torch.Size([12, 512, 48, 36])
torch.Size([12, 512, 48, 36])

torch.Size([12, 1024, 24, 18])
torch.Size([12, 1024, 24, 18])

torch.Size([12, 1024, 24, 18])
torch.Size([12, 1024, 24, 18])

torch.Size([12, 1024, 24, 18])
torch.Size([12, 1024, 24, 18])

torch.Size([12, 1024, 24, 18])
torch.Size([12, 1024, 24, 18])

torch.Size([12, 1024, 24, 18])
torch.Size([12, 1024, 24, 18])

torch.Size([12, 1024, 24, 18])
torch.Size([12, 1024, 24, 18])

torch.Size([12, 2048, 12, 9])
torch.Size([12, 2048, 12, 9])

torch.Size([12, 2048, 12, 9])
torch.Size([12, 2048, 12, 9])

torch.Size([12, 2048, 12, 9])
torch.Size([12, 2048, 12, 9])

torch.Size([12, 256, 12, 9])
torch.Size([12, 512, 12, 9])
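
The last pair is the only mismatched one: the two tensors differ in channel count (256 vs 512) right where a new stage begins. A small guard before the in-place add (a debugging aid, not part of the original code) would point directly at the misconfigured block:

if out.shape != residual.shape:
    raise RuntimeError(
        f"residual mismatch: out {tuple(out.shape)} vs "
        f"residual {tuple(residual.shape)}; downsample={self.downsample}")
out += residual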

How can I solve this issue?