sksq96 / pytorch-summary

Model summary in PyTorch similar to `model.summary()` in Keras
MIT License
3.98k stars 412 forks source link

torch.add() does not show up in the output of summary #198

Open Rander2002 opened 7 months ago

Rander2002 commented 7 months ago

torch.add() doesn't show up in the output of summary. Is there any way to show all of the sequential layers from the forward method of the Net in the correct order? My Net is as follows.

`import torch
import torch.nn as nn
from math import sqrt
from torchsummary import summary

from torchkeras import summary

class Conv_ReLU_Block(nn.Module):
    """One 3x3 convolution (64 -> 64 channels, no bias) followed by an in-place ReLU.

    Note: markdown rendering stripped the double underscores from the original
    paste; the constructor must be ``__init__`` / ``super().__init__()``.
    """

    def __init__(self):
        super(Conv_ReLU_Block, self).__init__()
        # bias=False: the network relies on the residual path, and bias-free
        # convs are standard in VDSR-style models.
        self.conv = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3,
                              stride=1, padding=1, bias=False)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        """Apply conv then ReLU; spatial size is preserved (stride 1, padding 1)."""
        return self.relu(self.conv(x))

class Net(nn.Module):
    """VDSR-style super-resolution network.

    Pipeline: input conv (1 -> 64) -> ReLU -> 18 stacked Conv_ReLU_Block
    residual layers -> output conv (64 -> 1), with a global skip connection
    added at the end via ``torch.add``.

    Note on the issue being reported: ``torch.add`` is a functional operation,
    not an ``nn.Module``, so hook-based tools like torchsummary (which register
    forward hooks on submodules) cannot display it. Wrapping the add in a tiny
    ``nn.Module`` would make it visible.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.residual_layer = self.make_layer(Conv_ReLU_Block, 18)
        self.input = nn.Conv2d(in_channels=1, out_channels=64, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.output = nn.Conv2d(in_channels=64, out_channels=1, kernel_size=3,
                                stride=1, padding=1, bias=False)
        self.relu = nn.ReLU(inplace=True)

        # He-style initialization: std = sqrt(2 / fan_out) for every conv.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, sqrt(2. / n))

    def make_layer(self, block, num_of_layer):
        """Stack ``num_of_layer`` instances of ``block`` into an nn.Sequential."""
        layers = []
        for _ in range(num_of_layer):
            layers.append(block())
        return nn.Sequential(*layers)

    def forward(self, x):
        residual = x  # global skip connection around the whole body
        out = self.relu(self.input(x))
        out = self.residual_layer(out)
        out = self.output(out)
        # Functional op — invisible to torchsummary's module hooks (issue #198).
        out = torch.add(out, residual)
        return out

# Script entry point: build the model and print a Keras-style layer summary.
# Markdown stripped the dunders from the paste; the guard must compare
# __name__ against '__main__'.
if __name__ == '__main__':
    model = Net()
    summary(model, input_size=(1, 33, 33))

`