Here is a simple example that reports a different number of parameters between PyTorch and torchsummaryX. It seems that torchsummaryX does not count bare `torch.nn.Parameter` attributes registered directly on a module.
import torch
import torchsummaryX
class Net(torch.nn.Module):
    """Toy module with one bare Parameter plus one Conv1d layer.

    Total parameter count: 1 (``p``) + 8 (``conv1`` weight+bias) = 9.
    """

    def __init__(self):
        super().__init__()
        # A parameter registered directly on the module, not inside a layer.
        self.p = torch.nn.Parameter(torch.zeros(1))
        self.conv1 = torch.nn.Conv1d(1, 2, kernel_size=3)

    def forward(self, x):
        # NOTE: the in-place multiply mutates the caller's tensor as well.
        x *= self.p
        return self.conv1(x)
def get_n_params(model):
    """Return the total number of scalar elements in *model*'s parameters.

    Counts every tensor yielded by ``model.parameters()`` — including bare
    ``torch.nn.Parameter`` attributes, not only layer weights — so the
    result can differ from summaries that only walk submodules.

    Args:
        model: any ``torch.nn.Module``.

    Returns:
        int: sum of ``numel()`` over all parameters (0 for a module with
        no parameters).
    """
    # Tensor.numel() replaces the original hand-rolled product over p.size().
    return sum(p.numel() for p in model.parameters())
# Reproduce the mismatch: manual parameter count vs. torchsummaryX's summary.
model = Net()
sample = torch.rand([64, 1, 10])
print("number of parameters = ", get_n_params(model))
torchsummaryX.summary(model, sample)
Here is a simple example giving a different number of parameters between PyTorch and torchsummaryX. It seems that torchsummaryX does not count `torch.nn.Parameter` attributes as layers. It returns 9 (PyTorch) vs. 8 (torchsummaryX).