xvjiarui / GCNet

GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond

runtime increases by about 15 ms? #16

Closed · boundles closed 5 years ago

boundles commented 5 years ago

Hi, I use GCNet with the 'resnet50-fpn + c3~c5 r16' setting, but runtime increases by about 15 ms. Could you tell me the reason? My block implementation is below:

```python
import torch
from torch import nn
import torch.nn.functional as F


def kaiming_init(module, a=0, mode='fan_out', nonlinearity='relu',
                 bias=0, distribution='normal'):
    assert distribution in ['uniform', 'normal']
    if distribution == 'uniform':
        nn.init.kaiming_uniform_(
            module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
    else:
        nn.init.kaiming_normal_(
            module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)


def constant_init(module, val, bias=0):
    nn.init.constant_(module.weight, val)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)


def last_zero_init(m):
    # Zero-init the last layer so the block starts as an identity mapping.
    if isinstance(m, nn.Sequential):
        constant_init(m[-1], val=0)
        m[-1].inited = True
    else:
        constant_init(m, val=0)
        m.inited = True


class ContextBlock2d(nn.Module):

    def __init__(self, inplanes, ratio=1. / 16.):
        super(ContextBlock2d, self).__init__()

        self.inplanes = inplanes
        self.planes = int(self.inplanes * ratio)

        # Channel-add transform: 1x1 conv bottleneck with reduction ratio r.
        self.channel_add_conv = nn.Sequential(
            nn.Conv2d(self.inplanes, self.planes, kernel_size=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(self.planes, self.inplanes, kernel_size=1)
        )

        self.reset_parameters()

    def reset_parameters(self):
        last_zero_init(self.channel_add_conv)

    def spatial_pool(self, x):
        batch, channel, height, width = x.size()
        # Global average pooling -> [N, C, 1, 1]
        context = F.avg_pool2d(x, (height, width))

        return context

    def forward(self, x):
        # [N, C, 1, 1]
        context = self.spatial_pool(x)

        # [N, C, 1, 1], broadcast-added to every spatial position
        channel_add_term = self.channel_add_conv(context)
        out = x + channel_add_term

        return out
```
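For reference, a minimal timing sketch (not from the repo) of how one might measure the per-forward overhead of a single block; the feature-map shape, repeat counts, and CPU execution here are assumptions for illustration, not the paper's benchmark setup:

```python
# Assumption: one ContextBlock2d on a hypothetical c5-sized feature map.
import time

import torch

block = ContextBlock2d(inplanes=2048, ratio=1. / 16.).eval()
x = torch.randn(1, 2048, 25, 38)  # roughly a c5 map for an ~800x1333 input

with torch.no_grad():
    # Warm-up so one-time allocation cost is excluded from the measurement.
    for _ in range(10):
        block(x)
    start = time.perf_counter()
    for _ in range(100):
        block(x)
    elapsed_ms = (time.perf_counter() - start) / 100 * 1e3
print(f'avg forward time: {elapsed_ms:.3f} ms')
```

Note that with the c3~c5 setting the block is inserted after many residual blocks across three stages, so the total added latency is the sum over all inserted blocks, not a single forward pass.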
xvjiarui commented 5 years ago

Sorry for the late reply.

Please find the training/inference time comparison in the updated table.