Hi, I use GCNet with the setting 'resnet50-fpn + c3~c5 r16', but the runtime increases by about 15 ms. Could you tell me the reason?
import torch
from torch import nn
import torch.nn.functional as F
def kaiming_init(module,
                 a=0,
                 mode='fan_out',
                 nonlinearity='relu',
                 bias=0,
                 distribution='normal'):
    """Initialize ``module.weight`` with Kaiming (He) initialization.

    Args:
        module: Layer with a ``weight`` tensor (e.g. ``nn.Conv2d``).
        a: Negative slope of the rectifier used after this layer.
        mode: Either ``'fan_in'`` or ``'fan_out'``.
        nonlinearity: Name of the following non-linearity
            (``'relu'`` or ``'leaky_relu'``).
        bias: Constant used to fill ``module.bias`` if the module has one.
        distribution: ``'uniform'`` or ``'normal'`` Kaiming variant.
    """
    assert distribution in ['uniform', 'normal']
    # In-place initializers in torch.nn.init carry a trailing underscore;
    # ``kaiminguniform``/``kaimingnormal`` do not exist.
    if distribution == 'uniform':
        nn.init.kaiming_uniform_(
            module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
    else:
        nn.init.kaiming_normal_(
            module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)
def constant_init(module, val, bias=0):
    """Fill ``module.weight`` with ``val`` and ``module.bias`` with ``bias``.

    Args:
        module: Layer with a ``weight`` tensor.
        val: Constant used to fill the weight.
        bias: Constant used to fill the bias, if the module has one.
    """
    # ``nn.init.constant`` is the deprecated spelling; the in-place
    # initializer is ``constant_``.
    nn.init.constant_(module.weight, val)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)


# Backward-compatible alias for the original (underscore-less) public name.
constantinit = constant_init
Hi, I use GCNet with the setting 'resnet50-fpn + c3~c5 r16', but the runtime increases by about 15 ms. Could you tell me the reason?
import torch
from torch import nn
import torch.nn.functional as F


def kaiming_init(module,
                 a=0,
                 mode='fan_out',
                 nonlinearity='relu',
                 bias=0,
                 distribution='normal'):
    """Initialize ``module.weight`` with Kaiming (He) initialization.

    Args:
        module: Layer with a ``weight`` tensor (e.g. ``nn.Conv2d``).
        a: Negative slope of the rectifier used after this layer.
        mode: Either ``'fan_in'`` or ``'fan_out'``.
        nonlinearity: Name of the following non-linearity
            (``'relu'`` or ``'leaky_relu'``).
        bias: Constant used to fill ``module.bias`` if the module has one.
        distribution: ``'uniform'`` or ``'normal'`` Kaiming variant.
    """
    assert distribution in ['uniform', 'normal']
    # In-place initializers in torch.nn.init carry a trailing underscore;
    # ``kaiminguniform``/``kaimingnormal`` do not exist.
    if distribution == 'uniform':
        nn.init.kaiming_uniform_(
            module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
    else:
        nn.init.kaiming_normal_(
            module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)
def constant_init(module, val, bias=0):
    """Fill ``module.weight`` with ``val`` and ``module.bias`` with ``bias``.

    Args:
        module: Layer with a ``weight`` tensor.
        val: Constant used to fill the weight.
        bias: Constant used to fill the bias, if the module has one.
    """
    # ``nn.init.constant`` is the deprecated spelling; the in-place
    # initializer is ``constant_``.
    nn.init.constant_(module.weight, val)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)


# Backward-compatible alias for the original (underscore-less) public name.
constantinit = constant_init
def last_zero_init(m):
    """Zero-initialize the last layer of ``m`` and flag it as initialized.

    If ``m`` is an ``nn.Sequential``, only its final sub-module is zeroed;
    otherwise ``m`` itself is. The ``inited`` attribute marks the module so
    later passes can skip re-initializing it.

    Args:
        m: A module, or an ``nn.Sequential`` whose last entry is the module
            to zero.
    """
    if isinstance(m, nn.Sequential):
        constant_init(m[-1], val=0)
        m[-1].inited = True
    else:
        constant_init(m, val=0)
        m.inited = True
class ContextBlock2d(nn.Module):