Closed — Yannis1995, closed 3 years ago
```python
def BNReLUConv2d(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
                 dilation=1, groups=1, padding_mode='zeros', use_original_conv=False):
    if use_original_conv or kernel_size == 1 or kernel_size == (1, 1):
        return super(ACNetBuilder, self).BNReLUConv2d(in_channels=in_channels, out_channels=out_channels,
                                                      kernel_size=kernel_size, stride=stride, padding=padding,
                                                      dilation=dilation, groups=groups,
                                                      padding_mode=padding_mode, use_original_conv=True)
    bn_layer = self.BatchNorm2d(num_features=in_channels)
    conv_layer = ACBlock(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                         padding=padding, dilation=dilation, groups=groups,
                         padding_mode=padding_mode, deploy=self.deploy)
    se = self.Sequential()
    se.add_module('bn', bn_layer)
    se.add_module('relu', self.ReLU())
    se.add_module('acb', conv_layer)
    return se
```
Is the BN layer part of the ACB here?
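For reference, here is a minimal standalone sketch of the Sequential that this builder returns, showing that `bn` is registered as a sibling module of `acb` rather than living inside it. A plain `nn.Conv2d` is used as a stand-in for `ACBlock`, which is purely an assumption for illustration.

```python
import torch
import torch.nn as nn

def bn_relu_acb_sketch(in_channels, out_channels, kernel_size, stride=1, padding=0):
    # Mirrors the module order built by BNReLUConv2d: bn -> relu -> acb.
    se = nn.Sequential()
    se.add_module('bn', nn.BatchNorm2d(num_features=in_channels))   # BN applied to the block input
    se.add_module('relu', nn.ReLU())
    # Stand-in for ACBlock (assumption): any conv-like module slots in here.
    se.add_module('acb', nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
                                   stride=stride, padding=padding))
    return se

if __name__ == '__main__':
    block = bn_relu_acb_sketch(16, 32, 3, padding=1)
    print(block)                            # 'bn' appears next to 'acb', not nested inside it
    y = block(torch.randn(2, 16, 8, 8))
    print(y.shape)                          # torch.Size([2, 32, 8, 8])
```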