tdeboissiere / DeepLearningImplementations

Implementation of recent Deep Learning papers
MIT License
1.81k stars 652 forks source link

densenet on cifar10 #82

Open efferre99 opened 5 years ago

efferre99 commented 5 years ago

When I modify the parameters for CIFAR-10 (depth=40, nb_filter=48, nb_dense_block=3, growth_rate=12), the resulting model summary shows that some layers (the Concatenate layers) are connected to many dropout layers. Is this correct? Layer (type) Output Shape Param # Connected to

input_10 (InputLayer) (None, 32, 32, 1) 0


initial_conv2D (Conv2D) (None, 32, 32, 48) 432 input_10[0][0]


batch_normalization_167 (BatchN (None, 32, 32, 48) 192 initial_conv2D[0][0]


activation_164 (Activation) (None, 32, 32, 48) 0 batch_normalization_167[0][0]


conv2d_161 (Conv2D) (None, 32, 32, 12) 5184 activation_164[0][0]


dropout_164 (Dropout) (None, 32, 32, 12) 0 conv2d_161[0][0]


concatenate_157 (Concatenate) (None, 32, 32, 60) 0 initial_conv2D[0][0]
dropout_164[0][0]


batch_normalization_168 (BatchN (None, 32, 32, 60) 240 concatenate_157[0][0]


activation_165 (Activation) (None, 32, 32, 60) 0 batch_normalization_168[0][0]


conv2d_162 (Conv2D) (None, 32, 32, 12) 6480 activation_165[0][0]


dropout_165 (Dropout) (None, 32, 32, 12) 0 conv2d_162[0][0]


concatenate_158 (Concatenate) (None, 32, 32, 72) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]


batch_normalization_169 (BatchN (None, 32, 32, 72) 288 concatenate_158[0][0]


activation_166 (Activation) (None, 32, 32, 72) 0 batch_normalization_169[0][0]


conv2d_163 (Conv2D) (None, 32, 32, 12) 7776 activation_166[0][0]


dropout_166 (Dropout) (None, 32, 32, 12) 0 conv2d_163[0][0]


concatenate_159 (Concatenate) (None, 32, 32, 84) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]


batch_normalization_170 (BatchN (None, 32, 32, 84) 336 concatenate_159[0][0]


activation_167 (Activation) (None, 32, 32, 84) 0 batch_normalization_170[0][0]


conv2d_164 (Conv2D) (None, 32, 32, 12) 9072 activation_167[0][0]


dropout_167 (Dropout) (None, 32, 32, 12) 0 conv2d_164[0][0]


concatenate_160 (Concatenate) (None, 32, 32, 96) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]


batch_normalization_171 (BatchN (None, 32, 32, 96) 384 concatenate_160[0][0]


activation_168 (Activation) (None, 32, 32, 96) 0 batch_normalization_171[0][0]


conv2d_165 (Conv2D) (None, 32, 32, 12) 10368 activation_168[0][0]


dropout_168 (Dropout) (None, 32, 32, 12) 0 conv2d_165[0][0]


concatenate_161 (Concatenate) (None, 32, 32, 108) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]


batch_normalization_172 (BatchN (None, 32, 32, 108) 432 concatenate_161[0][0]


activation_169 (Activation) (None, 32, 32, 108) 0 batch_normalization_172[0][0]


conv2d_166 (Conv2D) (None, 32, 32, 12) 11664 activation_169[0][0]


dropout_169 (Dropout) (None, 32, 32, 12) 0 conv2d_166[0][0]


concatenate_162 (Concatenate) (None, 32, 32, 120) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]
dropout_169[0][0]


batch_normalization_173 (BatchN (None, 32, 32, 120) 480 concatenate_162[0][0]


activation_170 (Activation) (None, 32, 32, 120) 0 batch_normalization_173[0][0]


conv2d_167 (Conv2D) (None, 32, 32, 12) 12960 activation_170[0][0]


dropout_170 (Dropout) (None, 32, 32, 12) 0 conv2d_167[0][0]


concatenate_163 (Concatenate) (None, 32, 32, 132) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]
dropout_169[0][0]
dropout_170[0][0]


batch_normalization_174 (BatchN (None, 32, 32, 132) 528 concatenate_163[0][0]


activation_171 (Activation) (None, 32, 32, 132) 0 batch_normalization_174[0][0]


conv2d_168 (Conv2D) (None, 32, 32, 12) 14256 activation_171[0][0]


dropout_171 (Dropout) (None, 32, 32, 12) 0 conv2d_168[0][0]


concatenate_164 (Concatenate) (None, 32, 32, 144) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]
dropout_169[0][0]
dropout_170[0][0]
dropout_171[0][0]


batch_normalization_175 (BatchN (None, 32, 32, 144) 576 concatenate_164[0][0]


activation_172 (Activation) (None, 32, 32, 144) 0 batch_normalization_175[0][0]


conv2d_169 (Conv2D) (None, 32, 32, 12) 15552 activation_172[0][0]


dropout_172 (Dropout) (None, 32, 32, 12) 0 conv2d_169[0][0]


concatenate_165 (Concatenate) (None, 32, 32, 156) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]
dropout_169[0][0]
dropout_170[0][0]
dropout_171[0][0]
dropout_172[0][0]


batch_normalization_176 (BatchN (None, 32, 32, 156) 624 concatenate_165[0][0]


activation_173 (Activation) (None, 32, 32, 156) 0 batch_normalization_176[0][0]


conv2d_170 (Conv2D) (None, 32, 32, 12) 16848 activation_173[0][0]


dropout_173 (Dropout) (None, 32, 32, 12) 0 conv2d_170[0][0]


concatenate_166 (Concatenate) (None, 32, 32, 168) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]
dropout_169[0][0]
dropout_170[0][0]
dropout_171[0][0]
dropout_172[0][0]
dropout_173[0][0]


batch_normalization_177 (BatchN (None, 32, 32, 168) 672 concatenate_166[0][0]


activation_174 (Activation) (None, 32, 32, 168) 0 batch_normalization_177[0][0]


conv2d_171 (Conv2D) (None, 32, 32, 12) 18144 activation_174[0][0]


dropout_174 (Dropout) (None, 32, 32, 12) 0 conv2d_171[0][0]


concatenate_167 (Concatenate) (None, 32, 32, 180) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]
dropout_169[0][0]
dropout_170[0][0]
dropout_171[0][0]
dropout_172[0][0]
dropout_173[0][0]
dropout_174[0][0]


batch_normalization_178 (BatchN (None, 32, 32, 180) 720 concatenate_167[0][0]


activation_175 (Activation) (None, 32, 32, 180) 0 batch_normalization_178[0][0]


conv2d_172 (Conv2D) (None, 32, 32, 12) 19440 activation_175[0][0]


dropout_175 (Dropout) (None, 32, 32, 12) 0 conv2d_172[0][0]


concatenate_168 (Concatenate) (None, 32, 32, 192) 0 initial_conv2D[0][0]
dropout_164[0][0]
dropout_165[0][0]
dropout_166[0][0]
dropout_167[0][0]
dropout_168[0][0]
dropout_169[0][0]
dropout_170[0][0]
dropout_171[0][0]
dropout_172[0][0]
dropout_173[0][0]
dropout_174[0][0]
dropout_175[0][0]


batch_normalization_179 (BatchN (None, 32, 32, 192) 768 concatenate_168[0][0]


activation_176 (Activation) (None, 32, 32, 192) 0 batch_normalization_179[0][0]


conv2d_173 (Conv2D) (None, 32, 32, 192) 36864 activation_176[0][0]


dropout_176 (Dropout) (None, 32, 32, 192) 0 conv2d_173[0][0]


average_pooling2d_5 (AveragePoo (None, 16, 16, 192) 0 dropout_176[0][0]


batch_normalization_180 (BatchN (None, 16, 16, 192) 768 average_pooling2d_5[0][0]


activation_177 (Activation) (None, 16, 16, 192) 0 batch_normalization_180[0][0]


conv2d_174 (Conv2D) (None, 16, 16, 12) 20736 activation_177[0][0]


dropout_177 (Dropout) (None, 16, 16, 12) 0 conv2d_174[0][0]


concatenate_169 (Concatenate) (None, 16, 16, 204) 0 average_pooling2d_5[0][0]
dropout_177[0][0]


batch_normalization_181 (BatchN (None, 16, 16, 204) 816 concatenate_169[0][0]


activation_178 (Activation) (None, 16, 16, 204) 0 batch_normalization_181[0][0]


conv2d_175 (Conv2D) (None, 16, 16, 12) 22032 activation_178[0][0]


dropout_178 (Dropout) (None, 16, 16, 12) 0 conv2d_175[0][0]


concatenate_170 (Concatenate) (None, 16, 16, 216) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]


batch_normalization_182 (BatchN (None, 16, 16, 216) 864 concatenate_170[0][0]


activation_179 (Activation) (None, 16, 16, 216) 0 batch_normalization_182[0][0]


conv2d_176 (Conv2D) (None, 16, 16, 12) 23328 activation_179[0][0]


dropout_179 (Dropout) (None, 16, 16, 12) 0 conv2d_176[0][0]


concatenate_171 (Concatenate) (None, 16, 16, 228) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]


batch_normalization_183 (BatchN (None, 16, 16, 228) 912 concatenate_171[0][0]


activation_180 (Activation) (None, 16, 16, 228) 0 batch_normalization_183[0][0]


conv2d_177 (Conv2D) (None, 16, 16, 12) 24624 activation_180[0][0]


dropout_180 (Dropout) (None, 16, 16, 12) 0 conv2d_177[0][0]


concatenate_172 (Concatenate) (None, 16, 16, 240) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]


batch_normalization_184 (BatchN (None, 16, 16, 240) 960 concatenate_172[0][0]


activation_181 (Activation) (None, 16, 16, 240) 0 batch_normalization_184[0][0]


conv2d_178 (Conv2D) (None, 16, 16, 12) 25920 activation_181[0][0]


dropout_181 (Dropout) (None, 16, 16, 12) 0 conv2d_178[0][0]


concatenate_173 (Concatenate) (None, 16, 16, 252) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]


batch_normalization_185 (BatchN (None, 16, 16, 252) 1008 concatenate_173[0][0]


activation_182 (Activation) (None, 16, 16, 252) 0 batch_normalization_185[0][0]


conv2d_179 (Conv2D) (None, 16, 16, 12) 27216 activation_182[0][0]


dropout_182 (Dropout) (None, 16, 16, 12) 0 conv2d_179[0][0]


concatenate_174 (Concatenate) (None, 16, 16, 264) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]
dropout_182[0][0]


batch_normalization_186 (BatchN (None, 16, 16, 264) 1056 concatenate_174[0][0]


activation_183 (Activation) (None, 16, 16, 264) 0 batch_normalization_186[0][0]


conv2d_180 (Conv2D) (None, 16, 16, 12) 28512 activation_183[0][0]


dropout_183 (Dropout) (None, 16, 16, 12) 0 conv2d_180[0][0]


concatenate_175 (Concatenate) (None, 16, 16, 276) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]
dropout_182[0][0]
dropout_183[0][0]


batch_normalization_187 (BatchN (None, 16, 16, 276) 1104 concatenate_175[0][0]


activation_184 (Activation) (None, 16, 16, 276) 0 batch_normalization_187[0][0]


conv2d_181 (Conv2D) (None, 16, 16, 12) 29808 activation_184[0][0]


dropout_184 (Dropout) (None, 16, 16, 12) 0 conv2d_181[0][0]


concatenate_176 (Concatenate) (None, 16, 16, 288) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]
dropout_182[0][0]
dropout_183[0][0]
dropout_184[0][0]


batch_normalization_188 (BatchN (None, 16, 16, 288) 1152 concatenate_176[0][0]


activation_185 (Activation) (None, 16, 16, 288) 0 batch_normalization_188[0][0]


conv2d_182 (Conv2D) (None, 16, 16, 12) 31104 activation_185[0][0]


dropout_185 (Dropout) (None, 16, 16, 12) 0 conv2d_182[0][0]


concatenate_177 (Concatenate) (None, 16, 16, 300) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]
dropout_182[0][0]
dropout_183[0][0]
dropout_184[0][0]
dropout_185[0][0]


batch_normalization_189 (BatchN (None, 16, 16, 300) 1200 concatenate_177[0][0]


activation_186 (Activation) (None, 16, 16, 300) 0 batch_normalization_189[0][0]


conv2d_183 (Conv2D) (None, 16, 16, 12) 32400 activation_186[0][0]


dropout_186 (Dropout) (None, 16, 16, 12) 0 conv2d_183[0][0]


concatenate_178 (Concatenate) (None, 16, 16, 312) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]
dropout_182[0][0]
dropout_183[0][0]
dropout_184[0][0]
dropout_185[0][0]
dropout_186[0][0]


batch_normalization_190 (BatchN (None, 16, 16, 312) 1248 concatenate_178[0][0]


activation_187 (Activation) (None, 16, 16, 312) 0 batch_normalization_190[0][0]


conv2d_184 (Conv2D) (None, 16, 16, 12) 33696 activation_187[0][0]


dropout_187 (Dropout) (None, 16, 16, 12) 0 conv2d_184[0][0]


concatenate_179 (Concatenate) (None, 16, 16, 324) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]
dropout_182[0][0]
dropout_183[0][0]
dropout_184[0][0]
dropout_185[0][0]
dropout_186[0][0]
dropout_187[0][0]


batch_normalization_191 (BatchN (None, 16, 16, 324) 1296 concatenate_179[0][0]


activation_188 (Activation) (None, 16, 16, 324) 0 batch_normalization_191[0][0]


conv2d_185 (Conv2D) (None, 16, 16, 12) 34992 activation_188[0][0]


dropout_188 (Dropout) (None, 16, 16, 12) 0 conv2d_185[0][0]


concatenate_180 (Concatenate) (None, 16, 16, 336) 0 average_pooling2d_5[0][0]
dropout_177[0][0]
dropout_178[0][0]
dropout_179[0][0]
dropout_180[0][0]
dropout_181[0][0]
dropout_182[0][0]
dropout_183[0][0]
dropout_184[0][0]
dropout_185[0][0]
dropout_186[0][0]
dropout_187[0][0]
dropout_188[0][0]


batch_normalization_192 (BatchN (None, 16, 16, 336) 1344 concatenate_180[0][0]


activation_189 (Activation) (None, 16, 16, 336) 0 batch_normalization_192[0][0]


conv2d_186 (Conv2D) (None, 16, 16, 336) 112896 activation_189[0][0]


dropout_189 (Dropout) (None, 16, 16, 336) 0 conv2d_186[0][0]


average_pooling2d_6 (AveragePoo (None, 8, 8, 336) 0 dropout_189[0][0]


batch_normalization_193 (BatchN (None, 8, 8, 336) 1344 average_pooling2d_6[0][0]


activation_190 (Activation) (None, 8, 8, 336) 0 batch_normalization_193[0][0]


conv2d_187 (Conv2D) (None, 8, 8, 12) 36288 activation_190[0][0]


dropout_190 (Dropout) (None, 8, 8, 12) 0 conv2d_187[0][0]


concatenate_181 (Concatenate) (None, 8, 8, 348) 0 average_pooling2d_6[0][0]
dropout_190[0][0]


batch_normalization_194 (BatchN (None, 8, 8, 348) 1392 concatenate_181[0][0]


activation_191 (Activation) (None, 8, 8, 348) 0 batch_normalization_194[0][0]


conv2d_188 (Conv2D) (None, 8, 8, 12) 37584 activation_191[0][0]


dropout_191 (Dropout) (None, 8, 8, 12) 0 conv2d_188[0][0]


concatenate_182 (Concatenate) (None, 8, 8, 360) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]


batch_normalization_195 (BatchN (None, 8, 8, 360) 1440 concatenate_182[0][0]


activation_192 (Activation) (None, 8, 8, 360) 0 batch_normalization_195[0][0]


conv2d_189 (Conv2D) (None, 8, 8, 12) 38880 activation_192[0][0]


dropout_192 (Dropout) (None, 8, 8, 12) 0 conv2d_189[0][0]


concatenate_183 (Concatenate) (None, 8, 8, 372) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]


batch_normalization_196 (BatchN (None, 8, 8, 372) 1488 concatenate_183[0][0]


activation_193 (Activation) (None, 8, 8, 372) 0 batch_normalization_196[0][0]


conv2d_190 (Conv2D) (None, 8, 8, 12) 40176 activation_193[0][0]


dropout_193 (Dropout) (None, 8, 8, 12) 0 conv2d_190[0][0]


concatenate_184 (Concatenate) (None, 8, 8, 384) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]


batch_normalization_197 (BatchN (None, 8, 8, 384) 1536 concatenate_184[0][0]


activation_194 (Activation) (None, 8, 8, 384) 0 batch_normalization_197[0][0]


conv2d_191 (Conv2D) (None, 8, 8, 12) 41472 activation_194[0][0]


dropout_194 (Dropout) (None, 8, 8, 12) 0 conv2d_191[0][0]


concatenate_185 (Concatenate) (None, 8, 8, 396) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]


batch_normalization_198 (BatchN (None, 8, 8, 396) 1584 concatenate_185[0][0]


activation_195 (Activation) (None, 8, 8, 396) 0 batch_normalization_198[0][0]


conv2d_192 (Conv2D) (None, 8, 8, 12) 42768 activation_195[0][0]


dropout_195 (Dropout) (None, 8, 8, 12) 0 conv2d_192[0][0]


concatenate_186 (Concatenate) (None, 8, 8, 408) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]
dropout_195[0][0]


batch_normalization_199 (BatchN (None, 8, 8, 408) 1632 concatenate_186[0][0]


activation_196 (Activation) (None, 8, 8, 408) 0 batch_normalization_199[0][0]


conv2d_193 (Conv2D) (None, 8, 8, 12) 44064 activation_196[0][0]


dropout_196 (Dropout) (None, 8, 8, 12) 0 conv2d_193[0][0]


concatenate_187 (Concatenate) (None, 8, 8, 420) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]
dropout_195[0][0]
dropout_196[0][0]


batch_normalization_200 (BatchN (None, 8, 8, 420) 1680 concatenate_187[0][0]


activation_197 (Activation) (None, 8, 8, 420) 0 batch_normalization_200[0][0]


conv2d_194 (Conv2D) (None, 8, 8, 12) 45360 activation_197[0][0]


dropout_197 (Dropout) (None, 8, 8, 12) 0 conv2d_194[0][0]


concatenate_188 (Concatenate) (None, 8, 8, 432) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]
dropout_195[0][0]
dropout_196[0][0]
dropout_197[0][0]


batch_normalization_201 (BatchN (None, 8, 8, 432) 1728 concatenate_188[0][0]


activation_198 (Activation) (None, 8, 8, 432) 0 batch_normalization_201[0][0]


conv2d_195 (Conv2D) (None, 8, 8, 12) 46656 activation_198[0][0]


dropout_198 (Dropout) (None, 8, 8, 12) 0 conv2d_195[0][0]


concatenate_189 (Concatenate) (None, 8, 8, 444) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]
dropout_195[0][0]
dropout_196[0][0]
dropout_197[0][0]
dropout_198[0][0]


batch_normalization_202 (BatchN (None, 8, 8, 444) 1776 concatenate_189[0][0]


activation_199 (Activation) (None, 8, 8, 444) 0 batch_normalization_202[0][0]


conv2d_196 (Conv2D) (None, 8, 8, 12) 47952 activation_199[0][0]


dropout_199 (Dropout) (None, 8, 8, 12) 0 conv2d_196[0][0]


concatenate_190 (Concatenate) (None, 8, 8, 456) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]
dropout_195[0][0]
dropout_196[0][0]
dropout_197[0][0]
dropout_198[0][0]
dropout_199[0][0]


batch_normalization_203 (BatchN (None, 8, 8, 456) 1824 concatenate_190[0][0]


activation_200 (Activation) (None, 8, 8, 456) 0 batch_normalization_203[0][0]


conv2d_197 (Conv2D) (None, 8, 8, 12) 49248 activation_200[0][0]


dropout_200 (Dropout) (None, 8, 8, 12) 0 conv2d_197[0][0]


concatenate_191 (Concatenate) (None, 8, 8, 468) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]
dropout_195[0][0]
dropout_196[0][0]
dropout_197[0][0]
dropout_198[0][0]
dropout_199[0][0]
dropout_200[0][0]


batch_normalization_204 (BatchN (None, 8, 8, 468) 1872 concatenate_191[0][0]


activation_201 (Activation) (None, 8, 8, 468) 0 batch_normalization_204[0][0]


conv2d_198 (Conv2D) (None, 8, 8, 12) 50544 activation_201[0][0]


dropout_201 (Dropout) (None, 8, 8, 12) 0 conv2d_198[0][0]


concatenate_192 (Concatenate) (None, 8, 8, 480) 0 average_pooling2d_6[0][0]
dropout_190[0][0]
dropout_191[0][0]
dropout_192[0][0]
dropout_193[0][0]
dropout_194[0][0]
dropout_195[0][0]
dropout_196[0][0]
dropout_197[0][0]
dropout_198[0][0]
dropout_199[0][0]
dropout_200[0][0]
dropout_201[0][0]


batch_normalization_205 (BatchN (None, 8, 8, 480) 1920 concatenate_192[0][0]


activation_202 (Activation) (None, 8, 8, 480) 0 batch_normalization_205[0][0]


global_average_pooling2d_4 (Glo (None, 480) 0 activation_202[0][0]


dense_7 (Dense) (None, 10) 4810 global_average_pooling2d_4[0][0]

Total params: 1,199,290 Trainable params: 1,178,698 Non-trainable params: 20,592