felixseriksson opened this issue 5 years ago
From model.summary() of UXception:

```
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 128, 128, 3) 0
__________________________________________________________________________________________________
block1_conv1 (Conv2D) (None, 63, 63, 32) 864 input_1[0][0]
__________________________________________________________________________________________________
block1_conv1_bn (BatchNormaliza (None, 63, 63, 32) 128 block1_conv1[0][0]
__________________________________________________________________________________________________
block1_conv1_act (Activation) (None, 63, 63, 32) 0 block1_conv1_bn[0][0]
__________________________________________________________________________________________________
block1_conv2 (Conv2D) (None, 61, 61, 64) 18432 block1_conv1_act[0][0]
__________________________________________________________________________________________________
block1_conv2_bn (BatchNormaliza (None, 61, 61, 64) 256 block1_conv2[0][0]
__________________________________________________________________________________________________
block1_conv2_act (Activation) (None, 61, 61, 64) 0 block1_conv2_bn[0][0]
__________________________________________________________________________________________________
block2_sepconv1 (SeparableConv2 (None, 61, 61, 128) 8768 block1_conv2_act[0][0]
__________________________________________________________________________________________________
block2_sepconv1_bn (BatchNormal (None, 61, 61, 128) 512 block2_sepconv1[0][0]
__________________________________________________________________________________________________
block2_sepconv2_act (Activation (None, 61, 61, 128) 0 block2_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block2_sepconv2 (SeparableConv2 (None, 61, 61, 128) 17536 block2_sepconv2_act[0][0]
__________________________________________________________________________________________________
block2_sepconv2_bn (BatchNormal (None, 61, 61, 128) 512 block2_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d (Conv2D) (None, 31, 31, 128) 8192 block1_conv2_act[0][0]
__________________________________________________________________________________________________
block2_pool (MaxPooling2D) (None, 31, 31, 128) 0 block2_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 31, 31, 128) 512 conv2d[0][0]
__________________________________________________________________________________________________
add (Add) (None, 31, 31, 128) 0 block2_pool[0][0]
batch_normalization[0][0]
__________________________________________________________________________________________________
block3_sepconv1_act (Activation (None, 31, 31, 128) 0 add[0][0]
__________________________________________________________________________________________________
block3_sepconv1 (SeparableConv2 (None, 31, 31, 256) 33920 block3_sepconv1_act[0][0]
__________________________________________________________________________________________________
block3_sepconv1_bn (BatchNormal (None, 31, 31, 256) 1024 block3_sepconv1[0][0]
__________________________________________________________________________________________________
block3_sepconv2_act (Activation (None, 31, 31, 256) 0 block3_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block3_sepconv2 (SeparableConv2 (None, 31, 31, 256) 67840 block3_sepconv2_act[0][0]
__________________________________________________________________________________________________
block3_sepconv2_bn (BatchNormal (None, 31, 31, 256) 1024 block3_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_1 (Conv2D) (None, 16, 16, 256) 32768 add[0][0]
__________________________________________________________________________________________________
block3_pool (MaxPooling2D) (None, 16, 16, 256) 0 block3_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 16, 16, 256) 1024 conv2d_1[0][0]
__________________________________________________________________________________________________
add_1 (Add) (None, 16, 16, 256) 0 block3_pool[0][0]
batch_normalization_1[0][0]
__________________________________________________________________________________________________
block4_sepconv1_act (Activation (None, 16, 16, 256) 0 add_1[0][0]
__________________________________________________________________________________________________
block4_sepconv1 (SeparableConv2 (None, 16, 16, 728) 188672 block4_sepconv1_act[0][0]
__________________________________________________________________________________________________
block4_sepconv1_bn (BatchNormal (None, 16, 16, 728) 2912 block4_sepconv1[0][0]
__________________________________________________________________________________________________
block4_sepconv2_act (Activation (None, 16, 16, 728) 0 block4_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block4_sepconv2 (SeparableConv2 (None, 16, 16, 728) 536536 block4_sepconv2_act[0][0]
__________________________________________________________________________________________________
block4_sepconv2_bn (BatchNormal (None, 16, 16, 728) 2912 block4_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_2 (Conv2D) (None, 8, 8, 728) 186368 add_1[0][0]
__________________________________________________________________________________________________
block4_pool (MaxPooling2D) (None, 8, 8, 728) 0 block4_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 8, 8, 728) 2912 conv2d_2[0][0]
__________________________________________________________________________________________________
add_2 (Add) (None, 8, 8, 728) 0 block4_pool[0][0]
batch_normalization_2[0][0]
__________________________________________________________________________________________________
block5_sepconv1_act (Activation (None, 8, 8, 728) 0 add_2[0][0]
__________________________________________________________________________________________________
block5_sepconv1 (SeparableConv2 (None, 8, 8, 728) 536536 block5_sepconv1_act[0][0]
__________________________________________________________________________________________________
block5_sepconv1_bn (BatchNormal (None, 8, 8, 728) 2912 block5_sepconv1[0][0]
__________________________________________________________________________________________________
block5_sepconv2_act (Activation (None, 8, 8, 728) 0 block5_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block5_sepconv2 (SeparableConv2 (None, 8, 8, 728) 536536 block5_sepconv2_act[0][0]
__________________________________________________________________________________________________
block5_sepconv2_bn (BatchNormal (None, 8, 8, 728) 2912 block5_sepconv2[0][0]
__________________________________________________________________________________________________
block5_sepconv3_act (Activation (None, 8, 8, 728) 0 block5_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block5_sepconv3 (SeparableConv2 (None, 8, 8, 728) 536536 block5_sepconv3_act[0][0]
__________________________________________________________________________________________________
block5_sepconv3_bn (BatchNormal (None, 8, 8, 728) 2912 block5_sepconv3[0][0]
__________________________________________________________________________________________________
add_3 (Add) (None, 8, 8, 728) 0 block5_sepconv3_bn[0][0]
add_2[0][0]
__________________________________________________________________________________________________
block6_sepconv1_act (Activation (None, 8, 8, 728) 0 add_3[0][0]
__________________________________________________________________________________________________
block6_sepconv1 (SeparableConv2 (None, 8, 8, 728) 536536 block6_sepconv1_act[0][0]
__________________________________________________________________________________________________
block6_sepconv1_bn (BatchNormal (None, 8, 8, 728) 2912 block6_sepconv1[0][0]
__________________________________________________________________________________________________
block6_sepconv2_act (Activation (None, 8, 8, 728) 0 block6_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block6_sepconv2 (SeparableConv2 (None, 8, 8, 728) 536536 block6_sepconv2_act[0][0]
__________________________________________________________________________________________________
block6_sepconv2_bn (BatchNormal (None, 8, 8, 728) 2912 block6_sepconv2[0][0]
__________________________________________________________________________________________________
block6_sepconv3_act (Activation (None, 8, 8, 728) 0 block6_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block6_sepconv3 (SeparableConv2 (None, 8, 8, 728) 536536 block6_sepconv3_act[0][0]
__________________________________________________________________________________________________
block6_sepconv3_bn (BatchNormal (None, 8, 8, 728) 2912 block6_sepconv3[0][0]
__________________________________________________________________________________________________
add_4 (Add) (None, 8, 8, 728) 0 block6_sepconv3_bn[0][0]
add_3[0][0]
__________________________________________________________________________________________________
block7_sepconv1_act (Activation (None, 8, 8, 728) 0 add_4[0][0]
__________________________________________________________________________________________________
block7_sepconv1 (SeparableConv2 (None, 8, 8, 728) 536536 block7_sepconv1_act[0][0]
__________________________________________________________________________________________________
block7_sepconv1_bn (BatchNormal (None, 8, 8, 728) 2912 block7_sepconv1[0][0]
__________________________________________________________________________________________________
block7_sepconv2_act (Activation (None, 8, 8, 728) 0 block7_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block7_sepconv2 (SeparableConv2 (None, 8, 8, 728) 536536 block7_sepconv2_act[0][0]
__________________________________________________________________________________________________
block7_sepconv2_bn (BatchNormal (None, 8, 8, 728) 2912 block7_sepconv2[0][0]
__________________________________________________________________________________________________
block7_sepconv3_act (Activation (None, 8, 8, 728) 0 block7_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block7_sepconv3 (SeparableConv2 (None, 8, 8, 728) 536536 block7_sepconv3_act[0][0]
__________________________________________________________________________________________________
block7_sepconv3_bn (BatchNormal (None, 8, 8, 728) 2912 block7_sepconv3[0][0]
__________________________________________________________________________________________________
add_5 (Add) (None, 8, 8, 728) 0 block7_sepconv3_bn[0][0]
add_4[0][0]
__________________________________________________________________________________________________
block8_sepconv1_act (Activation (None, 8, 8, 728) 0 add_5[0][0]
__________________________________________________________________________________________________
block8_sepconv1 (SeparableConv2 (None, 8, 8, 728) 536536 block8_sepconv1_act[0][0]
__________________________________________________________________________________________________
block8_sepconv1_bn (BatchNormal (None, 8, 8, 728) 2912 block8_sepconv1[0][0]
__________________________________________________________________________________________________
block8_sepconv2_act (Activation (None, 8, 8, 728) 0 block8_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block8_sepconv2 (SeparableConv2 (None, 8, 8, 728) 536536 block8_sepconv2_act[0][0]
__________________________________________________________________________________________________
block8_sepconv2_bn (BatchNormal (None, 8, 8, 728) 2912 block8_sepconv2[0][0]
__________________________________________________________________________________________________
block8_sepconv3_act (Activation (None, 8, 8, 728) 0 block8_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block8_sepconv3 (SeparableConv2 (None, 8, 8, 728) 536536 block8_sepconv3_act[0][0]
__________________________________________________________________________________________________
block8_sepconv3_bn (BatchNormal (None, 8, 8, 728) 2912 block8_sepconv3[0][0]
__________________________________________________________________________________________________
add_6 (Add) (None, 8, 8, 728) 0 block8_sepconv3_bn[0][0]
add_5[0][0]
__________________________________________________________________________________________________
block9_sepconv1_act (Activation (None, 8, 8, 728) 0 add_6[0][0]
__________________________________________________________________________________________________
block9_sepconv1 (SeparableConv2 (None, 8, 8, 728) 536536 block9_sepconv1_act[0][0]
__________________________________________________________________________________________________
block9_sepconv1_bn (BatchNormal (None, 8, 8, 728) 2912 block9_sepconv1[0][0]
__________________________________________________________________________________________________
block9_sepconv2_act (Activation (None, 8, 8, 728) 0 block9_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block9_sepconv2 (SeparableConv2 (None, 8, 8, 728) 536536 block9_sepconv2_act[0][0]
__________________________________________________________________________________________________
block9_sepconv2_bn (BatchNormal (None, 8, 8, 728) 2912 block9_sepconv2[0][0]
__________________________________________________________________________________________________
block9_sepconv3_act (Activation (None, 8, 8, 728) 0 block9_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block9_sepconv3 (SeparableConv2 (None, 8, 8, 728) 536536 block9_sepconv3_act[0][0]
__________________________________________________________________________________________________
block9_sepconv3_bn (BatchNormal (None, 8, 8, 728) 2912 block9_sepconv3[0][0]
__________________________________________________________________________________________________
add_7 (Add) (None, 8, 8, 728) 0 block9_sepconv3_bn[0][0]
add_6[0][0]
__________________________________________________________________________________________________
block10_sepconv1_act (Activatio (None, 8, 8, 728) 0 add_7[0][0]
__________________________________________________________________________________________________
block10_sepconv1 (SeparableConv (None, 8, 8, 728) 536536 block10_sepconv1_act[0][0]
__________________________________________________________________________________________________
block10_sepconv1_bn (BatchNorma (None, 8, 8, 728) 2912 block10_sepconv1[0][0]
__________________________________________________________________________________________________
block10_sepconv2_act (Activatio (None, 8, 8, 728) 0 block10_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block10_sepconv2 (SeparableConv (None, 8, 8, 728) 536536 block10_sepconv2_act[0][0]
__________________________________________________________________________________________________
block10_sepconv2_bn (BatchNorma (None, 8, 8, 728) 2912 block10_sepconv2[0][0]
__________________________________________________________________________________________________
block10_sepconv3_act (Activatio (None, 8, 8, 728) 0 block10_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block10_sepconv3 (SeparableConv (None, 8, 8, 728) 536536 block10_sepconv3_act[0][0]
__________________________________________________________________________________________________
block10_sepconv3_bn (BatchNorma (None, 8, 8, 728) 2912 block10_sepconv3[0][0]
__________________________________________________________________________________________________
add_8 (Add) (None, 8, 8, 728) 0 block10_sepconv3_bn[0][0]
add_7[0][0]
__________________________________________________________________________________________________
block11_sepconv1_act (Activatio (None, 8, 8, 728) 0 add_8[0][0]
__________________________________________________________________________________________________
block11_sepconv1 (SeparableConv (None, 8, 8, 728) 536536 block11_sepconv1_act[0][0]
__________________________________________________________________________________________________
block11_sepconv1_bn (BatchNorma (None, 8, 8, 728) 2912 block11_sepconv1[0][0]
__________________________________________________________________________________________________
block11_sepconv2_act (Activatio (None, 8, 8, 728) 0 block11_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block11_sepconv2 (SeparableConv (None, 8, 8, 728) 536536 block11_sepconv2_act[0][0]
__________________________________________________________________________________________________
block11_sepconv2_bn (BatchNorma (None, 8, 8, 728) 2912 block11_sepconv2[0][0]
__________________________________________________________________________________________________
block11_sepconv3_act (Activatio (None, 8, 8, 728) 0 block11_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block11_sepconv3 (SeparableConv (None, 8, 8, 728) 536536 block11_sepconv3_act[0][0]
__________________________________________________________________________________________________
block11_sepconv3_bn (BatchNorma (None, 8, 8, 728) 2912 block11_sepconv3[0][0]
__________________________________________________________________________________________________
add_9 (Add) (None, 8, 8, 728) 0 block11_sepconv3_bn[0][0]
add_8[0][0]
__________________________________________________________________________________________________
block12_sepconv1_act (Activatio (None, 8, 8, 728) 0 add_9[0][0]
__________________________________________________________________________________________________
block12_sepconv1 (SeparableConv (None, 8, 8, 728) 536536 block12_sepconv1_act[0][0]
__________________________________________________________________________________________________
block12_sepconv1_bn (BatchNorma (None, 8, 8, 728) 2912 block12_sepconv1[0][0]
__________________________________________________________________________________________________
block12_sepconv2_act (Activatio (None, 8, 8, 728) 0 block12_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block12_sepconv2 (SeparableConv (None, 8, 8, 728) 536536 block12_sepconv2_act[0][0]
__________________________________________________________________________________________________
block12_sepconv2_bn (BatchNorma (None, 8, 8, 728) 2912 block12_sepconv2[0][0]
__________________________________________________________________________________________________
block12_sepconv3_act (Activatio (None, 8, 8, 728) 0 block12_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block12_sepconv3 (SeparableConv (None, 8, 8, 728) 536536 block12_sepconv3_act[0][0]
__________________________________________________________________________________________________
block12_sepconv3_bn (BatchNorma (None, 8, 8, 728) 2912 block12_sepconv3[0][0]
__________________________________________________________________________________________________
add_10 (Add) (None, 8, 8, 728) 0 block12_sepconv3_bn[0][0]
add_9[0][0]
__________________________________________________________________________________________________
block13_sepconv1_act (Activatio (None, 8, 8, 728) 0 add_10[0][0]
__________________________________________________________________________________________________
block13_sepconv1 (SeparableConv (None, 8, 8, 728) 536536 block13_sepconv1_act[0][0]
__________________________________________________________________________________________________
block13_sepconv1_bn (BatchNorma (None, 8, 8, 728) 2912 block13_sepconv1[0][0]
__________________________________________________________________________________________________
block13_sepconv2_act (Activatio (None, 8, 8, 728) 0 block13_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block13_sepconv2 (SeparableConv (None, 8, 8, 1024) 752024 block13_sepconv2_act[0][0]
__________________________________________________________________________________________________
block13_sepconv2_bn (BatchNorma (None, 8, 8, 1024) 4096 block13_sepconv2[0][0]
__________________________________________________________________________________________________
leaky_re_lu (LeakyReLU) (None, 8, 8, 1024) 0 block13_sepconv2_bn[0][0]
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 4, 4, 1024) 0 leaky_re_lu[0][0]
__________________________________________________________________________________________________
dropout (Dropout) (None, 4, 4, 1024) 0 max_pooling2d[0][0]
__________________________________________________________________________________________________
conv2d_4 (Conv2D) (None, 4, 4, 512) 4719104 dropout[0][0]
__________________________________________________________________________________________________
leaky_re_lu_1 (LeakyReLU) (None, 4, 4, 512) 0 conv2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 4, 4, 512) 2048 leaky_re_lu_1[0][0]
__________________________________________________________________________________________________
conv2d_5 (Conv2D) (None, 4, 4, 512) 2359808 batch_normalization_4[0][0]
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 4, 4, 512) 2048 conv2d_5[0][0]
__________________________________________________________________________________________________
leaky_re_lu_2 (LeakyReLU) (None, 4, 4, 512) 0 batch_normalization_6[0][0]
__________________________________________________________________________________________________
conv2d_6 (Conv2D) (None, 4, 4, 512) 2359808 leaky_re_lu_2[0][0]
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 4, 4, 512) 2048 conv2d_6[0][0]
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 4, 4, 512) 2048 conv2d_4[0][0]
__________________________________________________________________________________________________
add_12 (Add) (None, 4, 4, 512) 0 batch_normalization_7[0][0]
batch_normalization_5[0][0]
__________________________________________________________________________________________________
leaky_re_lu_3 (LeakyReLU) (None, 4, 4, 512) 0 add_12[0][0]
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 4, 4, 512) 2048 leaky_re_lu_3[0][0]
__________________________________________________________________________________________________
conv2d_7 (Conv2D) (None, 4, 4, 512) 2359808 batch_normalization_8[0][0]
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 4, 4, 512) 2048 conv2d_7[0][0]
__________________________________________________________________________________________________
leaky_re_lu_4 (LeakyReLU) (None, 4, 4, 512) 0 batch_normalization_10[0][0]
__________________________________________________________________________________________________
conv2d_8 (Conv2D) (None, 4, 4, 512) 2359808 leaky_re_lu_4[0][0]
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 4, 4, 512) 2048 conv2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 4, 4, 512) 2048 add_12[0][0]
__________________________________________________________________________________________________
add_13 (Add) (None, 4, 4, 512) 0 batch_normalization_11[0][0]
batch_normalization_9[0][0]
__________________________________________________________________________________________________
leaky_re_lu_5 (LeakyReLU) (None, 4, 4, 512) 0 add_13[0][0]
__________________________________________________________________________________________________
conv2d_transpose (Conv2DTranspo (None, 8, 8, 256) 1179904 leaky_re_lu_5[0][0]
__________________________________________________________________________________________________
concatenate (Concatenate) (None, 8, 8, 1280) 0 conv2d_transpose[0][0]
leaky_re_lu[0][0]
__________________________________________________________________________________________________
dropout_1 (Dropout) (None, 8, 8, 1280) 0 concatenate[0][0]
__________________________________________________________________________________________________
conv2d_9 (Conv2D) (None, 8, 8, 256) 2949376 dropout_1[0][0]
__________________________________________________________________________________________________
leaky_re_lu_6 (LeakyReLU) (None, 8, 8, 256) 0 conv2d_9[0][0]
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 8, 8, 256) 1024 leaky_re_lu_6[0][0]
__________________________________________________________________________________________________
conv2d_10 (Conv2D) (None, 8, 8, 256) 590080 batch_normalization_12[0][0]
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 8, 8, 256) 1024 conv2d_10[0][0]
__________________________________________________________________________________________________
leaky_re_lu_7 (LeakyReLU) (None, 8, 8, 256) 0 batch_normalization_14[0][0]
__________________________________________________________________________________________________
conv2d_11 (Conv2D) (None, 8, 8, 256) 590080 leaky_re_lu_7[0][0]
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 8, 8, 256) 1024 conv2d_11[0][0]
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 8, 8, 256) 1024 conv2d_9[0][0]
__________________________________________________________________________________________________
add_14 (Add) (None, 8, 8, 256) 0 batch_normalization_15[0][0]
batch_normalization_13[0][0]
__________________________________________________________________________________________________
leaky_re_lu_8 (LeakyReLU) (None, 8, 8, 256) 0 add_14[0][0]
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 8, 8, 256) 1024 leaky_re_lu_8[0][0]
__________________________________________________________________________________________________
conv2d_12 (Conv2D) (None, 8, 8, 256) 590080 batch_normalization_16[0][0]
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 8, 8, 256) 1024 conv2d_12[0][0]
__________________________________________________________________________________________________
leaky_re_lu_9 (LeakyReLU) (None, 8, 8, 256) 0 batch_normalization_18[0][0]
__________________________________________________________________________________________________
conv2d_13 (Conv2D) (None, 8, 8, 256) 590080 leaky_re_lu_9[0][0]
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 8, 8, 256) 1024 conv2d_13[0][0]
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 8, 8, 256) 1024 add_14[0][0]
__________________________________________________________________________________________________
add_15 (Add) (None, 8, 8, 256) 0 batch_normalization_19[0][0]
batch_normalization_17[0][0]
__________________________________________________________________________________________________
leaky_re_lu_10 (LeakyReLU) (None, 8, 8, 256) 0 add_15[0][0]
__________________________________________________________________________________________________
conv2d_transpose_1 (Conv2DTrans (None, 16, 16, 128) 295040 leaky_re_lu_10[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, 16, 16, 856) 0 conv2d_transpose_1[0][0]
block4_sepconv2_bn[0][0]
__________________________________________________________________________________________________
dropout_2 (Dropout) (None, 16, 16, 856) 0 concatenate_1[0][0]
__________________________________________________________________________________________________
conv2d_14 (Conv2D) (None, 16, 16, 128) 986240 dropout_2[0][0]
__________________________________________________________________________________________________
leaky_re_lu_11 (LeakyReLU) (None, 16, 16, 128) 0 conv2d_14[0][0]
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 16, 16, 128) 512 leaky_re_lu_11[0][0]
__________________________________________________________________________________________________
conv2d_15 (Conv2D) (None, 16, 16, 128) 147584 batch_normalization_20[0][0]
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 16, 16, 128) 512 conv2d_15[0][0]
__________________________________________________________________________________________________
leaky_re_lu_12 (LeakyReLU) (None, 16, 16, 128) 0 batch_normalization_22[0][0]
__________________________________________________________________________________________________
conv2d_16 (Conv2D) (None, 16, 16, 128) 147584 leaky_re_lu_12[0][0]
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 16, 16, 128) 512 conv2d_16[0][0]
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 16, 16, 128) 512 conv2d_14[0][0]
__________________________________________________________________________________________________
add_16 (Add) (None, 16, 16, 128) 0 batch_normalization_23[0][0]
batch_normalization_21[0][0]
__________________________________________________________________________________________________
leaky_re_lu_13 (LeakyReLU) (None, 16, 16, 128) 0 add_16[0][0]
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 16, 16, 128) 512 leaky_re_lu_13[0][0]
__________________________________________________________________________________________________
conv2d_17 (Conv2D) (None, 16, 16, 128) 147584 batch_normalization_24[0][0]
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 16, 16, 128) 512 conv2d_17[0][0]
__________________________________________________________________________________________________
leaky_re_lu_14 (LeakyReLU) (None, 16, 16, 128) 0 batch_normalization_26[0][0]
__________________________________________________________________________________________________
conv2d_18 (Conv2D) (None, 16, 16, 128) 147584 leaky_re_lu_14[0][0]
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 16, 16, 128) 512 conv2d_18[0][0]
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 16, 16, 128) 512 add_16[0][0]
__________________________________________________________________________________________________
add_17 (Add) (None, 16, 16, 128) 0 batch_normalization_27[0][0]
batch_normalization_25[0][0]
__________________________________________________________________________________________________
leaky_re_lu_15 (LeakyReLU) (None, 16, 16, 128) 0 add_17[0][0]
__________________________________________________________________________________________________
conv2d_transpose_2 (Conv2DTrans (None, 32, 32, 64) 73792 leaky_re_lu_15[0][0]
__________________________________________________________________________________________________
zero_padding2d (ZeroPadding2D) (None, 32, 32, 256) 0 block3_sepconv2_bn[0][0]
__________________________________________________________________________________________________
concatenate_2 (Concatenate) (None, 32, 32, 320) 0 conv2d_transpose_2[0][0]
zero_padding2d[0][0]
__________________________________________________________________________________________________
dropout_3 (Dropout) (None, 32, 32, 320) 0 concatenate_2[0][0]
__________________________________________________________________________________________________
conv2d_19 (Conv2D) (None, 32, 32, 64) 184384 dropout_3[0][0]
__________________________________________________________________________________________________
leaky_re_lu_16 (LeakyReLU) (None, 32, 32, 64) 0 conv2d_19[0][0]
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 32, 32, 64) 256 leaky_re_lu_16[0][0]
__________________________________________________________________________________________________
conv2d_20 (Conv2D) (None, 32, 32, 64) 36928 batch_normalization_28[0][0]
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 32, 32, 64) 256 conv2d_20[0][0]
__________________________________________________________________________________________________
leaky_re_lu_17 (LeakyReLU) (None, 32, 32, 64) 0 batch_normalization_30[0][0]
__________________________________________________________________________________________________
conv2d_21 (Conv2D) (None, 32, 32, 64) 36928 leaky_re_lu_17[0][0]
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 32, 32, 64) 256 conv2d_21[0][0]
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 32, 32, 64) 256 conv2d_19[0][0]
__________________________________________________________________________________________________
add_18 (Add) (None, 32, 32, 64) 0 batch_normalization_31[0][0]
batch_normalization_29[0][0]
__________________________________________________________________________________________________
leaky_re_lu_18 (LeakyReLU) (None, 32, 32, 64) 0 add_18[0][0]
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 32, 32, 64) 256 leaky_re_lu_18[0][0]
__________________________________________________________________________________________________
conv2d_22 (Conv2D) (None, 32, 32, 64) 36928 batch_normalization_32[0][0]
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 32, 32, 64) 256 conv2d_22[0][0]
__________________________________________________________________________________________________
leaky_re_lu_19 (LeakyReLU) (None, 32, 32, 64) 0 batch_normalization_34[0][0]
__________________________________________________________________________________________________
conv2d_23 (Conv2D) (None, 32, 32, 64) 36928 leaky_re_lu_19[0][0]
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 32, 32, 64) 256 conv2d_23[0][0]
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 32, 32, 64) 256 add_18[0][0]
__________________________________________________________________________________________________
add_19 (Add) (None, 32, 32, 64) 0 batch_normalization_35[0][0]
batch_normalization_33[0][0]
__________________________________________________________________________________________________
leaky_re_lu_20 (LeakyReLU) (None, 32, 32, 64) 0 add_19[0][0]
__________________________________________________________________________________________________
conv2d_transpose_3 (Conv2DTrans (None, 64, 64, 32) 18464 leaky_re_lu_20[0][0]
__________________________________________________________________________________________________
zero_padding2d_1 (ZeroPadding2D (None, 64, 64, 128) 0 block2_sepconv2_bn[0][0]
__________________________________________________________________________________________________
concatenate_3 (Concatenate) (None, 64, 64, 160) 0 conv2d_transpose_3[0][0]
zero_padding2d_1[0][0]
__________________________________________________________________________________________________
dropout_4 (Dropout) (None, 64, 64, 160) 0 concatenate_3[0][0]
__________________________________________________________________________________________________
conv2d_24 (Conv2D) (None, 64, 64, 32) 46112 dropout_4[0][0]
__________________________________________________________________________________________________
leaky_re_lu_21 (LeakyReLU) (None, 64, 64, 32) 0 conv2d_24[0][0]
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 64, 64, 32) 128 leaky_re_lu_21[0][0]
__________________________________________________________________________________________________
conv2d_25 (Conv2D) (None, 64, 64, 32) 9248 batch_normalization_36[0][0]
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 64, 64, 32) 128 conv2d_25[0][0]
__________________________________________________________________________________________________
leaky_re_lu_22 (LeakyReLU) (None, 64, 64, 32) 0 batch_normalization_38[0][0]
__________________________________________________________________________________________________
conv2d_26 (Conv2D) (None, 64, 64, 32) 9248 leaky_re_lu_22[0][0]
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 64, 64, 32) 128 conv2d_26[0][0]
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 64, 64, 32) 128 conv2d_24[0][0]
__________________________________________________________________________________________________
add_20 (Add) (None, 64, 64, 32) 0 batch_normalization_39[0][0]
batch_normalization_37[0][0]
__________________________________________________________________________________________________
leaky_re_lu_23 (LeakyReLU) (None, 64, 64, 32) 0 add_20[0][0]
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 64, 64, 32) 128 leaky_re_lu_23[0][0]
__________________________________________________________________________________________________
conv2d_27 (Conv2D) (None, 64, 64, 32) 9248 batch_normalization_40[0][0]
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 64, 64, 32) 128 conv2d_27[0][0]
__________________________________________________________________________________________________
leaky_re_lu_24 (LeakyReLU) (None, 64, 64, 32) 0 batch_normalization_42[0][0]
__________________________________________________________________________________________________
conv2d_28 (Conv2D) (None, 64, 64, 32) 9248 leaky_re_lu_24[0][0]
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 64, 64, 32) 128 conv2d_28[0][0]
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 64, 64, 32) 128 add_20[0][0]
__________________________________________________________________________________________________
add_21 (Add) (None, 64, 64, 32) 0 batch_normalization_43[0][0]
batch_normalization_41[0][0]
__________________________________________________________________________________________________
leaky_re_lu_25 (LeakyReLU) (None, 64, 64, 32) 0 add_21[0][0]
__________________________________________________________________________________________________
conv2d_transpose_4 (Conv2DTrans (None, 128, 128, 16) 4624 leaky_re_lu_25[0][0]
__________________________________________________________________________________________________
dropout_5 (Dropout) (None, 128, 128, 16) 0 conv2d_transpose_4[0][0]
__________________________________________________________________________________________________
conv2d_29 (Conv2D) (None, 128, 128, 16) 2320 dropout_5[0][0]
__________________________________________________________________________________________________
leaky_re_lu_26 (LeakyReLU) (None, 128, 128, 16) 0 conv2d_29[0][0]
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 128, 128, 16) 64 leaky_re_lu_26[0][0]
__________________________________________________________________________________________________
conv2d_30 (Conv2D) (None, 128, 128, 16) 2320 batch_normalization_44[0][0]
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 128, 128, 16) 64 conv2d_30[0][0]
__________________________________________________________________________________________________
leaky_re_lu_27 (LeakyReLU) (None, 128, 128, 16) 0 batch_normalization_46[0][0]
__________________________________________________________________________________________________
conv2d_31 (Conv2D) (None, 128, 128, 16) 2320 leaky_re_lu_27[0][0]
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 128, 128, 16) 64 conv2d_31[0][0]
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 128, 128, 16) 64 conv2d_29[0][0]
__________________________________________________________________________________________________
add_22 (Add) (None, 128, 128, 16) 0 batch_normalization_47[0][0]
batch_normalization_45[0][0]
__________________________________________________________________________________________________
leaky_re_lu_28 (LeakyReLU) (None, 128, 128, 16) 0 add_22[0][0]
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 128, 128, 16) 64 leaky_re_lu_28[0][0]
__________________________________________________________________________________________________
conv2d_32 (Conv2D) (None, 128, 128, 16) 2320 batch_normalization_48[0][0]
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 128, 128, 16) 64 conv2d_32[0][0]
__________________________________________________________________________________________________
leaky_re_lu_29 (LeakyReLU) (None, 128, 128, 16) 0 batch_normalization_50[0][0]
__________________________________________________________________________________________________
conv2d_33 (Conv2D) (None, 128, 128, 16) 2320 leaky_re_lu_29[0][0]
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 128, 128, 16) 64 conv2d_33[0][0]
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 128, 128, 16) 64 add_22[0][0]
__________________________________________________________________________________________________
add_23 (Add) (None, 128, 128, 16) 0 batch_normalization_51[0][0]
batch_normalization_49[0][0]
__________________________________________________________________________________________________
leaky_re_lu_30 (LeakyReLU) (None, 128, 128, 16) 0 add_23[0][0]
__________________________________________________________________________________________________
dropout_6 (Dropout) (None, 128, 128, 16) 0 leaky_re_lu_30[0][0]
__________________________________________________________________________________________________
conv2d_34 (Conv2D) (None, 128, 128, 1) 17 dropout_6[0][0]
==================================================================================================
Total params: 38,431,449
Trainable params: 38,370,009
Non-trainable params: 61,440
__________________________________________________________________________________________________
```
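
For reference, below is a minimal sketch of how an Xception-encoder U-Net with this shape progression could be assembled in tf.keras. It is an illustrative reconstruction based only on the summary above, not the actual UXception source: `build_uxception`, the decoder widths, the dropout rates, `weights=None`, and the final sigmoid output are all assumptions, and the residual conv/BN sub-blocks of the decoder are simplified away.

```python
# Sketch of an Xception-encoder U-Net ("UXception"-style) model whose summary
# follows the same shape progression as above. Assumes TensorFlow 2.x / tf.keras;
# decoder details, dropout rates and weights=None are illustrative guesses.
from tensorflow.keras import layers, Model
from tensorflow.keras.applications import Xception


def build_uxception(input_shape=(128, 128, 3)):
    backbone = Xception(include_top=False, weights=None, input_shape=input_shape)

    # Encoder feature maps reused as skip connections (layer names as in the summary).
    skip_names = ("block2_sepconv2_bn", "block3_sepconv2_bn",
                  "block4_sepconv2_bn", "block13_sepconv2_bn")
    skips = [backbone.get_layer(n).output for n in skip_names]

    # Bottleneck: truncate the backbone at block13_sepconv2_bn, as the summary suggests.
    x = layers.LeakyReLU()(skips[-1])                    # 8x8x1024
    x = layers.MaxPooling2D(2)(x)                        # 4x4x1024
    x = layers.Dropout(0.3)(x)
    x = layers.Conv2D(512, 3, padding="same")(x)         # conv2d_4 in the summary
    x = layers.LeakyReLU()(x)

    # Decoder: transposed-conv upsampling, concatenation with the matching skip,
    # then a conv block. ZeroPadding2D fixes the 31->32 and 61->64 mismatches
    # visible in the summary.
    for filters, skip in zip((256, 128, 64, 32), reversed(skips)):
        x = layers.Conv2DTranspose(filters, 3, strides=2, padding="same")(x)
        if skip.shape[1] != x.shape[1]:
            skip = layers.ZeroPadding2D(((0, x.shape[1] - skip.shape[1]),
                                         (0, x.shape[2] - skip.shape[2])))(skip)
        x = layers.Concatenate()([x, skip])              # 1280 / 856 / 320 / 160 channels
        x = layers.Dropout(0.3)(x)
        x = layers.Conv2D(filters, 3, padding="same")(x)
        x = layers.LeakyReLU()(x)

    # Final upsample back to the input resolution and a 1-channel prediction.
    x = layers.Conv2DTranspose(16, 3, strides=2, padding="same")(x)
    outputs = layers.Conv2D(1, 1, activation="sigmoid")(x)
    return Model(backbone.input, outputs, name="UXception_sketch")


model = build_uxception()
model.summary()
```

Calling model.summary() on this sketch reproduces the same encoder layer names (they come from keras.applications.Xception), the same 4→8→16→32→64→128 decoder resolutions, and the same concatenation widths (1280, 856, 320, 160 channels), though the total parameter count differs because the decoder's residual conv/BN/Add sub-blocks are omitted here.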