• 【658】DeepLabV3+: Detailed Explanation



      Architecture diagram

      Notes:

    • The low-level feature on the left provides detail information: it is downsampled 2 times, i.e. by a factor of 2^2 = 4.
    • The atrous (dilated) convolution branch at the top right: downsampled 4 times, i.e. by a factor of 2^4 = 16 (see the arithmetic sketch after this list).
    • The 5 branches at the top right (the ASPP module) all yield feature maps with the same height and width.
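
      A quick check of these ratios, as a minimal arithmetic sketch (assuming the 512x512 input size used in the code below):

    # Downsampling arithmetic for a 512x512 input, matching the notes above.
    input_size = 512
    low_level_factor = 2 ** 2    # downsampled 2 times -> 4x
    backbone_factor = 2 ** 4     # downsampled 4 times -> 16x
    print(input_size // low_level_factor)   # 128 -> height/width of the shallow feature (skip1)
    print(input_size // backbone_factor)    # 32  -> height/width of the deep feature fed to ASPP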

      Code implementation:

    from keras.activations import relu
    from keras.layers import (Activation, Add, BatchNormalization, Conv2D,
                              DepthwiseConv2D)
    
    
    def _make_divisible(v, divisor, min_value=None):
        if min_value is None:
            min_value = divisor
        new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
        if new_v < 0.9 * v:
            new_v += divisor
        return new_v
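
    # Worked examples (illustrative alpha values, not from the original post): channel
    # counts are rounded to a multiple of `divisor`, but never allowed to fall below
    # 90% of the requested value:
    #   _make_divisible(32 * 0.75, 8) -> 24
    #   _make_divisible(32 * 0.35, 8) -> 16   (8 < 0.9 * 11.2, so the result is bumped up by one divisor)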
    
    def relu6(x):
        return relu(x, max_value=6)
    
    def _inverted_res_block(inputs, expansion, stride, alpha, filters, block_id, skip_connection, rate=1):
        in_channels = inputs.shape[-1]
        pointwise_filters = _make_divisible(int(filters * alpha), 8)
        prefix = 'expanded_conv_{}_'.format(block_id)
    
        x = inputs
        #----------------------------------------------------#
        #   Use a 1x1 convolution to expand the channel count (expansion * in_channels)
        #----------------------------------------------------#
        if block_id:
            x = Conv2D(expansion * in_channels, kernel_size=1, padding='same',
                       use_bias=False, activation=None,
                       name=prefix + 'expand')(x)
            x = BatchNormalization(epsilon=1e-3, momentum=0.999,
                                   name=prefix + 'expand_BN')(x)
            x = Activation(relu6, name=prefix + 'expand_relu')(x)
        else:
            prefix = 'expanded_conv_'
    
        #----------------------------------------------------#
        #   Extract features with a 3x3 depthwise convolution (optionally dilated)
        #----------------------------------------------------#
        x = DepthwiseConv2D(kernel_size=3, strides=stride, activation=None,
                            use_bias=False, padding='same', dilation_rate=(rate, rate),
                            name=prefix + 'depthwise')(x)
        x = BatchNormalization(epsilon=1e-3, momentum=0.999,
                               name=prefix + 'depthwise_BN')(x)
    
        x = Activation(relu6, name=prefix + 'depthwise_relu')(x)
    
        #----------------------------------------------------#
        #   Use a 1x1 convolution to reduce the channel count
        #----------------------------------------------------#
        x = Conv2D(pointwise_filters,
                   kernel_size=1, padding='same', use_bias=False, activation=None,
                   name=prefix + 'project')(x)
        x = BatchNormalization(epsilon=1e-3, momentum=0.999,
                               name=prefix + 'project_BN')(x)
    
        #----------------------------------------------------#
        #   Add the residual (skip) connection
        #----------------------------------------------------#
        if skip_connection:
            return Add(name=prefix + 'add')([inputs, x])
        return x
    
    def mobilenetV2(inputs, alpha=1, downsample_factor=8):
        if downsample_factor == 8:
            block4_dilation = 2
            block5_dilation = 4
            block4_stride = 1
            atrous_rates = (12, 24, 36)
        elif downsample_factor == 16:
            block4_dilation = 1
            block5_dilation = 2
            block4_stride = 2
            atrous_rates = (6, 12, 18)
        else:
            raise ValueError('Unsupported factor - `{}`, Use 8 or 16.'.format(downsample_factor))
        
        first_block_filters = _make_divisible(32 * alpha, 8)
        # 512,512,3 -> 256,256,32
        x = Conv2D(first_block_filters,
                    kernel_size=3,
                    strides=(2, 2), padding='same',
                    use_bias=False, name='Conv')(inputs)
        x = BatchNormalization(
            epsilon=1e-3, momentum=0.999, name='Conv_BN')(x)
        x = Activation(relu6, name='Conv_Relu6')(x)
    
        # 256,256,32 -> 256,256,16
        x = _inverted_res_block(x, filters=16, alpha=alpha, stride=1,
                                expansion=1, block_id=0, skip_connection=False)
    
        #---------------------------------------------------------------#
        # 256,256,16 -> 128,128,24
        x = _inverted_res_block(x, filters=24, alpha=alpha, stride=2,
                                expansion=6, block_id=1, skip_connection=False)
        x = _inverted_res_block(x, filters=24, alpha=alpha, stride=1,
                                expansion=6, block_id=2, skip_connection=True)
        skip1 = x
        #---------------------------------------------------------------#
        # 128,128,24 -> 64,64,32
        x = _inverted_res_block(x, filters=32, alpha=alpha, stride=2,
                                expansion=6, block_id=3, skip_connection=False)
        x = _inverted_res_block(x, filters=32, alpha=alpha, stride=1,
                                expansion=6, block_id=4, skip_connection=True)
        x = _inverted_res_block(x, filters=32, alpha=alpha, stride=1,
                                expansion=6, block_id=5, skip_connection=True)
        #---------------------------------------------------------------#
        # 64,64,32 -> 32,32,64
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=block4_stride,
                                expansion=6, block_id=6, skip_connection=False)
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=1, rate=block4_dilation,
                                expansion=6, block_id=7, skip_connection=True)
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=1, rate=block4_dilation,
                                expansion=6, block_id=8, skip_connection=True)
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=1, rate=block4_dilation,
                                expansion=6, block_id=9, skip_connection=True)
    
        # 32,32,64 -> 32,32,96
        x = _inverted_res_block(x, filters=96, alpha=alpha, stride=1, rate=block4_dilation,
                                expansion=6, block_id=10, skip_connection=False)
        x = _inverted_res_block(x, filters=96, alpha=alpha, stride=1, rate=block4_dilation,
                                expansion=6, block_id=11, skip_connection=True)
        x = _inverted_res_block(x, filters=96, alpha=alpha, stride=1, rate=block4_dilation,
                                expansion=6, block_id=12, skip_connection=True)
    
        #---------------------------------------------------------------#
        # 32,32,96 -> 32,32,160 -> 32,32,320
        x = _inverted_res_block(x, filters=160, alpha=alpha, stride=1, rate=block4_dilation,  # block 13 still uses block4_dilation
                                expansion=6, block_id=13, skip_connection=False)
        x = _inverted_res_block(x, filters=160, alpha=alpha, stride=1, rate=block5_dilation,
                                expansion=6, block_id=14, skip_connection=True)
        x = _inverted_res_block(x, filters=160, alpha=alpha, stride=1, rate=block5_dilation,
                                expansion=6, block_id=15, skip_connection=True)
    
        x = _inverted_res_block(x, filters=320, alpha=alpha, stride=1, rate=block5_dilation,
                                expansion=6, block_id=16, skip_connection=False)
        return x, atrous_rates, skip1


    #-----------------------------------------#
    #   DeeplabV3+ head: ASPP + decoder
    #-----------------------------------------#
    import tensorflow as tf
    from keras import backend as K
    from keras.layers import (Activation, BatchNormalization, Concatenate, Conv2D,
                              DepthwiseConv2D, Dropout, GlobalAveragePooling2D,
                              Input, Lambda, Softmax, ZeroPadding2D)
    from keras.models import Model
    
    #from nets.mobilenet import mobilenetV2
    #from nets.Xception import Xception
    
    
    def SepConv_BN(x, filters, prefix, stride=1, kernel_size=3, rate=1, depth_activation=False, epsilon=1e-3):
        # Compute the amount of padding and whether the spatial size (h, w) shrinks
        if stride == 1:
            depth_padding = 'same'
        else:
            kernel_size_effective = kernel_size + (kernel_size - 1) * (rate - 1)
            pad_total = kernel_size_effective - 1
            pad_beg = pad_total // 2
            pad_end = pad_total - pad_beg
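            # e.g. kernel_size=3, rate=2 -> effective kernel 5, pad_beg = pad_end = 2 (illustrative values)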
            x = ZeroPadding2D((pad_beg, pad_end))(x)
            depth_padding = 'valid'
        
        # If no activation is used inside the separable conv, apply a ReLU up front
        if not depth_activation:
            x = Activation('relu')(x)
    
        # Separable convolution: a 3x3 depthwise conv first, then a 1x1 pointwise conv
        # (the 3x3 depthwise part uses atrous/dilated convolution)
        x = DepthwiseConv2D((kernel_size, kernel_size), strides=(stride, stride), dilation_rate=(rate, rate),
                            padding=depth_padding, use_bias=False, name=prefix + '_depthwise')(x)
        x = BatchNormalization(name=prefix + '_depthwise_BN', epsilon=epsilon)(x)
        if depth_activation:
            x = Activation('relu')(x)
    
        # 1x1 pointwise convolution to compress the channels
        x = Conv2D(filters, (1, 1), padding='same',
                   use_bias=False, name=prefix + '_pointwise')(x)
        x = BatchNormalization(name=prefix + '_pointwise_BN', epsilon=epsilon)(x)
        if depth_activation:
            x = Activation('relu')(x)
    
        return x
    
    def Deeplabv3(n_classes, inputs_size, alpha=1., backbone="mobilenet", downsample_factor=16):
        img_input = Input(shape=inputs_size)
    
        if backbone=="xception":
            #----------------------------------#
            #   Obtain two feature maps:
            #   shallow feature skip1: [128,128,256]
            #   backbone output x:     [32,32,2048]
            #----------------------------------#
            # Note: Xception is not defined in this snippet (its import above is commented out)
            x, atrous_rates, skip1 = Xception(img_input, alpha, downsample_factor=downsample_factor)
        elif backbone=="mobilenet":
            #----------------------------------#
            #   Obtain two feature maps:
            #   shallow feature skip1: [128,128,24]
            #   backbone output x:     [32,32,320]
            #----------------------------------#
            x, atrous_rates, skip1 = mobilenetV2(img_input, alpha, downsample_factor=downsample_factor)
        else:
            raise ValueError('Unsupported backbone - `{}`, Use mobilenet, xception.'.format(backbone))
    
        size_before = tf.keras.backend.int_shape(x)
    
        #-----------------------------------------#
        #   ASPP feature extraction module: five branches in total,
        #   using atrous (dilated) convolutions with different dilation rates
        #-----------------------------------------#
        # Branch 0: plain 1x1 convolution
        b0 = Conv2D(256, (1, 1), padding='same', use_bias=False, name='aspp0')(x)
        b0 = BatchNormalization(name='aspp0_BN', epsilon=1e-5)(b0)
        b0 = Activation('relu', name='aspp0_activation')(b0)
    
        # Branch 1: rate = 6 (12 when downsample_factor=8)
        b1 = SepConv_BN(x, 256, 'aspp1',
                        rate=atrous_rates[0], depth_activation=True, epsilon=1e-5)
        # Branch 2: rate = 12 (24 when downsample_factor=8)
        b2 = SepConv_BN(x, 256, 'aspp2',
                        rate=atrous_rates[1], depth_activation=True, epsilon=1e-5)
        # Branch 3: rate = 18 (36 when downsample_factor=8)
        b3 = SepConv_BN(x, 256, 'aspp3',
                        rate=atrous_rates[2], depth_activation=True, epsilon=1e-5)
                        
        # Branch 4: global average pooling, expand_dims twice to restore the h/w axes, then a 1x1 conv to adjust the channels
        b4 = GlobalAveragePooling2D()(x)
        b4 = Lambda(lambda x: K.expand_dims(x, 1))(b4)
        b4 = Lambda(lambda x: K.expand_dims(x, 1))(b4)
        b4 = Conv2D(256, (1, 1), padding='same', use_bias=False, name='image_pooling')(b4)
        b4 = BatchNormalization(name='image_pooling_BN', epsilon=1e-5)(b4)
        b4 = Activation('relu')(b4)
        # Resize back to the feature-map height/width with resize_images
        b4 = Lambda(lambda x: tf.compat.v1.image.resize_images(x, size_before[1:3], align_corners=True))(b4)
    
        #-----------------------------------------#
        #   Concatenate the five branches,
        #   then fuse the features with a 1x1 convolution.
        #-----------------------------------------#
        x = Concatenate()([b4, b0, b1, b2, b3])
        # Compress with a 1x1 conv -> 32,32,256
        x = Conv2D(256, (1, 1), padding='same', use_bias=False, name='concat_projection')(x)
        x = BatchNormalization(name='concat_projection_BN', epsilon=1e-5)(x)
        x = Activation('relu')(x)
        x = Dropout(0.1)(x)
    
        skip_size = tf.keras.backend.int_shape(skip1)
        #-----------------------------------------#
        #   Upsample the fused ASPP feature to the size of the shallow feature
        #-----------------------------------------#
        x = Lambda(lambda xx: tf.compat.v1.image.resize_images(xx, skip_size[1:3], align_corners=True))(x)
        #----------------------------------#
        #   Shallow (low-level) feature branch
        #----------------------------------#
        dec_skip1 = Conv2D(48, (1, 1), padding='same',use_bias=False, name='feature_projection0')(skip1)
        dec_skip1 = BatchNormalization(name='feature_projection0_BN', epsilon=1e-5)(dec_skip1)
        dec_skip1 = Activation(tf.nn.relu)(dec_skip1)
    
        #-----------------------------------------#
        #   Concatenate with the shallow feature, then extract features with separable convolutions
        #-----------------------------------------#
        x = Concatenate()([x, dec_skip1])
        x = SepConv_BN(x, 256, 'decoder_conv0',
                        depth_activation=True, epsilon=1e-5)
        x = SepConv_BN(x, 256, 'decoder_conv1',
                        depth_activation=True, epsilon=1e-5)
    
        #-----------------------------------------#
        #   Per-pixel classification
        #-----------------------------------------#
        # 512,512
        size_before3 = tf.keras.backend.int_shape(img_input)
        # -> 512,512,n_classes after upsampling
        x = Conv2D(n_classes, (1, 1), padding='same')(x)
        x = Lambda(lambda xx:tf.compat.v1.image.resize_images(xx,size_before3[1:3], align_corners=True))(x)
        x = Softmax()(x)
    
        model = Model(img_input, x, name='deeplabv3plus')
        return model
    
    model = Deeplabv3(2, (512, 512, 3)) 
    model.summary() 
    
    # Plot the model structure
    from keras.utils.vis_utils import plot_model
    plot_model(model, to_file='deeplabv3plus.png', show_shapes=True)
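
      A minimal inference sketch (the random input array below is purely illustrative; it assumes the `model` built above):

    import numpy as np

    # Hypothetical usage: run one 512x512 RGB image through the network and take
    # the per-pixel argmax over the n_classes=2 output channels.
    dummy_image = np.random.rand(1, 512, 512, 3).astype('float32')
    probs = model.predict(dummy_image)    # shape (1, 512, 512, 2): softmax over classes
    mask = np.argmax(probs, axis=-1)      # shape (1, 512, 512): class index per pixel
    print(mask.shape, np.unique(mask))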
    

      Model structure (model.summary() output):

    Model: "deeplabv3plus"
    __________________________________________________________________________________________________
    Layer (type) Output Shape Param # Connected to
    ==================================================================================================
    input_8 (InputLayer) [(None, 512, 512, 3) 0
    __________________________________________________________________________________________________
    Conv (Conv2D) (None, 256, 256, 32) 864 input_8[0][0]
    __________________________________________________________________________________________________
    Conv_BN (BatchNormalization) (None, 256, 256, 32) 128 Conv[0][0]
    __________________________________________________________________________________________________
    Conv_Relu6 (Activation) (None, 256, 256, 32) 0 Conv_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_depthwise (Depthw (None, 256, 256, 32) 288 Conv_Relu6[0][0]
    __________________________________________________________________________________________________
    expanded_conv_depthwise_BN (Bat (None, 256, 256, 32) 128 expanded_conv_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_depthwise_relu (A (None, 256, 256, 32) 0 expanded_conv_depthwise_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_project (Conv2D) (None, 256, 256, 16) 512 expanded_conv_depthwise_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_project_BN (Batch (None, 256, 256, 16) 64 expanded_conv_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_1_expand (Conv2D) (None, 256, 256, 96) 1536 expanded_conv_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_1_expand_BN (Batc (None, 256, 256, 96) 384 expanded_conv_1_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_1_expand_relu (Ac (None, 256, 256, 96) 0 expanded_conv_1_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_1_depthwise (Dept (None, 128, 128, 96) 864 expanded_conv_1_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_1_depthwise_BN (B (None, 128, 128, 96) 384 expanded_conv_1_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_1_depthwise_relu (None, 128, 128, 96) 0 expanded_conv_1_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_1_project (Conv2D (None, 128, 128, 24) 2304 expanded_conv_1_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_1_project_BN (Bat (None, 128, 128, 24) 96 expanded_conv_1_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_2_expand (Conv2D) (None, 128, 128, 144 3456 expanded_conv_1_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_2_expand_BN (Batc (None, 128, 128, 144 576 expanded_conv_2_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_2_expand_relu (Ac (None, 128, 128, 144 0 expanded_conv_2_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_2_depthwise (Dept (None, 128, 128, 144 1296 expanded_conv_2_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_2_depthwise_BN (B (None, 128, 128, 144 576 expanded_conv_2_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_2_depthwise_relu (None, 128, 128, 144 0 expanded_conv_2_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_2_project (Conv2D (None, 128, 128, 24) 3456 expanded_conv_2_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_2_project_BN (Bat (None, 128, 128, 24) 96 expanded_conv_2_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_2_add (Add) (None, 128, 128, 24) 0 expanded_conv_1_project_BN[0][0]
    expanded_conv_2_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_3_expand (Conv2D) (None, 128, 128, 144 3456 expanded_conv_2_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_3_expand_BN (Batc (None, 128, 128, 144 576 expanded_conv_3_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_3_expand_relu (Ac (None, 128, 128, 144 0 expanded_conv_3_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_3_depthwise (Dept (None, 64, 64, 144) 1296 expanded_conv_3_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_3_depthwise_BN (B (None, 64, 64, 144) 576 expanded_conv_3_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_3_depthwise_relu (None, 64, 64, 144) 0 expanded_conv_3_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_3_project (Conv2D (None, 64, 64, 32) 4608 expanded_conv_3_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_3_project_BN (Bat (None, 64, 64, 32) 128 expanded_conv_3_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_4_expand (Conv2D) (None, 64, 64, 192) 6144 expanded_conv_3_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_4_expand_BN (Batc (None, 64, 64, 192) 768 expanded_conv_4_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_4_expand_relu (Ac (None, 64, 64, 192) 0 expanded_conv_4_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_4_depthwise (Dept (None, 64, 64, 192) 1728 expanded_conv_4_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_4_depthwise_BN (B (None, 64, 64, 192) 768 expanded_conv_4_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_4_depthwise_relu (None, 64, 64, 192) 0 expanded_conv_4_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_4_project (Conv2D (None, 64, 64, 32) 6144 expanded_conv_4_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_4_project_BN (Bat (None, 64, 64, 32) 128 expanded_conv_4_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_4_add (Add) (None, 64, 64, 32) 0 expanded_conv_3_project_BN[0][0]
    expanded_conv_4_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_5_expand (Conv2D) (None, 64, 64, 192) 6144 expanded_conv_4_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_5_expand_BN (Batc (None, 64, 64, 192) 768 expanded_conv_5_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_5_expand_relu (Ac (None, 64, 64, 192) 0 expanded_conv_5_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_5_depthwise (Dept (None, 64, 64, 192) 1728 expanded_conv_5_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_5_depthwise_BN (B (None, 64, 64, 192) 768 expanded_conv_5_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_5_depthwise_relu (None, 64, 64, 192) 0 expanded_conv_5_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_5_project (Conv2D (None, 64, 64, 32) 6144 expanded_conv_5_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_5_project_BN (Bat (None, 64, 64, 32) 128 expanded_conv_5_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_5_add (Add) (None, 64, 64, 32) 0 expanded_conv_4_add[0][0]
    expanded_conv_5_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_6_expand (Conv2D) (None, 64, 64, 192) 6144 expanded_conv_5_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_6_expand_BN (Batc (None, 64, 64, 192) 768 expanded_conv_6_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_6_expand_relu (Ac (None, 64, 64, 192) 0 expanded_conv_6_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_6_depthwise (Dept (None, 32, 32, 192) 1728 expanded_conv_6_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_6_depthwise_BN (B (None, 32, 32, 192) 768 expanded_conv_6_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_6_depthwise_relu (None, 32, 32, 192) 0 expanded_conv_6_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_6_project (Conv2D (None, 32, 32, 64) 12288 expanded_conv_6_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_6_project_BN (Bat (None, 32, 32, 64) 256 expanded_conv_6_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_7_expand (Conv2D) (None, 32, 32, 384) 24576 expanded_conv_6_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_7_expand_BN (Batc (None, 32, 32, 384) 1536 expanded_conv_7_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_7_expand_relu (Ac (None, 32, 32, 384) 0 expanded_conv_7_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_7_depthwise (Dept (None, 32, 32, 384) 3456 expanded_conv_7_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_7_depthwise_BN (B (None, 32, 32, 384) 1536 expanded_conv_7_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_7_depthwise_relu (None, 32, 32, 384) 0 expanded_conv_7_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_7_project (Conv2D (None, 32, 32, 64) 24576 expanded_conv_7_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_7_project_BN (Bat (None, 32, 32, 64) 256 expanded_conv_7_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_7_add (Add) (None, 32, 32, 64) 0 expanded_conv_6_project_BN[0][0]
    expanded_conv_7_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_8_expand (Conv2D) (None, 32, 32, 384) 24576 expanded_conv_7_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_8_expand_BN (Batc (None, 32, 32, 384) 1536 expanded_conv_8_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_8_expand_relu (Ac (None, 32, 32, 384) 0 expanded_conv_8_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_8_depthwise (Dept (None, 32, 32, 384) 3456 expanded_conv_8_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_8_depthwise_BN (B (None, 32, 32, 384) 1536 expanded_conv_8_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_8_depthwise_relu (None, 32, 32, 384) 0 expanded_conv_8_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_8_project (Conv2D (None, 32, 32, 64) 24576 expanded_conv_8_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_8_project_BN (Bat (None, 32, 32, 64) 256 expanded_conv_8_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_8_add (Add) (None, 32, 32, 64) 0 expanded_conv_7_add[0][0]
    expanded_conv_8_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_9_expand (Conv2D) (None, 32, 32, 384) 24576 expanded_conv_8_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_9_expand_BN (Batc (None, 32, 32, 384) 1536 expanded_conv_9_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_9_expand_relu (Ac (None, 32, 32, 384) 0 expanded_conv_9_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_9_depthwise (Dept (None, 32, 32, 384) 3456 expanded_conv_9_expand_relu[0][0]
    __________________________________________________________________________________________________
    expanded_conv_9_depthwise_BN (B (None, 32, 32, 384) 1536 expanded_conv_9_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_9_depthwise_relu (None, 32, 32, 384) 0 expanded_conv_9_depthwise_BN[0][0
    __________________________________________________________________________________________________
    expanded_conv_9_project (Conv2D (None, 32, 32, 64) 24576 expanded_conv_9_depthwise_relu[0]
    __________________________________________________________________________________________________
    expanded_conv_9_project_BN (Bat (None, 32, 32, 64) 256 expanded_conv_9_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_9_add (Add) (None, 32, 32, 64) 0 expanded_conv_8_add[0][0]
    expanded_conv_9_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_10_expand (Conv2D (None, 32, 32, 384) 24576 expanded_conv_9_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_10_expand_BN (Bat (None, 32, 32, 384) 1536 expanded_conv_10_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_10_expand_relu (A (None, 32, 32, 384) 0 expanded_conv_10_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_10_depthwise (Dep (None, 32, 32, 384) 3456 expanded_conv_10_expand_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_10_depthwise_BN ( (None, 32, 32, 384) 1536 expanded_conv_10_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_10_depthwise_relu (None, 32, 32, 384) 0 expanded_conv_10_depthwise_BN[0][
    __________________________________________________________________________________________________
    expanded_conv_10_project (Conv2 (None, 32, 32, 96) 36864 expanded_conv_10_depthwise_relu[0
    __________________________________________________________________________________________________
    expanded_conv_10_project_BN (Ba (None, 32, 32, 96) 384 expanded_conv_10_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_11_expand (Conv2D (None, 32, 32, 576) 55296 expanded_conv_10_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_11_expand_BN (Bat (None, 32, 32, 576) 2304 expanded_conv_11_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_11_expand_relu (A (None, 32, 32, 576) 0 expanded_conv_11_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_11_depthwise (Dep (None, 32, 32, 576) 5184 expanded_conv_11_expand_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_11_depthwise_BN ( (None, 32, 32, 576) 2304 expanded_conv_11_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_11_depthwise_relu (None, 32, 32, 576) 0 expanded_conv_11_depthwise_BN[0][
    __________________________________________________________________________________________________
    expanded_conv_11_project (Conv2 (None, 32, 32, 96) 55296 expanded_conv_11_depthwise_relu[0
    __________________________________________________________________________________________________
    expanded_conv_11_project_BN (Ba (None, 32, 32, 96) 384 expanded_conv_11_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_11_add (Add) (None, 32, 32, 96) 0 expanded_conv_10_project_BN[0][0]
    expanded_conv_11_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_12_expand (Conv2D (None, 32, 32, 576) 55296 expanded_conv_11_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_12_expand_BN (Bat (None, 32, 32, 576) 2304 expanded_conv_12_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_12_expand_relu (A (None, 32, 32, 576) 0 expanded_conv_12_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_12_depthwise (Dep (None, 32, 32, 576) 5184 expanded_conv_12_expand_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_12_depthwise_BN ( (None, 32, 32, 576) 2304 expanded_conv_12_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_12_depthwise_relu (None, 32, 32, 576) 0 expanded_conv_12_depthwise_BN[0][
    __________________________________________________________________________________________________
    expanded_conv_12_project (Conv2 (None, 32, 32, 96) 55296 expanded_conv_12_depthwise_relu[0
    __________________________________________________________________________________________________
    expanded_conv_12_project_BN (Ba (None, 32, 32, 96) 384 expanded_conv_12_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_12_add (Add) (None, 32, 32, 96) 0 expanded_conv_11_add[0][0]
    expanded_conv_12_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_13_expand (Conv2D (None, 32, 32, 576) 55296 expanded_conv_12_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_13_expand_BN (Bat (None, 32, 32, 576) 2304 expanded_conv_13_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_13_expand_relu (A (None, 32, 32, 576) 0 expanded_conv_13_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_13_depthwise (Dep (None, 32, 32, 576) 5184 expanded_conv_13_expand_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_13_depthwise_BN ( (None, 32, 32, 576) 2304 expanded_conv_13_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_13_depthwise_relu (None, 32, 32, 576) 0 expanded_conv_13_depthwise_BN[0][
    __________________________________________________________________________________________________
    expanded_conv_13_project (Conv2 (None, 32, 32, 160) 92160 expanded_conv_13_depthwise_relu[0
    __________________________________________________________________________________________________
    expanded_conv_13_project_BN (Ba (None, 32, 32, 160) 640 expanded_conv_13_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_14_expand (Conv2D (None, 32, 32, 960) 153600 expanded_conv_13_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_14_expand_BN (Bat (None, 32, 32, 960) 3840 expanded_conv_14_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_14_expand_relu (A (None, 32, 32, 960) 0 expanded_conv_14_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_14_depthwise (Dep (None, 32, 32, 960) 8640 expanded_conv_14_expand_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_14_depthwise_BN ( (None, 32, 32, 960) 3840 expanded_conv_14_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_14_depthwise_relu (None, 32, 32, 960) 0 expanded_conv_14_depthwise_BN[0][
    __________________________________________________________________________________________________
    expanded_conv_14_project (Conv2 (None, 32, 32, 160) 153600 expanded_conv_14_depthwise_relu[0
    __________________________________________________________________________________________________
    expanded_conv_14_project_BN (Ba (None, 32, 32, 160) 640 expanded_conv_14_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_14_add (Add) (None, 32, 32, 160) 0 expanded_conv_13_project_BN[0][0]
    expanded_conv_14_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_15_expand (Conv2D (None, 32, 32, 960) 153600 expanded_conv_14_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_15_expand_BN (Bat (None, 32, 32, 960) 3840 expanded_conv_15_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_15_expand_relu (A (None, 32, 32, 960) 0 expanded_conv_15_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_15_depthwise (Dep (None, 32, 32, 960) 8640 expanded_conv_15_expand_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_15_depthwise_BN ( (None, 32, 32, 960) 3840 expanded_conv_15_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_15_depthwise_relu (None, 32, 32, 960) 0 expanded_conv_15_depthwise_BN[0][
    __________________________________________________________________________________________________
    expanded_conv_15_project (Conv2 (None, 32, 32, 160) 153600 expanded_conv_15_depthwise_relu[0
    __________________________________________________________________________________________________
    expanded_conv_15_project_BN (Ba (None, 32, 32, 160) 640 expanded_conv_15_project[0][0]
    __________________________________________________________________________________________________
    expanded_conv_15_add (Add) (None, 32, 32, 160) 0 expanded_conv_14_add[0][0]
    expanded_conv_15_project_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_16_expand (Conv2D (None, 32, 32, 960) 153600 expanded_conv_15_add[0][0]
    __________________________________________________________________________________________________
    expanded_conv_16_expand_BN (Bat (None, 32, 32, 960) 3840 expanded_conv_16_expand[0][0]
    __________________________________________________________________________________________________
    expanded_conv_16_expand_relu (A (None, 32, 32, 960) 0 expanded_conv_16_expand_BN[0][0]
    __________________________________________________________________________________________________
    expanded_conv_16_depthwise (Dep (None, 32, 32, 960) 8640 expanded_conv_16_expand_relu[0][0
    __________________________________________________________________________________________________
    expanded_conv_16_depthwise_BN ( (None, 32, 32, 960) 3840 expanded_conv_16_depthwise[0][0]
    __________________________________________________________________________________________________
    expanded_conv_16_depthwise_relu (None, 32, 32, 960) 0 expanded_conv_16_depthwise_BN[0][
    __________________________________________________________________________________________________
    expanded_conv_16_project (Conv2 (None, 32, 32, 320) 307200 expanded_conv_16_depthwise_relu[0
    __________________________________________________________________________________________________
    expanded_conv_16_project_BN (Ba (None, 32, 32, 320) 1280 expanded_conv_16_project[0][0]
    __________________________________________________________________________________________________
    global_average_pooling2d_6 (Glo (None, 320) 0 expanded_conv_16_project_BN[0][0]
    __________________________________________________________________________________________________
    lambda_25 (Lambda) (None, 1, 320) 0 global_average_pooling2d_6[0][0]
    __________________________________________________________________________________________________
    aspp1_depthwise (DepthwiseConv2 (None, 32, 32, 320) 2880 expanded_conv_16_project_BN[0][0]
    __________________________________________________________________________________________________
    aspp2_depthwise (DepthwiseConv2 (None, 32, 32, 320) 2880 expanded_conv_16_project_BN[0][0]
    __________________________________________________________________________________________________
    aspp3_depthwise (DepthwiseConv2 (None, 32, 32, 320) 2880 expanded_conv_16_project_BN[0][0]
    __________________________________________________________________________________________________
    lambda_26 (Lambda) (None, 1, 1, 320) 0 lambda_25[0][0]
    __________________________________________________________________________________________________
    aspp1_depthwise_BN (BatchNormal (None, 32, 32, 320) 1280 aspp1_depthwise[0][0]
    __________________________________________________________________________________________________
    aspp2_depthwise_BN (BatchNormal (None, 32, 32, 320) 1280 aspp2_depthwise[0][0]
    __________________________________________________________________________________________________
    aspp3_depthwise_BN (BatchNormal (None, 32, 32, 320) 1280 aspp3_depthwise[0][0]
    __________________________________________________________________________________________________
    image_pooling (Conv2D) (None, 1, 1, 256) 81920 lambda_26[0][0]
    __________________________________________________________________________________________________
    activation_61 (Activation) (None, 32, 32, 320) 0 aspp1_depthwise_BN[0][0]
    __________________________________________________________________________________________________
    activation_63 (Activation) (None, 32, 32, 320) 0 aspp2_depthwise_BN[0][0]
    __________________________________________________________________________________________________
    activation_65 (Activation) (None, 32, 32, 320) 0 aspp3_depthwise_BN[0][0]
    __________________________________________________________________________________________________
    image_pooling_BN (BatchNormaliz (None, 1, 1, 256) 1024 image_pooling[0][0]
    __________________________________________________________________________________________________
    aspp0 (Conv2D) (None, 32, 32, 256) 81920 expanded_conv_16_project_BN[0][0]
    __________________________________________________________________________________________________
    aspp1_pointwise (Conv2D) (None, 32, 32, 256) 81920 activation_61[0][0]
    __________________________________________________________________________________________________
    aspp2_pointwise (Conv2D) (None, 32, 32, 256) 81920 activation_63[0][0]
    __________________________________________________________________________________________________
    aspp3_pointwise (Conv2D) (None, 32, 32, 256) 81920 activation_65[0][0]
    __________________________________________________________________________________________________
    activation_67 (Activation) (None, 1, 1, 256) 0 image_pooling_BN[0][0]
    __________________________________________________________________________________________________
    aspp0_BN (BatchNormalization) (None, 32, 32, 256) 1024 aspp0[0][0]
    __________________________________________________________________________________________________
    aspp1_pointwise_BN (BatchNormal (None, 32, 32, 256) 1024 aspp1_pointwise[0][0]
    __________________________________________________________________________________________________
    aspp2_pointwise_BN (BatchNormal (None, 32, 32, 256) 1024 aspp2_pointwise[0][0]
    __________________________________________________________________________________________________
    aspp3_pointwise_BN (BatchNormal (None, 32, 32, 256) 1024 aspp3_pointwise[0][0]
    __________________________________________________________________________________________________
    lambda_27 (Lambda) (None, 32, 32, 256) 0 activation_67[0][0]
    __________________________________________________________________________________________________
    aspp0_activation (Activation) (None, 32, 32, 256) 0 aspp0_BN[0][0]
    __________________________________________________________________________________________________
    activation_62 (Activation) (None, 32, 32, 256) 0 aspp1_pointwise_BN[0][0]
    __________________________________________________________________________________________________
    activation_64 (Activation) (None, 32, 32, 256) 0 aspp2_pointwise_BN[0][0]
    __________________________________________________________________________________________________
    activation_66 (Activation) (None, 32, 32, 256) 0 aspp3_pointwise_BN[0][0]
    __________________________________________________________________________________________________
    concatenate_7 (Concatenate) (None, 32, 32, 1280) 0 lambda_27[0][0]
    aspp0_activation[0][0]
    activation_62[0][0]
    activation_64[0][0]
    activation_66[0][0]
    __________________________________________________________________________________________________
    concat_projection (Conv2D) (None, 32, 32, 256) 327680 concatenate_7[0][0]
    __________________________________________________________________________________________________
    concat_projection_BN (BatchNorm (None, 32, 32, 256) 1024 concat_projection[0][0]
    __________________________________________________________________________________________________
    activation_68 (Activation) (None, 32, 32, 256) 0 concat_projection_BN[0][0]
    __________________________________________________________________________________________________
    feature_projection0 (Conv2D) (None, 128, 128, 48) 1152 expanded_conv_2_add[0][0]
    __________________________________________________________________________________________________
    dropout_4 (Dropout) (None, 32, 32, 256) 0 activation_68[0][0]
    __________________________________________________________________________________________________
    feature_projection0_BN (BatchNo (None, 128, 128, 48) 192 feature_projection0[0][0]
    __________________________________________________________________________________________________
    lambda_28 (Lambda) (None, 128, 128, 256 0 dropout_4[0][0]
    __________________________________________________________________________________________________
    activation_69 (Activation) (None, 128, 128, 48) 0 feature_projection0_BN[0][0]
    __________________________________________________________________________________________________
    concatenate_8 (Concatenate) (None, 128, 128, 304 0 lambda_28[0][0]
    activation_69[0][0]
    __________________________________________________________________________________________________
    decoder_conv0_depthwise (Depthw (None, 128, 128, 304 2736 concatenate_8[0][0]
    __________________________________________________________________________________________________
    decoder_conv0_depthwise_BN (Bat (None, 128, 128, 304 1216 decoder_conv0_depthwise[0][0]
    __________________________________________________________________________________________________
    activation_70 (Activation) (None, 128, 128, 304 0 decoder_conv0_depthwise_BN[0][0]
    __________________________________________________________________________________________________
    decoder_conv0_pointwise (Conv2D (None, 128, 128, 256 77824 activation_70[0][0]
    __________________________________________________________________________________________________
    decoder_conv0_pointwise_BN (Bat (None, 128, 128, 256 1024 decoder_conv0_pointwise[0][0]
    __________________________________________________________________________________________________
    activation_71 (Activation) (None, 128, 128, 256 0 decoder_conv0_pointwise_BN[0][0]
    __________________________________________________________________________________________________
    decoder_conv1_depthwise (Depthw (None, 128, 128, 256 2304 activation_71[0][0]
    __________________________________________________________________________________________________
    decoder_conv1_depthwise_BN (Bat (None, 128, 128, 256 1024 decoder_conv1_depthwise[0][0]
    __________________________________________________________________________________________________
    activation_72 (Activation) (None, 128, 128, 256 0 decoder_conv1_depthwise_BN[0][0]
    __________________________________________________________________________________________________
    decoder_conv1_pointwise (Conv2D (None, 128, 128, 256 65536 activation_72[0][0]
    __________________________________________________________________________________________________
    decoder_conv1_pointwise_BN (Bat (None, 128, 128, 256 1024 decoder_conv1_pointwise[0][0]
    __________________________________________________________________________________________________
    activation_73 (Activation) (None, 128, 128, 256 0 decoder_conv1_pointwise_BN[0][0]
    __________________________________________________________________________________________________
    conv2d_3 (Conv2D) (None, 128, 128, 2) 514 activation_73[0][0]
    __________________________________________________________________________________________________
    lambda_29 (Lambda) (None, 512, 512, 2) 0 conv2d_3[0][0]
    __________________________________________________________________________________________________
    softmax_1 (Softmax) (None, 512, 512, 2) 0 lambda_29[0][0]
    ==================================================================================================
    Total params: 2,753,714
    Trainable params: 2,714,930
    Non-trainable params: 38,784
    __________________________________________________________________________________________________

  • Original article: https://www.cnblogs.com/alex-bn-lee/p/15234453.html