1. SSD Feature Extraction Network: Detailed Code Walkthrough

The following Keras code builds the VGG16-style backbone used by SSD300. Every layer is stored in a dictionary named net, and six of its entries (conv4_3, fc7, conv6_2, conv7_2, conv8_2, conv9_2) are later used as the multi-scale feature maps for detection.

import keras.backend as K
from keras.layers import Activation
from keras.layers import Conv2D
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import GlobalAveragePooling2D
from keras.layers import Input
from keras.layers import MaxPooling2D
from keras.layers import concatenate   # the old `merge` function was removed in Keras 2
from keras.layers import Reshape
from keras.layers import ZeroPadding2D
from keras.models import Model


def VGG16(input_tensor):
    #---------------------------- Backbone feature extractor: start ----------------------------#
    # SSD keeps every layer in a `net` dictionary so that the detection heads
    # can later be attached to selected entries.
    net = {}

    # Block 1: two convolutions (conv1_1, conv1_2) and one pooling (pool1).
    # Input is the 300x300x3 RGB image; output is a 150x150x64 feature map.
    # 300,300,3 -> 150,150,64
    net['input'] = input_tensor
    net['conv1_1'] = Conv2D(64, kernel_size=(3, 3), activation='relu', padding='same', name='conv1_1')(net['input'])
    net['conv1_2'] = Conv2D(64, kernel_size=(3, 3), activation='relu', padding='same', name='conv1_2')(net['conv1_1'])
    net['pool1'] = MaxPooling2D((2, 2), strides=(2, 2), padding='same', name='pool1')(net['conv1_2'])

    # Block 2: two convolutions (conv2_1, conv2_2) and one pooling (pool2).
    # 150,150,64 -> 75,75,128
    net['conv2_1'] = Conv2D(128, kernel_size=(3, 3), activation='relu', padding='same', name='conv2_1')(net['pool1'])
    net['conv2_2'] = Conv2D(128, kernel_size=(3, 3), activation='relu', padding='same', name='conv2_2')(net['conv2_1'])
    net['pool2'] = MaxPooling2D((2, 2), strides=(2, 2), padding='same', name='pool2')(net['conv2_2'])

    # Block 3: three convolutions (conv3_1, conv3_2, conv3_3) and one pooling (pool3).
    # 75,75,128 -> 38,38,256
    net['conv3_1'] = Conv2D(256, kernel_size=(3, 3), activation='relu', padding='same', name='conv3_1')(net['pool2'])
    net['conv3_2'] = Conv2D(256, kernel_size=(3, 3), activation='relu', padding='same', name='conv3_2')(net['conv3_1'])
    net['conv3_3'] = Conv2D(256, kernel_size=(3, 3), activation='relu', padding='same', name='conv3_3')(net['conv3_2'])
    net['pool3'] = MaxPooling2D((2, 2), strides=(2, 2), padding='same', name='pool3')(net['conv3_3'])

    # Block 4: three convolutions (conv4_1, conv4_2, conv4_3) and one pooling (pool4).
    # conv4_3 is SSD feature layer 1 of 6.
    # 38,38,256 -> 19,19,512
    net['conv4_1'] = Conv2D(512, kernel_size=(3, 3), activation='relu', padding='same', name='conv4_1')(net['pool3'])
    net['conv4_2'] = Conv2D(512, kernel_size=(3, 3), activation='relu', padding='same', name='conv4_2')(net['conv4_1'])
    net['conv4_3'] = Conv2D(512, kernel_size=(3, 3), activation='relu', padding='same', name='conv4_3')(net['conv4_2'])
    net['pool4'] = MaxPooling2D((2, 2), strides=(2, 2), padding='same', name='pool4')(net['conv4_3'])

    # Block 5: three convolutions (conv5_1, conv5_2, conv5_3) and one pooling (pool5).
    # pool5 uses stride 1, so the spatial shape does not change.
    # 19,19,512 -> 19,19,512
    net['conv5_1'] = Conv2D(512, kernel_size=(3, 3), activation='relu', padding='same', name='conv5_1')(net['pool4'])
    net['conv5_2'] = Conv2D(512, kernel_size=(3, 3), activation='relu', padding='same', name='conv5_2')(net['conv5_1'])
    net['conv5_3'] = Conv2D(512, kernel_size=(3, 3), activation='relu', padding='same', name='conv5_3')(net['conv5_2'])
    net['pool5'] = MaxPooling2D((3, 3), strides=(1, 1), padding='same', name='pool5')(net['conv5_3'])

    # FC6: a dilated (atrous) convolution replaces VGG's first fully connected layer.
    # Dilation pads the kernel itself with zeros; the effective kernel size is
    #   dilation_rate * (kernel_size - 1) + 1.
    # For example, a 3x3 kernel with dilation_rate=2 acts like a 5x5 kernel (2*(3-1)+1 = 5);
    # here dilation_rate=6 gives an effective 13x13 window (6*(3-1)+1 = 13).
    # The default dilation_rate=1 means no holes; values > 1 enlarge the receptive field.
    # 19,19,512 -> 19,19,1024
    net['fc6'] = Conv2D(1024, kernel_size=(3, 3), dilation_rate=(6, 6), activation='relu', padding='same', name='fc6')(net['pool5'])
    # x = Dropout(0.5, name='drop6')(x)

    # FC7: a 1x1 convolution replaces VGG's second fully connected layer.
    # fc7 is SSD feature layer 2 of 6.
    # 19,19,1024 -> 19,19,1024
    net['fc7'] = Conv2D(1024, kernel_size=(1, 1), activation='relu', padding='same', name='fc7')(net['fc6'])
    # x = Dropout(0.5, name='drop7')(x)

    # Block 6: conv6_1 (1x1 convolution to adjust the channel count), zero padding, conv6_2
    # (conv8_1 / conv8_2 in the original SSD naming). conv6_2 is SSD feature layer 3 of 6.
    # 19,19,1024 -> 10,10,512
    net['conv6_1'] = Conv2D(256, kernel_size=(1, 1), activation='relu', padding='same', name='conv6_1')(net['fc7'])
    net['conv6_2'] = ZeroPadding2D(padding=((1, 1), (1, 1)), name='conv6_padding')(net['conv6_1'])
    net['conv6_2'] = Conv2D(512, kernel_size=(3, 3), strides=(2, 2), activation='relu', name='conv6_2')(net['conv6_2'])

    # Block 7: conv7_1 (1x1 convolution to adjust the channel count), zero padding, conv7_2
    # (conv9_1 / conv9_2 in the original SSD naming). conv7_2 is SSD feature layer 4 of 6.
    # Note: padding='valid' is correct here. The ZeroPadding2D layer already pads 10x10 to
    # 12x12, so the strided 3x3 'valid' convolution yields the expected 5x5 output;
    # padding='same' would instead give 6x6 and break the shapes of the later blocks.
    # 10,10,512 -> 5,5,256
    net['conv7_1'] = Conv2D(128, kernel_size=(1, 1), activation='relu', padding='same', name='conv7_1')(net['conv6_2'])
    net['conv7_2'] = ZeroPadding2D(padding=((1, 1), (1, 1)), name='conv7_padding')(net['conv7_1'])
    net['conv7_2'] = Conv2D(256, kernel_size=(3, 3), strides=(2, 2), activation='relu', padding='valid', name='conv7_2')(net['conv7_2'])

    # Block 8: conv8_1 (1x1 convolution to adjust the channel count) and conv8_2
    # (conv10_1 / conv10_2 in the original SSD naming). conv8_2 is SSD feature layer 5 of 6.
    # 5,5,256 -> 3,3,256
    net['conv8_1'] = Conv2D(128, kernel_size=(1, 1), activation='relu', padding='same', name='conv8_1')(net['conv7_2'])
    net['conv8_2'] = Conv2D(256, kernel_size=(3, 3), strides=(1, 1), activation='relu', padding='valid', name='conv8_2')(net['conv8_1'])

    # Block 9: conv9_1 (1x1 convolution to adjust the channel count) and conv9_2
    # (conv11_1 / conv11_2 in the original SSD naming). conv9_2 is SSD feature layer 6 of 6.
    # 3,3,256 -> 1,1,256
    net['conv9_1'] = Conv2D(128, kernel_size=(1, 1), activation='relu', padding='same', name='conv9_1')(net['conv8_2'])
    net['conv9_2'] = Conv2D(256, kernel_size=(3, 3), strides=(1, 1), activation='relu', padding='valid', name='conv9_2')(net['conv9_1'])
    #---------------------------- Backbone feature extractor: end ----------------------------#
    return net
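To see the six detection feature maps come out of this backbone, here is a minimal usage sketch. It assumes the standalone Keras 2.x API used above and that the VGG16 function is defined in the same script; the layer names and the expected spatial sizes (38, 19, 10, 5, 3, 1) come from the shape comments in the code.

import keras.backend as K
from keras.layers import Input
from keras.models import Model

# Build the backbone on a 300x300x3 input (SSD300).
input_tensor = Input(shape=(300, 300, 3))
net = VGG16(input_tensor)

# The six entries that SSD later attaches classification/regression heads to.
feature_layers = ['conv4_3', 'fc7', 'conv6_2', 'conv7_2', 'conv8_2', 'conv9_2']
model = Model(inputs=input_tensor, outputs=[net[name] for name in feature_layers])

for name in feature_layers:
    # Expected: (None, 38, 38, 512), (None, 19, 19, 1024), (None, 10, 10, 512),
    #           (None, 5, 5, 256), (None, 3, 3, 256), (None, 1, 1, 256)
    print(name, K.int_shape(net[name]))

Running this (with a TensorFlow backend) should print the six progressively smaller feature maps that the multi-scale SSD detection heads consume.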