ResNeXt Group Block keras实现方法
方法一:使用for循环lambda层及concatenate实现group
def grouped_convolution_block(init, grouped_channels, cardinality, strides):
    """Grouped 3x3 convolution built from `cardinality` parallel Conv2D branches.

    Each branch slices `grouped_channels` channels out of `init`, applies a
    3x3 convolution, and the branch outputs are concatenated back along the
    channel axis, followed by BN + ReLU.

    Args:
        init: input 4-D tensor, channels-last (the slicing below indexes
            axis -1; assumes `channels_last` data format — confirm with the
            model's global data format).
        grouped_channels: number of channels per group.
        cardinality: number of groups (parallel branches).
        strides: spatial stride applied inside each group's 3x3 conv.

    Returns:
        Output tensor after concat + BatchNormalization + ReLU.
    """
    channel_axis = -1  # channels-last
    group_list = []
    for c in range(cardinality):
        # NOTE: `c=c` binds the loop variable as a default argument. Without
        # it the lambda captures `c` late — all Lambda layers would slice
        # with the final value of `c` when the lambdas are re-invoked (e.g.
        # after model save/load), collapsing every group onto the last slice.
        x = Lambda(lambda z, c=c:
                   z[:, :, :, c * grouped_channels:(c + 1) * grouped_channels])(init)
        x = Conv2D(grouped_channels, (3, 3), padding='same', use_bias=False,
                   strides=(strides, strides), kernel_initializer='he_normal',
                   kernel_regularizer=l2(weight_decay))(x)
        group_list.append(x)
    group_merge = concatenate(group_list, axis=channel_axis)
    x = BatchNormalization()(group_merge)
    x = Activation('relu')(x)
    return x


def block_module(x, filters, cardinality, strides):
    """ResNeXt bottleneck block: 1x1 conv -> grouped 3x3 conv -> 1x1 conv,
    with a residual connection (projected when channel counts differ).

    Args:
        x: input 4-D tensor, channels-last.
        filters: bottleneck width; block output has `filters * 2` channels.
        cardinality: number of groups in the grouped convolution.
        strides: spatial stride (down-sampling happens in the grouped conv).

    Returns:
        Output tensor of the residual block (after the final ReLU).
    """
    init = x  # residual connection
    grouped_channels = int(filters / cardinality)

    # Project the shortcut when the channel count does not already match the
    # block output. NOTE(review): this does not also check `strides > 1`; if
    # strides != 1 while channels already match, the `add` below would get
    # mismatched spatial shapes — presumably callers only down-sample
    # together with a channel change. Confirm against call sites.
    if init._keras_shape[-1] != 2 * filters:
        init = Conv2D(filters * 2, (1, 1), padding='same', strides=(strides, strides),
                      use_bias=False, kernel_initializer='he_normal',
                      kernel_regularizer=l2(weight_decay))(init)
        init = BatchNormalization()(init)

    # conv1: 1x1 reduce
    x = Conv2D(filters, (1, 1), padding='same', use_bias=False,
               kernel_initializer='he_normal',
               kernel_regularizer=l2(weight_decay))(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)

    # conv2 (grouped): down-sampling, if any, happens here
    x = grouped_convolution_block(x, grouped_channels, cardinality, strides)

    # conv3: 1x1 expand to 2 * filters
    x = Conv2D(filters * 2, (1, 1), padding='same', use_bias=False,
               kernel_initializer='he_normal',
               kernel_regularizer=l2(weight_decay))(x)
    x = BatchNormalization()(x)

    x = add([init, x])
    x = Activation('relu')(x)
    return x
方法二:使用DepthwiseConv2D(深度卷积)与for循环,自定义kernel_initializer实现
def block3(x, filters, kernel_size=3, stride=1, groups=32,
           conv_shortcut=True, name=None):
    """A residual block.

    # Arguments
        x: input tensor.
        filters: integer, filters of the bottleneck layer.
        kernel_size: default 3, kernel size of the bottleneck layer.
        stride: default 1, stride of the first layer.
        groups: default 32, group size for grouped convolution.
        conv_shortcut: default True, use convolution shortcut if True,
            otherwise identity shortcut.
        name: string, block label.

    # Returns
        Output tensor for the residual block.
    """
    bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1

    if conv_shortcut is True:
        # Projection shortcut: (64 // groups) * filters output channels.
        shortcut = layers.Conv2D((64 // groups) * filters, 1, strides=stride,
                                 use_bias=False, name=name + '_0_conv')(x)
        shortcut = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                             name=name + '_0_bn')(shortcut)
    else:
        shortcut = x

    x = layers.Conv2D(filters, 1, use_bias=False, name=name + '_1_conv')(x)
    x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                  name=name + '_1_bn')(x)
    x = layers.Activation('relu', name=name + '_1_relu')(x)

    # Channels per group.
    c = filters // groups
    x = layers.ZeroPadding2D(padding=((1, 1), (1, 1)), name=name + '_2_pad')(x)
    # Depthwise convolution producing filters * c output channels
    # (e.g. 128 * 4); each input channel yields c independent outputs.
    x = layers.DepthwiseConv2D(kernel_size, strides=stride, depth_multiplier=c,
                               use_bias=False, name=name + '_2_conv')(x)
    # Fixed 0/1 mask kernel of shape (1, 1, filters * c, filters): it is
    # just a constant weights array for the frozen 1x1 conv below, which
    # sums the right depthwise outputs back into grouped-conv outputs.
    kernel = np.zeros((1, 1, filters * c, filters), dtype=np.float32)
    for i in range(filters):
        start = (i // c) * c * c + i % c
        end = start + c * c
        # Along the filters*c input axis, positions start, start+c,
        # start+2c, ... are set to 1, everything else stays 0 — this
        # selects the depthwise outputs belonging to output channel i.
        kernel[:, :, start:end:c, i] = 1.
    # Non-trainable 1x1 conv initialized with the constant mask; the
    # initializer is passed as a serialized config dict so the constant
    # array is used verbatim.
    x = layers.Conv2D(filters, 1, use_bias=False, trainable=False,
                      kernel_initializer={'class_name': 'Constant',
                                          'config': {'value': kernel}},
                      name=name + '_2_gconv')(x)
    x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                  name=name + '_2_bn')(x)
    x = layers.Activation('relu', name=name + '_2_relu')(x)

    x = layers.Conv2D((64 // groups) * filters, 1,
                      use_bias=False, name=name + '_3_conv')(x)
    x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                  name=name + '_3_bn')(x)
    x = layers.Add(name=name + '_add')([shortcut, x])
    x = layers.Activation('relu', name=name + '_out')(x)
    return x
更多推荐
ResNeXt Group Block keras实现方法
发布评论