from keras import Input, Model
from keras.layers import (Dense, Conv2D, GlobalAveragePooling2D, BatchNormalization,
                          LeakyReLU, MaxPooling2D, UpSampling2D, Dropout, concatenate)
def cnn_net_drag(input_shape, output_shape=260):
    """CNN classifier: four Conv-BN-LeakyReLU blocks with max pooling,
    then global average pooling and a softmax head over `output_shape` classes."""
    _input = Input(input_shape)
    use_bias = False  # bias is redundant immediately before BatchNormalization

    # Block 0: two 3x3 convs, 16 filters, then 2x2 max pooling
    down0 = Conv2D(16, (3, 3), use_bias=use_bias)(_input)
    down0 = BatchNormalization()(down0)
    down0 = LeakyReLU(alpha=0.1)(down0)
    down0 = Conv2D(16, (3, 3), use_bias=use_bias)(down0)
    down0 = BatchNormalization()(down0)
    down0 = LeakyReLU(alpha=0.1)(down0)
    down0_pool = MaxPooling2D((2, 2), strides=(2, 2))(down0)

    # Block 1: 32 filters
    down1 = Conv2D(32, (3, 3), use_bias=use_bias)(down0_pool)
    down1 = BatchNormalization()(down1)
    down1 = LeakyReLU(alpha=0.1)(down1)
    down1 = Conv2D(32, (3, 3), use_bias=use_bias)(down1)
    down1 = BatchNormalization()(down1)
    down1 = LeakyReLU(alpha=0.1)(down1)
    down1_pool = MaxPooling2D((2, 2), strides=(2, 2))(down1)

    # Block 2: 64 filters
    down2 = Conv2D(64, (3, 3), use_bias=use_bias)(down1_pool)
    down2 = BatchNormalization()(down2)
    down2 = LeakyReLU(alpha=0.1)(down2)
    down2 = Conv2D(64, (3, 3), use_bias=use_bias)(down2)
    down2 = BatchNormalization()(down2)
    down2 = LeakyReLU(alpha=0.1)(down2)
    down2_pool = MaxPooling2D((2, 2), strides=(2, 2))(down2)

    # Block 3: 64 filters
    down3 = Conv2D(64, (3, 3), use_bias=use_bias)(down2_pool)
    down3 = BatchNormalization()(down3)
    down3 = LeakyReLU(alpha=0.1)(down3)
    down3 = Conv2D(64, (3, 3), use_bias=use_bias)(down3)
    down3 = BatchNormalization()(down3)
    down3 = LeakyReLU(alpha=0.1)(down3)
    down3_pool = MaxPooling2D((2, 2), strides=(2, 2))(down3)

    # Head: global average pooling -> 32-unit dense -> dropout -> softmax
    gap = GlobalAveragePooling2D()(down3_pool)
    dense = Dense(32, activation="relu")(gap)
    drop = Dropout(0.2)(dense)
    dense = Dense(output_shape, activation="softmax")(drop)

    model = Model(_input, dense)
    model.summary()
    return model
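# Usage sketch for cnn_net_drag (an assumption, not from the original source):
# any input large enough to survive the four valid-padding conv/pool stages
# works, e.g. 128x128x3; the optimizer and loss below are illustrative choices
# for a softmax classifier over `output_shape` position classes, and
# demo_cnn_net_drag is a hypothetical helper name.
def demo_cnn_net_drag():
    model = cnn_net_drag((128, 128, 3), output_shape=260)
    model.compile(optimizer="adam",
                  loss="categorical_crossentropy",
                  metrics=["accuracy"])
    return model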
def u_net_drag(input_shape, output_shape=260, cls_num=2):
    """Small U-Net: three encoder blocks, a center block, and three decoder
    blocks with skip connections; outputs a one-channel sigmoid mask."""
    inputs = Input(shape=input_shape)
    use_bias = False  # bias is redundant immediately before BatchNormalization

    # 128: encoder block 1
    down1 = Conv2D(16, (3, 3), padding='same', use_bias=use_bias)(inputs)
    down1 = BatchNormalization()(down1)
    down1 = LeakyReLU(alpha=0.1)(down1)
    down1 = Conv2D(16, (1, 1), padding='same', use_bias=use_bias)(down1)
    down1 = BatchNormalization()(down1)
    down1 = LeakyReLU(alpha=0.1)(down1)
    down1_pool = MaxPooling2D((2, 2), strides=(2, 2))(down1)

    # 64: encoder block 2
    down2 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(down1_pool)
    down2 = BatchNormalization()(down2)
    down2 = LeakyReLU(alpha=0.1)(down2)
    down2 = Conv2D(32, (1, 1), padding='same', use_bias=use_bias)(down2)
    down2 = BatchNormalization()(down2)
    down2 = LeakyReLU(alpha=0.1)(down2)
    down2_pool = MaxPooling2D((2, 2), strides=(2, 2))(down2)

    # 32: encoder block 3
    down3 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down2_pool)
    down3 = BatchNormalization()(down3)
    down3 = LeakyReLU(alpha=0.1)(down3)
    down3 = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(down3)
    down3 = BatchNormalization()(down3)
    down3 = LeakyReLU(alpha=0.1)(down3)
    down3_pool = MaxPooling2D((2, 2), strides=(2, 2))(down3)

    # 16: center (bottleneck) block
    center = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down3_pool)
    center = BatchNormalization()(center)
    center = LeakyReLU(alpha=0.1)(center)
    center = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(center)
    center = BatchNormalization()(center)
    center = LeakyReLU(alpha=0.1)(center)

    # 32: decoder block 3 (skip connection from down3)
    up3 = UpSampling2D((2, 2))(center)
    up3 = concatenate([down3, up3], axis=3)
    up3 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(up3)
    up3 = BatchNormalization()(up3)
    up3 = LeakyReLU(alpha=0.1)(up3)
    up3 = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(up3)
    up3 = BatchNormalization()(up3)
    up3 = LeakyReLU(alpha=0.1)(up3)

    # 64: decoder block 2 (skip connection from down2)
    up2 = UpSampling2D((2, 2))(up3)
    up2 = concatenate([down2, up2], axis=3)
    up2 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(up2)
    up2 = BatchNormalization()(up2)
    up2 = LeakyReLU(alpha=0.1)(up2)
    up2 = Conv2D(32, (1, 1), padding='same', use_bias=use_bias)(up2)
    up2 = BatchNormalization()(up2)
    up2 = LeakyReLU(alpha=0.1)(up2)

    # 128: decoder block 1 (skip connection from down1)
    up1 = UpSampling2D((2, 2))(up2)
    up1 = concatenate([down1, up1], axis=3)
    up1 = Conv2D(16, (3, 3), padding='same', use_bias=use_bias)(up1)
    up1 = BatchNormalization()(up1)
    up1 = LeakyReLU(alpha=0.1)(up1)
    up1 = Conv2D(16, (1, 1), padding='same', use_bias=use_bias)(up1)
    up1 = BatchNormalization()(up1)
    up1 = LeakyReLU(alpha=0.1)(up1)

    # Per-pixel sigmoid mask; the commented line is an alternative
    # cls_num-way softmax head
    classify = Conv2D(1, (1, 1), activation='sigmoid')(up1)
    # classify = Dense(cls_num, activation="softmax")(up1)

    model = Model(inputs=inputs, outputs=classify)
    model.summary(line_length=100)
    return model
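# Usage sketch for u_net_drag (an assumption, not from the original source):
# the resolution comments above suggest 128x128 inputs; any height and width
# divisible by 8 keep the skip connections aligned across the three rounds of
# 2x2 pooling and upsampling. The optimizer and loss are illustrative choices
# for a one-channel sigmoid mask.
if __name__ == "__main__":
    unet = u_net_drag((128, 128, 3))
    unet.compile(optimizer="adam", loss="binary_crossentropy")
    # unet.predict(images) maps (N, 128, 128, 3) images to (N, 128, 128, 1) masks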