# model.py
from keras import Input, Model
from keras.layers import Dense, Conv2D, GlobalAveragePooling2D, BatchNormalization, \
    LeakyReLU, MaxPooling2D, UpSampling2D, Dropout, concatenate
import keras.backend as K
  5. def cnn_net_drag(input_shape, output_shape=260):
  6. _input = Input(input_shape)
  7. use_bias = False
  8. down0 = Conv2D(16, (3, 3), use_bias=use_bias)(_input)
  9. down0 = BatchNormalization()(down0)
  10. down0 = LeakyReLU(alpha=0.1)(down0)
  11. down0 = Conv2D(16, (3, 3), use_bias=use_bias)(down0)
  12. down0 = BatchNormalization()(down0)
  13. down0 = LeakyReLU(alpha=0.1)(down0)
  14. down0_pool = MaxPooling2D((2, 2), strides=(2, 2))(down0)
  15. down1 = Conv2D(32, (3, 3), use_bias=use_bias)(down0_pool)
  16. down1 = BatchNormalization()(down1)
  17. down1 = LeakyReLU(alpha=0.1)(down1)
  18. down1 = Conv2D(32, (3, 3), use_bias=use_bias)(down1)
  19. down1 = BatchNormalization()(down1)
  20. down1 = LeakyReLU(alpha=0.1)(down1)
  21. down1_pool = MaxPooling2D((2, 2), strides=(2, 2))(down1)
  22. down2 = Conv2D(64, (3, 3), use_bias=use_bias)(down1_pool)
  23. down2 = BatchNormalization()(down2)
  24. down2 = LeakyReLU(alpha=0.1)(down2)
  25. down2 = Conv2D(64, (3, 3), use_bias=use_bias)(down2)
  26. down2 = BatchNormalization()(down2)
  27. down2 = LeakyReLU(alpha=0.1)(down2)
  28. down2_pool = MaxPooling2D((2, 2), strides=(2, 2))(down2)
  29. down3 = Conv2D(64, (3, 3), use_bias=use_bias)(down2_pool)
  30. down3 = BatchNormalization()(down3)
  31. down3 = LeakyReLU(alpha=0.1)(down3)
  32. down3 = Conv2D(64, (3, 3), use_bias=use_bias)(down3)
  33. down3 = BatchNormalization()(down3)
  34. down3 = LeakyReLU(alpha=0.1)(down3)
  35. down3_pool = MaxPooling2D((2, 2), strides=(2, 2))(down3)
  36. gap = GlobalAveragePooling2D()(down3_pool)
  37. dense = Dense(32, activation="relu")(gap)
  38. drop = Dropout(0.2)(dense)
  39. dense = Dense(output_shape, activation="softmax")(drop)
  40. model = Model(_input, dense)
  41. # model.summary()
  42. return model
  43. def u_net_drag(input_shape, output_shape=260, cls_num=2):
  44. inputs = Input(shape=input_shape)
  45. use_bias = False
  46. # 128
  47. down1 = Conv2D(16, (3, 3), padding='same', use_bias=use_bias)(inputs)
  48. down1 = BatchNormalization()(down1)
  49. down1 = LeakyReLU(alpha=0.1)(down1)
  50. down1 = Conv2D(16, (1, 1), padding='same', use_bias=use_bias)(down1)
  51. down1 = BatchNormalization()(down1)
  52. down1 = LeakyReLU(alpha=0.1)(down1)
  53. down1_pool = MaxPooling2D((2, 2), strides=(2, 2))(down1)
  54. # 64
  55. down2 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(down1_pool)
  56. down2 = BatchNormalization()(down2)
  57. down2 = LeakyReLU(alpha=0.1)(down2)
  58. down2 = Conv2D(32, (1, 1), padding='same', use_bias=use_bias)(down2)
  59. down2 = BatchNormalization()(down2)
  60. down2 = LeakyReLU(alpha=0.1)(down2)
  61. down2_pool = MaxPooling2D((2, 2), strides=(2, 2))(down2)
  62. # 32
  63. down3 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down2_pool)
  64. down3 = BatchNormalization()(down3)
  65. down3 = LeakyReLU(alpha=0.1)(down3)
  66. down3 = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(down3)
  67. down3 = BatchNormalization()(down3)
  68. down3 = LeakyReLU(alpha=0.1)(down3)
  69. down3_pool = MaxPooling2D((2, 2), strides=(2, 2))(down3)
  70. # 16
  71. center = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down3_pool)
  72. center = BatchNormalization()(center)
  73. center = LeakyReLU(alpha=0.1)(center)
  74. center = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(center)
  75. center = BatchNormalization()(center)
  76. center = LeakyReLU(alpha=0.1)(center)
  77. # 32
  78. up3 = UpSampling2D((2, 2))(center)
  79. up3 = concatenate([down3, up3], axis=3)
  80. up3 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(up3)
  81. up3 = BatchNormalization()(up3)
  82. up3 = LeakyReLU(alpha=0.1)(up3)
  83. up3 = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(up3)
  84. up3 = BatchNormalization()(up3)
  85. up3 = LeakyReLU(alpha=0.1)(up3)
  86. # 64
  87. up2 = UpSampling2D((2, 2))(up3)
  88. up2 = concatenate([down2, up2], axis=3)
  89. up2 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(up2)
  90. up2 = BatchNormalization()(up2)
  91. up2 = LeakyReLU(alpha=0.1)(up2)
  92. up2 = Conv2D(32, (1, 1), padding='same', use_bias=use_bias)(up2)
  93. up2 = BatchNormalization()(up2)
  94. up2 = LeakyReLU(alpha=0.1)(up2)
  95. # 128
  96. up1 = UpSampling2D((2, 2))(up2)
  97. up1 = K.concatenate([down1, up1], axis=3)
  98. up1 = Conv2D(16, (3, 3), padding='same', use_bias=use_bias)(up1)
  99. up1 = BatchNormalization()(up1)
  100. up1 = LeakyReLU(alpha=0.1)(up1)
  101. up1 = Conv2D(16, (1, 1), padding='same', use_bias=use_bias)(up1)
  102. up1 = BatchNormalization()(up1)
  103. up1 = LeakyReLU(alpha=0.1)(up1)
  104. classify = Conv2D(1, (1, 1), activation='sigmoid')(up1)
  105. # classify = Dense(cls_num, activation="softmax")(up1)
  106. model = Model(inputs=inputs, outputs=classify)
  107. # model.summary(line_length=100)
  108. return model