# model.py
  1. # -*- coding: utf-8 -*-
  2. """
  3. Created on Tue Jun 21 10:53:51 2022
  4. model
  5. @author: fangjiasheng
  6. """
  7. import os
  8. import sys
  9. sys.path.append(os.path.dirname(os.path.abspath(__file__)))
  10. from keras.layers import Lambda, Dense, Reshape, Conv2D, BatchNormalization, LeakyReLU, Masking, MaxPool2D, \
  11. MaxPooling2D, UpSampling2D, concatenate, Activation, GlobalAveragePooling2D, DepthwiseConv2D, Add
  12. from keras import layers, models, Sequential, Input, Model
  13. import keras.backend as K
  14. def direction_model(input_shape, output_shape):
  15. model = cnn_model(input_shape, output_shape)
  16. print(input_shape, output_shape)
  17. # model = mobile_net_v3_tiny(input_shape, output_shape)
  18. # model = fpn(input_shape, output_shape)
  19. # model.summary(line_length=100)
  20. return model
  21. def cnn_model(input_shape, output_shape):
  22. conv_num = 5
  23. # Input
  24. _input = Input(shape=input_shape, dtype="float32", name="input")
  25. conv = Conv2D(16, (3, 3), padding='same')(_input)
  26. bn = BatchNormalization()(conv)
  27. relu = LeakyReLU(alpha=0.)(bn)
  28. max_pool = MaxPool2D()(relu)
  29. for i in range(conv_num):
  30. conv = Conv2D(16, (3, 3), padding='same')(max_pool)
  31. bn = BatchNormalization()(conv)
  32. relu = LeakyReLU(alpha=0.)(bn)
  33. # conv = Conv2D(32, (1, 1), padding='same')(relu)
  34. # bn = BatchNormalization()(conv)
  35. # relu = LeakyReLU(alpha=0.)(bn)
  36. max_pool = MaxPool2D()(relu)
  37. # conv = Conv2D(16, (3, 3), padding='same')(max_pool)
  38. # bn = BatchNormalization()(conv)
  39. # relu = LeakyReLU(alpha=0.)(bn)
  40. max_pool = MaxPool2D((6, 6))(relu)
  41. dense = layers.Dense(output_shape, activation='softmax')(max_pool)
  42. squeeze = Lambda(lambda x: K.squeeze(x, axis=1))(dense)
  43. squeeze = Lambda(lambda x: K.squeeze(x, axis=1))(squeeze)
  44. model = Model(inputs=_input, outputs=squeeze)
  45. return model
  46. def fpn(input_shape, output_shape):
  47. # Input
  48. _input = Input(shape=input_shape, dtype="float32", name="input")
  49. # 192 -> 96
  50. conv = Conv2D(8, (3, 3), padding='same')(_input)
  51. bn = BatchNormalization()(conv)
  52. relu = LeakyReLU(alpha=0.)(bn)
  53. conv = Conv2D(8, (3, 3), padding='same')(relu)
  54. bn = BatchNormalization()(conv)
  55. relu = LeakyReLU(alpha=0.)(bn)
  56. max_pool_1 = MaxPool2D()(relu)
  57. # 96 -> 48
  58. conv = Conv2D(16, (3, 3), padding='same')(max_pool_1)
  59. bn = BatchNormalization()(conv)
  60. relu = LeakyReLU(alpha=0.)(bn)
  61. conv = Conv2D(16, (3, 3), padding='same')(relu)
  62. bn = BatchNormalization()(conv)
  63. relu = LeakyReLU(alpha=0.)(bn)
  64. max_pool_2 = MaxPool2D()(relu)
  65. # 48 -> 24
  66. conv = Conv2D(32, (3, 3), padding='same')(max_pool_2)
  67. bn = BatchNormalization()(conv)
  68. relu = LeakyReLU(alpha=0.)(bn)
  69. conv = Conv2D(32, (3, 3), padding='same')(relu)
  70. bn = BatchNormalization()(conv)
  71. relu = LeakyReLU(alpha=0.)(bn)
  72. max_pool_3 = MaxPool2D()(relu)
  73. #
  74. conv_pre = Conv2D(32, (1, 1))(max_pool_2)
  75. up_sample = UpSampling2D((2, 2))(max_pool_3)
  76. add_1 = Add()([conv_pre, up_sample])
  77. conv_pre = Conv2D(16, (1, 1))(max_pool_1)
  78. up_sample = UpSampling2D((2, 2))(max_pool_2)
  79. add_2 = Add()([conv_pre, up_sample])
  80. global_pool_1 = GlobalAveragePooling2D()(max_pool_3)
  81. global_pool_2 = GlobalAveragePooling2D()(add_1)
  82. global_pool_3 = GlobalAveragePooling2D()(add_2)
  83. reshape_1 = Reshape((1, 1, 32))(global_pool_1)
  84. reshape_2 = Reshape((1, 1, 32))(global_pool_2)
  85. reshape_3 = Reshape((1, 1, 16))(global_pool_3)
  86. conv_1 = Conv2D(64, (1, 1), padding='same')(reshape_1)
  87. conv_2 = Conv2D(64, (1, 1), padding='same')(reshape_2)
  88. conv_3 = Conv2D(64, (1, 1), padding='same')(reshape_3)
  89. conv_1 = Conv2D(output_shape, (1, 1), padding='same')(conv_1)
  90. conv_2 = Conv2D(output_shape, (1, 1), padding='same')(conv_2)
  91. conv_3 = Conv2D(output_shape, (1, 1), padding='same')(conv_3)
  92. reshape_1 = Reshape((output_shape,))(conv_1)
  93. reshape_2 = Reshape((output_shape,))(conv_2)
  94. reshape_3 = Reshape((output_shape,))(conv_3)
  95. add = Add()([reshape_1, reshape_2, reshape_3])
  96. softmax = Activation('softmax')(add)
  97. model = Model(_input, softmax)
  98. return model
  99. def tiny_unet(input_shape, output_shape):
  100. inputs = Input(shape=input_shape)
  101. use_bias = False
  102. # 128
  103. down1 = Conv2D(16, (3, 3), padding='same', use_bias=use_bias)(inputs)
  104. down1 = BatchNormalization()(down1)
  105. down1 = LeakyReLU(alpha=0.1)(down1)
  106. down1 = Conv2D(16, (1, 1), padding='same', use_bias=use_bias)(down1)
  107. down1 = BatchNormalization()(down1)
  108. down1 = LeakyReLU(alpha=0.1)(down1)
  109. down1_pool = MaxPooling2D((2, 2), strides=(2, 2))(down1)
  110. # 64
  111. down2 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(down1_pool)
  112. down2 = BatchNormalization()(down2)
  113. down2 = LeakyReLU(alpha=0.1)(down2)
  114. down2 = Conv2D(32, (1, 1), padding='same', use_bias=use_bias)(down2)
  115. down2 = BatchNormalization()(down2)
  116. down2 = LeakyReLU(alpha=0.1)(down2)
  117. down2_pool = MaxPooling2D((2, 2), strides=(2, 2))(down2)
  118. # 32
  119. down3 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down2_pool)
  120. down3 = BatchNormalization()(down3)
  121. down3 = LeakyReLU(alpha=0.1)(down3)
  122. down3 = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(down3)
  123. down3 = BatchNormalization()(down3)
  124. down3 = LeakyReLU(alpha=0.1)(down3)
  125. down3_pool = MaxPooling2D((2, 2), strides=(2, 2))(down3)
  126. # 16
  127. center = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down3_pool)
  128. center = BatchNormalization()(center)
  129. center = LeakyReLU(alpha=0.1)(center)
  130. center = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(center)
  131. center = BatchNormalization()(center)
  132. center = LeakyReLU(alpha=0.1)(center)
  133. # 32
  134. up3 = UpSampling2D((2, 2))(center)
  135. up3 = concatenate([down3, up3], axis=3)
  136. up3 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(up3)
  137. up3 = BatchNormalization()(up3)
  138. up3 = LeakyReLU(alpha=0.1)(up3)
  139. up3 = Conv2D(64, (1, 1), padding='same', use_bias=use_bias)(up3)
  140. up3 = BatchNormalization()(up3)
  141. up3 = LeakyReLU(alpha=0.1)(up3)
  142. # 64
  143. up2 = UpSampling2D((2, 2))(up3)
  144. up2 = concatenate([down2, up2], axis=3)
  145. up2 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(up2)
  146. up2 = BatchNormalization()(up2)
  147. up2 = LeakyReLU(alpha=0.1)(up2)
  148. up2 = Conv2D(32, (1, 1), padding='same', use_bias=use_bias)(up2)
  149. up2 = BatchNormalization()(up2)
  150. up2 = LeakyReLU(alpha=0.1)(up2)
  151. # 128
  152. up1 = UpSampling2D((2, 2))(up2)
  153. up1 = K.concatenate([down1, up1], axis=3)
  154. up1 = Conv2D(16, (3, 3), padding='same', use_bias=use_bias)(up1)
  155. up1 = BatchNormalization()(up1)
  156. up1 = LeakyReLU(alpha=0.1)(up1)
  157. up1 = Conv2D(16, (1, 1), padding='same', use_bias=use_bias)(up1)
  158. up1 = BatchNormalization()(up1)
  159. up1 = LeakyReLU(alpha=0.1)(up1)
  160. classify = Conv2D(output_shape, (1, 1), activation='softmax')(up1)
  161. model = Model(inputs=inputs, outputs=classify)
  162. return model