model.py

from keras import Input, Model
from keras.layers import Dense, Conv2D, Reshape, BatchNormalization, \
    LeakyReLU, MaxPooling2D, Dropout


def cnn_net(input_shape, output_shape=5710):
    _input = Input(input_shape)
    use_bias = False

    # Block 0: two 3x3 convs (32 filters), each with BN + LeakyReLU, then 2x2 max-pool.
    down0 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(_input)
    down0 = BatchNormalization()(down0)
    down0 = LeakyReLU(alpha=0.1)(down0)
    down0 = Conv2D(32, (3, 3), padding='same', use_bias=use_bias)(down0)
    down0 = BatchNormalization()(down0)
    down0 = LeakyReLU(alpha=0.1)(down0)
    down0_pool = MaxPooling2D((2, 2), strides=(2, 2))(down0)

    # Block 1: same pattern with 64 filters.
    down1 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down0_pool)
    down1 = BatchNormalization()(down1)
    down1 = LeakyReLU(alpha=0.1)(down1)
    down1 = Conv2D(64, (3, 3), padding='same', use_bias=use_bias)(down1)
    down1 = BatchNormalization()(down1)
    down1 = LeakyReLU(alpha=0.1)(down1)
    down1_pool = MaxPooling2D((2, 2), strides=(2, 2))(down1)

    # Block 2: same pattern with 128 filters.
    down2 = Conv2D(128, (3, 3), padding='same', use_bias=use_bias)(down1_pool)
    down2 = BatchNormalization()(down2)
    down2 = LeakyReLU(alpha=0.1)(down2)
    down2 = Conv2D(128, (3, 3), padding='same', use_bias=use_bias)(down2)
    down2 = BatchNormalization()(down2)
    down2 = LeakyReLU(alpha=0.1)(down2)
    down2_pool = MaxPooling2D((2, 2), strides=(2, 2))(down2)

    # Two 3x3 convs (256 filters) with default 'valid' padding shrink the
    # feature map toward 1x1 ahead of the dense classifier head.
    conv = Conv2D(256, (3, 3))(down2_pool)
    bn = BatchNormalization()(conv)
    rl = LeakyReLU(alpha=0.1)(bn)
    conv = Conv2D(256, (3, 3))(rl)
    bn = BatchNormalization()(conv)
    rl = LeakyReLU(alpha=0.1)(bn)

    # Classifier head: Dense + Dropout, softmax over output_shape classes,
    # then Reshape drops the remaining 1x1 spatial dimensions.
    dense = Dense(128, activation="relu")(rl)
    drop = Dropout(0.2)(dense)
    dense = Dense(output_shape, activation="softmax")(drop)
    x = Reshape((output_shape,))(dense)

    model = Model(_input, x)
    # model.summary()
    return model
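
A minimal usage sketch. The 40x40 single-channel input shape and the compile settings below are illustrative assumptions, not part of model.py; 40x40 is simply one input size that reduces to a 1x1 feature map after the three pooling stages and the two valid 3x3 convolutions, which the final Reshape requires.

from model import cnn_net  # assumes the listing above is saved as model.py

# Build the network for 40x40 grayscale inputs and the default 5710 classes.
model = cnn_net((40, 40, 1))
model.compile(optimizer="adam",
              loss="categorical_crossentropy",
              metrics=["accuracy"])
model.summary()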