model.py

import sys
import os

sys.path.append(os.path.abspath("../.."))

from keras import layers, models
import tensorflow as tf

from BiddingKG.dl.table_head.models.my_average_pooling import MyAveragePooling1D
from BiddingKG.dl.table_head.models.self_attention import SeqSelfAttention


def get_model(input_shape, output_shape):
    # Input: three parallel float sequences, each shaped input_shape[1:] per sample
    input_1 = layers.Input(shape=input_shape[1:], dtype="float32")
    input_2 = layers.Input(shape=input_shape[1:], dtype="float32")
    input_3 = layers.Input(shape=input_shape[1:], dtype="float32")

    # Embedding (disabled: the inputs are already dense float vectors)
    # embed_1 = layers.Embedding(input_dim=6624, output_dim=32,
    #                            input_length=input_shape[1], mask_zero=True)(input_1)
    # embed_2 = layers.Embedding(input_dim=6624, output_dim=32,
    #                            input_length=input_shape[1], mask_zero=True)(input_2)

    # Bi-LSTM: encode each sequence in both directions, keeping per-step outputs
    bi_lstm_1 = layers.Bidirectional(layers.LSTM(16, return_sequences=True))(input_1)
    bi_lstm_2 = layers.Bidirectional(layers.LSTM(16, return_sequences=True))(input_2)
    bi_lstm_3 = layers.Bidirectional(layers.LSTM(16, return_sequences=True))(input_3)

    # Self-Attention over each encoded sequence
    self_attention_1 = SeqSelfAttention(attention_activation='sigmoid')(bi_lstm_1)
    self_attention_2 = SeqSelfAttention(attention_activation='sigmoid')(bi_lstm_2)
    self_attention_3 = SeqSelfAttention(attention_activation='sigmoid')(bi_lstm_3)

    # Concat: merge the three attended sequences along the feature axis
    concat = layers.concatenate([self_attention_1, self_attention_2, self_attention_3])

    # Dense + sigmoid: per-time-step scores with output_shape[0] units
    output = layers.Dense(output_shape[0], activation="sigmoid")(concat)

    # Mask-aware mean pooling over the time axis collapses each sequence to one score
    output = MyAveragePooling1D(axis=1, name='output')(output)

    model = models.Model(inputs=[input_1, input_2, input_3], outputs=output)
    model.summary()
    return model
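

# --- Usage sketch (hypothetical) ---
# A minimal example of calling get_model(). The shapes below are illustrative
# assumptions, not values taken from the repository: get_model() only uses
# input_shape[1:] as the per-sample shape of each of the three inputs, and
# output_shape[0] as the number of sigmoid output units.
if __name__ == "__main__":
    import numpy as np

    input_shape = (None, 10, 60)  # (batch, time steps, features) - assumed
    output_shape = (1,)           # one sigmoid score per sample - assumed

    model = get_model(input_shape, output_shape)
    model.compile(optimizer="adam", loss="binary_crossentropy")

    # Three parallel inputs, e.g. a table cell and two neighbouring cells
    batch = 4
    x1 = np.random.rand(batch, 10, 60).astype("float32")
    x2 = np.random.rand(batch, 10, 60).astype("float32")
    x3 = np.random.rand(batch, 10, 60).astype("float32")

    preds = model.predict([x1, x2, x3])
    print(preds.shape)  # (4, 1): MyAveragePooling1D collapses the time axis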