import sys
import os

# Make the project root importable so the BiddingKG package resolves.
sys.path.append(os.path.abspath("../.."))

from keras import layers, models

from BiddingKG.dl.table_head.models.my_average_pooling import MyAveragePooling1D
from BiddingKG.dl.table_head.models.self_attention import SeqSelfAttention


def get_model(input_shape, output_shape):
    # Inputs: three parallel feature sequences; input_shape includes the
    # batch axis, so it is stripped here.
    input_1 = layers.Input(shape=input_shape[1:], dtype="float32")
    input_2 = layers.Input(shape=input_shape[1:], dtype="float32")
    input_3 = layers.Input(shape=input_shape[1:], dtype="float32")

    # Embedding (unused; kept for reference)
    # embed_1 = layers.Embedding(input_dim=6624, output_dim=32,
    #                            input_length=input_shape[1], mask_zero=True)(input_1)
    # embed_2 = layers.Embedding(input_dim=6624, output_dim=32,
    #                            input_length=input_shape[1], mask_zero=True)(input_2)

    # Bi-LSTM: an independently weighted encoder per input branch
    bi_lstm_1 = layers.Bidirectional(layers.LSTM(16, return_sequences=True))(input_1)
    bi_lstm_2 = layers.Bidirectional(layers.LSTM(16, return_sequences=True))(input_2)
    bi_lstm_3 = layers.Bidirectional(layers.LSTM(16, return_sequences=True))(input_3)

    # Self-attention over each Bi-LSTM output sequence
    self_attention_1 = SeqSelfAttention(attention_activation='sigmoid')(bi_lstm_1)
    self_attention_2 = SeqSelfAttention(attention_activation='sigmoid')(bi_lstm_2)
    self_attention_3 = SeqSelfAttention(attention_activation='sigmoid')(bi_lstm_3)

    # Concat the three attended sequences along the feature axis
    concat = layers.concatenate([self_attention_1, self_attention_2, self_attention_3])

    # Dense + sigmoid: per-timestep score
    output = layers.Dense(output_shape[0], activation="sigmoid")(concat)

    # Mask-aware mean pooling over the time axis collapses the sequence
    # into a single score per sample
    output = MyAveragePooling1D(axis=1, name='output')(output)

    model = models.Model(inputs=[input_1, input_2, input_3], outputs=output)
    model.summary()
    return model
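

# ---------------------------------------------------------------------------
# Usage sketch (an assumption, not part of the original module): the shapes
# below are illustrative only. get_model() strips the batch axis itself, so
# input_shape is passed batch-first, e.g. (batch, timesteps, features), and
# output_shape[0] is the number of scores produced per timestep before
# pooling. binary_crossentropy is an assumed loss chosen to match the
# sigmoid output; the original training setup may differ.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    # Three inputs of shape (10, 60) per sample, one sigmoid score out.
    model = get_model(input_shape=(None, 10, 60), output_shape=(1,))
    model.compile(optimizer="adam", loss="binary_crossentropy",
                  metrics=["accuracy"])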