@@ -16,11 +16,11 @@ def build_model(want_answer_size, infact_answer_size):
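# Imports assumed by this hunk (they sit above the function in the full file
# and are not shown in the diff):
import keras
from keras.layers import Input, Embedding, GRU, Flatten, Dropout, Dense
from keras.models import Model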
# Two fixed-length token-ID inputs, one per answer sequence. The matching
# `inputs_want_answer` definition is missing from the hunk as extracted; it is
# reconstructed here from its uses in the embedding and Model calls below.
inputs_want_answer = Input(shape=(want_answer_size, ), name='want_answer_input')
inputs_infact_answer = Input(shape=(infact_answer_size, ), name='infact_answer_input')
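# Map each answer's token IDs into 128-dimensional embeddings; both tables use
# he_normal initialization and L2(0.01) regularization.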
x_1 = Embedding(want_answer_size, 128, name='want_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_want_answer)
x_2 = Embedding(infact_answer_size, 128, name='infact_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_infact_answer)
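# First recurrent pass over each branch (dropout 0.4). return_sequences=True
# keeps the per-timestep outputs for the stacked GRU that follows.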
x_1 = GRU(128, dropout=0.4, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_1)
x_2 = GRU(128, dropout=0.4, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_2)
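# Second recurrent pass with lighter dropout (0.2), still returning full
# sequences so the two branches can be merged step-wise below.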
x_1 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_1)
x_2 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_2)
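# Merge the two branches along the feature axis, then flatten the
# (timesteps, features) block into one vector for the dense head.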
x = keras.layers.concatenate([x_1, x_2])
x = Flatten()(x)
# Two stacked Dropout layers compose to an effective rate of about 0.58
# (keep rate 0.7 * 0.6 = 0.42); kept as in the original, though a single
# Dropout(0.58) would be the more idiomatic equivalent.
x = Dropout(0.3)(x)
x = Dropout(0.4)(x)
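# Small fully connected head ending in a two-way softmax, presumably a binary
# match / no-match decision between the wanted and the actual answer.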
x = Dense(64, activation='relu')(x)
predictions = Dense(2, activation='softmax')(x)
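# Assemble the two-input, single-output functional model.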
model = Model(inputs=[inputs_want_answer, inputs_infact_answer], outputs=predictions)
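A minimal usage sketch, assuming build_model goes on to return model (the
return sits outside this hunk) and that the inputs are padded integer-ID
sequences with one-hot binary labels; the sizes and dummy data below are
illustrative, not from the original.

import numpy as np

WANT, INFACT = 50, 50  # illustrative; here sequence length == vocabulary size
model = build_model(WANT, INFACT)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# Dummy batch: 32 padded ID sequences per input, one-hot labels over 2 classes.
x1 = np.random.randint(0, WANT, size=(32, WANT))
x2 = np.random.randint(0, INFACT, size=(32, INFACT))
y = keras.utils.to_categorical(np.random.randint(0, 2, size=32), num_classes=2)
model.fit([x1, x2], y, batch_size=8, epochs=1)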