Tidy up the code

master
aolingwen 6 years ago
parent 00bed3325a
commit 1cad75d724

@@ -16,11 +16,11 @@ def build_model(want_answer_size, infact_answer_size):
     inputs_infact_answer = Input(shape=(infact_answer_size, ), name='infact_answer_input')
     x_1 = Embedding(want_answer_size, 128, name='want_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_want_answer)
     x_2 = Embedding(infact_answer_size, 128, name='infact_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_infact_answer)
-    x_1 = GRU(128, dropout=0.4, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_1)
-    x_2 = GRU(128, dropout=0.4, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_2)
+    x_1 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_1)
+    x_2 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_2)
     x = keras.layers.concatenate([x_1, x_2])
     x = Flatten()(x)
-    x = Dropout(0.3)(x)
+    x = Dropout(0.4)(x)
     x = Dense(64, activation='relu')(x)
     predictions = Dense(2, activation='softmax')(x)
     model = Model(inputs=[inputs_want_answer, inputs_infact_answer], outputs=predictions)
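
For reference, a minimal self-contained sketch of build_model as it reads after this commit. The inputs_want_answer Input line, the compile() settings and the return statement fall outside the hunk above and are assumptions, not part of the diff; the sketch also assumes the two input sequences are padded to the same length so the GRU outputs can be concatenated on the feature axis.

import keras
from keras.layers import Dense, Embedding, Input, Flatten, GRU, Dropout
from keras.models import Model

def build_model(want_answer_size, infact_answer_size):
    # two padded token-id sequences in, a 2-class softmax out
    inputs_want_answer = Input(shape=(want_answer_size, ), name='want_answer_input')      # assumed, not shown in the hunk
    inputs_infact_answer = Input(shape=(infact_answer_size, ), name='infact_answer_input')
    x_1 = Embedding(want_answer_size, 128, name='want_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_want_answer)
    x_2 = Embedding(infact_answer_size, 128, name='infact_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_infact_answer)
    # input dropout on both GRU branches lowered from 0.4 to 0.2 by this commit
    x_1 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_1)
    x_2 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_2)
    x = keras.layers.concatenate([x_1, x_2])   # (batch, timesteps, 256) if both branches share a length
    x = Flatten()(x)
    x = Dropout(0.4)(x)                        # raised from 0.3 by this commit
    x = Dense(64, activation='relu')(x)
    predictions = Dense(2, activation='softmax')(x)
    model = Model(inputs=[inputs_want_answer, inputs_infact_answer], outputs=predictions)
    # assumed training configuration (not part of the diff)
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return model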

@@ -1,13 +1,6 @@
import jieba
import pandas as pd
from sortedcontainers import SortedSet
import numpy as np
from sklearn.model_selection import train_test_split
from keras.layers import Dense, Embedding, Input, Flatten
from keras.layers import LSTM, GRU, Dropout
from keras.models import Model
import keras
from keras.utils import plot_model
def stopwordslist(filepath):
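
The hunk ends at the stopwordslist signature; its body and the exact set of removed imports are not shown in this diff. Purely as an illustrative sketch (not the repository's code), a stopword loader paired with jieba segmentation, which these imports suggest, typically looks like this; the segment helper and its parameters are hypothetical.

import jieba

def stopwordslist(filepath):
    # read one stopword per line into a Python list
    with open(filepath, 'r', encoding='utf-8') as f:
        return [line.strip() for line in f if line.strip()]

def segment(text, stopwords):
    # jieba.cut yields tokens; drop whitespace tokens and stopwords
    return [w for w in jieba.cut(text) if w.strip() and w not in stopwords]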
