From 1cad75d724b3f3671d510f9277c9db227d885a56 Mon Sep 17 00:00:00 2001
From: aolingwen <747620155@qq.com>
Date: Mon, 17 Jun 2019 09:57:38 +0800
Subject: [PATCH] Clean up code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 network.py | 6 +++---
 utils.py   | 7 -------
 2 files changed, 3 insertions(+), 10 deletions(-)

diff --git a/network.py b/network.py
index d4e4294..e4e5783 100644
--- a/network.py
+++ b/network.py
@@ -16,11 +16,11 @@ def build_model(want_answer_size, infact_answer_size):
     inputs_infact_answer = Input(shape=(infact_answer_size, ), name='infact_answer_input')
     x_1 = Embedding(want_answer_size, 128, name='want_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_want_answer)
     x_2 = Embedding(infact_answer_size, 128, name='infact_answer_embedding', embeddings_initializer='he_normal', embeddings_regularizer=keras.regularizers.l2(0.01))(inputs_infact_answer)
-    x_1 = GRU(128, dropout=0.4, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_1)
-    x_2 = GRU(128, dropout=0.4, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_2)
+    x_1 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_1)
+    x_2 = GRU(128, dropout=0.2, return_sequences=True, recurrent_initializer='he_normal', recurrent_regularizer=keras.regularizers.l2(0.01))(x_2)
     x = keras.layers.concatenate([x_1, x_2])
     x = Flatten()(x)
-    x = Dropout(0.3)(x)
+    x = Dropout(0.4)(x)
     x = Dense(64, activation='relu')(x)
     predictions = Dense(2, activation='softmax')(x)
     model = Model(inputs=[inputs_want_answer, inputs_infact_answer], outputs=predictions)
diff --git a/utils.py b/utils.py
index 63c3f88..b996d9a 100644
--- a/utils.py
+++ b/utils.py
@@ -1,13 +1,6 @@
 import jieba
-import pandas as pd
 from sortedcontainers import SortedSet
 import numpy as np
-from sklearn.model_selection import train_test_split
-from keras.layers import Dense, Embedding, Input, Flatten
-from keras.layers import LSTM, GRU, Dropout
-from keras.models import Model
-import keras
-from keras.utils import plot_model
 
 
 def stopwordslist(filepath):
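
Net effect of the patch: the two GRU layers' input dropout is lowered from 0.4 to 0.2, the Dropout after Flatten is raised from 0.3 to 0.4, and utils.py drops imports it no longer uses. Below is a minimal smoke-test sketch of the patched two-input model; the sequence/vocabulary sizes, compile settings, and random data are assumptions (not taken from the patch), and it presumes build_model() ends by returning the Model instance.

import numpy as np
from network import build_model

# Assumed sizes: in build_model the same value serves as both the Input length
# and the Embedding vocabulary size, so one number is used for each branch here.
want_answer_size, infact_answer_size = 30, 30
model = build_model(want_answer_size, infact_answer_size)

# Hypothetical training setup; the patch does not show how the model is compiled.
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# Random integer-encoded sequences and one-hot 2-class labels for a quick smoke test.
x_want = np.random.randint(0, want_answer_size, size=(8, want_answer_size))
x_infact = np.random.randint(0, infact_answer_size, size=(8, infact_answer_size))
y = np.eye(2)[np.random.randint(0, 2, size=8)]

model.fit([x_want, x_infact], y, epochs=1, batch_size=4)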