refactor server training into separate file; add additional info to hyperband log

2017-10-19 17:37:29 +02:00
parent d1da3d6ca3
commit a860f0da34
4 changed files with 137 additions and 14 deletions

View File

@@ -10,11 +10,11 @@ Model = namedtuple("Model", ["in_domains", "in_flows", "out_client", "out_server
 def get_embedding(embedding_size, input_length, filter_size, kernel_size, hidden_dims, drop_out=0.5):
-    x = y = Input(shape=(input_length,))
-    y = Embedding(input_dim=dataset.get_vocab_size(), output_dim=embedding_size)(y)
-    y = Conv1D(filter_size, kernel_size=kernel_size, activation="relu")(y)
-    y = Conv1D(filter_size, kernel_size=3, activation="relu")(y)
-    y = Conv1D(filter_size, kernel_size=3, activation="relu")(y)
+    x = Input(shape=(input_length,))
+    y = Embedding(input_dim=dataset.get_vocab_size(), output_dim=embedding_size)(x)
+    y = Conv1D(filter_size, kernel_size=kernel_size, activation="relu", padding="same")(y)
+    y = Conv1D(filter_size, kernel_size=3, activation="relu", padding="same")(y)
+    y = Conv1D(filter_size, kernel_size=3, activation="relu", padding="same")(y)
     y = GlobalAveragePooling1D()(y)
     y = Dense(hidden_dims, activation="relu")(y)
     return KerasModel(x, y)
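
Taken together, this hunk untangles the aliased x = y = Input(...) so the Embedding is applied to the named input tensor x, and adds padding="same" so the stacked convolutions keep the sequence length. Below is a minimal sketch of how the helper reads after the change; the imports are an assumption modelled on the second file in this commit (with GlobalAveragePooling1D instead of GlobalMaxPooling1D), and dataset.get_vocab_size() is taken to come from the project's dataset module.

# Sketch only: imports are assumed from the diff context, they are not part of this hunk.
from keras.engine import Input, Model as KerasModel
from keras.layers import Conv1D, Dense, Embedding, GlobalAveragePooling1D

import dataset


def get_embedding(embedding_size, input_length, filter_size, kernel_size, hidden_dims, drop_out=0.5):
    # x is the symbolic input tensor; y threads the layer outputs.
    x = Input(shape=(input_length,))
    y = Embedding(input_dim=dataset.get_vocab_size(), output_dim=embedding_size)(x)
    # padding="same" preserves the sequence length through the convolution stack.
    y = Conv1D(filter_size, kernel_size=kernel_size, activation="relu", padding="same")(y)
    y = Conv1D(filter_size, kernel_size=3, activation="relu", padding="same")(y)
    y = Conv1D(filter_size, kernel_size=3, activation="relu", padding="same")(y)
    y = GlobalAveragePooling1D()(y)
    y = Dense(hidden_dims, activation="relu")(y)
    return KerasModel(x, y)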

View File

@@ -2,7 +2,7 @@ from collections import namedtuple
 import keras
 from keras.engine import Input, Model as KerasModel
-from keras.layers import Activation, Conv1D, Dense, Dropout, Embedding, GlobalMaxPooling1D, TimeDistributed
+from keras.layers import Conv1D, Dense, Dropout, Embedding, GlobalMaxPooling1D, TimeDistributed
 import dataset
@@ -38,8 +38,7 @@ def get_embedding(embedding_size, input_length, filter_size, kernel_size, hidden
                activation='relu')(y)
     y = GlobalMaxPooling1D()(y)
     y = Dropout(drop_out)(y)
-    y = Dense(hidden_dims)(y)
-    y = Activation('relu')(y)
+    y = Dense(hidden_dims, activation="relu")(y)
     return KerasModel(x, y)
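
In this second file, the Dense layer followed by a separate Activation('relu') is collapsed into a single Dense(..., activation="relu") call, which is why the Activation import is dropped in the hunk above. A sketch of this max-pooling variant after the commit follows; only the import line and the Dense/Activation merge are taken from the hunks, while the Embedding/Conv1D head is an assumption modelled on the first file.

# Sketch only: the function head is assumed; just the import line and the tail match the hunks above.
from keras.engine import Input, Model as KerasModel
from keras.layers import Conv1D, Dense, Dropout, Embedding, GlobalMaxPooling1D

import dataset


def get_embedding(embedding_size, input_length, filter_size, kernel_size, hidden_dims, drop_out=0.5):
    x = Input(shape=(input_length,))
    y = Embedding(input_dim=dataset.get_vocab_size(), output_dim=embedding_size)(x)
    y = Conv1D(filter_size, kernel_size=kernel_size,
               activation='relu')(y)
    y = GlobalMaxPooling1D()(y)
    y = Dropout(drop_out)(y)
    # Dense + Activation('relu') merged into one call, so the Activation
    # import at the top of the file is no longer needed.
    y = Dense(hidden_dims, activation="relu")(y)
    return KerasModel(x, y)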