move vocab_size into implementation (not user dependent)

2017-07-30 13:47:11 +02:00
parent d97785f646
commit ebaeb6b96e
6 changed files with 82 additions and 154 deletions


@@ -1,6 +1,5 @@
 import keras.backend as K
-import dataset
 from . import pauls_networks
 from . import renes_networks
@@ -9,7 +8,6 @@ def get_models_by_params(params: dict):
     # decomposing param section
     # mainly embedding model
     network_type = params.get("type")
-    vocab_size = len(dataset.get_character_dict()) + 1
     embedding_size = params.get("embedding_size")
     input_length = params.get("input_length")
     filter_embedding = params.get("filter_embedding")
@@ -26,8 +24,8 @@ def get_models_by_params(params: dict):
     dense_dim = params.get("dense_main")
     # create models
     networks = renes_networks if network_type == "rene" else pauls_networks
-    embedding_model = networks.get_embedding(vocab_size, embedding_size, input_length,
-                                             filter_embedding, kernel_embedding, hidden_embedding, drop_out=dropout)
+    embedding_model = networks.get_embedding(embedding_size, input_length, filter_embedding, kernel_embedding,
+                                             hidden_embedding, drop_out=dropout)
     predict_model = networks.get_model(dropout, flow_features, domain_features, window_size, domain_length,
                                        filter_main, kernel_main, dense_dim, embedding_model)
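
The hunks above only show the caller side: the vocabulary size is a property of the character encoding in dataset, not something the user-facing parameter dict should carry, so each get_embedding implementation now derives it itself. A minimal sketch of what that could look like in a network module such as pauls_networks; the vocab_size line is taken from the removed code, while the parameter names and layer stack are illustrative assumptions, not the repository's actual networks:

    # Sketch only: implementation-side counterpart to this diff.
    import dataset
    from keras.models import Sequential
    from keras.layers import Embedding, Conv1D, GlobalMaxPooling1D, Dense, Dropout


    def get_embedding(embedding_size, input_length, filter_size, kernel_size,
                      hidden_dims, drop_out=0.5):
        # derived inside the implementation instead of being passed in by the caller
        vocab_size = len(dataset.get_character_dict()) + 1
        model = Sequential()
        model.add(Embedding(vocab_size, embedding_size, input_length=input_length))
        model.add(Conv1D(filter_size, kernel_size, activation="relu"))
        model.add(GlobalMaxPooling1D())
        model.add(Dropout(drop_out))
        model.add(Dense(hidden_dims, activation="relu"))
        return model

Keeping vocab_size next to the embedding construction also keeps it consistent with the character dictionary used for encoding, which is why the dataset import moves out of the factory module and into the implementations.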