fix missing parameters, add flat network structure, make larger graphics
This commit is contained in:
parent: fbe6d6a584
commit: e2bf2dc90f
Makefile (2 changed lines)
@@ -66,6 +66,6 @@ hyper:
 	python3 main.py --mode hyperband --batch 64 --train data/rk_data.csv.gz

 clean:
-	rm -r results/test/test*
+	rm -r results/test/
 	rm data/rk_mini.csv.gz_raw.h5
 	rm data/rk_mini.csv.gz.h5
@@ -1,7 +1,6 @@
 import keras.backend as K

-from . import pauls_networks
-from . import renes_networks
+from . import flat_2, pauls_networks, renes_networks


 def get_models_by_params(params: dict):
@@ -27,6 +26,8 @@ def get_models_by_params(params: dict):
     # create models
     if network_depth == "small":
         networks = pauls_networks
+    elif network_depth == "flat":
+        networks = flat_2
     elif network_depth == "medium":
         networks = renes_networks
     else:
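For context, the new "flat" branch routes model construction to the flat_2 module introduced below. A minimal sketch of the dispatch in isolation, assuming the repository's models package is importable; the ValueError fallback is an assumption, since the else body is cut off in this hunk:

    from models import flat_2, pauls_networks, renes_networks

    def select_networks(network_depth: str):
        # mirrors the branch added in get_models_by_params
        if network_depth == "small":
            return pauls_networks
        elif network_depth == "flat":
            return flat_2
        elif network_depth == "medium":
            return renes_networks
        else:
            raise ValueError("network_depth must be small, flat or medium")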
models/flat_2.py (new file, 82 lines)
@@ -0,0 +1,82 @@
+from collections import namedtuple
+
+import keras
+from keras.activations import elu
+from keras.engine import Input, Model as KerasModel
+from keras.layers import Conv1D, Dense, Dropout, Embedding, GlobalAveragePooling1D, GlobalMaxPooling1D, TimeDistributed
+
+import dataset
+
+
+def selu(x):
+    """Scaled Exponential Linear Unit. (Klambauer et al., 2017)
+
+    # Arguments
+        x: A tensor or variable to compute the activation function for.
+
+    # References
+        - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
+
+    # copied from keras.io
+    """
+    alpha = 1.6732632423543772848170429916717
+    scale = 1.0507009873554804934193349852946
+    return scale * elu(x, alpha)
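SELU is just a scaled ELU with fixed constants; as a quick sanity check on the values above, a minimal NumPy sketch (not part of the commit — the code itself uses the Keras backend elu):

    import numpy as np

    # selu(x) = scale * elu(x, alpha), with the constants from the paper
    alpha = 1.6732632423543772848170429916717
    scale = 1.0507009873554804934193349852946

    def selu_np(x):
        return scale * np.where(x > 0, x, alpha * (np.exp(x) - 1))

    print(selu_np(np.array([-2.0, 0.0, 2.0])))  # approx [-1.52, 0.0, 2.10]

The flat_2.py listing continues: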
+
+
+Model = namedtuple("Model", ["in_domains", "in_flows", "out_client", "out_server"])
+
+
+def get_embedding(embedding_size, input_length, filter_size, kernel_size, hidden_dims, drop_out=0.5) -> KerasModel:
+    x = y = Input(shape=(input_length,))
+    y = Embedding(input_dim=dataset.get_vocab_size(), output_dim=embedding_size)(y)
+    y = Conv1D(filter_size,
+               kernel_size,
+               activation=selu)(y)
+    y = GlobalAveragePooling1D()(y)
+    y = Dense(hidden_dims, activation=selu)(y)
+    return KerasModel(x, y)
+
+
+def get_model(cnnDropout, flow_features, domain_features, window_size, domain_length, cnn_dims, kernel_size,
+              dense_dim, cnn, model_output="both") -> Model:
+    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
+    encoded = TimeDistributed(cnn, name="domain_cnn")(ipt_domains)
+    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
+    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
+    # CNN processing small slices of the flow window
+    y = Conv1D(cnn_dims,
+               kernel_size,
+               activation=selu,
+               input_shape=(window_size, domain_features + flow_features))(merged)
+    # remove temporal dimension by global max pooling
+    y = GlobalMaxPooling1D()(y)
+    y = Dropout(cnnDropout)(y)
+    y = Dense(dense_dim, activation=selu)(y)
+    out_client = Dense(1, activation='sigmoid', name="client")(y)
+    out_server = Dense(1, activation='sigmoid', name="server")(y)
+
+    return Model(ipt_domains, ipt_flows, out_client, out_server)
+
+
+def get_new_model(dropout, flow_features, domain_features, window_size, domain_length, cnn_dims, kernel_size,
+                  dense_dim, cnn, model_output="both") -> Model:
+    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
+    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
+    encoded = TimeDistributed(cnn, name="domain_cnn")(ipt_domains)
+    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
+    y = Dense(dense_dim, activation=selu)(merged)
+    out_server = Dense(1, activation="sigmoid", name="server")(y)
+    merged = keras.layers.concatenate([merged, y], -1)
+    # CNN processing small slices of the flow window
+    y = Conv1D(filters=cnn_dims,
+               kernel_size=kernel_size,
+               activation=selu,
+               padding="same",
+               input_shape=(window_size, domain_features + flow_features))(merged)
+    # remove temporal dimension by global max pooling
+    y = GlobalMaxPooling1D()(y)
+    y = Dropout(dropout)(y)
+    y = Dense(dense_dim,
+              activation=selu,
+              name="dense_client")(y)
+    out_client = Dense(1, activation='sigmoid', name="client")(y)
+
+    return Model(ipt_domains, ipt_flows, out_client, out_server)
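For orientation, a rough sketch of how these pieces compose. Every hyperparameter value below is an illustrative assumption, not taken from the repository; the only hard constraint is that hidden_dims of the embedding must equal the domain_features passed to get_model:

    import keras
    from models import flat_2

    # per-domain character CNN, applied to each domain in the window
    # via TimeDistributed inside get_model
    embedding = flat_2.get_embedding(embedding_size=64, input_length=40,
                                     filter_size=128, kernel_size=3,
                                     hidden_dims=512)

    m = flat_2.get_model(cnnDropout=0.5, flow_features=3, domain_features=512,
                         window_size=10, domain_length=40, cnn_dims=128,
                         kernel_size=3, dense_dim=512, cnn=embedding)

    # Model is a plain namedtuple; the trainable graph still has to be built:
    net = keras.models.Model(inputs=[m.in_domains, m.in_flows],
                             outputs=[m.out_client, m.out_server])
    net.compile(optimizer="adam", loss="binary_crossentropy")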
@@ -52,7 +52,8 @@ def get_model(cnnDropout, flow_features, domain_features, window_size, domain_le
     # CNN processing a small slides of flow windows
     y = Conv1D(cnn_dims,
                kernel_size,
-               activation='relu'
+               activation='relu',
+               input_shape=(window_size, domain_features + flow_features))(merged)
     # remove temporal dimension by global max pooling
     y = GlobalMaxPooling1D()(y)
     y = Dropout(cnnDropout)(y)
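This hunk is the "fix missing parameters" part of the commit title: without the trailing comma the Conv1D call was never closed, so the module could not even be imported; the added input_shape line completes the argument list.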
@@ -66,7 +66,7 @@ def get_new_model(dropout, flow_features, domain_features, window_size, domain_l
     ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
     encoded = TimeDistributed(cnn, name="domain_cnn")(ipt_domains)
     merged = keras.layers.concatenate([encoded, ipt_flows], -1)
-    y = Dense(dense_dim, activation="relu")(merged)
+    y = Dense(dense_dim, activation=selu)(merged)
     out_server = Dense(1, activation="sigmoid", name="server")(y)
     merged = keras.layers.concatenate([merged, y], -1)
     # CNN processing a small slides of flow windows
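Here the remaining relu in get_new_model is swapped for selu, so every hidden layer in that model uses the same self-normalizing activation as the new flat_2 networks.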
@@ -35,8 +35,10 @@ def plot_clf():
     plt.clf()


-def plot_save(path, dpi=600):
-    plt.savefig(path, dpi=dpi)
+def plot_save(path, dpi=300):
+    fig = plt.gcf()
+    fig.set_size_inches(18.5, 10.5)
+    fig.savefig(path, dpi=dpi)
     plt.close()
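This is the "make larger graphics" half of the commit: instead of relying on matplotlib's default figure size at 600 dpi, plots are now resized to 18.5 x 10.5 inches and saved at 300 dpi. A standalone sketch of the effect (the headless backend choice is an assumption for the demo):

    import matplotlib
    matplotlib.use("Agg")  # assumed: headless backend for the demo
    import matplotlib.pyplot as plt

    plt.plot([0, 1], [0, 1])
    fig = plt.gcf()
    fig.set_size_inches(18.5, 10.5)
    fig.savefig("demo.png", dpi=300)  # 18.5*300 x 10.5*300 = 5550 x 3150 px
    plt.close()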