ma_cisco_malware/models/networks.py

from collections import namedtuple
import keras
import keras.backend as K
import numpy as np
from keras.engine import Input, Model as KerasModel
from keras.engine.topology import Layer
from keras.layers import Conv1D, Dense, Dropout, Embedding, GlobalAveragePooling1D, GlobalMaxPooling1D, TimeDistributed
from keras.regularizers import Regularizer
import dataset  # project-local module; provides get_vocab_size() for the domain-character embedding

Model = namedtuple("Model", ["in_domains", "in_flows", "out_client", "out_server"])


def get_domain_embedding_model(embedding_size, input_length, filter_size, kernel_size, hidden_dims,
                               drop_out=0.5) -> KerasModel:
    x = y = Input(shape=(input_length,))
    y = Embedding(input_dim=dataset.get_vocab_size(), output_dim=embedding_size)(y)
    y = Conv1D(filter_size,
               kernel_size,
               activation='relu')(y)
    y = GlobalMaxPooling1D()(y)
    y = Dropout(drop_out)(y)
    y = Dense(hidden_dims, activation="relu")(y)
    return KerasModel(x, y)


def get_domain_embedding_model2(embedding_size, input_length, filter_size, kernel_size, hidden_dims,
                                drop_out=0.5) -> KerasModel:
    x = y = Input(shape=(input_length,))
    y = Embedding(input_dim=dataset.get_vocab_size(), output_dim=embedding_size)(y)
    y = Conv1D(filter_size,
               kernel_size,
               activation='relu')(y)
    y = Conv1D(filter_size,
               kernel_size,
               activation='relu')(y)
    y = Conv1D(filter_size,
               kernel_size,
               activation='relu')(y)
    y = GlobalAveragePooling1D()(y)
    y = Dense(hidden_dims, activation="relu")(y)
    return KerasModel(x, y)
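
# Usage sketch: either embedding model above can serve as the `cnn` argument of the
# window models below, where it is shared across all domains of a window through
# TimeDistributed. The parameter values here are hypothetical, not taken from this repo:
#
#   domain_cnn = get_domain_embedding_model(embedding_size=128, input_length=40,
#                                           filter_size=128, kernel_size=3,
#                                           hidden_dims=128)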


def get_final_model(cnnDropout, flow_features, window_size, domain_length, cnn_dims, kernel_size,
                    dense_dim, cnn) -> Model:
    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
    encoded = TimeDistributed(cnn, name="domain_cnn")(ipt_domains)
    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
    # CNN processing small slices of the flow window
    y = Conv1D(cnn_dims,
               kernel_size,
               activation='relu')(merged)
    # remove the temporal dimension by global max pooling
    y = GlobalMaxPooling1D()(y)
    y = Dropout(cnnDropout)(y)
    y = Dense(dense_dim, activation='relu')(y)
    out_client = Dense(1, activation='sigmoid', name="client")(y)
    out_server = Dense(1, activation='sigmoid', name="server")(y)
    return Model(ipt_domains, ipt_flows, out_client, out_server)
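
# Minimal sketch of turning the returned namedtuple into a trainable Keras model;
# the optimizer, losses and parameter values here are assumptions, not taken from this file:
#
#   m = get_final_model(cnnDropout=0.5, flow_features=3, window_size=10,
#                       domain_length=40, cnn_dims=128, kernel_size=3,
#                       dense_dim=128, cnn=domain_cnn)
#   keras_model = KerasModel(inputs=[m.in_domains, m.in_flows],
#                            outputs=[m.out_client, m.out_server])
#   keras_model.compile(optimizer="adam", loss="binary_crossentropy")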


def get_inter_model(dropout, flow_features, window_size, domain_length, cnn_dims, kernel_size,
                    dense_dim, cnn) -> Model:
    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
    encoded = TimeDistributed(cnn, name="domain_cnn")(ipt_domains)
    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
    y = Dense(dense_dim,
              activation="relu",
              name="dense_server")(merged)
    out_server = Dense(1, activation="sigmoid", name="server")(y)
    merged = keras.layers.concatenate([merged, y], -1)
    # CNN processing small slices of the flow window
    y = Conv1D(cnn_dims,
               kernel_size,
               activation='relu')(merged)
    # remove the temporal dimension by global max pooling
    y = GlobalMaxPooling1D()(y)
    y = Dropout(dropout)(y)
    y = Dense(dense_dim,
              activation='relu',
              name="dense_client")(y)
    out_client = Dense(1, activation='sigmoid', name="client")(y)
    return Model(ipt_domains, ipt_flows, out_client, out_server)


def get_server_model(flow_features, domain_length, dense_dim, cnn) -> KerasModel:
    ipt_domains = Input(shape=(domain_length,), name="ipt_domains")
    ipt_flows = Input(shape=(flow_features,), name="ipt_flows")
    encoded = cnn(ipt_domains)
    cnn.name = "domain_cnn"
    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
    y = Dense(dense_dim,
              activation="relu",
              name="dense_server")(merged)
    out_server = Dense(1, activation="sigmoid", name="server")(y)
    return KerasModel(inputs=[ipt_domains, ipt_flows], outputs=out_server)


def get_long_model(dropout, flow_features, window_size, domain_length, cnn_dims, kernel_size,
                   dense_dim, cnn) -> Model:
    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
    encoded = TimeDistributed(cnn, name="domain_cnn")(ipt_domains)
    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
    y = Conv1D(cnn_dims,
               kernel_size,
               activation='relu', name="conv_server")(merged)
    # remove the temporal dimension by global max pooling
    y = GlobalMaxPooling1D()(y)
    y = Dropout(dropout)(y)
    y = Dense(dense_dim,
              activation="relu",
              name="dense_server")(y)
    out_server = Dense(1, activation="sigmoid", name="server")(y)
    # CNN processing small slices of the flow window
    y = Conv1D(cnn_dims,
               kernel_size,
               activation='relu', name="conv_client")(merged)
    # remove the temporal dimension by global max pooling
    y = GlobalMaxPooling1D()(y)
    y = Dropout(dropout)(y)
    y = Dense(dense_dim,
              activation='relu',
              name="dense_client")(y)
    out_client = Dense(1, activation='sigmoid', name="client")(y)
    return Model(ipt_domains, ipt_flows, out_client, out_server)


class CrossStitch2(Layer):
    """Cross-stitch unit for two tasks: returns a learned linear combination of its two inputs."""

    def __init__(self, **kwargs):
        super(CrossStitch2, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        self.s = self.add_weight(name='cross-stitch-s',
                                 shape=(1,),
                                 initializer='uniform',
                                 trainable=True)
        self.d = self.add_weight(name='cross-stitch-d',
                                 shape=(1,),
                                 initializer='uniform',
                                 trainable=True)
        super(CrossStitch2, self).build(input_shape)

    def call(self, xs):
        x1, x2 = xs
        out = x1 * self.s + x2 * self.d
        return out

    def compute_output_shape(self, input_shape):
        return input_shape[0]


class CrossStitchMix2(Layer):
    """Cross-stitch variant that concatenates the two scaled inputs instead of summing them."""

    def __init__(self, **kwargs):
        super(CrossStitchMix2, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        self.s = self.add_weight(name='cross-stitch-s',
                                 shape=(1,),
                                 initializer='uniform',
                                 trainable=True)
        self.d = self.add_weight(name='cross-stitch-d',
                                 shape=(1,),
                                 initializer='uniform',
                                 trainable=True)
        super(CrossStitchMix2, self).build(input_shape)

    def call(self, xs):
        x1, x2 = xs
        out = K.concatenate((x1 * self.s, x2 * self.d), axis=-1)
        return out

    def compute_output_shape(self, input_shape):
        return (input_shape[0][0], input_shape[0][1] + input_shape[1][1])


class L21(Regularizer):
    """Regularizer for L21 regularization.

    Found at: https://bitbucket.org/ispamm/group-lasso-for-neural-networks-tensorflow-keras

    # Arguments
        C: Float; L21 regularization factor.
    """

    def __init__(self, C=0.):
        self.C = K.cast_to_floatx(C)

    def __call__(self, x):
        const_coeff = np.sqrt(K.int_shape(x)[1])
        return self.C * const_coeff * K.sum(K.sqrt(K.sum(K.square(x), axis=1)))

    def get_config(self):
        return {'C': float(self.C)}
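
# Minimal sketch (the layer and factor below are hypothetical, not part of this module):
# L21 is attached like any built-in Keras regularizer and penalises whole rows of the
# kernel matrix as groups (group lasso), so entire rows can be driven to zero, e.g.
#
#   y = Dense(128, activation="relu", kernel_regularizer=L21(0.001))(y)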


def get_sluice_model(dropout, flow_features, window_size, domain_length, cnn_dims, kernel_size,
                     dense_dim, cnn) -> Model:
    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
    encoded = TimeDistributed(cnn, name="domain_cnn")(ipt_domains)
    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
    y1 = Conv1D(cnn_dims,
                kernel_size,
                activation='relu', name="conv_server")(merged)
    y1 = GlobalMaxPooling1D()(y1)
    y2 = Conv1D(cnn_dims,
                kernel_size,
                activation='relu', name="conv_client")(merged)
    y2 = GlobalMaxPooling1D()(y2)
    c11 = CrossStitch2()([y1, y2])
    c12 = CrossStitch2()([y1, y2])
    y1 = Dropout(dropout)(c11)
    y1 = Dense(dense_dim,
               activation="relu",
               name="dense_server")(y1)
    y2 = Dropout(dropout)(c12)
    y2 = Dense(dense_dim,
               activation='relu',
               name="dense_client")(y2)
    c21 = CrossStitch2()([y1, y2])
    c22 = CrossStitch2()([y1, y2])
    beta1 = CrossStitchMix2()([c11, c21])
    beta2 = CrossStitchMix2()([c12, c22])
    out_server = Dense(1, activation="sigmoid", name="server")(beta1)
    out_client = Dense(1, activation='sigmoid', name="client")(beta2)
    return Model(ipt_domains, ipt_flows, out_client, out_server)