2017-07-12 10:25:55 +02:00
|
|
|
import json
|
|
|
|
import logging
|
2017-07-09 23:58:08 +02:00
|
|
|
import os
|
2017-07-03 13:48:12 +02:00
|
|
|
|
2017-07-12 10:25:55 +02:00
|
|
|
import numpy as np
|
2017-07-14 14:58:17 +02:00
|
|
|
import pandas as pd
|
|
|
|
import tensorflow as tf
|
2017-09-08 19:10:23 +02:00
|
|
|
from keras.callbacks import CSVLogger, EarlyStopping, ModelCheckpoint
|
|
|
|
from keras.models import Model, load_model
|
2017-06-30 10:12:20 +02:00
|
|
|
|
2017-07-12 10:25:55 +02:00
|
|
|
import arguments
|
2017-06-30 10:12:20 +02:00
|
|
|
import dataset
|
2017-07-07 16:48:10 +02:00
|
|
|
import hyperband
|
2017-06-30 10:12:20 +02:00
|
|
|
import models
|
2017-07-12 10:25:55 +02:00
|
|
|
# create logger
|
2017-07-14 14:58:17 +02:00
|
|
|
import visualize
|
2017-09-01 10:42:26 +02:00
|
|
|
from arguments import get_model_args
|
2017-09-08 19:10:23 +02:00
|
|
|
from utils import exists_or_make_path, get_custom_class_weights
|
2017-07-14 14:58:17 +02:00
|
|
|
|
2017-07-12 10:25:55 +02:00
|
|
|
# create logger
logger = logging.getLogger('logger')
logger.setLevel(logging.DEBUG)

# Log to both the console and "info.log" with the same format at DEBUG level.
# (The original configured each handler with duplicated code and reused the
# same variable name for both; this loop is behaviorally identical.)
_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
for _handler in (logging.StreamHandler(), logging.FileHandler("info.log")):
    _handler.setLevel(logging.DEBUG)
    _handler.setFormatter(_formatter)
    logger.addHandler(_handler)
|
2017-07-05 17:37:08 +02:00
|
|
|
|
2017-07-14 14:58:17 +02:00
|
|
|
# Parse command line arguments once at import time; every main_* entry point
# below reads this module-level object.
args = arguments.parse()

if args.gpu:
    # TF1-style session configuration: log device placement for debugging,
    # cap GPU memory at 50% of the card and let the allocation grow lazily.
    config = tf.ConfigProto(log_device_placement=True)
    config.gpu_options.per_process_gpu_memory_fraction = 0.5
    config.gpu_options.allow_growth = True
    # NOTE(review): the session is created for its side effect on the default
    # GPU configuration; the handle itself is never used again — presumably
    # Keras picks up the settings via the default session. TODO confirm.
    session = tf.Session(config=config)
|
2017-06-30 10:42:21 +02:00
|
|
|
|
2017-07-30 13:47:11 +02:00
|
|
|
# default parameter
# Hyper-parameter set used by main_train() when no explicit parameter dict is
# passed in; values come from the parsed command line arguments.
PARAMS = {
    "type": args.model_type,
    "depth": args.model_depth,
    # "batch_size": 64,
    "window_size": args.window,
    "domain_length": args.domain_length,
    "flow_features": 3,
    #
    'dropout': 0.5,  # currently fix
    'domain_features': args.domain_embedding,
    'embedding_size': args.embedding,
    # NOTE: a second, duplicate 'flow_features': 3 entry was removed here;
    # dict literals keep only the last occurrence, so behavior is unchanged.
    'filter_embedding': args.filter_embedding,
    'dense_embedding': args.dense_embedding,
    'kernel_embedding': args.kernel_embedding,
    'filter_main': args.filter_main,
    'dense_main': args.dense_main,
    'kernel_main': args.kernel_main,
    'input_length': 40,
    'model_output': args.model_output
}
|
|
|
|
|
2017-06-30 10:42:21 +02:00
|
|
|
|
2017-09-08 13:55:13 +02:00
|
|
|
def create_model(model, output_type):
    """Wrap a raw network definition in a keras Model with the chosen outputs.

    :param model: network object exposing ``in_domains``/``in_flows`` inputs
                  and ``out_client``/``out_server`` output tensors
    :param output_type: one of ``"both"``, ``"client"`` or ``"server"``
    :return: keras Model ready to be compiled
    :raises ValueError: if ``output_type`` is not recognized
    """
    if output_type == "both":
        return Model(inputs=[model.in_domains, model.in_flows], outputs=(model.out_client, model.out_server))
    elif output_type == "client":
        return Model(inputs=[model.in_domains, model.in_flows], outputs=(model.out_client,))
    elif output_type == "server":
        # generalization: main_train already builds server-only label lists,
        # so a server-only model must be constructible as well
        return Model(inputs=[model.in_domains, model.in_flows], outputs=(model.out_server,))
    else:
        # ValueError (a subclass of Exception) for consistency with the label
        # selection in main_train; existing broad handlers still catch it
        raise ValueError("unknown model output")
|
|
|
|
|
|
|
|
|
2017-07-08 11:53:03 +02:00
|
|
|
def main_paul_best():
    """Train using Paul's best known network configuration."""
    main_train(models.pauls_networks.best_config)
|
2017-07-08 11:53:03 +02:00
|
|
|
|
|
|
|
|
2017-07-07 16:48:10 +02:00
|
|
|
def main_hyperband():
    """Run a hyperband search over the model hyper-parameter grid and write
    all evaluation results to ``hyperband.json``."""
    params = {
        # static params
        "type": ["paul"],
        "batch_size": [args.batch_size],
        "window_size": [10],
        "domain_length": [40],
        "flow_features": [3],
        "input_length": [40],
        # model params
        "embedding_size": [8, 16, 32, 64, 128, 256],
        "filter_embedding": [8, 16, 32, 64, 128, 256],
        "kernel_embedding": [1, 3, 5, 7, 9],
        "hidden_embedding": [8, 16, 32, 64, 128, 256],
        "dropout": [0.5],
        "domain_features": [8, 16, 32, 64, 128, 256],
        "filter_main": [8, 16, 32, 64, 128, 256],
        "kernels_main": [1, 3, 5, 7, 9],
        "dense_main": [8, 16, 32, 64, 128, 256],
    }

    logger.info("create training dataset")
    domain_tr, flow_tr, name_tr, client_tr, server_tr = dataset.load_or_generate_h5data(args.train_h5data,
                                                                                        args.train_data,
                                                                                        args.domain_length, args.window)
    hp = hyperband.Hyperband(params,
                            [domain_tr, flow_tr],
                            [client_tr, server_tr])
    results = hp.run()
    # BUG FIX: open() defaulted to read mode ("r"), so json.dump failed and
    # the handle was never closed; open for writing and close deterministically.
    with open("hyperband.json", "w") as result_file:
        json.dump(results, result_file)
|
2017-07-07 16:48:10 +02:00
|
|
|
|
|
|
|
|
2017-07-30 14:07:39 +02:00
|
|
|
def main_train(param=None):
    """Train the classifier on the training set and save model + embedding.

    :param param: optional hyper-parameter dict; falls back to the
                  module-level PARAMS when falsy.

    Two training regimes exist, selected by ``args.model_type``:
    "staggered" trains the server output first, then freezes the server
    layers and trains the client output; every other type trains all
    selected outputs jointly in a single fit.
    """
    logger.info(f"Create model path {args.model_path}")
    exists_or_make_path(args.model_path)
    logger.info(f"Use command line arguments: {args}")

    domain_tr, flow_tr, name_tr, client_tr, server_windows_tr = dataset.load_or_generate_h5data(args.train_h5data,
                                                                                                args.train_data,
                                                                                                args.domain_length,
                                                                                                args.window)
    logger.info("define callbacks")
    callbacks = []
    # keep only the best model (by validation loss) on disk
    callbacks.append(ModelCheckpoint(filepath=args.clf_model,
                                     monitor='val_loss',
                                     verbose=False,
                                     save_best_only=True))
    callbacks.append(CSVLogger(args.train_log))
    logger.info(f"Use early stopping: {args.stop_early}")
    if args.stop_early:
        callbacks.append(EarlyStopping(monitor='val_loss',
                                       patience=5,
                                       verbose=False))
    custom_metrics = models.get_metric_functions()

    # collapse the per-window server labels to one label per sample
    # (assumes axis 1 is the window dimension — TODO confirm)
    server_tr = np.max(server_windows_tr, axis=1)

    if args.class_weights:
        logger.info("class weights: compute custom weights")
        # NOTE(review): .value suggests client_tr is an h5py dataset being
        # materialized here — confirm against dataset.load_or_generate_h5data
        custom_class_weights = get_custom_class_weights(client_tr.value, server_tr)
        logger.info(custom_class_weights)
    else:
        logger.info("class weights: set default")
        custom_class_weights = None

    logger.info(f"select model: {args.model_type}")
    if args.model_type == "staggered":
        if not param:
            param = PARAMS
        logger.info(f"Generator model with params: {param}")
        embedding, model, new_model = models.get_models_by_params(param)

        model = create_model(new_model, args.model_output)

        # staggered training uses per-window server labels, expanded to
        # match the model's (samples, windows, 1) output shape
        server_tr = np.expand_dims(server_windows_tr, 2)
        logger.info("compile and train model")
        embedding.summary()
        model.summary()
        logger.info(model.get_config())

        # phase 1: train only the server output (client loss weight 0)
        model.compile(optimizer='adam',
                      loss='binary_crossentropy',
                      loss_weights={"client": 0.0, "server": 1.0},
                      metrics=['accuracy'] + custom_metrics)

        model.fit({"ipt_domains": domain_tr, "ipt_flows": flow_tr},
                  {"client": client_tr, "server": server_tr},
                  batch_size=args.batch_size,
                  epochs=args.epochs,
                  shuffle=True,
                  validation_split=0.2,
                  class_weight=custom_class_weights)

        # phase 2: freeze the server branch, then train only the client
        # output (loss weights swapped); recompile is required for the
        # trainable flags to take effect
        model.get_layer("dense_server").trainable = False
        model.get_layer("server").trainable = False
        model.compile(optimizer='adam',
                      loss='binary_crossentropy',
                      loss_weights={"client": 1.0, "server": 0.0},
                      metrics=['accuracy'] + custom_metrics)

        model.summary()
        model.fit({"ipt_domains": domain_tr, "ipt_flows": flow_tr},
                  {"client": client_tr, "server": server_tr},
                  batch_size=args.batch_size,
                  epochs=args.epochs,
                  callbacks=callbacks,
                  shuffle=True,
                  validation_split=0.2,
                  class_weight=custom_class_weights)
    else:
        if not param:
            param = PARAMS
        logger.info(f"Generator model with params: {param}")
        embedding, model, new_model = models.get_models_by_params(param)

        model = create_model(model, args.model_output)
        new_model = create_model(new_model, args.model_output)

        if args.model_type == "inter":
            # "inter" predicts per-window server labels with the new model
            server_tr = np.expand_dims(server_windows_tr, 2)
            model = new_model
        logger.info("compile and train model")
        embedding.summary()
        model.summary()
        logger.info(model.get_config())
        model.compile(optimizer='adam',
                      loss='binary_crossentropy',
                      metrics=['accuracy'] + custom_metrics)

        # label list must match the outputs chosen in create_model
        if args.model_output == "both":
            labels = [client_tr, server_tr]
        elif args.model_output == "client":
            labels = [client_tr]
        elif args.model_output == "server":
            labels = [server_tr]
        else:
            raise ValueError("unknown model output")

        model.fit([domain_tr, flow_tr],
                  labels,
                  batch_size=args.batch_size,
                  epochs=args.epochs,
                  callbacks=callbacks,
                  shuffle=True,
                  validation_split=0.2,
                  class_weight=custom_class_weights)
    logger.info("save embedding")
    embedding.save(args.embedding_model)
|
2017-07-07 16:48:10 +02:00
|
|
|
|
2017-07-05 21:19:19 +02:00
|
|
|
|
2017-07-06 16:27:47 +02:00
|
|
|
def main_test():
    """Run each configured classifier over the test set and persist its
    predictions (client/server scores plus domain embeddings)."""
    logger.info("start test: load data")
    domain_val, flow_val, _, _, _, _ = dataset.load_or_generate_raw_h5data(args.test_h5data,
                                                                           args.test_data,
                                                                           args.domain_length,
                                                                           args.window)
    domain_encs, _ = dataset.load_or_generate_domains(args.test_data, args.domain_length)

    for model_args in get_model_args(args):
        logger.info(f"process model {model_args['model_path']}")
        clf_model = load_model(model_args["clf_model"], custom_objects=models.get_metrics())

        pred = clf_model.predict([domain_val, flow_val],
                                 batch_size=args.batch_size,
                                 verbose=1)

        # store predictions under the keys main_visualization expects
        results = {}
        if args.model_output == "both":
            results["client_pred"], results["server_pred"] = pred
        elif args.model_output == "client":
            results["client_pred"] = pred
        else:
            results["server_pred"] = pred

        embd_model = load_model(model_args["embedding_model"])
        results["domain_embds"] = embd_model.predict(domain_encs, batch_size=args.batch_size, verbose=1)

        dataset.save_predictions(model_args["model_path"], results)
|
2017-07-29 19:42:36 +02:00
|
|
|
|
|
|
|
|
2017-07-07 08:43:16 +02:00
|
|
|
def main_visualization():
    """Produce all evaluation plots for a single trained model: PR/ROC
    curves at window and user level, confusion matrices, training curves,
    a model diagram and the domain-embedding visualization; "paul" baseline
    curves are overlaid from results/paul/ for comparison."""
    _, _, name_val, hits_vt, hits_trusted, server_val = dataset.load_or_generate_raw_h5data(args.test_h5data,
                                                                                            args.test_data,
                                                                                            args.domain_length,
                                                                                            args.window)

    # predictions previously written by main_test
    results = dataset.load_predictions(args.model_path)
    df = pd.DataFrame(data={
        "names": name_val, "client_pred": results["client_pred"].flatten(),
        "hits_vt": hits_vt, "hits_trusted": hits_trusted
    })
    # ground truth: flagged by VirusTotal or >= 3 trusted hits
    df["client_val"] = np.logical_or(df.hits_vt == 1.0, df.hits_trusted >= 3)
    # user-level view: aggregate windows by user name, taking the max score
    df_user = df.groupby(df.names).max()

    # baseline predictions ("paul") in the same frame layout
    paul = dataset.load_predictions("results/paul/")
    df_paul = pd.DataFrame(data={
        "names": paul["testNames"].flatten(), "client_pred": paul["testScores"].flatten(),
        "hits_vt": paul["testLabel"].flatten(), "hits_trusted": paul["testHits"].flatten()
    })
    df_paul["client_val"] = np.logical_or(df_paul.hits_vt == 1.0, df_paul.hits_trusted >= 3)
    df_paul_user = df_paul.groupby(df_paul.names).max()

    logger.info("plot model")
    model = load_model(args.clf_model, custom_objects=models.get_metrics())
    visualize.plot_model_as(model, os.path.join(args.model_path, "model.png"))

    logger.info("plot training curve")
    logs = pd.read_csv(args.train_log)
    # single-output logs have plain "acc" columns; dual-output logs prefix
    # columns with the output name
    if "acc" in logs.keys():
        visualize.plot_training_curve(logs, "", "{}/client_train.png".format(args.model_path))
    elif "client_acc" in logs.keys() and "server_acc" in logs.keys():
        visualize.plot_training_curve(logs, "client_", "{}/client_train.png".format(args.model_path))
        visualize.plot_training_curve(logs, "server_", "{}/server_train.png".format(args.model_path))
    else:
        logger.warning("Error while plotting training curves")

    # each figure follows the same sequence: plot_clf (new figure), one or
    # more curves, legend, save — the call order is significant
    logger.info("plot pr curve")
    visualize.plot_clf()
    visualize.plot_precision_recall(df.client_val.as_matrix(), df.client_pred.as_matrix(), args.model_path)
    visualize.plot_precision_recall(df_paul.client_val.as_matrix(), df_paul.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save("{}/window_client_prc.png".format(args.model_path))

    logger.info("plot roc curve")
    visualize.plot_clf()
    visualize.plot_roc_curve(df.client_val.as_matrix(), df.client_pred.as_matrix(), args.model_path)
    visualize.plot_roc_curve(df_paul.client_val.as_matrix(), df_paul.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save("{}/window_client_roc.png".format(args.model_path))

    visualize.plot_clf()
    visualize.plot_precision_recall(df_user.client_val.as_matrix(), df_user.client_pred.as_matrix(), args.model_path)
    visualize.plot_precision_recall(df_paul_user.client_val.as_matrix(), df_paul_user.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save("{}/user_client_prc.png".format(args.model_path))

    visualize.plot_clf()
    visualize.plot_roc_curve(df_user.client_val.as_matrix(), df_user.client_pred.as_matrix(), args.model_path)
    visualize.plot_roc_curve(df_paul_user.client_val.as_matrix(), df_paul_user.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save("{}/user_client_roc.png".format(args.model_path))

    # confusion matrices from thresholded (rounded) scores
    visualize.plot_confusion_matrix(df.client_val.as_matrix(), df.client_pred.as_matrix().round(),
                                    "{}/client_cov.png".format(args.model_path),
                                    normalize=False, title="Client Confusion Matrix")
    visualize.plot_confusion_matrix(df_user.client_val.as_matrix(), df_user.client_pred.as_matrix().round(),
                                    "{}/user_cov.png".format(args.model_path),
                                    normalize=False, title="User Confusion Matrix")
    logger.info("visualize embedding")
    domain_encs, labels = dataset.load_or_generate_domains(args.test_data, args.domain_length)
    domain_embedding = results["domain_embds"]
    visualize.plot_embedding(domain_embedding, labels, path="{}/embd.png".format(args.model_path))
|
2017-07-07 08:43:16 +02:00
|
|
|
|
|
|
|
|
2017-09-01 10:42:26 +02:00
|
|
|
def main_visualize_all():
    """Overlay PR/ROC curves (window and user level) for every configured
    model in one figure per metric, with the "paul" baseline added to each."""
    _, _, name_val, hits_vt, hits_trusted, server_val = dataset.load_or_generate_raw_h5data(args.test_h5data,
                                                                                            args.test_data,
                                                                                            args.domain_length,
                                                                                            args.window)

    def load_df(path):
        # load one model's saved predictions and join them with the shared
        # test-set ground truth
        res = dataset.load_predictions(path)
        res = pd.DataFrame(data={
            "names": name_val, "client_pred": res["client_pred"].flatten(),
            "hits_vt": hits_vt, "hits_trusted": hits_trusted
        })
        res["client_val"] = np.logical_or(res.hits_vt == 1.0, res.hits_trusted >= 3)
        return res

    # baseline predictions ("paul") in the same frame layout
    paul = dataset.load_predictions("results/paul/")
    df_paul = pd.DataFrame(data={
        "names": paul["testNames"].flatten(), "client_pred": paul["testScores"].flatten(),
        "hits_vt": paul["testLabel"].flatten(), "hits_trusted": paul["testHits"].flatten()
    })
    df_paul["client_val"] = np.logical_or(df_paul.hits_vt == 1.0, df_paul.hits_trusted >= 3)
    df_paul_user = df_paul.groupby(df_paul.names).max()

    # each figure: plot_clf (new figure), one curve per model, one baseline
    # curve, legend, save. NOTE(review): the baseline plot is placed after
    # the model loop (drawn once per figure) — indentation was lost in the
    # source dump; confirm against version control.
    logger.info("plot pr curves")
    visualize.plot_clf()
    for model_args in get_model_args(args):
        df = load_df(model_args["model_path"])
        visualize.plot_precision_recall(df.client_val.as_matrix(), df.client_pred.as_matrix(), model_args["model_name"])
    visualize.plot_precision_recall(df_paul.client_val.as_matrix(), df_paul.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save(f"{args.output_prefix}_window_client_prc.png")

    logger.info("plot roc curves")
    visualize.plot_clf()
    for model_args in get_model_args(args):
        df = load_df(model_args["model_path"])
        visualize.plot_roc_curve(df.client_val.as_matrix(), df.client_pred.as_matrix(), model_args["model_name"])
    visualize.plot_roc_curve(df_paul.client_val.as_matrix(), df_paul.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save(f"{args.output_prefix}_window_client_roc.png")

    logger.info("plot user pr curves")
    visualize.plot_clf()
    for model_args in get_model_args(args):
        df = load_df(model_args["model_path"])
        # user-level view: aggregate windows by user name, taking the max
        df = df.groupby(df.names).max()
        visualize.plot_precision_recall(df.client_val.as_matrix(), df.client_pred.as_matrix(), model_args["model_name"])
    visualize.plot_precision_recall(df_paul_user.client_val.as_matrix(), df_paul_user.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save(f"{args.output_prefix}_user_client_prc.png")

    logger.info("plot user roc curves")
    visualize.plot_clf()
    for model_args in get_model_args(args):
        df = load_df(model_args["model_path"])
        df = df.groupby(df.names).max()
        visualize.plot_roc_curve(df.client_val.as_matrix(), df.client_pred.as_matrix(), model_args["model_name"])
    visualize.plot_roc_curve(df_paul_user.client_val.as_matrix(), df_paul_user.client_pred.as_matrix(), "paul")
    visualize.plot_legend()
    visualize.plot_save(f"{args.output_prefix}_user_client_roc.png")
|
2017-09-04 13:37:26 +02:00
|
|
|
|
2017-09-01 10:42:26 +02:00
|
|
|
|
2017-07-07 08:43:16 +02:00
|
|
|
def main():
    """Dispatch to the sub-command selected by args.mode.

    Unknown modes fall through silently, matching the original if-chain.
    """
    dispatch = {
        "train": main_train,
        "hyperband": main_hyperband,
        "test": main_test,
        "fancy": main_visualization,
        "all_fancy": main_visualize_all,
        "paul": main_paul_best,
    }
    action = dispatch.get(args.mode)
    if action is not None:
        action()
|
2017-07-07 08:43:16 +02:00
|
|
|
|
|
|
|
|
2017-06-30 10:12:20 +02:00
|
|
|
# script entry point
if __name__ == "__main__":
    main()
|