DeepXplore implementation
# License gate: the rest of the script originally ran only inside this
# conditional (the body's indentation was lost in this copy of the file).
import license_execute as le
license_status = le.check()
# NOTE(review): 'Valid@tAi' looks like a vendor-specific license token — confirm.
if license_status == 'Valid@tAi':
# --- imports for the DeepXplore pipeline (flattened from the gated body) ---
import logging
import os
import pickle
from Utility import utility  # project-local helper (wraps mlflow logging)
import mlflow
import sys
from configparser import ConfigParser, ExtendedInterpolation
import h5py
import numpy
import tensorflow as tf
from tensorflow.keras.models import load_model
# saving metadata
def meta_info_save(component_name, data):
    """Persist *data* under *component_name* in the DeepXplore meta-info pickle.

    Loads ./Results/meta_info/meta_info_deepxplore.pkl if it exists, otherwise
    starts a fresh dict (creating the directory), then writes the updated dict
    back to disk.

    Args:
        component_name: key under which *data* is stored.
        data: any picklable payload.
    """
    # The original used an undefined `sep`; os.path.join is the portable form.
    meta_dir = os.path.join('./Results', 'meta_info')
    meta_path = os.path.join(meta_dir, 'meta_info_deepxplore.pkl')
    try:
        with open(meta_path, 'rb') as f:
            meta_dict = pickle.load(f)
    except (FileNotFoundError, pickle.UnpicklingError, EOFError):
        # First run (or unreadable file): start with an empty dict.
        os.makedirs(meta_dir, exist_ok=True)
        meta_dict = dict()
    # Update OUTSIDE the try so a load failure cannot drop the new entry.
    meta_dict[component_name] = data
    # BUG FIX: the original never wrote the updated dict back to disk, so the
    # "save" silently discarded its payload.
    with open(meta_path, 'wb') as f:
        pickle.dump(meta_dict, f)
# --- configuration ----------------------------------------------------------
# FIX: several parser.get(...) calls were hard-wrapped mid-statement in this
# copy (syntax errors); they are rejoined onto single lines here.
# NOTE(review): `parser` must be a ConfigParser (with ExtendedInterpolation)
# created and .read() elsewhere — its construction is not visible in this copy.
# mlflow config
experiment_name = parser.get('mlflow', 'experiment_name')
run_id = parser.getint('mlflow', 'run_id')
# path config
X_train_path = parser.get('path', 'X_train')
y_train_path = parser.get('path', 'y_train')
X_test_path = parser.get('path', 'x_test')
y_test_path = parser.get('path', 'y_test')
class_name_to_label_dict_path = parser.get('path', 'class_name_to_label_dict')
deepxplore_results_path = parser.get('path', 'deepxplore_results_path')
model_path = parser.get('path', 'model_path')
alternative_model_1_path = parser.get('path', 'alternative_model_1_path')
alternative_model_2_path = parser.get('path', 'alternative_model_2_path')
# parameter config
n_samples = parser.getint('parameter', 'n_samples')
neuron_coverage_threshold = parser.getfloat('parameter', 'neuron_coverage_threshold')
# --- models & data filtering -------------------------------------------------
# FIX: the numpy.where statement below was hard-wrapped across three lines
# without continuation (syntax error); rejoined here.
# loading model: DeepXplore differential testing needs the model under test
# plus two alternative models trained for the same task.
model_1 = load_model(model_path)
model_2 = load_model(alternative_model_1_path)
model_3 = load_model(alternative_model_2_path)

# Accumulators for generated corner-case images, their true labels, and the
# (disagreeing) predicted labels.
corner_cases_images_x = list()
corner_cases_images_y = list()
corner_cases_images_y_pred = list()

# Update the X_test and y_test with only correctly classified samples, so any
# later disagreement is attributable to the applied transformation.
# NOTE(review): X_test / y_test must be loaded elsewhere (see the *_path
# config above) — their loading code is not visible in this copy.
correctly_classified_idx = numpy.where(
    numpy.argmax(model_1.predict(X_test), axis=1) == numpy.array(y_test))
X_test = numpy.array(X_test[correctly_classified_idx])
y_test = numpy.array(y_test[correctly_classified_idx])

# NOTE(review): `constrain` and `obj` are undefined in this view — this block
# presumably sat inside a loop over the [transformation] config section, and
# the generation code that follows may also belong inside this conditional.
if parser.getboolean('transformation', f'{constrain}'):
    print(constrain)
    print(parser.get('transformation', f'{constrain}'))
    args = obj(constrain.split('.')[-1])
# --- corner-case generation & logging ---------------------------------------
# FIX: continuation lines of the commented-out code below had lost their '#'
# during reflow (syntax errors); the comments are reassembled here.
# neuron coverage (disabled):
# neuron_coverage(X_test, model_1, model_2, model_3, neuron_coverage_threshold)
# meta_info_save(component_name, data)

# NOTE(review): `generated_image_saving_path` is not defined in this view —
# presumably derived from deepxplore_results_path elsewhere; confirm.
os.makedirs(generated_image_saving_path, exist_ok=True)

# Corner case image generation; returns (images, true_labels, predicted_labels).
corner_cases = corner_cases_image_generation(
    X_test, y_test, model_1, model_2, model_3, args,
    generated_image_saving_path)
corner_cases_images_x.extend(corner_cases[0])
corner_cases_images_y.extend(corner_cases[1])
corner_cases_images_y_pred.extend(corner_cases[2])

# Save as h5 format (disabled):
# with h5py.File(os.path.join("Results", "DeepXplore_results", 'deepxplore_images_x.h5'), 'w') as hf:
#     hf.create_dataset("deepxplore_images_x", data=numpy.array(corner_cases_images_x))
# with h5py.File(os.path.join("Results", "DeepXplore_results", 'deepxplore_images_y.h5'), 'w') as hf:
#     hf.create_dataset("deepxplore_images_y", data=numpy.array(corner_cases_images_y))
# with h5py.File(os.path.join("Results", "DeepXplore_results", 'deepxplore_images_y_pred.h5'), 'w') as hf:
#     hf.create_dataset("deepxplore_images_y_pred", data=numpy.array(corner_cases_images_y_pred))

# Log the results directory as an MLflow artifact via the project utility.
# mlflow.log_artifact(deepxplore_results_path)
utility.mlflow_logging(experiment_name='DeepXplore',
                       artifacts=[deepxplore_results_path])
# print("exp_name:", exp_id)
# print("run_id: ", run_id)
def main_script():
    """Entry point: configure TF eager mode and file logging, run DeepXplore.

    On any failure inside execute_deepxplore(), logs a critical record plus
    the traceback and re-raises the exception.
    """
    tf.compat.v1.reset_default_graph()
    tf.compat.v1.enable_eager_execution()
    # logging directory
    os.makedirs('logging', exist_ok=True)
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    # FIX: the format string was hard-wrapped mid-literal in this copy
    # (syntax error); reassembled onto one logical line.
    formatter = logging.Formatter(
        '%(asctime)s:%(levelname)s:%(message)s:%(filename)s:%(module)s')
    file_handler = logging.FileHandler('logging/deepxplore_log.log')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    try:
        execute_deepxplore()
        # BUG FIX: messages referred to the "Metamorphic Testing"/"Modeling"
        # pipelines — copy-paste from a sibling script; this is DeepXplore.
        logger.info("DeepXplore pipeline successfully executed.")
    except Exception as e:
        logger.critical('Please fix DeepXplore pipeline')
        # logger.exception already records the active traceback, so one call
        # suffices (the original logged the same exception twice).
        logger.exception('DeepXplore pipeline error!!')
        raise
# License gate failed: refuse to run the pipeline.
else:
print("Invalid License")
# Script entry point (only when executed directly, not on import).
if __name__ == '__main__':
main_script()
This script executes the DeepXplore pipeline for generating corner cases that evaluate a
machine-learning model's robustness. It loads models, datasets, and transformation parameters
from configuration, applies transformations to the input data to generate corner cases, and
saves and logs the results with MLflow for further analysis. Logging and error handling are
wrapped in a try-except block, and the license status is verified before the main script runs.