DeepXplore implementation
import license_execute as le

license_status = le.check()
if license_status == 'Valid@tAi':
    import logging
    import os
    import pickle
    from Utility import utility
    import mlflow
    import sys
    from configparser import ConfigParser, ExtendedInterpolation
    import h5py
    import numpy
    import tensorflow as tf
    from tensorflow.keras.models import load_model
    from Deepxplore import alternative_model_1_pipeline, alternative_model_2_pipeline
    from Deepxplore.corner_cases_generation import corner_cases_image_generation
    from Deepxplore.tranformation_utlis import sep
    from Deepxplore.transformation_parameter import obj
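    # Note: `sep` is assumed to be the project's path-separator helper (it is
    # used as sep.join([...]) to build file paths below), and `obj` is assumed
    # to wrap the per-transformation parameters read from the config; both come
    # from the Deepxplore package rather than the standard library.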
    # saving metadata
    def meta_info_save(component_name, data):
        try:
            with open(sep.join(['./Results', 'meta_info', 'meta_info_deepxplore.pkl']), 'rb') as f:
                meta_dict = pickle.load(f)
            meta_dict[component_name] = data
            with open(sep.join(['./Results', 'meta_info', 'meta_info_deepxplore.pkl']), "wb") as f:
                pickle.dump(meta_dict, f)
        except (FileNotFoundError, EOFError):
            # first run: the metadata file does not exist yet, so create the
            # directory and start a fresh metadata dict
            os.makedirs(sep.join(['./Results', 'meta_info']), exist_ok=True)
            meta_dict = dict()
            meta_dict[component_name] = data
            with open(sep.join(['./Results', 'meta_info', 'meta_info_deepxplore.pkl']), "wb") as f:
                pickle.dump(meta_dict, f)
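    # Hypothetical usage (the component name and payload below are illustrative
    # only; the pipeline currently calls this function only from a comment):
    # meta_info_save('neuron_coverage', {'threshold': 0.5, 'coverage': 0.82})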
    # DeepXplore execution: config parsing, transformation parameter
    # initialisation, and corner-case generation
    def execute_deepxplore():
        # parser
        parser = ConfigParser(interpolation=ExtendedInterpolation())
        parser.read('./Configs/config_deepxplore.properties')
        # mlflow config
        experiment_name = parser.get('mlflow', 'experiment_name')
        run_id = parser.getint('mlflow', 'run_id')
        # with mlflow.start_run(run_name=experiment_name, nested=True) as run:
        #     exp_id = run.info.experiment_id
        #     run_id = run.info.run_uuid
        # path config
        X_train_path = parser.get('path', 'X_train')
        y_train_path = parser.get('path', 'y_train')
        X_test_path = parser.get('path', 'x_test')
        y_test_path = parser.get('path', 'y_test')
        class_name_to_label_dict_path = parser.get('path', 'class_name_to_label_dict')
        deepxplore_results_path = parser.get('path', 'deepxplore_results_path')
        model_path = parser.get('path', 'model_path')
        alternative_model_1_path = parser.get('path', 'alternative_model_1_path')
        alternative_model_2_path = parser.get('path', 'alternative_model_2_path')
        # parameter config
        n_samples = parser.getint('parameter', 'n_samples')
        neuron_coverage_threshold = parser.getfloat('parameter', 'neuron_coverage_threshold')
        # training alternative models
        alternative_model_1_pipeline.model_training()
        alternative_model_2_pipeline.model_training()
        # loading the model under test and the two alternative models
        model_1 = load_model(model_path)
        model_2 = load_model(alternative_model_1_path)
        model_3 = load_model(alternative_model_2_path)
        # loading X_test and y_test
        with h5py.File(X_test_path, 'r') as hf:
            X_test = hf['X_test'][:]
        with h5py.File(y_test_path, 'r') as hf:
            y_test = hf['y_test'][:]
        corner_cases_images_x = list()
        corner_cases_images_y = list()
        corner_cases_images_y_pred = list()
        # Update X_test and y_test with only the correctly classified samples
        correctly_classified_idx = numpy.where(
            numpy.argmax(model_1.predict(X_test), axis=1) == numpy.array(y_test))
        X_test = numpy.array(X_test[correctly_classified_idx])
        y_test = numpy.array(y_test[correctly_classified_idx])
        for constrain in parser['transformation']:
            if parser.getboolean('transformation', constrain):
                print(constrain)
                print(parser.get('transformation', constrain))
                args = obj(constrain.split('.')[-1])
                # creating dir for generated outputs
                os.makedirs(sep.join([deepxplore_results_path,
                                      "deepxplore_generated_outputs"]), exist_ok=True)
                # neuron coverage
                # neuron_coverage(X_test, model_1, model_2, model_3, neuron_coverage_threshold)
                # meta_info_save(component_name, data)
                # creating dir for the images generated by this transformation
                generated_image_saving_path = sep.join(
                    [deepxplore_results_path, "deepxplore_generated_outputs",
                     args.transformation])
                os.makedirs(generated_image_saving_path, exist_ok=True)
                # corner case image generation: returns a 3-tuple of
                # (generated images, true labels, model predictions)
                corner_cases = corner_cases_image_generation(
                    X_test, y_test, model_1, model_2, model_3, args,
                    generated_image_saving_path)
                corner_cases_images_x.extend(corner_cases[0])
                corner_cases_images_y.extend(corner_cases[1])
                corner_cases_images_y_pred.extend(corner_cases[2])
        # Save as .npy format
        numpy.save(sep.join(["Results", "DeepXplore_results", 'deepxplore_images_x.npy']),
                   numpy.array(corner_cases_images_x))
        numpy.save(sep.join(["Results", "DeepXplore_results", 'deepxplore_images_y.npy']),
                   numpy.array(corner_cases_images_y))
        numpy.save(sep.join(["Results", "DeepXplore_results", 'deepxplore_images_y_pred.npy']),
                   numpy.array(corner_cases_images_y_pred))
        # Save as h5 format (alternative)
        # with h5py.File(sep.join(["Results", "DeepXplore_results", 'deepxplore_images_x.h5']), 'w') as hf:
        #     hf.create_dataset("deepxplore_images_x", data=numpy.array(corner_cases_images_x))
        # with h5py.File(sep.join(["Results", "DeepXplore_results", 'deepxplore_images_y.h5']), 'w') as hf:
        #     hf.create_dataset("deepxplore_images_y", data=numpy.array(corner_cases_images_y))
        # with h5py.File(sep.join(["Results", "DeepXplore_results", 'deepxplore_images_y_pred.h5']), 'w') as hf:
        #     hf.create_dataset("deepxplore_images_y_pred", data=numpy.array(corner_cases_images_y_pred))
        # mlflow.log_artifact(deepxplore_results_path)
        utility.mlflow_logging(experiment_name='DeepXplore',
                               artifacts=[deepxplore_results_path])
        # print("exp_name:", exp_id)
        # print("run_id:", run_id)
    def main_script():
        tf.compat.v1.reset_default_graph()
        tf.compat.v1.enable_eager_execution()
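        # TF1-compat calls: clear any existing default graph and force eager
        # execution so the pipeline behaves consistently whether it runs under
        # TF1-style graph mode or TF2 (where eager execution is already the
        # default)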
        # logging directory and handler setup
        os.makedirs('logging', exist_ok=True)
        logger = logging.getLogger(__name__)
        logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s:%(levelname)s:%(message)s:%(filename)s:%(module)s')
        file_handler = logging.FileHandler('logging/deepxplore_log.log')
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
        try:
            execute_deepxplore()
            logger.info("DeepXplore pipeline successfully executed.")
        except Exception as e:
            logger.critical('Please fix the DeepXplore pipeline')
            logger.exception('DeepXplore pipeline error!!')
            raise e
else:
    print("Invalid License")

# main_script is only defined when the license check passes, so guard the call
if __name__ == '__main__' and license_status == 'Valid@tAi':
    main_script()
This script implements the DeepXplore pipeline for generating corner cases that probe a
model's robustness. It reads model paths, dataset paths, and transformation parameters from
a configuration file, trains and loads two alternative models alongside the model under test,
and filters the test set down to the samples the model already classifies correctly. For each
transformation enabled in the config it generates corner-case images, then saves the
accumulated images, labels, and predictions as .npy files and logs the results directory as
an MLflow artifact. Execution is wrapped in a try/except block with file-based logging, and
the whole pipeline runs only after the license check succeeds.
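For reference, here is a minimal sketch of the ./Configs/config_deepxplore.properties file the
script expects. The section and key names are taken directly from the parser calls above;
every value, including the transformation names, is an illustrative assumption:

[mlflow]
experiment_name = DeepXplore
run_id = 0

[path]
X_train = ./Data/X_train.h5
y_train = ./Data/y_train.h5
x_test = ./Data/X_test.h5
y_test = ./Data/y_test.h5
class_name_to_label_dict = ./Data/class_name_to_label_dict.pkl
deepxplore_results_path = ./Results/DeepXplore_results
model_path = ./Models/model.h5
alternative_model_1_path = ./Models/alternative_model_1.h5
alternative_model_2_path = ./Models/alternative_model_2.h5

[parameter]
n_samples = 100
neuron_coverage_threshold = 0.5

[transformation]
transformation.brightness = True
transformation.rotation = False

Under the same assumption about the output directory, the saved corner cases can be loaded
back for analysis with plain NumPy:

import numpy

images = numpy.load('Results/DeepXplore_results/deepxplore_images_x.npy')
labels = numpy.load('Results/DeepXplore_results/deepxplore_images_y.npy')
preds = numpy.load('Results/DeepXplore_results/deepxplore_images_y_pred.npy')
print(images.shape, labels.shape, preds.shape)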