import matplotlib.pyplot as plt
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.mobilenet_v2 import preprocess_input  # assumed backbone; swap to match the model actually used

# Set parameters
default_size = 224
batch_size = 32
base_path = '/content/drive/My Drive/lfw-deepfunneled'
# Data generator with on-the-fly augmentation. This approach applies a wider
# range of augmentations and expects the dataset to be organised with one
# folder per class, which is how the LFW deep-funneled download (and most
# public image datasets) is laid out.
train_datagen = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    brightness_range=[0.8, 1.2],
    channel_shift_range=0.2,
    validation_split=0.2  # reserve 20% of the images for validation
)
validation_generator = train_datagen.flow_from_directory(
    directory=base_path,
    target_size=(default_size, default_size),
    batch_size=batch_size,
    class_mode='categorical',
    subset='validation'  # the 20% reserved by validation_split is used as the validation set
)
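
# The remaining 80% of the images need their own training generator; a minimal
# sketch assuming the same directory layout (subset='training' selects the
# images not reserved by validation_split):
train_generator = train_datagen.flow_from_directory(
    directory=base_path,
    target_size=(default_size, default_size),
    batch_size=batch_size,
    class_mode='categorical',
    subset='training'  # the remaining 80% of the images
)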
epochs = 10
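
# The plots below read from `history`, which comes from a training step not
# shown in this cell. A minimal transfer-learning sketch, assuming a frozen
# MobileNetV2 backbone (suggested by preprocess_input, but not confirmed here):
from tensorflow.keras.applications import MobileNetV2
from tensorflow.keras import layers, models

base_model = MobileNetV2(input_shape=(default_size, default_size, 3),
                         include_top=False, weights='imagenet')
base_model.trainable = False  # train only the new classification head

model = models.Sequential([
    base_model,
    layers.GlobalAveragePooling2D(),
    layers.Dense(train_generator.num_classes, activation='softmax')
])
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

history = model.fit(train_generator,
                    validation_data=validation_generator,
                    epochs=epochs)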
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()