# -*- coding: utf-8 -*-
"""face-classification.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/#fileId=https%3A//huggingface.co/spaces/Tarive/Nepali_Actors_Prediction/blob/main/face-classification.ipynb

# Importing Libraries
"""

import os
import numpy as np  # linear algebra
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import shutil
from PIL import Image
from sklearn.metrics import classification_report, confusion_matrix

import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator, array_to_img, load_img, img_to_array
from matplotlib.pyplot import imshow
from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras import Model
from tensorflow.keras import layers

"""# Looking into the structure of the file arrangement"""

DIR = '/kaggle/input/nepali-celeb-localized-face-dataset/Dataset/Dataset/'
files = os.listdir(DIR)
print(files)

class_count = len(files)
print(f'There are {class_count} classes.')

# Inspect each class: report the image count and the size of a sample image
for cls in files:
    cls_path = os.path.join(DIR, cls)
    imgs = os.listdir(cls_path)
    img = Image.open(os.path.join(cls_path, imgs[0]))
    print(f'Class {cls} contains {len(imgs)} images of shape {img.size}.')

"""# Creating the data generators using ImageDataGenerator for the CNN"""

def train_val_generators():
    """
    Creates the training and validation data generators

    Returns:
      train_generator, validation_generator: tuple containing the generators
    """
    # Instantiate the ImageDataGenerator class, normalize pixel values and set
    # arguments to augment the images; validation_split reserves 20% for validation
    datagen = ImageDataGenerator(rescale=1.0 / 255.0,
                                 rotation_range=40,
                                 width_shift_range=0.1,
                                 height_shift_range=0.1,
                                 shear_range=0.1,
                                 zoom_range=0.1,
                                 horizontal_flip=True,
                                 vertical_flip=True,
                                 fill_mode='nearest',
                                 validation_split=0.2)

    # Training split: shuffled batches of 100
    train_generator = datagen.flow_from_directory(directory=DIR,
                                                  batch_size=100,
                                                  class_mode='categorical',
                                                  shuffle=True,
                                                  subset='training',
                                                  target_size=(75, 75))

    # Validation split: not shuffled, so labels stay aligned with predictions later
    validation_generator = datagen.flow_from_directory(directory=DIR,
                                                       batch_size=36,
                                                       class_mode='categorical',
                                                       shuffle=False,
                                                       subset='validation',
                                                       target_size=(75, 75))
    return train_generator, validation_generator

train_generator, validation_generator = train_val_generators()

"""# Define and compile the transfer learning model"""

pre_trained_model = tf.keras.applications.inception_v3.InceptionV3(
    input_shape=(75, 75, 3),
    include_top=False,
    weights='imagenet')

# Freeze the base model so only the new classification head is trained
for layer in pre_trained_model.layers:
    layer.trainable = False

pre_trained_model.summary()

# Choose `mixed7` as the last layer of the base model
last_layer = pre_trained_model.get_layer('mixed7')
print('last layer output shape: ', last_layer.output_shape)
last_output = last_layer.output

# Flatten the output layer to 1 dimension
x = layers.Flatten()(last_output)
# Add a fully connected layer with 512 hidden units and ReLU activation
x = layers.Dense(512, activation='relu')(x)
# Add a dropout rate of 0.2
x = layers.Dropout(0.2)(x)
# Add a final softmax layer for multi-class classification
x = layers.Dense(class_count, activation='softmax')(x)

# Append the dense network to the base model
model_transfer = Model(pre_trained_model.input, x)

# Print the model summary. See your dense network connected at the end.
model_transfer.summary()
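"""Optional sanity check (a minimal sketch): with the Inception base frozen, only the
newly added Dense head should contribute trainable parameters. The counts below use
only objects already defined above."""

# Sum parameter counts over trainable vs. non-trainable weights
trainable_params = sum(int(np.prod(w.shape)) for w in model_transfer.trainable_weights)
frozen_params = sum(int(np.prod(w.shape)) for w in model_transfer.non_trainable_weights)
print(f'Trainable parameters: {trainable_params:,}')
print(f'Frozen parameters:    {frozen_params:,}')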
model_transfer.compile(optimizer='adam',
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])

"""# Creating a Callback class"""

class myCallback(tf.keras.callbacks.Callback):
    # Define the correct function signature for on_epoch_end
    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        if logs.get('val_accuracy') is not None and logs.get('val_accuracy') > 0.99:
            print(logs.get('val_accuracy'))
            print("\nReached 99% validation accuracy so cancelling training!")
            self.model.stop_training = True  # actually stop training, not just print

callbacks = myCallback()

reduce_lr = ReduceLROnPlateau(
    monitor='val_loss',
    factor=0.25,
    patience=2,
    min_lr=0.00001,
    verbose=2
)

checkpoint_path = "/kaggle/working/cp.ckpt"
checkpoint_dir = os.path.dirname(checkpoint_path)

# Create a callback that saves the model's weights
cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path,
                                                 save_weights_only=True,
                                                 verbose=1)

"""# Train the model"""

history1 = model_transfer.fit(train_generator,
                              epochs=50,
                              validation_data=validation_generator,
                              callbacks=[callbacks, reduce_lr, cp_callback])

print("Accuracy of the transfer learning model is -",
      model_transfer.evaluate(validation_generator)[1] * 100, "%")

"""# Evaluating Accuracy and Loss for the Model"""

# Plot the accuracy and loss curves for both training and validation
acc = history1.history['accuracy']
val_acc = history1.history['val_accuracy']
loss = history1.history['loss']
val_loss = history1.history['val_loss']

epochs = range(len(acc))

plt.plot(epochs, acc, 'r', label='Training accuracy')
plt.plot(epochs, val_acc, 'b', label='Validation accuracy')
plt.title('Training and validation accuracy')
plt.legend()

plt.figure()

plt.plot(epochs, loss, 'r', label='Training Loss')
plt.plot(epochs, val_loss, 'b', label='Validation Loss')
plt.title('Training and validation loss')
plt.legend()

plt.show()

# Predict on the (unshuffled) validation set and convert probabilities to class ids
predictions = model_transfer.predict(validation_generator)
predictions = np.argmax(predictions, axis=-1)
print(predictions[:10])
print(validation_generator.labels[:10])

dict_cls = validation_generator.class_indices
list(dict_cls.keys())

"""# Evaluating Precision, Recall, F1-Score and Support for the Model"""

print(classification_report(validation_generator.labels,
                            predictions,
                            target_names=list(dict_cls.keys())))

"""# Plotting the Confusion Matrix for the Classification"""

cm = confusion_matrix(validation_generator.labels, predictions)
cm = pd.DataFrame(cm, index=list(dict_cls.keys()), columns=list(dict_cls.keys()))

plt.figure(figsize=(15, 15))
sns.heatmap(cm, cmap="Blues", linecolor='black', linewidths=1, annot=True, fmt='d')

"""# Sample Model Prediction"""

def class_name(id):
    # Reverse lookup: map a class index back to its class name
    key_list = list(dict_cls.keys())
    val_list = list(dict_cls.values())
    position = val_list.index(id)
    return key_list[position]

f, ax = plt.subplots(10, 3)
f.set_size_inches(10, 10)
k = 0
for i in range(10):
    for j in range(3):
        true_cls = class_name(validation_generator.labels[k])
        pred_cls = class_name(predictions[k])
        ax[i, j].set_title(f'Actual = {true_cls}\nPredicted = {pred_cls}')
        img = plt.imread(DIR + validation_generator.filenames[k])
        ax[i, j].imshow(img)
        ax[i, j].axis('off')
        k += 2  # step by 2 to sample a wider spread of validation images
plt.tight_layout()
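"""# Predicting a Single Image

A minimal helper sketch for running the trained model on one image file; the example
path below is hypothetical, and the preprocessing mirrors the generators above
(resize to 75x75, rescale to [0, 1])."""

def predict_image(path, model=model_transfer):
    """Return the predicted class name and its probability for a single image file."""
    img = load_img(path, target_size=(75, 75))  # resize exactly as the generators do
    arr = img_to_array(img) / 255.0             # match the rescale=1/255 used in training
    arr = np.expand_dims(arr, axis=0)           # add a batch dimension
    probs = model.predict(arr)[0]
    return class_name(np.argmax(probs)), float(np.max(probs))

# Example usage (hypothetical path):
# label, confidence = predict_image('/kaggle/working/sample_face.jpg')
# print(f'{label} ({confidence:.2%})')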