
FED

The document loads a pre-trained emotion detection model and uses it to analyze facial expressions in a video file. It detects faces, extracts each face region of interest, predicts an emotion for each face, and counts how often each emotion is predicted. It then plots the counts, determines the most frequently predicted emotion, and classifies it as satisfied or not satisfied.


# from keras.preprocessing.image import img_to_array


from keras.preprocessing import image
from keras.models import load_model, model_from_json
import matplotlib.pyplot as plt
from time import sleep
import tensorflow as tf
import seaborn as sns
import pandas as pd
import numpy as np
import os
import cv2

from sklearn import preprocessing
from sklearn.preprocessing import LabelEncoder

labelencoder = LabelEncoder()

emotion_dict = {0: "Angry", 1: "Disgusted", 2: "Fearful", 3: "Happy",
                4: "Neutral", 5: "Sad", 6: "Surprised"}

# load the model architecture from JSON and create the model
with open('C:/Users/Arathy/Desktop/FED7/emotion_model.json', 'r') as json_file:
    loaded_model_json = json_file.read()

emotion_model = tf.keras.models.model_from_json(loaded_model_json)
print(emotion_model)

# load the trained weights into the new model
emotion_model.load_weights("C:/Users/Arathy/Desktop/FED7/emotion_model.h5")
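# Alternative (a sketch, not part of the original script): if the model had been
# saved as a single file, tf.keras.models.load_model could restore architecture and
# weights in one call. The .keras path below is hypothetical; the original only
# provides the JSON architecture plus the H5 weights used above.
# emotion_model = tf.keras.models.load_model("C:/Users/Arathy/Desktop/FED7/emotion_model.keras")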

# start the webcam feed
# cap = cv2.VideoCapture(0)

# or pass your video path here
cap = cv2.VideoCapture("C:\\Users\\Arathy\\Desktop\\FED7\\sad.mp4")

expr = []    # raw prediction vectors, one per detected face
exdect = []  # predicted emotion indices, one per detected face
# load the Haar cascade used to locate faces (once, outside the loop)
face_detector = cv2.CascadeClassifier(r'C:\Users\Arathy\Desktop\FED7\haarcascade_frontalface_default.xml')

while True:
    ret, frame = cap.read()
    if not ret:
        break
    frame = cv2.resize(frame, (1280, 800))
    gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # detect the faces present in the current frame
    num_faces = face_detector.detectMultiScale(gray_frame, scaleFactor=1.3, minNeighbors=5)

    # take each detected face and preprocess it
    for (x, y, w, h) in num_faces:
        cv2.rectangle(frame, (x, y - 50), (x + w, y + h + 10), (0, 255, 0), 4)
        roi_gray_frame = gray_frame[y:y + h, x:x + w]
        cropped_img = np.expand_dims(np.expand_dims(cv2.resize(roi_gray_frame, (48, 48)), -1), 0)

        # predict the emotion for this face
        emotion_prediction = emotion_model.predict(cropped_img)
        maxindex = int(np.argmax(emotion_prediction))
        exdect.append(maxindex)
        cv2.putText(frame, emotion_dict[maxindex], (x + 5, y - 20),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2, cv2.LINE_AA)
        expr.append(emotion_prediction)

    cv2.imshow('Emotion Detection', frame)

    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()

# count how many times each emotion index was predicted
res = {}
for i in exdect:
    res[i] = exdect.count(i)

print(res)

value_list = list(res.values())
print(value_list)
key_list = list(res.keys())
print(key_list)
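# A more idiomatic way to build the same frequency table (a sketch, not in the
# original script): collections.Counter counts the indices in one pass, and
# emotion_dict maps the integer indices back to readable emotion names.
from collections import Counter

named_counts = {emotion_dict[k]: v for k, v in Counter(exdect).items()}
print(named_counts)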

# plot the counts (matplotlib.pyplot is already imported as plt above)
plt.scatter(value_list, key_list)
plt.xlabel('values')
plt.ylabel('key')
plt.show()
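# An alternative visualization (a sketch, assuming the res dictionary above):
# seaborn is imported as sns but otherwise unused in the original; a bar plot of
# named emotions against their counts can be easier to read than the scatter plot.
sns.barplot(x=[emotion_dict[k] for k in res.keys()], y=list(res.values()))
plt.xlabel('emotion')
plt.ylabel('count')
plt.show()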

# index of the most frequently predicted emotion
new_val = max(res, key=res.get)
print("most frequent emotion index:", new_val)

satisfied = ["Surprised", "Happy", "Neutral"]
not_satisfied = ["Angry", "Disgusted", "Fearful", "Sad"]

# classify the most frequent expression as satisfied or not satisfied
for i in emotion_dict:
    if i == new_val:
        # print(i, emotion_dict[i])
        for s in satisfied:
            if s == emotion_dict[i]:
                print("Satisfied")
        for s in not_satisfied:
            if s == emotion_dict[i]:
                print("Not satisfied")
