0% found this document useful (0 votes)
3 views

code try2

The document outlines a Flask application for real-time emotion detection using a webcam feed. It includes a main app file (app.py) that handles video streaming and emotion retrieval, an emotion detection module (emotion.py) that uses DeepFace for analyzing emotions, and an HTML front-end (index.html) for user interaction. The application displays detected emotions and notifies users of changes through a popup interface.

Uploaded by

ammartajudin1
Copyright
© All Rights Reserved
Available Formats
Download as TXT or PDF, or read online on Scribd
0% found this document useful (0 votes)
3 views

code try2

The document outlines a Flask application for real-time emotion detection using a webcam feed. It includes a main app file (app.py) that handles video streaming and emotion retrieval, an emotion detection module (emotion.py) that uses DeepFace for analyzing emotions, and an HTML front-end (index.html) for user interaction. The application displays detected emotions and notifies users of changes through a popup interface.

Uploaded by

ammartajudin1
Copyright
© All Rights Reserved
Available Formats
Download as TXT or PDF, or read online on Scribd
You are on page 1/ 4

app.py:

from flask import Flask, render_template, Response, jsonify


import cv2
from emotion import get_emotion

app = Flask(__name__)

# Most recent emotion reported by get_emotion(); read by /get_emotion
# and written by generate_frames(). None until the first detection.
last_detected_emotion = None

def generate_frames():
    """Yield MJPEG frame chunks from the default webcam.

    Each yielded chunk is a ``--frame`` multipart part containing one
    JPEG image. When an emotion is detected, the frame is annotated
    with a text overlay and the module-global ``last_detected_emotion``
    is updated as a side effect (polled by the /get_emotion route).
    """
    global last_detected_emotion
    cap = cv2.VideoCapture(0)
    try:
        while True:
            success, frame = cap.read()
            if not success:
                break
            emotion = get_emotion(frame)
            if emotion:
                last_detected_emotion = emotion
                cv2.putText(frame, f"Emotion: {emotion}", (10, 30),
                            cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)

            ret, buffer = cv2.imencode('.jpg', frame)
            if not ret:
                # Skip frames that fail to encode instead of sending junk.
                continue
            frame_bytes = buffer.tobytes()
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + frame_bytes + b'\r\n')
    finally:
        # Release the camera even when the client disconnects and the
        # generator is closed mid-stream (original leaked the capture).
        cap.release()

@app.route('/')
def index():
    """Serve the landing page (templates/index.html)."""
    return render_template('index.html')

@app.route('/video_feed')
def video_feed():
    """Stream the annotated webcam feed as multipart MJPEG.

    The ``boundary=frame`` in the mimetype must match the ``--frame``
    delimiter emitted by generate_frames(). The original literal was
    broken across two lines (a syntax error); it is rejoined here.
    """
    return Response(
        generate_frames(),
        mimetype='multipart/x-mixed-replace; boundary=frame',
    )

@app.route('/get_emotion')
def get_current_emotion():
    """Return the most recently detected emotion as JSON.

    Response shape: ``{"emotion": <str or null>}``.
    """
    # Reading a module global needs no ``global`` declaration.
    return jsonify({'emotion': last_detected_emotion})

if __name__ == '__main__':
    # debug=True enables the reloader and debugger; not for production.
    app.run(debug=True)

emotion.py:

import cv2
from deepface import DeepFace
import time
import logging
import threading
import numpy as np

class EmotionDetector:
    """Detect the dominant facial emotion in video frames.

    Localises a face with an OpenCV Haar cascade, classifies it with
    DeepFace, and smooths the per-frame results by accumulating
    confidence-weighted votes, emitting a winner at most once per
    ``time_window`` seconds.
    """

    def __init__(self):
        # Frontal-face Haar cascade shipped with OpenCV.
        self.face_cascade = cv2.CascadeClassifier(
            cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
        # NOTE(review): DeepFace emotion scores look like percentages
        # (0-100), so 0.7 is almost always exceeded — confirm whether
        # 70 was intended before changing it.
        self.confidence_threshold = 0.7
        self.time_window = 3           # seconds between emitted results
        self.emotion_votes = {}        # emotion label -> summed confidence
        self.last_detected_emotion = None
        self.last_emotion_time = 0     # 0 => first window ends immediately
        self.lock = threading.Lock()   # guards emotion_votes

    def detect_emotion(self, frame):
        """Analyze one BGR frame; return the smoothed emotion or None.

        Only the first detected face is analyzed. Returns a non-None
        label only when a voting window has elapsed; otherwise the call
        just accumulates votes and returns None. DeepFace failures are
        logged and swallowed (best-effort per frame).
        """
        gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = self.face_cascade.detectMultiScale(
            gray_frame, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))

        if len(faces) == 0:
            return None

        x, y, w, h = faces[0]
        face_roi = frame[y:y + h, x:x + w]

        try:
            # enforce_detection=False: the Haar cascade already found a
            # face, so don't let DeepFace raise if it disagrees.
            result = DeepFace.analyze(face_roi, actions=['emotion'],
                                      enforce_detection=False)
            # Newer DeepFace versions return a list of per-face results.
            if isinstance(result, list):
                result = result[0]

            emotion = result['dominant_emotion']
            confidence = result['emotion'][emotion]

            if confidence >= self.confidence_threshold:
                with self.lock:
                    # Accumulate confidence-weighted votes per label.
                    self.emotion_votes[emotion] = (
                        self.emotion_votes.get(emotion, 0) + confidence)

            current_time = time.time()
            if current_time - self.last_emotion_time >= self.time_window:
                with self.lock:
                    if self.emotion_votes:
                        final_emotion = max(self.emotion_votes,
                                            key=self.emotion_votes.get)
                        self.emotion_votes = {}
                        self.last_detected_emotion = final_emotion
                        self.last_emotion_time = current_time
                        logging.info(f"Final emotion: {final_emotion}")
                        return final_emotion

        except Exception as e:
            logging.error(f"Error in emotion detection: {e}")

        return None

# Initialize logging
logging.basicConfig(filename='emotion_detection.log', level=logging.INFO)

# Single shared detector instance used by every caller of get_emotion().
emotion_detector = EmotionDetector()


def get_emotion(frame):
    """Module-level convenience wrapper around the shared detector."""
    return emotion_detector.detect_emotion(frame)

index.html:

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <!-- Flask serves static assets from /static; a bare /style.css is
         never routed. Use url_for since this is a Jinja template. -->
    <link rel="stylesheet" href="{{ url_for('static', filename='style.css') }}">
    <title>Real-time Emotion Detection</title>
    <style>
        /* Transient notification shown when the emotion changes. */
        #popup {
            display: none;
            position: fixed;
            top: 20px;
            right: 20px;
            padding: 15px;
            background-color: #4CAF50;
            color: white;
            border-radius: 5px;
            z-index: 1000;
        }
    </style>
</head>
<body>
    <h1>Real-time Emotion Detection</h1>
    <div id="videoContainer">
        <!-- Hidden until the user starts detection. -->
        <img id="videoFeed" src="" alt="Video Feed" style="display:none;">
    </div>
    <div id="detectedEmotion"></div>
    <button id="detectEmotionBtn">Detect Emotion</button>
    <div id="popup"></div>

    <script>
        let lastEmotion = null;

        document.getElementById("detectEmotionBtn").onclick = function() {
            startDetection();
        };

        // Display the current emotion and pop a notice when it changes.
        // (The original source broke the template literal across lines,
        // which would have rendered a literal "$ {emotion}".)
        function showEmotion(emotion) {
            document.getElementById("detectedEmotion").textContent =
                `Detected Emotion: ${emotion}`;
            if (emotion !== lastEmotion) {
                showPopup(`Emotion change detected: ${emotion}`);
                lastEmotion = emotion;
            }
        }

        function showPopup(message) {
            const popup = document.getElementById("popup");
            popup.textContent = message;
            popup.style.display = "block";
            setTimeout(() => {
                popup.style.display = "none";
            }, 3000); // Hide popup after 3 seconds
        }

        function startDetection() {
            const videoFeed = document.getElementById("videoFeed");
            videoFeed.style.display = "block"; // Show the video feed
            videoFeed.src = "/video_feed"; // Start the video feed
            console.log("Emotion detection started!");
            startEmotionPolling();
        }

        function startEmotionPolling() {
            setInterval(() => {
                fetch('/get_emotion')
                    .then(response => response.json())
                    .then(data => {
                        if (data.emotion) {
                            showEmotion(data.emotion);
                        }
                    })
                    .catch(error => console.error('Error:', error));
            }, 1000); // Poll every second
        }
    </script>
</body>
</html>

style.css:

/* Page layout: centered column on a light grey background. */
body {
    background-color: #f0f0f0;
    font-family: Arial, sans-serif;
    display: flex;
    flex-direction: column;
    align-items: center;
}

/* Space the page title off the top edge. */
h1 {
    margin-top: 20px;
}

/* Frame around the streamed video. */
#videoContainer {
    width: 640px; /* Adjust width as needed */
    margin-top: 20px;
    border: 2px solid #000;
}

/* Let the video frame fill its container. */
img {
    width: 100%;
}

You might also like