style.css:
/* style.css — layout for the real-time emotion detection page. */

/* Center all page content in a single column on a light grey background. */
body {
    font-family: Arial, sans-serif;
    display: flex;
    flex-direction: column;
    align-items: center;
    background-color: #f0f0f0;
}

h1 {
    margin-top: 20px;
}

/* Fixed-width bordered container holding the MJPEG stream. */
#videoContainer {
    margin-top: 20px;
    border: 2px solid #000;
    width: 640px; /* Adjust width as needed */
}

/* The stream is rendered via an <img>; scale it to the container. */
img {
    width: 100%; /* Make the video frame responsive */
}
index.html:
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <!-- Stylesheet path is resolved by Flask's static-file handler. -->
    <link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
    <title>Real-time Emotion Detection</title>
</head>
<body>
    <h1>Real-time Emotion Detection</h1>
    <!-- The MJPEG stream is shown through this <img>; it stays hidden
         until the user clicks "Detect Emotion". -->
    <div id="videoContainer">
        <img id="videoFeed" src="" alt="Video Feed" style="display:none;">
    </div>
    <button id="detectEmotionBtn">Detect Emotion</button>
    <script>
        // Start streaming when the user clicks the button.
        document.getElementById("detectEmotionBtn").onclick = function() {
            startDetection();
        };
        // Reveal the <img> and point it at the server's /video_feed route,
        // which serves a multipart/x-mixed-replace MJPEG stream.
        function startDetection() {
            const videoFeed = document.getElementById("videoFeed");
            videoFeed.style.display = "block"; // Show the video feed
            videoFeed.src = "{{ url_for('video_feed') }}"; // Start the video feed
            console.log("Emotion detection started!");
        }
    </script>
</body>
</html>
app.py:
import cv2
from flask import Flask, render_template, Response
from deepface import DeepFace

# Flask application serving the index page and the MJPEG emotion stream.
app = Flask(__name__)
def generate_frames():
    """Yield camera frames as multipart MJPEG parts.

    Each yielded chunk is one JPEG-encoded frame wrapped in the framing
    expected by a ``multipart/x-mixed-replace; boundary=frame`` response.
    When emotion analysis succeeds, the dominant emotion is drawn onto
    the frame before it is streamed.
    """
    # NOTE(review): the original read a module-level `cap` that is never
    # defined in this file; open the capture here so the generator is
    # self-contained and the device is released when the stream ends.
    cap = cv2.VideoCapture(0)
    try:
        while True:
            success, frame = cap.read()
            if not success:
                break
            try:
                # Analyze the whole frame; the original referenced an
                # undefined `face_roi`.
                result = DeepFace.analyze(frame, actions=['emotion'],
                                          enforce_detection=True)
                emotion = result[0]['dominant_emotion']
                cv2.putText(frame, emotion, (10, 30),
                            cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
            except Exception as e:
                print(f"Error in emotion detection: {e}")
            # Bug fix: a raw ndarray cannot be concatenated with bytes;
            # the frame must be JPEG-encoded before yielding.
            ok, buffer = cv2.imencode('.jpg', frame)
            if not ok:
                continue
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + buffer.tobytes() + b'\r\n')
    finally:
        cap.release()
@app.route('/')
def index():
    """Render the landing page with the video container and start button."""
    template_name = 'index.html'
    return render_template(template_name)
@app.route('/video_feed')
def video_feed():
    """Stream the annotated camera feed as a multipart MJPEG response."""
    # Bug fix: the mimetype string literal was split across two source
    # lines (a syntax error); it must be a single string, and the
    # boundary must match the b'--frame' marker used by the generator.
    return Response(generate_frames(),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
    # Bug fix: the trailing comment was split across two lines, leaving
    # "down)" as bare (invalid) code. threaded=True lets the MJPEG stream
    # and page requests be served concurrently; debug=True is for
    # development only.
    app.run(debug=True, threaded=True)
    # Note: Cap release should be handled appropriately (e.g., when the
    # server shuts down).
emotion.py:
import cv2
from deepface import DeepFace
import time

# NOTE(review): this fragment is incomplete — `cap` is read but never
# opened (a cv2.VideoCapture(...) line is missing), `faces` is tested
# but never assigned (a face-detection step is missing), and the
# `except` below has no matching `try`. Lines were likely lost when this
# chunk was extracted; restore the missing pieces before running.
while True:
    # Capture frame-by-frame
    ret, frame = cap.read()
    if not ret:
        print("Error: Could not read frame.")
        break
    # Skip frames with no detected face — `faces` is presumably the
    # result of a detector call absent from this chunk; verify upstream.
    if not faces:
        continue
    # Orphaned handler: its `try` block (presumably the DeepFace.analyze
    # call) is not present in this chunk.
    except Exception as e:
        print(f"Error in emotion detection: {e}")