# Emotion Decoder — Gradio app: text sentiment (TextBlob), face emotion (DeepFace),
# and video emotion analysis (middle-frame extraction via MoviePy + OpenCV).
# Standard library
import os
import tempfile

# Third-party
import cv2
import gradio as gr
import moviepy.editor as mp
from deepface import DeepFace
from textblob import TextBlob
def analyze_text(text):
    """Classify the sentiment of *text* with TextBlob.

    Args:
        text: Free-form input string from the UI textbox.

    Returns:
        A human-readable string such as ``"Sentiment: Positive (Score: 0.50)"``.
        Polarity > 0 maps to Positive, < 0 to Negative, exactly 0 to Neutral.
    """
    blob = TextBlob(text)
    polarity = blob.sentiment.polarity
    # Polarity is in [-1.0, 1.0]; 0.0 is treated as Neutral.
    emotion = "Positive" if polarity > 0 else "Negative" if polarity < 0 else "Neutral"
    return f"Sentiment: {emotion} (Score: {polarity:.2f})"
def analyze_face(image):
    """Detect the dominant facial emotion in a single image.

    Args:
        image: Path to an image file (the Gradio component uses type="filepath").

    Returns:
        ``"Dominant Emotion: <emotion>"`` on success, or an error message string
        (the UI shows it in a textbox, so we never raise to the caller).
    """
    try:
        # enforce_detection=False keeps DeepFace from raising when no face is found.
        result = DeepFace.analyze(image, actions=['emotion'], enforce_detection=False)
        dominant_emotion = result[0]['dominant_emotion']
        return f"Dominant Emotion: {dominant_emotion}"
    except Exception as e:
        return f"Error analyzing face: {str(e)}"
def analyze_video(video_path):
    """Detect the dominant facial emotion in the middle frame of a video.

    Extracts the frame at ``duration / 2``, writes it to a temporary JPEG,
    and runs DeepFace emotion analysis on it.

    Args:
        video_path: Path to the uploaded video file.

    Returns:
        ``"Dominant Emotion in Video: <emotion>"`` on success, or an error
        message string (the UI shows it in a textbox, so we never raise).
    """
    try:
        # TemporaryDirectory (instead of mkdtemp) guarantees cleanup of the
        # scratch folder; the original leaked one directory per call.
        with tempfile.TemporaryDirectory() as temp_folder:
            clip = mp.VideoFileClip(video_path)
            try:
                # Sample the midpoint frame as representative of the video.
                frame = clip.get_frame(clip.duration / 2)
            finally:
                # Release the underlying ffmpeg reader even on failure.
                clip.close()
            frame_path = os.path.join(temp_folder, "frame.jpg")
            # MoviePy yields RGB; OpenCV expects BGR when writing.
            cv2.imwrite(frame_path, cv2.cvtColor(frame, cv2.COLOR_RGB2BGR))
            result = DeepFace.analyze(frame_path, actions=['emotion'], enforce_detection=False)
        dominant_emotion = result[0]['dominant_emotion']
        return f"Dominant Emotion in Video: {dominant_emotion}"
    except Exception as e:
        return f"Error analyzing video: {str(e)}"
# Build the Gradio interface: one tab per analysis mode, each wired to its
# corresponding analyze_* function via a button click.
with gr.Blocks() as demo:
    gr.Markdown("# 🧠 Emotion Decoder - Sentiment & Emotion Analysis")

    with gr.Tab("Text Analysis"):
        text_input = gr.Textbox(label="Enter text")
        text_output = gr.Textbox(label="Sentiment Result")
        text_button = gr.Button("Analyze Text")
        text_button.click(analyze_text, inputs=text_input, outputs=text_output)

    with gr.Tab("Face Emotion Detection"):
        # filepath type hands analyze_face a path, which DeepFace accepts directly.
        img_input = gr.Image(type="filepath", label="Upload an Image")
        img_output = gr.Textbox(label="Emotion Result")
        img_button = gr.Button("Analyze Face Emotion")
        img_button.click(analyze_face, inputs=img_input, outputs=img_output)

    with gr.Tab("Video Emotion Detection"):
        video_input = gr.Video(label="Upload a Video")
        video_output = gr.Textbox(label="Emotion Result")
        video_button = gr.Button("Analyze Video Emotion")
        video_button.click(analyze_video, inputs=video_input, outputs=video_output)

demo.launch()