logu29 committed
Commit d1d98e8 · verified · 1 Parent(s): 864cf03

Update app.py

Files changed (1)
  1. app.py +53 -4
app.py CHANGED
@@ -1,11 +1,60 @@
 import gradio as gr
 from textblob import TextBlob
+from deepface import DeepFace
+import moviepy.editor as mp
+import cv2
+import tempfile
+import os
 
-def analyze(text):
+# Function to analyze text
+def analyze_text(text):
     blob = TextBlob(text)
     polarity = blob.sentiment.polarity
     sentiment = "Positive" if polarity > 0 else "Negative" if polarity < 0 else "Neutral"
-    return f"Sentiment: {sentiment} (Polarity: {polarity:.2f})"
+    return f"Text Sentiment: {sentiment} (Polarity: {polarity:.2f})"
 
-iface = gr.Interface(fn=analyze, inputs="text", outputs="text")
-iface.launch()
+# Function to analyze image (face emotion)
+def analyze_image(image):
+    try:
+        result = DeepFace.analyze(image, actions=['emotion'], enforce_detection=False)
+        dominant_emotion = result[0]['dominant_emotion']
+        return f"Detected Emotion: {dominant_emotion}"
+    except Exception as e:
+        return f"Error: {str(e)}"
+
+# Function to analyze video (face emotion at center frame)
+def analyze_video(video_file):
+    try:
+        tmpdir = tempfile.mkdtemp()
+        clip = mp.VideoFileClip(video_file)
+        frame = clip.get_frame(clip.duration / 2)
+        frame_path = os.path.join(tmpdir, "frame.jpg")
+        cv2.imwrite(frame_path, cv2.cvtColor(frame, cv2.COLOR_RGB2BGR))
+        result = DeepFace.analyze(frame_path, actions=['emotion'], enforce_detection=False)
+        dominant_emotion = result[0]['dominant_emotion']
+        return f"Video Emotion: {dominant_emotion}"
+    except Exception as e:
+        return f"Error: {str(e)}"
+
+# Gradio UI
+with gr.Blocks() as demo:
+    gr.Markdown("# 🧠 Emotion and Sentiment Analyzer")
+    with gr.Tab("Text Analysis"):
+        text_input = gr.Textbox(label="Enter Text")
+        text_output = gr.Textbox(label="Sentiment Result")
+        text_btn = gr.Button("Analyze Text")
+        text_btn.click(analyze_text, inputs=text_input, outputs=text_output)
+
+    with gr.Tab("Image Analysis"):
+        img_input = gr.Image(type="filepath", label="Upload Face Image")
+        img_output = gr.Textbox(label="Emotion Result")
+        img_btn = gr.Button("Analyze Image")
+        img_btn.click(analyze_image, inputs=img_input, outputs=img_output)
+
+    with gr.Tab("Video Analysis"):
+        video_input = gr.Video(label="Upload Face Video")
+        video_output = gr.Textbox(label="Emotion Result")
+        video_btn = gr.Button("Analyze Video")
+        video_btn.click(analyze_video, inputs=video_input, outputs=video_output)
+
+demo.launch()
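
Note that the new imports imply the Space will also need deepface, moviepy, and opencv-python (or opencv-python-headless) installable alongside gradio and textblob; the requirements file is not part of this diff. As a quick local sanity check of the two libraries the new functions rely on, a minimal sketch along these lines should work (the sample sentence is arbitrary and face.jpg is a hypothetical test image, not something added by this commit):

# Minimal local sanity check of the libraries used by the updated app.py.
# Assumes textblob and deepface are installed; "face.jpg" is a hypothetical test image.
from textblob import TextBlob
from deepface import DeepFace

# Text path: same polarity logic as analyze_text()
polarity = TextBlob("I really enjoy using this app").sentiment.polarity
print(f"Polarity: {polarity:.2f}")  # > 0 maps to "Positive" in analyze_text()

# Image path: same call as analyze_image(); returns a list of per-face results
result = DeepFace.analyze("face.jpg", actions=['emotion'], enforce_detection=False)
print(result[0]['dominant_emotion'])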