Update app.py
app.py CHANGED
@@ -3,15 +3,16 @@ from deepface import DeepFace
 import easyocr
 import matplotlib.pyplot as plt
 from PIL import Image
+import os
 
 def convert_to_jpg(image_path):
     try:
         img = Image.open(image_path)
-        jpg_path = …
+        jpg_path = os.path.splitext(image_path)[0] + '.jpg'
         img.convert('RGB').save(jpg_path, 'JPEG')
         return jpg_path
     except Exception as e:
-        …
+        raise ValueError(f"Error converting image: {str(e)}")
 
 def perform_handwriting_recognition_easyocr(image_path):
     try:
@@ -29,16 +30,16 @@ def analyze_face(image_path):
     try:
         jpg_path = convert_to_jpg(image_path)
         handwriting_result = perform_handwriting_recognition_easyocr(jpg_path)
-        analysis = DeepFace.analyze(img_path=jpg_path, actions=["age", "gender", "emotion", "race"])
-        gender_scores = analysis…
-        predicted_gender = "Male" if gender_scores…
+        analysis = DeepFace.analyze(img_path=jpg_path, actions=["age", "gender", "emotion", "race"], enforce_detection=False)
+        gender_scores = analysis.get("gender", {"Man": 0, "Woman": 0})
+        predicted_gender = "Male" if gender_scores.get("Man", 0) > gender_scores.get("Woman", 0) else "Female"
 
         outputs = {
-            "age": analysis…
+            "age": analysis.get("age", "Unknown"),
             "gender_scores": gender_scores,
             "predicted_gender": predicted_gender,
-            "emotion": analysis…
-            "race": analysis…
+            "emotion": analysis.get("dominant_emotion", "Unknown"),
+            "race": analysis.get("dominant_race", "Unknown"),
             "handwriting": handwriting_result,
         }
         return outputs
@@ -56,7 +57,7 @@ def verify_faces(image_path1, image_path2):
     try:
         jpg_path1 = convert_to_jpg(image_path1)
         jpg_path2 = convert_to_jpg(image_path2)
-        result = DeepFace.verify(img1_path=jpg_path1, img2_path=jpg_path2)
+        result = DeepFace.verify(img1_path=jpg_path1, img2_path=jpg_path2, enforce_detection=False)
        confidence = result["distance"]
        verified = result["verified"]
        return f"Match Confidence: {confidence:.2f}", f"Are they the same person? {'Yes' if verified else 'No'}"
@@ -65,7 +66,7 @@ def verify_faces(image_path1, image_path2):
 
 def visualize_gender(gender_scores):
     labels = ["Male", "Female"]
-    scores = [gender_scores…
+    scores = [gender_scores.get("Man", 0), gender_scores.get("Woman", 0)]
     fig, ax = plt.subplots(figsize=(4, 3))
     ax.bar(labels, scores, color=["blue", "pink"])
     ax.set_ylabel("Confidence (%)")
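
For anyone who wants to try this change locally, here is a minimal driver sketch showing how the updated helpers might be exercised, assuming app.py imports cleanly as a module and that the placeholder image files exist on disk. The file names below (sample_face.png, person_a.jpg, person_b.jpg) and the driver script itself are hypothetical and not part of this commit.

# Hypothetical driver script, not part of this commit.
from app import analyze_face, verify_faces, visualize_gender

# analyze_face() returns the outputs dict assembled above:
# age, gender_scores, predicted_gender, emotion, race, handwriting.
results = analyze_face("sample_face.png")
print("Predicted gender:", results["predicted_gender"])
print("Age estimate:", results["age"])
print("Dominant emotion:", results["emotion"])
print("Handwriting text:", results["handwriting"])

# The bar chart is driven by the same gender_scores dict returned above.
visualize_gender(results["gender_scores"])

# verify_faces() returns two formatted strings: a confidence message and a yes/no verdict.
confidence_msg, verdict_msg = verify_faces("person_a.jpg", "person_b.jpg")
print(confidence_msg)
print(verdict_msg)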
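
Two choices recur in this diff: enforce_detection=False tells DeepFace to analyze and verify images even when it cannot confidently locate a face, rather than raising an exception, and the .get(..., default) lookups let analyze_face and visualize_gender fall back to "Unknown" or zero scores instead of failing with a KeyError when a field is missing from the analysis result. Both trade strictness for robustness: the app keeps producing output, but detection failures now surface only as default values.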