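"""Gradio demo combining DeepFace facial analysis with EasyOCR handwriting recognition.

The first tab analyzes an uploaded image for age, gender, emotion, and race and runs
English/Hindi OCR on any visible text; the second tab verifies whether two uploaded
images show the same person.
"""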
import gradio as gr
from deepface import DeepFace
import easyocr
import matplotlib.pyplot as plt
from PIL import Image
import os
def convert_to_jpg(image_path):
    # Normalize any uploaded image (PNG, WEBP, etc.) to an RGB JPEG so that
    # downstream libraries receive a consistent format.
    try:
        img = Image.open(image_path)
        jpg_path = os.path.splitext(image_path)[0] + '.jpg'
        img.convert('RGB').save(jpg_path, 'JPEG')
        return jpg_path
    except Exception as e:
        raise ValueError(f"Error converting image: {str(e)}")
def perform_handwriting_recognition_easyocr(image_path):
    # Run EasyOCR (English + Hindi) over the image and return the recognized text
    # as a single string. Note: the Reader is constructed on every call, which
    # reloads the models; it could be created once at module level if speed matters.
    try:
        jpg_path = convert_to_jpg(image_path)
        reader = easyocr.Reader(['en', 'hi'], gpu=False)
        results = reader.readtext(jpg_path)
        if not results:
            return "No text found."
        recognized_texts = [result[1] for result in results]
        return " ".join(recognized_texts)
    except Exception as e:
        return f"Error in handwriting recognition: {str(e)}"
def analyze_face(image_path):
    try:
        jpg_path = convert_to_jpg(image_path)
        handwriting_result = perform_handwriting_recognition_easyocr(jpg_path)
        analysis = DeepFace.analyze(
            img_path=jpg_path,
            actions=["age", "gender", "emotion", "race"],
            enforce_detection=False,
        )
        # DeepFace returns a list when multiple faces are detected; use the first one.
        if isinstance(analysis, list):
            analysis = analysis[0]
        gender_scores = analysis.get("gender", {"Man": 0, "Woman": 0})
        predicted_gender = "Male" if gender_scores.get("Man", 0) > gender_scores.get("Woman", 0) else "Female"
        outputs = {
            "age": analysis.get("age", "Unknown"),
            "gender_scores": gender_scores,
            "predicted_gender": predicted_gender,
            "emotion": analysis.get("dominant_emotion", "Unknown"),
            "race": analysis.get("dominant_race", "Unknown"),
            "handwriting": handwriting_result,
        }
        return outputs
    except Exception as e:
        # Return placeholder values so every UI output still renders on failure.
        return {
            "age": "Error",
            "gender_scores": {"Man": 0, "Woman": 0},
            "predicted_gender": "Error",
            "emotion": "Error",
            "race": "Error",
            "handwriting": f"Error: {str(e)}",
        }
def verify_faces(image_path1, image_path2):
    try:
        jpg_path1 = convert_to_jpg(image_path1)
        jpg_path2 = convert_to_jpg(image_path2)
        result = DeepFace.verify(img1_path=jpg_path1, img2_path=jpg_path2, enforce_detection=False)
        # "distance" is an embedding distance, so lower values indicate a closer match.
        distance = result["distance"]
        verified = result["verified"]
        return f"Match Distance: {distance:.2f} (lower is a closer match)", f"Are they the same person? {'Yes' if verified else 'No'}"
    except Exception as e:
        return f"Error: {str(e)}", ""
def visualize_gender(gender_scores):
    # Build a small bar chart of the DeepFace gender confidences (in percent).
    labels = ["Male", "Female"]
    scores = [gender_scores.get("Man", 0), gender_scores.get("Woman", 0)]
    fig, ax = plt.subplots(figsize=(4, 3))
    ax.bar(labels, scores, color=["blue", "pink"])
    ax.set_ylabel("Confidence (%)")
    ax.set_title("Gender Prediction")
    plt.tight_layout()
    plt.close(fig)  # keep the figure out of pyplot's global state; Gradio renders the returned fig
    return fig
def display_results(image_path):
    # Run the full analysis and unpack the results in the order the Gradio outputs expect.
    results = analyze_face(image_path)
    gender_chart = visualize_gender(results["gender_scores"])
    return results["age"], gender_chart, results["predicted_gender"], results["emotion"], results["race"], results["handwriting"]
iface = gr.Blocks()
with iface:
    with gr.Tab("Facial and Handwriting Analysis"):
        with gr.Row():
            image_input = gr.Image(type="filepath", label="Upload Image")
        with gr.Row():
            age_output = gr.Textbox(label="Predicted Age")
            gender_plot = gr.Plot(label="Gender Confidence")
            predicted_gender_output = gr.Textbox(label="Predicted Gender")
        with gr.Row():
            emotion_output = gr.Textbox(label="Predicted Emotion")
            race_output = gr.Textbox(label="Predicted Race")
            handwriting_output = gr.Textbox(label="Handwriting Recognition")
        submit_button = gr.Button("Analyze")
        submit_button.click(
            display_results,
            inputs=image_input,
            outputs=[
                age_output, gender_plot, predicted_gender_output,
                emotion_output, race_output, handwriting_output
            ]
        )
    with gr.Tab("Face Verification"):
        with gr.Row():
            image_input1 = gr.Image(type="filepath", label="Upload First Image")
            image_input2 = gr.Image(type="filepath", label="Upload Second Image")
        with gr.Row():
            match_confidence = gr.Textbox(label="Match Distance (lower = more similar)")
            verification_result = gr.Textbox(label="Verification Result")
        verify_button = gr.Button("Verify")
        verify_button.click(
            verify_faces,
            inputs=[image_input1, image_input2],
            outputs=[match_confidence, verification_result]
        )
iface.launch()
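# Note: when running locally rather than on a hosted Space, launch(share=True) can
# optionally be used to expose a temporary public link.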