import os
from typing import Iterator, List, Tuple

import gradio as gr
from openai import OpenAI
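# EmmaChat wraps Groq's OpenAI-compatible chat endpoint behind a Gradio ChatInterface,
# keeping the "Aadhya" persona in a single system prompt and streaming replies back.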
class EmmaChat:
    def __init__(self):
        # Groq exposes an OpenAI-compatible API, so the standard OpenAI client works once
        # it is pointed at Groq's base URL. The key is read from an environment variable
        # rather than hard-coded; GROQ_API_KEY is an assumed variable name.
        self.client = OpenAI(
            base_url="https://api.groq.com/openai/v1",
            api_key=os.environ.get("GROQ_API_KEY"),
        )

        # Persona prompt sent as the system message on every request.
        self.system_message = """You are Aadhya, a 17-year-old who grew up in Mumbai. You have a passion for art and enjoy playing volleyball. You excel in math and physics and also have a strong interest in chemistry and biology. You constantly seek knowledge and are willing to work hard. Additionally, you are a good decision-maker and can anticipate many situations in advance. At your core, you are a kind-hearted person who strives to serve others and make a positive impact. You work toward a better future for both yourself and humanity."""

    def generate_response(self, message: str, history: List[Tuple[str, str]]) -> Iterator[str]:
        # Rebuild the full conversation: system prompt, prior turns, then the new message.
        messages = [{"role": "system", "content": self.system_message}]
        for user_msg, assistant_msg in history:
            messages.append({"role": "user", "content": user_msg})
            messages.append({"role": "assistant", "content": assistant_msg})
        messages.append({"role": "user", "content": message})

        try:
            completion = self.client.chat.completions.create(
                model="llama-3.1-8b-instant",
                messages=messages,
                temperature=0.2,
                max_tokens=1024,
                stream=True,
            )

            # Accumulate the streamed chunks and yield the growing reply so the
            # ChatInterface renders the response incrementally.
            full_response = ""
            for chunk in completion:
                if chunk.choices[0].delta.content is not None:
                    full_response += chunk.choices[0].delta.content
                    yield full_response

        except Exception as e:
            yield f"I apologize, but I encountered an error: {str(e)}"
def create_chat_interface() -> gr.ChatInterface:
    emma = EmmaChat()

    # Light styling for the chat bubbles, avatars, and overall message width.
    custom_css = """
    .message.user div.content {
        background-color: #DCF8C6 !important;
    }
    .message.bot div.content {
        background-color: #E8E8E8 !important;
    }
    .message.user, .message.bot {
        padding: 1rem;
    }
    .avatar {
        border-radius: 50%;
        width: 40px;
        height: 40px;
    }
    .message-wrap {
        max-width: 800px;
        margin: 0 auto;
    }
    """

    chat_interface = gr.ChatInterface(
        fn=emma.generate_response,
        title="Chat with Aadhya 👩🏻",
        description="""Aadhya is a 17-year-old from Mumbai with a passion for art and a competitive spirit in volleyball. She excels in math, physics, chemistry, and biology, blending her analytical skills with a love for problem-solving. Driven by a desire to positively impact humanity, she is also committed to personal growth and excellence.""",
        examples=[
            ["Hi, can you introduce yourself?"],
            ["Is there any way I can get help from you? I'm glad to meet you."],
            ["I'm so glad to connect with you! Do you think we can work together on anything?"],
            ["How can I start a small garden at home?"]
        ],
        theme=gr.themes.Soft(
            primary_hue="pink",
            secondary_hue="purple",
        ),
        css=custom_css,
    )

    return chat_interface
if __name__ == "__main__":
    chat_interface = create_chat_interface()
    chat_interface.queue()
    chat_interface.launch(
        share=True,
        server_name="0.0.0.0",
        server_port=7000,
        show_api=False
    )
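# chat_interface.queue() enables Gradio's request queue, which generator-based (streaming)
# handlers like generate_response rely on. share=True additionally requests a temporary
# public link on top of the local server at 0.0.0.0:7000.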