# (Hugging Face Spaces page header removed — it was scrape residue, not code.)
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load the tokenizer and model once at module import time
# (downloads the weights from the Hugging Face Hub on first run).
model_name = "microsoft/DialoGPT-medium"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Chat function: produce one bot reply and thread the dialogue history.
def chat_with_bot(input_text, chat_history=None):
    """Generate a DialoGPT reply to *input_text*.

    Parameters
    ----------
    input_text : str
        The user's message for this turn.
    chat_history : torch.Tensor | None
        Token ids of the conversation so far; ``None`` on the first turn
        (Gradio's ``state`` starts as ``None``).
        NOTE(review): the original used a mutable default ``chat_history=[]``
        — a single shared list leaks state across sessions — and also
        appended ``(input_text, response)`` tuples to it while passing the
        same object to ``torch.cat``, which requires tensors and would
        crash on the second turn. The history is now kept as a tensor.

    Returns
    -------
    tuple[str, torch.Tensor]
        The decoded reply and the updated token history, round-tripped
        back in as Gradio state on the next call.
    """
    new_input_ids = tokenizer.encode(
        input_text + tokenizer.eos_token, return_tensors="pt"
    )
    # Prepend the running history so the model sees the whole dialogue.
    bot_input_ids = (
        new_input_ids
        if chat_history is None
        else torch.cat([chat_history, new_input_ids], dim=-1)
    )
    bot_output = model.generate(
        bot_input_ids,
        max_length=1000,
        pad_token_id=tokenizer.eos_token_id,  # silence the missing-pad warning
    )
    # Decode only the newly generated tokens: slice past the FULL prompt.
    # (The original sliced past new_input_ids only, so once history existed
    # the prior turns would have been echoed into the reply.)
    response = tokenizer.decode(
        bot_output[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True
    )
    # The full output tensor becomes the next turn's history.
    return response, bot_output
# Gradio interface: a text box in/out plus a hidden state slot that
# round-trips the chat history tensor between calls.
demo = gr.Interface(
    fn=chat_with_bot,
    inputs=["text", "state"],   # user message and chat history state
    outputs=["text", "state"],  # reply and updated chat history state
    live=True,                  # re-run on input changes without a submit click
    title="Sohbet Botu",
    description="Hugging Face sohbet botu örneği",
)

demo.launch()  # start the Gradio app