import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model and tokenizer
model_name = "microsoft/DialoGPT-small"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Chat function: chat_history carries the running conversation as token IDs
def chat_with_bot(input_text, chat_history=None):
    # Encode the user input, ending the turn with the EOS token
    new_input_ids = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors="pt")
    # Prepend the previous conversation tokens, if any
    bot_input_ids = (
        new_input_ids
        if chat_history is None
        else torch.cat([chat_history, new_input_ids], dim=-1)
    )
    bot_output = model.generate(
        bot_input_ids,
        max_length=1000,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode only the newly generated tokens as the bot's reply
    response = tokenizer.decode(bot_output[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
    # Return the full token history as the updated state
    return response, bot_output

# Gradio interface
interface = gr.Interface(
    fn=chat_with_bot,
    inputs=["text", "state"],
    outputs=["text", "state"],
    live=True,
    title="Chatbot",
    description="Start a conversation with the chatbot!",
)

interface.launch()