Starchik committed
Commit 37d3dff · verified · 1 Parent(s): 780f612

Update app.py

Files changed (1)
1. app.py +53 -17
app.py CHANGED
@@ -1,18 +1,54 @@
  import gradio as gr
- from transformers import pipeline
-
- def chat_with_model(user_input):
-     messages = [{"role": "user", "content": user_input}]
-     pipe = pipeline("text-generation", model="deepseek-ai/DeepSeek-R1", trust_remote_code=True)
-     response = pipe(messages)
-     return response[0]['generated_text']
-
- iface = gr.Interface(
-     fn=chat_with_model,
-     inputs=gr.Textbox(label="Enter your message"),
-     outputs=gr.Textbox(label="Model Response"),
-     title="Chat with DeepSeek-R1",
-     description="Enter a message to interact with the DeepSeek-R1 model."
- )
-
- iface.launch()
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # Load the model and tokenizer
+ model_name = "deepseek-ai/DeepSeek-R1"
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+ def inference(prompt, temperature, top_p, max_length):
+     try:
+         # Tokenize the input text
+         inputs = tokenizer(prompt, return_tensors="pt")
+
+         # Generate a response
+         outputs = model.generate(
+             inputs.input_ids,
+             max_length=int(max_length),
+             temperature=temperature,
+             top_p=top_p,
+             do_sample=True
+         )
+
+         # Decode the response
+         response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+         return response
+
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ # Build the Gradio interface
+ with gr.Blocks() as demo:
+     gr.Markdown("# DeepSeek-R1 Model")
+
+     with gr.Row():
+         input_text = gr.Textbox(label="Input text", placeholder="Enter your text here...")
+         output_text = gr.Textbox(label="Output text", placeholder="The model's response will appear here...")
+
+     with gr.Row():
+         max_length = gr.Slider(label="Max Length", minimum=1, maximum=1000, value=500)
+         temperature = gr.Slider(label="Temperature", minimum=0.1, maximum=2.0, value=1.0)
+         top_p = gr.Slider(label="Top P", minimum=0.1, maximum=1.0, value=1.0)
+
+     run_button = gr.Button("Run")
+
+     # Wire the inference function to the Run button
+     run_button.click(
+         fn=inference,
+         inputs=[input_text, temperature, top_p, max_length],
+         outputs=output_text
+     )
+
+ # Launch the app
+ if __name__ == "__main__":
+     demo.launch()
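
For a quick local sanity check of the new inference() path without downloading the full DeepSeek-R1 weights, a minimal sketch along these lines can be used; the tiny stand-in checkpoint sshleifer/tiny-gpt2 and the standalone script are assumptions for testing only, not part of this commit:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumption: a tiny public checkpoint stands in for DeepSeek-R1 so the
# tokenize/generate/decode path runs on CPU in a few seconds.
model_name = "sshleifer/tiny-gpt2"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

def inference(prompt, temperature, top_p, max_length):
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        inputs.input_ids,
        max_length=int(max_length),
        temperature=temperature,
        top_p=top_p,
        do_sample=True,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

print(inference("Hello", temperature=1.0, top_p=1.0, max_length=20))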