Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -33,8 +33,8 @@ def load_model():
 
 pipe = load_model()
 
-@spaces.GPU(duration=
-def generate_response(prompt, max_length=
+@spaces.GPU(duration=110)
+def generate_response(prompt, max_length=1024):
     # Create messages with system prompt
     messages = [
         {"role": "system", "content": "You are a helpful AI assistant. You always think step by step."},
@@ -79,7 +79,7 @@ demo = gr.Interface(
             lines=5
         ),
     ],
-    outputs=gr.
+    outputs=gr.Textbox(label="Response", lines=10),
     title="benhaotang/phi4-qwq-sky-t1",
     description=f""" To achieve CoT and science reasoning on small scale with a merge of CoT finetuned phi4 model.
 
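For context, a minimal sketch of how the updated pieces could fit together in app.py. Only the @spaces.GPU(duration=110) decorator, the generate_response(prompt, max_length=1024) signature, the outputs=gr.Textbox(label="Response", lines=10) line, the system message, the title, and the start of the description come from this diff; the transformers pipeline call, the single prompt Textbox input, how the assistant reply is pulled out of the pipeline result, and demo.launch() are assumptions about the surrounding code, not part of the commit.

# Sketch of the updated app.py flow under the Space's ZeroGPU runtime.
# Lines not shown in the diff are assumptions and marked as such.
import spaces
import gradio as gr
from transformers import pipeline


def load_model():
    # Assumption: the merged model is served through a text-generation pipeline.
    return pipeline("text-generation", model="benhaotang/phi4-qwq-sky-t1")


pipe = load_model()


@spaces.GPU(duration=110)  # from the diff: request a ZeroGPU slot for up to ~110 s per call
def generate_response(prompt, max_length=1024):
    # Create messages with system prompt
    messages = [
        {"role": "system", "content": "You are a helpful AI assistant. You always think step by step."},
        {"role": "user", "content": prompt},
    ]
    # Assumption: chat-style pipeline call; the pipeline echoes the message list,
    # so the last entry is the assistant reply.
    result = pipe(messages, max_new_tokens=max_length)
    return result[0]["generated_text"][-1]["content"]


demo = gr.Interface(
    fn=generate_response,
    inputs=[
        gr.Textbox(label="Prompt", lines=5),  # assumption: one prompt box; max_length keeps its default
    ],
    outputs=gr.Textbox(label="Response", lines=10),  # from the diff
    title="benhaotang/phi4-qwq-sky-t1",
    description="To achieve CoT and science reasoning on small scale with a merge of CoT finetuned phi4 model.",
)

demo.launch()

The duration argument of spaces.GPU sets how long a single call may hold the ZeroGPU allocation, so raising it to 110 seconds gives the step-by-step generations more headroom than the library's 60-second default before the call is cut off.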