Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -51,10 +51,10 @@ unimath4 = """Goal:
 additional_info_prompt = "/-Explain using mathematics-/"
 
 examples = [
-    [unimath1, additional_info_prompt,
-    [unimath2, additional_info_prompt,
-    [unimath3, additional_info_prompt,
-    [unimath4, additional_info_prompt,
+    [unimath1, additional_info_prompt, 2500],
+    [unimath2, additional_info_prompt, 2500],
+    [unimath3, additional_info_prompt, 2500],
+    [unimath4, additional_info_prompt, 2500]
 ]
 
 model_name = "Goedel-LM/Goedel-Prover-SFT"
@@ -79,6 +79,7 @@ def solve_math_problem(question, informal_prefix, max_tokens):
     pad_token_id=model.generation_config.pad_token_id,
     temperature=1.0,
     top_p=0.95,
+    do_sample=True
 )
 
 result = tokenizer.decode(outputs[0], skip_special_tokens=True)
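The first hunk gives every example row a third value (2500) so it lines up with the three parameters of solve_math_problem (question, informal_prefix, max_tokens): Gradio example rows need one value per input component, so the new element presumably fills the max-tokens input. A minimal sketch of that wiring, assuming two textboxes plus a max-tokens slider; the component choices and the stub function body are illustrative, not copied from the Space's app.py:

# Minimal sketch (not the Space's actual app.py): how a Gradio Interface would
# consume the updated examples list. Each inner list supplies one value per
# input component, so the new third element (2500) fills the max_tokens input.
import gradio as gr

additional_info_prompt = "/-Explain using mathematics-/"
unimath1 = "Goal: ..."  # placeholder; the real Lean goals live in app.py

def solve_math_problem(question, informal_prefix, max_tokens):
    # stub body; the real function runs Goedel-Prover-SFT via model.generate()
    return f"would prove: {question[:40]}... (max_tokens={max_tokens})"

examples = [
    [unimath1, additional_info_prompt, 2500],
]

demo = gr.Interface(
    fn=solve_math_problem,
    inputs=[
        gr.Textbox(label="question"),
        gr.Textbox(label="informal_prefix"),
        gr.Slider(256, 4096, value=2500, label="max_tokens"),
    ],
    outputs=gr.Textbox(label="result"),
    examples=examples,  # each row maps onto the three inputs above, in order
)

if __name__ == "__main__":
    demo.launch()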
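The second hunk enables sampling. In Hugging Face transformers, temperature and top_p are only applied when do_sample=True; without it, generate() falls back to greedy decoding and logs a warning that those settings are ignored. A hedged sketch of the surrounding call, where the model-loading lines and the max_new_tokens name are assumptions for a self-contained example rather than code taken from app.py:

# Sketch of the generation path after this commit; only the lines shown in the
# diff (pad_token_id, temperature, top_p, do_sample, decode) come from app.py.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "Goedel-LM/Goedel-Prover-SFT"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16)

prompt = "Goal: ..."  # placeholder for one of the unimath prompts
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_new_tokens=2500,       # assumed name for the max_tokens argument
    pad_token_id=model.generation_config.pad_token_id,
    temperature=1.0,
    top_p=0.95,
    do_sample=True,            # required for temperature/top_p to take effect
)
result = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(result)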