import spaces
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig
import torch

title = """# 🙋🏻‍♂️Welcome to🌟Tonic's🔮🪄DeepSeek📉Math

You can build with this endpoint using 🔮🪄DeepSeek📉Math. The demo is still a work in progress, and we're looking forward to building downstream tasks that showcase outstanding mathematical reasoning. Have any ideas? Join us below!

You can also use 🔮🪄DeepSeek📉Math by cloning this space. 🧬🔬🔍 Simply click here: Duplicate Space

Join us: 🌟TeamTonic🌟 is always making cool demos! Join our active builder's🛠️community 👻 [![Join us on Discord](https://img.shields.io/discord/1109943800132010065?label=Discord&logo=discord&style=flat-square)](https://discord.gg/GWpVpekp) On 🤗Huggingface: [TeamTonic](https://huggingface.co./TeamTonic) & [MultiTransformer](https://huggingface.co./MultiTransformer) Math with [introspector](https://huggingface.co./introspector) On 🌐Github: [Tonic-AI](https://github.com/tonic-ai) & contribute to 🌟 [SciTonic](https://github.com/Tonic-AI/scitonic)

🤗Big thanks to Yuvi Sharma and all the folks at huggingface for the community grant 🤗
"""

# Load the DeepSeek-Math instruct model and its tokenizer once at startup.
model_name = "deepseek-ai/deepseek-math-7b-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16, device_map="auto")
model.generation_config = GenerationConfig.from_pretrained(model_name)
model.generation_config.pad_token_id = model.generation_config.eos_token_id


@spaces.GPU
def solve_math_problem(question, max_tokens):
    # Step-by-step prompt with the final answer requested inside \boxed{}.
    prompt = f"User: {question}\nPlease reason step by step, and put your final answer within \\boxed{{}}.\nAssistant:"
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(model.device)
    outputs = model.generate(
        input_ids,
        max_new_tokens=int(max_tokens),
        pad_token_id=model.generation_config.pad_token_id,
    )
    # Decode only the newly generated tokens so the prompt is not echoed back in the answer.
    result = tokenizer.decode(outputs[0][input_ids.shape[1]:], skip_special_tokens=True)
    return result


def main():
    with gr.Blocks() as demo:
        gr.Markdown(title)
        with gr.Row():
            question = gr.Textbox(lines=5, label="Enter your math problem")
            # Gradio Sliders take `value` (not `default`) for the initial setting.
            max_tokens = gr.Slider(minimum=150, maximum=1200, value=250, step=1, label="Max Tokens")
        submit_button = gr.Button("Solve")
        output = gr.Textbox(label="🔮🪄DeepSeek📉Math")
        submit_button.click(fn=solve_math_problem, inputs=[question, max_tokens], outputs=output)
    demo.launch()


if __name__ == "__main__":
    main()