commented out
model/custom_agent.py  CHANGED  (+3 -2)
@@ -106,7 +106,7 @@ class CustomHfAgent(Agent):
     ### https://github.com/huggingface/transformers/blob/main/src/transformers/tools/prompts.py -> run chat_template.txt
     ### https://huggingface.co/datasets/huggingface-tools/default-prompts/blob/main/chat_prompt_template.txt
     ###
-
+    """ def format_prompt(self, task, chat_mode=False):
 
         checkpoint = "bigcode/starcoder"
         tokenizer = AutoTokenizer.from_pretrained(checkpoint, token = self.token)
@@ -149,4 +149,5 @@ class CustomHfAgent(Agent):
             }
         ]
         # prompt = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
-
+        print("formatted propmpt ---- " + prompt)
+        return prompt """
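
For context: the change wraps the whole format_prompt override in a string literal, effectively disabling it, so prompt formatting presumably falls back to the base Agent implementation. The sketch below is only a reconstruction from the lines visible in this diff, not the committed code; the real messages list, the chat_mode handling, and the surrounding class context are not shown in the hunks, so the standalone function and its arguments here are placeholders.

from transformers import AutoTokenizer

def format_prompt(task, token=None):
    # Load the StarCoder tokenizer; "token" stands in for the Hugging Face
    # access token (self.token in the real method).
    checkpoint = "bigcode/starcoder"
    tokenizer = AutoTokenizer.from_pretrained(checkpoint, token=token)

    # Placeholder chat history: the real method presumably builds this from the
    # run/chat prompt templates referenced in the ### comments in the diff.
    messages = [
        {"role": "user", "content": task},
    ]

    # Render the chat template to a plain string (tokenize=False). StarCoder is
    # a base model, so this relies on the tokenizer's (default) chat template.
    prompt = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, tokenize=False
    )
    print("formatted prompt ---- " + prompt)
    return prompt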