Chat template error

#36
by LeonhardP - opened

Using:

from transformers import AutoTokenizer

# NOTE: the base "meta-llama/Llama-3.2-1B" checkpoint ships WITHOUT a
# tokenizer.chat_template, which is exactly why apply_chat_template raises
# ValueError. The instruction-tuned variant includes the Llama 3 chat
# template, so load that instead (alternatively, set tokenizer.chat_template
# manually or pass chat_template=... to apply_chat_template).
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.2-1B-Instruct")

class Conversation:
    """Accumulates a chat history as a list of {"role", "content"} dicts,
    in the format expected by tokenizer.apply_chat_template."""

    def __init__(self):
        # Must be the dunder __init__ (the forum paste dropped the
        # underscores around "init"); with plain `init` the constructor is
        # never called, self.chat never exists, and every other method
        # raises AttributeError.
        self.chat = [{"role": "system", "content": "You are a helpful assistant."}]

    def add_human_message(self, message):
        """Append a user turn; message is coerced to str."""
        self.chat.append({"role": "user", "content": str(message)})

    def add_bot_message(self, message):
        """Append an assistant turn; message is coerced to str."""
        self.chat.append({"role": "assistant", "content": str(message)})

    def get_chat(self):
        """Return the accumulated message list (a live reference, not a copy)."""
        return self.chat

# Demo: build a one-turn conversation and render it as a prompt string
# using the tokenizer's chat template (no tokenization, text output only).
chat_session = Conversation()
chat_session.add_human_message("hi, how are you doing?")
messages = chat_session.get_chat()
tokenizer.apply_chat_template(messages, tokenize=False)

results in the following error:

ValueError: Cannot use chat template functions because tokenizer.chat_template is not set and no template argument was passed! For information about writing templates and setting the tokenizer.chat_template attribute, please see the documentation at https://huggingface.co./docs/transformers/main/en/chat_templating

Sign up or log in to comment