{ "add_bos_token": true, "add_eos_token": false, "add_prefix_space": null, "added_tokens_decoder": { "0": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "1": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "2": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "102": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "103": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "104": { "content": "<|start_header_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "105": { "content": "<|end_header_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "106": { "content": "<|eot_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "bos_token": "", "chat_template": "{{- bos_token }}\n\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"You are YuLan-Mini, created by RUC AI Box. You are a helpful assistant.\" %}\n{%- endif %}\n\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{%- for message in messages %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n{%- endfor %}\n\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}", "clean_up_tokenization_spaces": false, "eos_token": "", "extra_special_tokens": {}, "legacy": true, "model_max_length": 1000000000000000019884624838656, "pad_token": "", "padding_side": "right", "sp_model_kwargs": {}, "spaces_between_special_tokens": false, "split_special_tokens": false, "tokenizer_class": "LlamaTokenizer", "unk_token": "", "use_default_system_prompt": false }