i4never committed
Commit 2a44b88
Parent: f270465

Upload tokenizer

added_tokens.json CHANGED
@@ -1,8 +1,5 @@
 {
   "\n\n### Instruction:\n": 65107,
   "\n\n### Response:\n": 65108,
-  "</s>": 2,
-  "<pad>": 65109,
-  "<s>": 1,
-  "<unk>": 0
+  "<pad>": 65109
 }
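
This change drops the base Llama special tokens (<unk>, <s>, </s>) from added_tokens.json, since they already live in the base vocabulary, leaving only the tokens that actually extend it. A minimal sketch of checking the resulting IDs; "./tokenizer_dir" is a hypothetical local checkout of this repo:

from transformers import AutoTokenizer

# Hypothetical path: point this at a local copy of the repo's tokenizer files.
tok = AutoTokenizer.from_pretrained("./tokenizer_dir")

# The two prompt markers and <pad> sit above the base vocabulary.
for t in ["\n\n### Instruction:\n", "\n\n### Response:\n", "<pad>"]:
    print(repr(t), tok.convert_tokens_to_ids(t))
# Expected per added_tokens.json: 65107, 65108, 65109

# <s>, </s>, <unk> still resolve through the base vocabulary.
print(tok.convert_tokens_to_ids(["<s>", "</s>", "<unk>"]))  # [1, 2, 0]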
special_tokens_map.json CHANGED
@@ -3,8 +3,32 @@
     "\n\n### Instruction:\n",
     "\n\n### Response:\n"
   ],
-  "bos_token": "<s>",
-  "eos_token": "</s>",
-  "pad_token": "<pad>",
-  "unk_token": "<unk>"
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
 }
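
Here the special tokens are expanded from bare strings into fully serialized AddedToken records, making the lstrip/rstrip/normalized/single_word behaviour explicit (note that <pad> is the only one with "normalized": false). A sketch of how these fields surface on a loaded tokenizer, under the same hypothetical path as above:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer_dir")  # hypothetical path

# The map still reads back as plain strings...
print(tok.special_tokens_map)

# ...but the per-token flags are kept on the AddedToken objects.
pad = tok.added_tokens_decoder[tok.pad_token_id]
print(pad.content, pad.normalized, pad.lstrip, pad.rstrip)
# expected: <pad> False False False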
tokenizer.json CHANGED
@@ -78,12 +78,6 @@
   "post_processor": {
     "type": "TemplateProcessing",
     "single": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
@@ -92,24 +86,12 @@
       }
     ],
     "pair": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
           "type_id": 0
         }
       },
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 1
-        }
-      },
       {
         "Sequence": {
           "id": "B",
@@ -117,17 +99,7 @@
         }
       }
     ],
-    "special_tokens": {
-      "<s>": {
-        "id": "<s>",
-        "ids": [
-          1
-        ],
-        "tokens": [
-          "<s>"
-        ]
-      }
-    }
+    "special_tokens": {}
   },
   "decoder": {
     "type": "Sequence",
tokenizer_config.json CHANGED
@@ -1,6 +1,4 @@
 {
-  "add_bos_token": false,
-  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -56,6 +54,7 @@
     "\n\n### Response:\n"
   ],
   "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}{% if message['role'] == 'user' %}{{ '\n\n### Instruction:\n' }}{% elif message['role'] == 'assistant' %}{{ '\n\n### Response:\n' }}{% endif %}{{ message['content'] }}{{ eos_token }}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
@@ -64,5 +63,5 @@
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt": true
+  "use_default_system_prompt": false
 }
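
The new chat_template renders each user turn under "### Instruction:" and each assistant turn under "### Response:", appending eos_token after every message; use_default_system_prompt is also switched off. A sketch of the rendered output, again with the hypothetical local path:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer_dir")  # hypothetical path

messages = [
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "4"},
]
text = tok.apply_chat_template(messages, tokenize=False)
print(text)
# expected: "\n\n### Instruction:\nWhat is 2 + 2?</s>\n\n### Response:\n4</s>"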