gpt-neox-da-small-hfcm / tokenizer_config.json
{
  "add_bos_token": false,
  "add_prefix_space": true,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<bos>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "<eos>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": null,
  "special_tokens_map_file": "special_tokens_map.json",
  "tokenizer_class": "GPT2Tokenizer",
  "trim_offsets": true,
  "unk_token": {
    "__type": "AddedToken",
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
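
For reference, a minimal sketch of how this config is consumed when the tokenizer is loaded with Hugging Face transformers. The repository id below is an assumption (only the repo name gpt-neox-da-small-hfcm appears here); substitute the actual hub id or a local directory containing this tokenizer_config.json.

from transformers import AutoTokenizer

# Assumed path: replace with the real hub id or a local checkout
# containing this tokenizer_config.json.
tokenizer = AutoTokenizer.from_pretrained("gpt-neox-da-small-hfcm")

# add_bos_token is false, so <bos> is not prepended automatically.
ids = tokenizer("Hej verden")["input_ids"]
print(tokenizer.decode(ids))  # round-trips without a leading <bos>

# pad_token is null in this config, so batched padding needs a pad
# token assigned explicitly, e.g. reusing <eos>:
tokenizer.pad_token = tokenizer.eos_token

Note that model_max_length is the transformers VERY_LARGE_INTEGER sentinel (int(1e30)), which simply means no maximum sequence length was recorded in this config; callers should truncate to the model's actual context window themselves.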