clmbr-t-base-random / config.json
```json
{
  "architectures": [
    "FEMRModel"
  ],
  "task_config": null,
  "torch_dtype": "float32",
  "transformer_config": {
    "model_type": "",
    "n_layers": 12,
    "vocab_size": 65536
  },
  "transformers_version": "4.35.2"
}
```
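
Below is a minimal sketch of loading a checkpoint described by this config. It assumes the `femr` package (which defines the `FEMRModel` class named under `architectures`) is installed, and it guesses the repo id `StanfordShahLab/clmbr-t-base-random` from the page title; both are assumptions, not confirmed by this page.

```python
# Minimal sketch: load the FEMR checkpoint this config.json describes.
# Assumptions: the `femr` package is installed and provides
# femr.models.transformer.FEMRModel (a transformers PreTrainedModel
# subclass), and the checkpoint lives at the repo id below.
import femr.models.transformer

model = femr.models.transformer.FEMRModel.from_pretrained(
    "StanfordShahLab/clmbr-t-base-random"  # hypothetical repo id
)

# The loaded config mirrors the JSON above:
# 12 transformer layers, a 65536-entry vocabulary, float32 weights.
print(model.config)
```

Note that `task_config` is `null` and `transformer_config.model_type` is empty, so this checkpoint is loaded through the `FEMRModel` class directly rather than through `transformers.AutoModel`, which dispatches on `model_type`.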