bwarner committed
Commit 5756c58 · Parent(s): 1e8d430

Bump `max_position_embeddings` to 8192


Also harmonize `layer_norm_eps` with `norm_eps`, although the former isn't used by the model.

Files changed (1): config.json (+2 −2)
config.json CHANGED
@@ -23,10 +23,10 @@
   "initializer_cutoff_factor": 2.0,
   "initializer_range": 0.02,
   "intermediate_size": 1152,
-  "layer_norm_eps": 1e-12,
+  "layer_norm_eps": 1e-05,
   "local_attention": 128,
   "local_rope_theta": 10000.0,
-  "max_position_embeddings": 512,
+  "max_position_embeddings": 8192,
   "mlp_bias": false,
   "mlp_dropout": 0.0,
   "model_type": "modernbert",