Fixed some numbers.
- config.json +3 -3
- model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "DeepSeek-V3-1B-Test",
   "architectures": [
     "DeepseekV3ForCausalLM"
   ],
@@ -18,11 +18,11 @@
   "hidden_act": "silu",
   "hidden_size": 1024,
   "initializer_range": 0.02,
-  "intermediate_size":
+  "intermediate_size": 5376,
   "kv_lora_rank": 512,
   "max_position_embeddings": 163840,
   "model_type": "deepseek_v3",
-  "moe_intermediate_size":
+  "moe_intermediate_size": 640,
   "moe_layer_freq": 1,
   "n_group": 8,
   "n_routed_experts": 32,
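To sanity-check the edited config, the two updated fields can be read back with transformers. A minimal sketch, assuming the files live in a local DeepSeek-V3-1B-Test directory (the path is borrowed from _name_or_path above and may differ from the actual repo id):

# Load the updated config and confirm the new MLP sizes.
# "DeepSeek-V3-1B-Test" is assumed to be a local directory with config.json.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("DeepSeek-V3-1B-Test", trust_remote_code=True)
print(config.intermediate_size)      # 5376 after this commit
print(config.moe_intermediate_size)  # 640 after this commit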
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a9db5309afa023828200507bbe04d1aaff8667510b47b24ac999f340876da1ee
+size 2099235336
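The new pointer can be checked against a downloaded copy of the weights. At 2,099,235,336 bytes, the file works out to roughly 1.05B parameters if the tensors are stored in a 16-bit dtype (2 bytes per parameter), in line with the 1B in the model name, though the dtype is an assumption here. A minimal verification sketch, assuming model.safetensors sits in the working directory:

# Recompute the git-lfs pointer fields (sha256 oid and byte size) for a
# local model.safetensors and compare them to the values in the diff above.
import hashlib
import os

path = "model.safetensors"  # assumed local path
expected_oid = "a9db5309afa023828200507bbe04d1aaff8667510b47b24ac999f340876da1ee"
expected_size = 2099235336

assert os.path.getsize(path) == expected_size, "size mismatch"

digest = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks so the 2 GB file is never fully in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("pointer fields match")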