Upload folder using huggingface_hub (#3)
- 9f60efa65c8a25943e7fec7604239136c60fb3bd364778272e2ddb71cf459775 (5ac27bf0382d97ff66ed2a78a0189958c8548281)
- config.json +7 -1
- smash_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/covalent/.cache/models/
+  "_name_or_path": "/covalent/.cache/models/tmpi486xracp_oaqqyd",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -51,6 +51,12 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": true,
   "torch_dtype": "float16",
+  "transformers.js_config": {
+    "kv_cache_dtype": {
+      "fp16": "float16",
+      "q4f16": "float16"
+    }
+  },
   "transformers_version": "4.46.2",
   "use_cache": true,
   "vocab_size": 128256,
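The added `transformers.js_config.kv_cache_dtype` map tells Transformers.js which dtype to use for the attention KV cache at each quantization level; here both the `fp16` and `q4f16` variants keep the cache in `float16`. A minimal sketch of how a consumer would pick this up, assuming a hypothetical repo id and the Transformers.js v3 `pipeline` API:

```ts
import { pipeline } from "@huggingface/transformers";

// Hypothetical repo id; substitute the actual model repository.
const MODEL_ID = "PrunaAI/llama-smashed";

// Request the q4f16 weight variant; per the config above, the runtime
// keeps the KV cache in float16 for this dtype.
const generator = await pipeline("text-generation", MODEL_ID, {
  dtype: "q4f16",
});

const output = await generator("Hello, my name is", { max_new_tokens: 32 });
console.log(output);
```

This is a sketch, not the author's tooling: the config diff only records the cache-dtype mapping, and the model id above is a placeholder.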
smash_config.json CHANGED
@@ -28,7 +28,7 @@
   "quant_llm-int8_weight_bits": 8,
   "max_batch_size": 1,
   "device": "cuda",
-  "cache_dir": "/covalent/.cache/models/
+  "cache_dir": "/covalent/.cache/models/tmpi486xrac",
   "task": "",
   "save_load_fn": "bitsandbytes",
   "save_load_fn_args": {}