Upload config.json with huggingface_hub
config.json  CHANGED  (+1 -10)
@@ -35,15 +35,6 @@
   "q_lora_rank": 1536,
   "qk_nope_head_dim": 128,
   "qk_rope_head_dim": 64,
-  "quantization_config": {
-    "activation_scheme": "dynamic",
-    "fmt": "e4m3",
-    "quant_method": "fp8",
-    "weight_block_size": [
-      128,
-      128
-    ]
-  },
   "rms_norm_eps": 1e-06,
   "rope_theta": 50000.0,
   "routed_scaling_factor": 2.827,
@@ -66,4 +57,4 @@
   "use_cache": true,
   "v_head_dim": 128,
   "vocab_size": 163840
-}
+}
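
The removed quantization_config block had marked the checkpoint as FP8-quantized (e4m3 weights, dynamic activation scaling, 128x128 weight blocks); with it gone, loaders read the config as describing unquantized weights. As a minimal sketch, assuming the edit was made on a local copy and pushed with huggingface_hub's HfApi.upload_file (whose default commit message matches this commit's title), the change could be reproduced as below; the repo id and local path are placeholders, not values from the commit:

import json

from huggingface_hub import HfApi

REPO_ID = "org/model"        # placeholder: the target model repo
CONFIG_PATH = "config.json"  # placeholder: local copy of the config

# Drop the quantization_config block, mirroring this commit's removed lines.
with open(CONFIG_PATH) as f:
    config = json.load(f)
config.pop("quantization_config", None)

# Rewrite the file; json.dump emits no trailing newline, so add one explicitly.
with open(CONFIG_PATH, "w") as f:
    json.dump(config, f, indent=2)
    f.write("\n")

# Push the edited file back to the Hub; the default commit message is
# "Upload config.json with huggingface_hub".
HfApi().upload_file(
    path_or_fileobj=CONFIG_PATH,
    path_in_repo="config.json",
    repo_id=REPO_ID,
)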