{
  "architectures": [
    "MistralForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 25600,
  "max_position_embeddings": 32768,
  "model_type": "mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "pad_token_id": 11,
  "rms_norm_eps": 1e-05,
  "rope_theta": 100000000.0,
  "sliding_window": null,
  "sparsity": [
    {
      "layer_idx": 0,
      "layer_type": "self_attn",
      "param_name": "model.layers.0.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 1,
      "layer_type": "self_attn",
      "param_name": "model.layers.1.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 1,
      "layer_type": "self_attn",
      "param_name": "model.layers.1.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 2,
      "layer_type": "self_attn",
      "param_name": "model.layers.2.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 2,
      "layer_type": "self_attn",
      "param_name": "model.layers.2.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 3,
      "layer_type": "self_attn",
      "param_name": "model.layers.3.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 4,
      "layer_type": "self_attn",
      "param_name": "model.layers.4.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 4,
      "layer_type": "self_attn",
      "param_name": "model.layers.4.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 5,
      "layer_type": "self_attn",
      "param_name": "model.layers.5.self_attn.k_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 5,
      "layer_type": "self_attn",
      "param_name": "model.layers.5.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 7,
      "layer_type": "self_attn",
      "param_name": "model.layers.7.self_attn.k_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 7,
      "layer_type": "self_attn",
      "param_name": "model.layers.7.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 8,
      "layer_type": "self_attn",
      "param_name": "model.layers.8.self_attn.k_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 9,
      "layer_type": "self_attn",
      "param_name": "model.layers.9.self_attn.k_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 12,
      "layer_type": "self_attn",
      "param_name": "model.layers.12.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 15,
      "layer_type": "self_attn",
      "param_name": "model.layers.15.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 15,
      "layer_type": "self_attn",
      "param_name": "model.layers.15.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 16,
      "layer_type": "self_attn",
      "param_name": "model.layers.16.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 17,
      "layer_type": "self_attn",
      "param_name": "model.layers.17.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 18,
      "layer_type": "self_attn",
      "param_name": "model.layers.18.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 19,
      "layer_type": "self_attn",
      "param_name": "model.layers.19.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 19,
      "layer_type": "self_attn",
      "param_name": "model.layers.19.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 20,
      "layer_type": "self_attn",
      "param_name": "model.layers.20.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 20,
      "layer_type": "self_attn",
      "param_name": "model.layers.20.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 24,
      "layer_type": "self_attn",
      "param_name": "model.layers.24.self_attn.k_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 24,
      "layer_type": "self_attn",
      "param_name": "model.layers.24.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 24,
      "layer_type": "self_attn",
      "param_name": "model.layers.24.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 25,
      "layer_type": "self_attn",
      "param_name": "model.layers.25.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 25,
      "layer_type": "self_attn",
      "param_name": "model.layers.25.self_attn.k_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 25,
      "layer_type": "self_attn",
      "param_name": "model.layers.25.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 25,
      "layer_type": "self_attn",
      "param_name": "model.layers.25.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 26,
      "layer_type": "self_attn",
      "param_name": "model.layers.26.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 26,
      "layer_type": "self_attn",
      "param_name": "model.layers.26.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 27,
      "layer_type": "self_attn",
      "param_name": "model.layers.27.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 27,
      "layer_type": "mlp",
      "param_name": "model.layers.27.mlp.down_proj.weight",
      "sparsity": 50.0,
      "total": 131072000,
      "zeroed": 65536000
    },
    {
      "layer_idx": 28,
      "layer_type": "self_attn",
      "param_name": "model.layers.28.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 30,
      "layer_type": "self_attn",
      "param_name": "model.layers.30.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 31,
      "layer_type": "self_attn",
      "param_name": "model.layers.31.self_attn.q_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    },
    {
      "layer_idx": 31,
      "layer_type": "self_attn",
      "param_name": "model.layers.31.self_attn.v_proj.weight",
      "sparsity": 50.0,
      "total": 5242880,
      "zeroed": 2621440
    },
    {
      "layer_idx": 31,
      "layer_type": "self_attn",
      "param_name": "model.layers.31.self_attn.o_proj.weight",
      "sparsity": 50.0,
      "total": 20971520,
      "zeroed": 10485760
    }
  ],
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.50.1",
  "unsloth_fixed": true,
  "use_cache": true,
  "vocab_size": 131072
}