{ "hidden_size": 1024, "intermediate_size": 3072, "max_window_layers": 28, "num_attention_heads": 16, "num_key_value_heads": 8, "num_hidden_layers": 28, "rope_theta": 1000000, "vocab_size": 215669, "lm_vocab_size": 151669, "lm_head_size": 64000, "bov_token_id": 151652 }