{
  "_commit_hash": null,
  "_name_or_path": "./checkpoints/heron_git_v1/checkpoint-4800/",
  "architectures": [
    "GitJapaneseStableLMAlphaForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "stabilityai/japanese-stablelm-base-alpha-7b--configuration_japanese_stablelm_alpha.JapaneseStableLMAlphaConfig",
    "AutoModelForCausalLM": "stabilityai/japanese-stablelm-base-alpha-7b--modeling_japanese_stablelm_alpha.JapaneseStableLMAlphaForCausalLM"
  },
  "bos_token_id": 3,
  "classifier_dropout": 0.1,
  "eos_token_id": 3,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 2048,
  "model_type": "git_japanese_stablelm_alpha",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_image_with_embedding": null,
  "rotary_emb_base": 10000,
  "rotary_pct": 0.25,
  "rotary_scale_base": 512,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.30.2",
  "use_bias_in_mlp": false,
  "use_cache": true,
  "use_parallel_residual": true,
  "vision_config": {
    "dropout": 0.0,
    "hidden_size": 1024,
    "image_size": 336,
    "intermediate_size": 4096,
    "model_type": "clip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 24,
    "patch_size": 14,
    "projection_dim": 768
  },
  "vision_model_name": "openai/clip-vit-large-patch14-336",
  "vocab_size": 65536
}
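
A minimal sketch of how a few quantities implied by this config can be derived, using only the standard library; the file name "config.json" is assumed to be this file saved locally, and the derived values follow directly from the fields above.

import json

with open("config.json") as f:
    cfg = json.load(f)

# Language model: per-head dimension and how much of it rotary embeddings cover.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]      # 4096 / 32 = 128
rotary_dims = int(cfg["rotary_pct"] * head_dim)                  # 0.25 * 128 = 32

# Vision tower (CLIP ViT-L/14 at 336 px): patches per image side and per frame.
vision = cfg["vision_config"]
patches_per_side = vision["image_size"] // vision["patch_size"]  # 336 / 14 = 24
num_patches = patches_per_side ** 2                              # 576 (plus 1 CLS token)

print(head_dim, rotary_dims, num_patches)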