{
  "architectures": [
    "FlamingoModel"
  ],
  "clip_model_type": "openai/clip-vit-large-patch14",
  "dim": 768,
  "dim_visual": 1024,
  "lm": "facebook/opt-125m",
  "resampler_act": "sqrelu",
  "resampler_depth": 6,
  "resampler_dim_head": 64,
  "resampler_ff_mult": 4,
  "resampler_heads": 8,
  "resampler_num_latents": 64,
  "resampler_num_time_embeds": 4,
  "torch_dtype": "float32",
  "transformers_version": "4.21.0",
  "xattn_act": "sqrelu",
  "xattn_dim_head": 64,
  "xattn_every": 1,
  "xattn_ff_mult": 4,
  "xattn_heads": 8
}
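
As a quick sanity check, the sketch below (not the model's own loading code) reads a local copy of this config and confirms that `dim` and `dim_visual` correspond to the hidden sizes of the two backbones the config names, using only stock `transformers` utilities; the `config.json` file path is an assumption.

```python
# Minimal sketch: cross-check "dim" / "dim_visual" against the hidden sizes
# of the language model and CLIP vision tower named in this config.
import json
from transformers import AutoConfig

with open("config.json") as f:  # assumed local path to the config shown above
    cfg = json.load(f)

lm_cfg = AutoConfig.from_pretrained(cfg["lm"])                  # facebook/opt-125m
clip_cfg = AutoConfig.from_pretrained(cfg["clip_model_type"])   # openai/clip-vit-large-patch14

# OPT-125m has hidden_size 768; CLIP ViT-L/14's vision tower has hidden_size 1024.
assert cfg["dim"] == lm_cfg.hidden_size
assert cfg["dim_visual"] == clip_cfg.vision_config.hidden_size
print("dim and dim_visual match the named backbones")
```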