{
"adapter_config": {
"attention_dropout": 0.0,
"float32_attention": true,
"head_dim": 72,
"hidden_act": "silu",
"hidden_size": 1152,
"image_feature_dropout": 0.0,
"image_padding_embed": null,
"initializer_range": 0.02,
"intermediate_size": 18944,
"model_type": "",
"num_attention_heads": 16,
"num_key_value_heads": 16,
"residual_dropout": 0.0,
"text_hidden_size": 3584,
"vit_layers": [
-3,
-9
]
},
"architectures": [
"MolmoActForActionReasoning"
],
"auto_map": {
"AutoConfig": "configuration_molmoact.MolmoActConfig",
"AutoModelForImageTextToText": "modeling_molmoact.MolmoActForActionReasoning"
},
"image_patch_id": 152066,
"initializer_range": 0.02,
"llm_config": {
"additional_vocab_size": 128,
"attention_dropout": 0.0,
"embedding_dropout": 0.0,
"head_dim": 128,
"hidden_act": "silu",
"hidden_size": 3584,
"initializer_range": 0.02,
"intermediate_size": 18944,
"layer_norm_eps": 1e-06,
"max_position_embeddings": 4096,
"model_type": "molmoact_llm",
"norm_after": false,
"num_attention_heads": 28,
"num_hidden_layers": 28,
"num_key_value_heads": 4,
"qk_norm_type": "olmo",
"qkv_bias": true,
"residual_dropout": 0.0,
"rope_scaling": null,
"rope_theta": 1000000.0,
"use_cache": true,
"use_qk_norm": false,
"vocab_size": 152064
},
"model_type": "molmoact",
"n_action_bins": 256,
"norm_stats": {
"libero_spatial_no_noops_modified": {
"action": {
"max": [
0.9375,
0.9375,
0.9375,
0.1971428543329239,
0.33642858266830444,
0.375,
1.0
],
"mean": [
0.15312479436397552,
0.13707277178764343,
-0.15526802837848663,
-0.005176450591534376,
-0.01120874285697937,
-0.020194264128804207,
0.4578818082809448
],
"min": [
-0.9375,
-0.9375,
-0.9375,
-0.1875,
-0.3675000071525574,
-0.36000001430511475,
0.0
],
"q01": [
-0.7454732114076613,
-0.6616071462631226,
-0.9375,
-0.1071428582072258,
-0.20678570866584778,
-0.1842857152223587,
0.0
],
"q99": [
0.9375,
0.8758928775787354,
0.9321428537368774,
0.1039285734295845,
0.17678570747375488,
0.14571428298950195,
1.0
],
"std": [
0.41272708773612976,
0.34724321961402893,
0.50869220495224,
0.037266165018081665,
0.07244449853897095,
0.05762382969260216,
0.49827873706817627
]
},
"num_trajectories": 432,
"num_transitions": 52970,
"proprio": {
"max": [
0.1759040206670761,
0.3904820382595062,
1.3290715217590332,
3.4566118717193604,
1.2268599271774292,
1.0429412126541138,
0.0,
0.041053611785173416,
0.000775813648942858
],
"mean": [
-0.024462558329105377,
0.106529600918293,
1.0580483675003052,
3.0628468990325928,
-0.10464039444923401,
0.08307311683893204,
0.0,
0.01995457336306572,
-0.020162804052233696
],
"min": [
-0.3095473051071167,
-0.29250794649124146,
0.9095591306686401,
2.497488260269165,
-1.8006486892700195,
-0.7207611203193665,
0.0,
-0.0004703797458205372,
-0.041536275297403336
],
"q01": [
-0.2727657300233841,
-0.23721413239836692,
0.9160063165426254,
2.77949666261673,
-1.3187511622905732,
-0.41989982962608335,
0.0,
0.001503719249740243,
-0.03989770736545324
],
"q99": [
0.13529365032911292,
0.3629165390133857,
1.2862326657772063,
3.2829698753356933,
0.9332760351896285,
0.6325724506378171,
0.0,
0.039933966137468815,
-0.001671919699292631
],
"std": [
0.1101478561758995,
0.13784688711166382,
0.1044282391667366,
0.10451053828001022,
0.4112098217010498,
0.2176690548658371,
0.0,
0.017260896041989326,
0.0171116404235363
]
}
}
},
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.52.3",
"use_cache": true,
"vit_config": {
"attention_dropout": 0.0,
"float32_attention": true,
"head_dim": 72,
"hidden_act": "gelu_pytorch_tanh",
"hidden_size": 1152,
"image_default_input_size": [
378,
378
],
"image_num_pos": 729,
"image_patch_size": 14,
"initializer_range": 0.02,
"intermediate_size": 4304,
"layer_norm_eps": 1e-06,
"model_type": "molmoact_vit",
"num_attention_heads": 16,
"num_hidden_layers": 27,
"num_key_value_heads": 16,
"patch_bias": true,
"pre_layernorm": false,
"residual_dropout": 0.0,
"use_cls_token": false
}
}
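
A minimal usage sketch (not part of config.json itself): loading this checkpoint with Hugging Face transformers. The repo id below is a placeholder, the custom `configuration_molmoact` / `modeling_molmoact` modules referenced under `"auto_map"` require `trust_remote_code=True`, and the availability of an `AutoProcessor` for this repo is assumed here.

```python
import torch
from transformers import AutoModelForImageTextToText, AutoProcessor

repo = "<repo-id-of-this-checkpoint>"  # placeholder: the model repo that ships this config.json

# The MolmoActConfig / MolmoActForActionReasoning classes listed under "auto_map"
# live in the repo itself, so remote code must be trusted.
processor = AutoProcessor.from_pretrained(repo, trust_remote_code=True)
model = AutoModelForImageTextToText.from_pretrained(
    repo,
    trust_remote_code=True,
    torch_dtype=torch.float32,  # matches "torch_dtype": "float32" in this config
)
```

A second sketch showing how the `norm_stats` block is commonly consumed in OpenVLA-style action pipelines: a normalized action in [-1, 1] is mapped back to dataset units with the `q01`/`q99` statistics, and `n_action_bins = 256` sets the discretization granularity. Whether MolmoAct follows exactly this convention is an assumption; only the statistics and bin count are taken from this config.

```python
import json
import numpy as np

# Assumed file name; adjust the path to wherever this config is stored locally.
config = json.load(open("config.json"))
stats = config["norm_stats"]["libero_spatial_no_noops_modified"]["action"]
q01 = np.array(stats["q01"])
q99 = np.array(stats["q99"])

def unnormalize(action_norm: np.ndarray) -> np.ndarray:
    """Map a normalized 7-DoF action from [-1, 1] back to dataset units via q01/q99."""
    return 0.5 * (action_norm + 1.0) * (q99 - q01) + q01

# One common discretization choice: evenly spaced bin values over [-1, 1].
bin_values = np.linspace(-1.0, 1.0, config["n_action_bins"])
```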