openvla-7b-grounded / config.json
{
"arch_specifier": "no-align+fused-gelu-mlp",
"architectures": [
"OpenVLAForActionPrediction"
],
"auto_map": {
"AutoConfig": "configuration_prismatic.OpenVLAConfig",
"AutoModelForVision2Seq": "modeling_prismatic.OpenVLAForActionPrediction"
},
"hf_llm_id": "meta-llama/Llama-2-7b-hf",
"image_resize_strategy": "resize-naive",
"image_sizes": [
224,
224
],
"llm_backbone_id": "llama2-7b-pure",
"llm_max_length": 2048,
"model_type": "openvla",
"n_action_bins": 256,
"norm_stats": {
"bridge_dataset": {
"action": {
"mask": [
true,
true,
true,
true,
true,
true,
false
],
"max": [
0.41691166162490845,
0.25864794850349426,
0.21218234300613403,
3.122201919555664,
1.8618112802505493,
6.280478477478027,
1.0
],
"mean": [
0.00023341824999079108,
0.0001300419680774212,
-0.0001276263501495123,
-0.0001556589122628793,
-0.00040393511881120503,
0.0002355832839384675,
0.5764582753181458
],
"min": [
-0.4007510244846344,
-0.13874775171279907,
-0.22553899884223938,
-3.2010786533355713,
-1.8618112802505493,
-6.279075622558594,
0.0
],
"q01": [
-0.02872725307941437,
-0.04170349963009357,
-0.026093858778476715,
-0.08092105075716972,
-0.09288699507713317,
-0.20718276381492615,
0.0
],
"q99": [
0.028309678435325586,
0.040855254605412394,
0.040161586627364146,
0.08192047759890528,
0.07792850524187081,
0.20382574498653397,
1.0
],
"std": [
0.009765730239450932,
0.013689522631466389,
0.012667152099311352,
0.028534481301903725,
0.030637893825769424,
0.07691768556833267,
0.49736595153808594
]
},
"num_trajectories": 60064,
"num_transitions": 2135463,
"proprio": {
"max": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"mean": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"min": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"q01": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"q99": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"std": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
}
}
},
"output_projector_states": false,
"pad_to_multiple_of": 64,
"pad_token_id": 32000,
"text_config": {
"model_type": "llama",
"pad_token_id": 32000,
"torch_dtype": "bfloat16",
"vocab_size": 32064
},
"timm_model_ids": [
"vit_large_patch14_reg4_dinov2.lvd142m",
"vit_so400m_patch14_siglip_224"
],
"timm_override_act_layers": [
null,
null
],
"torch_dtype": "bfloat16",
"transformers_version": "4.40.1",
"use_fused_vision_backbone": true,
"vision_backbone_id": "dinosiglip-vit-so-224px"
}
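
For reference, the "auto_map" entries above mean both the config class and the model class ship inside the repository itself, so loading goes through the Hub's remote-code path. A minimal loading sketch, assuming the repo id shrg7/openvla-7b-grounded (inferred from the page path) and a placeholder image and instruction; the processor's own auto_map lives in the repo's preprocessor files rather than in this config.json:

import torch
from PIL import Image
from transformers import AutoModelForVision2Seq, AutoProcessor

MODEL_ID = "shrg7/openvla-7b-grounded"  # assumed from the page path above

# trust_remote_code=True is required: per "auto_map", AutoConfig resolves to
# configuration_prismatic.OpenVLAConfig and AutoModelForVision2Seq to
# modeling_prismatic.OpenVLAForActionPrediction, both bundled in the repo.
processor = AutoProcessor.from_pretrained(MODEL_ID, trust_remote_code=True)
vla = AutoModelForVision2Seq.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
    low_cpu_mem_usage=True,
    trust_remote_code=True,
).to("cuda:0")

image = Image.open("frame.png")  # placeholder third-person RGB observation
prompt = "In: What action should the robot take to pick up the cup?\nOut:"
inputs = processor(prompt, image).to("cuda:0", dtype=torch.bfloat16)

# predict_action is defined in the bundled modeling_prismatic.py;
# unnorm_key="bridge_dataset" selects the norm_stats entry above.
action = vla.predict_action(**inputs, unnorm_key="bridge_dataset", do_sample=False)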
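
"n_action_bins": 256 together with the "norm_stats" block determines how generated tokens become continuous actions: each of the 7 action dimensions is decoded from one of 256 uniform bins over [-1, 1], then de-normalized against the Bridge 1st/99th-percentile bounds, with "mask" exempting the final (gripper) dimension. A sketch of that inverse path, mirroring the decoding logic released with OpenVLA (the tail-of-vocabulary token mapping follows the OpenVLA action tokenizer; treat this as indicative rather than a drop-in replacement for the bundled code):

import numpy as np

N_ACTION_BINS = 256       # "n_action_bins" above
LLAMA_VOCAB_SIZE = 32000  # base Llama-2 vocab; ids 32000..32063 are pad/extra

# Uniform bins over [-1, 1]; decoded values land on bin centers.
bins = np.linspace(-1.0, 1.0, N_ACTION_BINS)
bin_centers = (bins[:-1] + bins[1:]) / 2.0

def decode_action(token_ids: np.ndarray, action_stats: dict) -> np.ndarray:
    """Map 7 generated token ids to a continuous Bridge action.

    action_stats is norm_stats["bridge_dataset"]["action"] from this config.
    """
    # Action tokens occupy the tail of the Llama vocabulary (OpenVLA convention).
    discretized = LLAMA_VOCAB_SIZE - token_ids
    idx = np.clip(discretized - 1, 0, bin_centers.shape[0] - 1)
    normalized = bin_centers[idx]  # values in [-1, 1]

    # De-normalize against q01/q99; mask=False dims (the gripper) pass through.
    q01, q99 = np.asarray(action_stats["q01"]), np.asarray(action_stats["q99"])
    mask = np.asarray(action_stats["mask"], dtype=bool)
    raw = 0.5 * (normalized + 1.0) * (q99 - q01) + q01
    return np.where(mask, raw, normalized)

With these stats, a maximal normalized x-translation (+1) decodes to roughly 0.0283, the q99 bound for that dimension, so per-step commands stay inside the Bridge percentile envelope. The all-zero "proprio" block indicates that proprioceptive statistics were simply not populated for this dataset.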
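
Finally, "use_fused_vision_backbone": true with the two "timm_model_ids" means visual features come from a DINOv2 ViT and a SigLIP ViT run side by side and concatenated channel-wise before the projector. A rough sketch of that fusion using timm directly; the prefix-token counts (1 class + 4 register tokens for the DINOv2 model, none for SigLIP) are assumptions drawn from the timm model definitions, not from this config:

import timm
import torch

# The two backbones named in "timm_model_ids" above.
dino = timm.create_model(
    "vit_large_patch14_reg4_dinov2.lvd142m",
    pretrained=True, num_classes=0, img_size=224,  # "image_sizes": [224, 224]
)
siglip = timm.create_model(
    "vit_so400m_patch14_siglip_224", pretrained=True, num_classes=0,
)

x = torch.randn(1, 3, 224, 224)  # one resize-naive 224x224 RGB frame
with torch.no_grad():
    dino_tokens = dino.forward_features(x)[:, 5:]  # drop cls + 4 reg -> [1, 256, 1024]
    siglip_tokens = siglip.forward_features(x)     # no prefix tokens -> [1, 256, 1152]

# Channel-wise fusion: 256 patch tokens of width 1024 + 1152 = 2176.
fused = torch.cat([dino_tokens, siglip_tokens], dim=-1)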