{
"_name_or_path": "openvla/openvla-7b",
"arch_specifier": "no-align+fused-gelu-mlp",
"architectures": [
"OpenVLAForActionPrediction"
],
"auto_map": {
"AutoConfig": "openvla/openvla-7b--configuration_prismatic.OpenVLAConfig",
"AutoModelForVision2Seq": "openvla/openvla-7b--modeling_prismatic.OpenVLAForActionPrediction"
},
"hf_llm_id": "meta-llama/Llama-2-7b-hf",
"image_resize_strategy": "resize-naive",
"image_sizes": [
224,
224
],
"llm_backbone_id": "llama2-7b-pure",
"llm_max_length": 2048,
"model_type": "openvla",
"n_action_bins": 256,
"norm_stats": {
"libero_object": {
"action": {
"mean": [
0.07096529006958008,
0.13498851656913757,
-0.04601382836699486,
0.00123520044144243,
0.006998839322477579,
-0.015027612447738647,
0.46428999304771423
],
"std": [
0.2681235373020172,
0.43846824765205383,
0.4474974274635315,
0.024446550756692886,
0.049355510622262955,
0.042107198387384415,
0.49879148602485657
],
"max": [
0.9375,
0.8919642567634583,
0.9375,
0.17678570747375488,
0.35035714507102966,
0.1810714304447174,
1.0
],
"min": [
-0.8839285969734192,
-0.9375,
-0.9375,
-0.15000000596046448,
-0.29035714268684387,
-0.32892856001853943,
0.0
],
"q01": [
-0.5383928418159485,
-0.8758928775787354,
-0.9375,
-0.06964285671710968,
-0.11678571254014969,
-0.15964286029338837,
0.0
],
"q99": [
0.8464285731315613,
0.84375,
0.9375,
0.08142857253551483,
0.14892856776714325,
0.0867857113480568,
1.0
],
"mask": [
true,
true,
true,
true,
true,
true,
false
]
},
"proprio": {
"mean": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"std": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"max": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"min": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"q01": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"q99": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
},
"num_transitions": 66984,
"num_trajectories": 454
}
},
"output_projector_states": false,
"pad_to_multiple_of": 64,
"pad_token_id": 32000,
"text_config": {
"model_type": "llama",
"pad_token_id": 32000,
"torch_dtype": "bfloat16",
"vocab_size": 32064
},
"timm_model_ids": [
"vit_large_patch14_reg4_dinov2.lvd142m",
"vit_so400m_patch14_siglip_224"
],
"timm_override_act_layers": [
null,
null
],
"torch_dtype": "bfloat16",
"transformers_version": "4.40.1",
"use_fused_vision_backbone": true,
"vision_backbone_id": "dinosiglip-vit-so-224px"
}
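
As context for the "auto_map" entries above: checkpoints carrying this config are typically loaded through the Transformers auto classes with trust_remote_code=True, which resolves OpenVLAConfig and OpenVLAForActionPrediction from the referenced openvla/openvla-7b modules. A minimal loading sketch, following the upstream OpenVLA model card (the image path and prompt are illustrative placeholders):

import torch
from PIL import Image
from transformers import AutoModelForVision2Seq, AutoProcessor

# Repo id is illustrative; substitute the checkpoint that carries this config.
repo_id = "openvla/openvla-7b"

processor = AutoProcessor.from_pretrained(repo_id, trust_remote_code=True)
vla = AutoModelForVision2Seq.from_pretrained(
    repo_id, torch_dtype=torch.bfloat16, trust_remote_code=True
).to("cuda:0")

image = Image.open("frame.png")  # illustrative camera observation
prompt = "In: What action should the robot take to pick up the object?\nOut:"
inputs = processor(prompt, image).to("cuda:0", dtype=torch.bfloat16)

# unnorm_key selects which entry of norm_stats to use; this config
# provides exactly one: "libero_object".
action = vla.predict_action(**inputs, unnorm_key="libero_object", do_sample=False)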
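
The "norm_stats" block is what that unnorm_key lookup consults: predicted actions come out of the model normalized to [-1, 1] (discretized over n_action_bins = 256), and the q01/q99 quantiles rescale each masked dimension back to the dataset's action space, while the gripper dimension (mask = false) passes through unchanged. A sketch of that un-normalization step, with the statistics copied from the "libero_object" entry above (the function name is illustrative, not the model's actual method):

import numpy as np

# q01 / q99 / mask copied from norm_stats["libero_object"]["action"] above.
q01 = np.array([-0.5383928418159485, -0.8758928775787354, -0.9375,
                -0.06964285671710968, -0.11678571254014969,
                -0.15964286029338837, 0.0])
q99 = np.array([0.8464285731315613, 0.84375, 0.9375,
                0.08142857253551483, 0.14892856776714325,
                0.0867857113480568, 1.0])
mask = np.array([True, True, True, True, True, True, False])

def unnormalize_action(normalized: np.ndarray) -> np.ndarray:
    """Map a normalized action in [-1, 1] back to the dataset's action space.

    Dimensions with mask=True are rescaled from [-1, 1] to [q01, q99];
    the gripper dimension (mask=False) is passed through unchanged.
    """
    rescaled = 0.5 * (normalized + 1.0) * (q99 - q01) + q01
    return np.where(mask, rescaled, normalized)

# Example: the midpoint of the normalized range maps to the midpoint of [q01, q99].
print(unnormalize_action(np.zeros(7)))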