{ "_name_or_path": "mPLUG/TinyChart-3B-768/siglip", "attention_dropout": 0.0, "attn_implementation": null, "hidden_act": "gelu_pytorch_tanh", "hidden_size": 1152, "image_mean": [ 0.5, 0.5, 0.5 ], "image_size": 768, "intermediate_size": 4304, "layer_norm_eps": 1e-06, "model_type": "siglip_vision_model", "num_attention_heads": 16, "num_channels": 3, "num_hidden_layers": 27, "patch_size": 14, "tome_r": 84, "transformers_version": "4.37.2" }