{
  "_name_or_path": "facebook/dinov2-base-imagenet1k-1-layer",
  "apply_layernorm": true,
  "architectures": [
    "Dinov2ForImageClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "drop_path_rate": 0.0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 768,
  "id2label": {
    "0": "Disturbed Galaxies",
    "1": "Merging Galaxies",
    "2": "Round Smooth Galaxies",
    "3": "In-between Round Smooth Galaxies",
    "4": "Cigar Shaped Smooth Galaxies",
    "5": "Barred Spiral Galaxies",
    "6": "Unbarred Tight Spiral Galaxies",
    "7": "Unbarred Loose Spiral Galaxies",
    "8": "Edge-on Galaxies without Bulge",
    "9": "Edge-on Galaxies with Bulge"
  },
  "image_size": 518,
  "initializer_range": 0.02,
  "label2id": {
    "Barred Spiral Galaxies": 5,
    "Cigar Shaped Smooth Galaxies": 4,
    "Disturbed Galaxies": 0,
    "Edge-on Galaxies with Bulge": 9,
    "Edge-on Galaxies without Bulge": 8,
    "In-between Round Smooth Galaxies": 3,
    "Merging Galaxies": 1,
    "Round Smooth Galaxies": 2,
    "Unbarred Loose Spiral Galaxies": 7,
    "Unbarred Tight Spiral Galaxies": 6
  },
  "layer_norm_eps": 1e-06,
  "layerscale_value": 1.0,
  "mlp_ratio": 4,
  "model_type": "dinov2",
  "num_attention_heads": 12,
  "num_channels": 3,
  "num_hidden_layers": 12,
  "out_features": [
    "stage12"
  ],
  "out_indices": [
    12
  ],
  "patch_size": 14,
  "problem_type": "single_label_classification",
  "qkv_bias": true,
  "reshape_hidden_states": true,
  "stage_names": [
    "stem",
    "stage1",
    "stage2",
    "stage3",
    "stage4",
    "stage5",
    "stage6",
    "stage7",
    "stage8",
    "stage9",
    "stage10",
    "stage11",
    "stage12"
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.37.2",
  "use_swiglu_ffn": false
}
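
For context, here is a minimal sketch of how a config like this is consumed at inference time with transformers. The checkpoint path is a placeholder assumption (this page shows only the config, not the repository id), and the image processor is assumed to come from the base checkpoint named in `_name_or_path`; `problem_type: "single_label_classification"` means the model emits one logit per class and the top logit is mapped back through `id2label`.

```python
# Minimal usage sketch, not the author's published pipeline. Assumptions are marked.
import torch
from PIL import Image
from transformers import AutoImageProcessor, Dinov2ForImageClassification

# ASSUMPTION: preprocessing settings are taken from the base checkpoint
# referenced by "_name_or_path" in the config above.
processor = AutoImageProcessor.from_pretrained("facebook/dinov2-base-imagenet1k-1-layer")

# ASSUMPTION: "path/to/checkpoint" is the directory (or hub repo) holding
# this config.json together with the fine-tuned weights.
model = Dinov2ForImageClassification.from_pretrained("path/to/checkpoint")
model.eval()

image = Image.open("galaxy.jpg").convert("RGB")  # any RGB galaxy cutout
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 10): one logit per galaxy class

predicted = logits.argmax(-1).item()
print(model.config.id2label[predicted])  # e.g. "Barred Spiral Galaxies"
```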