Kalaphant committed on
Commit
827aa98
1 Parent(s): a45391c

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +5 -14
config.json CHANGED
@@ -4,9 +4,6 @@
4
  "num_layers": 12,
5
  "hidden_size": 768,
6
  "vocab_size": 50257,
7
- "video_resolution": "1080p",
8
- "frame_rate": 30,
9
- "num_frames": 300,
10
  "num_attention_heads": 12,
11
  "intermediate_size": 3072,
12
  "hidden_act": "gelu",
@@ -14,17 +11,15 @@
14
  "layer_norm_eps": 1e-12,
15
  "dropout": 0.1,
16
  "attention_dropout": 0.1,
17
- "num_labels": 2,
18
- "use_cache": true,
19
  "bos_token_id": 50256,
20
  "eos_token_id": 50256,
21
  "pad_token_id": 0,
22
  "unk_token_id": 50257,
23
  "special_tokens_map": {
24
- "bos_token": "<bos_token>",
25
- "eos_token": "<eos_token>",
26
- "pad_token": "<pad_token>",
27
- "unk_token": "<unk_token>"
28
  },
29
  "tokenizer_class": "BertTokenizer",
30
  "additional_special_tokens": [
@@ -32,11 +27,7 @@
32
  "<special_token_2>"
33
  ],
34
  "task_specific_params": {
35
- "text-to-video": {
36
- "video_resolution": "1080p",
37
- "frame_rate": 30,
38
- "num_frames": 300
39
- }
40
  },
41
  "device_map": "auto"
42
  }
 
4
  "num_layers": 12,
5
  "hidden_size": 768,
6
  "vocab_size": 50257,
 
 
 
7
  "num_attention_heads": 12,
8
  "intermediate_size": 3072,
9
  "hidden_act": "gelu",
 
11
  "layer_norm_eps": 1e-12,
12
  "dropout": 0.1,
13
  "attention_dropout": 0.1,
 
 
14
  "bos_token_id": 50256,
15
  "eos_token_id": 50256,
16
  "pad_token_id": 0,
17
  "unk_token_id": 50257,
18
  "special_tokens_map": {
19
+ "bos_token": "<bos>",
20
+ "eos_token": "<eos>",
21
+ "pad_token": "<pad>",
22
+ "unk_token": "<unk>"
23
  },
24
  "tokenizer_class": "BertTokenizer",
25
  "additional_special_tokens": [
 
27
  "<special_token_2>"
28
  ],
29
  "task_specific_params": {
30
+ "text-to-text": {}
 
 
 
 
31
  },
32
  "device_map": "auto"
33
  }