ctrl_vilbert_base.json
{
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"max_position_embeddings": 512,
"num_attention_heads": 12,
"pooler_size": 1024,
"type_vocab_size": 2,
"vocab_size": 30522,
"bert_model": "bert-base-uncased",
"do_lower_case": true,
"num_locs": 5,
"add_global_imgfeat": "first",
"image_embeddings": "vilbert",
"v_attention_probs_dropout_prob": 0.1,
"v_hidden_act": "gelu",
"v_hidden_dropout_prob": 0.1,
"v_feature_size": 2048,
"visual_target_weights": {"0": 1.0},
"v_hidden_size": 768,
"v_initializer_range": 0.02,
"v_pooler_size": 1024,
"v_num_attention_heads": 12,
"v_intermediate_size": 3072,
"fusion_method": "mul",
"clf_hidden_size": 1536,
"tt_attn_sublayers": [0,2,4,6,8,10,14,18,22,26,30,34],
"tv_attn_sublayers": [12,16,20,24,28,32],
"vt_attn_sublayers": [12,16,20,24,28,32],
"vv_attn_sublayers": [14,18,22,26,30,34],
"t_ff_sublayers": [1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35],
"v_ff_sublayers": [13,15,17,19,21,23,25,27,29,31,33,35],
"shared_sublayers": [],
"single_ln_sublayers": [],
"sublayer2attn_hidden_size": {},
"sublayer2num_attention_heads": {},
"sublayer2intermediate_size": {},
"sublayer2v_attn_hidden_size": {},
"sublayer2v_num_attention_heads": {},
"sublayer2v_intermediate_size": {},
"bert_layer2attn_sublayer": {
"0": 0, "1": 2, "2": 4, "3": 6, "4": 8, "5": 10,
"6": 14, "7": 18, "8": 22, "9": 26, "10": 30, "11": 34
},
"bert_layer2ff_sublayer": {
"0": 1, "1": 3, "2": 5, "3": 7, "4": 9, "5": 11,
"6": 15, "7": 19, "8": 23, "9": 27, "10": 31, "11": 35
}
}
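
The sublayer index lists above encode the dual-stream ViLBERT schedule: even indices alternate between text-text / vision-vision self-attention and tv/vt cross-attention, while odd indices hold the per-stream feed-forward sublayers. As a minimal sketch (not VOLTA's own loader; the file path and printout format are assumptions), the config can be read with the standard json module to inspect which sublayer types occur at each index:

import json

# Assumed path to this config file.
with open("ctrl_vilbert_base.json") as f:
    cfg = json.load(f)

kinds = ("tt_attn_sublayers", "tv_attn_sublayers", "vt_attn_sublayers",
         "vv_attn_sublayers", "t_ff_sublayers", "v_ff_sublayers")

# Total sublayer depth = highest index used by any stream, plus one.
depth = max(max(cfg[k]) for k in kinds if cfg[k]) + 1

# Print, for every sublayer index, which sublayer types are active there.
for i in range(depth):
    active = [k for k in kinds if i in cfg[k]]
    print(i, active)

Running this against the values above shows, for example, that indices 12 and 13 are the first cross-modal attention and feed-forward sublayers, matching the "bert_layer2attn_sublayer" / "bert_layer2ff_sublayer" maps that tie BERT layers 6-11 to the co-attention part of the network.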