# vicap_config.yaml (94 lines, 84 loc, 1.91 KB)
# NOTE(review): the lines above/below were recovered from a web-scraped page;
# page chrome and the line-number gutter were removed.
---
# Hydra/OmegaConf training configuration for the ViCap captioning experiment.
# ${oc.env:DATA_ROOT} resolves the DATA_ROOT environment variable at load time;
# ${model.*} / ${exp.*} are OmegaConf interpolations into this same config.
# NOTE(review): exponent-only scalars such as 1e-5 are parsed as *strings* by
# YAML 1.1 loaders (PyYAML); the consumer presumably coerces them to float —
# confirm before rewriting as 1.0e-5.

# Experiment / run bookkeeping.
exp:
  seed: 42
  name: vicap
  rank: 0
  ngpus_per_node: 2
  world_size: 2
  checkpoint: '/home/quang/Downloads/grit_checkpoint_4ds.pth'
  eval: false
  resume: false

# Dataset locations and input preprocessing.
dataset:
  overfit: false
  ann_root: '${oc.env:DATA_ROOT}/annotations'
  img_root: '${oc.env:DATA_ROOT}'
  hdf5_path: '${oc.env:DATA_ROOT}/all_splits.h5'  # this is used for freezed extractor; fast to train.
  vocab_path: '${oc.env:DATA_ROOT}/annotations/vocab.json'
  use_gri_feat: ${model.use_gri_feat}
  use_reg_feat: ${model.use_reg_feat}
  transform_cfg:
    size: [384, 640]  # (h, w)
    resize_name: maxwh  # normal, minmax, maxwh; maxwh is computationally friendly for training
    randaug: true

# Captioning model: feature flags, transformer dims, and sub-modules.
model:
  use_gri_feat: true
  use_reg_feat: true
  grid_feat_dim: 1024
  frozen_stages: 2
  beam_size: 5
  beam_len: 20
  dropout: 0.2
  attn_dropout: 0.2
  vocab_size: 10201
  max_len: 54
  pad_idx: 1
  bos_idx: 2
  eos_idx: 3
  d_model: 512
  n_heads: 8
  grid_net:
    n_memories: 1
    n_layers: 3
  cap_generator:
    decoder_name: parallel  # sequential concat
    n_layers: 3
  # Deformable-DETR-style detector head.
  detector:
    checkpoint: ''
    d_model: 512
    dim_feedforward: 1024
    num_heads: 8
    num_layers: 6
    num_levels: 4
    num_points: 4
    num_queries: 150
    num_classes: 1849
    dropout: 0.1
    activation: 'relu'
    return_intermediate: true
    with_box_refine: true

# Optimizer / schedule: xe_* = cross-entropy phase, sc_* = self-critical phase.
optimizer:
  warmup_init_lr: 1e-5
  min_lr: 1e-4
  xe_lr: 1e-4  # if xe_lr == min_lr, then CosineAnneal doesn't affect anymore.
  sc_lr: 5e-6
  xe_backbone_lr: 1e-5
  sc_backbone_lr: 5e-6
  weight_decay: 0.01
  beta_1: 0.9
  beta_2: 0.99
  batch_size: 16
  num_workers: 2
  freezing_xe_epochs: 0
  freezing_sc_epochs: 0
  finetune_xe_epochs: 10
  finetune_sc_epochs: 10
  freeze_detector: false
  freeze_backbone: false

# Hydra runtime settings.
hydra:
  run:
    dir: outputs/${exp.name}
  output_subdir: ./  # directory for saving the yaml configs
  job:
    config:
      override_dirname:
        exclude_keys:
          - exp.name