-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathfinetune_FastVimH_448.yaml
82 lines (72 loc) · 2.07 KB
/
finetune_FastVimH_448.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
---
# Fine-tuning configuration for FastVim-Huge at 448x448 input resolution.
# Top-level scalars below are referenced by the nested sections via
# OmegaConf ${...} interpolation, so they only need to be set once here.

accum_iter: 1
batch_size: 128  # make sure it is overall 1024
num_workers: 24
pretrained_checkpoint_path: '/path/to/ckpt'  # provide path to pretrained checkpoint from MAE/SSL
num_nodes: 1
pl_seed: 0
img_size: 448
patch_size: 14  # for now only implemented when both stride and patch size is same
scanpath_type: rowwise  # rowwise, colwise
use_norm_after_ssm: true
rotate_every_block: true
collapse_method: mean  # mean, max
blr: 0.001
layer_decay: 0.75
training_epochs: 50
gradient_clip_val: 3.0
weight_decay: 0.05
drop_path_rate: 0.3
min_lr: 0.00001
warmup_epochs: 5
num_classes: 1000
scaling_factor: 0.25  # required for aligning pretraining and finetuning
# scaling_factor: 1

# Backbone definition, instantiated via the Hydra-style `_target_` factory.
vim_config:
  _target_: models.fastvim.vim_huge_patch16_224_final_pool_mean_abs_pos_embed_with_noclstok_div2
  img_size: ${img_size}
  pretrained: false
  rotate_every_block: ${rotate_every_block}
  use_norm_after_ssm: ${use_norm_after_ssm}
  num_classes: ${num_classes}
  drop_path_rate: ${drop_path_rate}
  channels: 3
  patch_size: ${patch_size}
  stride: ${patch_size}
  scanpath_type: ${scanpath_type}
  collapse_method: ${collapse_method}
  pretrained_checkpoint_path: ${pretrained_checkpoint_path}
  scaling_factor: ${scaling_factor}

# Configuration for the dataloader
data_config:
  _target_: mae.datasets_finetune.load_DataModule
  batch_size: ${batch_size}
  num_workers: ${num_workers}
  # augmentation
  img_size: ${img_size}
  hflip: 0.5
  vflip: 0.0
  eval_crop_ratio: 0.875
  color_jitter: 0.0  # only randaug in fine tuning according to MAE
  auto_augment: 'rand-m9-mstd0.5-inc1'
  interpolation: 'bicubic'
  re_prob: 0.25
  re_mode: 'pixel'
  re_count: 1

# Supervised training module wrapping the backbone (mixup/cutmix, LR schedule).
model_config:
  _target_: mae.finetune_imagenet.SupervisedModule
  backbone: ${vim_config}
  num_classes: ${num_classes}
  label_smoothing: 0.1
  mixup: 0.8
  cutmix: 1.0
  mixup_mode: 'batch'
  mixup_prob: 1.0
  mixup_switch_prob: 0.5
  weight_decay: ${weight_decay}
  blr: ${blr}
  batch_size: ${batch_size}
  warmup_epochs: ${warmup_epochs}
  scheduling_epochs: ${training_epochs}
  min_lr: ${min_lr}
  layer_decay: ${layer_decay}
  accum_iter: ${accum_iter}