-
Notifications
You must be signed in to change notification settings - Fork 0
/
nary.base.yaml
68 lines (61 loc) · 1.83 KB
/
nary.base.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
---
# Training seed.
seed: 20221101
# Global verbosity flag, interpolated into trainer/scheduler sub-configs below.
verbose: true

trainer:
  # Early Stopping
  early_stopping:
    # Quantity to monitor
    # choices = ("train_acc", "val_acc", "train_f1", "val_f1")
    monitor: val_f1
    # Number of times with no improvement after which training will be stopped
    patience: 3
    # if we want to min/max the monitored quantity.
    # choices = ("auto", "min", "max")
    metric_mode: max
    verbose: ${verbose}
  # Model Checkpoint
  checkpoint:
    # The best k models according to the quantity monitored will be saved.
    filename: '{step:d}-{train_f1:.4f}-{val_f1:.4f}'
    # Relative interpolations: reuse the sibling early_stopping settings
    # so the checkpoint always tracks the same metric/direction.
    monitor: ${..early_stopping.monitor}
    mode: ${..early_stopping.metric_mode}
    # if ``save_top_k == k``,
    # the best k models according to the quantity monitored will be saved.
    # if ``save_top_k == 0``, no models are saved.
    # if ``save_top_k == -1``, all models are saved.
    save_top_k: 1
    # every_n_train_steps: 100
    verbose: ${verbose}
  # Limits training to a minimum number of epochs
  min_epochs: 1
  # Limits training to a maximum number of epochs
  max_epochs: 50
  # If you do not want to use the entire dev set (for debugging or if it is huge)
  # set how much of the dev set you want to use with this flag
  val_check_interval: 0.05
  gradient_clip_val: 5.0

model:
  optimization:
    lr: 1.0e-4
    # LR scheduler (ReduceLROnPlateau-style: factor/patience/min_lr).
    scheduler:
      factor: 0.8
      patience: 10
      verbose: ${verbose}
      min_lr: 1.0e-5
  contrastive_learning:
    enable: false
    temperature: 0.05
    lambda: 0.5
  # R-Drop regularization.
  rdropout:
    enable: false
    lambda: 0.5
  dropout: 0.3
  max_length: ${data.max_seq_length}
  n_labels: 5
  # Quoted so the Windows-path backslashes stay literal across parsers.
  model_name_or_path: 'D:\0pythonWS\0Latest\brc\biobert'

data:
  model_name_or_path: ${model.model_name_or_path}
  data_dirpath: ./data/datasets/nary/processed/all/drug_gene_var/cv0
  max_seq_length: 512
  train_batch_size: 64
  eval_batch_size: 64