cora.yaml

# @package _global_

# to execute this experiment run:
# python train.py experiment=cora.yaml

defaults:
  - override /datamodule: cora.yaml
  - override /model: cora.yaml
  - override /callbacks: default.yaml
  - override /logger: null
  - override /trainer: default.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters
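# for example, individual values can also be overridden from the command line
# without editing this file (illustrative values, not part of the original config):
#   python train.py experiment=cora.yaml trainer.max_epochs=30 module.lr=3e-5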

# name of the run determines folder name in logs
name: "bert_token_classifier"

seed: 777

trainer:
  min_epochs: 5
  max_epochs: 15
  gradient_clip_val: 0.5

module:
  lr: 2e-5

model:
  model_checkpoint: "/ssd1/jiahe/pretrained/scibert-synthetic-uncased-50k"
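  # presumably the 13 Cora reference-string field labels (author, title, journal, etc.)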
  output_size: 13
  cache_dir: "/ssd1/jiahe/parscit/"

datamodule:
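  # presumably a Hugging Face Hub dataset identifier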
  data_repo: "myvision/cora-dataset-final"
  train_batch_size: 8
  num_workers: 0
  pin_memory: False
  data_cache_dir: "/ssd1/jiahe/cora/"

logger:
  wandb:
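    # ${name} resolves via OmegaConf interpolation to the run name defined above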
    tags: ["cora", "${name}"]