-
Notifications
You must be signed in to change notification settings - Fork 1
/
twotower.py
73 lines (59 loc) · 2.26 KB
/
twotower.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
import torch.multiprocessing as mp
from torch.nn.parallel import DistributedDataParallel as DDP
from utils.Manager import Manager
from models.TwoTower import TwoTower
def main(rank, manager):
    """Build the TwoTower model and run train/dev/test on one process.

    Encoder modules are imported lazily inside the branches so that only
    the selected implementations are loaded.

    Args:
        rank: current process id — the GPU index in distributed mode, or
            the configured device for a single-process run
        manager: project Manager holding configuration (encoderN, encoderU,
            mode, world_size, ...) plus the data-loader / train / evaluate /
            test entry points

    Raises:
        ValueError: if encoderN, encoderU, or mode is not a recognized value
            (previously an unknown encoder fell through the if/elif chain and
            crashed later with an opaque NameError, and an unknown mode was a
            silent no-op)
    """
    manager.setup(rank)
    loaders = manager.prepare()

    # News-side token embedding is always BERT.
    from models.Embeddings.BERT import BERT_Embedding
    embedding = BERT_Embedding(manager)

    # --- news encoder ---
    if manager.encoderN == 'cnn':
        from models.Encoders.CNN import CNN_Encoder
        encoderN = CNN_Encoder(manager)
    elif manager.encoderN == 'transformer':
        from models.Encoders.Transformer import Transformer_Encoder
        # Transformer encoder operates on the raw BERT hidden width.
        manager.hidden_dim = 768
        encoderN = Transformer_Encoder(manager)
    elif manager.encoderN == 'mha':
        from models.Encoders.MHA import MHA_Encoder
        encoderN = MHA_Encoder(manager)
    else:
        raise ValueError(f"unrecognized encoderN: {manager.encoderN!r}")

    # --- user encoder ---
    if manager.encoderU in ['lstm', 'gru']:
        from models.Encoders.RNN import RNN_User_Encoder
        encoderU = RNN_User_Encoder(manager)
    elif manager.encoderU == 'avg':
        from models.Encoders.Pooling import Average_Pooling
        encoderU = Average_Pooling(manager)
    elif manager.encoderU == 'attn':
        from models.Encoders.Pooling import Attention_Pooling
        encoderU = Attention_Pooling(manager)
    elif manager.encoderU == 'mha':
        from models.Encoders.MHA import MHA_User_Encoder
        encoderU = MHA_User_Encoder(manager)
    elif manager.encoderU == 'lstur':
        from models.Encoders.RNN import LSTUR
        encoderU = LSTUR(manager)
    else:
        raise ValueError(f"unrecognized encoderU: {manager.encoderU!r}")

    model = TwoTower(manager, embedding, encoderN, encoderU).to(rank)
    if manager.world_size > 1:
        # One DDP replica per process, pinned to this rank's device.
        model = DDP(model, device_ids=[rank], output_device=rank, find_unused_parameters=False)

    if manager.mode == 'dev':
        manager.evaluate(model, loaders, load=True)
    elif manager.mode == 'train':
        manager.train(model, loaders)
    elif manager.mode == 'test':
        manager.test(model, loaders)
    else:
        raise ValueError(f"unrecognized mode: {manager.mode!r}")
if __name__ == "__main__":
    manager = Manager()
    if manager.world_size > 1:
        # Fork one worker process per GPU; each receives (rank, manager).
        mp.spawn(main, args=(manager,), nprocs=manager.world_size, join=True)
    else:
        # Single-device run: call the worker directly on the configured device.
        main(manager.device, manager)