-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathxformer.py
46 lines (34 loc) · 1.1 KB
/
xformer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import torch.multiprocessing as mp
from torch.nn.parallel import DistributedDataParallel as DDP
from utils.Manager import Manager
from models.XFormer import XFormer
def main(rank, manager):
    """Run the model in one of train/dev/test/encode modes, optionally distributed.

    Launched once per GPU by ``mp.spawn`` in the distributed case, or called
    directly with the configured device in the single-process case.

    Args:
        rank: current process id / device index this process runs on
        manager: Manager instance holding run configuration (mode, world_size)
            and providing setup/data/train/eval entry points
    """
    manager.setup(rank)
    loaders = manager.prepare()

    model = XFormer(manager).to(rank)
    if manager.world_size > 1:
        # Multi-GPU: wrap in DDP pinned to this rank's device.
        # find_unused_parameters=False assumes every parameter receives a
        # gradient each step — cheaper than the unused-parameter scan.
        model = DDP(model, device_ids=[rank], output_device=rank, find_unused_parameters=False)

    if manager.mode == 'dev':
        # load=True: restore a checkpoint before evaluating
        manager.evaluate(model, loaders, load=True)
    elif manager.mode == 'train':
        manager.train(model, loaders)
    elif manager.mode == 'test':
        manager.test(model, loaders)
    elif manager.mode == 'encode':
        manager.encode(model, loaders)
    # NOTE(review): an unrecognized mode silently does nothing — confirm intended
if __name__ == "__main__":
    # Entry point: build the run manager, then launch either a single process
    # or one worker process per GPU.
    manager = Manager()
    if manager.world_size <= 1:
        # Single process: run directly on the configured device.
        main(manager.device, manager)
    else:
        # Distributed: mp.spawn supplies each worker's rank as the first
        # argument to main; join=True blocks until all workers finish.
        mp.spawn(main, args=(manager,), nprocs=manager.world_size, join=True)