train.py
import os
import random
import time

import numpy as np
import ray
import torch

from src.config import config
from src.worker import Actor, GlobalBuffer, Learner

os.environ["OMP_NUM_THREADS"] = "1"
torch.manual_seed(0)
np.random.seed(0)
random.seed(0)


def main(num_actors=config.num_actors, log_interval=config.log_interval):
    ray.init()

    # The shared replay buffer and the learner run as Ray actors in their own processes.
    buffer = GlobalBuffer.remote()
    learner = Learner.remote(buffer)
    time.sleep(1)

    # Each actor gets its own exploration rate 0.4 ** (1 + 7 * i / (num_actors - 1)),
    # spanning 0.4 for actor 0 down to 0.4 ** 8 for the last actor.
    actors = [
        Actor.remote(i, 0.4 ** (1 + (i / (num_actors - 1)) * 7), learner, buffer)
        for i in range(num_actors)
    ]
    for actor in actors:
        actor.run.remote()

    # Wait until the buffer has collected enough transitions to start training.
    while not ray.get(buffer.ready.remote()):
        time.sleep(5)
        ray.get(learner.stats.remote(5))
        ray.get(buffer.stats.remote(5))

    print("start training")
    buffer.run.remote()
    learner.run.remote()

    # Poll learner and buffer statistics until the learner reports training is done.
    done = False
    while not done:
        time.sleep(log_interval)
        done = ray.get(learner.stats.remote(log_interval))
        ray.get(buffer.stats.remote(log_interval))
        print()

    ray.shutdown()


if __name__ == "__main__":
    main()
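
For reference, the script drives everything through Ray's remote-actor API: the classes imported from src.worker are instantiated with Class.remote(...), their methods are launched asynchronously with method.remote(...), and results are fetched by blocking on ray.get(...). Below is a minimal, self-contained sketch of that pattern using a hypothetical Counter actor; it is not part of this repository and only illustrates the style of calls train.py makes against GlobalBuffer, Learner, and Actor.

import ray


@ray.remote
class Counter:
    """Toy remote actor: each method call runs inside the actor's own process."""

    def __init__(self):
        self.value = 0

    def increment(self):
        self.value += 1

    def stats(self):
        return self.value


if __name__ == "__main__":
    ray.init()
    counter = Counter.remote()                # start the actor process (cf. GlobalBuffer.remote())
    counter.increment.remote()                # fire-and-forget call (cf. actor.run.remote())
    value = ray.get(counter.stats.remote())   # block for a result (cf. ray.get(buffer.ready.remote()))
    print(value)                              # prints 1; calls on one actor run in submission order
    ray.shutdown()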