market_model_builder.py (forked from kh-kim/stock_market_reinforcement_learning)
from deeplearning_assistant.model_builder import AbstractModelBuilder


class MarketPolicyGradientModelBuilder(AbstractModelBuilder):
    def buildModel(self):
        # Keras 1.x functional API (Convolution2D, border_mode, merge); Python 2 (xrange).
        from keras.models import Model
        from keras.layers import merge, Convolution2D, Input, Dense, Flatten
        from keras.layers.advanced_activations import LeakyReLU

        # Agent-state branch: 3 scalar features -> small dense layer.
        B = Input(shape = (3,))
        b = Dense(5, activation = "relu")(B)

        inputs = [B]
        merges = [b]

        for i in xrange(1):
            # Price-history branch: 2 channels x 60 time steps x 1 column.
            S = Input(shape = [2, 60, 1])
            inputs.append(S)

            # Convolutions over 3/5/10/20/40-step windows. Note that each one
            # reads the raw input S, so only the last assignment to h (the
            # 40-step window) feeds the Flatten/Dense below.
            h = Convolution2D(2048, 3, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(2048, 5, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(2048, 10, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(2048, 20, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(2048, 40, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Flatten()(h)
            h = Dense(512)(h)
            h = LeakyReLU(0.001)(h)
            merges.append(h)

            # Full-window (60-step) convolution branch.
            h = Convolution2D(2048, 60, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Flatten()(h)
            h = Dense(512)(h)
            h = LeakyReLU(0.001)(h)
            merges.append(h)

        # Concatenate all branches and reduce through dense layers.
        m = merge(merges, mode = 'concat', concat_axis = 1)
        m = Dense(1024)(m)
        m = LeakyReLU(0.001)(m)
        m = Dense(512)(m)
        m = LeakyReLU(0.001)(m)
        m = Dense(256)(m)
        m = LeakyReLU(0.001)(m)

        # Softmax over the two actions (policy output).
        V = Dense(2, activation = 'softmax')(m)

        model = Model(input = inputs, output = V)

        return model
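
# Illustrative sketch (not part of the original file): one way the policy model
# above might be built and queried. It assumes a Keras 1.x / Python 2 setup with
# Theano-style dim ordering, and that AbstractModelBuilder can be instantiated
# without constructor arguments -- adjust to the actual base-class signature.
#
#     import numpy as np
#     policy = MarketPolicyGradientModelBuilder().buildModel()
#     policy.compile(optimizer = 'rmsprop', loss = 'categorical_crossentropy')
#     agent_state = np.random.rand(1, 3)            # matches Input(shape = (3,))
#     price_history = np.random.rand(1, 2, 60, 1)   # matches Input(shape = [2, 60, 1])
#     action_probs = policy.predict([agent_state, price_history])  # shape (1, 2)
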
class MarketModelBuilder(AbstractModelBuilder):
    def buildModel(self):
        # Same two-branch layout as above, but with smaller convolutions,
        # dropout, and a linear two-unit output (per-action value estimates).
        from keras.models import Model
        from keras.layers import merge, Convolution2D, Input, Dense, Flatten, Dropout
        from keras.layers.advanced_activations import LeakyReLU

        dr_rate = 0.0  # dropout rate (disabled by default)

        # Agent-state branch: 3 scalar features -> small dense layer.
        B = Input(shape = (3,))
        b = Dense(5, activation = "relu")(B)

        inputs = [B]
        merges = [b]

        for i in xrange(1):
            # Price-history branch: 2 channels x 60 time steps x 1 column.
            S = Input(shape = [2, 60, 1])
            inputs.append(S)

            # Convolutions over 3/5/10/20/40-step windows; as above, each one
            # reads S directly and only the 40-step result is flattened.
            h = Convolution2D(64, 3, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(128, 5, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(256, 10, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(512, 20, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Convolution2D(1024, 40, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Flatten()(h)
            h = Dense(2048)(h)
            h = LeakyReLU(0.001)(h)
            h = Dropout(dr_rate)(h)
            merges.append(h)

            # Full-window (60-step) convolution branch.
            h = Convolution2D(2048, 60, 1, border_mode = 'valid')(S)
            h = LeakyReLU(0.001)(h)
            h = Flatten()(h)
            h = Dense(4096)(h)
            h = LeakyReLU(0.001)(h)
            h = Dropout(dr_rate)(h)
            merges.append(h)

        # Concatenate all branches and reduce through dense layers.
        m = merge(merges, mode = 'concat', concat_axis = 1)
        m = Dense(1024)(m)
        m = LeakyReLU(0.001)(m)
        m = Dropout(dr_rate)(m)
        m = Dense(512)(m)
        m = LeakyReLU(0.001)(m)
        m = Dropout(dr_rate)(m)
        m = Dense(256)(m)
        m = LeakyReLU(0.001)(m)
        m = Dropout(dr_rate)(m)

        # Zero-initialised linear output: one value estimate per action.
        V = Dense(2, activation = 'linear', init = 'zero')(m)

        model = Model(input = inputs, output = V)

        return model
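

# Illustrative usage sketch (not part of the original file). It assumes a
# Keras 1.x / Python 2 environment with Theano-style dim ordering and that
# AbstractModelBuilder takes no constructor arguments; both may need adjusting.
if __name__ == '__main__':
    import numpy as np

    # Dummy inputs matching the declared shapes: 3 agent-state features and a
    # 2-channel, 60-step price history.
    agent_state = np.random.rand(1, 3)
    price_history = np.random.rand(1, 2, 60, 1)

    # Value model: linear per-action estimates, trained with a regression loss.
    q_model = MarketModelBuilder().buildModel()
    q_model.compile(optimizer = 'rmsprop', loss = 'mse')
    print q_model.predict([agent_state, price_history])  # shape (1, 2)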