market_model_builder.py
from keras.models import Model
from keras.layers import concatenate, Conv2D, Input, Dense, Flatten, Dropout
from keras.layers.advanced_activations import LeakyReLU
from keras import backend as K

# Inputs below are laid out channels-first: (channels, height, width).
K.set_image_data_format('channels_first')

from model_builder import AbstractModelBuilder


class MarketPolicyGradientModelBuilder(AbstractModelBuilder):
    def buildModel(self):
        # Auxiliary 3-dimensional input vector alongside the price series.
        B = Input(shape=(3,))
        b = Dense(5, activation="relu")(B)

        inputs = [B]
        merges = [b]

        for i in range(1):
            # Two-channel, 60-step series, shaped (channels, height, width).
            S = Input(shape=(2, 60, 1))
            inputs.append(S)

            # Successive convolutions over S; each assignment overwrites h,
            # so only the last (40, 1) branch feeds the first dense head.
            h = Conv2D(2048, (3, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(2048, (5, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(2048, (10, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(2048, (20, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(2048, (40, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)

            h = Flatten()(h)
            h = Dense(512)(h)
            h = LeakyReLU(0.001)(h)
            merges.append(h)

            # Second head: a single convolution spanning the full 60-step window.
            h = Conv2D(2048, (60, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)

            h = Flatten()(h)
            h = Dense(512)(h)
            h = LeakyReLU(0.001)(h)
            merges.append(h)

        # Keras 1 equivalent: merge(merges, mode="concat", concat_axis=1)
        m = concatenate(merges, axis=1)
        m = Dense(1024)(m)
        m = LeakyReLU(0.001)(m)
        m = Dense(512)(m)
        m = LeakyReLU(0.001)(m)
        m = Dense(256)(m)
        m = LeakyReLU(0.001)(m)

        # Softmax over two actions for the policy-gradient agent.
        V = Dense(2, activation="softmax")(m)

        model = Model(inputs=inputs, outputs=V)

        return model


class MarketModelBuilder(AbstractModelBuilder):
    def buildModel(self):
        dr_rate = 0.0

        # Auxiliary 3-dimensional input vector alongside the price series.
        B = Input(shape=(3,))
        b = Dense(5, activation="relu")(B)

        inputs = [B]
        merges = [b]

        for i in range(1):
            # Two-channel, 60-step series, shaped (channels, height, width).
            S = Input(shape=(2, 60, 1))
            inputs.append(S)

            # Successive convolutions over S; each assignment overwrites h,
            # so only the last (40, 1) branch feeds the first dense head.
            h = Conv2D(64, (3, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(128, (5, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(256, (10, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(512, (20, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)
            h = Conv2D(1024, (40, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)

            h = Flatten()(h)
            h = Dense(2048)(h)
            h = LeakyReLU(0.001)(h)
            h = Dropout(dr_rate)(h)
            merges.append(h)

            # Second head: a single convolution spanning the full 60-step window.
            h = Conv2D(2048, (60, 1), padding="valid")(S)
            h = LeakyReLU(0.001)(h)

            h = Flatten()(h)
            h = Dense(4096)(h)
            h = LeakyReLU(0.001)(h)
            h = Dropout(dr_rate)(h)
            merges.append(h)

        # Keras 1 equivalent: merge(merges, mode="concat", concat_axis=1)
        m = concatenate(merges, axis=1)
        m = Dense(1024)(m)
        m = LeakyReLU(0.001)(m)
        m = Dropout(dr_rate)(m)
        m = Dense(512)(m)
        m = LeakyReLU(0.001)(m)
        m = Dropout(dr_rate)(m)
        m = Dense(256)(m)
        m = LeakyReLU(0.001)(m)
        m = Dropout(dr_rate)(m)

        # Two-unit linear output head with zero-initialized weights.
        V = Dense(2, activation="linear", kernel_initializer="zeros")(m)

        model = Model(inputs=inputs, outputs=V)

        return model
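

if __name__ == "__main__":
    # Minimal usage sketch: build each network and compile it to confirm the
    # graph wires up. The optimizer and loss choices are illustrative
    # assumptions, as is constructing the builders with no arguments; the
    # actual constructor signature comes from AbstractModelBuilder in
    # model_builder.py.
    policy_model = MarketPolicyGradientModelBuilder().buildModel()
    policy_model.compile(optimizer="rmsprop", loss="categorical_crossentropy")
    policy_model.summary()

    market_model = MarketModelBuilder().buildModel()
    market_model.compile(optimizer="rmsprop", loss="mse")
    market_model.summary()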