model.py
import sys
import numpy as np
from sklearn.ensemble import RandomForestClassifier
#from lightgbm import LGBMClassifier
from tensorflow.keras.layers import Dense, Dropout, Input
from tensorflow.keras.models import Model


class TitanicModeling:
    def __init__(self):
        pass

    def run_sklearn_modeling(self, X, y, n_estimator):
        # Fit a random forest on (X, y) and return the model plus its score and parameters.
        model = self._get_rf_model(n_estimator)
        #lgbm_model = self._get_lgbm_model(n_estimator)
        model.fit(X, y)
        #lgbm_model.fit(X, y)
        model_info = {
            'score': {
                'model_score': model.score(X, y)
            },
            'params': model.get_params()
        }
        return model, model_info

    def run_keras_modeling(self, X, y):
        # Fit a small Keras network on (X, y) and return the model plus its training accuracy.
        model = self._get_keras_model()
        model.fit(X, y, epochs=20, batch_size=10)
        #predictions = model.predict(X)
        #print('keras prediction : ', predictions[:5])
        model_info = {
            'score': {
                # evaluate() returns [loss, acc]; index 1 is the 'acc' metric
                'model_score': np.float64(round(model.evaluate(X, y)[1], 2))
            },
            'params': {'epochs': 20, 'batch_size': 10}
        }
        return model, model_info

    def _get_rf_model(self, n_estimator):
        return RandomForestClassifier(n_estimators=n_estimator, max_depth=5)

    #def _get_lgbm_model(self, n_estimator):
    #    return LGBMClassifier(n_estimators=n_estimator)

    def _get_keras_model(self):
        # Simple feed-forward classifier: 3 input features -> 32 -> 16 -> 1 (sigmoid).
        inp = Input(shape=(3,), name='inp_layer')
        dense_layer_1 = Dense(32, activation='relu', name='dense_1')
        dense_layer_2 = Dense(16, activation='relu', name='dense_2')
        predict_layer = Dense(1, activation='sigmoid', name='predict_layer')

        dense_vector_1 = dense_layer_1(inp)
        dense_vector_2 = dense_layer_2(dense_vector_1)
        predict_vector = predict_layer(dense_vector_2)

        model = Model(inputs=inp, outputs=predict_vector)
        model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['acc'])
        return model
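

# --- Usage sketch (not part of the original file) ---
# A minimal example of driving TitanicModeling end to end. It assumes a
# preprocessed feature matrix with exactly 3 columns, matching
# Input(shape=(3,)) above, and a binary target; the synthetic data below is
# only a stand-in for real Titanic features.
if __name__ == '__main__':
    rng = np.random.default_rng(42)
    X = rng.normal(size=(100, 3))        # 100 samples, 3 features (placeholder data)
    y = rng.integers(0, 2, size=100)     # binary survival label (placeholder data)

    modeling = TitanicModeling()

    rf_model, rf_info = modeling.run_sklearn_modeling(X, y, n_estimator=100)
    print('random forest score :', rf_info['score']['model_score'])

    keras_model, keras_info = modeling.run_keras_modeling(X, y)
    print('keras accuracy :', keras_info['score']['model_score'])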