NeuralNetGamma.py
from NeuralNet import NeuralNet
from keras.models import Sequential
from keras.layers import Dense, Flatten
from keras.layers import Conv2D, MaxPooling2D


class NeuralNetGamma(NeuralNet):
    def beginTraining(self):
        # Training schedule handed to the NeuralNet base class; see
        # setTrainingParameters for the meaning of each argument.
        self.setTrainingParameters(100000, 5000, 16, 10)

    def defineModel(self, inputShape: tuple, outputSize: int):
        # VGG-style convolutional network: three blocks of paired 3x3
        # convolutions with 2x2 max pooling, followed by fully connected layers.
        model = Sequential()
        model.add(Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=inputShape))
        model.add(Conv2D(32, (3, 3), activation='relu', padding='same'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
        model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Conv2D(128, (3, 3), activation='relu', padding='same'))
        model.add(Conv2D(128, (3, 3), activation='relu', padding='same'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Flatten())
        model.add(Dense(1028, activation='relu'))
        model.add(Dense(1028, activation='relu'))
        model.add(Dense(128, activation='relu'))
        # Linear output layer, sized to the caller-supplied output dimension.
        model.add(Dense(outputSize, activation='linear'))
        return model
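

# Usage sketch: builds and inspects the model standalone. The input shape,
# output size, optimizer, and loss below are illustrative assumptions, as is
# the zero-argument constructor; in the project itself, training is driven
# through the NeuralNet base class rather than this snippet.
if __name__ == "__main__":
    net = NeuralNetGamma()  # assumption: NeuralNet can be constructed without arguments
    model = net.defineModel(inputShape=(8, 8, 1), outputSize=64)
    # The linear output layer suggests regression-style targets (e.g. value
    # estimates), so mean squared error is one plausible loss choice.
    model.compile(optimizer='adam', loss='mse')
    model.summary()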