layers.py
import tensorflow as tf


def lrelu(inputs, leak=0.2, scope="lrelu"):
    """Leaky ReLU: passes positive values through, scales negatives by `leak`."""
    with tf.variable_scope(scope):
        f1 = 0.5 * (1 + leak)
        f2 = 0.5 * (1 - leak)
        return f1 * inputs + f2 * tf.abs(inputs)
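# Note: the identity above is algebraically equivalent to
# tf.nn.leaky_relu(inputs, alpha=leak) (available in TF >= 1.4), since
#   0.5 * (1 + leak) * x + 0.5 * (1 - leak) * |x| = x         for x >= 0
#                                                 = leak * x  for x <  0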
def convLayer(inputs, filters, kernel_size, strides, padding,
              activation, batch_normalization, training):
    """2-D convolution with optional batch normalization and activation."""
    conv = tf.layers.conv2d(inputs=inputs,
                            filters=filters,
                            kernel_size=kernel_size,
                            strides=strides,
                            padding=padding)
    # Batch normalization, when requested, is applied before the activation.
    if batch_normalization:
        conv = tf.layers.batch_normalization(conv, training=training)
    if activation == "relu":
        return tf.nn.relu(conv)
    elif activation == "lrelu":
        return lrelu(conv, 0.2)
    return conv
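# Example call (illustrative; the input tensor `x` and the shapes are
# assumptions, not from this file). With padding="same" the output spatial
# size is ceil(input_size / strides), so a 64x64 input at strides=2 yields
# a 32x32 feature map:
#   h = convLayer(x, filters=64, kernel_size=4, strides=2, padding="same",
#                 activation="lrelu", batch_normalization=True,
#                 training=is_training)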
def deConvLayer(inputs, filters, kernel_size, strides, padding,
                activation, batch_normalization, training):
    """2-D transposed convolution with optional batch normalization and activation."""
    deConv = tf.layers.conv2d_transpose(inputs=inputs,
                                        filters=filters,
                                        kernel_size=kernel_size,
                                        strides=strides,
                                        padding=padding)
    # Batch normalization, when requested, is applied before the activation.
    if batch_normalization:
        deConv = tf.layers.batch_normalization(deConv, training=training)
    if activation == "relu":
        return tf.nn.relu(deConv)
    elif activation == "lrelu":
        return lrelu(deConv, 0.2)
    return deConv
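

# ---------------------------------------------------------------------------
# Minimal smoke test (an illustrative sketch; the shapes and the layer stack
# below are assumptions, not part of the original model). One caveat worth
# noting: tf.layers.batch_normalization registers its moving-average updates
# in tf.GraphKeys.UPDATE_OPS, so a real training loop must run those ops, e.g.
#   update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
#   with tf.control_dependencies(update_ops):
#       train_op = optimizer.minimize(loss)
if __name__ == "__main__":
    images = tf.placeholder(tf.float32, [None, 64, 64, 3], name="images")
    is_training = tf.placeholder(tf.bool, name="is_training")

    # Encoder: two strided convolutions, each halving the spatial resolution.
    h = convLayer(images, filters=64, kernel_size=4, strides=2, padding="same",
                  activation="lrelu", batch_normalization=True,
                  training=is_training)
    h = convLayer(h, filters=128, kernel_size=4, strides=2, padding="same",
                  activation="lrelu", batch_normalization=True,
                  training=is_training)

    # Decoder: two transposed convolutions back to the input resolution.
    h = deConvLayer(h, filters=64, kernel_size=4, strides=2, padding="same",
                    activation="relu", batch_normalization=True,
                    training=is_training)
    out = deConvLayer(h, filters=3, kernel_size=4, strides=2, padding="same",
                      activation="none", batch_normalization=False,
                      training=is_training)

    print(out.shape)  # expected: (?, 64, 64, 3)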