-
Notifications
You must be signed in to change notification settings - Fork 0
/
linear_classifier.py
66 lines (43 loc) · 1.28 KB
/
linear_classifier.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
import numpy as np
import itertools as it
import chained_operations as op
import neural_layers as nn
import data_utils
import plotting
import chained_optimizers as optimizers
# --- Data loading ---
print('get data')
# Human-readable class names for the CIFAR-10 categories.
label_names = data_utils.Cifar10.load_labels()
# First CIFAR-10 training batch; raw=False presumably returns
# preprocessed/flattened images — TODO confirm against data_utils.
data, labels = data_utils.Cifar10.get_batch(1, raw=False)
# --- Model: one dense layer + softmax loss (a linear classifier) ---
print('build model')
# Input layer sized for a flattened CIFAR-10 image (32*32*3 = 1024 * 3).
x = nn.InputLayer(1024 * 3)
# Single fully-connected layer mapping the input to the 10 classes.
y = nn.Dense(x, 10)
# Placeholder for the ground-truth labels fed in at step time.
y_ = op.Placeholder()
# Loss node combining predictions and truths; exact semantics live in
# neural_layers.Softmax — not visible here.
loss = nn.Softmax(y, y_)
# --- Train the classifier on mini-batches until the loss drops ---
print('train model')
batch_size = 100
graph = plotting.Graph('loss')
optimizer = optimizers.Adagrad(loss)
# it.count() gives an unbounded step counter; zip stops when the
# data/label batch iterators are exhausted.
for count, mini_batch_x, mini_batch_y_ in zip(
        it.count(),
        data_utils.batches(data, batch_size),
        data_utils.batches(labels, batch_size)):
    # One optimizer step; step() returns per-example losses, reduced
    # to a scalar mean for reporting.
    batch_loss = optimizer.step({x: mini_batch_x, y_: mini_batch_y_})
    batch_loss = np.mean(batch_loss)
    print('%d loss - %f' % (count, batch_loss))
    # Keyword form for the plot flag, consistent with the deep-dream
    # loop's graph.maybe_add(i, i, loss, plot=True) call in this file.
    graph.maybe_add(count, count, batch_loss, plot=True)
    if batch_loss < 4:  # early stop once mean loss is below threshold
        break
# --- Deep dream: optimize an input image toward class 1 ---
print('deep dream')
# Trainable image variable with the same flat shape as the input layer.
images = nn.VariableLayer(1, 1024 * 3)
y.set_inputs(images)
y.lock()  # freeze the trained dense weights
# NOTE(review): switching the inputs back to `x` immediately after wiring
# in `images` looks suspicious — confirm lock() captures the variable
# layer; otherwise this line undoes the deep-dream setup.
y.set_inputs(x)
# One-hot target selecting class index 1.
truths = np.zeros((1, 10))
truths[0][1] = 1
graph.clear()
optimizer = optimizers.Adagrad(loss)
for i in range(15):
    # Distinct name so the scalar mean does not shadow the `loss`
    # graph node built above (the original rebound `loss` here).
    dream_loss = np.mean(optimizer.step({y_: truths}))
    print('%d loss - %f' % (i, dream_loss))
    graph.maybe_add(i, i, dream_loss, plot=True)
print('done')