one_layer_autoencoder_test.py
import numpy as np
import pickle
import os
import operator
import sys
from PIL import Image
from scipy.spatial.distance import cosine
from keras.models import Model
from keras.layers import Input, Dense
from keras import backend as K
"""
The program is used for testing the
shallow autoencoder network.
This takes three arguments.
first: dataset (jaffe for JAFFE test set, and lfw for LFW data set)
second: Number of Hidden Layers (300/500)
third: Number of iterations for each example (100 was giving the best results)
Once it takes all the images,
it converts them into 300/500 dimensional
space and uses cosine distance as a metric
to find the distance between two emotions.
"""
np.random.seed(30)  # fix the NumPy random seed for reproducibility
c = sys.argv[1]      # dataset: 'jaffe' or 'lfw'
hu = sys.argv[2]     # number of hidden units (300 or 500)
if c == 'lfw':
    test = os.listdir('lfw2/')
elif c == 'jaffe':
    test = open('test_files.txt', 'r').read().split()
# hidden representations of the training images, produced by the training script
train_dict = pickle.load(open('one_layer_ae_' + str(hu) + '_hn_rep/trained_' + str(hu) + '_representations.txt', 'rb'))
# JAFFE emotion tags: angry, sad, surprised, happy, disgusted, fearful, neutral
tag_list = ['AN', 'SA', 'SU', 'HA', 'DI', 'FE', 'NE']
targets_list = []
for t in test:
    for tag in tag_list:
        if tag in t:
            targets_list.append(tag)
hidden_units = int(hu)
n_iter = int(sys.argv[3])   # training iterations per example
np.random.seed(30)

# single-hidden-layer autoencoder: 4096 (64x64 pixels) -> hidden_units -> 4096
inputs = Input(shape=(4096,))
encoder = Dense(hidden_units)(inputs)
decoder = Dense(4096)(encoder)
model = Model(inputs=inputs, outputs=decoder)
model.compile(optimizer='adam', loss='mse')
a = []   # top-3 nearest training keys for each test image
for t in range(len(test)):
    print(t)
    # load the test image as a flat 4096-dimensional grayscale vector
    if c == 'lfw':
        im = Image.open('lfw2/' + test[t])
        im = im.convert('L')
        data = im.resize((64, 64))
        data = np.array(data.getdata())
    elif c == 'jaffe':
        data = np.array(list(Image.open('resized_JAFFE_data_64_by_64/' + test[t]).getdata()))
    data = np.reshape(data, (1, 4096))
    # fit the autoencoder on this single example (the same model instance is
    # reused, so its weights carry over from previous test images)
    model.fit(data, data, epochs=n_iter)
    # extract the hidden-layer (encoder) activation for the image
    encoder_func = K.function([model.layers[0].input], [model.layers[1].output])
    encoder_output = np.array(encoder_func([data]))
    hidden_rep = encoder_output[0, 0, :]
    # rank the training representations by cosine distance and keep the three closest
    new_dict = {}
    for p in train_dict:
        new_dict[p] = cosine(hidden_rep, train_dict[p])
    order = sorted(new_dict.items(), key=operator.itemgetter(1))
    a.append((order[0][0], order[1][0], order[2][0]))
a = np.array(a)
targets_list = np.array(targets_list)
# a prediction counts as correct if the true tag appears among the three nearest neighbours
count = 0
for t in range(len(targets_list)):
    if targets_list[t] in a[t]:
        count += 1
print("Number of Correct Predictions (Top 3): " + str(count))
print("Total Number of Images: " + str(len(targets_list)))
print("Accuracy: " + str((1.0 * count) / len(targets_list)))