-
Notifications
You must be signed in to change notification settings - Fork 0
/
nfoldtest.py
118 lines (97 loc) · 4.19 KB
/
nfoldtest.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
# %%
import os, argparse
from itertools import combinations
# parser = argparse.ArgumentParser()
# parser.add_argument('-m', '--model', default='model')
# parser.add_argument('-p', '--participant', default=1, type=int)
# parser.add_argument('-g', '--gpu', default='0')
# args = parser.parse_args(['-m','model_less_rtlhb_p1','-r','1','-p','1'])
# print(args)
# os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
# os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
import numpy as np
import tensorflow as tf
from numpy.random import randint, randn
from tensorflow.keras.models import load_model
from matplotlib import pyplot as plt
from sklearn import preprocessing
from sklearn.metrics.pairwise import cosine_similarity
np.set_printoptions(suppress=True)
from utils.visualize import fmriviz
from utils.preprocess import dataloader, preprocess, postprocess
#%%
class Test:
    """Evaluation harness for a pretrained conditional generator of fMRI data.

    Generates fake voxel vectors from a saved Keras model and scores them
    against ground-truth vectors with the project's `postprocess` metrics.

    Attributes:
        snr: per-voxel signal-to-noise data passed through to the
            `postprocess` metrics (exact structure defined by `utils.preprocess`
            — presumably an array aligned with the voxel vectors; confirm there).
        voxel_map: mapping between volumetric images and flat voxel vectors;
            `voxel_map[0].shape[0]` is taken as the number of voxels.
        latent_dim: dimensionality of the generator's latent input.
        n_voxels: number of voxels per flattened brain vector.
    """

    def __init__(self, snr, voxel_map, latent_dim):
        self.snr = snr
        self.voxel_map = voxel_map
        self.latent_dim = latent_dim
        # First entry of the map fixes the flat vector length.
        self.n_voxels = voxel_map[0].shape[0]

    def generate_latent_points(self, latent_dim, n_samples):
        """Return `(n_samples, latent_dim)` float32 samples from N(0, 1)."""
        return tf.random.normal(
            (n_samples, latent_dim), mean=0.0, stddev=1.0,
            dtype=tf.dtypes.float32,
        )

    def transform_fake_images(self, fake):
        """Flatten a batch of generated 3-D images into voxel vectors.

        Args:
            fake: iterable of volumetric images accepted by
                `postprocess.img2vector`.

        Returns:
            `np.ndarray` of shape `(len(fake), n_voxels)`.
        """
        return np.array(
            [postprocess.img2vector(img, self.voxel_map) for img in fake]
        )

    def evaluate(self, snr, combinations, predictions, dataset):
        """Score prediction/truth pairs with the match metric.

        Args:
            snr: SNR data forwarded to `postprocess.evaluate`.
            combinations: iterable of index pairs to compare.
            predictions: array of predicted voxel vectors.
            dataset: array of ground-truth voxel vectors.

        Returns:
            Array of per-pair similarities; also prints their mean.
        """
        arr_similarity = []
        for pair in combinations:
            idx = list(pair)
            similarity = postprocess.evaluate(
                snr, predictions[idx], dataset[idx]
            )
            arr_similarity.append(similarity)
        print('Match Metric: %f' % np.mean(arr_similarity))
        return np.array(arr_similarity)

    def classic_eval(self, snr, predictions, dataset, top=500):
        """Score each prediction against its own ground truth.

        Args:
            snr: SNR data forwarded to `postprocess.classic_eval`.
            predictions: array of predicted voxel vectors.
            dataset: array of ground-truth voxel vectors (same length).
            top: number of top voxels to consider; the caller passes -1
                for "all" (semantics defined by `postprocess.classic_eval`).

        Returns:
            Array of per-sample results; also prints their mean.
        """
        arr_similarity = [
            postprocess.classic_eval(snr, predictions[i], dataset[i], 0.7, top)
            for i in range(len(predictions))
        ]
        # Original computed sum(list * 1)/len(list); `* 1` was a no-op.
        print('Cosine Metric: %f' % np.mean(arr_similarity))
        return np.array(arr_similarity)

    def predict(self, model_name, Y):
        """Generate voxel-vector predictions for every label in `Y`.

        Loads `pretrained/<model_name>.h5` and runs the generator in
        mini-batches of 2 on fresh latent noise.

        Bug fix: the original used `range(int(n_classes / 2))`, which
        silently dropped the final sample whenever `len(Y)` was odd.
        Iterating by slice start covers the short trailing batch too.

        Args:
            model_name: basename of the .h5 model under `pretrained/`.
            Y: label array conditioning the generator.

        Returns:
            `np.ndarray` of shape `(len(Y), n_voxels)`.
        """
        n_classes = len(Y)
        batch_size = 2
        latent_points = self.generate_latent_points(self.latent_dim, n_classes)
        model = load_model(os.path.join('pretrained', model_name + '.h5'))
        batches = []
        for start in range(0, n_classes, batch_size):
            end = start + batch_size  # slice may be short on the last batch
            X = model.predict([latent_points[start:end], Y[start:end]])
            # Drop the trailing channel axis of the 5-D generator output.
            fake_images = X[:, :, :, :, 0]
            batches.append(self.transform_fake_images(fake_images))
        if not batches:  # empty label set -> empty prediction matrix
            return np.zeros((0, self.n_voxels))
        return np.concatenate(batches, axis=0)

    def test(self, predictions, vectors):
        """Run the full metric suite over all pairwise combinations.

        Args:
            predictions: predicted voxel vectors.
            vectors: ground-truth voxel vectors.

        Returns:
            Tuple `(match_similarity, cosine_500, cosine_all)`.
        """
        n_classes = len(vectors)
        test_combinations = list(combinations(range(n_classes), 2))
        match_similarity = self.evaluate(
            self.snr, test_combinations, predictions, vectors
        )
        cosine_500 = self.classic_eval(self.snr, predictions, vectors)
        # Renamed local (was `cosine_similarity`) to stop shadowing the
        # sklearn import of the same name.
        cosine_all = self.classic_eval(self.snr, predictions, vectors, -1)
        return match_similarity, cosine_500, cosine_all
#%%
# participant = args.participant
# samples = dataloader.data[participant].samples
# voxel_map = dataloader.data[participant].voxel_map
# trial_map = dataloader.data[participant].trial_map
# features = dataloader.features
# labels = dataloader.data[participant].labels
# # Note: very important to have correct labels array
# nouns = list(trial_map.keys())
# lencoder = preprocessing.LabelEncoder()
# Y = lencoder.fit_transform(nouns)
# latent_dim = 1000
# true_vectors, embeddings = preprocess.prepare_data(features,trial_map,samples,nouns)
# snr = preprocess.get_snr(participant, samples, trial_map)
#%%
# testobj = Test(snr, voxel_map, latent_dim)
# testobj.predict_test(args.model, true_vectors, Y)
#%%