-
Notifications
You must be signed in to change notification settings - Fork 1
/
utils.py
156 lines (131 loc) · 5.26 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
"""Miscellaneous utility functions."""
import os
import os.path as op
import numpy as np
from neurosynth.base.dataset import download
from nilearn import plotting
from nimare.io import convert_neurosynth_to_dataset
from PIL import Image
from sklearn.metrics import pairwise_distances
def crop_image(image_fname):
    """Trim pure-white margins from an image file, overwriting it in place.

    A row/column is kept only if its mean brightness is below 255, i.e. it
    contains at least one non-white pixel.

    Parameters
    ----------
    image_fname : str
        Path to the image; the cropped result is saved back to this path.
    """
    img = Image.open(image_fname)
    img.load()
    pixels = np.asarray(img)
    # Collapse the channel axis to one brightness value per pixel.
    brightness = pixels.mean(axis=2)
    keep_cols = np.where(brightness.mean(axis=0) < 255)[0]
    keep_rows = np.where(brightness.mean(axis=1) < 255)[0]
    row_lo, row_hi = keep_rows.min(), keep_rows.max()
    col_lo, col_hi = keep_cols.min(), keep_cols.max()
    cropped = pixels[row_lo : row_hi + 1, col_lo : col_hi + 1, :]
    Image.fromarray(cropped).save(image_fname)
def insert(matrix, indices):
    """Re-insert zero rows into ``matrix`` at the given output positions.

    ``indices`` are row positions in the *returned* array. ``np.insert``
    interprets its index argument relative to the input array, so each index
    is shifted down by the number of rows inserted before it (the arange
    offset).
    """
    offsets = np.arange(len(indices))
    return np.insert(matrix, np.subtract(indices, offsets), 0, axis=0)
def calculate_affinity(matrix, sparsity):
    """Build a cosine-similarity affinity matrix from a row-sparsified input.

    Each row is thresholded at its own ``sparsity``-th percentile (values
    below the percentile, and any negative values, are zeroed), then cosine
    similarity between the resulting sparse rows is returned.

    Parameters
    ----------
    matrix : 2D array-like
        Input feature matrix (one sample per row).
    sparsity : float
        Percentile in [0, 100] used as the per-row threshold.

    Returns
    -------
    numpy.ndarray
        Symmetric affinity matrix, 1 - cosine distance between rows.
    """
    # Work on a copy: the original thresholded the caller's array in place,
    # silently corrupting it for subsequent use.
    matrix = np.array(matrix, copy=True)
    # Per-row percentile thresholds.
    perc = np.array([np.percentile(row, sparsity) for row in matrix])
    # Zero out sub-threshold entries row by row.
    for i in range(matrix.shape[0]):
        matrix[i, matrix[i, :] < perc[i]] = 0
    # Negative similarities are discarded as well.
    matrix[matrix < 0] = 0
    # Rows are now sparse vectors; cosine similarity is the affinity metric.
    return 1 - pairwise_distances(matrix, metric="cosine")
def neurosynth_download(ns_data_dir):
    """Download the Neurosynth database and convert it to a NiMARE dataset.

    The raw files (``database.txt``, ``features.txt``) are unpacked into
    ``ns_data_dir`` and the converted dataset is saved there as
    ``neurosynth_dataset.pkl.gz``.

    Parameters
    ----------
    ns_data_dir : str
        Directory for the Neurosynth data; created if it does not exist.
    """
    os.makedirs(ns_data_dir, exist_ok=True)
    # Fetch and unpack the raw Neurosynth release files.
    download(ns_data_dir, unpack=True)
    ###############################################################################
    # Convert Neurosynth database to NiMARE dataset file
    # --------------------------------------------------
    out_file = op.join(ns_data_dir, "neurosynth_dataset.pkl.gz")
    converted = convert_neurosynth_to_dataset(
        op.join(ns_data_dir, "database.txt"), op.join(ns_data_dir, "features.txt")
    )
    converted.save(out_file)
def _balance_signs(values):
    """Rescale one sign of ``values`` so the positive max and the absolute
    negative min have equal magnitude.

    If positives dominate, negatives are stretched to reach ``-max(pos)``;
    otherwise positives are stretched to reach ``abs(min(neg))``. Assumes
    ``values`` contains both positive and negative entries (as the original
    code did) — TODO confirm upstream gradients always mix signs.
    """
    pos_max = np.max(values[values > 0])
    neg_min_abs = np.abs(np.min(values[values < 0]))
    if pos_max > neg_min_abs:
        values[values < 0] = values[values < 0] / neg_min_abs * pos_max
    else:
        values[values > 0] = values[values > 0] / pos_max * neg_min_abs
    return values


def plot_surfaces(grad_dict, index, outdir, prefix, normalize=False, cmap="jet"):
    """Plot one gradient on the left/right pial surfaces (medial + lateral views).

    Writes four PNGs to ``outdir`` named
    ``{prefix}-{index}_{hemi}_{view}.png``; only the right-lateral panel
    carries a colorbar.

    Parameters
    ----------
    grad_dict : dict
        Must contain ``grads_lh``/``grads_rh`` (vertices x gradients arrays)
        and the surface/background keys ``pial_left``, ``pial_right``,
        ``sulc_left``, ``sulc_right``.
    index : int
        Column of the gradient arrays to plot.
    outdir : str
        Output directory for the PNGs.
    prefix : str
        Filename prefix.
    normalize : bool, optional
        If True, rescale each hemisphere so positive and negative extremes
        have matching magnitude.
    cmap : str, optional
        Matplotlib colormap name.
    """
    # Copy the slices: the original mutated grad_dict's arrays through numpy
    # views when normalize=True, corrupting them for later calls.
    grad_lh = grad_dict["grads_lh"][:, index].copy()
    grad_rh = grad_dict["grads_rh"][:, index].copy()
    if normalize:
        grad_lh = _balance_signs(grad_lh)
        grad_rh = _balance_signs(grad_rh)

    # (hemi, view, data, surface key, background key, colorbar)
    panels = [
        ("left", "medial", grad_lh, "pial_left", "sulc_left", False),
        ("left", "lateral", grad_lh, "pial_left", "sulc_left", False),
        ("right", "medial", grad_rh, "pial_right", "sulc_right", False),
        ("right", "lateral", grad_rh, "pial_right", "sulc_right", True),
    ]
    for hemi, view, data, surf_key, sulc_key, colorbar in panels:
        plotting.plot_surf_stat_map(
            grad_dict[surf_key],
            data,
            hemi=hemi,
            bg_map=grad_dict[sulc_key],
            bg_on_data=True,
            # Tiny threshold hides exact zeros without masking real values.
            threshold=np.finfo(np.float32).eps,
            colorbar=colorbar,
            view=view,
            cmap=cmap,
            output_file=op.join(
                outdir, "{0}-{1}_{2}_{3}.png".format(prefix, index, hemi, view)
            ),
        )
def combine_plots(list_im, fname_out):
    """Crop, horizontally concatenate, and save a row of images, then delete
    the source files.

    Each image is white-border-cropped in place, rescaled (aspect ratio
    preserved) to the height of the shortest image, and stacked left to
    right into ``fname_out``.

    Parameters
    ----------
    list_im : list of str
        Paths of the images to combine; these files are removed afterwards.
    fname_out : str
        Path for the combined image.
    """
    for fname in list_im:
        crop_image(fname)
    imgs = [Image.open(fname) for fname in list_im]
    # Reference height is the smallest panel height; the original hard-coded
    # imgs[3], which raised IndexError for fewer than four images.
    ref_height = min(img.size[1] for img in imgs)
    panels = [
        # Scale width by ref_height / height to keep the aspect ratio
        # (the previous expression inverted the ratio, distorting panels).
        np.asarray(img.resize((int(img.size[0] * ref_height / img.size[1]), ref_height)))
        for img in imgs
    ]
    # np.hstack requires a real sequence of arrays, not a generator.
    combined = Image.fromarray(np.hstack(panels))
    combined.save(fname_out)
    # Close handles before removing the files (required on Windows).
    for img in imgs:
        img.close()
    for fname in list_im:
        os.remove(fname)