@@ -119,6 +119,14 @@ def default(self, obj):
         return json.JSONEncoder.default(self, obj)


+def _rescale_similarities(similarities):
+    if (similarities == 0).all():
+        return similarities
+    similarities = similarities - similarities.min()
+    similarities = similarities / similarities.max()
+    return similarities
+
+
 class NeuroQueryImageSearch:
     """Search for studies and terms with activation maps similar to an image.

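For reference, the helper added above is a plain min-max rescaling with an early return that leaves an all-zero similarity vector untouched instead of dividing by zero. A standalone sketch of its behaviour on toy arrays (not part of the diff):

```python
import numpy as np


def _rescale_similarities(similarities):
    # Same logic as the helper added above: shift so the minimum is 0,
    # then divide by the new maximum, mapping values onto [0, 1].
    if (similarities == 0).all():
        # All-zero input: nothing to rescale, avoid dividing by zero.
        return similarities
    similarities = similarities - similarities.min()
    similarities = similarities / similarities.max()
    return similarities


print(_rescale_similarities(np.array([2.0, 5.0, 8.0])))   # [0.  0.5 1. ]
print(_rescale_similarities(np.array([-3.0, 0.0, 3.0])))  # [0.  0.5 1. ]
print(_rescale_similarities(np.zeros(3)))                  # [0. 0. 0.], unchanged
```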
@@ -133,11 +141,17 @@ class NeuroQueryImageSearch:
     "image" (`nibabel.Nifti1Image` containing the input image).

     """
+
     def __init__(self):
         self.data = fetch_data()

     def __call__(
-        self, query_img, n_studies=50, n_terms=20, transform="absolute_value"
+        self,
+        query_img,
+        n_studies=50,
+        n_terms=20,
+        transform="absolute_value",
+        rescale_similarities=True,
     ):
         """Search for studies and terms with activation maps similar to an image

@@ -156,6 +170,9 @@ def __call__(
             helpful when comparing to the absolute value of the input image.
             "absolute_value" is the default.

+        rescale_similarities : if `True` (the default), similarities are
+            rescaled to span the range [0, 1]
+
         Returns
         -------
         results : dictionary with keys "image", "studies", "terms".
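With the expanded signature, callers can opt out of the rescaling per call. A minimal usage sketch, assuming the top-level import path and an illustrative image file name (neither is shown in this diff):

```python
import nibabel

from neuroquery_image_search import NeuroQueryImageSearch  # assumed import path

search = NeuroQueryImageSearch()
query_img = nibabel.load("my_contrast_map.nii.gz")  # hypothetical input map

# Default behaviour: study and term similarities are min-max rescaled to [0, 1].
results = search(query_img, n_studies=50, n_terms=20)

# Opt out to keep the raw similarity values.
raw_results = search(query_img, rescale_similarities=False)

print(sorted(results.keys()))  # ['image', 'studies', 'terms'] per the docstring
```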
@@ -189,8 +206,9 @@ def __call__(

         similarities = self.data["studies_loadings"].dot(query)
         most_similar = np.argsort(similarities)[::-1][:n_studies]
-        if (similarities > 0).any():
-            similarities /= similarities.max()
+
+        if rescale_similarities:
+            similarities = _rescale_similarities(similarities)
         study_results = (
             self.data["studies_info"]
             .iloc[most_similar]
@@ -204,8 +222,8 @@ def __call__(
             1 + self.data["document_frequencies"]["document_frequency"].values
         )
         most_similar = np.argsort(similarities)[::-1][:n_terms]
-        if (similarities > 0).any():
-            similarities /= similarities.max()
+        if rescale_similarities:
+            similarities = _rescale_similarities(similarities)
         term_results = (
             self.data["document_frequencies"]
             .iloc[most_similar]
@@ -262,6 +280,12 @@ def _get_parser():
         "direction of activations by default the absolute value of the "
         "input map is compared to activation patterns in the literature.",
     )
+    parser.add_argument(
+        "--no_rescaling",
+        action="store_true",
+        help="Disable rescaling the similarities. "
+        "By default they are mapped to the [0, 1] range.",
+    )
     return parser


@@ -281,6 +305,7 @@ def image_search(args=None):
         n_studies=args.n_studies,
         n_terms=args.n_terms,
         transform=args.transform,
+        rescale_similarities=(not args.no_rescaling)
     )
     if args.output is None:
         results_to_html(results, image_name).open_in_browser()
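End to end, the new `--no_rescaling` flag flows from the parser into the keyword argument: `store_true` sets `args.no_rescaling`, and the call site passes `rescale_similarities=(not args.no_rescaling)`. A sketch of driving the entry point programmatically, assuming the parser takes the image path as a positional argument and that `image_search` accepts an argument list (neither is shown in this diff):

```python
from neuroquery_image_search import image_search  # assumed import path

# Roughly equivalent to running the command-line tool with --no_rescaling:
# args.no_rescaling becomes True, so rescale_similarities is False and the
# raw similarity values are reported instead of the [0, 1] rescaled ones.
image_search(["my_contrast_map.nii.gz", "--no_rescaling"])
```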