nimare 0.5.0rc1__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nimare/_version.py CHANGED
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "2025-03-13T11:20:33-0500",
+ "date": "2025-06-13T14:42:16-0500",
  "dirty": false,
  "error": null,
- "full-revisionid": "71d779964e5756b89c6237e1550112d9c8a77498",
- "version": "0.5.0rc1"
+ "full-revisionid": "f393f27b4e273646f4fdf425dfdba8d865099917",
+ "version": "0.5.1"
 }
 ''' # END VERSION_JSON
 
nimare/dataset.py CHANGED
@@ -631,12 +631,12 @@ class Dataset(NiMAREBase):
         return found_ids
 
     def get_studies_by_mask(self, mask):
-        """Extract list of studies with at least one coordinate in mask.
+        """Extract list of studies with at least one focus in mask.
 
         Parameters
         ----------
-        mask : img_like
-            Mask across which to search for coordinates.
+        mask : :obj:`~nibabel.nifti1.Nifti1Image`
+            Mask with which to evaluate coordinates for inclusion.
 
         Returns
         -------
@@ -649,12 +649,16 @@ class Dataset(NiMAREBase):
         if not np.array_equal(dset_mask.affine, mask.affine):
             LGR.warning("Mask affine does not match Dataset affine. Assuming same space.")
 
+        # Convert coordinates to voxel indices
         dset_ijk = mm2vox(self.coordinates[["x", "y", "z"]].values, mask.affine)
         mask_data = mask.get_fdata()
-        mask_coords = np.vstack(np.where(mask_data)).T
 
-        # Check for presence of coordinates in mask
-        in_mask = np.any(np.all(dset_ijk[:, None] == mask_coords[None, :], axis=-1), axis=-1)
+        # Clip coordinates to be within mask dimensions
+        shape = mask_data.shape
+        dset_ijk = np.clip(dset_ijk, 0, np.array(shape) - 1)
+
+        # Simply index into mask using voxel coordinates
+        in_mask = mask_data[dset_ijk[:, 0], dset_ijk[:, 1], dset_ijk[:, 2]] > 0
         found_ids = list(self.coordinates.loc[in_mask, "id"].unique())
 
         return found_ids
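
Note: the rewritten membership test no longer compares every focus against every mask voxel; it converts foci to voxel indices, clips them to the mask bounds, and indexes the mask array directly. A minimal standalone sketch of the same idea, assuming a 3D NumPy array mask_data and integer voxel indices ijk (illustrative only, not the packaged method):

    import numpy as np

    def foci_in_mask(ijk, mask_data):
        """Boolean per focus: True if its (clipped) voxel lies inside the mask."""
        ijk = np.asarray(ijk, dtype=int)
        # Out-of-bounds indices are clipped onto the nearest edge voxel, mirroring the diff above.
        ijk = np.clip(ijk, 0, np.array(mask_data.shape) - 1)
        return mask_data[ijk[:, 0], ijk[:, 1], ijk[:, 2]] > 0

    mask = np.zeros((4, 4, 4))
    mask[1:3, 1:3, 1:3] = 1
    print(foci_in_mask([[1, 1, 2], [0, 0, 0], [9, 9, 9]], mask))  # [ True False False]
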
nimare/meta/utils.py CHANGED
@@ -33,7 +33,7 @@ def _convolve_sphere(kernel, ijks, index, max_shape):
 
 def np_all_axis1(x):
     """Numba compatible version of np.all(x, axis=1)."""
-    out = np.ones(x.shape[0], dtype=np.bool8)
+    out = np.ones(x.shape[0], dtype=np.bool_)
     for i in range(x.shape[1]):
         out = np.logical_and(out, x[:, i])
     return out
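
Note: this one-line change is a NumPy compatibility fix. The np.bool8 alias was deprecated in NumPy 1.24 and removed in NumPy 2.0, while np.bool_ remains the supported boolean scalar type. A trivial check, for illustration only:

    import numpy as np

    out = np.ones(3, dtype=np.bool_)  # works on NumPy 1.x and 2.x; np.bool8 fails on NumPy >= 2.0
    print(out.dtype)  # bool
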
nimare/reports/base.py CHANGED
@@ -288,7 +288,9 @@ def _gen_figures(results, img_key, diag_name, threshold, fig_dir):
     # Plot brain images if not empty
     if (results.maps[img_key] > threshold).any():
         img = results.get_map(img_key)
-        plot_interactive_brain(img, fig_dir / "corrector_figure-interactive.html", threshold)
+        plot_interactive_brain(
+            img, fig_dir / "corrector_figure-interactive.html", threshold, quality="low"
+        )
         plot_static_brain(img, fig_dir / "corrector_figure-static.png", threshold)
     else:
         _no_maps_found(fig_dir / "corrector_figure-non.html")
@@ -540,7 +542,7 @@ class Report:
                 rowvar=True,
             )
         else:
-            corr = self.inputs_["corr_matrix"]
+            corr = self.results.estimator.inputs_["corr_matrix"]
 
         similarity_table = pd.DataFrame(
             index=ids_,
nimare/reports/figures.py CHANGED
@@ -1,5 +1,6 @@
 """Plot figures for report."""
 
+import matplotlib as mpl
 import matplotlib.colors as mcolors
 import matplotlib.patches as mpatches
 import matplotlib.pyplot as plt
@@ -8,12 +9,12 @@ import pandas as pd
 import plotly.express as px
 from nilearn import datasets
 from nilearn.plotting import (
-    plot_connectome,
+    plot_glass_brain,
     plot_img,
     plot_roi,
     plot_stat_map,
-    view_connectome,
     view_img,
+    view_markers,
 )
 from ridgeplot import ridgeplot
 from scipy import stats
@@ -33,6 +34,9 @@ TABLE_STYLE = [
     ),
 ]
 
+# Configure matplotlib for faster image saving
+mpl.rcParams["savefig.bbox"] = "tight"  # Only save actual plot content
+mpl.rcParams["savefig.dpi"] = 100  # Lower DPI for faster encoding
 
 PXS_PER_STD = 30  # Number of pixels per study, control the size (height) of Plotly figures
 MAX_CHARS = 20  # Maximum number of characters for labels
@@ -111,6 +115,16 @@ def _reorder_matrix(mat, row_labels, col_labels, symmetric=False, reorder="singl
     return mat, row_labels, col_labels
 
 
+_mni_template_cache = {}
+
+
+def _get_cached_template(resolution):
+    """Get cached MNI template or load if not cached."""
+    if resolution not in _mni_template_cache:
+        _mni_template_cache[resolution] = datasets.load_mni152_template(resolution=resolution)
+    return _mni_template_cache[resolution]
+
+
 def plot_static_brain(img, out_filename, threshold=1e-06):
     """Plot static brain image.
 
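
Note: the module-level dictionary above memoizes the MNI152 template per resolution so repeated figure calls do not reload it from disk. For reference, the same effect could be achieved with functools.lru_cache; this is only an illustrative alternative, not what the package ships:

    from functools import lru_cache

    from nilearn import datasets

    @lru_cache(maxsize=None)
    def load_template(resolution):
        """Load (and cache) the MNI152 template at the requested resolution in mm."""
        return datasets.load_mni152_template(resolution=resolution)
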
@@ -131,7 +145,7 @@ def plot_static_brain(img, out_filename, threshold=1e-06):
     """
     _check_extention(out_filename, [".png", ".pdf", ".svg"])
 
-    template = datasets.load_mni152_template(resolution=1)
+    template = _get_cached_template(resolution=1)
     fig = plot_stat_map(
         img,
         bg_img=template,
@@ -141,7 +155,7 @@ def plot_static_brain(img, out_filename, threshold=1e-06):
         display_mode="mosaic",
         symmetric_cbar=True,
     )
-    fig.savefig(out_filename, dpi=300)
+    fig.savefig(out_filename, dpi=100)
     fig.close()
 
 
@@ -173,7 +187,7 @@ def plot_mask(mask, out_filename):
         alpha=0.7,
         display_mode="mosaic",
     )
-    fig.savefig(out_filename, dpi=300)
+    fig.savefig(out_filename, dpi=100)
     fig.close()
 
 
@@ -182,6 +196,7 @@ def plot_coordinates(
     out_static_filename,
     out_interactive_filename,
     out_legend_filename,
+    max_coordinates=2000,
 ):
     """Plot static and interactive coordinates.
 
@@ -200,58 +215,71 @@ def plot_coordinates(
     out_legend_filename : :obj:`pathlib.Path`
         The name of an image file to export the legend plot to.
         Valid extensions are '.png', '.pdf', '.svg'.
+    max_coordinates : :obj:`int`, optional
+        Maximum number of coordinates to plot. If there are more coordinates,
+        they will be randomly sampled. Default is 2000.
     """
     _check_extention(out_static_filename, [".png", ".pdf", ".svg"])
     _check_extention(out_interactive_filename, [".html"])
     _check_extention(out_legend_filename, [".png", ".pdf", ".svg"])
 
-    node_coords = coordinates_df[["x", "y", "z"]].to_numpy()
-    n_coords = len(node_coords)
-    adjacency_matrix = np.zeros((n_coords, n_coords))
+    # If there are too many coordinates, randomly sample them
+    if len(coordinates_df) > max_coordinates:
+        coordinates_df = coordinates_df.sample(n=max_coordinates, random_state=42)
 
-    # Generate dictionary and array of colors for each unique ID
-    ids = coordinates_df["study_id"].to_list()
-    unq_ids = np.unique(ids)
-    cmap = plt.colormaps["tab20"].resampled(len(unq_ids))
-    colors_dict = {unq_id: mcolors.to_hex(cmap(i)) for i, unq_id in enumerate(unq_ids)}
-    colors = [colors_dict[id_] for id_ in ids]
+    # Generate categorical colors for each study
+    unq_ids = coordinates_df["study_id"].unique()
+    n_studies = len(unq_ids)
 
-    fig = plot_connectome(adjacency_matrix, node_coords, node_color=colors)
-    fig.savefig(out_static_filename, dpi=300)
-    fig.close()
+    # Use tab20 colormap with modulo for studies > 20
+    cmap = plt.colormaps["tab20"].resampled(20)
+    colors = [cmap(i % 20) for i in range(n_studies)]
+    colors_dict = {id_: mcolors.to_hex(color) for id_, color in zip(unq_ids, colors)}
 
-    # Generate legend
-    patches_lst = [
-        mpatches.Patch(color=color, label=label) for label, color in colors_dict.items()
-    ]
+    # Create glass brain plot
+    glass_brain = plot_glass_brain(None, display_mode="lyrz", plot_abs=False, alpha=0.1)
+
+    # Process all coordinates at once
+    all_coords = coordinates_df[["x", "y", "z"]].values
+    all_colors = np.array([colors_dict[id_] for id_ in coordinates_df["study_id"]])
+
+    # Add all coordinates in one call
+    glass_brain.add_markers(all_coords, marker_color=all_colors, marker_size=3)
+
+    glass_brain.savefig(out_static_filename, dpi=100)
+    glass_brain.close()
+
+    # Generate legend more efficiently using pre-computed values
+    patches_lst = [mpatches.Patch(color=color, label=id_) for id_, color in zip(unq_ids, colors)]
 
-    # Plot legeng
-    max_len_per_page = 200
-    max_legend_len = max(len(id_) for id_ in unq_ids)
-    ncol = 1 if max_legend_len > max_len_per_page else int(max_len_per_page / max_legend_len)
-    labl_fig, ax = plt.subplots(1, 1)
-    labl_fig.legend(
+    # Use already computed n_studies
+    ncol = max(1, min(8, n_studies // 10))  # Cap columns at 8
+
+    # Create minimal figure with appropriate size
+    figsize = (8, max(1, int(n_studies / ncol / 3)))  # Scale height by entries per column
+    fig = plt.figure(figsize=figsize)
+    fig.legend(
         handles=patches_lst,
         ncol=ncol,
         fontsize=10,
         loc="center",
+        frameon=False,  # Remove frame for cleaner look
     )
-    ax.axis("off")
-    labl_fig.savefig(out_legend_filename, bbox_inches="tight", dpi=300)
-    plt.close()
-
-    # Plot interactive connectome
-    html_view = view_connectome(
-        adjacency_matrix,
-        node_coords,
-        node_size=10,
-        colorbar=False,
-        node_color=colors,
+    fig.savefig(
+        out_legend_filename,
+        dpi=100,
+        bbox_inches="tight",
+        pil_kwargs={"optimize": True, "quality": 85},
     )
-    html_view.save_as_html(out_interactive_filename)
+    plt.close(fig)
+
+    # Create interactive view with markers using pre-computed coordinates and colors
+    interactive_view = view_markers(all_coords, marker_color=all_colors, marker_size=3)
+    interactive_view.save_as_html(out_interactive_filename)
+    del interactive_view  # Clean up
 
 
-def plot_interactive_brain(img, out_filename, threshold=1e-06):
+def plot_interactive_brain(img, out_filename, threshold=1e-06, quality="medium"):
     """Plot interactive brain image.
 
     .. versionadded:: 0.1.0
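
Note: with this change the static coordinate plot uses plot_glass_brain plus add_markers, and the interactive plot uses nilearn's view_markers instead of plot_connectome/view_connectome with a dummy all-zeros adjacency matrix. A minimal standalone usage sketch of view_markers with made-up coordinates (illustrative, not NiMARE code):

    import numpy as np
    from nilearn.plotting import view_markers

    coords = np.array([[-42.0, 24.0, 6.0], [38.0, -20.0, 50.0]])  # MNI coordinates in mm
    view = view_markers(coords, marker_color=["#1f77b4", "#ff7f0e"], marker_size=3)
    view.save_as_html("markers.html")
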
@@ -267,18 +295,33 @@ def plot_interactive_brain(img, out_filename, threshold=1e-06):
         used to threshold the image: values below the threshold (in absolute value)
         are plotted as transparent. If 'auto' is given, the threshold is determined
         magically by analysis of the image. Default=1e-6.
+    quality: str, optional
+        Quality setting for the visualization. Options are:
+        - 'low': Uses 4mm resolution template, best for memory efficiency
+        - 'medium': Uses 2mm resolution template
+        - 'high': Uses 1mm resolution template (memory intensive)
+        Default is 'low' for memory efficiency.
     """
     _check_extention(out_filename, [".html"])
 
-    template = datasets.load_mni152_template(resolution=1)
-    html_view = view_img(
-        img,
-        bg_img=template,
-        black_bg=False,
-        threshold=threshold,
-        symmetric_cmap=True,
-    )
-    html_view.save_as_html(out_filename)
+    # Set template resolution based on quality
+    resolution_map = {"low": 4, "medium": 2, "high": 1}
+    resolution = resolution_map.get(quality, 4)  # Default to low if invalid quality
+
+    try:
+        template = datasets.load_mni152_template(resolution=resolution)
+        html_view = view_img(
+            img,
+            bg_img=template,
+            black_bg=False,
+            threshold=threshold,
+            symmetric_cmap=True,
+        )
+        html_view.save_as_html(out_filename)
+    finally:
+        # Cleanup resources
+        if "html_view" in locals():
+            del html_view
 
 
 def plot_heatmap(
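
Note: the new quality argument only selects the background template resolution passed to load_mni152_template (low = 4 mm, medium = 2 mm, high = 1 mm per the mapping above). A hedged usage sketch with a toy image; the array, affine, and output path are invented for illustration:

    from pathlib import Path

    import nibabel as nib
    import numpy as np

    from nimare.reports.figures import plot_interactive_brain

    data = np.random.default_rng(0).normal(size=(20, 24, 20)).astype("float32")
    z_img = nib.Nifti1Image(data, affine=np.diag([8.0, 8.0, 8.0, 1.0]))  # toy stat image

    plot_interactive_brain(z_img, Path("meta_z_interactive.html"), threshold=1.0, quality="low")
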
@@ -405,7 +448,7 @@ def plot_clusters(img, out_filename):
         colorbar=True,
         display_mode="mosaic",
     )
-    fig.savefig(out_filename, dpi=300)
+    fig.savefig(out_filename, dpi=100)
     fig.close()
 
 
@@ -605,7 +648,7 @@ def _plot_relcov_map(maps_arr, masker, out_filename):
     coverage_img = masker.inverse_transform(coverage_arr)
 
     # Plot coverage map
-    template = datasets.load_mni152_template(resolution=1)
+    template = datasets.load_mni152_template(resolution=2)
     fig = plot_img(
         coverage_img,
         bg_img=template,
@@ -619,7 +662,7 @@ def _plot_relcov_map(maps_arr, masker, out_filename):
         vmax=1,
         display_mode="mosaic",
     )
-    fig.savefig(out_filename, dpi=300)
+    fig.savefig(out_filename, dpi=100)
     fig.close()
 
 
@@ -647,5 +690,5 @@ def _plot_dof_map(dof_map, out_filename):
         vmin=0,
         display_mode="mosaic",
     )
-    fig.savefig(out_filename, dpi=300)
+    fig.savefig(out_filename, dpi=100)
     fig.close()
nimare/tests/test_extract.py CHANGED
@@ -2,10 +2,28 @@
 
 import os
 from glob import glob
+from io import BytesIO
+from unittest.mock import patch
 
 import nimare
 
 
+def mock_urlopen(url):
+    """Mock URL opener that returns appropriate mock data based on file type."""
+    if "coordinates.tsv.gz" in url:
+        mock_data = b"x\ty\tz\n1\t2\t3\n4\t5\t6"
+    elif "metadata.tsv.gz" in url:
+        mock_data = b"id\ttitle\n1\tStudy 1\n2\tStudy 2"
+    elif "features.npz" in url:
+        mock_data = b"mock npz content"
+    elif "vocabulary.txt" in url:
+        mock_data = b"term1\nterm2\nterm3"
+    else:
+        mock_data = b"Mock file content"
+    return BytesIO(mock_data)
+
+
+@patch("nimare.extract.extract.urlopen", side_effect=mock_urlopen)
 def test_fetch_neurosynth(tmp_path_factory):
     """Smoke test for extract.fetch_neurosynth.
 
@@ -19,15 +37,20 @@ def test_fetch_neurosynth(tmp_path_factory):
         source="abstract",
         vocab="terms",
     )
-    files = glob(os.path.join(tmpdir, "neurosynth", "*"))
-    assert len(files) == 4
-
-    # One set of files found
+    # Check data_files structure
     assert isinstance(data_files, list)
     assert len(data_files) == 1
 
+    # Verify expected files in data_files
+    files_dict = data_files[0]
+    assert "coordinates" in files_dict
+    assert "metadata" in files_dict
+    assert "features" in files_dict
+    assert len(files_dict["features"]) == 1
+
 
-def test_fetch_neuroquery(tmp_path_factory):
+@patch("nimare.extract.extract.urlopen", side_effect=mock_urlopen)
+def test_fetch_neuroquery(mock_url, tmp_path_factory):
     """Smoke test for extract.fetch_neuroquery."""
     tmpdir = tmp_path_factory.mktemp("test_fetch_neuroquery")
     data_files = nimare.extract.fetch_neuroquery(
@@ -44,3 +67,10 @@ def test_fetch_neuroquery(tmp_path_factory):
     # One set of files found
     assert isinstance(data_files, list)
     assert len(data_files) == 1
+
+    # Verify mock was called with expected URLs
+    assert mock_url.call_count > 0  # Should be called for each file download
+    for call in mock_url.call_args_list:
+        url = call[0][0]
+        assert "neuroquery/neuroquery_data/blob" in url
+        assert "?raw=true" in url
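
Note: both fetch tests now patch urlopen with a side_effect callable, so every download resolves to an in-memory BytesIO and the smoke tests run without network access. The same pattern in isolation (a self-contained illustration, unrelated to the NiMARE helpers):

    import urllib.request
    from io import BytesIO
    from unittest.mock import patch

    def fake_urlopen(url):
        # Serve canned bytes instead of hitting the network.
        return BytesIO(b"term1\nterm2\n")

    with patch.object(urllib.request, "urlopen", side_effect=fake_urlopen) as mocked:
        payload = urllib.request.urlopen("https://example.com/vocabulary.txt").read()

    assert payload.startswith(b"term1")
    assert mocked.call_count == 1
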
nimare/tests/test_reports.py CHANGED
@@ -108,3 +108,25 @@ def test_reports_ibma_smoke(tmp_path_factory, testdata_ibma, aggressive_mask):
     filename = "report.html"
     outpath = op.join(hedges_dir, filename)
     assert op.isfile(outpath)
+
+
+def test_reports_ibma_multiple_contrasts_smoke(tmp_path_factory, testdata_ibma_multiple_contrasts):
+    """Smoke test for IBMA reports for multiple contrasts."""
+    tmpdir = tmp_path_factory.mktemp("test_reports_ibma_smoke")
+
+    # Generate a report with z maps as inputs
+    stouffers_dir = op.join(tmpdir, "stouffers")
+    workflow = IBMAWorkflow(
+        estimator=Stouffers(aggressive_mask=True),
+        corrector="fdr",
+        diagnostics="jackknife",
+        voxel_thresh=3.2,
+        output_dir=stouffers_dir,
+    )
+    results = workflow.fit(testdata_ibma_multiple_contrasts)
+
+    run_reports(results, stouffers_dir)
+
+    filename = "report.html"
+    outpath = op.join(stouffers_dir, filename)
+    assert op.isfile(outpath)
nimare/utils.py CHANGED
@@ -1286,9 +1286,9 @@ def b_spline_bases(masker_voxels, spacing, margin=10):
     x_spline_coords = x_spline.nonzero()
     y_spline_coords = y_spline.nonzero()
     z_spline_coords = z_spline.nonzero()
-    x_spline_sparse = sparse.COO(x_spline_coords, x_spline[x_spline_coords])
-    y_spline_sparse = sparse.COO(y_spline_coords, y_spline[y_spline_coords])
-    z_spline_sparse = sparse.COO(z_spline_coords, z_spline[z_spline_coords])
+    x_spline_sparse = sparse.COO(x_spline_coords, x_spline[x_spline_coords], shape=x_spline.shape)
+    y_spline_sparse = sparse.COO(y_spline_coords, y_spline[y_spline_coords], shape=y_spline.shape)
+    z_spline_sparse = sparse.COO(z_spline_coords, z_spline[z_spline_coords], shape=z_spline.shape)
 
     # create spatial design matrix by tensor product of spline bases in 3 dimesion
     # Row sums of X are all 1=> There is no need to re-normalise X
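
Note: passing shape= matters here because sparse.COO otherwise infers the array shape from the largest nonzero index, so a spline basis whose trailing rows or columns are entirely zero would come back truncated. A small self-contained illustration of that behavior (toy array, not NiMARE code):

    import numpy as np
    import sparse

    x = np.zeros((4, 6))
    x[1, 2] = 1.0  # single nonzero; the last rows/columns are entirely zero

    coords = x.nonzero()
    inferred = sparse.COO(coords, x[coords])                 # shape inferred from max index
    explicit = sparse.COO(coords, x[coords], shape=x.shape)  # keeps the true (4, 6) shape

    print(inferred.shape, explicit.shape)  # (2, 3) (4, 6)
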
nimare/workflows/base.py CHANGED
@@ -63,7 +63,12 @@ def _check_input(obj, clss, options, **kwargs):
     if obj == FWECorrector:
         kwargs["method"] = obj_str
 
-    return _check_type(obj, clss, **kwargs)
+    # Apply kwargs (including n_cores) when obj is a class or string
+    if isinstance(obj, (str, type)):
+        return _check_type(obj, clss, **kwargs)
+
+    # If object is already instantiated, return it as-is
+    return _check_type(obj, clss)
 
 
 class Workflow(NiMAREBase):
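
Note: the effect of the _check_input change is that keyword arguments such as n_cores are only forwarded when the estimator or corrector is supplied as a string or a class; an object the user already instantiated is passed through without its settings being overridden. A minimal sketch of that dispatch pattern under hypothetical names (resolve and default_cls are not NiMARE APIs):

    def resolve(obj, default_cls, **kwargs):
        """Instantiate strings/classes with kwargs; leave ready-made instances untouched."""
        if isinstance(obj, str):
            obj = {"default": default_cls}[obj]  # hypothetical name-to-class lookup
        if isinstance(obj, type):
            return obj(**kwargs)  # e.g. n_cores is applied here
        return obj  # already configured by the caller; respect its settings
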
@@ -93,23 +98,25 @@ class Workflow(NiMAREBase):
             diagnostics = [diagnostics]
 
         # Check inputs and set defaults if input is None
-        estimator = (
-            self._estm_default(n_cores=self.n_cores)
-            if estimator is None
-            else _check_input(estimator, self._estm_base, self._estm_options, n_cores=self.n_cores)
-        )
-
-        corrector = (
-            self._corr_default(method=self._mcc_method, n_cores=self.n_cores)
-            if corrector is None
-            else _check_input(corrector, Corrector, self._corr_options, n_cores=self.n_cores)
-        )
+        if estimator is None:
+            estimator = self._estm_default(n_cores=self.n_cores)
+        else:
+            estimator = _check_input(
+                estimator, self._estm_base, self._estm_options, n_cores=self.n_cores
+            )
+
+        if corrector is None:
+            corrector = self._corr_default(method=self._mcc_method, n_cores=self.n_cores)
+        else:
+            corrector = _check_input(
+                corrector, Corrector, self._corr_options, n_cores=self.n_cores
+            )
 
         diag_kwargs = {
             "voxel_thresh": self.voxel_thresh,
             "cluster_threshold": self.cluster_threshold,
-            "n_cores": self.n_cores,
         }
+        diag_kwargs["n_cores"] = self.n_cores
         if diagnostics is None:
             diagnostics = [self._diag_default(**diag_kwargs)]
         else:
nimare-0.5.0rc1.dist-info/METADATA → nimare-0.5.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: nimare
-Version: 0.5.0rc1
+Version: 0.5.1
 Summary: NiMARE: Neuroimaging Meta-Analysis Research Environment
 Home-page: https://github.com/neurostuff/NiMARE
 Author: NiMARE developers
nimare-0.5.0rc1.dist-info/RECORD → nimare-0.5.1.dist-info/RECORD RENAMED
@@ -1,11 +1,11 @@
 benchmarks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 benchmarks/bench_cbma.py,sha256=fg_EER8hohi6kD1Hno_uXsFntKuCLTvseu-6OqkHkoU,1382
 nimare/__init__.py,sha256=HHIq3EimSZSf3zJSDwuTjBig1GbRwHGYfRLSqI3yleE,802
-nimare/_version.py,sha256=B-yG1ELidF5YmffvLhUktR130qlkvoRlzyFTb5ItxHw,500
+nimare/_version.py,sha256=zxY54giXQDJy4ANitRct_wX2SzKIjVggp3lnrvgHrKk,497
 nimare/base.py,sha256=9DlcRB2mW759p7XqpKG3wRE-MmPsEPiYTbq6V1Yile4,7826
 nimare/cli.py,sha256=Zvy5jN2KopH_MBke-gm8A0DbBZmIFGvnE1tjhbYib9I,3695
 nimare/correct.py,sha256=2eI0jR6_odND-2CzSlaxRU2um6ccLSapd7ERAYteBnE,17110
-nimare/dataset.py,sha256=LmjDE1GQ3ud33l3plvms_Uv8GB9_Xb-6kPtkIV2eYTw,24650
+nimare/dataset.py,sha256=pSEccmbqQCYiwfjoy45tgYRTBt1CEFqmKFz_T8p95cA,24814
 nimare/diagnostics.py,sha256=sCatkXUM9rvrY9MMz6Q66njWSiWGifWwSNEMu3ldnVs,20263
 nimare/estimator.py,sha256=DtsSIyPDrKkpi-KNv2m-cZMvJO7CCfHLXHRhCT73sbY,5063
 nimare/generate.py,sha256=L4_c2sLAlF7XDKTm-3q4oOx8pLID2NaxG9YET5KSIZw,12475
@@ -14,7 +14,7 @@ nimare/nimads.py,sha256=2s5QnaLvrBt-kMrImGhG_p6r0unysufIIcPczr2bG0c,24342
 nimare/results.py,sha256=7szcyR6PkZAXBqbIGGWrw1nW9j9QCdpdl4MxUK_1Wzc,8190
 nimare/stats.py,sha256=XhXfFj6KHTPVSTXhbEid0qt8HLqJD82Bl5T23qmaf40,10098
 nimare/transforms.py,sha256=_kZO8N3IEHxd6Ir4IcewZtkWHpFknnjEnjsaD9b-1pg,30136
-nimare/utils.py,sha256=BJOOKgQuubamE0RBMJVubggQfaPgIH9wZ9Oou9DKvbU,46936
+nimare/utils.py,sha256=rMuT1mhEYrb3R_2EAsz5A_f5VPWXzqRkH_saQTFybm4,47002
 nimare/annotate/__init__.py,sha256=hTla1yFYTJ8PDjm72ReeHa3qf0Que50Ww0fqz3Z86JI,434
 nimare/annotate/cogat.py,sha256=xzrepAuTkRenbECn4XYwgyul45r0tIMxCDKQV_ZFVb8,7457
 nimare/annotate/gclda.py,sha256=P2OQjuJn3DX0GVPte_VIVZf0LfO-yE_OhLbs6DwMaHQ,42592
@@ -35,15 +35,15 @@ nimare/meta/cbmr.py,sha256=h9CNMHW4KUh-YQYyGDRzqPN7G1tkHYAhrSgxrP8E61s,47769
 nimare/meta/ibma.py,sha256=qVw3ZhOlK6hgTdZZwa7TPDpfrGLf52dN00wH2Gw-Z8c,65442
 nimare/meta/kernel.py,sha256=5wN-6dbLkzqQ0WUSTrC0DJPSbbyGiZCls9M_TAYhNGY,19169
 nimare/meta/models.py,sha256=0QPlQTjWaNTeI8qTX-DHMXVjQSRD72SfJ2RZIYBZnCg,47054
-nimare/meta/utils.py,sha256=fvnFz4ek8APjQHhYBS441WYXE1N9huHaTiOO0Bvui4Q,18083
+nimare/meta/utils.py,sha256=84T4I-wn-HTx7zwMqh0TdvzUbBe_vcpxIU4jA_pfuFU,18083
 nimare/meta/cbma/__init__.py,sha256=bWLrv5tL03U2ITttnnz3yerbg74w44qkZgdy32QMhqQ,219
 nimare/meta/cbma/ale.py,sha256=ZNjXC4MXhfSHvrTRppY04NkGtz_Iri696k5QMuBog7o,39273
 nimare/meta/cbma/base.py,sha256=-gJ4hW6_6T5oto3pre9GbWodHWRmPS2WZ1LJjB0BtMI,38382
 nimare/meta/cbma/mkda.py,sha256=V5vSQodiLZoZhkYPhous0a_-1pZoRjAIL8gmGyyoezc,60397
 nimare/reports/__init__.py,sha256=zGfrOPlMq2lTorKNEpyPblArFCnsvz3dRsvOHy_Ciag,428
-nimare/reports/base.py,sha256=DPYr1lTuS740pSv5Q-tE0UegU0mckG0JMI-2TKVe64Y,23489
+nimare/reports/base.py,sha256=wN3r8-f4Z4B8TxgruAGFrIjQRoWamuhsln-a7ubUcXo,23544
 nimare/reports/default.yml,sha256=zTcWUlC9ntvacMcIIUAJPoFV8XrvPGqCH2uwovwB4Wc,6003
-nimare/reports/figures.py,sha256=MCsjDR53475Jz1_anktKvNOh0qUNPMx-dfvP6Vy0uUc,19783
+nimare/reports/figures.py,sha256=lxa20_n3PZ1aJMvkbzYqaptJZTUpJotQT-Rjwv-DAz0,21835
 nimare/reports/report.tpl,sha256=ZweYiJF80zdaIwK1iawAl1pwmBZdPjpxXxY5C0U5yVs,5518
 nimare/resources/__init__.py,sha256=huwax0KmUCfwVlxSRYS0qwmEzaAcNkNnpNbKp1K0n1E,28
 nimare/resources/database_file_manifest.json,sha256=gn9aXERthi0KYzdI5cvYKcVXWG-BLa2BKNq-r2nQJAI,7441
@@ -72,7 +72,7 @@ nimare/tests/test_decode_continuous.py,sha256=KmdkulIIBBpjbao-TdFTVFuRd6lUx_-3Qq
 nimare/tests/test_decode_discrete.py,sha256=NL__uEvMori4HtmoZUVdj465lW4Qwr4ndgyOz5rB7dY,3293
 nimare/tests/test_diagnostics.py,sha256=VrfR_8nQKn2VF7dFdnTM7ZQy3Ou5eHdpaLhml5T6Pb0,6467
 nimare/tests/test_estimator_performance.py,sha256=tbK2Qr83rB0in-pB6MccnjLg4iHSyfilx-hTNDWQfe4,12749
-nimare/tests/test_extract.py,sha256=XJSxZTdy_hAej1J9CFK9zQk29rAM5KPiZKlopmUVCJ4,1206
+nimare/tests/test_extract.py,sha256=nPaL07G9paLRCJzPOv79jH3mhOPs2YvQdghoLfcDz5A,2348
 nimare/tests/test_generate.py,sha256=LSh2APJsg87u2s2zydkrre3RVk_ZGpoB4d7uuvIPWYE,7318
 nimare/tests/test_io.py,sha256=QKr_zRGu8tyrpiLoLAjCV9ektxCTHRlKPWgyJRqQ9T8,10397
 nimare/tests/test_meta_ale.py,sha256=hccXSNzLGUgj6E4tCsiHZpuUFoBxXkP293-vtUS5jdE,11791
@@ -81,7 +81,7 @@ nimare/tests/test_meta_ibma.py,sha256=Yw4F0_pr3cpVSe7oeMlK0-btg1Uw58cenklOsIw87P
 nimare/tests/test_meta_kernel.py,sha256=Edk6lOsnqokg86mp9jAkokA203K61R7pjJEmyEEzV7E,8450
 nimare/tests/test_meta_mkda.py,sha256=9PuzNUKrTBjbCHdSnuOAToXbV7wp1O0TCdD537qGQkA,9206
 nimare/tests/test_nimads.py,sha256=3yzCO8rmUVfEYAt3HNnJcyAENorJ5BOWdJXY3hjrdP0,9807
-nimare/tests/test_reports.py,sha256=Qdz-PHjQwOneRmSCo0ac2d67BeGypWJIMi4OoiQrNik,3293
+nimare/tests/test_reports.py,sha256=KIihXTAsIntsEG6xk3crMFLoQGiI5EOr0mesCne48CA,4030
 nimare/tests/test_stats.py,sha256=_GhpUC1u4hnFR2SZ-sHQqkJ5MwsyPsvwPEd2GkQmsHY,4030
 nimare/tests/test_transforms.py,sha256=mzEnufefhybs4r_dfRY6zQUAShepPMwKFY7S5amq3cs,10378
 nimare/tests/test_utils.py,sha256=JaJYexM_xJOa-Jhv5OdbQBZ-3IWba1-Dmf1-V580ACo,6443
@@ -106,14 +106,14 @@ nimare/tests/data/test_sleuth_file3.txt,sha256=UuTnokCy1oJz1_Bw6TMu49ZSroK_ony2U
 nimare/tests/data/test_sleuth_file4.txt,sha256=f87QglZBjK9JFJRbNipRX_YOXVjWLlTQ317r5MK6DwQ,90
 nimare/tests/data/test_sleuth_file5.txt,sha256=BCCAz8LGDdJUKUBbv-6btzvB5StlQhGglA89EeEHXZw,88
 nimare/workflows/__init__.py,sha256=yqd43gG8UJmaIT35vojNBDqRIunApmb8sbiplYArnBY,326
-nimare/workflows/base.py,sha256=-foQ9v8i6QThux5MWagnO78uqWEv69UYNtBys-FV_ro,6460
+nimare/workflows/base.py,sha256=ip3UDD2JZcwfxo0nBGRBF4ZQjNWYDbkrCg0H6bOi7VM,6727
 nimare/workflows/cbma.py,sha256=2jYJs9kH7_LzFP6d7-oTHiTTgAFbtmiBNtBXSCSZPjg,7052
 nimare/workflows/ibma.py,sha256=lAkWtqSqnZiUUV460Bh046U9LeGhnry3bl8BFi-tx7s,4289
 nimare/workflows/macm.py,sha256=mVUBeKbTawhU93ApnkunZSUXZWo7qBPrM3dMGWfl0ik,2531
 nimare/workflows/misc.py,sha256=OWgHlSAnRI0-5Seii-bd48piIYsfEAF_aNKGorH1yJQ,1827
-nimare-0.5.0rc1.dist-info/LICENSE,sha256=PWPXnCGWh-FMiBZ61OnQ2BHFjPPlJJ7F0kFx_ryzp-M,1074
-nimare-0.5.0rc1.dist-info/METADATA,sha256=YyiKFKuawdfsp9SIW5pJ6nVgibwk27EYr8bKIointfA,4698
-nimare-0.5.0rc1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-nimare-0.5.0rc1.dist-info/entry_points.txt,sha256=3w_hk9N2PWnKZkCaJyDlc0_kdn3rh35aiI21rSdvsuA,44
-nimare-0.5.0rc1.dist-info/top_level.txt,sha256=XnOcEXMs0BxdI8t3_ksTl96T8hykn9L7-bxLLraVrTI,18
-nimare-0.5.0rc1.dist-info/RECORD,,
+nimare-0.5.1.dist-info/LICENSE,sha256=PWPXnCGWh-FMiBZ61OnQ2BHFjPPlJJ7F0kFx_ryzp-M,1074
+nimare-0.5.1.dist-info/METADATA,sha256=0vv_-MOWMy_eTDMO7at8tK0ptngxcL0H1klFmqN1SBs,4695
+nimare-0.5.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+nimare-0.5.1.dist-info/entry_points.txt,sha256=3w_hk9N2PWnKZkCaJyDlc0_kdn3rh35aiI21rSdvsuA,44
+nimare-0.5.1.dist-info/top_level.txt,sha256=XnOcEXMs0BxdI8t3_ksTl96T8hykn9L7-bxLLraVrTI,18
+nimare-0.5.1.dist-info/RECORD,,