nimare-0.4.2rc4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nimare-0.4.2rc4/LICENSE +21 -0
- nimare-0.4.2rc4/MANIFEST.in +2 -0
- nimare-0.4.2rc4/PKG-INFO +124 -0
- nimare-0.4.2rc4/README.md +95 -0
- nimare-0.4.2rc4/benchmarks/__init__.py +0 -0
- nimare-0.4.2rc4/benchmarks/bench_cbma.py +57 -0
- nimare-0.4.2rc4/nimare/__init__.py +45 -0
- nimare-0.4.2rc4/nimare/_version.py +21 -0
- nimare-0.4.2rc4/nimare/annotate/__init__.py +21 -0
- nimare-0.4.2rc4/nimare/annotate/cogat.py +213 -0
- nimare-0.4.2rc4/nimare/annotate/gclda.py +924 -0
- nimare-0.4.2rc4/nimare/annotate/lda.py +147 -0
- nimare-0.4.2rc4/nimare/annotate/text.py +75 -0
- nimare-0.4.2rc4/nimare/annotate/utils.py +87 -0
- nimare-0.4.2rc4/nimare/base.py +217 -0
- nimare-0.4.2rc4/nimare/cli.py +124 -0
- nimare-0.4.2rc4/nimare/correct.py +462 -0
- nimare-0.4.2rc4/nimare/dataset.py +685 -0
- nimare-0.4.2rc4/nimare/decode/__init__.py +33 -0
- nimare-0.4.2rc4/nimare/decode/base.py +115 -0
- nimare-0.4.2rc4/nimare/decode/continuous.py +462 -0
- nimare-0.4.2rc4/nimare/decode/discrete.py +753 -0
- nimare-0.4.2rc4/nimare/decode/encode.py +110 -0
- nimare-0.4.2rc4/nimare/decode/utils.py +44 -0
- nimare-0.4.2rc4/nimare/diagnostics.py +510 -0
- nimare-0.4.2rc4/nimare/estimator.py +139 -0
- nimare-0.4.2rc4/nimare/extract/__init__.py +19 -0
- nimare-0.4.2rc4/nimare/extract/extract.py +466 -0
- nimare-0.4.2rc4/nimare/extract/utils.py +295 -0
- nimare-0.4.2rc4/nimare/generate.py +331 -0
- nimare-0.4.2rc4/nimare/io.py +635 -0
- nimare-0.4.2rc4/nimare/meta/__init__.py +39 -0
- nimare-0.4.2rc4/nimare/meta/cbma/__init__.py +6 -0
- nimare-0.4.2rc4/nimare/meta/cbma/ale.py +951 -0
- nimare-0.4.2rc4/nimare/meta/cbma/base.py +947 -0
- nimare-0.4.2rc4/nimare/meta/cbma/mkda.py +1361 -0
- nimare-0.4.2rc4/nimare/meta/cbmr.py +970 -0
- nimare-0.4.2rc4/nimare/meta/ibma.py +1683 -0
- nimare-0.4.2rc4/nimare/meta/kernel.py +501 -0
- nimare-0.4.2rc4/nimare/meta/models.py +1199 -0
- nimare-0.4.2rc4/nimare/meta/utils.py +494 -0
- nimare-0.4.2rc4/nimare/nimads.py +492 -0
- nimare-0.4.2rc4/nimare/reports/__init__.py +24 -0
- nimare-0.4.2rc4/nimare/reports/base.py +664 -0
- nimare-0.4.2rc4/nimare/reports/default.yml +123 -0
- nimare-0.4.2rc4/nimare/reports/figures.py +651 -0
- nimare-0.4.2rc4/nimare/reports/report.tpl +160 -0
- nimare-0.4.2rc4/nimare/resources/__init__.py +1 -0
- nimare-0.4.2rc4/nimare/resources/atlases/Harvard-Oxford-LICENSE +93 -0
- nimare-0.4.2rc4/nimare/resources/atlases/HarvardOxford-cort-maxprob-thr25-2mm.nii.gz +0 -0
- nimare-0.4.2rc4/nimare/resources/database_file_manifest.json +142 -0
- nimare-0.4.2rc4/nimare/resources/english_spellings.csv +1738 -0
- nimare-0.4.2rc4/nimare/resources/filenames.json +32 -0
- nimare-0.4.2rc4/nimare/resources/neurosynth_laird_studies.json +58773 -0
- nimare-0.4.2rc4/nimare/resources/neurosynth_stoplist.txt +396 -0
- nimare-0.4.2rc4/nimare/resources/nidm_pain_dset.json +1349 -0
- nimare-0.4.2rc4/nimare/resources/references.bib +541 -0
- nimare-0.4.2rc4/nimare/resources/semantic_knowledge_children.txt +325 -0
- nimare-0.4.2rc4/nimare/resources/semantic_relatedness_children.txt +249 -0
- nimare-0.4.2rc4/nimare/resources/templates/MNI152_2x2x2_brainmask.nii.gz +0 -0
- nimare-0.4.2rc4/nimare/resources/templates/tpl-MNI152NLin6Asym_res-01_T1w.nii.gz +0 -0
- nimare-0.4.2rc4/nimare/resources/templates/tpl-MNI152NLin6Asym_res-01_desc-brain_mask.nii.gz +0 -0
- nimare-0.4.2rc4/nimare/resources/templates/tpl-MNI152NLin6Asym_res-02_T1w.nii.gz +0 -0
- nimare-0.4.2rc4/nimare/resources/templates/tpl-MNI152NLin6Asym_res-02_desc-brain_mask.nii.gz +0 -0
- nimare-0.4.2rc4/nimare/results.py +225 -0
- nimare-0.4.2rc4/nimare/stats.py +276 -0
- nimare-0.4.2rc4/nimare/tests/__init__.py +1 -0
- nimare-0.4.2rc4/nimare/tests/conftest.py +229 -0
- nimare-0.4.2rc4/nimare/tests/data/amygdala_roi.nii.gz +0 -0
- nimare-0.4.2rc4/nimare/tests/data/data-neurosynth_version-7_coordinates.tsv.gz +0 -0
- nimare-0.4.2rc4/nimare/tests/data/data-neurosynth_version-7_metadata.tsv.gz +0 -0
- nimare-0.4.2rc4/nimare/tests/data/data-neurosynth_version-7_vocab-terms_source-abstract_type-tfidf_features.npz +0 -0
- nimare-0.4.2rc4/nimare/tests/data/data-neurosynth_version-7_vocab-terms_vocabulary.txt +100 -0
- nimare-0.4.2rc4/nimare/tests/data/neurosynth_dset.json +2868 -0
- nimare-0.4.2rc4/nimare/tests/data/neurosynth_laird_studies.json +58773 -0
- nimare-0.4.2rc4/nimare/tests/data/nidm_pain_dset.json +1349 -0
- nimare-0.4.2rc4/nimare/tests/data/nimads_annotation.json +1 -0
- nimare-0.4.2rc4/nimare/tests/data/nimads_studyset.json +1 -0
- nimare-0.4.2rc4/nimare/tests/data/test_baseline.txt +2 -0
- nimare-0.4.2rc4/nimare/tests/data/test_pain_dataset.json +1278 -0
- nimare-0.4.2rc4/nimare/tests/data/test_pain_dataset_multiple_contrasts.json +1242 -0
- nimare-0.4.2rc4/nimare/tests/data/test_sleuth_file.txt +18 -0
- nimare-0.4.2rc4/nimare/tests/data/test_sleuth_file2.txt +10 -0
- nimare-0.4.2rc4/nimare/tests/data/test_sleuth_file3.txt +5 -0
- nimare-0.4.2rc4/nimare/tests/data/test_sleuth_file4.txt +5 -0
- nimare-0.4.2rc4/nimare/tests/data/test_sleuth_file5.txt +5 -0
- nimare-0.4.2rc4/nimare/tests/test_annotate_cogat.py +32 -0
- nimare-0.4.2rc4/nimare/tests/test_annotate_gclda.py +86 -0
- nimare-0.4.2rc4/nimare/tests/test_annotate_lda.py +27 -0
- nimare-0.4.2rc4/nimare/tests/test_dataset.py +99 -0
- nimare-0.4.2rc4/nimare/tests/test_decode_continuous.py +132 -0
- nimare-0.4.2rc4/nimare/tests/test_decode_discrete.py +92 -0
- nimare-0.4.2rc4/nimare/tests/test_diagnostics.py +168 -0
- nimare-0.4.2rc4/nimare/tests/test_estimator_performance.py +385 -0
- nimare-0.4.2rc4/nimare/tests/test_extract.py +46 -0
- nimare-0.4.2rc4/nimare/tests/test_generate.py +247 -0
- nimare-0.4.2rc4/nimare/tests/test_io.py +240 -0
- nimare-0.4.2rc4/nimare/tests/test_meta_ale.py +298 -0
- nimare-0.4.2rc4/nimare/tests/test_meta_cbmr.py +295 -0
- nimare-0.4.2rc4/nimare/tests/test_meta_ibma.py +240 -0
- nimare-0.4.2rc4/nimare/tests/test_meta_kernel.py +209 -0
- nimare-0.4.2rc4/nimare/tests/test_meta_mkda.py +234 -0
- nimare-0.4.2rc4/nimare/tests/test_nimads.py +21 -0
- nimare-0.4.2rc4/nimare/tests/test_reports.py +110 -0
- nimare-0.4.2rc4/nimare/tests/test_stats.py +101 -0
- nimare-0.4.2rc4/nimare/tests/test_transforms.py +272 -0
- nimare-0.4.2rc4/nimare/tests/test_utils.py +200 -0
- nimare-0.4.2rc4/nimare/tests/test_workflows.py +221 -0
- nimare-0.4.2rc4/nimare/tests/utils.py +126 -0
- nimare-0.4.2rc4/nimare/transforms.py +907 -0
- nimare-0.4.2rc4/nimare/utils.py +1367 -0
- nimare-0.4.2rc4/nimare/workflows/__init__.py +14 -0
- nimare-0.4.2rc4/nimare/workflows/base.py +189 -0
- nimare-0.4.2rc4/nimare/workflows/cbma.py +165 -0
- nimare-0.4.2rc4/nimare/workflows/ibma.py +108 -0
- nimare-0.4.2rc4/nimare/workflows/macm.py +77 -0
- nimare-0.4.2rc4/nimare/workflows/misc.py +65 -0
- nimare-0.4.2rc4/nimare.egg-info/PKG-INFO +124 -0
- nimare-0.4.2rc4/nimare.egg-info/SOURCES.txt +128 -0
- nimare-0.4.2rc4/nimare.egg-info/dependency_links.txt +1 -0
- nimare-0.4.2rc4/nimare.egg-info/entry_points.txt +2 -0
- nimare-0.4.2rc4/nimare.egg-info/not-zip-safe +1 -0
- nimare-0.4.2rc4/nimare.egg-info/requires.txt +91 -0
- nimare-0.4.2rc4/nimare.egg-info/top_level.txt +2 -0
- nimare-0.4.2rc4/pypi_description.md +16 -0
- nimare-0.4.2rc4/pyproject.toml +36 -0
- nimare-0.4.2rc4/setup.cfg +132 -0
- nimare-0.4.2rc4/setup.py +13 -0
- nimare-0.4.2rc4/versioneer.py +2149 -0
nimare-0.4.2rc4/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 NiMARE developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
nimare-0.4.2rc4/PKG-INFO
ADDED
@@ -0,0 +1,124 @@
+Metadata-Version: 2.1
+Name: nimare
+Version: 0.4.2rc4
+Summary: NiMARE: Neuroimaging Meta-Analysis Research Environment
+Home-page: https://github.com/neurostuff/NiMARE
+Author: NiMARE developers
+Author-email: tsalo006@fiu.edu
+Maintainer: Taylor Salo
+Maintainer-email: tsalo006@fiu.edu
+License: MIT
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Scientific/Engineering
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: cognitiveatlas>=0.1.11
+Requires-Dist: fuzzywuzzy
+Requires-Dist: importlib-resources; python_version < "3.9"
+Requires-Dist: jinja2
+Requires-Dist: joblib>=1.3.0
+Requires-Dist: matplotlib>=3.6.0
+Requires-Dist: nibabel>=3.2.0
+Requires-Dist: nilearn!=0.10.3,>=0.10.1
+Requires-Dist: numba>=0.57.0
+Requires-Dist: numpy>=1.22
+Requires-Dist: pandas>=2.0.0
+Requires-Dist: patsy
+Requires-Dist: plotly
+Requires-Dist: pymare>=0.0.8
+Requires-Dist: pyyaml
+Requires-Dist: requests
+Requires-Dist: ridgeplot
+Requires-Dist: scikit-learn>=1.0.0
+Requires-Dist: scipy>=1.6.0
+Requires-Dist: sparse>=0.13.0
+Requires-Dist: statsmodels!=0.13.2
+Requires-Dist: tqdm
+Provides-Extra: gzip
+Requires-Dist: indexed_gzip>=1.4.0; extra == "gzip"
+Provides-Extra: cbmr
+Requires-Dist: torch>=2.0; extra == "cbmr"
+Provides-Extra: doc
+Requires-Dist: m2r2; extra == "doc"
+Requires-Dist: matplotlib; extra == "doc"
+Requires-Dist: mistune; extra == "doc"
+Requires-Dist: pillow; extra == "doc"
+Requires-Dist: recommonmark; extra == "doc"
+Requires-Dist: seaborn; extra == "doc"
+Requires-Dist: sphinx>=3.5; extra == "doc"
+Requires-Dist: sphinx-argparse; extra == "doc"
+Requires-Dist: sphinx-copybutton; extra == "doc"
+Requires-Dist: sphinx-gallery; extra == "doc"
+Requires-Dist: sphinx_rtd_theme>=1.3.0; extra == "doc"
+Requires-Dist: sphinxcontrib-bibtex; extra == "doc"
+Requires-Dist: sphinxcontrib-mermaid; extra == "doc"
+Requires-Dist: docutils<0.21,>=0.18.1; extra == "doc"
+Provides-Extra: tests
+Requires-Dist: coverage; extra == "tests"
+Requires-Dist: coveralls; extra == "tests"
+Requires-Dist: flake8-black; extra == "tests"
+Requires-Dist: flake8-docstrings; extra == "tests"
+Requires-Dist: flake8-isort; extra == "tests"
+Requires-Dist: pytest; extra == "tests"
+Requires-Dist: pytest-cov; extra == "tests"
+Provides-Extra: minimum
+Requires-Dist: matplotlib==3.6.0; extra == "minimum"
+Requires-Dist: nibabel==4.0.0; extra == "minimum"
+Requires-Dist: nilearn==0.10.1; extra == "minimum"
+Requires-Dist: numpy==1.22; extra == "minimum"
+Requires-Dist: pandas==2.0.0; extra == "minimum"
+Requires-Dist: pymare==0.0.8; extra == "minimum"
+Requires-Dist: scikit-learn==1.0.0; extra == "minimum"
+Requires-Dist: scipy==1.6.0; extra == "minimum"
+Requires-Dist: seaborn==0.13.0; extra == "minimum"
+Provides-Extra: all
+Requires-Dist: indexed_gzip>=1.4.0; extra == "all"
+Requires-Dist: torch>=2.0; extra == "all"
+Requires-Dist: m2r2; extra == "all"
+Requires-Dist: matplotlib; extra == "all"
+Requires-Dist: mistune; extra == "all"
+Requires-Dist: pillow; extra == "all"
+Requires-Dist: recommonmark; extra == "all"
+Requires-Dist: seaborn; extra == "all"
+Requires-Dist: sphinx>=3.5; extra == "all"
+Requires-Dist: sphinx-argparse; extra == "all"
+Requires-Dist: sphinx-copybutton; extra == "all"
+Requires-Dist: sphinx-gallery; extra == "all"
+Requires-Dist: sphinx_rtd_theme>=1.3.0; extra == "all"
+Requires-Dist: sphinxcontrib-bibtex; extra == "all"
+Requires-Dist: sphinxcontrib-mermaid; extra == "all"
+Requires-Dist: docutils<0.21,>=0.18.1; extra == "all"
+Requires-Dist: coverage; extra == "all"
+Requires-Dist: coveralls; extra == "all"
+Requires-Dist: flake8-black; extra == "all"
+Requires-Dist: flake8-docstrings; extra == "all"
+Requires-Dist: flake8-isort; extra == "all"
+Requires-Dist: pytest; extra == "all"
+Requires-Dist: pytest-cov; extra == "all"
+
+# NiMARE
+
+NiMARE (Neuroimaging Meta-Analysis Research Environment) is a Python
+package for coordinate-based and image-based meta-analysis of
+neuroimaging data.
+
+# License
+
+`NiMARE` is licensed under the terms of the MIT license. See the file
+\'LICENSE\' for information on the history of this software, terms &
+conditions for usage, and a DISCLAIMER OF ALL WARRANTIES.
+
+All trademarks referenced herein are property of their respective
+holders.
+
+Copyright (c) 2018\--, NiMARE developers
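The metadata above is what pip resolves at install time, including the optional extras (`gzip`, `cbmr`, `doc`, `tests`, `minimum`, `all`). A minimal sketch, using only the standard library, of how one might confirm it against an installed copy:

```python
# Sketch: read the installed distribution's metadata, which mirrors the PKG-INFO above.
from importlib.metadata import metadata, requires

info = metadata("nimare")
print(info["Name"], info["Version"])  # "nimare 0.4.2rc4" if this release is installed
print(requires("nimare")[:3])         # first few Requires-Dist entries listed above
```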
nimare-0.4.2rc4/README.md
ADDED
@@ -0,0 +1,95 @@
+# NiMARE: Neuroimaging Meta-Analysis Research Environment
+A Python library for coordinate- and image-based meta-analysis.
+
+[](https://pypi.python.org/pypi/nimare/)
+[](https://pypi.python.org/pypi/nimare/)
+[](https://github.com/neurostuff/NiMARE)
+[](https://zenodo.org/badge/latestdoi/117724523)
+[](https://opensource.org/licenses/MIT)
+[](https://github.com/neurostuff/NiMARE/actions/workflows/testing.yml)
+[](http://nimare.readthedocs.io/en/stable/?badge=stable)
+[](https://codecov.io/gh/neurostuff/nimare)
+[](https://github.com/psf/black)
+[](https://mattermost.brainhack.org/brainhack/channels/nimare)
+[](https://scicrunch.org/scicrunch/Resources/record/nlx_144509-1/SCR_017398/resolver?q=nimare&l=nimare)
+[](https://doi.org/10.52294/001c.87681)
+[](https://doi.org/10.55458/neurolibre.00007)
+
+Currently, NiMARE implements a range of image- and coordinate-based meta-analytic algorithms, as well as several methods for advanced meta-analytic methods, like automated annotation and functional decoding.
+
+## Installation
+
+Please see our [installation instructions](https://nimare.readthedocs.io/en/stable/installation.html)
+for information on how to install NiMARE.
+
+### Installation with pip
+```
+pip install nimare
+```
+
+### Local installation (development version)
+```
+pip install git+https://github.com/neurostuff/NiMARE.git
+```
+
+## Citing NiMARE
+
+If you use NiMARE in your research, we recommend citing the Zenodo DOI associated with the NiMARE version you used,
+as well as the Aperture Neuro journal article for the NiMARE Jupyter book.
+You can find the Zenodo DOI associated with each NiMARE release at https://zenodo.org/record/6642243#.YqiXNy-B1KM.
+
+```BibTeX
+# This is the Aperture Neuro paper.
+@article{Salo2023,
+  doi = {10.52294/001c.87681},
+  url = {https://doi.org/10.52294/001c.87681},
+  year = {2023},
+  volume = {3},
+  pages = {1 - 32},
+  author = {Taylor Salo and Tal Yarkoni and Thomas E. Nichols and Jean-Baptiste Poline and Murat Bilgel and Katherine L. Bottenhorn and Dorota Jarecka and James D. Kent and Adam Kimbler and Dylan M. Nielson and Kendra M. Oudyk and Julio A. Peraza and Alexandre Pérez and Puck C. Reeders and Julio A. Yanes and Angela R. Laird},
+  title = {NiMARE: Neuroimaging Meta-Analysis Research Environment},
+  journal = {Aperture Neuro}
+}
+
+# This is the Zenodo citation for version 0.0.11.
+@software{salo_taylor_2022_5826281,
+  author = {Salo, Taylor and
+            Yarkoni, Tal and
+            Nichols, Thomas E. and
+            Poline, Jean-Baptiste and
+            Kent, James D. and
+            Gorgolewski, Krzysztof J. and
+            Glerean, Enrico and
+            Bottenhorn, Katherine L. and
+            Bilgel, Murat and
+            Wright, Jessey and
+            Reeders, Puck and
+            Kimbler, Adam and
+            Nielson, Dylan N. and
+            Yanes, Julio A. and
+            Pérez, Alexandre and
+            Oudyk, Kendra M. and
+            Jarecka, Dorota and
+            Enge, Alexander and
+            Peraza, Julio A. and
+            Laird, Angela R.},
+  title = {neurostuff/NiMARE: 0.0.11},
+  month = jan,
+  year = 2022,
+  publisher = {Zenodo},
+  version = {0.0.11},
+  doi = {10.5281/zenodo.5826281},
+  url = {https://doi.org/10.5281/zenodo.5826281}
+}
+```
+
+To cite NiMARE in your manuscript, we recommend something like the following:
+
+> We used NiMARE v0.0.11 (RRID:SCR_017398; Salo et al., 2022a; Salo et al., 2022b).
+
+## Contributing
+
+Please see our [contributing guidelines](https://github.com/neurostuff/NiMARE/blob/main/CONTRIBUTING.md)
+for more information on contributing to NiMARE.
+
+We ask that all contributions to `NiMARE` respect our [code of conduct](https://github.com/neurostuff/NiMARE/blob/main/CODE_OF_CONDUCT.md).
nimare-0.4.2rc4/benchmarks/__init__.py
File without changes
nimare-0.4.2rc4/benchmarks/bench_cbma.py
ADDED
@@ -0,0 +1,57 @@
+"""Benchmark the CBMA estimators."""
+
+import os
+
+import nimare
+from nimare.meta.cbma import ALE, KDA, MKDAChi2, MKDADensity
+from nimare.tests.utils import get_test_data_path
+
+
+class TimeCBMA:
+    """Time CBMA estimators."""
+
+    def setup(self):
+        """
+        Setup the data.
+
+        Loads the dataset required for the benchmarks.
+        """
+        self.dataset = nimare.dataset.Dataset(
+            os.path.join(get_test_data_path(), "test_pain_dataset.json")
+        )
+
+    def time_ale(self):
+        """
+        Time the ALE estimator.
+
+        Fits the ALE estimator to the dataset and measures the time taken.
+        """
+        meta = ALE()
+        meta.fit(self.dataset)
+
+    def time_mkdadensity(self):
+        """
+        Time the MKDADensity estimator.
+
+        Fits the MKDADensity estimator to the dataset and measures the time taken.
+        """
+        meta = MKDADensity()
+        meta.fit(self.dataset)
+
+    def time_kda(self):
+        """
+        Time the KDA estimator.
+
+        Fits the KDA estimator to the dataset and measures the time taken.
+        """
+        meta = KDA()
+        meta.fit(self.dataset)
+
+    def time_mkdachi2(self):
+        """
+        Time the MKDAChi2 estimator.
+
+        Fits the MKDAChi2 estimator to the dataset and measures the time taken.
+        """
+        meta = MKDAChi2()
+        meta.fit(self.dataset, self.dataset)
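The benchmark class above times four CBMA estimators against the bundled pain dataset (asv calls each `time_*` method and records wall time). For orientation, the ALE target can be reproduced by hand with the same imports the benchmark uses; a minimal sketch:

```python
# Sketch: run the ALE timing target from bench_cbma.py outside of asv.
import os
import time

import nimare
from nimare.meta.cbma import ALE
from nimare.tests.utils import get_test_data_path

dset = nimare.dataset.Dataset(os.path.join(get_test_data_path(), "test_pain_dataset.json"))

start = time.perf_counter()
ALE().fit(dset)  # the fitted result is discarded here; only the wall time is of interest
print(f"ALE fit: {time.perf_counter() - start:.1f} s")
```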
nimare-0.4.2rc4/nimare/__init__.py
ADDED
@@ -0,0 +1,45 @@
+"""NiMARE: Neuroimaging Meta-Analysis Research Environment."""
+
+import logging
+import warnings
+
+from ._version import get_versions
+
+logging.basicConfig(level=logging.INFO)
+
+with warnings.catch_warnings(record=True) as w:
+    warnings.simplefilter("ignore")
+    from . import (
+        annotate,
+        base,
+        correct,
+        dataset,
+        decode,
+        io,
+        meta,
+        reports,
+        resources,
+        stats,
+        utils,
+        workflows,
+    )
+
+__version__ = get_versions()["version"]
+
+__all__ = [
+    "base",
+    "dataset",
+    "meta",
+    "correct",
+    "annotate",
+    "decode",
+    "resources",
+    "io",
+    "stats",
+    "utils",
+    "reports",
+    "workflows",
+    "__version__",
+]
+
+del get_versions
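Because the top-level package eagerly imports its subpackages and exposes the versioneer-derived version string, a post-install sanity check can stay very small; a sketch:

```python
# Sketch: the eager imports above make the subpackages available directly off `nimare`.
import nimare

print(nimare.__version__)           # "0.4.2rc4" for this release
print(nimare.meta, nimare.dataset)  # already-imported submodules, per __init__.py
```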
nimare-0.4.2rc4/nimare/_version.py
ADDED
@@ -0,0 +1,21 @@
+
+# This file was generated by 'versioneer.py' (0.21) from
+# revision-control system data, or from the parent directory name of an
+# unpacked source archive. Distribution tarballs contain a pre-generated copy
+# of this file.
+
+import json
+
+version_json = '''
+{
+ "date": "2025-03-10T17:13:59-0500",
+ "dirty": false,
+ "error": null,
+ "full-revisionid": "72f0013b0d3d8243aedb7e29fa66a8e4e493eb38",
+ "version": "0.4.2rc4"
+}
+'''  # END VERSION_JSON
+
+
+def get_versions():
+    return json.loads(version_json)
nimare-0.4.2rc4/nimare/annotate/__init__.py
ADDED
@@ -0,0 +1,21 @@
+"""Automated annotation tools."""
+
+from . import cogat, gclda, lda, text, utils
+from .cogat import CogAtLemmatizer, expand_counts, extract_cogat
+from .gclda import GCLDAModel
+from .lda import LDAModel
+from .text import generate_counts
+
+__all__ = [
+    "CogAtLemmatizer",
+    "expand_counts",
+    "extract_cogat",
+    "GCLDAModel",
+    "LDAModel",
+    "generate_counts",
+    "cogat",
+    "gclda",
+    "lda",
+    "text",
+    "utils",
+]
nimare-0.4.2rc4/nimare/annotate/cogat.py
ADDED
@@ -0,0 +1,213 @@
+"""Automated annotation of Cognitive Atlas labels."""
+
+import logging
+import re
+
+import numpy as np
+import pandas as pd
+
+from nimare.annotate import utils
+from nimare.extract import download_cognitive_atlas
+from nimare.utils import _uk_to_us
+
+LGR = logging.getLogger(__name__)
+
+
+class CogAtLemmatizer(object):
+    """Replace synonyms and abbreviations with Cognitive Atlas identifiers in text.
+
+    Parameters
+    ----------
+    ontology_df : :obj:`pandas.DataFrame`, optional
+        DataFrame with three columns (id, name, alias) and one row for each alias
+        (e.g., synonym or abbreviation) for each term in the Cognitive Atlas.
+        If None, loads ontology file from resources folder. Default is None.
+
+    Attributes
+    ----------
+    ontology_ : :obj:`pandas.DataFrame`
+        Ontology in DataFrame form.
+    regex_ : :obj:`dict`
+        Dictionary linking aliases in ontology to regular expressions for lemmatization.
+
+    Notes
+    -----
+    The Cognitive Atlas :footcite:p:`poldrack2011cognitive` is an ontology for describing
+    cognitive neuroscience concepts and tasks.
+
+    References
+    ----------
+    .. footbibliography::
+
+    See Also
+    --------
+    nimare.extract.download_cognitive_atlas : This function will be called automatically if
+        ``ontology_df`` is not provided.
+    """
+
+    def __init__(self, ontology_df=None):
+        if ontology_df is None:
+            cogat = download_cognitive_atlas()
+            self.ontology_ = pd.read_csv(cogat["ids"])
+        else:
+            assert isinstance(ontology_df, pd.DataFrame)
+            self.ontology_ = ontology_df
+        assert "id" in self.ontology_.columns
+        assert "name" in self.ontology_.columns
+        assert "alias" in self.ontology_.columns
+
+        # Create regex dictionary
+        regex_dict = {}
+        for term in ontology_df["alias"].values:
+            term_for_regex = term.replace("(", r"\(").replace(")", r"\)")
+            regex = "\\b" + term_for_regex + "\\b"
+            pattern = re.compile(regex, re.MULTILINE | re.IGNORECASE)
+            regex_dict[term] = pattern
+        self.regex_ = regex_dict
+
+    def transform(self, text, convert_uk=True):
+        """Replace terms in text with unique Cognitive Atlas identifiers.
+
+        Parameters
+        ----------
+        text : :obj:`str`
+            Text to convert.
+        convert_uk : :obj:`bool`, optional
+            Convert British English words in text to American English versions.
+            Default is True.
+
+        Returns
+        -------
+        text : :obj:`str`
+            Text with Cognitive Atlas terms replaced with unique Cognitive
+            Atlas identifiers.
+        """
+        if convert_uk:
+            text = _uk_to_us(text)
+
+        for term_idx in self.ontology_.index:
+            term = self.ontology_["alias"].loc[term_idx]
+            term_id = self.ontology_["id"].loc[term_idx]
+            text = re.sub(self.regex_[term], term_id, text)
+        return text
+
+
+def extract_cogat(text_df, id_df=None, text_column="abstract"):
+    """Extract Cognitive Atlas terms and count instances using regular expressions.
+
+    Parameters
+    ----------
+    text_df : (D x 2) :obj:`pandas.DataFrame`
+        Pandas dataframe with at least two columns: 'id' and the text.
+        D = document.
+
+    id_df : (T x 3) :obj:`pandas.DataFrame`
+        Cognitive Atlas ontology dataframe with one row for each term and at least three columns:
+
+        - ``"id"``: A unique identifier for each term.
+        - ``"alias"``: A natural language expression for each term.
+        - ``"name"``: The preferred name of each term. Currently unused.
+
+    text_column : :obj:`str`, optional
+        Name of column in text_df that contains text. Default is 'abstract'.
+
+    Returns
+    -------
+    counts_df : (D x T) :obj:`pandas.DataFrame`
+        Term counts for documents in the corpus.
+        One row for each document and one column for each term.
+
+    rep_text_df : (D x 2) :obj:`pandas.DataFrame`
+        An updated version of the ``text_df`` DataFrame with terms in the text column replaced
+        with their CogAt IDs.
+
+    Notes
+    -----
+    The Cognitive Atlas :footcite:p:`poldrack2011cognitive` is an ontology for describing
+    cognitive neuroscience concepts and tasks.
+
+    References
+    ----------
+    .. footbibliography::
+
+    See Also
+    --------
+    nimare.extract.download_cognitive_atlas : This function will be called automatically if
+        ``id_df`` is not provided.
+    """
+    text_df = text_df.copy()
+    if id_df is None:
+        cogat = download_cognitive_atlas()
+        id_df = pd.read_csv(cogat["ids"])
+    gazetteer = sorted(id_df["id"].unique().tolist())
+    if "id" in text_df.columns:
+        text_df.set_index("id", inplace=True)
+
+    text_df[text_column] = text_df[text_column].fillna("")
+    text_df[text_column] = text_df[text_column].apply(_uk_to_us)
+
+    # Create regex dictionary
+    regex_dict = {}
+    for term in id_df["alias"].values:
+        term_for_regex = term.replace("(", r"\(").replace(")", r"\)")
+        regex = "\\b" + term_for_regex + "\\b"
+        pattern = re.compile(regex, re.MULTILINE | re.IGNORECASE)
+        regex_dict[term] = pattern
+
+    # Count
+    count_arr = np.zeros((text_df.shape[0], len(gazetteer)), int)
+    counts_df = pd.DataFrame(columns=gazetteer, index=text_df.index, data=count_arr)
+    for term_idx in id_df.index:
+        term = id_df["alias"].loc[term_idx]
+        term_id = id_df["id"].loc[term_idx]
+        pattern = regex_dict[term]
+        counts_df[term_id] += text_df[text_column].str.count(pattern).astype(int)
+        text_df[text_column] = text_df[text_column].str.replace(pattern, term_id, regex=True)
+
+    return counts_df, text_df
+
+
+def expand_counts(counts_df, rel_df=None, weights=None):
+    """Perform hierarchical expansion of counts across labels.
+
+    Parameters
+    ----------
+    counts_df : (D x T) :obj:`pandas.DataFrame`
+        Term counts for a corpus. T = term, D = document.
+    rel_df : :obj:`pandas.DataFrame`
+        Long-form DataFrame of term-term relationships with at least three columns:
+        'input', 'output', and 'rel_type'.
+    weights : :obj:`dict`
+        Dictionary of weights per relationship type. E.g., {'isKind': 1}.
+        Unspecified relationship types default to 0.
+
+    Returns
+    -------
+    weighted_df : (D x T) :obj:`pandas.DataFrame`
+        Term counts for a corpus after hierarchical expansion.
+    """
+    if rel_df is None:
+        cogat = download_cognitive_atlas()
+        rel_df = pd.read_csv(cogat["relationships"])
+    weights_df = utils._generate_weights(rel_df, weights=weights)
+
+    # First reorg counts_df so it has the same columns in the same order as
+    # weight_df
+    counts_columns = counts_df.columns.tolist()
+    weights_columns = weights_df.columns.tolist()
+    w_not_c = set(weights_columns) - set(counts_columns)
+    c_not_w = set(counts_columns) - set(weights_columns)
+    if c_not_w:
+        raise Exception(f"Columns found in counts but not weights: {', '.join(c_not_w)}")
+
+    for col in w_not_c:
+        counts_df[col] = 0
+
+    counts_df = counts_df[weights_columns]
+
+    # Now matrix multiplication
+    counts = counts_df.values
+    weights = weights_df.values
+    weighted = np.dot(counts, weights)
+    weighted_df = pd.DataFrame(index=counts_df.index, columns=counts_df.columns, data=weighted)
+    return weighted_df
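As the docstrings above note, `CogAtLemmatizer` and `extract_cogat` accept any ontology-shaped DataFrame with `id`/`name`/`alias` columns, not only the downloaded Cognitive Atlas. A hypothetical toy example (the term IDs and abstracts below are made up for illustration):

```python
# Sketch with a made-up two-alias ontology; no Cognitive Atlas download needed.
import pandas as pd

from nimare.annotate.cogat import CogAtLemmatizer, extract_cogat

ontology = pd.DataFrame(
    {
        "id": ["trm_0001", "trm_0001"],
        "name": ["working memory", "working memory"],
        "alias": ["working memory", "WM"],
    }
)

# Replace aliases with their identifier in free text.
lemmatizer = CogAtLemmatizer(ontology_df=ontology)
print(lemmatizer.transform("The WM task probed working memory."))

# Count alias occurrences across a tiny corpus.
texts = pd.DataFrame(
    {
        "id": ["study-01", "study-02"],
        "abstract": ["a working memory paradigm", "no relevant terms here"],
    }
)
counts_df, rep_text_df = extract_cogat(texts, id_df=ontology, text_column="abstract")
print(counts_df)  # one row per study, one column per term ID
```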