vbi 0.2.1__tar.gz → 0.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (209)
  1. {vbi-0.2.1/vbi.egg-info → vbi-0.2.2}/PKG-INFO +94 -48
  2. vbi-0.2.2/README.md +167 -0
  3. {vbi-0.2.1 → vbi-0.2.2}/docs/conf.py +1 -1
  4. vbi-0.2.2/docs/docker_build.rst +968 -0
  5. vbi-0.2.2/docs/docker_quickstart.rst +169 -0
  6. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/do_cpp.ipynb +2 -9
  7. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/do_nb.ipynb +5 -19
  8. vbi-0.2.2/docs/examples/ghb_sde_cupy.ipynb +184 -0
  9. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/intro.ipynb +15 -8
  10. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/intro_feature.ipynb +93 -49
  11. vbi-0.2.2/docs/examples/jansen_rit_sde_cpp.ipynb +3876 -0
  12. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/jansen_rit_sde_cupy.ipynb +2205 -32
  13. vbi-0.2.2/docs/examples/jansen_rit_sde_numba.ipynb +7314 -0
  14. vbi-0.2.2/docs/examples/mpr_sde_cpp.ipynb +215 -0
  15. vbi-0.2.2/docs/examples/mpr_sde_cupy.ipynb +390 -0
  16. vbi-0.2.2/docs/examples/mpr_sde_numba.ipynb +957 -0
  17. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/mpr_tvbk.ipynb +1 -1
  18. vbi-0.2.2/docs/examples/vep_sde.ipynb +562 -0
  19. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/vep_sde_numba.ipynb +41 -48
  20. vbi-0.2.2/docs/examples/wilson_cowan_cupy.ipynb +1306 -0
  21. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/wilson_cowan_sde_numba.ipynb +1860 -225
  22. vbi-0.2.2/docs/examples/ww_full_sde_cupy.ipynb +30830 -0
  23. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/ww_full_sde_numba.ipynb +1469 -3
  24. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/ww_sde_torch_kong.ipynb +2764 -51
  25. vbi-0.2.2/docs/index.rst +300 -0
  26. {vbi-0.2.1 → vbi-0.2.2}/pyproject.toml +44 -41
  27. {vbi-0.2.1 → vbi-0.2.2}/setup.py +1 -1
  28. vbi-0.2.2/vbi/__init__.py +91 -0
  29. vbi-0.2.2/vbi/_version.py +1 -0
  30. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/features.py +11 -3
  31. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/features_utils.py +9 -1
  32. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/utility.py +10 -2
  33. {vbi-0.2.1 → vbi-0.2.2}/vbi/inference.py +24 -3
  34. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/vep.py +2 -2
  35. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/mpr.py +1 -1
  36. vbi-0.2.2/vbi/optional_deps.py +130 -0
  37. vbi-0.2.2/vbi/tests/README.md +105 -0
  38. {vbi-0.2.1 → vbi-0.2.2}/vbi/tests/_test_mpr_nb.py +9 -2
  39. vbi-0.2.2/vbi/tests/pytest.ini +6 -0
  40. vbi-0.2.2/vbi/tests/run_tests.py +100 -0
  41. {vbi-0.2.1 → vbi-0.2.2}/vbi/tests/test_features.py +39 -0
  42. {vbi-0.2.1 → vbi-0.2.2}/vbi/tests/test_ghb_cupy.py +12 -2
  43. {vbi-0.2.1 → vbi-0.2.2}/vbi/tests/test_mpr_cupy.py +12 -2
  44. {vbi-0.2.1 → vbi-0.2.2}/vbi/tests/test_mpr_numba.py +3 -0
  45. vbi-0.2.2/vbi/tests/test_suite.py +94 -0
  46. {vbi-0.2.1 → vbi-0.2.2}/vbi/utils.py +10 -4
  47. {vbi-0.2.1 → vbi-0.2.2/vbi.egg-info}/PKG-INFO +94 -48
  48. {vbi-0.2.1 → vbi-0.2.2}/vbi.egg-info/SOURCES.txt +6 -54
  49. {vbi-0.2.1 → vbi-0.2.2}/vbi.egg-info/requires.txt +17 -4
  50. vbi-0.2.1/.github/workflows/docker-image.yml +0 -51
  51. vbi-0.2.1/.github/workflows/tests.yml +0 -24
  52. vbi-0.2.1/.gitignore +0 -45
  53. vbi-0.2.1/.readthedocs.yaml +0 -19
  54. vbi-0.2.1/CHANGELOG.md +0 -112
  55. vbi-0.2.1/CONTRIBUTING.md +0 -64
  56. vbi-0.2.1/Dockerfile +0 -51
  57. vbi-0.2.1/README.md +0 -131
  58. vbi-0.2.1/docs/examples/.ipynb_checkpoints/intro-checkpoint.ipynb +0 -56
  59. vbi-0.2.1/docs/examples/__pycache__/helpers.cpython-310.pyc +0 -0
  60. vbi-0.2.1/docs/examples/ghb_sde_cupy.ipynb +0 -206
  61. vbi-0.2.1/docs/examples/jansen_rit_sde_cpp.ipynb +0 -460
  62. vbi-0.2.1/docs/examples/jansen_rit_sde_numba.ipynb +0 -587
  63. vbi-0.2.1/docs/examples/mpr_sde_cpp.ipynb +0 -174
  64. vbi-0.2.1/docs/examples/mpr_sde_cupy.ipynb +0 -605
  65. vbi-0.2.1/docs/examples/mpr_sde_numba.ipynb +0 -1020
  66. vbi-0.2.1/docs/examples/output/damp_oscillator_ts.jpeg +0 -0
  67. vbi-0.2.1/docs/examples/output/stat_vec.pt +0 -0
  68. vbi-0.2.1/docs/examples/output/theta.pt +0 -0
  69. vbi-0.2.1/docs/examples/output/vep/posterior.pkl +0 -0
  70. vbi-0.2.1/docs/examples/output/vep84/posterior.pkl +0 -0
  71. vbi-0.2.1/docs/examples/sbi-logs/NPE_C/2025-03-24T17_33_52.754987/events.out.tfevents.1742834032.ziaee.319710.0 +0 -0
  72. vbi-0.2.1/docs/examples/vep_sde.ipynb +0 -563
  73. vbi-0.2.1/docs/examples/wilson_cowan_cupy.ipynb +0 -457
  74. vbi-0.2.1/docs/examples/ww_full_sde_cupy.ipynb +0 -472
  75. vbi-0.2.1/docs/index.rst +0 -156
  76. vbi-0.2.1/vbi/__init__.py +0 -37
  77. vbi-0.2.1/vbi/_version.py +0 -17
  78. vbi-0.2.1/vbi/models/cpp/_src/__pycache__/mpr_sde.cpython-310.pyc +0 -0
  79. vbi-0.2.1/vbi/models/cpp/_src/_do.cpython-310-x86_64-linux-gnu.so +0 -0
  80. vbi-0.2.1/vbi/models/cpp/_src/_jr_sdde.cpython-310-x86_64-linux-gnu.so +0 -0
  81. vbi-0.2.1/vbi/models/cpp/_src/_jr_sde.cpython-310-x86_64-linux-gnu.so +0 -0
  82. vbi-0.2.1/vbi/models/cpp/_src/_km_sde.cpython-310-x86_64-linux-gnu.so +0 -0
  83. vbi-0.2.1/vbi/models/cpp/_src/_mpr_sde.cpython-310-x86_64-linux-gnu.so +0 -0
  84. vbi-0.2.1/vbi/models/cpp/_src/_vep.cpython-310-x86_64-linux-gnu.so +0 -0
  85. vbi-0.2.1/vbi/models/cpp/_src/_wc_ode.cpython-310-x86_64-linux-gnu.so +0 -0
  86. vbi-0.2.1/vbi/models/cpp/_src/do.py +0 -467
  87. vbi-0.2.1/vbi/models/cpp/_src/do_wrap.cxx +0 -12811
  88. vbi-0.2.1/vbi/models/cpp/_src/jr_sdde.py +0 -688
  89. vbi-0.2.1/vbi/models/cpp/_src/jr_sdde_wrap.cxx +0 -18718
  90. vbi-0.2.1/vbi/models/cpp/_src/jr_sde.py +0 -470
  91. vbi-0.2.1/vbi/models/cpp/_src/jr_sde_wrap.cxx +0 -13406
  92. vbi-0.2.1/vbi/models/cpp/_src/km_sde.py +0 -671
  93. vbi-0.2.1/vbi/models/cpp/_src/km_sde_wrap.cxx +0 -17367
  94. vbi-0.2.1/vbi/models/cpp/_src/mpr_sde.py +0 -711
  95. vbi-0.2.1/vbi/models/cpp/_src/mpr_sde_wrap.cxx +0 -18618
  96. vbi-0.2.1/vbi/models/cpp/_src/vep.py +0 -464
  97. vbi-0.2.1/vbi/models/cpp/_src/vep_wrap.cxx +0 -12968
  98. vbi-0.2.1/vbi/models/cpp/_src/wc_ode.py +0 -686
  99. vbi-0.2.1/vbi/models/cpp/_src/wc_ode_wrap.cxx +0 -24263
  100. vbi-0.2.1/vbi/models/pytorch/data/input/ROI_sim.mat +0 -0
  101. vbi-0.2.1/vbi/models/pytorch/data/input/fc_test.csv +0 -68
  102. vbi-0.2.1/vbi/models/pytorch/data/input/fc_train.csv +0 -68
  103. vbi-0.2.1/vbi/models/pytorch/data/input/fc_vali.csv +0 -68
  104. vbi-0.2.1/vbi/models/pytorch/data/input/fcd_test.mat +0 -0
  105. vbi-0.2.1/vbi/models/pytorch/data/input/fcd_test_high_window.mat +0 -0
  106. vbi-0.2.1/vbi/models/pytorch/data/input/fcd_test_low_window.mat +0 -0
  107. vbi-0.2.1/vbi/models/pytorch/data/input/fcd_train.mat +0 -0
  108. vbi-0.2.1/vbi/models/pytorch/data/input/fcd_vali.mat +0 -0
  109. vbi-0.2.1/vbi/models/pytorch/data/input/myelin.csv +0 -68
  110. vbi-0.2.1/vbi/models/pytorch/data/input/rsfc_gradient.csv +0 -68
  111. vbi-0.2.1/vbi/models/pytorch/data/input/run_label_testset.mat +0 -0
  112. vbi-0.2.1/vbi/models/pytorch/data/input/sc_test.csv +0 -68
  113. vbi-0.2.1/vbi/models/pytorch/data/input/sc_train.csv +0 -68
  114. vbi-0.2.1/vbi/models/pytorch/data/input/sc_vali.csv +0 -68
  115. vbi-0.2.1/vbi/papers/pavlides_pcb_2015/pavlides.py +0 -211
  116. vbi-0.2.1/vbi/tests/test_suite.py +0 -19
  117. vbi-0.2.1/vbi_log.png +0 -0
  118. {vbi-0.2.1 → vbi-0.2.2}/LICENSE +0 -0
  119. {vbi-0.2.1 → vbi-0.2.2}/MANIFEST.in +0 -0
  120. {vbi-0.2.1 → vbi-0.2.2}/docs/API.rst +0 -0
  121. {vbi-0.2.1 → vbi-0.2.2}/docs/Makefile +0 -0
  122. {vbi-0.2.1 → vbi-0.2.2}/docs/_static/Fig1.png +0 -0
  123. {vbi-0.2.1 → vbi-0.2.2}/docs/_static/custom.css +0 -0
  124. {vbi-0.2.1 → vbi-0.2.2}/docs/_static/vbi_flowchart.png +0 -0
  125. {vbi-0.2.1 → vbi-0.2.2}/docs/_static/vbi_log.png +0 -0
  126. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/.gitattributes +0 -0
  127. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/data/centers.txt +0 -0
  128. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/data/centres.txt +0 -0
  129. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/data/cortical.txt +0 -0
  130. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/data/tract_lengths.txt +0 -0
  131. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/data/weights.txt +0 -0
  132. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/data/weights1.txt +0 -0
  133. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/data/weights_68.txt +0 -0
  134. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/helpers.py +0 -0
  135. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/my_features.json +0 -0
  136. {vbi-0.2.1 → vbi-0.2.2}/docs/examples/my_features.py +0 -0
  137. {vbi-0.2.1 → vbi-0.2.2}/docs/models.rst +0 -0
  138. {vbi-0.2.1 → vbi-0.2.2}/docs/requirements.txt +0 -0
  139. {vbi-0.2.1 → vbi-0.2.2}/setup.cfg +0 -0
  140. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/__init__.py +0 -0
  141. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_84/centers.txt +0 -0
  142. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_84/centres.txt +0 -0
  143. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_84/cortical.txt +0 -0
  144. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_84/tract_lengths.txt +0 -0
  145. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_84/weights.txt +0 -0
  146. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_88/Aud_88.txt +0 -0
  147. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_88/Bold.npz +0 -0
  148. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_88/Labels.txt +0 -0
  149. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_88/Region_labels.txt +0 -0
  150. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_88/tract_lengths.txt +0 -0
  151. {vbi-0.2.1 → vbi-0.2.2}/vbi/dataset/connectivity_88/weights.txt +0 -0
  152. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/__init__.py +0 -0
  153. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/calc_features.py +0 -0
  154. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/features.json +0 -0
  155. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/features_settings.py +0 -0
  156. {vbi-0.2.1 → vbi-0.2.2}/vbi/feature_extraction/infodynamics.jar +0 -0
  157. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/__init__.py +0 -0
  158. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/__init__.py +0 -0
  159. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/__init__.py +0 -0
  160. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/bold.hpp +0 -0
  161. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/do.hpp +0 -0
  162. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/do.i +0 -0
  163. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/jr_sdde.hpp +0 -0
  164. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/jr_sdde.i +0 -0
  165. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/jr_sde.hpp +0 -0
  166. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/jr_sde.i +0 -0
  167. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/km_sde.hpp +0 -0
  168. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/km_sde.i +0 -0
  169. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/makefile +0 -0
  170. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/mpr_sde.hpp +0 -0
  171. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/mpr_sde.i +0 -0
  172. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/utility.hpp +0 -0
  173. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/vep.hpp +0 -0
  174. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/vep.i +0 -0
  175. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/wc_ode.hpp +0 -0
  176. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/_src/wc_ode.i +0 -0
  177. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/damp_oscillator.py +0 -0
  178. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/jansen_rit.py +0 -0
  179. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/km.py +0 -0
  180. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/mpr.py +0 -0
  181. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cpp/wc.py +0 -0
  182. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/__init__.py +0 -0
  183. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/bold.py +0 -0
  184. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/ghb.py +0 -0
  185. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/jansen_rit.py +0 -0
  186. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/km.py +0 -0
  187. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/mpr.py +0 -0
  188. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/mpr_modified_bold.py +0 -0
  189. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/utils.py +0 -0
  190. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/wilson_cowan.py +0 -0
  191. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/cupy/ww.py +0 -0
  192. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/__init__.py +0 -0
  193. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/damp_oscillator.py +0 -0
  194. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/ghb.py +0 -0
  195. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/jansen_rit.py +0 -0
  196. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/vep.py +0 -0
  197. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/wilson_cowan.py +0 -0
  198. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/numba/ww.py +0 -0
  199. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/pytorch/__init__.py +0 -0
  200. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/pytorch/data/default_parameters.npz +0 -0
  201. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/pytorch/data/obs_kong0.npz +0 -0
  202. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/pytorch/ww_sde_kong.py +0 -0
  203. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/tvbk/__init__.py +0 -0
  204. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/tvbk/tvbk_wrapper.py +0 -0
  205. {vbi-0.2.1 → vbi-0.2.2}/vbi/models/tvbk/utils.py +0 -0
  206. {vbi-0.2.1 → vbi-0.2.2}/vbi/papers/__init__.py +0 -0
  207. {vbi-0.2.1 → vbi-0.2.2}/vbi/tests/__init__.py +0 -0
  208. {vbi-0.2.1 → vbi-0.2.2}/vbi.egg-info/dependency_links.txt +0 -0
  209. {vbi-0.2.1 → vbi-0.2.2}/vbi.egg-info/top_level.txt +0 -0
@@ -1,13 +1,14 @@
  Metadata-Version: 2.4
  Name: vbi
- Version: 0.2.1
+ Version: 0.2.2
  Summary: Virtual brain inference.
  Author-email: Abolfazl Ziaeemehr <a.ziaeemehr@gmail.com>, Meysam Hashemi <meysam.hashemi@gmail.com>, Marmaduke Woodman <marmaduke.woodman@gmail.com>
- License-Expression: Apache-2.0
+ License: MIT
  Project-URL: homepage, https://ziaeemehr.github.io/vbi_paper/
  Project-URL: repository, https://github.com/Ziaeemehr/vbi_paper
  Classifier: Programming Language :: Python :: 3
  Classifier: Topic :: Scientific/Engineering :: Information Analysis
+ Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Operating System :: OS Independent
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
@@ -23,14 +24,22 @@ Requires-Dist: nbconvert
  Requires-Dist: matplotlib
  Requires-Dist: setuptools-scm
  Requires-Dist: tqdm
- Requires-Dist: sbi
- Requires-Dist: torch
  Requires-Dist: parameterized
  Requires-Dist: scikit-learn
  Requires-Dist: pycatch22
  Requires-Dist: pytest
  Requires-Dist: swig
  Requires-Dist: rich
+ Provides-Extra: light
+ Provides-Extra: light-gpu
+ Requires-Dist: cupy; extra == "light-gpu"
+ Provides-Extra: inference
+ Requires-Dist: sbi; extra == "inference"
+ Requires-Dist: torch; extra == "inference"
+ Provides-Extra: inference-gpu
+ Requires-Dist: sbi; extra == "inference-gpu"
+ Requires-Dist: torch; extra == "inference-gpu"
+ Requires-Dist: cupy; extra == "inference-gpu"
  Provides-Extra: dev
  Provides-Extra: docs
  Requires-Dist: sphinx; extra == "docs"
@@ -39,17 +48,18 @@ Requires-Dist: nbformat; extra == "docs"
39
48
  Requires-Dist: nbsphinx; extra == "docs"
40
49
  Requires-Dist: cloud_sptheme; extra == "docs"
41
50
  Requires-Dist: sphinx_bootstrap_theme; extra == "docs"
42
- Provides-Extra: cu
43
- Requires-Dist: cupy; extra == "cu"
44
51
  Provides-Extra: all
45
- Requires-Dist: pytest; extra == "all"
52
+ Requires-Dist: sbi; extra == "all"
53
+ Requires-Dist: torch; extra == "all"
54
+ Requires-Dist: cupy; extra == "all"
46
55
  Requires-Dist: sphinx; extra == "all"
47
56
  Requires-Dist: numpydoc; extra == "all"
48
57
  Requires-Dist: nbformat; extra == "all"
49
58
  Requires-Dist: nbsphinx; extra == "all"
50
59
  Requires-Dist: cloud_sptheme; extra == "all"
51
60
  Requires-Dist: sphinx_bootstrap_theme; extra == "all"
52
- Requires-Dist: cupy; extra == "all"
61
+ Provides-Extra: cu
62
+ Requires-Dist: cupy; extra == "cu"
53
63
  Dynamic: license-file
54
64
 
55
65
  [![Test](https://github.com/ins-amu/vbi/actions/workflows/tests.yml/badge.svg)](https://github.com/ins-amu/vbi/actions/workflows/tests.yml)
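The restructured extras above (light, light-gpu, inference, inference-gpu, all, cu) move sbi, torch, and cupy out of the core requirements, so a bare `pip install vbi` no longer pulls them in and code paths that need them have to cope with their absence. Below is a minimal, illustrative sketch of the guarded-import pattern such a split typically relies on; it does not reproduce the actual contents of the new vbi/optional_deps.py, and the helper names `optional_import` and `require_torch` are hypothetical.

```python
# Illustrative sketch only: a generic guarded-import pattern for optional extras.
# This is NOT the code in vbi/optional_deps.py; `optional_import` and
# `require_torch` are hypothetical helper names.
from importlib import import_module


def optional_import(name):
    """Return the imported module if it is installed, otherwise None."""
    try:
        return import_module(name)
    except ImportError:
        return None


# Present only when the matching extra was installed:
torch = optional_import("torch")  # vbi[inference], vbi[inference-gpu], vbi[all]
cupy = optional_import("cupy")    # vbi[light-gpu], vbi[inference-gpu], vbi[all]


def require_torch():
    """Raise an actionable error when an inference feature needs PyTorch."""
    if torch is None:
        raise ImportError(
            "This feature requires PyTorch and SBI; install them with "
            "'pip install vbi[inference]' or 'pip install vbi[inference-gpu]'."
        )
    return torch
```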
@@ -69,22 +79,56 @@ Dynamic: license-file
  </p>


- ## installation
+ ## Installation

+ ### Quick Start
  ```bash
- conda env create --name vbi python=3.10
- conda activate vbi
- # from pip: Recommended
- pip install vbi
- # from source: More recent update
- git clone https://github.com/ins-amu/vbi.git
- cd vbi
- pip install .
-
- # pip install -e .[all,dev,docs]
-
- # To skip C++ compilation, use the following environment variable and install from source:
- SKIP_CPP=1 pip install -e .
+ conda env create --name vbi python=3.10
+ conda activate vbi
+ ```
+
+ ### Installation Options
+
+ VBI offers flexible installation options for different use cases:
+
+ ```bash
+ # Light version - CPU simulation only (numba + C++), without PyTorch and CuPy
+ # Includes: Brain simulation models, feature extraction, visualization
+ # Best for: Users who only need simulation capabilities, minimal dependencies
+ pip install vbi
+
+ # Light version with GPU acceleration (adds CuPy)
+ # Includes: Everything in light + CuPy for GPU-accelerated simulations
+ # Best for: GPU users who want fast simulations but don't need inference
+ pip install vbi[light-gpu]
+
+ # Parameter inference (CPU) - adds PyTorch and SBI
+ # Includes: Everything in light + PyTorch (CPU) + SBI for Bayesian inference
+ # Best for: Users who need parameter estimation but don't have GPU
+ pip install vbi[inference]
+
+ # Parameter inference with GPU - adds PyTorch, SBI, and CuPy
+ # Includes: Full functionality with GPU acceleration for both simulation and inference
+ # Best for: GPU users who need both fast simulation and parameter inference
+ pip install vbi[inference-gpu]
+
+ # All features - complete installation
+ # Includes: All above + documentation tools, development dependencies
+ # Best for: Developers, researchers who want all functionality
+ pip install vbi[all]
+ ```
+
+ ### From Source
+ ```bash
+ git clone https://github.com/ins-amu/vbi.git
+ cd vbi
+ pip install .
+
+ # Development installation with all dependencies
+ pip install -e .[all]
+
+ # To skip C++ compilation, use:
+ SKIP_CPP=1 pip install -e .
  ```

  ## Using Docker
@@ -98,13 +142,29 @@ To use the Docker image, you can pull it from the GitHub Container Registry and

  # with GPU
  docker run --gpus all --rm -it -p 8888:8888 ghcr.io/ins-amu/vbi:main
+ ```
+
+ ## Building and Using Docker Locally

+ For local development and customization, you can build the VBI Docker image yourself:
+
+ **Quick Start:**
+
+ ```bash
+ # Build the optimized image
+ docker build -t vbi:latest .
+
+ # Start with convenience script
+ ./run-vbi.sh start
+
+ # Or start manually
+ docker run --gpus all -p 8888:8888 vbi:latest
+ ```

- # or build it locally:
- docker build -t vbi-project . # build
- docker run --gpus all -it -p 8888:8888 vbi-project # use with gpu
+ **Complete Guides:**

- ```
+ - [Docker Build Guide](docs/docker_build.rst) - Comprehensive building guide with optimizations and troubleshooting
+ - [Docker Quickstart](docs/docker_quickstart.rst) - Quick reference for daily usage and container management

  - Quick check :

@@ -148,28 +208,14 @@ We welcome contributions to the VBI project! If you have suggestions, bug report
  ## Citation

  ```bibtex
- @article{VBI,
- title={Virtual Brain Inference (VBI): A flexible and integrative toolkit for efficient probabilistic inference on virtual brain models},
- author={Ziaeemehr, Abolfazl and Woodman, Marmaduke and Domide, Lia and Petkoski, Spase and Jirsa, Viktor and Hashemi, Meysam},
- DOI={10.7554/elife.106194.1},
- url={http://dx.doi.org/10.7554/eLife.106194.1},
- publisher={eLife Sciences Publications, Ltd},
- year={2025},
- abstract = {Network neuroscience has proven essential for understanding the principles and mechanisms
- underlying complex brain (dys)function and cognition. In this context, whole-brain network modeling–
- also known as virtual brain modeling–combines computational models of brain dynamics (placed at each network node)
- with individual brain imaging data (to coordinate and connect the nodes), advancing our understanding of
- the complex dynamics of the brain and its neurobiological underpinnings. However, there remains a critical
- need for automated model inversion tools to estimate control (bifurcation) parameters at large scales
- associated with neuroimaging modalities, given their varying spatio-temporal resolutions.
- This study aims to address this gap by introducing a flexible and integrative toolkit for efficient Bayesian inference
- on virtual brain models, called Virtual Brain Inference (VBI). This open-source toolkit provides fast simulations,
- taxonomy of feature extraction, efficient data storage and loading, and probabilistic machine learning algorithms,
- enabling biophysically interpretable inference from non-invasive and invasive recordings.
- Through in-silico testing, we demonstrate the accuracy and reliability of inference for commonly used
- whole-brain network models and their associated neuroimaging data. VBI shows potential to improve hypothesis
- evaluation in network neuroscience through uncertainty quantification, and contribute to advances in precision
- medicine by enhancing the predictive power of virtual brain models.}
+ @article{VBI,
+ author = {Ziaeemehr, Abolfazl and Woodman, Marmaduke and Domide, Lia and Petkoski, Spase and Jirsa, Viktor and Hashemi, Meysam},
+ title = {Virtual Brain Inference (VBI): A flexible and integrative toolkit for efficient probabilistic inference on virtual brain models},
+ journal = {bioRxiv},
+ year = {2025},
+ doi = {10.1101/2025.01.21.633922},
+ url = {https://doi.org/10.1101/2025.01.21.633922},
+ abstract = {Network neuroscience has proven essential for understanding the principles and mechanisms underlying complex brain (dys)function and cognition. In this context, whole-brain network modeling--also known as virtual brain modeling--combines computational models of brain dynamics (placed at each network node) with individual brain imaging data (to coordinate and connect the nodes), advancing our understanding of the complex dynamics of the brain and its neurobiological underpinnings. However, there remains a critical need for automated model inversion tools to estimate control (bifurcation) parameters at large scales and across neuroimaging modalities, given their varying spatio-temporal resolutions. This study aims to address this gap by introducing a flexible and integrative toolkit for efficient Bayesian inference on virtual brain models, called Virtual Brain Inference (VBI). This open-source toolkit provides fast simulations, taxonomy of feature extraction, efficient data storage and loading, and probabilistic machine learning algorithms, enabling biophysically interpretable inference from non-invasive and invasive recordings. Through in-silico testing, we demonstrate the accuracy and reliability of inference for commonly used whole-brain network models and their associated neuroimaging data. VBI shows potential to improve hypothesis evaluation in network neuroscience through uncertainty quantification, and contribute to advances in precision medicine by enhancing the predictive power of virtual brain models.}
  }
  ```

vbi-0.2.2/README.md ADDED
@@ -0,0 +1,167 @@
+ [![Test](https://github.com/ins-amu/vbi/actions/workflows/tests.yml/badge.svg)](https://github.com/ins-amu/vbi/actions/workflows/tests.yml)
+ [![Documentation Status](https://readthedocs.org/projects/vbi/badge/?version=latest)](https://vbi.readthedocs.io/latest/)
+ [![DOI](https://zenodo.org/badge/681090816.svg)](https://doi.org/10.5281/zenodo.14795543)
+ [![Docker Build](https://github.com/ins-amu/vbi/actions/workflows/docker-image.yml/badge.svg)](https://github.com/ins-amu/vbi/actions/workflows/docker-image.yml)
+ <!-- [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/ins-amu/vbi/main?labpath=docs/examples/intro.ipynb) -->
+
+
+ # VBI: Virtual Brain Inference
+ ---
+ [Getting Started](https://github.com/ins-amu/vbi/tree/main/docs/examples) |
+ [Documentation](https://vbi.readthedocs.io/latest/) |
+
+ <p align="center">
+ <img src="https://github.com/Ziaeemehr/vbi_paper/blob/main/vbi_log.png" width="250">
+ </p>
+
+
+ ## Installation
+
+ ### Quick Start
+ ```bash
+ conda env create --name vbi python=3.10
+ conda activate vbi
+ ```
+
+ ### Installation Options
+
+ VBI offers flexible installation options for different use cases:
+
+ ```bash
+ # Light version - CPU simulation only (numba + C++), without PyTorch and CuPy
+ # Includes: Brain simulation models, feature extraction, visualization
+ # Best for: Users who only need simulation capabilities, minimal dependencies
+ pip install vbi
+
+ # Light version with GPU acceleration (adds CuPy)
+ # Includes: Everything in light + CuPy for GPU-accelerated simulations
+ # Best for: GPU users who want fast simulations but don't need inference
+ pip install vbi[light-gpu]
+
+ # Parameter inference (CPU) - adds PyTorch and SBI
+ # Includes: Everything in light + PyTorch (CPU) + SBI for Bayesian inference
+ # Best for: Users who need parameter estimation but don't have GPU
+ pip install vbi[inference]
+
+ # Parameter inference with GPU - adds PyTorch, SBI, and CuPy
+ # Includes: Full functionality with GPU acceleration for both simulation and inference
+ # Best for: GPU users who need both fast simulation and parameter inference
+ pip install vbi[inference-gpu]
+
+ # All features - complete installation
+ # Includes: All above + documentation tools, development dependencies
+ # Best for: Developers, researchers who want all functionality
+ pip install vbi[all]
+ ```
+
+ ### From Source
+ ```bash
+ git clone https://github.com/ins-amu/vbi.git
+ cd vbi
+ pip install .
+
+ # Development installation with all dependencies
+ pip install -e .[all]
+
+ # To skip C++ compilation, use:
+ SKIP_CPP=1 pip install -e .
+ ```
+
+ ## Using Docker
+
+ To use the Docker image, you can pull it from the GitHub Container Registry and run it as follows:
+
+ ```bash
+ # Get it without building anything locally
+ # without GPU
+ docker run --rm -it -p 8888:8888 ghcr.io/ins-amu/vbi:main
+
+ # with GPU
+ docker run --gpus all --rm -it -p 8888:8888 ghcr.io/ins-amu/vbi:main
+ ```
+
+ ## Building and Using Docker Locally
+
+ For local development and customization, you can build the VBI Docker image yourself:
+
+ **Quick Start:**
+
+ ```bash
+ # Build the optimized image
+ docker build -t vbi:latest .
+
+ # Start with convenience script
+ ./run-vbi.sh start
+
+ # Or start manually
+ docker run --gpus all -p 8888:8888 vbi:latest
+ ```
+
+ **Complete Guides:**
+
+ - [Docker Build Guide](docs/docker_build.rst) - Comprehensive building guide with optimizations and troubleshooting
+ - [Docker Quickstart](docs/docker_quickstart.rst) - Quick reference for daily usage and container management
+
+ - Quick check :
+
+ ```python
+
+ import vbi
+ vbi.tests()
+ vbi.test_imports()
+
+
+ # Dependency Check
+ #
+ # Package Version Status
+ #━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+ # vbi v0.1.3 ✅ Available
+ # numpy 1.24.4 ✅ Available
+ # scipy 1.10.1 ✅ Available
+ # matplotlib 3.7.5 ✅ Available
+ # sbi 0.22.0 ✅ Available
+ # torch 2.4.1+cu121 ✅ Available
+ # cupy 12.3.0 ✅ Available
+ #
+ # Torch GPU available: True
+ # Torch device count: 1
+ # Torch CUDA version: 12.1
+ # CuPy GPU available: True
+ # CuPy device count: 1
+ # CUDA Version: 11.8
+ # Device Name: NVIDIA RTX A5000
+ # Total Memory: 23.68 GB
+ # Compute Capability: 8.6
+
+ ```
+
+
+ ## Feedback and Contributions
+
+ We welcome contributions to the VBI project! If you have suggestions, bug reports, or feature requests, please open an issue on our [GitHub repository](https://github.com/ins-amu/vbi/issues). To contribute code, fork the repository, create a new branch for your feature or bugfix, and submit a pull request. Make sure to follow our coding standards and include tests for your changes. For detailed guidelines, please refer to our [CONTRIBUTING.md](https://github.com/ins-amu/vbi/blob/main/CONTRIBUTING.md) file. Thank you for helping us improve VBI!
+
+
+ ## Citation
+
+ ```bibtex
+ @article{VBI,
+ author = {Ziaeemehr, Abolfazl and Woodman, Marmaduke and Domide, Lia and Petkoski, Spase and Jirsa, Viktor and Hashemi, Meysam},
+ title = {Virtual Brain Inference (VBI): A flexible and integrative toolkit for efficient probabilistic inference on virtual brain models},
+ journal = {bioRxiv},
+ year = {2025},
+ doi = {10.1101/2025.01.21.633922},
+ url = {https://doi.org/10.1101/2025.01.21.633922},
+ abstract = {Network neuroscience has proven essential for understanding the principles and mechanisms underlying complex brain (dys)function and cognition. In this context, whole-brain network modeling--also known as virtual brain modeling--combines computational models of brain dynamics (placed at each network node) with individual brain imaging data (to coordinate and connect the nodes), advancing our understanding of the complex dynamics of the brain and its neurobiological underpinnings. However, there remains a critical need for automated model inversion tools to estimate control (bifurcation) parameters at large scales and across neuroimaging modalities, given their varying spatio-temporal resolutions. This study aims to address this gap by introducing a flexible and integrative toolkit for efficient Bayesian inference on virtual brain models, called Virtual Brain Inference (VBI). This open-source toolkit provides fast simulations, taxonomy of feature extraction, efficient data storage and loading, and probabilistic machine learning algorithms, enabling biophysically interpretable inference from non-invasive and invasive recordings. Through in-silico testing, we demonstrate the accuracy and reliability of inference for commonly used whole-brain network models and their associated neuroimaging data. VBI shows potential to improve hypothesis evaluation in network neuroscience through uncertainty quantification, and contribute to advances in precision medicine by enhancing the predictive power of virtual brain models.}
+ }
+ ```
+
+ This research has received funding from:
+
+ - EU's Horizon 2020 Framework Programme for Research and Innovation under the Specific Grant Agreements:
+ - No. 101147319 (EBRAINS 2.0 Project)
+ - No. 101137289 (Virtual Brain Twin Project)
+ - No. 101057429 (project environMENTAL)
+ - Government grant managed by the Agence Nationale de la Recherche:
+ - Reference ANR-22-PESN-0012 (France 2030 program)
+
+ The funders had no role in study design, data collection and analysis, decision to publish, or preparation of the manuscript.
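The quick-check report in the new README above prints GPU visibility for both PyTorch and CuPy. As a rough cross-check, the sketch below queries the same information directly through standard torch and cupy calls; it assumes a GPU-enabled installation (for example vbi[inference-gpu]) and is independent of vbi.test_imports().

```python
# Minimal GPU visibility check mirroring the README report, using only standard
# torch and cupy APIs. Assumes a GPU-enabled install such as vbi[inference-gpu].
import torch
import cupy

print("Torch GPU available:", torch.cuda.is_available())
print("Torch device count:", torch.cuda.device_count())
print("Torch CUDA version:", torch.version.cuda)

n_gpus = cupy.cuda.runtime.getDeviceCount()
print("CuPy device count:", n_gpus)
if n_gpus > 0:
    props = cupy.cuda.runtime.getDeviceProperties(0)
    print("Device Name:", props["name"].decode())
    print("Total Memory: %.2f GB" % (props["totalGlobalMem"] / 1024**3))
```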
@@ -7,7 +7,7 @@ from setuptools_scm import get_version
  # sys.path.insert(0, os.path.abspath("../vbi"))
  sys.path.insert(0, os.path.abspath(".."))

- needs_sphinx = "0.2.1"
+ needs_sphinx = "0.2"

  extensions = [
  "sphinx.ext.autodoc",