pydfc 1.0.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. pydfc-1.0.4/.gitattributes +1 -0
  2. pydfc-1.0.4/.github/workflows/test.yml +88 -0
  3. pydfc-1.0.4/.github/workflows/validate_cff.yml +24 -0
  4. pydfc-1.0.4/.gitignore +19 -0
  5. pydfc-1.0.4/CITATION.cff +33 -0
  6. pydfc-1.0.4/HCP_resting_state_analysis/FCS_estimate.py +149 -0
  7. pydfc-1.0.4/HCP_resting_state_analysis/dFC_assessment.py +85 -0
  8. pydfc-1.0.4/HCP_resting_state_analysis/functions/__init__.py +0 -0
  9. pydfc-1.0.4/HCP_resting_state_analysis/functions/dFC_funcs.py +4031 -0
  10. pydfc-1.0.4/HCP_resting_state_analysis/functions/post_analysis_funcs.py +1368 -0
  11. pydfc-1.0.4/HCP_resting_state_analysis/main.py +372 -0
  12. pydfc-1.0.4/HCP_resting_state_analysis/post_analysis.py +1245 -0
  13. pydfc-1.0.4/HCP_resting_state_analysis/similarity_measurement.py +58 -0
  14. pydfc-1.0.4/HCP_resting_state_analysis/test_dFC.py +360 -0
  15. pydfc-1.0.4/HCP_resting_state_analysis/visualization.py +1112 -0
  16. pydfc-1.0.4/LICENSE +21 -0
  17. pydfc-1.0.4/PKG-INFO +48 -0
  18. pydfc-1.0.4/README.rst +19 -0
  19. pydfc-1.0.4/dFC_methods_demo.ipynb +619 -0
  20. pydfc-1.0.4/multi_analysis_demo.ipynb +787 -0
  21. pydfc-1.0.4/pydfc/__init__.py +29 -0
  22. pydfc-1.0.4/pydfc/comparison/__init__.py +10 -0
  23. pydfc-1.0.4/pydfc/comparison/analytical.py +316 -0
  24. pydfc-1.0.4/pydfc/comparison/plotting.py +944 -0
  25. pydfc-1.0.4/pydfc/comparison/similarity_assessment.py +418 -0
  26. pydfc-1.0.4/pydfc/data_loader.py +351 -0
  27. pydfc-1.0.4/pydfc/dfc.py +303 -0
  28. pydfc-1.0.4/pydfc/dfc_methods/__init__.py +17 -0
  29. pydfc-1.0.4/pydfc/dfc_methods/base_dfc_method.py +312 -0
  30. pydfc-1.0.4/pydfc/dfc_methods/cap.py +148 -0
  31. pydfc-1.0.4/pydfc/dfc_methods/continuous_hmm.py +117 -0
  32. pydfc-1.0.4/pydfc/dfc_methods/discrete_hmm.py +168 -0
  33. pydfc-1.0.4/pydfc/dfc_methods/sliding_window.py +177 -0
  34. pydfc-1.0.4/pydfc/dfc_methods/sliding_window_clustr.py +260 -0
  35. pydfc-1.0.4/pydfc/dfc_methods/time_freq.py +198 -0
  36. pydfc-1.0.4/pydfc/dfc_methods/windowless.py +120 -0
  37. pydfc-1.0.4/pydfc/dfc_utils.py +1195 -0
  38. pydfc-1.0.4/pydfc/multi_analysis.py +245 -0
  39. pydfc-1.0.4/pydfc/task_utils.py +247 -0
  40. pydfc-1.0.4/pydfc/time_series.py +415 -0
  41. pydfc-1.0.4/pyproject.toml +36 -0
  42. pydfc-1.0.4/task_dFC/FCS_estimate.py +149 -0
  43. pydfc-1.0.4/task_dFC/dFC_assessment.py +102 -0
  44. pydfc-1.0.4/task_dFC/nifti_to_roi_signal.py +117 -0
  45. pydfc-1.0.4/task_dFC/validation.py +57 -0
  46. pydfc-1.0.4/tests/__init__.py +0 -0
  47. pydfc-1.0.4/tests/test_data_loader.py +35 -0
  48. pydfc-1.0.4/tox.ini +76 -0
@@ -0,0 +1 @@
1
+ *.nii.gz filter=lfs diff=lfs merge=lfs -text
@@ -0,0 +1,88 @@
1
+ name: Build and test
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+ tags:
8
+ - "*"
9
+ pull_request:
10
+ branches:
11
+ - main
12
+ # Run weekly to avoid missing deprecations during low activity
13
+ schedule:
14
+ - cron: '0 0 * * 1'
15
+ # Allow job to be triggered manually from GitHub interface
16
+ workflow_dispatch:
17
+
18
+ defaults:
19
+ run:
20
+ shell: bash
21
+
22
+ # Force tox and pytest to use color
23
+ env:
24
+ FORCE_COLOR: true
25
+
26
+ concurrency:
27
+ group: ${{ github.workflow }}-${{ github.ref }}
28
+ cancel-in-progress: true
29
+
30
+ permissions:
31
+ contents: read
32
+
33
+ jobs:
34
+ test:
35
+ # Check each OS, all supported Python, minimum versions and latest releases
36
+ runs-on: ${{ matrix.os }}
37
+ strategy:
38
+ fail-fast: false
39
+ matrix:
40
+ os: ['ubuntu-latest']
41
+ python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
42
+ include:
43
+ # Basic dependencies only
44
+ - os: ubuntu-latest
45
+ python-version: 3.8
46
+ dependencies: 'min'
47
+
48
+ env:
49
+ DEPENDS: ${{ matrix.dependencies }}
50
+ ARCH: ${{ !contains(fromJSON('["none", "min"]'), matrix.dependencies) && matrix.architecture }}
51
+
52
+ steps:
53
+ - uses: actions/checkout@v4
54
+ with:
55
+ submodules: recursive
56
+ fetch-depth: 0
57
+ - name: Set up Python ${{ matrix.python-version }}
58
+ uses: actions/setup-python@v5
59
+ with:
60
+ python-version: ${{ matrix.python-version }}
61
+ architecture: ${{ matrix.architecture }}
62
+ allow-prereleases: true
63
+ - name: Display Python version
64
+ run: python -c "import sys; print(sys.version)"
65
+ - name: Install tox
66
+ run: |
67
+ python -m pip install --upgrade pip
68
+ python -m pip install tox tox-gh-actions
69
+ - name: Show tox config
70
+ run: tox c
71
+ - name: Run tox
72
+ run: tox -v --exit-and-dump-after 1200
73
+
74
+ publish:
75
+ runs-on: ubuntu-latest
76
+ environment: "Package deployment"
77
+ needs: [test]
78
+ permissions:
79
+ # Required for trusted publishing
80
+ id-token: write
81
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
82
+ steps:
83
+ - uses: actions/checkout@v4
84
+ with:
85
+ submodules: recursive
86
+ fetch-depth: 0
87
+ - run: pipx run build
88
+ - uses: pypa/gh-action-pypi-publish@release/v1
@@ -0,0 +1,24 @@
1
+ ---
2
+ name: validate CITATION.cff
3
+
4
+ on:
5
+ push:
6
+ branches: ['*']
7
+ paths:
8
+ - CITATION.cff
9
+ - .github/workflows/validate_cff.yml
10
+ pull_request:
11
+ branches: ['*']
12
+ paths:
13
+ - CITATION.cff
14
+ - .github/workflows/validate_cff.yml
15
+
16
+ jobs:
17
+ validate_cff:
18
+ runs-on: ubuntu-latest
19
+ steps:
20
+ - uses: actions/checkout@v4
21
+ - name: Check whether the citation metadata from CITATION.cff is valid
22
+ uses: citation-file-format/cffconvert-github-action@2.0.0
23
+ with:
24
+ args: --validate
pydfc-1.0.4/.gitignore ADDED
@@ -0,0 +1,19 @@
1
+ **/.DS_Store
2
+ __pycache__
3
+ *.pyc
4
+ *.cpython
5
+ sample_data/sub-0001_task-restingstate_acq-mb3_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz
6
+ sample_data/sub-0001_task-restingstate_acq-mb3_desc-confounds_regressors.tsv
7
+ sample_data/sub-0002_task-restingstate_acq-mb3_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz
8
+ sample_data/sub-0002_task-restingstate_acq-mb3_desc-confounds_regressors.tsv
9
+ sample_data/sub-0003_task-restingstate_acq-mb3_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz
10
+ sample_data/sub-0003_task-restingstate_acq-mb3_desc-confounds_regressors.tsv
11
+ sample_data/sub-0004_task-restingstate_acq-mb3_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz
12
+ sample_data/sub-0004_task-restingstate_acq-mb3_desc-confounds_regressors.tsv
13
+ sample_data/sub-0005_task-restingstate_acq-mb3_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz
14
+ sample_data/sub-0005_task-restingstate_acq-mb3_desc-confounds_regressors.tsv
15
+
16
+ # build related
17
+ pydfc.egg-info
18
+ build
19
+ dist/
@@ -0,0 +1,33 @@
1
+ cff-version: 1.2.0
2
+
3
+ title: "pydfc"
4
+
5
+ version: 1.0.4
6
+
7
+ abstract:
8
+ "An implementation of several well-known dynamic Functional Connectivity assessment methods."
9
+
10
+ message: "If you use this software, please cite it as below."
11
+
12
+ repository-code: "https://github.com/neurodatascience/dFC.git"
13
+
14
+
15
+ contact:
16
+ - affiliation: "McGill University, Québec, Canada"
17
+ email: mohammad.torabi@mail.mcgill.ca
18
+ family-names: Torabi
19
+ given-names: Mohammad
20
+
21
+ authors:
22
+ - family-names: "Torabi"
23
+ given-names: "Mohammad"
24
+ orcid: "https://orcid.org/0000-0002-4429-8481"
25
+ affiliation: "Biological and Biomedical Engineering program, McGill University, Québec, Canada"
26
+
27
+ license: MIT
28
+
29
+ keywords:
30
+ - dynamic functional connectivity
31
+ - analytical flexibility
32
+ - neuroimaging
33
+ - reproducibility
@@ -0,0 +1,149 @@
1
+ from functions.dFC_funcs import *
2
+ import numpy as np
3
+ import time
4
+ import hdf5storage
5
+ import scipy.io as sio
6
+ import os
7
+ os.environ["MKL_NUM_THREADS"] = '64'
8
+ os.environ["NUMEXPR_NUM_THREADS"] = '64'
9
+ os.environ["OMP_NUM_THREADS"] = '64'
10
+
11
+ print('################################# CODE started running ... #################################')
12
+
13
+ ################################# Parameters #################################
14
+
15
+ ###### DATA PARAMETERS ######
16
+
17
+ output_root = './'
18
+
19
+ # DATA_type is either 'sample' or 'Gordon' or 'simulated' or 'ICA'
20
+ params_data_load = {
21
+ 'DATA_type': 'Gordon',
22
+ 'SESSIONs':['Rest1_LR' , 'Rest1_RL', 'Rest2_LR', 'Rest2_RL'],
23
+ 'networks2include':['Auditory', 'CinguloOperc', 'Default', 'DorsalAttn', 'FrontoParietal',
24
+ 'MedialParietal', 'ParietoOccip', 'SMhand', 'SMmouth',
25
+ 'Salience', 'VentralAttn', 'Visual'],
26
+
27
+ 'data_root_simul': './../../../../DATA/TVB data/',
28
+ 'data_root_sample': './sampleDATA/',
29
+ 'data_root_gordon': './../../../../DATA/HCP/HCP_Gordon/',
30
+ 'data_root_ica': './../../../../DATA/HCP/HCP_PTN1200/node_timeseries/3T_HCP1200_MSMAll_d50_ts2/'
31
+ }
32
+
33
+ ###### MEASUREMENT PARAMETERS ######
34
+
35
+ # W is in sec
36
+
37
+ params_methods = {
38
+ # Sliding Parameters
39
+ 'W': 44, 'n_overlap': 0.5, 'sw_method':'pear_corr', 'tapered_window':True,
40
+ # TIME_FREQ
41
+ 'TF_method':'WTC',
42
+ # CLUSTERING AND DHMM
43
+ 'clstr_base_measure':'SlidingWindow',
44
+ # HMM
45
+ 'hmm_iter': 30, 'dhmm_obs_state_ratio': 16/24,
46
+ # State Parameters
47
+ 'n_states': 12, 'n_subj_clstrs': 20,
48
+ # Parallelization Parameters
49
+ 'n_jobs': 2, 'verbose': 0, 'backend': 'loky',
50
+ # SESSION
51
+ 'session': 'Rest1_LR',
52
+ # Hyper Parameters
53
+ 'normalization': True,
54
+ 'num_subj': 395,
55
+ 'num_select_nodes': 96,
56
+ 'num_time_point': 1200,
57
+ 'Fs_ratio': 1.00,
58
+ 'noise_ratio': 0.00,
59
+ 'num_realization': 1
60
+ }
61
+
62
+ ###### HYPER PARAMETERS ALTERNATIVE ######
63
+
64
+ MEASURES_name_lst = [
65
+ 'SlidingWindow',
66
+ 'Time-Freq',
67
+ 'CAP',
68
+ 'ContinuousHMM',
69
+ 'Windowless',
70
+ 'Clustering',
71
+ 'DiscreteHMM'
72
+ ]
73
+
74
+ alter_hparams = { \
75
+ 'session': ['Rest1_RL', 'Rest2_LR', 'Rest2_RL'],
76
+ # 'n_overlap': [0, 0.25, 0.75, 1],
77
+ # 'n_states': [6, 16],
78
+ # # 'normalization': [],
79
+ # 'num_subj': [50, 100, 200],
80
+ # 'num_select_nodes': [30, 50, 333],
81
+ # 'num_time_point': [800, 1000],
82
+ # 'Fs_ratio': [0.50, 0.75, 1.5],
83
+ # 'noise_ratio': [1.00, 2.00, 3.00],
84
+ # 'num_realization': []
85
+ }
86
+
87
+ ###### dFC ANALYZER PARAMETERS ######
88
+
89
+ params_dFC_analyzer = {
90
+ # Parallelization Parameters
91
+ 'n_jobs': None, 'verbose': 0, 'backend': 'loky'
92
+ }
93
+
94
+
95
+ ################################# LOAD DATA #################################
96
+
97
+ data_loader = DATA_LOADER(**params_data_load)
98
+ BOLD = data_loader.load()
99
+
100
+ ################################# Visualize BOLD #################################
101
+
102
+ # for session in BOLD:
103
+ # BOLD[session].visualize(start_time=0, end_time=50, nodes_lst=list(range(10)), \
104
+ # save_image=params_dFC_analyzer['save_image'], output_root=output_root+'BOLD_signal_'+session)
105
+
106
+ ################################# Measures of dFC #################################
107
+
108
+ dFC_analyzer = DFC_ANALYZER(
109
+ analysis_name='reproducibility assessment',
110
+ **params_dFC_analyzer
111
+ )
112
+
113
+ MEASURES_lst = dFC_analyzer.measures_initializer(
114
+ MEASURES_name_lst,
115
+ params_methods,
116
+ alter_hparams
117
+ )
118
+
119
+ tic = time.time()
120
+ print('Measurement Started ...')
121
+
122
+ ################################# estimate FCS #################################
123
+
124
+ task_id = int(os.getenv("SGE_TASK_ID"))
125
+ MEASURE_id = task_id-1 # SGE_TASK_ID starts from 1 not 0
126
+
127
+
128
+ if MEASURE_id >= len(MEASURES_lst):
129
+ print("MEASURE_id out of MEASURES_lst ")
130
+ else:
131
+ measure = MEASURES_lst[MEASURE_id]
132
+
133
+ print("FCS estimation started...")
134
+
135
+ time_series = BOLD[measure.params['session']]
136
+ if measure.is_state_based:
137
+ measure.estimate_FCS(time_series=time_series)
138
+
139
+ # dFC_analyzer.estimate_group_FCS(time_series_dict=BOLD)
140
+ print("FCS estimation done.")
141
+
142
+ print('Measurement required %0.3f seconds.' % (time.time() - tic, ))
143
+
144
+ # Save
145
+ np.save(output_root+'fitted_MEASURES/MEASURE_'+str(MEASURE_id)+'.npy', measure)
146
+ np.save(output_root+'dFC_analyzer.npy', dFC_analyzer)
147
+ np.save(output_root+'data_loader.npy', data_loader)
148
+
149
+ #################################################################################
@@ -0,0 +1,85 @@
1
+ from functions.dFC_funcs import *
2
+ import numpy as np
3
+ import time
4
+ import hdf5storage
5
+ import scipy.io as sio
6
+ import os
7
+ os.environ["MKL_NUM_THREADS"] = '64'
8
+ os.environ["NUMEXPR_NUM_THREADS"] = '64'
9
+ os.environ["OMP_NUM_THREADS"] = '64'
10
+
11
+ print('################################# subject-level dFC assessment CODE started running ... #################################')
12
+
13
+ ################################# Parameters #################################
14
+ ###### DATA PARAMETERS ######
15
+
16
+ input_root = './'
17
+ output_root = './'
18
+
19
+ ################################# LOAD #################################
20
+
21
+ dFC_analyzer = np.load(input_root+'dFC_analyzer.npy',allow_pickle='TRUE').item()
22
+ data_loader = np.load(input_root+'data_loader.npy',allow_pickle='TRUE').item()
23
+
24
+ ################################# LOAD FIT MEASURES #################################
25
+
26
+ if dFC_analyzer.MEASURES_fit_lst==[]:
27
+ ALL_RECORDS = os.listdir(input_root+'fitted_MEASURES/')
28
+ ALL_RECORDS = [i for i in ALL_RECORDS if 'MEASURE' in i]
29
+ ALL_RECORDS.sort()
30
+ MEASURES_fit_lst = list()
31
+ for s in ALL_RECORDS:
32
+ fit_measure = np.load(input_root+'fitted_MEASURES/'+s, allow_pickle='TRUE').item()
33
+ MEASURES_fit_lst.append(fit_measure)
34
+ dFC_analyzer.set_MEASURES_fit_lst(MEASURES_fit_lst)
35
+ print('fitted MEASURES loaded ...')
36
+ # np.save('./dFC_analyzer.npy', dFC_analyzer)
37
+
38
+ ################################# LOAD DATA #################################
39
+
40
+ task_id = int(os.getenv("SGE_TASK_ID"))
41
+ subj_id = data_loader.SUBJECTS[task_id-1] # SGE_TASK_ID starts from 1 not 0
42
+
43
+ BOLD = data_loader.load(subj_id2load=subj_id)
44
+
45
+ ################################# dFC ASSESSMENT #################################
46
+
47
+ tic = time.time()
48
+ print('Measurement Started ...')
49
+
50
+ print("dFCM estimation started...")
51
+ dFCM_dict = dFC_analyzer.subj_lvl_dFC_assess(time_series_dict=BOLD)
52
+ print("dFCM estimation done.")
53
+
54
+ print('Measurement required %0.3f seconds.' % (time.time() - tic, ))
55
+
56
+ ################################# SAVE DATA #################################
57
+
58
+ # folder = output_root+'dFC_assessed/SUBJ_'+str(subj_id)
59
+ # if not os.path.exists(folder):
60
+ # os.makedirs(folder)
61
+
62
+ # for dFCM_id, dFCM in enumerate(dFCM_dict['dFCM_lst']):
63
+ # np.save(folder+'/dFCM_'+str(dFCM_id)+'.npy', dFCM)
64
+
65
+ ################################# SIMILARITY MEASUREMENT #################################
66
+
67
+ similarity_assessment = SIMILARITY_ASSESSMENT(dFCM_lst=dFCM_dict['dFCM_lst'])
68
+
69
+ tic = time.time()
70
+ print('Measurement Started ...')
71
+
72
+ print("Similarity measurement started...")
73
+ SUBJ_output = similarity_assessment.run(FILTERS=dFC_analyzer.hyper_param_info, downsampling_method='default')
74
+ print("Similarity measurement done.")
75
+
76
+ print('Measurement required %0.3f seconds.' % (time.time() - tic, ))
77
+
78
+ # Save
79
+ folder = output_root+'similarity_measured'
80
+ if not os.path.exists(folder):
81
+ os.makedirs(folder)
82
+
83
+ np.save(folder+'/SUBJ_'+str(subj_id)+'_output.npy', SUBJ_output)
84
+
85
+ #################################################################################