py-neuromodulation 0.0.4__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
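The file-level part of such a comparison can be reproduced locally from the two wheel archives. Below is a minimal sketch, assuming both wheels have already been downloaded; the file names are placeholders for the local paths. It only recovers which files were added or removed; the per-file line counts listed further down come from diffing the extracted file contents.

    import zipfile

    # Placeholder paths to the locally downloaded wheel archives.
    OLD_WHEEL = "py_neuromodulation-0.0.4-py3-none-any.whl"
    NEW_WHEEL = "py_neuromodulation-0.0.5-py3-none-any.whl"

    def wheel_files(path: str) -> set[str]:
        # A wheel is a zip archive; namelist() returns every packaged file.
        with zipfile.ZipFile(path) as wheel:
            return set(wheel.namelist())

    old_files = wheel_files(OLD_WHEEL)
    new_files = wheel_files(NEW_WHEEL)

    print("added:  ", sorted(new_files - old_files))
    print("removed:", sorted(old_files - new_files))
    print("common: ", len(old_files & new_files))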
Files changed (80)
  1. py_neuromodulation/ConnectivityDecoding/_get_grid_hull.m +34 -34
  2. py_neuromodulation/ConnectivityDecoding/_get_grid_whole_brain.py +95 -106
  3. py_neuromodulation/ConnectivityDecoding/_helper_write_connectome.py +107 -119
  4. py_neuromodulation/FieldTrip.py +589 -589
  5. py_neuromodulation/__init__.py +74 -13
  6. py_neuromodulation/_write_example_dataset_helper.py +83 -65
  7. py_neuromodulation/data/README +6 -6
  8. py_neuromodulation/data/dataset_description.json +8 -8
  9. py_neuromodulation/data/participants.json +32 -32
  10. py_neuromodulation/data/participants.tsv +2 -2
  11. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_coordsystem.json +5 -5
  12. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_space-mni_electrodes.tsv +11 -11
  13. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_channels.tsv +11 -11
  14. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.json +18 -18
  15. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vhdr +35 -35
  16. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/ieeg/sub-testsub_ses-EphysMedOff_task-gripforce_run-0_ieeg.vmrk +13 -13
  17. py_neuromodulation/data/sub-testsub/ses-EphysMedOff/sub-testsub_ses-EphysMedOff_scans.tsv +2 -2
  18. py_neuromodulation/grid_cortex.tsv +40 -40
  19. py_neuromodulation/liblsl/libpugixml.so.1.12 +0 -0
  20. py_neuromodulation/liblsl/linux/bionic_amd64/liblsl.1.16.2.so +0 -0
  21. py_neuromodulation/liblsl/linux/bookworm_amd64/liblsl.1.16.2.so +0 -0
  22. py_neuromodulation/liblsl/linux/focal_amd46/liblsl.1.16.2.so +0 -0
  23. py_neuromodulation/liblsl/linux/jammy_amd64/liblsl.1.16.2.so +0 -0
  24. py_neuromodulation/liblsl/linux/jammy_x86/liblsl.1.16.2.so +0 -0
  25. py_neuromodulation/liblsl/linux/noble_amd64/liblsl.1.16.2.so +0 -0
  26. py_neuromodulation/liblsl/macos/amd64/liblsl.1.16.2.dylib +0 -0
  27. py_neuromodulation/liblsl/macos/arm64/liblsl.1.16.0.dylib +0 -0
  28. py_neuromodulation/liblsl/windows/amd64/liblsl.1.16.2.dll +0 -0
  29. py_neuromodulation/liblsl/windows/x86/liblsl.1.16.2.dll +0 -0
  30. py_neuromodulation/nm_IO.py +413 -417
  31. py_neuromodulation/nm_RMAP.py +496 -531
  32. py_neuromodulation/nm_analysis.py +993 -1074
  33. py_neuromodulation/nm_artifacts.py +30 -25
  34. py_neuromodulation/nm_bispectra.py +154 -168
  35. py_neuromodulation/nm_bursts.py +292 -198
  36. py_neuromodulation/nm_coherence.py +251 -205
  37. py_neuromodulation/nm_database.py +149 -0
  38. py_neuromodulation/nm_decode.py +918 -992
  39. py_neuromodulation/nm_define_nmchannels.py +300 -302
  40. py_neuromodulation/nm_features.py +144 -116
  41. py_neuromodulation/nm_filter.py +219 -219
  42. py_neuromodulation/nm_filter_preprocessing.py +79 -91
  43. py_neuromodulation/nm_fooof.py +139 -159
  44. py_neuromodulation/nm_generator.py +45 -37
  45. py_neuromodulation/nm_hjorth_raw.py +52 -73
  46. py_neuromodulation/nm_kalmanfilter.py +71 -58
  47. py_neuromodulation/nm_linelength.py +21 -33
  48. py_neuromodulation/nm_logger.py +66 -0
  49. py_neuromodulation/nm_mne_connectivity.py +149 -112
  50. py_neuromodulation/nm_mnelsl_generator.py +90 -0
  51. py_neuromodulation/nm_mnelsl_stream.py +116 -0
  52. py_neuromodulation/nm_nolds.py +96 -93
  53. py_neuromodulation/nm_normalization.py +173 -214
  54. py_neuromodulation/nm_oscillatory.py +423 -448
  55. py_neuromodulation/nm_plots.py +585 -612
  56. py_neuromodulation/nm_preprocessing.py +83 -0
  57. py_neuromodulation/nm_projection.py +370 -394
  58. py_neuromodulation/nm_rereference.py +97 -95
  59. py_neuromodulation/nm_resample.py +59 -50
  60. py_neuromodulation/nm_run_analysis.py +325 -435
  61. py_neuromodulation/nm_settings.py +289 -68
  62. py_neuromodulation/nm_settings.yaml +244 -0
  63. py_neuromodulation/nm_sharpwaves.py +423 -401
  64. py_neuromodulation/nm_stats.py +464 -480
  65. py_neuromodulation/nm_stream.py +398 -0
  66. py_neuromodulation/nm_stream_abc.py +166 -218
  67. py_neuromodulation/nm_types.py +193 -0
  68. {py_neuromodulation-0.0.4.dist-info → py_neuromodulation-0.0.5.dist-info}/METADATA +29 -26
  69. py_neuromodulation-0.0.5.dist-info/RECORD +83 -0
  70. {py_neuromodulation-0.0.4.dist-info → py_neuromodulation-0.0.5.dist-info}/WHEEL +1 -1
  71. {py_neuromodulation-0.0.4.dist-info → py_neuromodulation-0.0.5.dist-info}/licenses/LICENSE +21 -21
  72. py_neuromodulation/nm_EpochStream.py +0 -92
  73. py_neuromodulation/nm_across_patient_decoding.py +0 -927
  74. py_neuromodulation/nm_cohortwrapper.py +0 -435
  75. py_neuromodulation/nm_eval_timing.py +0 -239
  76. py_neuromodulation/nm_features_abc.py +0 -39
  77. py_neuromodulation/nm_settings.json +0 -338
  78. py_neuromodulation/nm_stream_offline.py +0 -359
  79. py_neuromodulation/utils/_logging.py +0 -24
  80. py_neuromodulation-0.0.4.dist-info/RECORD +0 -72
py_neuromodulation/nm_across_patient_decoding.py (file removed in 0.0.5)
@@ -1,927 +0,0 @@
-import numpy as np
-import os
-import pandas as pd
-from sklearn import metrics, linear_model, model_selection
-
-import py_neuromodulation
-from py_neuromodulation import nm_decode, nm_RMAP
-
-
-class AcrossPatientRunner:
-    def __init__(
-        self,
-        outpath: str,
-        model=linear_model.LogisticRegression(class_weight="balanced"),
-        TRAIN_VAL_SPLIT=False,
-        eval_method=metrics.balanced_accuracy_score,
-        cv_method=model_selection.KFold(n_splits=3, shuffle=False),
-        VERBOSE=False,
-        use_nested_cv=True,
-        RUN_BAY_OPT=False,
-        ML_model_name="LM",
-        cohorts: list = None,
-        load_channel_all: bool = False,
-        load_grid_point_all: bool = False,
-    ) -> None:
-
-        self.outpath = outpath
-        self.model = model
-        self.TRAIN_VAL_SPLIT = TRAIN_VAL_SPLIT
-        self.use_nested_cv = use_nested_cv
-        self.RUN_BAY_OPT = RUN_BAY_OPT
-        self.VERBOSE = VERBOSE
-        self.eval_method = eval_method
-        self.ML_model_name = ML_model_name
-        self.cv_method = cv_method
-        self.cohorts = cohorts
-
-        self.grid_cortex = pd.read_csv(
-            os.path.join(py_neuromodulation.__path__[0], "grid_cortex.tsv"),
-            sep="\t",
-        ).to_numpy()
-
-        self.RMAPSelector = nm_RMAP.RMAPChannelSelector()
-
-        if load_channel_all is True:
-            self.ch_all = np.load(
-                os.path.join(self.outpath, "channel_all.npy"),
-                allow_pickle="TRUE",
-            ).item()
-        if load_grid_point_all is True:
-            self.grid_point_all = np.load(
-                os.path.join(self.outpath, "grid_point_all.npy"),
-                allow_pickle="TRUE",
-            ).item()
-
-    def init_decoder(self) -> nm_decode.Decoder:
-
-        return nm_decode.Decoder(
-            model=self.model,
-            TRAIN_VAL_SPLIT=self.TRAIN_VAL_SPLIT,
-            get_movement_detection_rate=True,
-            eval_method=self.eval_method,
-            VERBOSE=self.VERBOSE,
-            cv_method=self.cv_method,
-            use_nested_cv=self.use_nested_cv,
-            RUN_BAY_OPT=self.RUN_BAY_OPT,
-        )
-
-    def eval_model(self, X_train, y_train, X_test, y_test):
-
-        return self.decoder.wrapper_model_train(
-            X_train,
-            y_train,
-            X_test,
-            y_test,
-            cv_res=nm_decode.CV_res(get_movement_detection_rate=True),
-        )
-
-    @staticmethod
-    def get_data_sub_ch(channel_all, cohort, sub, ch):
-
-        X_train = []
-        y_train = []
-
-        for f in channel_all[cohort][sub][ch].keys():
-            X_train.append(channel_all[cohort][sub][ch][f]["data"])
-            y_train.append(channel_all[cohort][sub][ch][f]["label"])
-        if len(X_train) > 1:
-            X_train = np.concatenate(X_train, axis=0)
-            y_train = np.concatenate(y_train, axis=0)
-        else:
-            X_train = X_train[0]
-            y_train = y_train[0]
-
-        return X_train, y_train
-
-    def get_patients_train_dict(self, sub_test, cohort_test, val_approach: str, data_select: dict):
-        cohorts_train = {}
-        for cohort in self.cohorts:
-            if val_approach == "leave_1_cohort_out" and cohort == cohort_test:
-                continue
-            if (
-                val_approach == "leave_1_sub_out_within_coh"
-                and cohort != cohort_test
-            ):
-                continue
-            cohorts_train[cohort] = []
-            for sub in data_select[cohort]:
-                if (
-                    val_approach == "leave_1_sub_out_within_coh"
-                    and sub == sub_test
-                    and cohort == cohort_test
-                ):
-                    continue
-                if (
-                    val_approach == "leave_1_sub_out_across_coh"
-                    and sub == sub_test
-                ):
-                    continue
-                cohorts_train[cohort].append(sub)
-        return cohorts_train
-
-    def get_data_grid_point(
-        self, sub_test: str, cohort_test: str, best_gp_list: list
-    ):
-        for gp in best_gp_list:
-            if gp in self.grid_point_all[cohort_test][sub_test]:
-                X_test, y_test = self.get_data_sub_ch(
-                    self.grid_point_all, cohort_test, sub_test, gp
-                )
-                break
-            else:
-                continue
-        return X_test, y_test
-
-    def get_data_channels(self, sub_test: str, cohort_test: str, df_rmap: list):
-        ch_test = df_rmap.query("cohort == @cohort_test and sub == @sub_test")[
-            "ch"
-        ].iloc[0]
-        X_test, y_test = self.get_data_sub_ch(
-            self.ch_all, cohort_test, sub_test, ch_test
-        )
-        return X_test, y_test
-
-    def cross_val_approach_RMAP(
-        self,
-        val_approach: str = "leave_1_cohort_out",
-        df_select: pd.DataFrame = None,
-        select_best_gp: bool = False,
-        add_UPDRS: bool = False,
-        df_updrs: pd.DataFrame = None,
-    ):
-
-        if select_best_gp is True:
-            best_gp_list = list(
-                df_select.sort_values("performance_test", ascending=False)["ch"]
-            )
-            data_select = self.grid_point_all
-        else:
-            data_select = self.ch_all
-
-        p_ = {}
-        for cohort_test in self.cohorts:
-            print(cohort_test)
-            if cohort_test not in p_:
-                p_[cohort_test] = {}
-            for sub_test in data_select[cohort_test].keys():
-                print(sub_test)
-                if sub_test not in p_[cohort_test]:
-                    p_[cohort_test][sub_test] = {}
-                if select_best_gp is True:
-                    X_test, y_test = self.get_data_grid_point(
-                        sub_test, cohort_test, best_gp_list
-                    )
-                else:
-                    X_test, y_test = self.get_data_channels(
-                        sub_test, cohort_test, df_rmap=df_select
-                    )
-
-                if add_UPDRS is True:
-                    updrs = df_updrs.query(
-                        "sub == @sub_test and cohort == @cohort_test"
-                    ).iloc[0]["UPDRS_total"]
-                    if np.isnan(updrs):
-                        continue
-                    X_test = np.concatenate(
-                        (
-                            X_test,
-                            np.expand_dims(
-                                np.repeat(updrs, repeats=X_test.shape[0]),
-                                axis=1,
-                            ),
-                        ),
-                        axis=1,
-                    )
-
-                cohorts_train = self.get_patients_train_dict(
-                    sub_test, cohort_test, val_approach=val_approach, data_select=data_select
-                )
-
-                X_train_comb = []
-                y_train_comb = []
-                for cohort_train in list(cohorts_train.keys()):
-                    for sub_train in cohorts_train[cohort_train]:
-                        if select_best_gp is True:
-                            X_train, y_train = self.get_data_grid_point(
-                                sub_train, cohort_train, best_gp_list
-                            )
-                        else:
-                            X_train, y_train = self.get_data_channels(
-                                sub_train, cohort_train, df_rmap=df_select
-                            )
-                        if add_UPDRS is True:
-                            updrs = df_updrs.query(
-                                "sub == @sub_train and cohort == @cohort_train"
-                            ).iloc[0]["UPDRS_total"]
-                            if np.isnan(
-                                updrs
-                            ): # the returned True is here not boolean but np boolean
-                                continue
-                            X_train = np.concatenate(
-                                (
-                                    X_train,
-                                    np.expand_dims(
-                                        np.repeat(
-                                            updrs, repeats=X_train.shape[0]
-                                        ),
-                                        axis=1,
-                                    ),
-                                ),
-                                axis=1,
-                            )
-
-                        X_train_comb.append(X_train)
-                        y_train_comb.append(y_train)
-                if len(X_train_comb) > 1:
-                    X_train = np.concatenate(X_train_comb, axis=0)
-                    y_train = np.concatenate(y_train_comb, axis=0)
-                else:
-                    X_train = X_train_comb[0]
-                    y_train = X_train_comb[0]
-
-                self.decoder = self.init_decoder()
-
-                # X_train, y_train, X_test, y_test = self.decoder.append_samples_val(X_train, y_train, X_test, y_test, 5)
-
-                model = self.decoder.wrapper_model_train(
-                    X_train=X_train,
-                    y_train=y_train,
-                    return_fitted_model_only=True,
-                )
-                cv_res = self.decoder.eval_model(
-                    model,
-                    X_train,
-                    X_test,
-                    y_train,
-                    y_test,
-                    cv_res=nm_decode.CV_res(get_movement_detection_rate=True),
-                    save_data=False,
-                )
-                p_[cohort_test][sub_test] = cv_res
-        np.save(
-            os.path.join(
-                self.outpath,
-                self.ML_model_name + f"_performance_{val_approach}_RMAP.npy",
-            ),
-            p_,
-        )
-
-    def cross_val_p2p_RMAP(
-        self,
-        df_select: pd.DataFrame = None,
-        select_best_gp: bool = False,
-    ):
-        if select_best_gp is True:
-            best_gp_list = list(
-                df_select.sort_values("performance_test", ascending=False)["ch"]
-            )
-            data_select = self.grid_point_all
-        else:
-            data_select = self.ch_all
-
-        p_ = {}
-        for cohort_test in self.cohorts:
-            print(cohort_test)
-            if cohort_test not in p_:
-                p_[cohort_test] = {}
-            for sub_test in data_select[cohort_test].keys():
-                print(sub_test)
-                if sub_test not in p_[cohort_test]:
-                    p_[cohort_test][sub_test] = {}
-
-                if select_best_gp is True:
-                    X_test, y_test = self.get_data_grid_point(
-                        sub_test, cohort_test, best_gp_list
-                    )
-                else:
-                    X_test, y_test = self.get_data_channels(
-                        sub_test, cohort_test, df_rmap=df_select
-                    )
-                for cohort_train in self.cohorts:
-                    if cohort_train not in p_[cohort_test][sub_test]:
-                        p_[cohort_test][sub_test][cohort_train] = {}
-                    for sub_train in list(data_select[cohort_train].keys()):
-                        if (
-                            sub_train
-                            not in p_[cohort_test][sub_test][cohort_train]
-                        ):
-                            p_[cohort_test][sub_test][cohort_train][
-                                sub_train
-                            ] = {}
-
-                        if select_best_gp is True:
-                            X_train, y_train = self.get_data_grid_point(
-                                sub_train, cohort_train, best_gp_list
-                            )
-                        else:
-                            X_train, y_train = self.get_data_channels(
-                                sub_train, cohort_train, df_rmap=df_select
-                            )
-                        self.decoder = self.init_decoder()
-
-                        # X_train, y_train, X_test, y_test = self.decoder.append_samples_val(X_train, y_train, X_test, y_test, 5)
-
-                        model = self.decoder.wrapper_model_train(
-                            X_train=X_train,
-                            y_train=y_train,
-                            return_fitted_model_only=True,
-                        )
-                        cv_res = self.decoder.eval_model(
-                            model,
-                            X_train,
-                            X_test,
-                            y_train,
-                            y_test,
-                            cv_res=nm_decode.CV_res(
-                                get_movement_detection_rate=True
-                            ),
-                            save_data=False,
-                        )
-                        p_[cohort_test][sub_test][cohort_train][
-                            sub_train
-                        ] = cv_res
-
-        np.save(
-            os.path.join(
-                self.outpath,
-                self.ML_model_name + f"_performance_p2p_RMAP.npy",
-            ),
-            p_,
-        )
-
-    def leave_one_patient_out_RMAP(
-        self, val_approach: str = "leave_1_cohort_out"
-    ):
-        p_ = {}
-        for cohort_test in self.cohorts:
-            if cohort_test not in p_:
-                p_[cohort_test] = {}
-            for sub_test in self.ch_all[cohort_test].keys():
-                if sub_test not in p_:
-                    p_[cohort_test][sub_test] = {}
-                for ch_test in self.ch_all[cohort_test][sub_test].keys():
-                    if ch_test not in p_[cohort_test][sub_test]:
-                        p_[cohort_test][sub_test][ch_test] = {}
-
-                    cohorts_train = self.get_patients_train_dict(
-                        sub_test, cohort_test, val_approach=val_approach
-                    )
-
-                    (
-                        cohort_train,
-                        sub_train,
-                        ch_train,
-                    ) = self.RMAPSelector.get_highest_corr_sub_ch(
-                        cohort_test,
-                        sub_test,
-                        ch_test,
-                        cohorts_train,
-                        path_dir=r"C:\Users\ICN_admin\OneDrive - Charité - Universitätsmedizin Berlin\Connectomics\DecodingToolbox_BerlinPittsburgh_Beijing\functional_connectivity",
-                    )
-
-                    X_train, y_train = self.get_data_sub_ch(
-                        self.ch_all, cohort_train, sub_train, ch_train
-                    )
-                    X_test, y_test = self.get_data_sub_ch(
-                        self.ch_all, cohort_test, sub_test, ch_test
-                    )
-
-                    self.decoder = self.init_decoder()
-
-                    model = self.decoder.wrapper_model_train(
-                        X_train=X_train,
-                        y_train=y_train,
-                        return_fitted_model_only=True,
-                    )
-                    cv_res = self.decoder.eval_model(
-                        model,
-                        X_train,
-                        X_test,
-                        y_train,
-                        y_test,
-                        cv_res=nm_decode.CV_res(
-                            get_movement_detection_rate=True
-                        ),
-                        save_data=False,
-                        append_samples=True,
-                    )
-                    p_[cohort_test][sub_test][ch_test] = cv_res
-        np.save(
-            os.path.join(
-                self.outpath,
-                self.ML_model_name
-                + "_performance_leave_one_cohort_out_RMAP.npy",
-            ),
-            p_,
-        )
-
-    def run_cohort_leave_one_patient_out_CV_within_cohort(self):
-
-        grid_point_all = np.load(
-            os.path.join(self.outpath, "grid_point_all.npy"),
-            allow_pickle="TRUE",
-        ).item()
-        performance_leave_one_patient_out = {}
-
-        for cohort in self.cohorts:
-            print("cohort: " + str(cohort))
-            performance_leave_one_patient_out[cohort] = {}
-
-            for grid_point in list(grid_point_all.keys()):
-                print("grid point: " + str(grid_point))
-                if cohort not in grid_point_all[grid_point]:
-                    continue
-                if len(list(grid_point_all[grid_point][cohort].keys())) <= 1:
-                    continue # cannot do leave one out prediction with a single subject
-                performance_leave_one_patient_out[cohort][grid_point] = {}
-
-                for subject_test in list(
-                    grid_point_all[grid_point][cohort].keys()
-                ):
-                    X_test = []
-                    y_test = []
-                    for run in list(
-                        grid_point_all[grid_point][cohort][subject_test].keys()
-                    ):
-                        if (
-                            grid_point_all[grid_point][cohort][subject_test][
-                                run
-                            ]["lat"]
-                            != "CON"
-                        ):
-                            continue
-                        X_test.append(
-                            grid_point_all[grid_point][cohort][subject_test][
-                                run
-                            ]["data"]
-                        )
-                        y_test.append(
-                            grid_point_all[grid_point][cohort][subject_test][
-                                run
-                            ]["label"]
-                        )
-                    if len(X_test) > 1:
-                        X_test = np.concatenate(X_test, axis=0)
-                        y_test = np.concatenate(y_test, axis=0)
-                    else:
-                        X_test = X_test[0]
-                        y_test = y_test[0]
-                    X_train = []
-                    y_train = []
-                    for subject_train in list(
-                        grid_point_all[grid_point][cohort].keys()
-                    ):
-                        if subject_test == subject_train:
-                            continue
-                        for run in list(
-                            grid_point_all[grid_point][cohort][
-                                subject_train
-                            ].keys()
-                        ):
-                            if (
-                                grid_point_all[grid_point][cohort][
-                                    subject_train
-                                ][run]["lat"]
-                                != "CON"
-                            ):
-                                continue
-                            X_train.append(
-                                grid_point_all[grid_point][cohort][
-                                    subject_train
-                                ][run]["data"]
-                            )
-                            y_train.append(
-                                grid_point_all[grid_point][cohort][
-                                    subject_train
-                                ][run]["label"]
-                            )
-                    if len(X_train) > 1:
-                        X_train = np.concatenate(X_train, axis=0)
-                        y_train = np.concatenate(y_train, axis=0)
-                    else:
-                        X_train = X_train[0]
-                        y_train = y_train[0]
-
-                    # run here ML estimation
-                    self.decoder = self.init_decoder()
-                    model = self.decoder.wrapper_model_train(
-                        X_train=X_train,
-                        y_train=y_train,
-                        return_fitted_model_only=True,
-                    )
-                    # use initialized decoder
-                    try:
-                        cv_res = self.eval_model(
-                            X_train, y_train, X_test, y_test
-                        )
-                    except nm_decode.Decoder.ClassMissingException:
-                        continue
-
-                    performance_leave_one_patient_out[cohort][grid_point][
-                        subject_test
-                    ] = cv_res
-
-        performance_leave_one_patient_out["grid_cortex"] = self.grid_cortex
-        np.save(
-            os.path.join(
-                self.outpath,
-                self.ML_model_name
-                + "_performance_leave_one_patient_out_within_cohort.npy",
-            ),
-            performance_leave_one_patient_out,
-        )
-        return performance_leave_one_patient_out
-
-    def run_cohort_leave_one_cohort_out_CV(self):
-        grid_point_all = np.load(
-            os.path.join(self.outpath, "grid_point_all.npy"),
-            allow_pickle="TRUE",
-        ).item()
-        performance_leave_one_cohort_out = {}
-
-        for cohort_test in self.cohorts:
-            print("cohort: " + str(cohort_test))
-            if cohort_test not in performance_leave_one_cohort_out:
-                performance_leave_one_cohort_out[cohort_test] = {}
-
-            for grid_point in list(grid_point_all.keys()):
-                print("grid point: " + str(grid_point))
-                if cohort_test not in grid_point_all[grid_point]:
-                    continue
-                if len(list(grid_point_all[grid_point].keys())) == 1:
-                    continue # cannot do leave one cohort prediction with a single cohort
-
-                X_train = []
-                y_train = []
-                for cohort_train in self.cohorts:
-                    if cohort_test == cohort_train:
-                        continue
-                    if cohort_train not in grid_point_all[grid_point]:
-                        continue
-                    for subject_test in list(
-                        grid_point_all[grid_point][cohort_train].keys()
-                    ):
-                        for run in list(
-                            grid_point_all[grid_point][cohort_train][
-                                subject_test
-                            ].keys()
-                        ):
-                            if (
-                                grid_point_all[grid_point][cohort_train][
-                                    subject_test
-                                ][run]["lat"]
-                                != "CON"
-                            ):
-                                continue
-                            X_train.append(
-                                grid_point_all[grid_point][cohort_train][
-                                    subject_test
-                                ][run]["data"]
-                            )
-                            y_train.append(
-                                grid_point_all[grid_point][cohort_train][
-                                    subject_test
-                                ][run]["label"]
-                            )
-                if len(X_train) > 1:
-                    X_train = np.concatenate(X_train, axis=0)
-                    y_train = np.concatenate(y_train, axis=0)
-                else:
-                    X_train = X_train[0]
-                    y_train = y_train[0]
-
-                # run here ML estimation
-                self.decoder = self.init_decoder()
-                model = self.decoder.wrapper_model_train(
-                    X_train=X_train,
-                    y_train=y_train,
-                    return_fitted_model_only=True,
-                )
-
-                performance_leave_one_cohort_out[cohort_test][grid_point] = {}
-                for subject_test in list(
-                    grid_point_all[grid_point][cohort_test].keys()
-                ):
-                    X_test = []
-                    y_test = []
-                    for run in list(
-                        grid_point_all[grid_point][cohort_test][
-                            subject_test
-                        ].keys()
-                    ):
-                        if (
-                            grid_point_all[grid_point][cohort_test][
-                                subject_test
-                            ][run]["lat"]
-                            != "CON"
-                        ):
-                            continue
-                        X_test.append(
-                            grid_point_all[grid_point][cohort_test][
-                                subject_test
-                            ][run]["data"]
-                        )
-                        y_test.append(
-                            grid_point_all[grid_point][cohort_test][
-                                subject_test
-                            ][run]["label"]
-                        )
-                    if len(X_test) > 1:
-                        X_test = np.concatenate(X_test, axis=0)
-                        y_test = np.concatenate(y_test, axis=0)
-                    else:
-                        X_test = X_test[0]
-                        y_test = y_test[0]
-
-                    cv_res = self.decoder.eval_model(
-                        model,
-                        X_train,
-                        X_test,
-                        y_train,
-                        y_test,
-                        cv_res=nm_decode.CV_res(),
-                    )
-
-                    performance_leave_one_cohort_out[cohort_test][grid_point][
-                        subject_test
-                    ] = cv_res
-
-        performance_leave_one_cohort_out["grid_cortex"] = self.grid_cortex
-        np.save(
-            os.path.join(
-                self.outpath,
-                self.ML_model_name + "_performance_leave_one_cohort_out.npy",
-            ),
-            performance_leave_one_cohort_out,
-        )
-
-    def run_leave_one_patient_out_across_cohorts(self):
-
-        grid_point_all = np.load(
-            os.path.join(self.outpath, "grid_point_all.npy"),
-            allow_pickle="TRUE",
-        ).item()
-        performance_leave_one_patient_out = {}
-
-        for grid_point in list(grid_point_all.keys()):
-            print("grid point: " + str(grid_point))
-            for cohort in self.cohorts:
-                print("cohort: " + str(cohort))
-                if cohort not in performance_leave_one_patient_out:
-                    performance_leave_one_patient_out[cohort] = {}
-
-                if cohort not in grid_point_all[grid_point]:
-                    continue
-                if len(list(grid_point_all[grid_point][cohort].keys())) <= 1:
-                    continue # cannot do leave one out prediction with a single subject
-
-                if grid_point not in performance_leave_one_patient_out[cohort]:
-                    performance_leave_one_patient_out[cohort][grid_point] = {}
-
-                for subject_test in list(
-                    grid_point_all[grid_point][cohort].keys()
-                ):
-                    X_test = []
-                    y_test = []
-                    for run in list(
-                        grid_point_all[grid_point][cohort][subject_test].keys()
-                    ):
-                        if (
-                            grid_point_all[grid_point][cohort][subject_test][
-                                run
-                            ]["lat"]
-                            != "CON"
-                        ):
-                            continue
-                        X_test.append(
-                            grid_point_all[grid_point][cohort][subject_test][
-                                run
-                            ]["data"]
-                        )
-                        y_test.append(
-                            grid_point_all[grid_point][cohort][subject_test][
-                                run
-                            ]["label"]
-                        )
-                    if len(X_test) > 1:
-                        X_test = np.concatenate(X_test, axis=0)
-                        y_test = np.concatenate(y_test, axis=0)
-                    else:
-                        X_test = X_test[0]
-                        y_test = y_test[0]
-                    X_train = []
-                    y_train = []
-                    for cohort_inner in list(
-                        grid_point_all[grid_point].keys()
-                    ): # available cohorts for that grid point
-                        for subject_train in list(
-                            grid_point_all[grid_point][cohort_inner].keys()
-                        ):
-                            if (subject_test == subject_train) and (
-                                cohort_inner == cohort
-                            ):
-                                continue
-                            for run in list(
-                                grid_point_all[grid_point][cohort_inner][
-                                    subject_train
-                                ].keys()
-                            ):
-                                if (
-                                    grid_point_all[grid_point][cohort_inner][
-                                        subject_train
-                                    ][run]["lat"]
-                                    != "CON"
-                                ):
-                                    continue
-                                X_train.append(
-                                    grid_point_all[grid_point][cohort_inner][
-                                        subject_train
-                                    ][run]["data"]
-                                )
-                                y_train.append(
-                                    grid_point_all[grid_point][cohort_inner][
-                                        subject_train
-                                    ][run]["label"]
-                                )
-                    if len(X_train) > 1:
-                        X_train = np.concatenate(X_train, axis=0)
-                        y_train = np.concatenate(y_train, axis=0)
-                    else:
-                        X_train = X_train[0]
-                        y_train = y_train[0]
-
-                    self.decoder = self.init_decoder()
-                    try:
-                        cv_res = self.eval_model(
-                            X_train, y_train, X_test, y_test
-                        )
-                    except nm_decode.Decoder.ClassMissingException:
-                        continue
-
-                    performance_leave_one_patient_out[cohort][grid_point][
-                        subject_test
-                    ] = cv_res
-
-        performance_leave_one_patient_out["grid_cortex"] = self.grid_cortex
-        np.save(
-            os.path.join(
-                self.outpath,
-                self.ML_model_name
-                + "_performance_leave_one_patient_out_across_cohorts.npy",
-            ),
-            performance_leave_one_patient_out,
-        )
-
-    def run_leave_nminus1_patient_out_across_cohorts(self):
-
-        grid_point_all = np.load(
-            os.path.join(self.outpath, "grid_point_all_re.npy"),
-            allow_pickle="TRUE",
-        ).item()
-        performance_leave_one_patient_out = {}
-
-        for grid_point in list(grid_point_all.keys()):
-            print("grid point: " + str(grid_point))
-            for cohort_train in self.cohorts:
-                print("cohort: " + str(cohort_train))
-                if cohort_train not in performance_leave_one_patient_out:
-                    performance_leave_one_patient_out[cohort_train] = {}
-
-                if cohort_train not in grid_point_all[grid_point]:
-                    continue
-                if (
-                    len(list(grid_point_all[grid_point][cohort_train].keys()))
-                    <= 1
-                ):
-                    continue # cannot do leave one out prediction with a single subject
-                if (
-                    grid_point
-                    not in performance_leave_one_patient_out[cohort_train]
-                ):
-                    performance_leave_one_patient_out[cohort_train][
-                        grid_point
-                    ] = {}
-
-                for subject_train in list(
-                    grid_point_all[grid_point][cohort_train].keys()
-                ):
-                    X_train = []
-                    y_train = []
-                    for run in list(
-                        grid_point_all[grid_point][cohort_train][
-                            subject_train
-                        ].keys()
-                    ):
-                        if (
-                            grid_point_all[grid_point][cohort_train][
-                                subject_train
-                            ][run]["lat"]
-                            != "CON"
-                        ):
-                            continue
-                        X_train.append(
-                            grid_point_all[grid_point][cohort_train][
-                                subject_train
-                            ][run]["data"]
-                        )
-                        y_train.append(
-                            grid_point_all[grid_point][cohort_train][
-                                subject_train
-                            ][run]["label"]
-                        )
-                    if len(X_train) > 1:
-                        X_train = np.concatenate(X_train, axis=0)
-                        y_train = np.concatenate(y_train, axis=0)
-                    else:
-                        X_train = X_train[0]
-                        y_train = y_train[0]
-
-                    for cohort_test in list(grid_point_all[grid_point].keys()):
-                        for subject_test in list(
-                            grid_point_all[grid_point][cohort_test].keys()
-                        ):
-                            if (subject_test == subject_train) and (
-                                cohort_test == cohort_train
-                            ):
-                                continue
-                            X_test = []
-                            y_test = []
-                            for run in list(
-                                grid_point_all[grid_point][cohort_test][
-                                    subject_test
-                                ].keys()
-                            ):
-                                if (
-                                    grid_point_all[grid_point][cohort_test][
-                                        subject_test
-                                    ][run]["lat"]
-                                    != "CON"
-                                ):
-                                    continue
-                                X_test.append(
-                                    grid_point_all[grid_point][cohort_test][
-                                        subject_test
-                                    ][run]["data"]
-                                )
-                                y_test.append(
-                                    grid_point_all[grid_point][cohort_test][
-                                        subject_test
-                                    ][run]["label"]
-                                )
-                            if len(X_test) > 1:
-                                X_test = np.concatenate(X_test, axis=0)
-                                y_test = np.concatenate(y_test, axis=0)
-                            else:
-                                X_test = X_test[0]
-                                y_test = y_test[0]
-
-                            self.decoder = self.init_decoder()
-                            try:
-                                cv_res = self.eval_model(
-                                    X_train, y_train, X_test, y_test
-                                )
-                            except nm_decode.Decoder.ClassMissingException:
-                                continue
-
-                            if (
-                                subject_train
-                                not in performance_leave_one_patient_out[
-                                    cohort_train
-                                ][grid_point]
-                            ):
-                                performance_leave_one_patient_out[cohort_train][
-                                    grid_point
-                                ][subject_train] = {}
-                            if (
-                                cohort_test
-                                not in performance_leave_one_patient_out[
-                                    cohort_train
-                                ][grid_point][subject_train]
-                            ):
-                                performance_leave_one_patient_out[cohort_train][
-                                    grid_point
-                                ][subject_train][cohort_test] = {}
-                            if (
-                                subject_test
-                                not in performance_leave_one_patient_out[
-                                    cohort_train
-                                ][grid_point][subject_train][cohort_test]
-                            ):
-                                performance_leave_one_patient_out[cohort_train][
-                                    grid_point
-                                ][subject_train][cohort_test][subject_test] = {}
-
-                            performance_leave_one_patient_out[cohort_train][
-                                grid_point
-                            ][subject_train][cohort_test][subject_test] = cv_res
-
-        performance_leave_one_patient_out["grid_cortex"] = self.grid_cortex
-        np.save(
-            os.path.join(
-                self.outpath,
-                self.ML_model_name
-                + "_performance_leave_nminus1_patient_out_across_cohorts.npy",
-            ),
-            performance_leave_one_patient_out,
-        )
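For orientation when reading the removed module above: its methods walk nested dictionaries keyed as cohort -> subject -> channel (or grid point) -> recording, with "data" and "label" arrays at the leaves, as loaded from channel_all.npy / grid_point_all.npy. The following toy sketch illustrates that layout and the per-recording concatenation performed by the removed get_data_sub_ch helper; the dictionary keys and array shapes are made up for illustration only.

    import numpy as np

    # Made-up stand-in for the channel_all.npy structure the removed class loaded:
    # cohort -> subject -> channel -> recording -> {"data", "label"}.
    channel_all = {
        "cohort_a": {
            "sub-000": {
                "ECOG_L_1": {
                    "run-0": {"data": np.random.rand(100, 6), "label": np.zeros(100)},
                    "run-1": {"data": np.random.rand(80, 6), "label": np.ones(80)},
                }
            }
        }
    }

    # Same per-recording concatenation that get_data_sub_ch applied when
    # more than one recording was present for a channel.
    recordings = channel_all["cohort_a"]["sub-000"]["ECOG_L_1"]
    X = np.concatenate([rec["data"] for rec in recordings.values()], axis=0)
    y = np.concatenate([rec["label"] for rec in recordings.values()], axis=0)
    print(X.shape, y.shape)  # (180, 6) (180,)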