junifer-0.0.5.dev110-py3-none-any.whl → junifer-0.0.5.dev131-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. junifer/_version.py +2 -2
  2. junifer/api/tests/test_api_utils.py +2 -0
  3. junifer/data/tests/test_masks.py +1 -1
  4. junifer/datagrabber/pattern.py +2 -1
  5. junifer/external/BrainPrint/brainprint/__init__.py +4 -0
  6. junifer/external/BrainPrint/brainprint/_version.py +3 -0
  7. junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
  8. junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
  9. junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
  10. junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
  11. junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
  12. junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
  13. junifer/markers/__init__.py +1 -0
  14. junifer/markers/brainprint.py +662 -0
  15. junifer/markers/reho/_afni_reho.py +1 -1
  16. junifer/markers/reho/_junifer_reho.py +1 -1
  17. junifer/markers/reho/reho_parcels.py +2 -2
  18. junifer/markers/reho/reho_spheres.py +2 -2
  19. junifer/markers/tests/test_brainprint.py +47 -0
  20. {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev131.dist-info}/METADATA +2 -1
  21. {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev131.dist-info}/RECORD +26 -16
  22. {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev131.dist-info}/AUTHORS.rst +0 -0
  23. {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev131.dist-info}/LICENSE.md +0 -0
  24. {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev131.dist-info}/WHEEL +0 -0
  25. {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev131.dist-info}/entry_points.txt +0 -0
  26. {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev131.dist-info}/top_level.txt +0 -0
junifer/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '0.0.5.dev110'
- __version_tuple__ = version_tuple = (0, 0, 5, 'dev110')
+ __version__ = version = '0.0.5.dev131'
+ __version_tuple__ = version_tuple = (0, 0, 5, 'dev131')
junifer/api/tests/test_api_utils.py CHANGED
@@ -45,6 +45,7 @@ def test_get_dependency_information_short() -> None:
          "httpx",
          "tqdm",
          "templateflow",
+         "lapy",
          "looseversion",
      ]

@@ -74,6 +75,7 @@ def test_get_dependency_information_long() -> None:
          "httpx",
          "tqdm",
          "templateflow",
+         "lapy",
      ]
      for key in dependency_list:
          assert key in dependency_information_keys
junifer/data/tests/test_masks.py CHANGED
@@ -350,7 +350,7 @@ def test_get_mask_errors() -> None:
      with pytest.raises(ValueError, match=r"callable params"):
          get_mask(masks={"GM_prob0.2": {"param": 1}}, target_data=vbm_gm)

-     # Pass only parametesr to the intersection function
+     # Pass only parameters to the intersection function
      with pytest.raises(
          ValueError, match=r" At least one mask is required."
      ):
junifer/datagrabber/pattern.py CHANGED
@@ -6,6 +6,7 @@
  # License: AGPL

  import re
+ from copy import deepcopy
  from pathlib import Path
  from typing import Dict, List, Optional, Tuple, Union

@@ -364,7 +365,7 @@ class PatternDataGrabber(BaseDataGrabber):
              # Data type dictionary
              t_pattern = self.patterns[t_type]
              # Copy data type dictionary in output
-             out[t_type] = t_pattern.copy()
+             out[t_type] = deepcopy(t_pattern)
              # Iterate to check for nested "types" like mask
              for k, v in t_pattern.items():
                  # Resolve pattern for base data type
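The switch from copy() to deepcopy() matters because a pattern entry can itself contain nested dictionaries (for example a nested "mask" specification); a shallow copy shares those inner dictionaries with self.patterns, so resolving placeholders in the output would silently mutate the grabber's own templates. A minimal sketch of the difference, using illustrative toy data rather than junifer code:

from copy import deepcopy

pattern = {
    "pattern": "{subject}/func/bold.nii.gz",
    "mask": {"pattern": "{subject}/func/mask.nii.gz"},
}

shallow = pattern.copy()
shallow["mask"]["pattern"] = "sub-01/func/mask.nii.gz"
print(pattern["mask"]["pattern"])  # "sub-01/..." -- the nested dict was shared

original = {
    "pattern": "{subject}/func/bold.nii.gz",
    "mask": {"pattern": "{subject}/func/mask.nii.gz"},
}
deep = deepcopy(original)
deep["mask"]["pattern"] = "sub-01/func/mask.nii.gz"
print(original["mask"]["pattern"])  # still "{subject}/..." -- fully independent copy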
junifer/external/BrainPrint/brainprint/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from ._version import __version__  # noqa: F401
+ from .brainprint import compute_asymmetry, compute_brainprint  # noqa: F401
+ from .surfaces import surf_to_vtk  # noqa: F401
+ from .utils._config import sys_info  # noqa: F401
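This __init__.py defines the public surface of the vendored package. Assuming the intermediate junifer.external.BrainPrint directory is itself importable as a package (its own __init__.py is not shown in this diff), the exported names could be pulled in roughly like this:

# Hypothetical import path; the exact dotted path depends on how junifer
# exposes the vendored BrainPrint subpackage.
from junifer.external.BrainPrint.brainprint import (
    compute_asymmetry,
    compute_brainprint,
    surf_to_vtk,
    sys_info,
)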
junifer/external/BrainPrint/brainprint/_version.py ADDED
@@ -0,0 +1,3 @@
+ """Version number."""
+
+ __version__ = "0.4.0"
junifer/external/BrainPrint/brainprint/asymmetry.py ADDED
@@ -0,0 +1,91 @@
+ """
+ Contains asymmetry estimation functionality.
+ """
+ from typing import Dict
+
+ import numpy as np
+ from lapy import shapedna
+
+
+ def compute_asymmetry(
+     eigenvalues, distance: str = "euc", skip_cortex: bool = False
+ ) -> Dict[str, float]:
+     """
+     Compute lateral shape distances from BrainPrint analysis results.
+
+     Parameters
+     ----------
+     eigenvalues : Dict[str, np.ndarray]
+         BrainPrint analysis results.
+     distance : str, optional
+         ShapeDNA distance, by default "euc".
+     skip_cortex : bool, optional
+         Whether to skip white matter and pial surfaces, by default False.
+
+     Returns
+     -------
+     Dict[str, float]
+         Mapping of "{left_label}_{right_label}" keys to distance.
+     """
+     # Define structures
+
+     # combined and individual aseg labels:
+     # - Left Striatum: left Caudate + Putamen + Accumbens
+     # - Right Striatum: right Caudate + Putamen + Accumbens
+     # - CorpusCallosum: 5 subregions combined
+     # - Cerebellum: brainstem + (left+right) cerebellum WM and GM
+     # - Ventricles: (left+right) lat.vent + inf.lat.vent + choroidplexus + 3rdVent + CSF
+     # - Lateral-Ventricle: lat.vent + inf.lat.vent + choroidplexus
+     # - 3rd-Ventricle: 3rd-Ventricle + CSF
+
+     structures_left_right = [
+         ("Left-Striatum", "Right-Striatum"),
+         ("Left-Lateral-Ventricle", "Right-Lateral-Ventricle"),
+         (
+             "Left-Cerebellum-White-Matter",
+             "Right-Cerebellum-White-Matter",
+         ),
+         ("Left-Cerebellum-Cortex", "Right-Cerebellum-Cortex"),
+         ("Left-Thalamus-Proper", "Right-Thalamus-Proper"),
+         ("Left-Caudate", "Right-Caudate"),
+         ("Left-Putamen", "Right-Putamen"),
+         ("Left-Pallidum", "Right-Pallidum"),
+         ("Left-Hippocampus", "Right-Hippocampus"),
+         ("Left-Amygdala", "Right-Amygdala"),
+         ("Left-Accumbens-area", "Right-Accumbens-area"),
+         ("Left-VentralDC", "Right-VentralDC"),
+     ]
+
+     cortex_2d_left_right = [
+         ("lh-white-2d", "rh-white-2d"),
+         ("lh-pial-2d", "rh-pial-2d"),
+     ]
+
+     structures = structures_left_right
+     if not skip_cortex:
+         structures += cortex_2d_left_right
+
+     distances = dict()
+     for left_label, right_label in structures:
+         left_eigenvalues, right_eigenvalues = (
+             eigenvalues[left_label][2:],
+             eigenvalues[right_label][2:],
+         )
+         has_nan = np.isnan(left_eigenvalues).any() or np.isnan(right_eigenvalues).any()
+         key = f"{left_label}_{right_label}"
+         if has_nan:
+             message = (
+                 "NaNs found for {left_label} or {right_label}, "
+                 "skipping asymmetry computation...".format(
+                     left_label=left_label, right_label=right_label
+                 )
+             )
+             print(message)
+             distances[key] = np.nan
+         else:
+             distances[key] = shapedna.compute_distance(
+                 left_eigenvalues,
+                 right_eigenvalues,
+                 dist=distance,
+             )
+     return distances
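As a rough, self-contained illustration of the input compute_asymmetry expects: each label maps to a vector that mimics compute_brainprint output, i.e. surface area and volume followed by the eigenvalues, which is why the first two entries are sliced off. The toy data and the dotted import path below are assumptions (the path presumes the vendored directory layout maps directly onto a package path); real eigenvalues come from FreeSurfer-derived surfaces.

import numpy as np
from junifer.external.BrainPrint.brainprint.asymmetry import compute_asymmetry

pairs = [
    ("Left-Striatum", "Right-Striatum"),
    ("Left-Lateral-Ventricle", "Right-Lateral-Ventricle"),
    ("Left-Cerebellum-White-Matter", "Right-Cerebellum-White-Matter"),
    ("Left-Cerebellum-Cortex", "Right-Cerebellum-Cortex"),
    ("Left-Thalamus-Proper", "Right-Thalamus-Proper"),
    ("Left-Caudate", "Right-Caudate"),
    ("Left-Putamen", "Right-Putamen"),
    ("Left-Pallidum", "Right-Pallidum"),
    ("Left-Hippocampus", "Right-Hippocampus"),
    ("Left-Amygdala", "Right-Amygdala"),
    ("Left-Accumbens-area", "Right-Accumbens-area"),
    ("Left-VentralDC", "Right-VentralDC"),
]
rng = np.random.default_rng(0)
# Toy vectors of length 52: [area, volume, 50 eigenvalues] per structure label.
eigenvalues = {label: rng.random(52) for pair in pairs for label in pair}
# skip_cortex=True so no lh/rh white or pial surfaces are required.
distances = compute_asymmetry(eigenvalues, distance="euc", skip_cortex=True)
# distances maps "Left-X_Right-X" to the Euclidean distance between eigenvalue vectors.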
junifer/external/BrainPrint/brainprint/brainprint.py ADDED
@@ -0,0 +1,441 @@
+ """
+ Definition of the brainprint analysis execution functions.
+ """
+
+ import shutil
+ import warnings
+ from pathlib import Path
+ from typing import Dict, Tuple, Union
+
+ import numpy as np
+ from lapy import TriaMesh, shapedna
+
+ from . import __version__
+ from .asymmetry import compute_asymmetry
+ from .surfaces import create_surfaces, read_vtk
+ from .utils.utils import (
+     create_output_paths,
+     export_brainprint_results,
+     test_freesurfer,
+     validate_environment,
+     validate_subject_dir,
+ )
+
+ warnings.filterwarnings("ignore", ".*negative int.*")
+
+
+ def apply_eigenvalues_options(
+     eigenvalues: np.ndarray,
+     triangular_mesh: TriaMesh,
+     norm: str = "none",
+     reweight: bool = False,
+ ) -> np.ndarray:
+     """
+     Apply BrainPrint analysis configuration options to the ShapeDNA eigenvalues.
+
+     Parameters
+     ----------
+     eigenvalues : np.ndarray
+         ShapeDNA derived eigenvalues.
+     triangular_mesh : TriaMesh
+         Surface representation.
+     norm : str, optional
+         Eigenvalues normalization method (default is "none").
+     reweight : bool, optional
+         Whether to reweight eigenvalues or not (default is False).
+
+     Returns
+     -------
+     np.ndarray
+         Fixed eigenvalues.
+     """
+     if not triangular_mesh.is_oriented():
+         triangular_mesh.orient_()
+     if norm != "none":
+         eigenvalues = shapedna.normalize_ev(
+             geom=triangular_mesh,
+             evals=eigenvalues,
+             method=norm,
+         )
+     if reweight:
+         eigenvalues = shapedna.reweight_ev(eigenvalues)
+     return eigenvalues
+
+
+ def compute_surface_brainprint(
+     path: Path,
+     return_eigenvectors: bool = True,
+     num: int = 50,
+     norm: str = "none",
+     reweight: bool = False,
+     use_cholmod: bool = False,
+ ) -> Tuple[np.ndarray, Union[np.ndarray, None]]:
+     """
+     Compute BrainPrint eigenvalues and eigenvectors for the given surface.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the *.vtk* surface file.
+     return_eigenvectors : bool, optional
+         Whether to store eigenvectors in the result (default is True).
+     num : int, optional
+         Number of eigenvalues to compute (default is 50).
+     norm : str, optional
+         Eigenvalues normalization method (default is "none").
+     reweight : bool, optional
+         Whether to reweight eigenvalues or not (default is False).
+     use_cholmod : bool, optional
+         If True, attempts to use the Cholesky decomposition for improved execution
+         speed. Requires the ``scikit-sparse`` library. If it can not be found, an error
+         will be thrown.
+         If False, will use slower LU decomposition. This is the default.
+
+     Returns
+     -------
+     Tuple[np.ndarray, Union[np.ndarray, None]]
+         Eigenvalues, eigenvectors (if returned).
+     """
+     triangular_mesh = read_vtk(path)
+     shape_dna = shapedna.compute_shapedna(
+         triangular_mesh,
+         k=num,
+         lump=False,
+         aniso=None,
+         aniso_smooth=10,
+         use_cholmod=use_cholmod,
+     )
+
+     eigenvectors = None
+     if return_eigenvectors:
+         eigenvectors = shape_dna["Eigenvectors"]
+
+     eigenvalues = shape_dna["Eigenvalues"]
+     eigenvalues = apply_eigenvalues_options(
+         eigenvalues, triangular_mesh, norm, reweight
+     )
+     eigenvalues = np.concatenate(
+         (
+             np.array(triangular_mesh.area(), ndmin=1),
+             np.array(triangular_mesh.volume(), ndmin=1),
+             eigenvalues,
+         )
+     )
+     return eigenvalues, eigenvectors
+
+
+ def compute_brainprint(
+     surfaces: Dict[str, Path],
+     keep_eigenvectors: bool = False,
+     num: int = 50,
+     norm: str = "none",
+     reweight: bool = False,
+     use_cholmod: bool = False,
+ ) -> Tuple[Dict[str, np.ndarray], Union[Dict[str, np.ndarray], None]]:
+     """
+     Compute ShapeDNA descriptors over several surfaces.
+
+     Parameters
+     ----------
+     surfaces : Dict[str, Path]
+         Dictionary mapping from labels to *.vtk* paths.
+     keep_eigenvectors : bool, optional
+         Whether to also return eigenvectors or not, by default False.
+     num : int, optional
+         Number of eigenvalues to compute, by default 50.
+     norm : str, optional
+         Eigenvalues normalization method, by default "none".
+     reweight : bool, optional
+         Whether to reweight eigenvalues or not, by default False.
+     use_cholmod : bool, optional
+         If True, attempts to use the Cholesky decomposition for improved execution
+         speed. Requires the ``scikit-sparse`` library. If it can not be found, an error
+         will be thrown. If False, will use slower LU decomposition. This is the default.
+
+     Returns
+     -------
+     Tuple[Dict[str, np.ndarray], Union[Dict[str, np.ndarray], None]]
+         Surface label to eigenvalues, surface label to eigenvectors (if
+         *keep_eigenvectors* is True).
+     """
+     eigenvalues = dict()
+     eigenvectors = dict() if keep_eigenvectors else None
+     for surface_label, surface_path in surfaces.items():
+         try:
+             (
+                 surface_eigenvalues,
+                 surface_eigenvectors,
+             ) = compute_surface_brainprint(
+                 surface_path,
+                 num=num,
+                 norm=norm,
+                 reweight=reweight,
+                 return_eigenvectors=keep_eigenvectors,
+                 use_cholmod=use_cholmod,
+             )
+         except Exception as e:
+             message = (
+                 "BrainPrint analysis raised the following exception:\n"
+                 "{exception}".format(exception=e)
+             )
+             warnings.warn(message)
+             eigenvalues[surface_label] = ["NaN"] * (num + 2)
+         else:
+             if len(surface_eigenvalues) == 0:
+                 eigenvalues[surface_label] = ["NaN"] * (num + 2)
+             else:
+                 eigenvalues[surface_label] = surface_eigenvalues
+             if keep_eigenvectors:
+                 eigenvectors[surface_label] = surface_eigenvectors
+     return eigenvalues, eigenvectors
+
+
+ def run_brainprint(
+     subjects_dir: Path,
+     subject_id: str,
+     destination: Path = None,
+     num: int = 50,
+     skip_cortex: bool = False,
+     keep_eigenvectors: bool = False,
+     norm: str = "none",
+     reweight: bool = False,
+     asymmetry: bool = False,
+     asymmetry_distance: str = "euc",
+     keep_temp: bool = False,
+     use_cholmod: bool = False,
+ ):
+     """
+     Run the BrainPrint analysis.
+
+     Parameters
+     ----------
+     subjects_dir : Path
+         FreeSurfer's subjects directory.
+     subject_id : str
+         The subject identifier, as defined within the FreeSurfer's subjects
+         directory.
+     destination : Path, optional
+         If provided, will use this path as the results root directory, by
+         default None.
+     num : int, optional
+         Number of eigenvalues to compute, by default 50.
+     skip_cortex : bool, optional
+         Whether to skip white matter and pial surfaces, by default False.
+     keep_eigenvectors : bool, optional
+         Whether to also return eigenvectors or not, by default False.
+     norm : str, optional
+         Eigenvalues normalization method, by default "none".
+     reweight : bool, optional
+         Whether to reweight eigenvalues or not, by default False.
+     asymmetry : bool, optional
+         Whether to calculate asymmetry between lateral structures, by default
+         False.
+     asymmetry_distance : str, optional
+         Distance measurement to use if *asymmetry* is set to True, by default
+         "euc".
+     keep_temp : bool, optional
+         Whether to keep the temporary files directory or not, by default False.
+     use_cholmod : bool, optional
+         If True, attempts to use the Cholesky decomposition for improved execution
+         speed. Requires the ``scikit-sparse`` library. If it can not be found, an error
+         will be thrown. If False, will use slower LU decomposition. This is the default.
+
+     Returns
+     -------
+     Tuple[Dict[str, np.ndarray], Union[Dict[str, np.ndarray], None], Union[Dict[str, float], None]]
+         A tuple containing dictionaries with BrainPrint analysis results.
+         - Eigenvalues
+         - Eigenvectors
+         - Distances
+     """  # noqa: E501
+     validate_environment()
+     test_freesurfer()
+     subject_dir = validate_subject_dir(subjects_dir, subject_id)
+     destination = create_output_paths(
+         subject_dir=subject_dir,
+         destination=destination,
+     )
+
+     surfaces = create_surfaces(subject_dir, destination, skip_cortex=skip_cortex)
+     eigenvalues, eigenvectors = compute_brainprint(
+         surfaces,
+         num=num,
+         norm=norm,
+         reweight=reweight,
+         keep_eigenvectors=keep_eigenvectors,
+         use_cholmod=use_cholmod,
+     )
+
+     distances = None
+     if asymmetry:
+         distances = compute_asymmetry(
+             eigenvalues,
+             distance=asymmetry_distance,
+             skip_cortex=skip_cortex,
+         )
+
+     csv_name = "{subject_id}.brainprint.csv".format(subject_id=subject_id)
+     csv_path = destination / csv_name
+     export_brainprint_results(csv_path, eigenvalues, eigenvectors, distances)
+     if not keep_temp:
+         shutil.rmtree(destination / "temp")
+     print(
+         "Returning matrices for eigenvalues, eigenvectors, and (optionally) distances."
+     )
+     print("The eigenvalue matrix contains area and volume as first two rows.")
+     return eigenvalues, eigenvectors, distances
+
+
+ class Brainprint:
+     __version__ = __version__
+
+     def __init__(
+         self,
+         subjects_dir: Path,
+         num: int = 50,
+         skip_cortex: bool = False,
+         keep_eigenvectors: bool = False,
+         norm: str = "none",
+         reweight: bool = False,
+         asymmetry: bool = False,
+         asymmetry_distance: str = "euc",
+         keep_temp: bool = False,
+         environment_validation: bool = True,
+         freesurfer_validation: bool = True,
+         use_cholmod: bool = False,
+     ) -> None:
+         """
+         Initializes a new :class:`Brainprint` instance.
+
+         Parameters
+         ----------
+         subjects_dir : Path
+             FreeSurfer's subjects directory
+         num : int, optional
+             Number of eigenvalues to compute, by default 50
+         norm : str, optional
+             Eigenvalues normalization method, by default "none"
+         reweight : bool, optional
+             Whether to reweight eigenvalues or not, by default False
+         skip_cortex : bool, optional
+             Whether to skip white matter and pial surfaces, by default False
+         keep_eigenvectors : bool, optional
+             Whether to also return eigenvectors or not, by default False
+         asymmetry : bool, optional
+             Whether to calculate asymmetry between lateral structures, by
+             default False
+         asymmetry_distance : str, optional
+             Distance measurement to use if *asymmetry* is set to True, by
+             default "euc"
+         keep_temp : bool, optional
+             Whether to keep the temporary files directory or not, by default False
+         use_cholmod : bool, optional
+             If True, attempts to use the Cholesky decomposition for improved execution
+             speed. Requires the ``scikit-sparse`` library. If it can not be found, an
+             error will be thrown. If False, will use slower LU decomposition. This is
+             the default.
+         """
+         self.subjects_dir = subjects_dir
+         self.num = num
+         self.norm = norm
+         self.skip_cortex = skip_cortex
+         self.reweight = reweight
+         self.keep_eigenvectors = keep_eigenvectors
+         self.asymmetry = asymmetry
+         self.asymmetry_distance = asymmetry_distance
+         self.keep_temp = keep_temp
+         self.use_cholmod = use_cholmod
+
+         self._subject_id = None
+         self._destination = None
+         self._eigenvalues = None
+         self._eigenvectors = None
+         self._distances = None
+
+         if environment_validation:
+             validate_environment()
+         if freesurfer_validation:
+             test_freesurfer()
+
+     def run(self, subject_id: str, destination: Path = None) -> Dict[str, Path]:
+         """
+         Run Brainprint analysis for a specified subject.
+
+         Parameters
+         ----------
+         subject_id : str
+             The ID of the subject to analyze.
+         destination : Path, optional
+             The destination directory for analysis results, by default None.
+
+         Returns
+         -------
+         Dict[str, Path]
+             A dictionary containing paths to the generated analysis results.
+         """
+         self._eigenvalues = self._eigenvectors = self._distances = None
+         subject_dir = validate_subject_dir(self.subjects_dir, subject_id)
+         destination = create_output_paths(
+             subject_dir=subject_dir,
+             destination=destination,
+         )
+
+         surfaces = create_surfaces(
+             subject_dir, destination, skip_cortex=self.skip_cortex
+         )
+         self._eigenvalues, self._eigenvectors = compute_brainprint(
+             surfaces,
+             num=self.num,
+             norm=self.norm,
+             reweight=self.reweight,
+             keep_eigenvectors=self.keep_eigenvectors,
+             use_cholmod=self.use_cholmod,
+         )
+
+         if self.asymmetry:
+             self._distances = compute_asymmetry(
+                 self._eigenvalues,
+                 distance=self.asymmetry_distance,
+                 skip_cortex=self.skip_cortex,
+             )
+
+         self.cleanup(destination=destination)
+         return self.export_results(destination=destination, subject_id=subject_id)
+
+     def export_results(self, destination: Path, subject_id: str) -> None:
+         """
+         Export Brainprint analysis results to a CSV file.
+
+         Parameters
+         ----------
+         destination : Path
+             The destination directory for analysis results.
+         subject_id : str
+             The ID of the subject being analyzed.
+
+         Returns
+         -------
+         None
+         """
+         csv_name = "{subject_id}.brainprint.csv".format(subject_id=subject_id)
+         csv_path = destination / csv_name
+         return export_brainprint_results(
+             csv_path, self._eigenvalues, self._eigenvectors, self._distances
+         )
+
+     def cleanup(self, destination: Path) -> None:
+         """
+         Clean up temporary files generated during the analysis.
+
+         Parameters
+         ----------
+         destination : Path
+             The destination directory for analysis results.
+
+         Returns
+         -------
+         None
+         """
+         if not self.keep_temp:
+             shutil.rmtree(destination / "temp")
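Taken together, the module can be driven either through the run_brainprint function or the Brainprint class. A hedged usage sketch follows; it assumes a working FreeSurfer installation, a populated subjects directory at a hypothetical path, a hypothetical subject ID, and that the vendored directory layout maps directly onto the dotted import path shown.

from pathlib import Path

from junifer.external.BrainPrint.brainprint.brainprint import (
    Brainprint,
    run_brainprint,
)

subjects_dir = Path("/data/freesurfer")  # hypothetical FreeSurfer SUBJECTS_DIR

# Functional interface: creates surfaces, computes eigenvalues/eigenvectors,
# computes left/right shape distances (asymmetry=True) and writes a CSV.
eigenvalues, eigenvectors, distances = run_brainprint(
    subjects_dir=subjects_dir,
    subject_id="sub-01",  # hypothetical subject
    num=50,
    asymmetry=True,
)

# Object-oriented interface: environment/FreeSurfer validation happens at
# construction time, run() performs the analysis and exports the per-subject CSV.
bp = Brainprint(subjects_dir=subjects_dir, num=50, asymmetry=True)
results = bp.run(subject_id="sub-01")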