AOT-biomaps 2.9.176__py3-none-any.whl → 2.9.300__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of AOT-biomaps might be problematic. Click here for more details.

Files changed (29) hide show
  1. AOT_biomaps/AOT_Acoustic/StructuredWave.py +2 -2
  2. AOT_biomaps/AOT_Acoustic/_mainAcoustic.py +11 -6
  3. AOT_biomaps/AOT_Experiment/Tomography.py +74 -4
  4. AOT_biomaps/AOT_Experiment/_mainExperiment.py +95 -55
  5. AOT_biomaps/AOT_Recon/AOT_Optimizers/DEPIERRO.py +48 -13
  6. AOT_biomaps/AOT_Recon/AOT_Optimizers/LS.py +406 -13
  7. AOT_biomaps/AOT_Recon/AOT_Optimizers/MAPEM.py +118 -38
  8. AOT_biomaps/AOT_Recon/AOT_Optimizers/MLEM.py +390 -102
  9. AOT_biomaps/AOT_Recon/AOT_Optimizers/PDHG.py +443 -12
  10. AOT_biomaps/AOT_Recon/AOT_PotentialFunctions/RelativeDifferences.py +10 -14
  11. AOT_biomaps/AOT_Recon/AOT_SparseSMatrix/SparseSMatrix_CSR.py +274 -0
  12. AOT_biomaps/AOT_Recon/AOT_SparseSMatrix/SparseSMatrix_SELL.py +331 -0
  13. AOT_biomaps/AOT_Recon/AOT_SparseSMatrix/__init__.py +2 -0
  14. AOT_biomaps/AOT_Recon/AOT_biomaps_kernels.cubin +0 -0
  15. AOT_biomaps/AOT_Recon/AlgebraicRecon.py +259 -153
  16. AOT_biomaps/AOT_Recon/AnalyticRecon.py +27 -42
  17. AOT_biomaps/AOT_Recon/BayesianRecon.py +84 -151
  18. AOT_biomaps/AOT_Recon/DeepLearningRecon.py +1 -1
  19. AOT_biomaps/AOT_Recon/PrimalDualRecon.py +162 -102
  20. AOT_biomaps/AOT_Recon/ReconEnums.py +27 -2
  21. AOT_biomaps/AOT_Recon/ReconTools.py +229 -12
  22. AOT_biomaps/AOT_Recon/__init__.py +1 -0
  23. AOT_biomaps/AOT_Recon/_mainRecon.py +72 -58
  24. AOT_biomaps/__init__.py +4 -53
  25. {aot_biomaps-2.9.176.dist-info → aot_biomaps-2.9.300.dist-info}/METADATA +2 -1
  26. aot_biomaps-2.9.300.dist-info/RECORD +47 -0
  27. aot_biomaps-2.9.176.dist-info/RECORD +0 -43
  28. {aot_biomaps-2.9.176.dist-info → aot_biomaps-2.9.300.dist-info}/WHEEL +0 -0
  29. {aot_biomaps-2.9.176.dist-info → aot_biomaps-2.9.300.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,32 @@
1
1
  from AOT_biomaps.AOT_Recon.AlgebraicRecon import AlgebraicRecon
2
- from AOT_biomaps.AOT_Recon.ReconEnums import ReconType, ProcessType
2
+ from AOT_biomaps.AOT_Recon.ReconEnums import ReconType, ProcessType, SMatrixType
3
3
  from AOT_biomaps.AOT_Recon.AOT_Optimizers import CP_KL, CP_TV
4
4
  from AOT_biomaps.AOT_Recon.ReconEnums import OptimizerType
5
5
 
6
6
  import os
7
7
  from datetime import datetime
8
8
  import numpy as np
9
+ import re
9
10
 
10
11
  class PrimalDualRecon(AlgebraicRecon):
11
12
  """
12
13
  This class implements the convex reconstruction process.
13
14
  It currently does not perform any operations but serves as a template for future implementations.
14
15
  """
15
- def __init__(self, alpha, theta=1.0, L=None, **kwargs):
16
+ def __init__(self, alpha, beta, theta=1.0, L=None, k_security=0.8, use_power_method=True, auto_alpha_gamma=0.05, apply_positivity_clamp=True, tikhonov_as_gradient=False, use_laplacian=True, laplacian_beta_scale=1.0, **kwargs):
16
17
  super().__init__(**kwargs)
17
18
  self.reconType = ReconType.Convex
18
- self.alpha = alpha # regularization parameter
19
+ self.alpha = alpha # TV regularization parameter (if None, alpha is auto-scaled)
20
+ self.beta=beta # Tikhonov regularization parameter
19
21
  self.theta = theta # relaxation parameter (between 1 and 2)
20
22
  self.L = L # norme spectrale de l'opérateur linéaire défini par les matrices P et P^T
23
+ self.k_security=k_security
24
+ self.use_power_method=use_power_method
25
+ self.auto_alpha_gamma=auto_alpha_gamma # gamma for auto alpha: alpha = gamma * data_term / tv_term
26
+ self.apply_positivity_clamp=apply_positivity_clamp
27
+ self.tikhonov_as_gradient=tikhonov_as_gradient # if True, apply -tau*2*beta*x instead of prox multiplicative
28
+ self.use_laplacian=use_laplacian # enable Laplacian (Hessian scalar) penalty
29
+ self.laplacian_beta_scale=laplacian_beta_scale # multiply beta for laplacian term if you want separate scaling
21
30
 
22
31
  def run(self, processType=ProcessType.PYTHON, withTumor=True):
23
32
  """
@@ -35,40 +44,34 @@ class PrimalDualRecon(AlgebraicRecon):
35
44
  raise NotImplementedError("CASToR convex reconstruction is not implemented yet.")
36
45
 
37
46
 
38
- def checkExistingFile(self, withTumor, date):
47
+ def checkExistingFile(self, date = None):
39
48
  """
40
49
  Check if the file already exists, based on current instance parameters.
41
50
  Returns:
42
51
  tuple: (bool: whether to save, str: the filepath)
43
52
  """
44
- date = datetime.now().strftime("%d%m")
53
+ if date is None:
54
+ date = datetime.now().strftime("%d%m")
45
55
  results_dir = os.path.join(
46
56
  self.saveDir,
47
57
  f'results_{date}_{self.optimizer.value}_Alpha_{self.alpha}_Theta_{self.theta}_L_{self.L}'
48
58
  )
49
59
  os.makedirs(results_dir, exist_ok=True)
50
60
 
51
- filename = 'reconPhantom.npy' if withTumor else 'reconLaser.npy'
52
- filepath = os.path.join(results_dir, filename)
61
+ if os.path.exists(os.path.join(results_dir,"indices.npy")):
62
+ return (True, results_dir)
53
63
 
54
- if os.path.exists(filepath):
55
- return (True, filepath)
64
+ return (False, results_dir)
56
65
 
57
- return (False, filepath)
58
-
59
-
60
-
61
- def load(self, withTumor=True, results_date=None, optimizer=None, alpha=None, theta=None, L=None, filePath=None):
66
+ def load(self, withTumor=True, results_date=None, optimizer=None, filePath=None, show_logs=True):
62
67
  """
63
- Load the reconstruction results and indices and store them in self.
68
+ Load the reconstruction results (reconPhantom or reconLaser) and indices as lists of 2D np arrays into self.
69
+ If the loaded file is a 3D array, it is split into a list of 2D arrays.
64
70
  Args:
65
- withTumor (bool): If True, loads the reconstruction with tumor; otherwise, loads the reconstruction without tumor.
66
- results_date (str): Date string (format "ddmm") to specify which results to load. If None, uses the most recent date in saveDir.
67
- optimizer (OptimizerType): Optimizer type to filter results. If None, uses the current optimizer of the instance.
68
- alpha (float): Alpha parameter to match the saved directory. If None, uses the current alpha of the instance.
69
- theta (float): Theta parameter to match the saved directory. If None, uses the current theta of the instance.
70
- L (float): L parameter to match the saved directory. If None, uses the current L of the instance.
71
- filePath (str): Optional. If provided, loads directly from this path (overrides saveDir and results_date).
71
+ withTumor: If True, loads reconPhantom (with tumor), else reconLaser (without tumor).
72
+ results_date: Date string (format "ddmm") to specify which results to load. If None, uses the most recent date in saveDir.
73
+ optimizer: Optimizer name (as string or enum) to filter results. If None, uses the current optimizer of the instance.
74
+ filePath: Optional. If provided, loads directly from this path (overrides saveDir and results_date).
72
75
  """
73
76
  if filePath is not None:
74
77
  # Mode chargement direct depuis un fichier
@@ -76,122 +79,179 @@ class PrimalDualRecon(AlgebraicRecon):
76
79
  recon_path = filePath
77
80
  if not os.path.exists(recon_path):
78
81
  raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
79
-
80
- if withTumor:
81
- self.reconPhantom = np.load(recon_path, allow_pickle=True)
82
+ # Charge les données
83
+ data = np.load(recon_path, allow_pickle=True)
84
+ # Découpe en liste de 2D si c'est un tableau 3D
85
+ if isinstance(data, np.ndarray) and data.ndim == 3:
86
+ if withTumor:
87
+ self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
88
+ else:
89
+ self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
82
90
  else:
83
- self.reconLaser = np.load(recon_path, allow_pickle=True)
84
-
85
- # Essayer de charger les indices (fichier avec suffixe "_indices.npy" ou "reconIndices.npy")
86
- base_dir, file_name = os.path.split(recon_path)
87
- file_base, _ = os.path.splitext(file_name)
88
- indices_path = os.path.join(base_dir, f"{file_base}_indices.npy")
89
- if not os.path.exists(indices_path):
90
- indices_path = os.path.join(base_dir, 'reconIndices.npy') # Alternative
91
-
91
+ # Sinon, suppose que c'est déjà une liste de 2D
92
+ if withTumor:
93
+ self.reconPhantom = data
94
+ else:
95
+ self.reconLaser = data
96
+ # Essayer de charger les indices
97
+ base_dir, _ = os.path.split(recon_path)
98
+ indices_path = os.path.join(base_dir, "indices.npy")
92
99
  if os.path.exists(indices_path):
93
- self.indices = np.load(indices_path, allow_pickle=True)
100
+ indices_data = np.load(indices_path, allow_pickle=True)
101
+ if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
102
+ self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
103
+ else:
104
+ self.indices = indices_data
94
105
  else:
95
106
  self.indices = None
96
-
97
- print(f"Loaded reconstruction results and indices from {recon_path}")
107
+ if show_logs:
108
+ print(f"Loaded reconstruction results and indices from {recon_path}")
98
109
  else:
99
110
  # Mode chargement depuis le répertoire de résultats
100
111
  if self.saveDir is None:
101
112
  raise ValueError("Save directory is not specified. Please set saveDir before loading.")
102
-
103
- # Use current optimizer if not provided
113
+ # Determine optimizer name for path matching
104
114
  opt_name = optimizer.value if optimizer is not None else self.optimizer.value
105
-
106
- # Build the directory path
115
+ # Find the most recent results directory if no date is specified
116
+ dir_pattern = f'results_*_{opt_name}'
117
+ if opt_name == OptimizerType.CP_TV.value or opt_name == OptimizerType.CP_KL.value:
118
+ dir_pattern += f'_Alpha_{self.alpha}_Theta_{self.theta}_L_{self.L}'
107
119
  if results_date is None:
108
- dir_pattern = f'results_*_{opt_name}_Alpha_{alpha if alpha is not None else self.alpha}_Theta_{theta if theta is not None else self.theta}_L_{L if L is not None else self.L}'
109
120
  dirs = [d for d in os.listdir(self.saveDir) if os.path.isdir(os.path.join(self.saveDir, d)) and dir_pattern in d]
110
121
  if not dirs:
111
122
  raise FileNotFoundError(f"No matching results directory found for pattern '{dir_pattern}' in {self.saveDir}.")
112
123
  dirs.sort(reverse=True) # Most recent first
113
124
  results_dir = os.path.join(self.saveDir, dirs[0])
114
125
  else:
115
- results_dir = os.path.join(self.saveDir, f'results_{results_date}_{opt_name}_Alpha_{alpha if alpha is not None else self.alpha}_Theta_{theta if theta is not None else self.theta}_L_{L if L is not None else self.L}')
126
+ results_dir = os.path.join(self.saveDir, f'results_{results_date}_{opt_name}')
127
+ if opt_name == OptimizerType.CP_TV.value or opt_name == OptimizerType.CP_KL.value:
128
+ results_dir += f'_Alpha_{self.alpha}_Theta_{self.theta}_L_{self.L}'
116
129
  if not os.path.exists(results_dir):
117
130
  raise FileNotFoundError(f"Directory {results_dir} does not exist.")
118
-
119
131
  # Load reconstruction results
120
132
  recon_key = 'reconPhantom' if withTumor else 'reconLaser'
121
133
  recon_path = os.path.join(results_dir, f'{recon_key}.npy')
122
134
  if not os.path.exists(recon_path):
123
135
  raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
124
-
125
- if withTumor:
126
- self.reconPhantom = np.load(recon_path, allow_pickle=True)
136
+ data = np.load(recon_path, allow_pickle=True)
137
+ # Découpe en liste de 2D si c'est un tableau 3D
138
+ if isinstance(data, np.ndarray) and data.ndim == 3:
139
+ if withTumor:
140
+ self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
141
+ else:
142
+ self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
127
143
  else:
128
- self.reconLaser = np.load(recon_path, allow_pickle=True)
129
-
130
- # Load saved indices
131
- indices_path = os.path.join(results_dir, 'reconIndices.npy')
132
- if not os.path.exists(indices_path):
133
- raise FileNotFoundError(f"No indices file found at {indices_path}.")
134
-
135
- self.indices = np.load(indices_path, allow_pickle=True)
136
-
137
- print(f"Loaded reconstruction results and indices from {results_dir}")
138
-
139
- def _convexReconPython(self, withTumor):
140
- if withTumor:
141
- y=self.experiment.AOsignal_withTumor
142
-
143
- else:
144
- y=self.experiment.AOsignal_withoutTumor
144
+ if withTumor:
145
+ self.reconPhantom = data
146
+ else:
147
+ self.reconLaser = data
148
+ # Try to load saved indices (if file exists)
149
+ indices_path = os.path.join(results_dir, 'indices.npy')
150
+ if os.path.exists(indices_path):
151
+ indices_data = np.load(indices_path, allow_pickle=True)
152
+ if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
153
+ self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
154
+ else:
155
+ self.indices = indices_data
156
+ else:
157
+ self.indices = None
158
+ if show_logs:
159
+ print(f"Loaded reconstruction results and indices from {results_dir}")
145
160
 
161
+ def _convexReconPython(self, withTumor,show_logs=True):
146
162
  if self.optimizer == OptimizerType.CP_TV:
147
163
  if withTumor:
148
164
  self.reconPhantom, self.indices = CP_TV(
149
- self.SMatrix,
150
- y=self.experiment.AOsignal_withTumor,
151
- alpha=self.alpha,
152
- theta=self.theta,
153
- numIterations=self.numIterations,
154
- isSavingEachIteration=self.isSavingEachIteration,
155
- L=self.L,
156
- withTumor=withTumor,
157
- device=None
158
- )
165
+ SMatrix = self.SMatrix,
166
+ y = self.experiment.AOsignal_withTumor,
167
+ alpha=self.alpha,
168
+ beta=self.beta,
169
+ theta=self.theta,
170
+ numIterations=self.numIterations,
171
+ isSavingEachIteration=self.isSavingEachIteration,
172
+ L=self.L,
173
+ withTumor=withTumor,
174
+ device=self.device,
175
+ max_saves=self.maxSaves,
176
+ show_logs=show_logs,
177
+ smatrixType= self.smatrixType,
178
+ k_security=self.k_security,
179
+ use_power_method=self.use_power_method,
180
+ auto_alpha_gamma=self.auto_alpha_gamma,
181
+ apply_positivity_clamp=self.apply_positivity_clamp,
182
+ tikhonov_as_gradient=self.tikhonov_as_gradient,
183
+ use_laplacian=self.use_laplacian,
184
+ laplacian_beta_scale=self.laplacian_beta_scale
185
+ )
159
186
  else:
160
187
  self.reconLaser, self.indices = CP_TV(
161
- self.SMatrix,
162
- y=self.experiment.AOsignal_withoutTumor,
163
- alpha=self.alpha,
164
- theta=self.theta,
165
- numIterations=self.numIterations,
166
- isSavingEachIteration=self.isSavingEachIteration,
167
- L=self.L,
168
- withTumor=withTumor,
169
- device=None
170
- )
188
+ SMatrix = self.SMatrix,
189
+ y = self.experiment.AOsignal_withoutTumor,
190
+ alpha=self.alpha,
191
+ beta=self.beta,
192
+ theta=self.theta,
193
+ numIterations=self.numIterations,
194
+ isSavingEachIteration=self.isSavingEachIteration,
195
+ L=self.L,
196
+ withTumor=withTumor,
197
+ device=self.device,
198
+ max_saves=self.maxSaves,
199
+ show_logs=show_logs,
200
+ smatrixType= self.smatrixType,
201
+ k_security=self.k_security,
202
+ use_power_method=self.use_power_method,
203
+ auto_alpha_gamma=self.auto_alpha_gamma,
204
+ apply_positivity_clamp=self.apply_positivity_clamp,
205
+ tikhonov_as_gradient=self.tikhonov_as_gradient,
206
+ use_laplacian=self.use_laplacian,
207
+ laplacian_beta_scale=self.laplacian_beta_scale
208
+ )
171
209
  elif self.optimizer == OptimizerType.CP_KL:
172
210
  if withTumor:
173
211
  self.reconPhantom, self.indices = CP_KL(
174
- self.SMatrix,
175
- y=self.experiment.AOsignal_withTumor,
176
- alpha=self.alpha,
177
- theta=self.theta,
178
- numIterations=self.numIterations,
179
- isSavingEachIteration=self.isSavingEachIteration,
180
- L=self.L,
181
- withTumor=withTumor,
182
- device=None
212
+ SMatrix = self.SMatrix,
213
+ y = self.experiment.AOsignal_withTumor,
214
+ alpha=self.alpha,
215
+ beta=self.beta,
216
+ theta=self.theta,
217
+ numIterations=self.numIterations,
218
+ isSavingEachIteration=self.isSavingEachIteration,
219
+ L=self.L,
220
+ withTumor=withTumor,
221
+ device=self.device,
222
+ max_saves=self.maxSaves,
223
+ show_logs=show_logs,
224
+ smatrixType= self.smatrixType,
225
+ k_security=self.k_security,
226
+ use_power_method=self.use_power_method,
227
+ auto_alpha_gamma=self.auto_alpha_gamma,
228
+ apply_positivity_clamp=self.apply_positivity_clamp,
229
+ tikhonov_as_gradient=self.tikhonov_as_gradient,
230
+ use_laplacian=self.use_laplacian,
231
+ laplacian_beta_scale=self.laplacian_beta_scale
183
232
  )
184
233
  else:
185
234
  self.reconLaser, self.indices = CP_KL(
186
- self.SMatrix,
187
- y=self.experiment.AOsignal_withoutTumor,
188
- alpha=self.alpha,
189
- theta=self.theta,
190
- numIterations=self.numIterations,
191
- isSavingEachIteration=self.isSavingEachIteration,
192
- L=self.L,
193
- withTumor=withTumor,
194
- device=None
235
+ SMatrix = self.SMatrix,
236
+ y = self.experiment.AOsignal_withoutTumor,
237
+ alpha=self.alpha,
238
+ beta=self.beta,
239
+ theta=self.theta,
240
+ numIterations=self.numIterations,
241
+ isSavingEachIteration=self.isSavingEachIteration,
242
+ L=self.L,
243
+ withTumor=withTumor,
244
+ device=self.device,
245
+ max_saves=self.maxSaves,
246
+ show_logs=show_logs,
247
+ smatrixType= self.smatrixType,
248
+ k_security=self.k_security,
249
+ use_power_method=self.use_power_method,
250
+ auto_alpha_gamma=self.auto_alpha_gamma,
251
+ apply_positivity_clamp=self.apply_positivity_clamp,
252
+ tikhonov_as_gradient=self.tikhonov_as_gradient,
253
+ use_laplacian=self.use_laplacian,
254
+ laplacian_beta_scale=self.laplacian_beta_scale
195
255
  )
196
256
  else:
197
257
  raise ValueError(f"Optimizer value must be CP_TV or CP_KL, got {self.optimizer}")
@@ -354,8 +354,33 @@ class NoiseType(Enum):
354
354
  - None: No noise is applied.
355
355
  """
356
356
  POISSON = 'poisson'
357
- """Poisson noise, typically used for emission data."""
357
+ """Poisson noise."""
358
358
  GAUSSIAN = 'gaussian'
359
- """Gaussian noise, typically used for transmission data."""
359
+ """Gaussian noise."""
360
360
  None_ = 'none'
361
361
  """No noise is applied."""
362
+
363
+ class SMatrixType(Enum):
364
+ """
365
+ Enum of sparse matrix storage formats used in reconstructions.
366
+
367
+ Available storage formats:
368
+ - DENSE: no sparsification; the full matrix is stored.
369
+ - CSR / COO: compressed sparse row / coordinate sparse formats.
370
+ - SELL: SELL-C-sigma chunked sparse format, optimized for GPUs.
371
+ """
372
+ DENSE = 'DENSE'
373
+ """No sparsing is applied."""
374
+ CSR = 'CSR'
375
+ """Sparsing based on a threshold value."""
376
+ COO = 'COO'
377
+ """Sparsing by retaining the top K values."""
378
+ SELL = 'SELL'
379
+ """Sparsing using sell C sigma method.
380
+ Optimized variant of ELLPACK, dividing the matrix into fixed-size "chunks" of `C` rows.
381
+ Non-zero elements are sorted by column within each chunk to improve memory coalescing on GPUs.
382
+ Rows are padded with zeros to align their length to the longest row in the chunk.
383
+ ** Ref : Kreutzer, M., Hager, G., Wellein, G., Fehske, H., & Bishop, A. R. (2014).
384
+ "A Unified Sparse Matrix Data Format for Efficient General Sparse Matrix-Vector Multiply on Modern Processors".
385
+ ACM Transactions on Mathematical Software, 41(2), 1–24. DOI: 10.1145/2592376.
386
+ """